1use std::cmp;
8use std::convert::Infallible as Never;
9use std::fmt::{self, Debug, Formatter};
10use std::marker::PhantomData;
11use std::ops::{Range, RangeBounds};
12
13use arrayvec::ArrayVec;
14use zerocopy::SplitByteSlice;
15
16use crate::{
17 AsFragmentedByteSlice, Buffer, BufferView, BufferViewMut, ContiguousBuffer, EmptyBuf,
18 FragmentedBuffer, FragmentedBufferMut, FragmentedBytes, FragmentedBytesMut, GrowBuffer,
19 GrowBufferMut, ParsablePacket, ParseBuffer, ParseBufferMut, ReusableBuffer, ShrinkBuffer,
20 canonicalize_range,
21};
22
/// A value that is either an `A` or a `B`.
///
/// Used throughout this module to return one of two buffer types — e.g., a
/// reused input buffer or a freshly-allocated one.
#[derive(Copy, Clone, Debug)]
pub enum Either<A, B> {
    A(A),
    B(B),
}
33
34impl<A, B> Either<A, B> {
35 pub fn map_a<AA, F: FnOnce(A) -> AA>(self, f: F) -> Either<AA, B> {
41 match self {
42 Either::A(a) => Either::A(f(a)),
43 Either::B(b) => Either::B(b),
44 }
45 }
46
47 pub fn map_b<BB, F: FnOnce(B) -> BB>(self, f: F) -> Either<A, BB> {
53 match self {
54 Either::A(a) => Either::A(a),
55 Either::B(b) => Either::B(f(b)),
56 }
57 }
58
59 pub fn unwrap_a(self) -> A {
65 match self {
66 Either::A(x) => x,
67 Either::B(_) => panic!("This `Either<A, B>` does not hold the `A` variant"),
68 }
69 }
70
71 pub fn unwrap_b(self) -> B {
77 match self {
78 Either::A(_) => panic!("This `Either<A, B>` does not hold the `B` variant"),
79 Either::B(x) => x,
80 }
81 }
82}
83
84impl<A> Either<A, A> {
85 pub fn into_inner(self) -> A {
88 match self {
89 Either::A(x) => x,
90 Either::B(x) => x,
91 }
92 }
93}
94
impl<A> Either<A, Never> {
    /// Extracts the `A` value; the `B` variant is uninhabited (`Never`), so
    /// the single-arm match is exhaustive.
    #[inline]
    pub fn into_a(self) -> A {
        match self {
            Either::A(a) => a,
        }
    }
}
104
impl<B> Either<Never, B> {
    /// Extracts the `B` value; the `A` variant is uninhabited (`Never`), so
    /// the single-arm match is exhaustive.
    #[inline]
    pub fn into_b(self) -> B {
        match self {
            Either::B(b) => b,
        }
    }
}
114
/// Invokes `$method` (with optional `$args`) on whichever value the `Either`
/// holds, delegating to the inner `A` or `B`.
macro_rules! call_method_on_either {
    ($val:expr, $method:ident, $($args:expr),*) => {
        match $val {
            Either::A(a) => a.$method($($args),*),
            Either::B(b) => b.$method($($args),*),
        }
    };
    // Zero-argument form delegates to the general form with an empty list.
    ($val:expr, $method:ident) => {
        call_method_on_either!($val, $method,)
    };
}
126
/// `Either<A, B>` is a `FragmentedBuffer` when both variants are; every
/// method delegates to whichever variant is held.
impl<A, B> FragmentedBuffer for Either<A, B>
where
    A: FragmentedBuffer,
    B: FragmentedBuffer,
{
    fn len(&self) -> usize {
        call_method_on_either!(self, len)
    }

    fn with_bytes<'a, R, F>(&'a self, f: F) -> R
    where
        F: for<'b> FnOnce(FragmentedBytes<'b, 'a>) -> R,
    {
        call_method_on_either!(self, with_bytes, f)
    }
}
149
/// Marker impl: an `Either` of two contiguous buffers is itself contiguous.
impl<A, B> ContiguousBuffer for Either<A, B>
where
    A: ContiguousBuffer,
    B: ContiguousBuffer,
{
}
156
/// Delegates all shrink operations to the held variant.
impl<A, B> ShrinkBuffer for Either<A, B>
where
    A: ShrinkBuffer,
    B: ShrinkBuffer,
{
    fn shrink<R: RangeBounds<usize>>(&mut self, range: R) {
        call_method_on_either!(self, shrink, range)
    }
    fn shrink_front(&mut self, n: usize) {
        call_method_on_either!(self, shrink_front, n)
    }
    fn shrink_back(&mut self, n: usize) {
        call_method_on_either!(self, shrink_back, n)
    }
}
172
/// Delegates parsing to the held variant.
impl<A, B> ParseBuffer for Either<A, B>
where
    A: ParseBuffer,
    B: ParseBuffer,
{
    fn parse<'a, P: ParsablePacket<&'a [u8], ()>>(&'a mut self) -> Result<P, P::Error> {
        call_method_on_either!(self, parse)
    }
    fn parse_with<'a, ParseArgs, P: ParsablePacket<&'a [u8], ParseArgs>>(
        &'a mut self,
        args: ParseArgs,
    ) -> Result<P, P::Error> {
        call_method_on_either!(self, parse_with, args)
    }
}
188
/// Delegates mutable byte access to the held variant.
impl<A, B> FragmentedBufferMut for Either<A, B>
where
    A: FragmentedBufferMut,
    B: FragmentedBufferMut,
{
    fn with_bytes_mut<'a, R, F>(&'a mut self, f: F) -> R
    where
        F: for<'b> FnOnce(FragmentedBytesMut<'b, 'a>) -> R,
    {
        call_method_on_either!(self, with_bytes_mut, f)
    }
}
201
/// Delegates mutable parsing to the held variant.
impl<A, B> ParseBufferMut for Either<A, B>
where
    A: ParseBufferMut,
    B: ParseBufferMut,
{
    fn parse_mut<'a, P: ParsablePacket<&'a mut [u8], ()>>(&'a mut self) -> Result<P, P::Error> {
        call_method_on_either!(self, parse_mut)
    }
    fn parse_with_mut<'a, ParseArgs, P: ParsablePacket<&'a mut [u8], ParseArgs>>(
        &'a mut self,
        args: ParseArgs,
    ) -> Result<P, P::Error> {
        call_method_on_either!(self, parse_with_mut, args)
    }
}
217
/// Delegates all prefix/suffix/capacity operations to the held variant.
impl<A, B> GrowBuffer for Either<A, B>
where
    A: GrowBuffer,
    B: GrowBuffer,
{
    #[inline]
    fn with_parts<'a, O, F>(&'a self, f: F) -> O
    where
        F: for<'b> FnOnce(&'a [u8], FragmentedBytes<'b, 'a>, &'a [u8]) -> O,
    {
        call_method_on_either!(self, with_parts, f)
    }
    fn capacity(&self) -> usize {
        call_method_on_either!(self, capacity)
    }
    fn prefix_len(&self) -> usize {
        call_method_on_either!(self, prefix_len)
    }
    fn suffix_len(&self) -> usize {
        call_method_on_either!(self, suffix_len)
    }
    fn grow_front(&mut self, n: usize) {
        call_method_on_either!(self, grow_front, n)
    }
    fn grow_back(&mut self, n: usize) {
        call_method_on_either!(self, grow_back, n)
    }
    fn reset(&mut self) {
        call_method_on_either!(self, reset)
    }
}
249
/// Delegates mutable part access and serialization to the held variant.
impl<A, B> GrowBufferMut for Either<A, B>
where
    A: GrowBufferMut,
    B: GrowBufferMut,
{
    fn with_parts_mut<'a, O, F>(&'a mut self, f: F) -> O
    where
        F: for<'b> FnOnce(&'a mut [u8], FragmentedBytesMut<'b, 'a>, &'a mut [u8]) -> O,
    {
        call_method_on_either!(self, with_parts_mut, f)
    }

    fn with_all_contents_mut<'a, O, F>(&'a mut self, f: F) -> O
    where
        F: for<'b> FnOnce(FragmentedBytesMut<'b, 'a>) -> O,
    {
        call_method_on_either!(self, with_all_contents_mut, f)
    }

    fn serialize<BB: PacketBuilder>(&mut self, builder: BB) {
        call_method_on_either!(self, serialize, builder)
    }
}
273
/// Delegates parse-with-view to the held variant.
impl<A, B> Buffer for Either<A, B>
where
    A: Buffer,
    B: Buffer,
{
    fn parse_with_view<'a, ParseArgs, P: ParsablePacket<&'a [u8], ParseArgs>>(
        &'a mut self,
        args: ParseArgs,
    ) -> Result<(P, &'a [u8]), P::Error> {
        call_method_on_either!(self, parse_with_view, args)
    }
}
286
287impl<A: AsRef<[u8]>, B: AsRef<[u8]>> AsRef<[u8]> for Either<A, B> {
288 fn as_ref(&self) -> &[u8] {
289 call_method_on_either!(self, as_ref)
290 }
291}
292
293impl<A: AsMut<[u8]>, B: AsMut<[u8]>> AsMut<[u8]> for Either<A, B> {
294 fn as_mut(&mut self) -> &mut [u8] {
295 call_method_on_either!(self, as_mut)
296 }
297}
298
/// A buffer backed by a contiguous byte container `B`, with a movable `body`
/// range marking the current packet body; bytes before/after the body form
/// the prefix/suffix.
#[derive(Clone, Debug)]
pub struct Buf<B> {
    // Backing storage; only `body` indexes into it are "live" body bytes.
    buf: B,
    // Current body range (indices into `buf`).
    body: Range<usize>,
}
309
310impl<B: AsRef<[u8]>> PartialEq for Buf<B> {
311 fn eq(&self, other: &Self) -> bool {
312 let self_slice = AsRef::<[u8]>::as_ref(self);
313 let other_slice = AsRef::<[u8]>::as_ref(other);
314 PartialEq::eq(self_slice, other_slice)
315 }
316}
317
// Byte-slice equality is a total equivalence relation, so `Eq` holds.
impl<B: AsRef<[u8]>> Eq for Buf<B> {}
319
320impl Buf<Vec<u8>> {
321 pub fn into_inner(self) -> Vec<u8> {
323 let Buf { mut buf, body } = self;
324 let len = body.end - body.start;
325 let _ = buf.drain(..body.start);
326 buf.truncate(len);
327 buf
328 }
329}
330
impl<B: AsRef<[u8]>> Buf<B> {
    /// Constructs a new `Buf` whose body is the given `body` range of `buf`.
    ///
    /// The range is resolved against `buf`'s length by `canonicalize_range`
    /// (presumably panicking on out-of-range bounds — confirm against its
    /// definition, which is not visible here).
    pub fn new<R: RangeBounds<usize>>(buf: B, body: R) -> Buf<B> {
        let len = buf.as_ref().len();
        Buf { buf, body: canonicalize_range(len, &body) }
    }

    /// Returns a [`BufView`] over this buffer's current body; consuming bytes
    /// from the view updates this buffer's `body` range.
    pub fn buffer_view(&mut self) -> BufView<'_> {
        BufView { buf: &self.buf.as_ref()[self.body.clone()], body: &mut self.body }
    }
}
352
impl<B: AsRef<[u8]> + AsMut<[u8]>> Buf<B> {
    /// Returns a mutable [`BufViewMut`] over this buffer's current body;
    /// consuming bytes from the view updates this buffer's `body` range.
    pub fn buffer_view_mut(&mut self) -> BufViewMut<'_> {
        BufViewMut { buf: &mut self.buf.as_mut()[self.body.clone()], body: &mut self.body }
    }
}
359
// `Buf` is a single-fragment buffer; the shared macro provides the standard
// method bodies in terms of `AsRef<[u8]>`.
impl<B: AsRef<[u8]>> FragmentedBuffer for Buf<B> {
    fragmented_buffer_method_impls!();
}
// Marker impl: `Buf` stores its body contiguously.
impl<B: AsRef<[u8]>> ContiguousBuffer for Buf<B> {}
364impl<B: AsRef<[u8]>> ShrinkBuffer for Buf<B> {
365 fn shrink<R: RangeBounds<usize>>(&mut self, range: R) {
366 let len = self.len();
367 let mut range = canonicalize_range(len, &range);
368 range.start += self.body.start;
369 range.end += self.body.start;
370 self.body = range;
371 }
372
373 fn shrink_front(&mut self, n: usize) {
374 assert!(n <= self.len());
375 self.body.start += n;
376 }
377 fn shrink_back(&mut self, n: usize) {
378 assert!(n <= self.len());
379 self.body.end -= n;
380 }
381}
/// Parses a packet out of the body via a [`BufView`], which keeps `body` in
/// sync with the bytes the parser consumes.
impl<B: AsRef<[u8]>> ParseBuffer for Buf<B> {
    fn parse_with<'a, ParseArgs, P: ParsablePacket<&'a [u8], ParseArgs>>(
        &'a mut self,
        args: ParseArgs,
    ) -> Result<P, P::Error> {
        P::parse(self.buffer_view(), args)
    }
}
390
// Mutable counterpart; the shared macro provides the standard method bodies
// in terms of `AsMut<[u8]>`.
impl<B: AsRef<[u8]> + AsMut<[u8]>> FragmentedBufferMut for Buf<B> {
    fragmented_buffer_mut_method_impls!();
}
394
/// Mutable parsing via a [`BufViewMut`].
impl<B: AsRef<[u8]> + AsMut<[u8]>> ParseBufferMut for Buf<B> {
    fn parse_with_mut<'a, ParseArgs, P: ParsablePacket<&'a mut [u8], ParseArgs>>(
        &'a mut self,
        args: ParseArgs,
    ) -> Result<P, P::Error> {
        P::parse_mut(self.buffer_view_mut(), args)
    }
}
403
impl<B: AsRef<[u8]>> GrowBuffer for Buf<B> {
    /// Calls `f` with the prefix, body (as a single fragment), and suffix.
    fn with_parts<'a, O, F>(&'a self, f: F) -> O
    where
        F: for<'b> FnOnce(&'a [u8], FragmentedBytes<'b, 'a>, &'a [u8]) -> O,
    {
        // Split the backing storage at the body's boundaries.
        let (prefix, buf) = self.buf.as_ref().split_at(self.body.start);
        let (body, suffix) = buf.split_at(self.body.end - self.body.start);
        let mut body = [&body[..]];
        f(prefix, body.as_fragmented_byte_slice(), suffix)
    }
    fn capacity(&self) -> usize {
        self.buf.as_ref().len()
    }
    fn prefix_len(&self) -> usize {
        self.body.start
    }
    fn suffix_len(&self) -> usize {
        self.buf.as_ref().len() - self.body.end
    }
    /// Grows the body forward by `n` bytes, consuming prefix; panics if the
    /// prefix is too short.
    fn grow_front(&mut self, n: usize) {
        assert!(n <= self.body.start);
        self.body.start -= n;
    }
    /// Grows the body backward by `n` bytes, consuming suffix; panics if the
    /// suffix is too short.
    fn grow_back(&mut self, n: usize) {
        assert!(n <= self.buf.as_ref().len() - self.body.end);
        self.body.end += n;
    }
}
432
impl<B: AsRef<[u8]> + AsMut<[u8]>> GrowBufferMut for Buf<B> {
    /// Calls `f` with mutable prefix, body (single fragment), and suffix.
    fn with_parts_mut<'a, O, F>(&'a mut self, f: F) -> O
    where
        F: for<'b> FnOnce(&'a mut [u8], FragmentedBytesMut<'b, 'a>, &'a mut [u8]) -> O,
    {
        let (prefix, buf) = self.buf.as_mut().split_at_mut(self.body.start);
        let (body, suffix) = buf.split_at_mut(self.body.end - self.body.start);
        let mut body = [&mut body[..]];
        f(prefix, body.as_fragmented_byte_slice(), suffix)
    }

    /// Calls `f` with the entire backing storage (prefix + body + suffix) as
    /// a single fragment.
    fn with_all_contents_mut<'a, O, F>(&'a mut self, f: F) -> O
    where
        F: for<'b> FnOnce(FragmentedBytesMut<'b, 'a>) -> O,
    {
        let mut all = [self.buf.as_mut()];
        f(all.as_fragmented_byte_slice())
    }
}
452
453impl<B: AsRef<[u8]>> AsRef<[u8]> for Buf<B> {
454 fn as_ref(&self) -> &[u8] {
455 &self.buf.as_ref()[self.body.clone()]
456 }
457}
458
459impl<B: AsMut<[u8]>> AsMut<[u8]> for Buf<B> {
460 fn as_mut(&mut self) -> &mut [u8] {
461 &mut self.buf.as_mut()[self.body.clone()]
462 }
463}
464
impl<B: AsRef<[u8]>> Buffer for Buf<B> {
    /// Parses a packet, additionally returning a view of the bytes the
    /// parser started with (the pre-parse body).
    fn parse_with_view<'a, ParseArgs, P: ParsablePacket<&'a [u8], ParseArgs>>(
        &'a mut self,
        args: ParseArgs,
    ) -> Result<(P, &'a [u8]), P::Error> {
        // Split the borrow: `body` is mutated through the view while `buf`
        // stays shared so we can re-slice it after parsing.
        let &mut Self { ref mut body, ref buf } = self;
        // Snapshot the body range before parsing moves it.
        let body_before = body.clone();
        let view = BufView { buf: &buf.as_ref()[body.clone()], body };
        P::parse(view, args).map(|r| (r, &buf.as_ref()[body_before]))
    }
}
476
/// An immutable view into a [`Buf`]'s body used during parsing.
///
/// Consuming bytes from the view shrinks the slice and simultaneously
/// updates the parent buffer's `body` range.
pub struct BufView<'a> {
    // The not-yet-consumed body bytes.
    buf: &'a [u8],
    // The parent `Buf`'s body range, kept in sync as bytes are taken.
    body: &'a mut Range<usize>,
}
485
impl<'a> BufferView<&'a [u8]> for BufView<'a> {
    /// Takes the first `n` bytes, or `None` if fewer than `n` remain.
    fn take_front(&mut self, n: usize) -> Option<&'a [u8]> {
        if self.len() < n {
            return None;
        }
        // Record the consumed prefix in the parent's body range.
        self.body.start += n;
        self.buf.split_off(..n)
    }

    /// Takes the last `n` bytes, or `None` if fewer than `n` remain.
    fn take_back(&mut self, n: usize) -> Option<&'a [u8]> {
        if self.len() < n {
            return None;
        }
        self.body.end -= n;

        // `len() >= n` was checked above, so the subtraction cannot fail.
        let split = <[u8]>::len(self.buf).checked_sub(n).unwrap();
        self.buf.split_off(split..)
    }

    fn into_rest(self) -> &'a [u8] {
        self.buf
    }
}
509
/// Borrows the not-yet-consumed bytes.
impl<'a> AsRef<[u8]> for BufView<'a> {
    fn as_ref(&self) -> &[u8] {
        self.buf
    }
}
515
/// The mutable counterpart of [`BufView`], used during mutable parsing.
pub struct BufViewMut<'a> {
    // The not-yet-consumed body bytes.
    buf: &'a mut [u8],
    // The parent `Buf`'s body range, kept in sync as bytes are taken.
    body: &'a mut Range<usize>,
}
525
526impl<'a> BufferView<&'a mut [u8]> for BufViewMut<'a> {
527 fn take_front(&mut self, n: usize) -> Option<&'a mut [u8]> {
528 if self.len() < n {
529 return None;
530 }
531 self.body.start += n;
532 self.buf.split_off_mut(..n)
533 }
534
535 fn take_back(&mut self, n: usize) -> Option<&'a mut [u8]> {
536 if self.len() < n {
537 return None;
538 }
539 self.body.end -= n;
540
541 let split = <[u8]>::len(self.buf).checked_sub(n)?;
542 Some(self.buf.split_off_mut(split..)?)
543 }
544
545 fn into_rest(self) -> &'a mut [u8] {
546 self.buf
547 }
548}
549
// `BufferViewMut` is a marker extension of `BufferView` over mutable slices.
impl<'a> BufferViewMut<&'a mut [u8]> for BufViewMut<'a> {}
551
/// Borrows the not-yet-consumed bytes.
impl<'a> AsRef<[u8]> for BufViewMut<'a> {
    fn as_ref(&self) -> &[u8] {
        self.buf
    }
}

/// Mutably borrows the not-yet-consumed bytes.
impl<'a> AsMut<[u8]> for BufViewMut<'a> {
    fn as_mut(&mut self) -> &mut [u8] {
        self.buf
    }
}
563
/// The constraints a packet layer imposes on serialization: fixed header and
/// footer lengths plus minimum/maximum body lengths.
///
/// Invariants (enforced by `try_new`): `min_body_len <= max_body_len`, and
/// `header_len + min_body_len + footer_len` does not overflow `usize`.
#[derive(Copy, Clone, Debug, Eq, PartialEq)]
pub struct PacketConstraints {
    header_len: usize,
    footer_len: usize,
    min_body_len: usize,
    max_body_len: usize,
}
584
impl PacketConstraints {
    /// Constraints that allow any packet: zero header/footer, no minimum
    /// body, and an effectively unlimited maximum body.
    pub const UNCONSTRAINED: Self =
        Self { header_len: 0, footer_len: 0, min_body_len: 0, max_body_len: usize::MAX };

    /// Constructs new `PacketConstraints`.
    ///
    /// # Panics
    ///
    /// Panics under the conditions for which [`PacketConstraints::try_new`]
    /// returns `None`.
    #[inline]
    pub fn new(
        header_len: usize,
        footer_len: usize,
        min_body_len: usize,
        max_body_len: usize,
    ) -> PacketConstraints {
        PacketConstraints::try_new(header_len, footer_len, min_body_len, max_body_len).expect(
            "max_body_len < min_body_len or header_len + min_body_len + footer_len overflows usize",
        )
    }

    /// Constructs new `PacketConstraints`, returning `None` if
    /// `max_body_len < min_body_len` or if
    /// `header_len + min_body_len + footer_len` overflows `usize`.
    #[inline]
    pub fn try_new(
        header_len: usize,
        footer_len: usize,
        min_body_len: usize,
        max_body_len: usize,
    ) -> Option<PacketConstraints> {
        // The smallest satisfiable packet must fit in a usize.
        let header_min_body_footer_overflows = header_len
            .checked_add(min_body_len)
            .and_then(|sum| sum.checked_add(footer_len))
            .is_none();
        let max_less_than_min = max_body_len < min_body_len;
        if max_less_than_min || header_min_body_footer_overflows {
            return None;
        }
        Some(PacketConstraints { header_len, footer_len, min_body_len, max_body_len })
    }

    /// Constructs constraints whose only restriction is a maximum body length.
    #[inline]
    pub fn with_max_body_len(max_body_len: usize) -> PacketConstraints {
        PacketConstraints { header_len: 0, footer_len: 0, min_body_len: 0, max_body_len }
    }

    /// The header length.
    #[inline]
    pub fn header_len(&self) -> usize {
        self.header_len
    }

    /// The footer length.
    #[inline]
    pub fn footer_len(&self) -> usize {
        self.footer_len
    }

    /// The minimum body length.
    #[inline]
    pub fn min_body_len(&self) -> usize {
        self.min_body_len
    }

    /// The maximum body length.
    #[inline]
    pub fn max_body_len(&self) -> usize {
        self.max_body_len
    }

    /// Computes the constraints of nesting `self` (the inner layer) inside
    /// `outer`, returning `None` if the composition is unsatisfiable.
    pub fn try_encapsulate(&self, outer: &Self) -> Option<PacketConstraints> {
        let inner = self;
        // Total header/footer is the sum across both layers; may overflow.
        let header_len = inner.header_len.checked_add(outer.header_len)?;
        let footer_len = inner.footer_len.checked_add(outer.footer_len)?;
        // Cannot overflow: `try_new` guarantees inner's header + min_body +
        // footer fits in a usize, and this sum is no larger.
        let inner_header_footer_len = inner.header_len + inner.footer_len;
        // The inner body, once wrapped in inner's header/footer, must also
        // satisfy the outer minimum.
        let min_body_len = cmp::max(
            outer.min_body_len.saturating_sub(inner_header_footer_len),
            inner.min_body_len,
        );
        // The inner body plus inner header/footer must fit in the outer
        // maximum; if the subtraction underflows, nothing can fit.
        let max_body_len =
            cmp::min(outer.max_body_len.checked_sub(inner_header_footer_len)?, inner.max_body_len);
        PacketConstraints::try_new(header_len, footer_len, min_body_len, max_body_len)
    }
}
725
/// The header and footer slices a [`PacketBuilder`] writes into during
/// serialization; the body sits between them and is already in place.
pub struct SerializeTarget<'a> {
    #[allow(missing_docs)]
    pub header: &'a mut [u8],
    #[allow(missing_docs)]
    pub footer: &'a mut [u8],
}
734
/// A builder capable of serializing one packet layer's header and footer
/// around an existing body.
pub trait PacketBuilder: Sized {
    /// The constraints this layer imposes on serialization.
    fn constraints(&self) -> PacketConstraints;

    /// Serializes the header and footer into `target`, given the
    /// already-serialized `body`.
    fn serialize(&self, target: &mut SerializeTarget<'_>, body: FragmentedBytesMut<'_, '_>);

    /// Wraps `body` in this builder, producing a [`Nested`] value.
    #[inline]
    fn wrap_body<B>(self, body: B) -> Nested<B, Self> {
        Nested { inner: body, outer: self }
    }
}
788
789impl<'a, B: PacketBuilder> PacketBuilder for &'a B {
790 #[inline]
791 fn constraints(&self) -> PacketConstraints {
792 B::constraints(self)
793 }
794 #[inline]
795 fn serialize(&self, target: &mut SerializeTarget<'_>, body: FragmentedBytesMut<'_, '_>) {
796 B::serialize(self, target, body)
797 }
798}
799
800impl<'a, B: PacketBuilder> PacketBuilder for &'a mut B {
801 #[inline]
802 fn constraints(&self) -> PacketConstraints {
803 B::constraints(self)
804 }
805 #[inline]
806 fn serialize(&self, target: &mut SerializeTarget<'_>, body: FragmentedBytesMut<'_, '_>) {
807 B::serialize(self, target, body)
808 }
809}
810
/// `()` is the no-op builder: no header, no footer, no body constraints.
impl PacketBuilder for () {
    #[inline]
    fn constraints(&self) -> PacketConstraints {
        PacketConstraints::UNCONSTRAINED
    }
    #[inline]
    fn serialize(&self, _target: &mut SerializeTarget<'_>, _body: FragmentedBytesMut<'_, '_>) {}
}
819
/// `Never` is uninhabited, so these methods can never actually be called;
/// `constraints` proves it with an empty match.
impl PacketBuilder for Never {
    fn constraints(&self) -> PacketConstraints {
        match *self {}
    }
    fn serialize(&self, _target: &mut SerializeTarget<'_>, _body: FragmentedBytesMut<'_, '_>) {}
}
826
/// An inner value (typically a body or serializer) wrapped in an outer
/// [`PacketBuilder`] layer.
#[derive(Copy, Clone, Debug, Eq, PartialEq)]
pub struct Nested<I, O> {
    inner: I,
    outer: O,
}
839
impl<I, O> Nested<I, O> {
    /// Consumes this `Nested`, returning the inner value.
    #[inline]
    pub fn into_inner(self) -> I {
        self.inner
    }

    /// Consumes this `Nested`, returning the outer value.
    #[inline]
    pub fn into_outer(self) -> O {
        self.outer
    }

    /// A reference to the inner value.
    #[inline]
    pub fn inner(&self) -> &I {
        &self.inner
    }

    /// A mutable reference to the inner value.
    #[inline]
    pub fn inner_mut(&mut self) -> &mut I {
        &mut self.inner
    }

    /// A reference to the outer value.
    #[inline]
    pub fn outer(&self) -> &O {
        &self.outer
    }

    /// A mutable reference to the outer value.
    #[inline]
    pub fn outer_mut(&mut self) -> &mut O {
        &mut self.outer
    }
}
875
/// A [`PacketBuilder`] that contributes no bytes but caps the body length at
/// `limit`.
#[derive(Copy, Clone, Debug)]
#[cfg_attr(test, derive(Eq, PartialEq))]
pub struct LimitedSizePacketBuilder {
    /// The maximum body length allowed.
    pub limit: usize,
}
887
/// Only constrains the body length; writes no header or footer bytes.
impl PacketBuilder for LimitedSizePacketBuilder {
    fn constraints(&self) -> PacketConstraints {
        PacketConstraints::with_max_body_len(self.limit)
    }

    fn serialize(&self, _target: &mut SerializeTarget<'_>, _body: FragmentedBytesMut<'_, '_>) {}
}
895
/// A builder for an innermost packet — one that has a fixed byte
/// representation and no body of its own.
pub trait InnerPacketBuilder {
    /// The number of bytes this inner packet serializes to.
    fn bytes_len(&self) -> usize;

    /// Serializes this packet into `buffer`.
    ///
    /// `buffer` is expected to be exactly `bytes_len()` long — the wrapper in
    /// this file `debug_assert`s that before calling; confirm against the
    /// original trait docs.
    fn serialize(&self, buffer: &mut [u8]);

    /// Wraps `self` in an [`InnerSerializer`] backed by an [`EmptyBuf`].
    #[inline]
    fn into_serializer(self) -> InnerSerializer<Self, EmptyBuf>
    where
        Self: Sized,
    {
        self.into_serializer_with(EmptyBuf)
    }

    /// Wraps `self` in an [`InnerSerializer`] that uses `buffer` as storage.
    ///
    /// The buffer's body is shrunk to zero length first, so serialization
    /// starts from an empty body.
    fn into_serializer_with<B: ShrinkBuffer>(self, mut buffer: B) -> InnerSerializer<Self, B>
    where
        Self: Sized,
    {
        buffer.shrink_back_to(0);
        InnerSerializer { inner: self, buffer }
    }
}
963
/// A shared reference to an inner builder is itself an inner builder.
impl<'a, I: InnerPacketBuilder> InnerPacketBuilder for &'a I {
    #[inline]
    fn bytes_len(&self) -> usize {
        I::bytes_len(self)
    }
    #[inline]
    fn serialize(&self, buffer: &mut [u8]) {
        I::serialize(self, buffer)
    }
}
/// A mutable reference to an inner builder is itself an inner builder.
impl<'a, I: InnerPacketBuilder> InnerPacketBuilder for &'a mut I {
    #[inline]
    fn bytes_len(&self) -> usize {
        I::bytes_len(self)
    }
    #[inline]
    fn serialize(&self, buffer: &mut [u8]) {
        I::serialize(self, buffer)
    }
}
984impl<'a> InnerPacketBuilder for &'a [u8] {
985 #[inline]
986 fn bytes_len(&self) -> usize {
987 self.len()
988 }
989 #[inline]
990 fn serialize(&self, buffer: &mut [u8]) {
991 buffer.copy_from_slice(self);
992 }
993}
994impl<'a> InnerPacketBuilder for &'a mut [u8] {
995 #[inline]
996 fn bytes_len(&self) -> usize {
997 self.len()
998 }
999 #[inline]
1000 fn serialize(&self, buffer: &mut [u8]) {
1001 buffer.copy_from_slice(self);
1002 }
1003}
1004impl<'a> InnerPacketBuilder for Vec<u8> {
1005 #[inline]
1006 fn bytes_len(&self) -> usize {
1007 self.len()
1008 }
1009 #[inline]
1010 fn serialize(&self, buffer: &mut [u8]) {
1011 buffer.copy_from_slice(self.as_slice());
1012 }
1013}
/// An `ArrayVec<u8, N>` serializes as its current contents, delegating to the
/// `&[u8]` implementation.
impl<const N: usize> InnerPacketBuilder for ArrayVec<u8, N> {
    fn bytes_len(&self) -> usize {
        self.as_slice().bytes_len()
    }
    fn serialize(&self, buffer: &mut [u8]) {
        self.as_slice().serialize(buffer);
    }
}
1022
/// An [`InnerPacketBuilder`] wrapper around any `SplitByteSlice`-backed byte
/// container.
pub struct ByteSliceInnerPacketBuilder<B>(pub B);
1030
/// Serializes the wrapped bytes, delegating to the `&[u8]` implementation.
impl<B: SplitByteSlice> InnerPacketBuilder for ByteSliceInnerPacketBuilder<B> {
    fn bytes_len(&self) -> usize {
        self.0.deref().bytes_len()
    }
    fn serialize(&self, buffer: &mut [u8]) {
        self.0.deref().serialize(buffer)
    }
}
1039
1040impl<B: SplitByteSlice> Debug for ByteSliceInnerPacketBuilder<B> {
1041 fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
1042 write!(f, "ByteSliceInnerPacketBuilder({:?})", self.0.as_ref())
1043 }
1044}
1045
/// An error during serialization.
#[derive(Copy, Clone, Debug, Eq, PartialEq)]
pub enum SerializeError<A> {
    /// The buffer provider/allocator failed.
    Alloc(A),
    /// The body exceeded the size limit imposed by the constraints.
    SizeLimitExceeded,
}
1059
1060impl<A> SerializeError<A> {
1061 #[inline]
1063 pub fn is_alloc(&self) -> bool {
1064 match self {
1065 SerializeError::Alloc(_) => true,
1066 SerializeError::SizeLimitExceeded => false,
1067 }
1068 }
1069
1070 #[inline]
1072 pub fn is_size_limit_exceeded(&self) -> bool {
1073 match self {
1074 SerializeError::Alloc(_) => false,
1075 SerializeError::SizeLimitExceeded => true,
1076 }
1077 }
1078
1079 pub fn map_alloc<T, F: FnOnce(A) -> T>(self, f: F) -> SerializeError<T> {
1081 match self {
1082 SerializeError::Alloc(a) => SerializeError::Alloc(f(a)),
1083 SerializeError::SizeLimitExceeded => SerializeError::SizeLimitExceeded,
1084 }
1085 }
1086}
1087
/// Any allocation error converts into `SerializeError::Alloc`.
impl<A> From<A> for SerializeError<A> {
    fn from(a: A) -> SerializeError<A> {
        SerializeError::Alloc(a)
    }
}
1093
/// The error returned when an existing buffer cannot fit the requested layout
/// and no allocation is permitted.
#[derive(Copy, Clone, Debug, Eq, PartialEq)]
pub struct BufferTooShortError;
1104
/// A source of output buffers for serialization, optionally reusing an
/// existing `Input` buffer.
pub trait BufferProvider<Input, Output> {
    /// The error returned when a buffer cannot be produced.
    type Error;

    /// Allocates a fresh `Output` buffer with the given prefix, body, and
    /// suffix capacities, never reusing an existing buffer.
    fn alloc_no_reuse(
        self,
        prefix: usize,
        body: usize,
        suffix: usize,
    ) -> Result<Output, Self::Error>;

    /// Converts `buffer` into an `Output` with at least `prefix` bytes of
    /// prefix and `suffix` bytes of suffix, reusing it if possible; on
    /// failure, returns the original buffer alongside the error.
    fn reuse_or_realloc(
        self,
        buffer: Input,
        prefix: usize,
        suffix: usize,
    ) -> Result<Output, (Self::Error, Input)>;
}
1162
/// An allocator of `Output` buffers of a requested total length.
pub trait BufferAlloc<Output> {
    /// The error returned when allocation fails.
    type Error;

    /// Allocates a buffer of length `len`.
    fn alloc(self, len: usize) -> Result<Output, Self::Error>;
}
1192
/// Any `FnOnce(usize) -> Result<O, E>` is a `BufferAlloc`; the closure is the
/// allocator.
impl<O, E, F: FnOnce(usize) -> Result<O, E>> BufferAlloc<O> for F {
    type Error = E;

    #[inline]
    fn alloc(self, len: usize) -> Result<O, E> {
        self(len)
    }
}
1201
/// `()` is the "never allocate" allocator: `alloc` always fails. Used by
/// `serialize_no_alloc` to force reuse of the existing buffer.
impl BufferAlloc<Never> for () {
    type Error = ();

    #[inline]
    fn alloc(self, _len: usize) -> Result<Never, ()> {
        Err(())
    }
}
1210
1211pub fn new_buf_vec(len: usize) -> Result<Buf<Vec<u8>>, Never> {
1222 Ok(Buf::new(vec![0; len], ..))
1223}
1224
/// An allocator of buffers with an explicit prefix/body/suffix layout.
pub trait LayoutBufferAlloc<O> {
    /// The error returned when allocation fails.
    type Error;

    /// Allocates a buffer with `prefix` bytes of prefix, a `body`-byte body,
    /// and `suffix` bytes of suffix.
    fn layout_alloc(self, prefix: usize, body: usize, suffix: usize) -> Result<O, Self::Error>;
}
1237
/// Any `BufferAlloc`-style closure gets a layout-aware wrapper: allocate the
/// total capacity, then shrink the body into place between the requested
/// prefix and suffix.
impl<O: ShrinkBuffer, E, F: FnOnce(usize) -> Result<O, E>> LayoutBufferAlloc<O> for F {
    type Error = E;

    #[inline]
    fn layout_alloc(self, prefix: usize, body: usize, suffix: usize) -> Result<O, E> {
        let mut b = self(prefix + body + suffix)?;
        b.shrink_front(prefix);
        b.shrink_back(suffix);
        Ok(b)
    }
}
1249
/// `()` is the "never allocate" layout allocator; mirrors its
/// [`BufferAlloc`] impl.
impl LayoutBufferAlloc<Never> for () {
    type Error = ();

    #[inline]
    fn layout_alloc(self, _prefix: usize, _body: usize, _suffix: usize) -> Result<Never, ()> {
        Err(())
    }
}
1258
/// Attempts to reuse `buffer` so that it has at least `prefix` bytes of
/// prefix and `suffix` bytes of suffix, moving the body if necessary.
///
/// Succeeds without copying when the current prefix/suffix are already large
/// enough. Otherwise, if total capacity suffices and the body is no longer
/// than `max_copy_bytes`, the body is moved into place. Returns `Err` with
/// the buffer if neither applies.
#[inline]
pub fn try_reuse_buffer<B: GrowBufferMut + ShrinkBuffer>(
    mut buffer: B,
    prefix: usize,
    suffix: usize,
    max_copy_bytes: usize,
) -> Result<B, B> {
    let need_prefix = prefix;
    let need_suffix = suffix;
    let have_prefix = buffer.prefix_len();
    let have_body = buffer.len();
    let have_suffix = buffer.suffix_len();
    let need_capacity = need_prefix + have_body + need_suffix;

    if have_prefix >= need_prefix && have_suffix >= need_suffix {
        // Already laid out acceptably; no copying needed.
        Ok(buffer)
    } else if buffer.capacity() >= need_capacity && have_body <= max_copy_bytes {
        // Expand the body to cover the whole buffer so the ranges below index
        // into the full capacity.
        buffer.reset();

        // Move the body so exactly `need_prefix` bytes precede it, then
        // restore the body range at its new position.
        buffer.copy_within(have_prefix..(have_prefix + have_body), need_prefix);
        buffer.shrink(need_prefix..(need_prefix + have_body));
        debug_assert_eq!(buffer.prefix_len(), need_prefix);
        debug_assert!(buffer.suffix_len() >= need_suffix);
        debug_assert_eq!(buffer.len(), have_body);
        Ok(buffer)
    } else {
        Err(buffer)
    }
}
1318
/// A [`BufferProvider`] that reuses the input buffer when it fits and falls
/// back to allocating with `A` otherwise.
pub struct MaybeReuseBufferProvider<A>(pub A);
1323
impl<I: ReusableBuffer, O: ReusableBuffer, A: BufferAlloc<O>> BufferProvider<I, Either<I, O>>
    for MaybeReuseBufferProvider<A>
{
    type Error = A::Error;

    /// Always allocates a fresh buffer (the `Either::B` case).
    fn alloc_no_reuse(
        self,
        prefix: usize,
        body: usize,
        suffix: usize,
    ) -> Result<Either<I, O>, Self::Error> {
        let Self(alloc) = self;
        let need_capacity = prefix + body + suffix;
        BufferAlloc::alloc(alloc, need_capacity).map(|mut buf| {
            // Carve the body range out of the fresh allocation.
            buf.shrink(prefix..(prefix + body));
            Either::B(buf)
        })
    }

    /// Reuses `buffer` as `Either::A` when it can satisfy the prefix/suffix
    /// requirements (possibly by moving its body); otherwise allocates a new
    /// buffer, copies the body over, and returns it as `Either::B`.
    #[inline]
    fn reuse_or_realloc(
        self,
        buffer: I,
        need_prefix: usize,
        need_suffix: usize,
    ) -> Result<Either<I, O>, (A::Error, I)> {
        match try_reuse_buffer(buffer, need_prefix, need_suffix, usize::MAX) {
            Ok(buffer) => Ok(Either::A(buffer)),
            Err(buffer) => {
                let have_body = buffer.len();
                let mut buf = match BufferProvider::<I, Either<I, O>>::alloc_no_reuse(
                    self,
                    need_prefix,
                    have_body,
                    need_suffix,
                ) {
                    Ok(buf) => buf,
                    // Allocation failed: hand the untouched buffer back.
                    Err(err) => return Err((err, buffer)),
                };

                buf.copy_from(&buffer);
                debug_assert_eq!(buf.prefix_len(), need_prefix);
                debug_assert!(buf.suffix_len() >= need_suffix);
                debug_assert_eq!(buf.len(), have_body);
                Ok(buf)
            }
        }
    }
}
1385
/// When the input and output buffer types coincide, the `Either<B, B>`
/// result collapses via [`Either::into_inner`].
impl<B: ReusableBuffer, A: BufferAlloc<B>> BufferProvider<B, B> for MaybeReuseBufferProvider<A> {
    type Error = A::Error;

    fn alloc_no_reuse(self, prefix: usize, body: usize, suffix: usize) -> Result<B, Self::Error> {
        BufferProvider::<B, Either<B, B>>::alloc_no_reuse(self, prefix, body, suffix)
            .map(Either::into_inner)
    }

    #[inline]
    fn reuse_or_realloc(self, buffer: B, prefix: usize, suffix: usize) -> Result<B, (A::Error, B)> {
        BufferProvider::<B, Either<B, B>>::reuse_or_realloc(self, buffer, prefix, suffix)
            .map(Either::into_inner)
    }
}
1408
/// A [`BufferProvider`] that always allocates via `A` and never reuses the
/// input buffer.
pub struct NoReuseBufferProvider<A>(pub A);
1413
impl<I: FragmentedBuffer, O: ReusableBuffer, A: BufferAlloc<O>> BufferProvider<I, O>
    for NoReuseBufferProvider<A>
{
    type Error = A::Error;

    /// Allocates the total capacity, then carves out the body range.
    fn alloc_no_reuse(self, prefix: usize, body: usize, suffix: usize) -> Result<O, A::Error> {
        let Self(alloc) = self;
        alloc.alloc(prefix + body + suffix).map(|mut b| {
            b.shrink(prefix..prefix + body);
            b
        })
    }

    /// "Reuse" here always reallocates: a fresh buffer is allocated and the
    /// input's body copied into it; on failure the input is returned.
    fn reuse_or_realloc(self, buffer: I, prefix: usize, suffix: usize) -> Result<O, (A::Error, I)> {
        BufferProvider::<I, O>::alloc_no_reuse(self, prefix, buffer.len(), suffix)
            .map(|mut b| {
                b.copy_from(&buffer);
                b
            })
            .map_err(|e| (e, buffer))
    }
}
1436
/// A value that can serialize itself — and any layers wrapped around it —
/// into a buffer obtained from a [`BufferProvider`] or allocator.
pub trait Serializer: Sized {
    /// The input buffer type this serializer may offer for reuse.
    type Buffer;

    /// Serializes under the constraints of `outer`, obtaining the output
    /// buffer from `provider`. On failure, `self` is returned so the caller
    /// can retry.
    fn serialize<B: GrowBufferMut, P: BufferProvider<Self::Buffer, B>>(
        self,
        outer: PacketConstraints,
        provider: P,
    ) -> Result<B, (SerializeError<P::Error>, Self)>;

    /// Serializes into a freshly-allocated buffer, leaving `self` intact.
    fn serialize_new_buf<B: GrowBufferMut, A: LayoutBufferAlloc<B>>(
        &self,
        outer: PacketConstraints,
        alloc: A,
    ) -> Result<B, SerializeError<A::Error>>;

    /// Serializes, reusing the existing buffer when possible and falling back
    /// to allocating a new `Buf<Vec<u8>>` otherwise.
    #[inline]
    #[allow(clippy::type_complexity)]
    fn serialize_vec(
        self,
        outer: PacketConstraints,
    ) -> Result<Either<Self::Buffer, Buf<Vec<u8>>>, (SerializeError<Never>, Self)>
    where
        Self::Buffer: ReusableBuffer,
    {
        self.serialize(outer, MaybeReuseBufferProvider(new_buf_vec))
    }

    /// Serializes using only the existing buffer; a failed "allocation"
    /// (i.e., the buffer was too short) maps to [`BufferTooShortError`].
    #[inline]
    fn serialize_no_alloc(
        self,
        outer: PacketConstraints,
    ) -> Result<Self::Buffer, (SerializeError<BufferTooShortError>, Self)>
    where
        Self::Buffer: ReusableBuffer,
    {
        self.serialize(outer, MaybeReuseBufferProvider(())).map(Either::into_a).map_err(
            |(err, slf)| {
                (
                    match err {
                        SerializeError::Alloc(()) => BufferTooShortError.into(),
                        SerializeError::SizeLimitExceeded => SerializeError::SizeLimitExceeded,
                    },
                    slf,
                )
            },
        )
    }

    /// Like [`Serializer::serialize`], but with no outer constraints.
    #[inline]
    fn serialize_outer<B: GrowBufferMut, P: BufferProvider<Self::Buffer, B>>(
        self,
        provider: P,
    ) -> Result<B, (SerializeError<P::Error>, Self)> {
        self.serialize(PacketConstraints::UNCONSTRAINED, provider)
    }

    /// Like [`Serializer::serialize_vec`], but with no outer constraints.
    #[inline]
    #[allow(clippy::type_complexity)]
    fn serialize_vec_outer(
        self,
    ) -> Result<Either<Self::Buffer, Buf<Vec<u8>>>, (SerializeError<Never>, Self)>
    where
        Self::Buffer: ReusableBuffer,
    {
        self.serialize_vec(PacketConstraints::UNCONSTRAINED)
    }

    /// Like [`Serializer::serialize_no_alloc`], but with no outer
    /// constraints.
    #[inline]
    fn serialize_no_alloc_outer(
        self,
    ) -> Result<Self::Buffer, (SerializeError<BufferTooShortError>, Self)>
    where
        Self::Buffer: ReusableBuffer,
    {
        self.serialize_no_alloc(PacketConstraints::UNCONSTRAINED)
    }

    /// Serializes into a new `Buf<Vec<u8>>` with no outer constraints,
    /// leaving `self` intact.
    #[inline]
    fn serialize_vec_outer_no_reuse(&self) -> Result<Buf<Vec<u8>>, SerializeError<Never>> {
        self.serialize_new_buf(PacketConstraints::UNCONSTRAINED, new_buf_vec)
    }

    /// Wraps this serializer in an outer [`PacketBuilder`] layer.
    #[inline]
    fn wrap_in<B: PacketBuilder>(self, outer: B) -> Nested<Self, B> {
        outer.wrap_body(self)
    }

    /// Caps the body at `limit` bytes via a [`LimitedSizePacketBuilder`].
    #[inline]
    fn with_size_limit(self, limit: usize) -> Nested<Self, LimitedSizePacketBuilder> {
        self.wrap_in(LimitedSizePacketBuilder { limit })
    }
}
1619
/// A [`Serializer`] for an [`InnerPacketBuilder`], pairing it with a buffer
/// (with an empty body) that may be reused for the output.
#[derive(Copy, Clone, Debug, Eq, PartialEq)]
pub struct InnerSerializer<I, B> {
    inner: I,
    // Storage offered for reuse; its body is empty (see
    // `into_serializer_with`).
    buffer: B,
}
1635
impl<I, B> InnerSerializer<I, B> {
    /// A reference to the wrapped inner packet builder.
    pub fn inner(&self) -> &I {
        &self.inner
    }
}
1641
// Private adapter that lets an `InnerPacketBuilder` act as a `PacketBuilder`
// whose "header" is the entire inner packet.
struct InnerPacketBuilderWrapper<I>(I);
1648
impl<I: InnerPacketBuilder> PacketBuilder for InnerPacketBuilderWrapper<I> {
    fn constraints(&self) -> PacketConstraints {
        let Self(wrapped) = self;
        // All of the inner packet's bytes are presented as header: no footer
        // and no body requirements.
        PacketConstraints::new(wrapped.bytes_len(), 0, 0, usize::MAX)
    }

    fn serialize(&self, target: &mut SerializeTarget<'_>, _body: FragmentedBytesMut<'_, '_>) {
        let Self(wrapped) = self;

        // `constraints()` requested exactly `bytes_len()` header bytes and a
        // zero-length footer.
        debug_assert_eq!(target.header.len(), wrapped.bytes_len());
        debug_assert_eq!(target.footer.len(), 0);

        InnerPacketBuilder::serialize(wrapped, target.header);
    }
}
1667
impl<I: InnerPacketBuilder, B: GrowBuffer + ShrinkBuffer> Serializer for InnerSerializer<I, B> {
    type Buffer = B;

    #[inline]
    fn serialize<BB: GrowBufferMut, P: BufferProvider<B, BB>>(
        self,
        outer: PacketConstraints,
        provider: P,
    ) -> Result<BB, (SerializeError<P::Error>, InnerSerializer<I, B>)> {
        // The buffer's body was emptied in `into_serializer_with`, so the
        // inner packet builder provides all of the bytes.
        debug_assert_eq!(self.buffer.len(), 0);
        InnerPacketBuilderWrapper(self.inner)
            .wrap_body(self.buffer)
            .serialize(outer, provider)
            // On error, unwrap the `Nested` back into an `InnerSerializer`
            // so the caller gets `self` back intact.
            .map_err(|(err, Nested { inner: buffer, outer: pb })| {
                (err, InnerSerializer { inner: pb.0, buffer })
            })
    }

    #[inline]
    fn serialize_new_buf<BB: GrowBufferMut, A: LayoutBufferAlloc<BB>>(
        &self,
        outer: PacketConstraints,
        alloc: A,
    ) -> Result<BB, SerializeError<A::Error>> {
        InnerPacketBuilderWrapper(&self.inner).wrap_body(EmptyBuf).serialize_new_buf(outer, alloc)
    }
}
1695
/// A buffer is itself a serializer: its body is the packet.
impl<B: GrowBuffer + ShrinkBuffer> Serializer for B {
    type Buffer = B;

    #[inline]
    fn serialize<BB: GrowBufferMut, P: BufferProvider<Self::Buffer, BB>>(
        self,
        outer: PacketConstraints,
        provider: P,
    ) -> Result<BB, (SerializeError<P::Error>, Self)> {
        // Delegate to a `TruncatingSerializer` that never truncates,
        // unwrapping the buffer back out of the error case.
        TruncatingSerializer::new(self, TruncateDirection::NoTruncating)
            .serialize(outer, provider)
            .map_err(|(err, ser)| (err, ser.buffer))
    }

    fn serialize_new_buf<BB: GrowBufferMut, A: LayoutBufferAlloc<BB>>(
        &self,
        outer: PacketConstraints,
        alloc: A,
    ) -> Result<BB, SerializeError<A::Error>> {
        if self.len() > outer.max_body_len() {
            return Err(SerializeError::SizeLimitExceeded);
        }

        // Padding needed to reach the outer minimum body length; it sits
        // after the body, so it counts toward the tail allocation.
        let padding = outer.min_body_len().saturating_sub(self.len());
        let tail_size = padding + outer.footer_len();
        let mut buffer = alloc.layout_alloc(outer.header_len(), self.len(), tail_size)?;
        buffer.copy_from(self);
        // Extend the body to include the padding bytes.
        buffer.grow_back(padding);
        Ok(buffer)
    }
}
1727
/// A serializer that holds one of two possible serializer types.
pub enum EitherSerializer<A, B> {
    A(A),
    B(B),
}
1735
1736impl<A: Serializer, B: Serializer<Buffer = A::Buffer>> Serializer for EitherSerializer<A, B> {
1737 type Buffer = A::Buffer;
1738
1739 fn serialize<TB: GrowBufferMut, P: BufferProvider<Self::Buffer, TB>>(
1740 self,
1741 outer: PacketConstraints,
1742 provider: P,
1743 ) -> Result<TB, (SerializeError<P::Error>, Self)> {
1744 match self {
1745 EitherSerializer::A(s) => {
1746 s.serialize(outer, provider).map_err(|(err, s)| (err, EitherSerializer::A(s)))
1747 }
1748 EitherSerializer::B(s) => {
1749 s.serialize(outer, provider).map_err(|(err, s)| (err, EitherSerializer::B(s)))
1750 }
1751 }
1752 }
1753
1754 fn serialize_new_buf<TB: GrowBufferMut, BA: LayoutBufferAlloc<TB>>(
1755 &self,
1756 outer: PacketConstraints,
1757 alloc: BA,
1758 ) -> Result<TB, SerializeError<BA::Error>> {
1759 match self {
1760 EitherSerializer::A(s) => s.serialize_new_buf(outer, alloc),
1761 EitherSerializer::B(s) => s.serialize_new_buf(outer, alloc),
1762 }
1763 }
1764}
1765
/// The direction in which a [`TruncatingSerializer`] discards body bytes
/// when the body exceeds the maximum body length.
#[derive(Copy, Clone, Debug, Eq, PartialEq)]
pub enum TruncateDirection {
    /// Excess bytes are discarded from the front of the body.
    DiscardFront,
    /// Excess bytes are discarded from the back of the body.
    DiscardBack,
    /// Truncation is disabled; an oversized body fails serialization.
    NoTruncating,
}
1779
/// A serializer whose body buffer may be truncated to fit a size limit.
#[derive(Copy, Clone, Debug, Eq, PartialEq)]
pub struct TruncatingSerializer<B> {
    // The body to serialize.
    buffer: B,
    // Which end of the body to discard from when truncation is needed.
    direction: TruncateDirection,
}
1796
1797impl<B> TruncatingSerializer<B> {
1798 pub fn new(buffer: B, direction: TruncateDirection) -> TruncatingSerializer<B> {
1800 TruncatingSerializer { buffer, direction }
1801 }
1802
1803 pub fn buffer(&self) -> &B {
1805 &self.buffer
1806 }
1807
1808 pub fn buffer_mut(&mut self) -> &mut B {
1810 &mut self.buffer
1811 }
1812}
1813
impl<B: GrowBuffer + ShrinkBuffer> Serializer for TruncatingSerializer<B> {
    type Buffer = B;

    fn serialize<BB: GrowBufferMut, P: BufferProvider<B, BB>>(
        mut self,
        outer: PacketConstraints,
        provider: P,
    ) -> Result<BB, (SerializeError<P::Error>, Self)> {
        let original_len = self.buffer.len();
        // How many bytes (if any) the body exceeds the limit by.
        let excess_bytes = if original_len > outer.max_body_len {
            Some(original_len - outer.max_body_len)
        } else {
            None
        };
        if let Some(excess_bytes) = excess_bytes {
            match self.direction {
                TruncateDirection::DiscardFront => self.buffer.shrink_front(excess_bytes),
                TruncateDirection::DiscardBack => self.buffer.shrink_back(excess_bytes),
                TruncateDirection::NoTruncating => {
                    // Truncation disabled: an oversized body is an error.
                    return Err((SerializeError::SizeLimitExceeded, self));
                }
            }
        }

        // Padding required to bring the body up to the minimum body length.
        let padding = outer.min_body_len().saturating_sub(self.buffer.len());

        debug_assert!(self.buffer.len() + padding <= outer.max_body_len());
        match provider.reuse_or_realloc(
            self.buffer,
            outer.header_len(),
            padding + outer.footer_len(),
        ) {
            Ok(buffer) => Ok(buffer),
            Err((err, mut buffer)) => {
                // Undo the truncation performed above so the returned
                // serializer holds the caller's original, un-shrunk body.
                if let Some(excess_bytes) = excess_bytes {
                    match self.direction {
                        TruncateDirection::DiscardFront => buffer.grow_front(excess_bytes),
                        TruncateDirection::DiscardBack => buffer.grow_back(excess_bytes),
                        // Excess bytes with `NoTruncating` returned early.
                        TruncateDirection::NoTruncating => unreachable!(),
                    }
                }

                Err((
                    SerializeError::Alloc(err),
                    TruncatingSerializer { buffer, direction: self.direction },
                ))
            }
        }
    }

    fn serialize_new_buf<BB: GrowBufferMut, A: LayoutBufferAlloc<BB>>(
        &self,
        outer: PacketConstraints,
        alloc: A,
    ) -> Result<BB, SerializeError<A::Error>> {
        // Body length after truncation and the number of bytes dropped.
        let truncated_size = cmp::min(self.buffer.len(), outer.max_body_len());
        let discarded_bytes = self.buffer.len() - truncated_size;
        let padding = outer.min_body_len().saturating_sub(truncated_size);
        let tail_size = padding + outer.footer_len();
        let mut buffer = alloc.layout_alloc(outer.header_len(), truncated_size, tail_size)?;
        buffer.with_bytes_mut(|mut dst| {
            self.buffer.with_bytes(|src| {
                // Select which portion of the source body survives.
                let src = match (discarded_bytes > 0, self.direction) {
                    (false, _) => src,
                    (true, TruncateDirection::DiscardFront) => src.slice(discarded_bytes..),
                    (true, TruncateDirection::DiscardBack) => src.slice(..truncated_size),
                    (true, TruncateDirection::NoTruncating) => {
                        return Err(SerializeError::SizeLimitExceeded);
                    }
                };
                dst.copy_from(&src);
                Ok(())
            })
        })?;
        // Claim zeroed padding from the suffix so the body meets the
        // minimum body length.
        buffer.grow_back_zero(padding);
        Ok(buffer)
    }
}
1898
impl<I: Serializer, O: PacketBuilder> Serializer for Nested<I, O> {
    type Buffer = I::Buffer;

    #[inline]
    fn serialize<B: GrowBufferMut, P: BufferProvider<I::Buffer, B>>(
        self,
        outer: PacketConstraints,
        provider: P,
    ) -> Result<B, (SerializeError<P::Error>, Self)> {
        // Fold this layer's constraints into the outer constraints; `None`
        // means the combination can never be satisfied.
        let Some(outer) = self.outer.constraints().try_encapsulate(&outer) else {
            return Err((SerializeError::SizeLimitExceeded, self));
        };

        match self.inner.serialize(outer, provider) {
            Ok(mut buf) => {
                // The inner body is in place; write this layer's header and
                // footer around it.
                buf.serialize(&self.outer);
                Ok(buf)
            }
            // Re-wrap the recovered inner serializer so the caller gets the
            // original `Nested` value back.
            Err((err, inner)) => Err((err, self.outer.wrap_body(inner))),
        }
    }

    #[inline]
    fn serialize_new_buf<B: GrowBufferMut, A: LayoutBufferAlloc<B>>(
        &self,
        outer: PacketConstraints,
        alloc: A,
    ) -> Result<B, SerializeError<A::Error>> {
        let Some(outer) = self.outer.constraints().try_encapsulate(&outer) else {
            return Err(SerializeError::SizeLimitExceeded);
        };

        let mut buf = self.inner.serialize_new_buf(outer, alloc)?;
        GrowBufferMut::serialize(&mut buf, &self.outer);
        Ok(buf)
    }
}
1936
/// A [`PacketBuilder`] that can additionally serialize just its header,
/// given the length of the body it will eventually wrap.
pub trait PartialPacketBuilder: PacketBuilder {
    /// Writes this builder's header into `buffer` for a body of `body_len`
    /// bytes; callers pass a header-sized `buffer` (only the header is
    /// written here, not the footer).
    fn partial_serialize(&self, body_len: usize, buffer: &mut [u8]);
}
1949
// The unit type is a no-op builder with an empty header: nothing to write.
impl PartialPacketBuilder for () {
    fn partial_serialize(&self, _body_len: usize, _buffer: &mut [u8]) {}
}
1953
/// The outcome of a [`PartialSerializer::partial_serialize`] call.
#[derive(Debug, Eq, PartialEq)]
pub struct PartialSerializeResult {
    /// The number of bytes actually written into the provided buffer.
    pub bytes_written: usize,

    /// The total size the fully serialized packet would occupy.
    pub total_size: usize,
}
1963
/// A serializer that can report the total size of the packet it would
/// produce, optionally writing a leading portion into a caller's buffer.
pub trait PartialSerializer {
    /// Computes the total serialized size under `outer`, writing whatever
    /// leading bytes this serializer supports into `buffer` and reporting
    /// both quantities in the returned [`PartialSerializeResult`].
    fn partial_serialize(
        &self,
        outer: PacketConstraints,
        buffer: &mut [u8],
    ) -> Result<PartialSerializeResult, SerializeError<Never>>;
}
1984
1985impl<B: GrowBuffer + ShrinkBuffer> PartialSerializer for B {
1986 fn partial_serialize(
1987 &self,
1988 _outer: PacketConstraints,
1989 _buffer: &mut [u8],
1990 ) -> Result<PartialSerializeResult, SerializeError<Never>> {
1991 Ok(PartialSerializeResult { bytes_written: 0, total_size: self.len() })
1992 }
1993}
1994
1995impl<B: GrowBuffer + ShrinkBuffer> PartialSerializer for TruncatingSerializer<B> {
1996 fn partial_serialize(
1997 &self,
1998 outer: PacketConstraints,
1999 _buffer: &mut [u8],
2000 ) -> Result<PartialSerializeResult, SerializeError<Never>> {
2001 let total_size =
2002 cmp::max(outer.min_body_len(), cmp::min(self.buffer().len(), outer.max_body_len()));
2003 Ok(PartialSerializeResult { bytes_written: 0, total_size })
2004 }
2005}
2006
2007impl<I: InnerPacketBuilder, B: GrowBuffer + ShrinkBuffer> PartialSerializer
2008 for InnerSerializer<I, B>
2009{
2010 fn partial_serialize(
2011 &self,
2012 outer: PacketConstraints,
2013 _buffer: &mut [u8],
2014 ) -> Result<PartialSerializeResult, SerializeError<Never>> {
2015 Ok(PartialSerializeResult {
2016 bytes_written: 0,
2017 total_size: cmp::max(self.inner().bytes_len(), outer.min_body_len()),
2018 })
2019 }
2020}
2021
2022impl<A: Serializer + PartialSerializer, B: Serializer + PartialSerializer> PartialSerializer
2023 for EitherSerializer<A, B>
2024{
2025 fn partial_serialize(
2026 &self,
2027 outer: PacketConstraints,
2028 buffer: &mut [u8],
2029 ) -> Result<PartialSerializeResult, SerializeError<Never>> {
2030 match self {
2031 EitherSerializer::A(s) => s.partial_serialize(outer, buffer),
2032 EitherSerializer::B(s) => s.partial_serialize(outer, buffer),
2033 }
2034 }
2035}
2036
2037impl<I: PartialSerializer, O: PartialPacketBuilder> PartialSerializer for Nested<I, O> {
2038 fn partial_serialize(
2039 &self,
2040 outer: PacketConstraints,
2041 buffer: &mut [u8],
2042 ) -> Result<PartialSerializeResult, SerializeError<Never>> {
2043 let header_constraints = self.outer.constraints();
2044 let Some(constraints) = outer.try_encapsulate(&header_constraints) else {
2045 return Err(SerializeError::SizeLimitExceeded);
2046 };
2047
2048 let header_len = header_constraints.header_len();
2049 let inner_buf = buffer.get_mut(header_len..).unwrap_or(&mut []);
2050 let mut result = self.inner.partial_serialize(constraints, inner_buf)?;
2051 if header_len <= buffer.len() {
2052 self.outer.partial_serialize(result.total_size, &mut buffer[..header_len]);
2053 result.bytes_written += header_len;
2054 }
2055 result.total_size += header_len + header_constraints.footer_len();
2056 Ok(result)
2057 }
2058}
2059
mod sealed {
    use super::*;

    /// Object-safe core of [`DynamicSerializer`]; sealed in this private
    /// module so external crates cannot implement it directly.
    pub trait DynamicSerializerInner {
        /// Serializes into a buffer obtained from `alloc`, returning the
        /// prefix and suffix lengths remaining in the allocated buffer.
        fn serialize_dyn_alloc(
            &self,
            outer: PacketConstraints,
            alloc: &mut dyn DynamicBufferAlloc,
        ) -> Result<(usize, usize), SerializeError<DynAllocError>>;
    }

    /// An object-safe buffer allocator driven by
    /// [`DynamicSerializerInner::serialize_dyn_alloc`].
    pub trait DynamicBufferAlloc {
        /// Allocates a buffer with the requested prefix, body, and suffix
        /// capacities, returning a view over its contiguous bytes.
        fn alloc(
            &mut self,
            prefix: usize,
            body: usize,
            suffix: usize,
        ) -> Result<Buf<&mut [u8]>, DynAllocError>;
    }

    /// Opaque marker for a failed dynamic allocation; the concrete, typed
    /// error is stored out-of-band by the allocator implementation.
    pub struct DynAllocError;
}
2119
2120use sealed::{DynAllocError, DynamicBufferAlloc, DynamicSerializerInner};
2121
/// Implements `serialize_new_buf` for a type-erased serializer by adapting a
/// statically-typed allocator `A` into the object-safe
/// [`DynamicBufferAlloc`], then restoring the allocated buffer's prefix and
/// suffix to the lengths reported by the serializer.
fn dyn_serialize_new_buf<B: GrowBufferMut, A: LayoutBufferAlloc<B>>(
    serializer: &dyn DynamicSerializerInner,
    outer: PacketConstraints,
    alloc: A,
) -> Result<B, SerializeError<A::Error>> {
    // State machine threading the allocator, the allocated buffer, or the
    // typed allocation error through the `dyn` boundary.
    enum Adapter<A: LayoutBufferAlloc<B>, B> {
        Empty,
        Alloc(A),
        Buffer(B),
        Error(A::Error),
    }

    impl<A: LayoutBufferAlloc<B>, B: GrowBufferMut> DynamicBufferAlloc for Adapter<A, B> {
        fn alloc(
            &mut self,
            prefix: usize,
            body: usize,
            suffix: usize,
        ) -> Result<Buf<&mut [u8]>, DynAllocError> {
            // Take the allocator out of the state machine; `alloc` must be
            // called exactly once while in the `Alloc` state.
            let alloc = match core::mem::replace(self, Self::Empty) {
                Self::Alloc(a) => a,
                _ => panic!("unexpected alloc state"),
            };

            let buffer = match alloc.layout_alloc(prefix, body, suffix) {
                Ok(b) => b,
                Err(e) => {
                    // Stash the typed error so the caller can recover it
                    // after the `dyn` call returns the opaque error.
                    *self = Self::Error(e);
                    return Err(DynAllocError);
                }
            };
            *self = Self::Buffer(buffer);
            // Hand out a contiguous view over the stored buffer's bytes.
            let buffer = match self {
                Self::Buffer(b) => b.with_all_contents_mut(|b| match b.try_into_contiguous() {
                    Ok(b) => b,
                    Err(_) => todo!(
                        "https://fxbug.dev/428952155: support dyn serialize fragmented buffers"
                    ),
                }),
                _ => unreachable!(),
            };
            Ok(Buf::new(buffer, prefix..(buffer.len() - suffix)))
        }
    }

    let mut adapter = Adapter::Alloc(alloc);
    let (prefix, suffix) = match serializer.serialize_dyn_alloc(outer, &mut adapter) {
        Ok(b) => b,
        Err(SerializeError::SizeLimitExceeded) => {
            return Err(SerializeError::SizeLimitExceeded);
        }
        // Map the opaque allocation failure back to the typed error the
        // adapter stashed away above.
        Err(SerializeError::Alloc(DynAllocError)) => match adapter {
            Adapter::Error(e) => {
                return Err(SerializeError::Alloc(e));
            }
            _ => {
                unreachable!();
            }
        },
    };

    let mut buffer = match adapter {
        Adapter::Buffer(b) => b,
        _ => unreachable!("unexpected alloc state"),
    };
    // The serializer reported how much prefix/suffix remained after it ran;
    // grow the buffer's body so the remaining regions are exactly
    // `prefix`/`suffix`, panicking if the buffer shrank below them.
    buffer.grow_front(buffer.prefix_len().checked_sub(prefix).unwrap_or_else(|| {
        panic!("failed to grow buffer front; want: {} got: {}", prefix, buffer.prefix_len())
    }));
    buffer.grow_back(buffer.suffix_len().checked_sub(suffix).unwrap_or_else(|| {
        panic!("failed to grow buffer back; want: {} got: {}", suffix, buffer.suffix_len())
    }));
    Ok(buffer)
}
2196
/// A type-erased [`Serializer`] borrowing some underlying serializer.
#[derive(Copy, Clone)]
pub struct DynSerializer<'a>(&'a dyn DynamicSerializerInner);
2203
2204impl<'a> DynSerializer<'a> {
2205 pub fn new<S: Serializer>(s: &'a S) -> Self {
2207 Self::new_dyn(s)
2208 }
2209
2210 pub fn new_dyn(s: &'a dyn DynamicSerializer) -> Self {
2212 Self(s)
2213 }
2214}
2215
impl Serializer for DynSerializer<'_> {
    // A type-erased serializer carries no reusable buffer of its own.
    type Buffer = EmptyBuf;

    fn serialize<B: GrowBufferMut, P: BufferProvider<Self::Buffer, B>>(
        self,
        outer: PacketConstraints,
        provider: P,
    ) -> Result<B, (SerializeError<P::Error>, Self)> {
        // Adapts a `BufferProvider` into a `LayoutBufferAlloc`; since there
        // is no buffer to reuse, allocation always goes through
        // `alloc_no_reuse`.
        struct Adapter<S, P>(P, PhantomData<S>);

        impl<S, B, P> LayoutBufferAlloc<B> for Adapter<S, P>
        where
            P: BufferProvider<S, B>,
        {
            type Error = P::Error;

            fn layout_alloc(
                self,
                prefix: usize,
                body: usize,
                suffix: usize,
            ) -> Result<B, Self::Error> {
                let Self(provider, PhantomData) = self;
                provider.alloc_no_reuse(prefix, body, suffix)
            }
        }

        let Self(serializer) = self;
        match dyn_serialize_new_buf(serializer, outer, Adapter(provider, PhantomData)) {
            Ok(b) => Ok(b),
            // `DynSerializer` is `Copy`, so `self` is still available to
            // return alongside the error.
            Err(e) => Err((e, self)),
        }
    }

    fn serialize_new_buf<B: GrowBufferMut, A: LayoutBufferAlloc<B>>(
        &self,
        outer: PacketConstraints,
        alloc: A,
    ) -> Result<B, SerializeError<A::Error>> {
        let Self(serializer) = self;
        dyn_serialize_new_buf(*serializer, outer, alloc)
    }
}
2259
// Blanket impl: every statically-typed `Serializer` is usable through the
// object-safe `DynamicSerializerInner` interface.
impl<O> DynamicSerializerInner for O
where
    O: Serializer,
{
    fn serialize_dyn_alloc(
        &self,
        outer: PacketConstraints,
        alloc: &mut dyn DynamicBufferAlloc,
    ) -> Result<(usize, usize), SerializeError<DynAllocError>> {
        // Adapts the object-safe allocator back into a `LayoutBufferAlloc`
        // so the statically-typed `serialize_new_buf` can drive it.
        struct Adapter<'a>(&'a mut dyn DynamicBufferAlloc);
        impl<'a> LayoutBufferAlloc<Buf<&'a mut [u8]>> for Adapter<'a> {
            type Error = DynAllocError;

            fn layout_alloc(
                self,
                prefix: usize,
                body: usize,
                suffix: usize,
            ) -> Result<Buf<&'a mut [u8]>, Self::Error> {
                let Self(inner) = self;
                inner.alloc(prefix, body, suffix)
            }
        }
        // Report the prefix/suffix remaining after serialization so the
        // caller can restore those regions on the buffer it owns.
        self.serialize_new_buf(outer, Adapter(alloc))
            .map(|buffer| (buffer.prefix_len(), buffer.suffix_len()))
    }
}
2287
/// An object-safe serializer suitable for use behind `dyn`; automatically
/// implemented for everything implementing the sealed
/// [`DynamicSerializerInner`] (and therefore for every [`Serializer`]).
pub trait DynamicSerializer: DynamicSerializerInner {}
impl<O> DynamicSerializer for O where O: DynamicSerializerInner {}
2304
2305#[cfg(test)]
2306mod tests {
2307 use super::*;
2308 use crate::BufferMut;
2309 use std::fmt::Debug;
2310 use test_case::test_case;
2311 use test_util::{assert_geq, assert_leq};
2312
    /// A test packet builder with fixed constraints that fills its header
    /// with `header_byte` and its footer with `footer_byte`.
    #[derive(Copy, Clone, Debug, Eq, PartialEq)]
    struct DummyPacketBuilder {
        header_len: usize,
        footer_len: usize,
        min_body_len: usize,
        max_body_len: usize,
        // Byte value used to fill the header during serialization.
        header_byte: u8,
        // Byte value used to fill the footer during serialization.
        footer_byte: u8,
    }
2327
2328 impl DummyPacketBuilder {
2329 fn new(
2330 header_len: usize,
2331 footer_len: usize,
2332 min_body_len: usize,
2333 max_body_len: usize,
2334 ) -> DummyPacketBuilder {
2335 DummyPacketBuilder {
2336 header_len,
2337 footer_len,
2338 min_body_len,
2339 max_body_len,
2340 header_byte: 0xFF,
2341 footer_byte: 0xFE,
2342 }
2343 }
2344 }
2345
    impl PacketBuilder for DummyPacketBuilder {
        fn constraints(&self) -> PacketConstraints {
            PacketConstraints::new(
                self.header_len,
                self.footer_len,
                self.min_body_len,
                self.max_body_len,
            )
        }

        /// Fills the header and footer regions, asserting that the regions
        /// provided by the serializer respect this builder's constraints.
        fn serialize(&self, target: &mut SerializeTarget<'_>, body: FragmentedBytesMut<'_, '_>) {
            assert_eq!(target.header.len(), self.header_len);
            assert_eq!(target.footer.len(), self.footer_len);
            assert!(body.len() >= self.min_body_len);
            assert!(body.len() <= self.max_body_len);
            target.header.fill(self.header_byte);
            target.footer.fill(self.footer_byte);
        }
    }
2365
2366 impl PartialPacketBuilder for DummyPacketBuilder {
2367 fn partial_serialize(&self, _body_len: usize, buffer: &mut [u8]) {
2368 buffer.fill(self.header_byte)
2369 }
2370 }
2371
2372 impl InnerPacketBuilder for DummyPacketBuilder {
2373 fn bytes_len(&self) -> usize {
2374 self.header_len
2375 }
2376
2377 fn serialize(&self, buffer: &mut [u8]) {
2378 assert_eq!(buffer.len(), self.header_len);
2379 buffer.fill(self.header_byte);
2380 }
2381 }
2382
    /// Captures a serializer's expected behavior so that serialization
    /// results can be checked against it.
    #[derive(Copy, Clone, Debug, Eq, PartialEq)]
    struct SerializerVerifier {
        // The serializer's unconstrained serialized length, or `None` if it
        // always fails with a size-limit error.
        inner_len: Option<usize>,

        // Whether the serializer truncates oversized bodies rather than
        // failing with `SizeLimitExceeded`.
        truncating: bool,
    }
2394
    impl SerializerVerifier {
        fn new<S: Serializer>(serializer: &S, truncating: bool) -> Self {
            // Probe the serializer without constraints to learn its natural
            // length; the only acceptable failure is a size-limit error.
            let inner_len = serializer
                .serialize_new_buf(PacketConstraints::UNCONSTRAINED, new_buf_vec)
                .map(|buf| buf.len())
                .inspect_err(|err| assert!(err.is_size_limit_exceeded()))
                .ok();
            Self { inner_len, truncating }
        }

        /// Asserts that a serialization result is consistent with the
        /// serializer's known length, its truncation mode, and `outer`.
        fn verify_result<B: GrowBufferMut, A>(
            &self,
            result: Result<&B, &SerializeError<A>>,
            outer: PacketConstraints,
        ) {
            // A non-truncating serializer whose body exceeds the limit (or
            // one that always fails) must report `SizeLimitExceeded`.
            let should_exceed_size_limit = match self.inner_len {
                Some(inner_len) => outer.max_body_len() < inner_len && !self.truncating,
                None => true,
            };

            match result {
                Ok(buf) => {
                    assert_geq!(buf.prefix_len(), outer.header_len());
                    assert_geq!(buf.suffix_len(), outer.footer_len());
                    assert_leq!(buf.len(), outer.max_body_len());

                    // Enough suffix must remain for padding plus the footer.
                    let padding = outer.min_body_len().saturating_sub(buf.len());
                    assert_leq!(padding + outer.footer_len(), buf.suffix_len());

                    assert!(!should_exceed_size_limit);
                }
                Err(err) => {
                    if should_exceed_size_limit {
                        assert!(err.is_size_limit_exceeded());
                    } else {
                        // Otherwise only allocation failures are expected.
                        assert!(err.is_alloc());
                    }
                }
            }
        }
    }
2442
    /// A serializer wrapper that checks every serialization result against a
    /// [`SerializerVerifier`] and asserts that the serializer is returned
    /// intact on failure.
    #[derive(Copy, Clone, Debug, Eq, PartialEq)]
    struct VerifyingSerializer<S> {
        ser: S,
        verifier: SerializerVerifier,
    }
2456
    impl<S: Serializer + Debug + Clone + Eq> Serializer for VerifyingSerializer<S>
    where
        S::Buffer: ReusableBuffer,
    {
        type Buffer = S::Buffer;

        fn serialize<B: GrowBufferMut, P: BufferProvider<Self::Buffer, B>>(
            self,
            outer: PacketConstraints,
            provider: P,
        ) -> Result<B, (SerializeError<P::Error>, Self)> {
            let Self { ser, verifier } = self;
            // Keep a copy so the failure path can be checked to return the
            // serializer unmodified.
            let orig = ser.clone();

            let result = ser.serialize(outer, provider).map_err(|(err, ser)| {
                assert_eq!(ser, orig);
                (err, Self { ser, verifier })
            });

            verifier.verify_result(result.as_ref().map_err(|(err, _ser)| err), outer);

            result
        }

        fn serialize_new_buf<B: GrowBufferMut, A: LayoutBufferAlloc<B>>(
            &self,
            outer: PacketConstraints,
            alloc: A,
        ) -> Result<B, SerializeError<A::Error>> {
            let res = self.ser.serialize_new_buf(outer, alloc);
            self.verifier.verify_result(res.as_ref(), outer);
            res
        }
    }
2493
    /// Convenience constructors for wrapping serializers in
    /// [`VerifyingSerializer`] as they are composed.
    trait SerializerExt: Serializer {
        fn into_verifying(self, truncating: bool) -> VerifyingSerializer<Self>
        where
            Self::Buffer: ReusableBuffer,
        {
            let verifier = SerializerVerifier::new(&self, truncating);
            VerifyingSerializer { ser: self, verifier }
        }

        /// Like `wrap_in`, but the resulting `Nested` is also verified.
        fn wrap_in_verifying<B: PacketBuilder>(
            self,
            outer: B,
            truncating: bool,
        ) -> VerifyingSerializer<Nested<Self, B>>
        where
            Self::Buffer: ReusableBuffer,
        {
            self.wrap_in(outer).into_verifying(truncating)
        }

        /// Like `with_size_limit`, but the resulting serializer is verified.
        fn with_size_limit_verifying(
            self,
            limit: usize,
            truncating: bool,
        ) -> VerifyingSerializer<Nested<Self, LimitedSizePacketBuilder>>
        where
            Self::Buffer: ReusableBuffer,
        {
            self.with_size_limit(limit).into_verifying(truncating)
        }
    }

    impl<S: Serializer> SerializerExt for S {}
2527
2528 #[test]
2529 fn test_either_into_inner() {
2530 fn ret_either(a: u32, b: u32, c: bool) -> Either<u32, u32> {
2531 if c { Either::A(a) } else { Either::B(b) }
2532 }
2533
2534 assert_eq!(ret_either(1, 2, true).into_inner(), 1);
2535 assert_eq!(ret_either(1, 2, false).into_inner(), 2);
2536 }
2537
2538 #[test]
2539 fn test_either_unwrap_success() {
2540 assert_eq!(Either::<u16, u32>::A(5).unwrap_a(), 5);
2541 assert_eq!(Either::<u16, u32>::B(10).unwrap_b(), 10);
2542 }
2543
2544 #[test]
2545 #[should_panic]
2546 fn test_either_unwrap_a_panic() {
2547 let _: u16 = Either::<u16, u32>::B(10).unwrap_a();
2548 }
2549
2550 #[test]
2551 #[should_panic]
2552 fn test_either_unwrap_b_panic() {
2553 let _: u32 = Either::<u16, u32>::A(5).unwrap_b();
2554 }
2555
    // Exercises `Buf::into_inner` across body ranges covering the whole
    // buffer, an empty range, a prefix, a suffix, and an interior slice.
    #[test_case(Buf::new((0..100).collect(), ..); "entire buf")]
    #[test_case(Buf::new((0..100).collect(), 0..0); "empty range")]
    #[test_case(Buf::new((0..100).collect(), ..50); "prefix")]
    #[test_case(Buf::new((0..100).collect(), 50..); "suffix")]
    #[test_case(Buf::new((0..100).collect(), 25..75); "middle")]
    fn test_buf_into_inner(buf: Buf<Vec<u8>>) {
        assert_eq!(buf.clone().as_ref(), buf.into_inner());
    }
2564
    #[test]
    fn test_packet_constraints() {
        use PacketConstraints as PC;

        // Degenerate and extreme-but-valid constraints are accepted.
        assert!(PC::try_new(0, 0, 0, 0).is_some());
        assert!(PC::try_new(usize::MAX / 2, usize::MAX / 2, 0, 0).is_some());
        // header_len + footer_len overflowing `usize` is rejected.
        assert_eq!(PC::try_new(usize::MAX, 1, 0, 0), None);
        // min_body_len > max_body_len is rejected.
        assert_eq!(PC::try_new(0, 0, 1, 0), None);

        // Encapsulation adds header/footer lengths and shrinks the
        // available body space accordingly.
        let pc = PC::new(10, 10, 0, usize::MAX);
        assert_eq!(pc.try_encapsulate(&pc).unwrap(), PC::new(20, 20, 0, usize::MAX - 20));

        // NOTE(review): this repeats the assertion above verbatim —
        // possibly a copy/paste duplicate.
        let pc = PC::new(10, 10, 0, usize::MAX);
        assert_eq!(pc.try_encapsulate(&pc).unwrap(), PC::new(20, 20, 0, usize::MAX - 20));

        // The inner header/footer count towards the outer minimum body
        // length, which saturates down to zero here.
        let inner = PC::new(10, 10, 0, usize::MAX);
        let outer = PC::new(0, 0, 10, usize::MAX);
        assert_eq!(inner.try_encapsulate(&outer).unwrap(), PC::new(10, 10, 0, usize::MAX - 20));

        // Combined header lengths overflowing `usize` fail.
        let inner = PC::new(usize::MAX, 0, 0, usize::MAX);
        let outer = PC::new(1, 0, 0, usize::MAX);
        assert_eq!(inner.try_encapsulate(&outer), None);

        // Combined footer lengths overflowing `usize` fail.
        let inner = PC::new(0, usize::MAX, 0, usize::MAX);
        let outer = PC::new(0, 1, 0, usize::MAX);
        assert_eq!(inner.try_encapsulate(&outer), None);

        // Sums that only overflow once all components are combined fail.
        let one_fifth_max = (usize::MAX / 5) + 1;
        let inner = PC::new(one_fifth_max, one_fifth_max, one_fifth_max, usize::MAX);
        let outer = PC::new(one_fifth_max, one_fifth_max, 0, usize::MAX);
        assert_eq!(inner.try_encapsulate(&outer), None);

        // Inner header/footer can't fit within the outer max body length.
        let inner = PC::new(10, 10, 0, usize::MAX);
        let outer = PC::new(0, 0, 0, 10);
        assert_eq!(inner.try_encapsulate(&outer), None);

        // Inner minimum body length exceeds the outer maximum body length.
        let inner = PC::new(0, 0, 10, usize::MAX);
        let outer = PC::new(0, 0, 0, 5);
        assert_eq!(inner.try_encapsulate(&outer), None);
    }
2645
    #[test]
    fn test_inner_serializer() {
        const INNER: &[u8] = &[0, 1, 2, 3, 4, 5, 6, 7, 8, 9];

        // Concatenates a sequence of byte slices into one `Vec`.
        fn concat<'a, I: IntoIterator<Item = &'a &'a [u8]>>(slices: I) -> Vec<u8> {
            let mut v = Vec::new();
            for slc in slices.into_iter() {
                v.extend_from_slice(slc);
            }
            v
        }

        // With no constraints, the serialized output is just the body.
        let buf = INNER.into_serializer().serialize_vec_outer().unwrap();
        assert_eq!(buf.as_ref(), INNER);

        // A minimum body length of 20 forces 10 bytes of zero padding.
        let buf = INNER
            .into_serializer()
            .into_verifying(false)
            .wrap_in(DummyPacketBuilder::new(0, 0, 20, usize::MAX))
            .serialize_vec_outer()
            .unwrap();
        assert_eq!(buf.as_ref(), concat(&[INNER, vec![0; 10].as_ref()]).as_slice());

        // A 10-byte header (0xFF) and footer (0xFE) wrap the body.
        let buf = INNER
            .into_serializer()
            .into_verifying(false)
            .wrap_in(DummyPacketBuilder::new(10, 10, 0, usize::MAX))
            .serialize_vec_outer()
            .unwrap();
        assert_eq!(
            buf.as_ref(),
            concat(&[vec![0xFF; 10].as_ref(), INNER, vec![0xFE; 10].as_ref()]).as_slice()
        );

        // A max body length smaller than the body fails serialization.
        assert_eq!(
            INNER
                .into_serializer()
                .into_verifying(false)
                .wrap_in(DummyPacketBuilder::new(0, 0, 0, 9))
                .serialize_vec_outer()
                .unwrap_err()
                .0,
            SerializeError::SizeLimitExceeded
        );

        // The contents of a caller-provided buffer are replaced by the
        // serialized inner packet.
        assert_eq!(
            INNER
                .into_serializer_with(Buf::new(vec![0xFF], ..))
                .into_verifying(false)
                .serialize_vec_outer()
                .unwrap()
                .as_ref(),
            INNER
        );
    }
2711
2712 #[test]
2713 fn test_buffer_serializer_and_inner_serializer() {
2714 fn verify_buffer_serializer<B: BufferMut + Debug>(
2715 buffer: B,
2716 header_len: usize,
2717 footer_len: usize,
2718 min_body_len: usize,
2719 ) {
2720 let old_body = buffer.to_flattened_vec();
2721 let serializer =
2722 DummyPacketBuilder::new(header_len, footer_len, min_body_len, usize::MAX)
2723 .wrap_body(buffer);
2724
2725 let buffer0 = serializer
2726 .serialize_new_buf(PacketConstraints::UNCONSTRAINED, new_buf_vec)
2727 .unwrap();
2728 verify(buffer0, &old_body, header_len, footer_len, min_body_len);
2729
2730 let buffer = serializer.serialize_vec_outer().unwrap();
2731 verify(buffer, &old_body, header_len, footer_len, min_body_len);
2732 }
2733
2734 fn verify_inner_packet_builder_serializer(
2735 body: &[u8],
2736 header_len: usize,
2737 footer_len: usize,
2738 min_body_len: usize,
2739 ) {
2740 let buffer = DummyPacketBuilder::new(header_len, footer_len, min_body_len, usize::MAX)
2741 .wrap_body(body.into_serializer())
2742 .serialize_vec_outer()
2743 .unwrap();
2744 verify(buffer, body, header_len, footer_len, min_body_len);
2745 }
2746
2747 fn verify<B: Buffer>(
2748 buffer: B,
2749 body: &[u8],
2750 header_len: usize,
2751 footer_len: usize,
2752 min_body_len: usize,
2753 ) {
2754 let flat = buffer.to_flattened_vec();
2755 let header_bytes = &flat[..header_len];
2756 let body_bytes = &flat[header_len..header_len + body.len()];
2757 let padding_len = min_body_len.saturating_sub(body.len());
2758 let padding_bytes =
2759 &flat[header_len + body.len()..header_len + body.len() + padding_len];
2760 let total_body_len = body.len() + padding_len;
2761 let footer_bytes = &flat[header_len + total_body_len..];
2762 assert_eq!(
2763 buffer.len() - total_body_len,
2764 header_len + footer_len,
2765 "buffer.len()({}) - total_body_len({}) != header_len({}) + footer_len({})",
2766 buffer.len(),
2767 header_len,
2768 footer_len,
2769 min_body_len,
2770 );
2771
2772 assert!(
2774 header_bytes.iter().all(|b| *b == 0xFF),
2775 "header_bytes {:?} are not filled with 0xFF's",
2776 header_bytes,
2777 );
2778 assert_eq!(body_bytes, body);
2779 assert!(
2781 padding_bytes.iter().all(|b| *b == 0),
2782 "padding_bytes {:?} are not filled with 0s",
2783 padding_bytes,
2784 );
2785 assert!(
2787 footer_bytes.iter().all(|b| *b == 0xFE),
2788 "footer_bytes {:?} are not filled with 0xFE's",
2789 footer_bytes,
2790 );
2791 }
2792
2793 for buf_len in 0..8 {
2796 for range_start in 0..buf_len {
2797 for range_end in range_start..buf_len {
2798 for prefix in 0..8 {
2799 for suffix in 0..8 {
2800 for min_body in 0..8 {
2801 let mut vec = vec![0; buf_len];
2802 #[allow(clippy::needless_range_loop)]
2807 for i in 0..vec.len() {
2808 vec[i] = i as u8;
2809 }
2810 verify_buffer_serializer(
2811 Buf::new(vec.as_mut_slice(), range_start..range_end),
2812 prefix,
2813 suffix,
2814 min_body,
2815 );
2816 if range_start == 0 {
2817 verify_inner_packet_builder_serializer(
2826 &vec.as_slice()[range_start..range_end],
2827 prefix,
2828 suffix,
2829 min_body,
2830 );
2831 }
2832 }
2833 }
2834 }
2835 }
2836 }
2837 }
2838 }
2839
    #[test]
    fn test_min_body_len() {
        let body = &[1, 2];

        // The inner layer adds a 2-byte header and footer; the outer layer
        // requires an 8-byte minimum body, so 2 bytes of padding must land
        // after the inner footer and before the outer footer.
        let inner = DummyPacketBuilder::new(2, 2, 0, usize::MAX);
        let outer = DummyPacketBuilder::new(2, 2, 8, usize::MAX);
        let buf = body
            .into_serializer()
            .into_verifying(false)
            .wrap_in_verifying(inner, false)
            .wrap_in_verifying(outer, false)
            .serialize_vec_outer()
            .unwrap();
        assert_eq!(buf.prefix_len(), 0);
        assert_eq!(buf.suffix_len(), 0);
        // Layout: outer header, inner header, body, inner footer, padding,
        // outer footer.
        assert_eq!(
            buf.as_ref(),
            &[0xFF, 0xFF, 0xFF, 0xFF, 1, 2, 0xFE, 0xFE, 0, 0, 0xFE, 0xFE]
        );
    }
2874
    #[test]
    fn test_size_limit() {
        // `ser` must serialize to exactly one byte for these limits to be
        // meaningful.
        fn test<S: Serializer + Clone + Debug + Eq>(ser: S)
        where
            S::Buffer: ReusableBuffer,
        {
            // Adds 1 header byte and 1 footer byte around the body.
            let pb = DummyPacketBuilder::new(1, 1, 0, usize::MAX);

            // Exactly at the 3-byte limit (1 header + 1 body + 1 footer).
            assert!(
                ser.clone()
                    .wrap_in_verifying(pb, false)
                    .with_size_limit_verifying(3, false)
                    .serialize_vec_outer()
                    .is_ok()
            );
            // Comfortably under the limit.
            assert!(
                ser.clone()
                    .wrap_in_verifying(pb, false)
                    .with_size_limit_verifying(4, false)
                    .serialize_vec_outer()
                    .is_ok()
            );
            // An inner limit of 1 still admits the 1-byte body.
            assert!(
                ser.clone()
                    .with_size_limit_verifying(1, false)
                    .wrap_in_verifying(pb, false)
                    .with_size_limit_verifying(3, false)
                    .serialize_vec_outer()
                    .is_ok()
            );
            // An inner limit of 0 rejects the body outright.
            assert!(
                ser.clone()
                    .with_size_limit_verifying(0, false)
                    .wrap_in_verifying(pb, false)
                    .serialize_vec_outer()
                    .is_err()
            );
            // A 1-byte outer limit can't fit header + body + footer.
            assert!(
                ser.clone()
                    .wrap_in_verifying(pb, false)
                    .with_size_limit_verifying(1, false)
                    .serialize_vec_outer()
                    .is_err()
            );
        }

        // Run against both an inner-packet-builder serializer and a buffer
        // serializer, each producing a 1-byte body.
        test(DummyPacketBuilder::new(1, 0, 0, usize::MAX).into_serializer().into_verifying(false));
        test(Buf::new(vec![0], ..).into_verifying(false));
    }
2946
    #[test]
    fn test_truncating_serializer() {
        // Serializes `ser` via both serialization paths and checks that
        // each produces exactly `expected`.
        fn verify_result<S: Serializer + Debug>(ser: S, expected: &[u8])
        where
            S::Buffer: ReusableBuffer + AsRef<[u8]>,
        {
            let buf = ser.serialize_new_buf(PacketConstraints::UNCONSTRAINED, new_buf_vec).unwrap();
            assert_eq!(buf.as_ref(), &expected[..]);
            let buf = ser.serialize_vec_outer().unwrap();
            assert_eq!(buf.as_ref(), &expected[..]);
        }

        let body = vec![0, 1, 2, 3, 4, 5, 6, 7, 8, 9];
        // DiscardFront with a 4-byte limit keeps the last 4 bytes.
        let ser =
            TruncatingSerializer::new(Buf::new(body.clone(), ..), TruncateDirection::DiscardFront)
                .into_verifying(true)
                .with_size_limit_verifying(4, true);
        verify_result(ser, &[6, 7, 8, 9]);

        // DiscardBack with a 7-byte limit keeps the first 7 bytes.
        let ser =
            TruncatingSerializer::new(Buf::new(body.clone(), ..), TruncateDirection::DiscardBack)
                .into_verifying(true)
                .with_size_limit_verifying(7, true);
        verify_result(ser, &[0, 1, 2, 3, 4, 5, 6]);

        // With truncation disabled, exceeding the limit fails on every
        // serialization path.
        let ser =
            TruncatingSerializer::new(Buf::new(body.clone(), ..), TruncateDirection::NoTruncating)
                .into_verifying(false)
                .with_size_limit_verifying(5, true);
        assert!(ser.clone().serialize_vec_outer().is_err());
        assert!(ser.serialize_new_buf(PacketConstraints::UNCONSTRAINED, new_buf_vec).is_err());
        assert!(ser.serialize_vec_outer().is_err());

        // Asserts that serializing `ser` inside a builder whose constraints
        // can't be met fails with `err` and returns the serializer intact.
        fn test_serialization_failure<S: Serializer + Clone + Eq + Debug>(
            ser: S,
            err: SerializeError<BufferTooShortError>,
        ) where
            S::Buffer: ReusableBuffer + Debug,
        {
            let (e, new_ser) = DummyPacketBuilder::new(2, 2, 0, 1)
                .wrap_body(ser.clone())
                .serialize_no_alloc_outer()
                .unwrap_err();
            assert_eq!(err, e);
            assert_eq!(new_ser.into_inner(), ser);
        }

        let body = Buf::new(vec![1, 2], ..);
        test_serialization_failure(
            TruncatingSerializer::new(body.clone(), TruncateDirection::DiscardFront)
                .into_verifying(true),
            SerializeError::Alloc(BufferTooShortError),
        );
        // NOTE(review): this case repeats DiscardFront — possibly intended
        // to exercise DiscardBack; confirm.
        test_serialization_failure(
            TruncatingSerializer::new(body.clone(), TruncateDirection::DiscardFront)
                .into_verifying(true),
            SerializeError::Alloc(BufferTooShortError),
        );
        test_serialization_failure(
            TruncatingSerializer::new(body.clone(), TruncateDirection::NoTruncating)
                .into_verifying(false),
            SerializeError::SizeLimitExceeded,
        );
    }
3025
    #[test]
    fn test_try_reuse_buffer() {
        // `try_reuse_buffer` should succeed, preserving the body and
        // providing at least the requested prefix and suffix.
        fn test_expect_success(
            body_range: Range<usize>,
            prefix: usize,
            suffix: usize,
            max_copy_bytes: usize,
        ) {
            let mut bytes = [0, 1, 2, 3, 4, 5, 6, 7, 8, 9];
            let buffer = Buf::new(&mut bytes[..], body_range);
            let body = buffer.as_ref().to_vec();
            let buffer = try_reuse_buffer(buffer, prefix, suffix, max_copy_bytes).unwrap();
            assert_eq!(buffer.as_ref(), body.as_slice());
            assert!(buffer.prefix_len() >= prefix);
            assert!(buffer.suffix_len() >= suffix);
        }

        // `try_reuse_buffer` should fail and hand back an unmodified buffer.
        fn test_expect_failure(
            body_range: Range<usize>,
            prefix: usize,
            suffix: usize,
            max_copy_bytes: usize,
        ) {
            let mut bytes = [0, 1, 2, 3, 4, 5, 6, 7, 8, 9];
            let buffer = Buf::new(&mut bytes[..], body_range.clone());
            let mut bytes = [0, 1, 2, 3, 4, 5, 6, 7, 8, 9];
            let orig = Buf::new(&mut bytes[..], body_range.clone());
            let buffer = try_reuse_buffer(buffer, prefix, suffix, max_copy_bytes).unwrap_err();
            assert_eq!(buffer, orig);
        }

        // No prefix/suffix requested: reusable without copying.
        test_expect_success(0..10, 0, 0, 0);
        // Existing prefix/suffix already satisfy the request.
        test_expect_success(1..9, 1, 1, 0);
        // The body must move, which succeeds within max_copy_bytes...
        test_expect_success(0..9, 1, 0, 9);
        test_expect_success(1..10, 0, 1, 9);
        // ...and fails when the copy would exceed max_copy_bytes.
        test_expect_failure(0..9, 1, 0, 8);
        test_expect_failure(1..10, 0, 1, 8);
    }
3069
3070 #[test]
3071 fn test_maybe_reuse_buffer_provider() {
3072 fn test_expect(body_range: Range<usize>, prefix: usize, suffix: usize, expect_a: bool) {
3073 let mut bytes = [0, 1, 2, 3, 4, 5, 6, 7, 8, 9];
3074 let buffer = Buf::new(&mut bytes[..], body_range);
3075 let body = buffer.as_ref().to_vec();
3076 let buffer = BufferProvider::reuse_or_realloc(
3077 MaybeReuseBufferProvider(new_buf_vec),
3078 buffer,
3079 prefix,
3080 suffix,
3081 )
3082 .unwrap();
3083 match &buffer {
3084 Either::A(_) if expect_a => {}
3085 Either::B(_) if !expect_a => {}
3086 Either::A(_) => panic!("expected Eitehr::B variant"),
3087 Either::B(_) => panic!("expected Eitehr::A variant"),
3088 }
3089 let bytes: &[u8] = buffer.as_ref();
3090 assert_eq!(bytes, body.as_slice());
3091 assert!(buffer.prefix_len() >= prefix);
3092 assert!(buffer.suffix_len() >= suffix);
3093 }
3094
3095 fn test_expect_reuse(body_range: Range<usize>, prefix: usize, suffix: usize) {
3097 test_expect(body_range, prefix, suffix, true);
3098 }
3099
3100 fn test_expect_realloc(body_range: Range<usize>, prefix: usize, suffix: usize) {
3102 test_expect(body_range, prefix, suffix, false);
3103 }
3104
3105 test_expect_reuse(0..10, 0, 0);
3107 test_expect_reuse(1..9, 1, 1);
3109 test_expect_reuse(0..9, 1, 0);
3112 test_expect_reuse(1..10, 0, 1);
3113 test_expect_realloc(0..9, 1, 1);
3115 test_expect_realloc(1..10, 1, 1);
3116 }
3117
3118 #[test]
3119 fn test_no_reuse_buffer_provider() {
3120 #[track_caller]
3121 fn test_expect(body_range: Range<usize>, prefix: usize, suffix: usize) {
3122 let mut bytes = [0, 1, 2, 3, 4, 5, 6, 7, 8, 9];
3123 let internal_buffer: Buf<&mut [u8]> = Buf::new(&mut bytes[..], body_range);
3125 let body = internal_buffer.as_ref().to_vec();
3126 let buffer: Buf<Vec<u8>> = BufferProvider::reuse_or_realloc(
3129 NoReuseBufferProvider(new_buf_vec),
3130 internal_buffer,
3131 prefix,
3132 suffix,
3133 )
3134 .unwrap();
3135 let bytes: &[u8] = buffer.as_ref();
3136 assert_eq!(bytes, body.as_slice());
3137 assert_eq!(buffer.prefix_len(), prefix);
3138 assert_eq!(buffer.suffix_len(), suffix);
3139 }
3140 test_expect(0..10, 0, 0);
3142 test_expect(1..9, 1, 1);
3144 test_expect(0..9, 10, 10);
3146 test_expect(1..10, 15, 15);
3147 }
3148
    /// A test buffer whose body is scattered across two allocations: an inner
    /// buffer `inner` spliced into the middle of a byte vector `data`.
    ///
    /// The logical body is `data[range.start..mid]`, then all of `inner`, then
    /// `data[mid..range.end]`; bytes of `data` outside `range` serve as prefix
    /// and suffix capacity.
    struct ScatterGatherBuf<B> {
        // Backing storage for the fragments surrounding `inner`.
        data: Vec<u8>,
        // Split point within `range` at which `inner` is logically inserted.
        mid: usize,
        // The portion of `data` currently counted as part of the body.
        range: Range<usize>,
        // The inner buffer forming the middle fragment of the body.
        inner: B,
    }
3178
    impl<B: BufferMut> FragmentedBuffer for ScatterGatherBuf<B> {
        fn len(&self) -> usize {
            // Body length = inner buffer + the in-range portion of `data`.
            self.inner.len() + (self.range.end - self.range.start)
        }

        fn with_bytes<'a, R, F>(&'a self, f: F) -> R
        where
            F: for<'b> FnOnce(FragmentedBytes<'b, 'a>) -> R,
        {
            // Carve `data` into the fragment before `inner`
            // (range.start..mid) and the fragment after it (mid..range.end),
            // then expose [before, inner, after] as one fragmented slice.
            let (_, rest) = self.data.split_at(self.range.start);
            let (prefix_b, rest) = rest.split_at(self.mid - self.range.start);
            let (suffix_b, _) = rest.split_at(self.range.end - self.mid);
            let mut bytes = [prefix_b, self.inner.as_ref(), suffix_b];
            f(FragmentedBytes::new(&mut bytes[..]))
        }
    }
3195
    impl<B: BufferMut> FragmentedBufferMut for ScatterGatherBuf<B> {
        fn with_bytes_mut<'a, R, F>(&'a mut self, f: F) -> R
        where
            F: for<'b> FnOnce(FragmentedBytesMut<'b, 'a>) -> R,
        {
            // Mutable twin of `with_bytes`: the same three-way split of the
            // body, using `split_at_mut` so the closure may write through it.
            let (_, rest) = self.data.split_at_mut(self.range.start);
            let (prefix_b, rest) = rest.split_at_mut(self.mid - self.range.start);
            let (suffix_b, _) = rest.split_at_mut(self.range.end - self.mid);
            let mut bytes = [prefix_b, self.inner.as_mut(), suffix_b];
            f(FragmentedBytesMut::new(&mut bytes[..]))
        }
    }
3208
3209 impl<B: BufferMut> GrowBuffer for ScatterGatherBuf<B> {
3210 fn with_parts<'a, O, F>(&'a self, f: F) -> O
3211 where
3212 F: for<'b> FnOnce(&'a [u8], FragmentedBytes<'b, 'a>, &'a [u8]) -> O,
3213 {
3214 let (prefix, rest) = self.data.split_at(self.range.start);
3215 let (prefix_b, rest) = rest.split_at(self.mid - self.range.start);
3216 let (suffix_b, suffix) = rest.split_at(self.range.end - self.mid);
3217 let mut bytes = [prefix_b, self.inner.as_ref(), suffix_b];
3218 f(prefix, bytes.as_fragmented_byte_slice(), suffix)
3219 }
3220 fn prefix_len(&self) -> usize {
3221 self.range.start
3222 }
3223
3224 fn suffix_len(&self) -> usize {
3225 self.data.len() - self.range.end
3226 }
3227
3228 fn grow_front(&mut self, n: usize) {
3229 self.range.start -= n;
3230 }
3231
3232 fn grow_back(&mut self, n: usize) {
3233 self.range.end += n;
3234 assert!(self.range.end <= self.data.len());
3235 }
3236 }
3237
    impl<B: BufferMut> GrowBufferMut for ScatterGatherBuf<B> {
        fn with_parts_mut<'a, O, F>(&'a mut self, f: F) -> O
        where
            F: for<'b> FnOnce(&'a mut [u8], FragmentedBytesMut<'b, 'a>, &'a mut [u8]) -> O,
        {
            // Mutable twin of `with_parts`: prefix, the three body fragments,
            // and suffix, all handed out as mutable slices.
            let (prefix, rest) = self.data.split_at_mut(self.range.start);
            let (prefix_b, rest) = rest.split_at_mut(self.mid - self.range.start);
            let (suffix_b, suffix) = rest.split_at_mut(self.range.end - self.mid);
            let mut bytes = [prefix_b, self.inner.as_mut(), suffix_b];
            f(prefix, bytes.as_fragmented_byte_slice(), suffix)
        }

        fn with_all_contents_mut<'a, O, F>(&'a mut self, _f: F) -> O
        where
            F: for<'b> FnOnce(FragmentedBytesMut<'b, 'a>) -> O,
        {
            // Not exercised by the scatter/gather tests.
            unimplemented!()
        }
    }
3257
    /// A `BufferProvider` that wraps the body in a `ScatterGatherBuf`, so
    /// serialization is exercised against a fragmented (non-contiguous)
    /// buffer.
    struct ScatterGatherProvider;
3259
    impl<B: BufferMut> BufferProvider<B, ScatterGatherBuf<B>> for ScatterGatherProvider {
        type Error = Never;

        fn alloc_no_reuse(
            self,
            _prefix: usize,
            _body: usize,
            _suffix: usize,
        ) -> Result<ScatterGatherBuf<B>, Self::Error> {
            // The scatter/gather tests only exercise `reuse_or_realloc`.
            unimplemented!("not used in tests")
        }

        fn reuse_or_realloc(
            self,
            buffer: B,
            prefix: usize,
            suffix: usize,
        ) -> Result<ScatterGatherBuf<B>, (Self::Error, B)> {
            // Keep the incoming buffer as the inner body fragment and allocate
            // zeroed out-of-line storage for the requested prefix and suffix.
            // With `range` empty (start == end == prefix) the body initially
            // lives entirely in `inner`; `grow_front`/`grow_back` then expand
            // the body into `data` on either side.
            let inner = buffer;
            let data = vec![0; prefix + suffix];
            let range = Range { start: prefix, end: prefix };
            let mid = prefix;
            Ok(ScatterGatherBuf { inner, data, range, mid })
        }
    }
3285
3286 #[test]
3287 fn test_scatter_gather_serialize() {
3288 let buf = Buf::new(vec![10, 20, 30, 40, 50], ..);
3291 let pb = DummyPacketBuilder::new(3, 2, 0, usize::MAX);
3292 let ser = pb.wrap_body(buf);
3293 let result = ser.serialize_outer(ScatterGatherProvider {}).unwrap();
3294 let flattened = result.to_flattened_vec();
3295 assert_eq!(&flattened[..], &[0xFF, 0xFF, 0xFF, 10, 20, 30, 40, 50, 0xFE, 0xFE]);
3296 }
3297
    #[test]
    fn dyn_serialize() {
        // Verify that serializing through `DynSerializer` trait objects yields
        // exactly the same bytes as serializing the concrete serializers
        // directly, at several levels of type erasure.
        let body = Buf::new(vec![10, 20, 30, 40, 50], ..);
        let header1 = DummyPacketBuilder {
            header_len: 5,
            footer_len: 0,
            min_body_len: 0,
            max_body_len: usize::MAX,
            header_byte: 0xAA,
            footer_byte: 0xBB,
        };
        let header2 = DummyPacketBuilder {
            header_len: 3,
            footer_len: 2,
            min_body_len: 0,
            max_body_len: usize::MAX,
            header_byte: 0xCC,
            footer_byte: 0xDD,
        };
        // ser1: fully concrete serializer chain (the baseline).
        let ser1 = body.clone().wrap_in(header1).wrap_in(header2);
        // ser2: the inner serializer is erased behind `DynSerializer` before
        // the outer header is applied. The shadowing `let` keeps the original
        // alive for the borrow taken by `DynSerializer::new`.
        let ser2 = body.wrap_in(header1);
        let ser2 = DynSerializer::new(&ser2).wrap_in(header2);
        // ser3: the entire concrete chain erased behind `DynSerializer`.
        let ser3 = ser1.clone();
        let ser3 = DynSerializer::new(&ser3);
        // ser4: nested erasure — a `DynSerializer` over a serializer that
        // itself contains a `DynSerializer`.
        let ser4 = DynSerializer::new(&ser2);

        // Serializes via `serialize_vec` (reusable-buffer path).
        fn serialize(s: impl Serializer<Buffer: ReusableBuffer>) -> Vec<u8> {
            s.serialize_vec(PacketConstraints::UNCONSTRAINED)
                .map_err(|(e, _)| e)
                .unwrap()
                .unwrap_b()
                .into_inner()
        }

        // Serializes via `serialize_new_buf` (fresh-allocation path).
        fn serialize_new(s: impl Serializer) -> Vec<u8> {
            s.serialize_new_buf(PacketConstraints::UNCONSTRAINED, new_buf_vec).unwrap().into_inner()
        }

        // Every erased variant, on both serialization paths, must match the
        // concrete baseline byte-for-byte.
        let expect = serialize(ser1.clone());
        assert_eq!(serialize(ser2), expect);
        assert_eq!(serialize(ser3), expect);
        assert_eq!(serialize(ser4), expect);
        assert_eq!(serialize_new(ser1), expect);
        assert_eq!(serialize_new(ser2), expect);
        assert_eq!(serialize_new(ser3), expect);
        assert_eq!(serialize_new(ser4), expect);
    }
3349}