use std::cmp;
use std::convert::Infallible as Never;
use std::fmt::{self, Debug, Formatter};
use std::marker::PhantomData;
// `Deref` is brought into scope for the `.deref()` calls in
// `ByteSliceInnerPacketBuilder` below.
use std::ops::{Deref, Range, RangeBounds};

use arrayvec::ArrayVec;
use zerocopy::SplitByteSlice;

use crate::{
    AsFragmentedByteSlice, Buffer, BufferView, BufferViewMut, ContiguousBuffer, EmptyBuf,
    FragmentedBuffer, FragmentedBufferMut, FragmentedBytes, FragmentedBytesMut, GrowBuffer,
    GrowBufferMut, ParsablePacket, ParseBuffer, ParseBufferMut, ReusableBuffer, ShrinkBuffer,
    canonicalize_range, take_back, take_back_mut, take_front, take_front_mut,
};
22
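/// A value that is either an `A` or a `B`.
///
/// `Either` implements the buffer and parsing traits whenever both `A` and `B`
/// implement them, delegating every call to whichever variant is present. It
/// is used, for example, by [`MaybeReuseBufferProvider`] to return either the
/// reused input buffer or a newly allocated one.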
23#[derive(Copy, Clone, Debug)]
29pub enum Either<A, B> {
30 A(A),
31 B(B),
32}
33
34impl<A, B> Either<A, B> {
35 pub fn map_a<AA, F: FnOnce(A) -> AA>(self, f: F) -> Either<AA, B> {
41 match self {
42 Either::A(a) => Either::A(f(a)),
43 Either::B(b) => Either::B(b),
44 }
45 }
46
47 pub fn map_b<BB, F: FnOnce(B) -> BB>(self, f: F) -> Either<A, BB> {
53 match self {
54 Either::A(a) => Either::A(a),
55 Either::B(b) => Either::B(f(b)),
56 }
57 }
58
59 pub fn unwrap_a(self) -> A {
65 match self {
66 Either::A(x) => x,
67 Either::B(_) => panic!("This `Either<A, B>` does not hold the `A` variant"),
68 }
69 }
70
71 pub fn unwrap_b(self) -> B {
77 match self {
78 Either::A(_) => panic!("This `Either<A, B>` does not hold the `B` variant"),
79 Either::B(x) => x,
80 }
81 }
82}
83
84impl<A> Either<A, A> {
85 pub fn into_inner(self) -> A {
88 match self {
89 Either::A(x) => x,
90 Either::B(x) => x,
91 }
92 }
93}
94
95impl<A> Either<A, Never> {
96 #[inline]
98 pub fn into_a(self) -> A {
99 match self {
100 Either::A(a) => a,
101 }
102 }
103}
104
105impl<B> Either<Never, B> {
106 #[inline]
108 pub fn into_b(self) -> B {
109 match self {
110 Either::B(b) => b,
111 }
112 }
113}
114
115macro_rules! call_method_on_either {
116 ($val:expr, $method:ident, $($args:expr),*) => {
117 match $val {
118 Either::A(a) => a.$method($($args),*),
119 Either::B(b) => b.$method($($args),*),
120 }
121 };
122 ($val:expr, $method:ident) => {
123 call_method_on_either!($val, $method,)
124 };
125}
126
127impl<A, B> FragmentedBuffer for Either<A, B>
134where
135 A: FragmentedBuffer,
136 B: FragmentedBuffer,
137{
138 fn len(&self) -> usize {
139 call_method_on_either!(self, len)
140 }
141
142 fn with_bytes<'a, R, F>(&'a self, f: F) -> R
143 where
144 F: for<'b> FnOnce(FragmentedBytes<'b, 'a>) -> R,
145 {
146 call_method_on_either!(self, with_bytes, f)
147 }
148}
149
150impl<A, B> ContiguousBuffer for Either<A, B>
151where
152 A: ContiguousBuffer,
153 B: ContiguousBuffer,
154{
155}
156
157impl<A, B> ShrinkBuffer for Either<A, B>
158where
159 A: ShrinkBuffer,
160 B: ShrinkBuffer,
161{
162 fn shrink<R: RangeBounds<usize>>(&mut self, range: R) {
163 call_method_on_either!(self, shrink, range)
164 }
165 fn shrink_front(&mut self, n: usize) {
166 call_method_on_either!(self, shrink_front, n)
167 }
168 fn shrink_back(&mut self, n: usize) {
169 call_method_on_either!(self, shrink_back, n)
170 }
171}
172
173impl<A, B> ParseBuffer for Either<A, B>
174where
175 A: ParseBuffer,
176 B: ParseBuffer,
177{
178 fn parse<'a, P: ParsablePacket<&'a [u8], ()>>(&'a mut self) -> Result<P, P::Error> {
179 call_method_on_either!(self, parse)
180 }
181 fn parse_with<'a, ParseArgs, P: ParsablePacket<&'a [u8], ParseArgs>>(
182 &'a mut self,
183 args: ParseArgs,
184 ) -> Result<P, P::Error> {
185 call_method_on_either!(self, parse_with, args)
186 }
187}
188
189impl<A, B> FragmentedBufferMut for Either<A, B>
190where
191 A: FragmentedBufferMut,
192 B: FragmentedBufferMut,
193{
194 fn with_bytes_mut<'a, R, F>(&'a mut self, f: F) -> R
195 where
196 F: for<'b> FnOnce(FragmentedBytesMut<'b, 'a>) -> R,
197 {
198 call_method_on_either!(self, with_bytes_mut, f)
199 }
200}
201
202impl<A, B> ParseBufferMut for Either<A, B>
203where
204 A: ParseBufferMut,
205 B: ParseBufferMut,
206{
207 fn parse_mut<'a, P: ParsablePacket<&'a mut [u8], ()>>(&'a mut self) -> Result<P, P::Error> {
208 call_method_on_either!(self, parse_mut)
209 }
210 fn parse_with_mut<'a, ParseArgs, P: ParsablePacket<&'a mut [u8], ParseArgs>>(
211 &'a mut self,
212 args: ParseArgs,
213 ) -> Result<P, P::Error> {
214 call_method_on_either!(self, parse_with_mut, args)
215 }
216}
217
218impl<A, B> GrowBuffer for Either<A, B>
219where
220 A: GrowBuffer,
221 B: GrowBuffer,
222{
223 #[inline]
224 fn with_parts<'a, O, F>(&'a self, f: F) -> O
225 where
226 F: for<'b> FnOnce(&'a [u8], FragmentedBytes<'b, 'a>, &'a [u8]) -> O,
227 {
228 call_method_on_either!(self, with_parts, f)
229 }
230 fn capacity(&self) -> usize {
231 call_method_on_either!(self, capacity)
232 }
233 fn prefix_len(&self) -> usize {
234 call_method_on_either!(self, prefix_len)
235 }
236 fn suffix_len(&self) -> usize {
237 call_method_on_either!(self, suffix_len)
238 }
239 fn grow_front(&mut self, n: usize) {
240 call_method_on_either!(self, grow_front, n)
241 }
242 fn grow_back(&mut self, n: usize) {
243 call_method_on_either!(self, grow_back, n)
244 }
245 fn reset(&mut self) {
246 call_method_on_either!(self, reset)
247 }
248}
249
250impl<A, B> GrowBufferMut for Either<A, B>
251where
252 A: GrowBufferMut,
253 B: GrowBufferMut,
254{
255 fn with_parts_mut<'a, O, F>(&'a mut self, f: F) -> O
256 where
257 F: for<'b> FnOnce(&'a mut [u8], FragmentedBytesMut<'b, 'a>, &'a mut [u8]) -> O,
258 {
259 call_method_on_either!(self, with_parts_mut, f)
260 }
261
262 fn with_all_contents_mut<'a, O, F>(&'a mut self, f: F) -> O
263 where
264 F: for<'b> FnOnce(FragmentedBytesMut<'b, 'a>) -> O,
265 {
266 call_method_on_either!(self, with_all_contents_mut, f)
267 }
268
269 fn serialize<BB: PacketBuilder>(&mut self, builder: BB) {
270 call_method_on_either!(self, serialize, builder)
271 }
272}
273
274impl<A, B> Buffer for Either<A, B>
275where
276 A: Buffer,
277 B: Buffer,
278{
279 fn parse_with_view<'a, ParseArgs, P: ParsablePacket<&'a [u8], ParseArgs>>(
280 &'a mut self,
281 args: ParseArgs,
282 ) -> Result<(P, &'a [u8]), P::Error> {
283 call_method_on_either!(self, parse_with_view, args)
284 }
285}
286
287impl<A: AsRef<[u8]>, B: AsRef<[u8]>> AsRef<[u8]> for Either<A, B> {
288 fn as_ref(&self) -> &[u8] {
289 call_method_on_either!(self, as_ref)
290 }
291}
292
293impl<A: AsMut<[u8]>, B: AsMut<[u8]>> AsMut<[u8]> for Either<A, B> {
294 fn as_mut(&mut self) -> &mut [u8] {
295 call_method_on_either!(self, as_mut)
296 }
297}
298
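/// A byte buffer backed by a contiguous container, `B`, with a movable body
/// range.
///
/// A `Buf` stores the whole allocation in `buf` and tracks the current body as
/// a `Range<usize>` into it. Bytes before the body form the prefix and bytes
/// after it form the suffix; growing and shrinking the body just moves the
/// range, without copying. A small sketch of how the body range behaves
/// (illustrative only, not compiled as a doctest):
///
/// ```ignore
/// let mut buf = Buf::new(vec![0u8, 1, 2, 3, 4, 5, 6, 7], 2..6);
/// assert_eq!(buf.as_ref(), &[2, 3, 4, 5]); // the body
/// buf.shrink_front(1); // body is now 3..6
/// assert_eq!(buf.as_ref(), &[3, 4, 5]);
/// buf.grow_front(2); // body is now 1..6
/// assert_eq!(buf.as_ref(), &[1, 2, 3, 4, 5]);
/// ```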
299#[derive(Clone, Debug)]
305pub struct Buf<B> {
306 buf: B,
307 body: Range<usize>,
308}
309
310impl<B: AsRef<[u8]>> PartialEq for Buf<B> {
311 fn eq(&self, other: &Self) -> bool {
312 let self_slice = AsRef::<[u8]>::as_ref(self);
313 let other_slice = AsRef::<[u8]>::as_ref(other);
314 PartialEq::eq(self_slice, other_slice)
315 }
316}
317
318impl<B: AsRef<[u8]>> Eq for Buf<B> {}
319
320impl Buf<Vec<u8>> {
321 pub fn into_inner(self) -> Vec<u8> {
323 let Buf { mut buf, body } = self;
324 let len = body.end - body.start;
325 let _ = buf.drain(..body.start);
326 buf.truncate(len);
327 buf
328 }
329}
330
331impl<B: AsRef<[u8]>> Buf<B> {
332 pub fn new<R: RangeBounds<usize>>(buf: B, body: R) -> Buf<B> {
343 let len = buf.as_ref().len();
344 Buf { buf, body: canonicalize_range(len, &body) }
345 }
346
347 pub fn buffer_view(&mut self) -> BufView<'_> {
349 BufView { buf: &self.buf.as_ref()[self.body.clone()], body: &mut self.body }
350 }
351}
352
353impl<B: AsRef<[u8]> + AsMut<[u8]>> Buf<B> {
354 pub fn buffer_view_mut(&mut self) -> BufViewMut<'_> {
356 BufViewMut { buf: &mut self.buf.as_mut()[self.body.clone()], body: &mut self.body }
357 }
358}
359
360impl<B: AsRef<[u8]>> FragmentedBuffer for Buf<B> {
361 fragmented_buffer_method_impls!();
362}
363impl<B: AsRef<[u8]>> ContiguousBuffer for Buf<B> {}
364impl<B: AsRef<[u8]>> ShrinkBuffer for Buf<B> {
365 fn shrink<R: RangeBounds<usize>>(&mut self, range: R) {
366 let len = self.len();
367 let mut range = canonicalize_range(len, &range);
368 range.start += self.body.start;
369 range.end += self.body.start;
370 self.body = range;
371 }
372
373 fn shrink_front(&mut self, n: usize) {
374 assert!(n <= self.len());
375 self.body.start += n;
376 }
377 fn shrink_back(&mut self, n: usize) {
378 assert!(n <= self.len());
379 self.body.end -= n;
380 }
381}
382impl<B: AsRef<[u8]>> ParseBuffer for Buf<B> {
383 fn parse_with<'a, ParseArgs, P: ParsablePacket<&'a [u8], ParseArgs>>(
384 &'a mut self,
385 args: ParseArgs,
386 ) -> Result<P, P::Error> {
387 P::parse(self.buffer_view(), args)
388 }
389}
390
391impl<B: AsRef<[u8]> + AsMut<[u8]>> FragmentedBufferMut for Buf<B> {
392 fragmented_buffer_mut_method_impls!();
393}
394
395impl<B: AsRef<[u8]> + AsMut<[u8]>> ParseBufferMut for Buf<B> {
396 fn parse_with_mut<'a, ParseArgs, P: ParsablePacket<&'a mut [u8], ParseArgs>>(
397 &'a mut self,
398 args: ParseArgs,
399 ) -> Result<P, P::Error> {
400 P::parse_mut(self.buffer_view_mut(), args)
401 }
402}
403
404impl<B: AsRef<[u8]>> GrowBuffer for Buf<B> {
405 fn with_parts<'a, O, F>(&'a self, f: F) -> O
406 where
407 F: for<'b> FnOnce(&'a [u8], FragmentedBytes<'b, 'a>, &'a [u8]) -> O,
408 {
409 let (prefix, buf) = self.buf.as_ref().split_at(self.body.start);
410 let (body, suffix) = buf.split_at(self.body.end - self.body.start);
411 let mut body = [&body[..]];
412 f(prefix, body.as_fragmented_byte_slice(), suffix)
413 }
414 fn capacity(&self) -> usize {
415 self.buf.as_ref().len()
416 }
417 fn prefix_len(&self) -> usize {
418 self.body.start
419 }
420 fn suffix_len(&self) -> usize {
421 self.buf.as_ref().len() - self.body.end
422 }
423 fn grow_front(&mut self, n: usize) {
424 assert!(n <= self.body.start);
425 self.body.start -= n;
426 }
427 fn grow_back(&mut self, n: usize) {
428 assert!(n <= self.buf.as_ref().len() - self.body.end);
429 self.body.end += n;
430 }
431}
432
433impl<B: AsRef<[u8]> + AsMut<[u8]>> GrowBufferMut for Buf<B> {
434 fn with_parts_mut<'a, O, F>(&'a mut self, f: F) -> O
435 where
436 F: for<'b> FnOnce(&'a mut [u8], FragmentedBytesMut<'b, 'a>, &'a mut [u8]) -> O,
437 {
438 let (prefix, buf) = self.buf.as_mut().split_at_mut(self.body.start);
439 let (body, suffix) = buf.split_at_mut(self.body.end - self.body.start);
440 let mut body = [&mut body[..]];
441 f(prefix, body.as_fragmented_byte_slice(), suffix)
442 }
443
444 fn with_all_contents_mut<'a, O, F>(&'a mut self, f: F) -> O
445 where
446 F: for<'b> FnOnce(FragmentedBytesMut<'b, 'a>) -> O,
447 {
448 let mut all = [self.buf.as_mut()];
449 f(all.as_fragmented_byte_slice())
450 }
451}
452
453impl<B: AsRef<[u8]>> AsRef<[u8]> for Buf<B> {
454 fn as_ref(&self) -> &[u8] {
455 &self.buf.as_ref()[self.body.clone()]
456 }
457}
458
459impl<B: AsMut<[u8]>> AsMut<[u8]> for Buf<B> {
460 fn as_mut(&mut self) -> &mut [u8] {
461 &mut self.buf.as_mut()[self.body.clone()]
462 }
463}
464
465impl<B: AsRef<[u8]>> Buffer for Buf<B> {
466 fn parse_with_view<'a, ParseArgs, P: ParsablePacket<&'a [u8], ParseArgs>>(
467 &'a mut self,
468 args: ParseArgs,
469 ) -> Result<(P, &'a [u8]), P::Error> {
470 let Self { body, ref buf } = self;
471 let body_before = body.clone();
472 let view = BufView { buf: &buf.as_ref()[body.clone()], body };
473 P::parse(view, args).map(|r| (r, &buf.as_ref()[body_before]))
474 }
475}
476
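/// A [`BufferView`] into a [`Buf`].
///
/// A `BufView` borrows the body bytes of a `Buf` together with a mutable
/// reference to its body range, so bytes consumed with `take_front` and
/// `take_back` during parsing also shrink the `Buf`'s body.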
477pub struct BufView<'a> {
482 buf: &'a [u8],
483 body: &'a mut Range<usize>,
484}
485
486impl<'a> BufferView<&'a [u8]> for BufView<'a> {
487 fn take_front(&mut self, n: usize) -> Option<&'a [u8]> {
488 if self.len() < n {
489 return None;
490 }
491 self.body.start += n;
492 Some(take_front(&mut self.buf, n))
493 }
494
495 fn take_back(&mut self, n: usize) -> Option<&'a [u8]> {
496 if self.len() < n {
497 return None;
498 }
499 self.body.end -= n;
500 Some(take_back(&mut self.buf, n))
501 }
502
503 fn into_rest(self) -> &'a [u8] {
504 self.buf
505 }
506}
507
508impl<'a> AsRef<[u8]> for BufView<'a> {
509 fn as_ref(&self) -> &[u8] {
510 self.buf
511 }
512}
513
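/// A mutable [`BufferView`] into a [`Buf`]; the mutable counterpart of
/// [`BufView`].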
514pub struct BufViewMut<'a> {
520 buf: &'a mut [u8],
521 body: &'a mut Range<usize>,
522}
523
524impl<'a> BufferView<&'a mut [u8]> for BufViewMut<'a> {
525 fn take_front(&mut self, n: usize) -> Option<&'a mut [u8]> {
526 if self.len() < n {
527 return None;
528 }
529 self.body.start += n;
530 Some(take_front_mut(&mut self.buf, n))
531 }
532
533 fn take_back(&mut self, n: usize) -> Option<&'a mut [u8]> {
534 if self.len() < n {
535 return None;
536 }
537 self.body.end -= n;
538 Some(take_back_mut(&mut self.buf, n))
539 }
540
541 fn into_rest(self) -> &'a mut [u8] {
542 self.buf
543 }
544}
545
546impl<'a> BufferViewMut<&'a mut [u8]> for BufViewMut<'a> {}
547
548impl<'a> AsRef<[u8]> for BufViewMut<'a> {
549 fn as_ref(&self) -> &[u8] {
550 self.buf
551 }
552}
553
554impl<'a> AsMut<[u8]> for BufViewMut<'a> {
555 fn as_mut(&mut self) -> &mut [u8] {
556 self.buf
557 }
558}
559
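/// The constraints that a [`PacketBuilder`] places on the packets it builds.
///
/// `header_len` and `footer_len` are the exact number of bytes consumed by the
/// builder's header and footer. `min_body_len` and `max_body_len` bound the
/// body that may be encapsulated; bodies shorter than `min_body_len` must be
/// padded out. The constructors uphold two invariants: `min_body_len <=
/// max_body_len`, and `header_len + min_body_len + footer_len` does not
/// overflow `usize`.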
560#[derive(Copy, Clone, Debug, Eq, PartialEq)]
574pub struct PacketConstraints {
575 header_len: usize,
576 footer_len: usize,
577 min_body_len: usize,
578 max_body_len: usize,
579}
580
581impl PacketConstraints {
582 pub const UNCONSTRAINED: Self =
586 Self { header_len: 0, footer_len: 0, min_body_len: 0, max_body_len: usize::MAX };
587
588 #[inline]
596 pub fn new(
597 header_len: usize,
598 footer_len: usize,
599 min_body_len: usize,
600 max_body_len: usize,
601 ) -> PacketConstraints {
602 PacketConstraints::try_new(header_len, footer_len, min_body_len, max_body_len).expect(
603 "max_body_len < min_body_len or header_len + min_body_len + footer_len overflows usize",
604 )
605 }
606
607 #[inline]
613 pub fn try_new(
614 header_len: usize,
615 footer_len: usize,
616 min_body_len: usize,
617 max_body_len: usize,
618 ) -> Option<PacketConstraints> {
619 let header_min_body_footer_overflows = header_len
621 .checked_add(min_body_len)
622 .and_then(|sum| sum.checked_add(footer_len))
623 .is_none();
624 let max_less_than_min = max_body_len < min_body_len;
626 if max_less_than_min || header_min_body_footer_overflows {
627 return None;
628 }
629 Some(PacketConstraints { header_len, footer_len, min_body_len, max_body_len })
630 }
631
632 #[inline]
636 pub fn with_max_body_len(max_body_len: usize) -> PacketConstraints {
637 PacketConstraints { header_len: 0, footer_len: 0, min_body_len: 0, max_body_len }
642 }
643
644 #[inline]
646 pub fn header_len(&self) -> usize {
647 self.header_len
648 }
649
650 #[inline]
652 pub fn footer_len(&self) -> usize {
653 self.footer_len
654 }
655
656 #[inline]
672 pub fn min_body_len(&self) -> usize {
673 self.min_body_len
674 }
675
676 #[inline]
680 pub fn max_body_len(&self) -> usize {
681 self.max_body_len
682 }
683
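    /// Computes the constraints of a packet with these constraints (`self`)
    /// nested inside a packet with the constraints `outer`, returning `None`
    /// if the combination overflows or is unsatisfiable.
    ///
    /// Header and footer lengths add. The outer's minimum and maximum body
    /// lengths are adjusted by the inner header and footer, which count
    /// towards the outer body. For example, if `self` has `header_len = 10`,
    /// `footer_len = 10`, and `min_body_len = 0`, and `outer` has no header or
    /// footer but `min_body_len = 8`, the result has `header_len = 10`,
    /// `footer_len = 10`, and `min_body_len = max(8 - 20, 0) = 0` (saturating):
    /// the inner header and footer alone already satisfy the outer's minimum
    /// body length.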
684 pub fn try_encapsulate(&self, outer: &Self) -> Option<PacketConstraints> {
694 let inner = self;
695 let header_len = inner.header_len.checked_add(outer.header_len)?;
697 let footer_len = inner.footer_len.checked_add(outer.footer_len)?;
699 let inner_header_footer_len = inner.header_len + inner.footer_len;
702 let min_body_len = cmp::max(
706 outer.min_body_len.saturating_sub(inner_header_footer_len),
707 inner.min_body_len,
708 );
709 let max_body_len =
714 cmp::min(outer.max_body_len.checked_sub(inner_header_footer_len)?, inner.max_body_len);
715 PacketConstraints::try_new(header_len, footer_len, min_body_len, max_body_len)
719 }
720}
721
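/// The targets into which a [`PacketBuilder`] serializes its header and
/// footer.
///
/// When passed to [`PacketBuilder::serialize`], `header` and `footer` are
/// sized according to the builder's [`PacketConstraints`].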
722pub struct SerializeTarget<'a> {
725 #[allow(missing_docs)]
726 pub header: &'a mut [u8],
727 #[allow(missing_docs)]
728 pub footer: &'a mut [u8],
729}
730
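/// A builder which knows how to serialize a header and a footer around a
/// packet body.
///
/// A minimal sketch of an implementation, assuming a hypothetical
/// `TwoByteHeader` type (not part of this crate) whose header is a fixed
/// two-byte tag and which has no footer (illustrative only, not compiled as a
/// doctest):
///
/// ```ignore
/// struct TwoByteHeader(u8);
///
/// impl PacketBuilder for TwoByteHeader {
///     fn constraints(&self) -> PacketConstraints {
///         PacketConstraints::new(2, 0, 0, usize::MAX)
///     }
///
///     fn serialize(&self, target: &mut SerializeTarget<'_>, _body: FragmentedBytesMut<'_, '_>) {
///         let TwoByteHeader(tag) = self;
///         target.header.fill(*tag);
///     }
/// }
/// ```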
731pub trait PacketBuilder: Sized {
742 fn constraints(&self) -> PacketConstraints;
744
745 fn serialize(&self, target: &mut SerializeTarget<'_>, body: FragmentedBytesMut<'_, '_>);
774
775 #[inline]
780 fn wrap_body<B>(self, body: B) -> Nested<B, Self> {
781 Nested { inner: body, outer: self }
782 }
783}
784
785impl<'a, B: PacketBuilder> PacketBuilder for &'a B {
786 #[inline]
787 fn constraints(&self) -> PacketConstraints {
788 B::constraints(self)
789 }
790 #[inline]
791 fn serialize(&self, target: &mut SerializeTarget<'_>, body: FragmentedBytesMut<'_, '_>) {
792 B::serialize(self, target, body)
793 }
794}
795
796impl<'a, B: PacketBuilder> PacketBuilder for &'a mut B {
797 #[inline]
798 fn constraints(&self) -> PacketConstraints {
799 B::constraints(self)
800 }
801 #[inline]
802 fn serialize(&self, target: &mut SerializeTarget<'_>, body: FragmentedBytesMut<'_, '_>) {
803 B::serialize(self, target, body)
804 }
805}
806
807impl PacketBuilder for () {
808 #[inline]
809 fn constraints(&self) -> PacketConstraints {
810 PacketConstraints::UNCONSTRAINED
811 }
812 #[inline]
813 fn serialize(&self, _target: &mut SerializeTarget<'_>, _body: FragmentedBytesMut<'_, '_>) {}
814}
815
816impl PacketBuilder for Never {
817 fn constraints(&self) -> PacketConstraints {
818 match *self {}
819 }
820 fn serialize(&self, _target: &mut SerializeTarget<'_>, _body: FragmentedBytesMut<'_, '_>) {}
821}
822
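/// A packet or serializer, `I`, nested inside an outer [`PacketBuilder`], `O`.
///
/// `Nested` values are normally constructed with [`PacketBuilder::wrap_body`]
/// or [`Serializer::wrap_in`] rather than built directly.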
823#[derive(Copy, Clone, Debug, Eq, PartialEq)]
831pub struct Nested<I, O> {
832 inner: I,
833 outer: O,
834}
835
836impl<I, O> Nested<I, O> {
837 #[inline]
840 pub fn into_inner(self) -> I {
841 self.inner
842 }
843
844 #[inline]
847 pub fn into_outer(self) -> O {
848 self.outer
849 }
850
851 #[inline]
852 pub fn inner(&self) -> &I {
853 &self.inner
854 }
855
856 #[inline]
857 pub fn inner_mut(&mut self) -> &mut I {
858 &mut self.inner
859 }
860
861 #[inline]
862 pub fn outer(&self) -> &O {
863 &self.outer
864 }
865
866 #[inline]
867 pub fn outer_mut(&mut self) -> &mut O {
868 &mut self.outer
869 }
870}
871
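/// A [`PacketBuilder`] with no header or footer that only constrains the
/// maximum body length; see [`Serializer::with_size_limit`].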
872#[derive(Copy, Clone, Debug)]
878#[cfg_attr(test, derive(Eq, PartialEq))]
879pub struct LimitedSizePacketBuilder {
880 pub limit: usize,
882}
883
884impl PacketBuilder for LimitedSizePacketBuilder {
885 fn constraints(&self) -> PacketConstraints {
886 PacketConstraints::with_max_body_len(self.limit)
887 }
888
889 fn serialize(&self, _target: &mut SerializeTarget<'_>, _body: FragmentedBytesMut<'_, '_>) {}
890}
891
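/// A builder of innermost packets: packets that do not encapsulate a body of
/// their own.
///
/// An implementation reports its serialized length from `bytes_len`, and
/// `serialize` is given a buffer of exactly that many bytes to fill. Byte
/// slices, `Vec<u8>`, and `ArrayVec<u8, N>` implement this trait by copying
/// their contents.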
892pub trait InnerPacketBuilder {
906 fn bytes_len(&self) -> usize;
908
909 fn serialize(&self, buffer: &mut [u8]);
924
925 #[inline]
932 fn into_serializer(self) -> InnerSerializer<Self, EmptyBuf>
933 where
934 Self: Sized,
935 {
936 self.into_serializer_with(EmptyBuf)
937 }
938
939 fn into_serializer_with<B: ShrinkBuffer>(self, mut buffer: B) -> InnerSerializer<Self, B>
952 where
953 Self: Sized,
954 {
955 buffer.shrink_back_to(0);
956 InnerSerializer { inner: self, buffer }
957 }
958}
959
960impl<'a, I: InnerPacketBuilder> InnerPacketBuilder for &'a I {
961 #[inline]
962 fn bytes_len(&self) -> usize {
963 I::bytes_len(self)
964 }
965 #[inline]
966 fn serialize(&self, buffer: &mut [u8]) {
967 I::serialize(self, buffer)
968 }
969}
970impl<'a, I: InnerPacketBuilder> InnerPacketBuilder for &'a mut I {
971 #[inline]
972 fn bytes_len(&self) -> usize {
973 I::bytes_len(self)
974 }
975 #[inline]
976 fn serialize(&self, buffer: &mut [u8]) {
977 I::serialize(self, buffer)
978 }
979}
980impl<'a> InnerPacketBuilder for &'a [u8] {
981 #[inline]
982 fn bytes_len(&self) -> usize {
983 self.len()
984 }
985 #[inline]
986 fn serialize(&self, buffer: &mut [u8]) {
987 buffer.copy_from_slice(self);
988 }
989}
990impl<'a> InnerPacketBuilder for &'a mut [u8] {
991 #[inline]
992 fn bytes_len(&self) -> usize {
993 self.len()
994 }
995 #[inline]
996 fn serialize(&self, buffer: &mut [u8]) {
997 buffer.copy_from_slice(self);
998 }
999}
impl InnerPacketBuilder for Vec<u8> {
1001 #[inline]
1002 fn bytes_len(&self) -> usize {
1003 self.len()
1004 }
1005 #[inline]
1006 fn serialize(&self, buffer: &mut [u8]) {
1007 buffer.copy_from_slice(self.as_slice());
1008 }
1009}
1010impl<const N: usize> InnerPacketBuilder for ArrayVec<u8, N> {
1011 fn bytes_len(&self) -> usize {
1012 self.as_slice().bytes_len()
1013 }
1014 fn serialize(&self, buffer: &mut [u8]) {
1015 self.as_slice().serialize(buffer);
1016 }
1017}
1018
1019pub struct ByteSliceInnerPacketBuilder<B>(pub B);
1026
1027impl<B: SplitByteSlice> InnerPacketBuilder for ByteSliceInnerPacketBuilder<B> {
1028 fn bytes_len(&self) -> usize {
1029 self.0.deref().bytes_len()
1030 }
1031 fn serialize(&self, buffer: &mut [u8]) {
1032 self.0.deref().serialize(buffer)
1033 }
1034}
1035
1036impl<B: SplitByteSlice> Debug for ByteSliceInnerPacketBuilder<B> {
1037 fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
1038 write!(f, "ByteSliceInnerPacketBuilder({:?})", self.0.as_ref())
1039 }
1040}
1041
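/// An error encountered while serializing: either the buffer provider failed
/// to produce a buffer (`Alloc`) or a size limit was exceeded
/// (`SizeLimitExceeded`).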
1042#[derive(Copy, Clone, Debug, Eq, PartialEq)]
1049pub enum SerializeError<A> {
1050 Alloc(A),
1052 SizeLimitExceeded,
1054}
1055
1056impl<A> SerializeError<A> {
1057 #[inline]
1059 pub fn is_alloc(&self) -> bool {
1060 match self {
1061 SerializeError::Alloc(_) => true,
1062 SerializeError::SizeLimitExceeded => false,
1063 }
1064 }
1065
1066 #[inline]
1068 pub fn is_size_limit_exceeded(&self) -> bool {
1069 match self {
1070 SerializeError::Alloc(_) => false,
1071 SerializeError::SizeLimitExceeded => true,
1072 }
1073 }
1074
1075 pub fn map_alloc<T, F: FnOnce(A) -> T>(self, f: F) -> SerializeError<T> {
1077 match self {
1078 SerializeError::Alloc(a) => SerializeError::Alloc(f(a)),
1079 SerializeError::SizeLimitExceeded => SerializeError::SizeLimitExceeded,
1080 }
1081 }
1082}
1083
1084impl<A> From<A> for SerializeError<A> {
1085 fn from(a: A) -> SerializeError<A> {
1086 SerializeError::Alloc(a)
1087 }
1088}
1089
1090#[derive(Copy, Clone, Debug, Eq, PartialEq)]
1099pub struct BufferTooShortError;
1100
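/// An object capable of providing the output buffer for a serialization
/// request, either by allocating a fresh buffer (`alloc_no_reuse`) or by
/// reusing the input buffer when possible (`reuse_or_realloc`).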
1101pub trait BufferProvider<Input, Output> {
1118 type Error;
1122
1123 fn alloc_no_reuse(
1133 self,
1134 prefix: usize,
1135 body: usize,
1136 suffix: usize,
1137 ) -> Result<Output, Self::Error>;
1138
1139 fn reuse_or_realloc(
1152 self,
1153 buffer: Input,
1154 prefix: usize,
1155 suffix: usize,
1156 ) -> Result<Output, (Self::Error, Input)>;
1157}
1158
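/// An allocator of output buffers of a requested total length.
///
/// `BufferAlloc` is implemented for any `FnOnce(usize) -> Result<O, E>`, such
/// as [`new_buf_vec`], and for `()`, which always fails and can be used to
/// forbid allocation.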
1159pub trait BufferAlloc<Output> {
1180 type Error;
1184
1185 fn alloc(self, len: usize) -> Result<Output, Self::Error>;
1187}
1188
1189impl<O, E, F: FnOnce(usize) -> Result<O, E>> BufferAlloc<O> for F {
1190 type Error = E;
1191
1192 #[inline]
1193 fn alloc(self, len: usize) -> Result<O, E> {
1194 self(len)
1195 }
1196}
1197
1198impl BufferAlloc<Never> for () {
1199 type Error = ();
1200
1201 #[inline]
1202 fn alloc(self, _len: usize) -> Result<Never, ()> {
1203 Err(())
1204 }
1205}
1206
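/// Allocates a zero-initialized `Buf<Vec<u8>>` of length `len` whose body
/// covers the entire buffer; the `Never` error type records that this
/// allocator cannot fail.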
1207pub fn new_buf_vec(len: usize) -> Result<Buf<Vec<u8>>, Never> {
1218 Ok(Buf::new(vec![0; len], ..))
1219}
1220
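/// An allocator of output buffers with a requested prefix/body/suffix layout.
///
/// The blanket implementation for `FnOnce(usize) -> Result<O, E>` allocates
/// `prefix + body + suffix` bytes and then shrinks the body range into place.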
1221pub trait LayoutBufferAlloc<O> {
1224 type Error;
1228
1229 fn layout_alloc(self, prefix: usize, body: usize, suffix: usize) -> Result<O, Self::Error>;
1232}
1233
1234impl<O: ShrinkBuffer, E, F: FnOnce(usize) -> Result<O, E>> LayoutBufferAlloc<O> for F {
1235 type Error = E;
1236
1237 #[inline]
1238 fn layout_alloc(self, prefix: usize, body: usize, suffix: usize) -> Result<O, E> {
1239 let mut b = self(prefix + body + suffix)?;
1240 b.shrink_front(prefix);
1241 b.shrink_back(suffix);
1242 Ok(b)
1243 }
1244}
1245
1246impl LayoutBufferAlloc<Never> for () {
1247 type Error = ();
1248
1249 #[inline]
1250 fn layout_alloc(self, _prefix: usize, _body: usize, _suffix: usize) -> Result<Never, ()> {
1251 Err(())
1252 }
1253}
1254
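/// Attempts to reuse `buffer` for a serialization that needs `prefix` bytes of
/// prefix and `suffix` bytes of suffix around the existing body.
///
/// If the current prefix and suffix are already large enough, the buffer is
/// returned as-is. Otherwise, if the total capacity suffices and the body is
/// at most `max_copy_bytes` long, the body is moved within the buffer to make
/// room. If neither holds, the unmodified buffer is returned in the `Err`
/// variant so the caller can fall back to allocating.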
1255#[inline]
1277pub fn try_reuse_buffer<B: GrowBufferMut + ShrinkBuffer>(
1278 mut buffer: B,
1279 prefix: usize,
1280 suffix: usize,
1281 max_copy_bytes: usize,
1282) -> Result<B, B> {
1283 let need_prefix = prefix;
1284 let need_suffix = suffix;
1285 let have_prefix = buffer.prefix_len();
1286 let have_body = buffer.len();
1287 let have_suffix = buffer.suffix_len();
1288 let need_capacity = need_prefix + have_body + need_suffix;
1289
1290 if have_prefix >= need_prefix && have_suffix >= need_suffix {
1291 Ok(buffer)
1293 } else if buffer.capacity() >= need_capacity && have_body <= max_copy_bytes {
1294 buffer.reset();
1298
1299 buffer.copy_within(have_prefix..(have_prefix + have_body), need_prefix);
1305 buffer.shrink(need_prefix..(need_prefix + have_body));
1306 debug_assert_eq!(buffer.prefix_len(), need_prefix);
1307 debug_assert!(buffer.suffix_len() >= need_suffix);
1308 debug_assert_eq!(buffer.len(), have_body);
1309 Ok(buffer)
1310 } else {
1311 Err(buffer)
1312 }
1313}
1314
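/// A [`BufferProvider`] that reuses the input buffer when it can (via
/// [`try_reuse_buffer`]) and otherwise allocates a new buffer with its inner
/// [`BufferAlloc`].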
1315pub struct MaybeReuseBufferProvider<A>(pub A);
1319
1320impl<I: ReusableBuffer, O: ReusableBuffer, A: BufferAlloc<O>> BufferProvider<I, Either<I, O>>
1321 for MaybeReuseBufferProvider<A>
1322{
1323 type Error = A::Error;
1324
1325 fn alloc_no_reuse(
1326 self,
1327 prefix: usize,
1328 body: usize,
1329 suffix: usize,
1330 ) -> Result<Either<I, O>, Self::Error> {
1331 let Self(alloc) = self;
1332 let need_capacity = prefix + body + suffix;
1333 BufferAlloc::alloc(alloc, need_capacity).map(|mut buf| {
1334 buf.shrink(prefix..(prefix + body));
1335 Either::B(buf)
1336 })
1337 }
1338
1339 #[inline]
1348 fn reuse_or_realloc(
1349 self,
1350 buffer: I,
1351 need_prefix: usize,
1352 need_suffix: usize,
1353 ) -> Result<Either<I, O>, (A::Error, I)> {
1354 match try_reuse_buffer(buffer, need_prefix, need_suffix, usize::MAX) {
1359 Ok(buffer) => Ok(Either::A(buffer)),
1360 Err(buffer) => {
1361 let have_body = buffer.len();
1362 let mut buf = match BufferProvider::<I, Either<I, O>>::alloc_no_reuse(
1363 self,
1364 need_prefix,
1365 have_body,
1366 need_suffix,
1367 ) {
1368 Ok(buf) => buf,
1369 Err(err) => return Err((err, buffer)),
1370 };
1371
1372 buf.copy_from(&buffer);
1373 debug_assert_eq!(buf.prefix_len(), need_prefix);
1374 debug_assert!(buf.suffix_len() >= need_suffix);
1375 debug_assert_eq!(buf.len(), have_body);
1376 Ok(buf)
1377 }
1378 }
1379 }
1380}
1381
1382impl<B: ReusableBuffer, A: BufferAlloc<B>> BufferProvider<B, B> for MaybeReuseBufferProvider<A> {
1383 type Error = A::Error;
1384
1385 fn alloc_no_reuse(self, prefix: usize, body: usize, suffix: usize) -> Result<B, Self::Error> {
1386 BufferProvider::<B, Either<B, B>>::alloc_no_reuse(self, prefix, body, suffix)
1387 .map(Either::into_inner)
1388 }
1389
1390 #[inline]
1399 fn reuse_or_realloc(self, buffer: B, prefix: usize, suffix: usize) -> Result<B, (A::Error, B)> {
1400 BufferProvider::<B, Either<B, B>>::reuse_or_realloc(self, buffer, prefix, suffix)
1401 .map(Either::into_inner)
1402 }
1403}
1404
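/// A [`BufferProvider`] that never reuses the input buffer: it always
/// allocates with its inner [`BufferAlloc`] and copies the body over.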
1405pub struct NoReuseBufferProvider<A>(pub A);
1409
1410impl<I: FragmentedBuffer, O: ReusableBuffer, A: BufferAlloc<O>> BufferProvider<I, O>
1411 for NoReuseBufferProvider<A>
1412{
1413 type Error = A::Error;
1414
1415 fn alloc_no_reuse(self, prefix: usize, body: usize, suffix: usize) -> Result<O, A::Error> {
1416 let Self(alloc) = self;
1417 alloc.alloc(prefix + body + suffix).map(|mut b| {
1418 b.shrink(prefix..prefix + body);
1419 b
1420 })
1421 }
1422
1423 fn reuse_or_realloc(self, buffer: I, prefix: usize, suffix: usize) -> Result<O, (A::Error, I)> {
1424 BufferProvider::<I, O>::alloc_no_reuse(self, prefix, buffer.len(), suffix)
1425 .map(|mut b| {
1426 b.copy_from(&buffer);
1427 b
1428 })
1429 .map_err(|e| (e, buffer))
1430 }
1431}
1432
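/// An object capable of serializing itself into a complete packet.
///
/// A `Serializer` produces a serialized packet given the
/// [`PacketConstraints`] of the encapsulating layers (`outer`) and a source of
/// output buffers: a [`BufferProvider`] for the consuming `serialize` method,
/// or a [`LayoutBufferAlloc`] for the non-consuming `serialize_new_buf`.
/// Buffers, [`InnerSerializer`]s, [`TruncatingSerializer`]s, and [`Nested`]
/// serializers all implement this trait.
///
/// A minimal end-to-end sketch (illustrative only, not compiled as a doctest):
///
/// ```ignore
/// // Treat a byte slice as the innermost packet, cap the total body size,
/// // and serialize into a freshly allocated `Buf<Vec<u8>>`.
/// let body: &[u8] = &[1, 2, 3];
/// let buf = body
///     .into_serializer()
///     .with_size_limit(16)
///     .serialize_vec_outer()
///     .unwrap();
/// assert_eq!(buf.as_ref(), &[1, 2, 3]);
/// ```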
1433pub trait Serializer: Sized {
1434 type Buffer;
1436
1437 fn serialize<B: GrowBufferMut, P: BufferProvider<Self::Buffer, B>>(
1450 self,
1451 outer: PacketConstraints,
1452 provider: P,
1453 ) -> Result<B, (SerializeError<P::Error>, Self)>;
1454
1455 fn serialize_new_buf<B: GrowBufferMut, A: LayoutBufferAlloc<B>>(
1462 &self,
1463 outer: PacketConstraints,
1464 alloc: A,
1465 ) -> Result<B, SerializeError<A::Error>>;
1466
1467 #[inline]
1483 #[allow(clippy::type_complexity)]
1484 fn serialize_vec(
1485 self,
1486 outer: PacketConstraints,
1487 ) -> Result<Either<Self::Buffer, Buf<Vec<u8>>>, (SerializeError<Never>, Self)>
1488 where
1489 Self::Buffer: ReusableBuffer,
1490 {
1491 self.serialize(outer, MaybeReuseBufferProvider(new_buf_vec))
1492 }
1493
1494 #[inline]
1508 fn serialize_no_alloc(
1509 self,
1510 outer: PacketConstraints,
1511 ) -> Result<Self::Buffer, (SerializeError<BufferTooShortError>, Self)>
1512 where
1513 Self::Buffer: ReusableBuffer,
1514 {
1515 self.serialize(outer, MaybeReuseBufferProvider(())).map(Either::into_a).map_err(
1516 |(err, slf)| {
1517 (
1518 match err {
1519 SerializeError::Alloc(()) => BufferTooShortError.into(),
1520 SerializeError::SizeLimitExceeded => SerializeError::SizeLimitExceeded,
1521 },
1522 slf,
1523 )
1524 },
1525 )
1526 }
1527
1528 #[inline]
1537 fn serialize_outer<B: GrowBufferMut, P: BufferProvider<Self::Buffer, B>>(
1538 self,
1539 provider: P,
1540 ) -> Result<B, (SerializeError<P::Error>, Self)> {
1541 self.serialize(PacketConstraints::UNCONSTRAINED, provider)
1542 }
1543
1544 #[inline]
1555 #[allow(clippy::type_complexity)]
1556 fn serialize_vec_outer(
1557 self,
1558 ) -> Result<Either<Self::Buffer, Buf<Vec<u8>>>, (SerializeError<Never>, Self)>
1559 where
1560 Self::Buffer: ReusableBuffer,
1561 {
1562 self.serialize_vec(PacketConstraints::UNCONSTRAINED)
1563 }
1564
1565 #[inline]
1575 fn serialize_no_alloc_outer(
1576 self,
1577 ) -> Result<Self::Buffer, (SerializeError<BufferTooShortError>, Self)>
1578 where
1579 Self::Buffer: ReusableBuffer,
1580 {
1581 self.serialize_no_alloc(PacketConstraints::UNCONSTRAINED)
1582 }
1583
1584 #[inline]
1591 fn wrap_in<B: PacketBuilder>(self, outer: B) -> Nested<Self, B> {
1592 outer.wrap_body(self)
1593 }
1594
1595 #[inline]
1604 fn with_size_limit(self, limit: usize) -> Nested<Self, LimitedSizePacketBuilder> {
1605 self.wrap_in(LimitedSizePacketBuilder { limit })
1606 }
1607}
1608
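/// A [`Serializer`] for an [`InnerPacketBuilder`], created by
/// [`InnerPacketBuilder::into_serializer`] or `into_serializer_with`.
///
/// The `buffer` is kept empty (zero-length body); its spare capacity may be
/// reused for the serialized output.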
1609#[derive(Copy, Clone, Debug, Eq, PartialEq)]
1616pub struct InnerSerializer<I, B> {
1617 inner: I,
1618 buffer: B,
1623}
1624
1625impl<I, B> InnerSerializer<I, B> {
1626 pub fn inner(&self) -> &I {
1627 &self.inner
1628 }
1629}
1630
1631struct InnerPacketBuilderWrapper<I>(I);
1637
1638impl<I: InnerPacketBuilder> PacketBuilder for InnerPacketBuilderWrapper<I> {
1639 fn constraints(&self) -> PacketConstraints {
1640 let Self(wrapped) = self;
1641 PacketConstraints::new(wrapped.bytes_len(), 0, 0, usize::MAX)
1642 }
1643
1644 fn serialize(&self, target: &mut SerializeTarget<'_>, _body: FragmentedBytesMut<'_, '_>) {
1645 let Self(wrapped) = self;
1646
1647 debug_assert_eq!(target.header.len(), wrapped.bytes_len());
1651 debug_assert_eq!(target.footer.len(), 0);
1652
1653 InnerPacketBuilder::serialize(wrapped, target.header);
1654 }
1655}
1656
1657impl<I: InnerPacketBuilder, B: GrowBuffer + ShrinkBuffer> Serializer for InnerSerializer<I, B> {
1658 type Buffer = B;
1659
1660 #[inline]
1661 fn serialize<BB: GrowBufferMut, P: BufferProvider<B, BB>>(
1662 self,
1663 outer: PacketConstraints,
1664 provider: P,
1665 ) -> Result<BB, (SerializeError<P::Error>, InnerSerializer<I, B>)> {
1666 debug_assert_eq!(self.buffer.len(), 0);
1667 InnerPacketBuilderWrapper(self.inner)
1668 .wrap_body(self.buffer)
1669 .serialize(outer, provider)
1670 .map_err(|(err, Nested { inner: buffer, outer: pb })| {
1671 (err, InnerSerializer { inner: pb.0, buffer })
1672 })
1673 }
1674
1675 #[inline]
1676 fn serialize_new_buf<BB: GrowBufferMut, A: LayoutBufferAlloc<BB>>(
1677 &self,
1678 outer: PacketConstraints,
1679 alloc: A,
1680 ) -> Result<BB, SerializeError<A::Error>> {
1681 InnerPacketBuilderWrapper(&self.inner).wrap_body(EmptyBuf).serialize_new_buf(outer, alloc)
1682 }
1683}
1684
1685impl<B: GrowBuffer + ShrinkBuffer> Serializer for B {
1686 type Buffer = B;
1687
1688 #[inline]
1689 fn serialize<BB: GrowBufferMut, P: BufferProvider<Self::Buffer, BB>>(
1690 self,
1691 outer: PacketConstraints,
1692 provider: P,
1693 ) -> Result<BB, (SerializeError<P::Error>, Self)> {
1694 TruncatingSerializer::new(self, TruncateDirection::NoTruncating)
1695 .serialize(outer, provider)
1696 .map_err(|(err, ser)| (err, ser.buffer))
1697 }
1698
1699 fn serialize_new_buf<BB: GrowBufferMut, A: LayoutBufferAlloc<BB>>(
1700 &self,
1701 outer: PacketConstraints,
1702 alloc: A,
1703 ) -> Result<BB, SerializeError<A::Error>> {
1704 if self.len() > outer.max_body_len() {
1705 return Err(SerializeError::SizeLimitExceeded);
1706 }
1707
1708 let padding = outer.min_body_len().saturating_sub(self.len());
1709 let tail_size = padding + outer.footer_len();
1710 let mut buffer = alloc.layout_alloc(outer.header_len(), self.len(), tail_size)?;
1711 buffer.copy_from(self);
        // Zero the padding bytes rather than exposing whatever the allocator
        // left in the suffix.
        buffer.grow_back_zero(padding);
1713 Ok(buffer)
1714 }
1715}
1716
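/// A [`Serializer`] that is either an `A` or a `B` serializer over the same
/// buffer type, delegating to whichever variant is present.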
1717pub enum EitherSerializer<A, B> {
1721 A(A),
1722 B(B),
1723}
1724
1725impl<A: Serializer, B: Serializer<Buffer = A::Buffer>> Serializer for EitherSerializer<A, B> {
1726 type Buffer = A::Buffer;
1727
1728 fn serialize<TB: GrowBufferMut, P: BufferProvider<Self::Buffer, TB>>(
1729 self,
1730 outer: PacketConstraints,
1731 provider: P,
1732 ) -> Result<TB, (SerializeError<P::Error>, Self)> {
1733 match self {
1734 EitherSerializer::A(s) => {
1735 s.serialize(outer, provider).map_err(|(err, s)| (err, EitherSerializer::A(s)))
1736 }
1737 EitherSerializer::B(s) => {
1738 s.serialize(outer, provider).map_err(|(err, s)| (err, EitherSerializer::B(s)))
1739 }
1740 }
1741 }
1742
1743 fn serialize_new_buf<TB: GrowBufferMut, BA: LayoutBufferAlloc<TB>>(
1744 &self,
1745 outer: PacketConstraints,
1746 alloc: BA,
1747 ) -> Result<TB, SerializeError<BA::Error>> {
1748 match self {
1749 EitherSerializer::A(s) => s.serialize_new_buf(outer, alloc),
1750 EitherSerializer::B(s) => s.serialize_new_buf(outer, alloc),
1751 }
1752 }
1753}
1754
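/// The end from which a [`TruncatingSerializer`] discards bytes when its body
/// exceeds the allowed maximum body length; `NoTruncating` refuses to truncate
/// and fails with `SizeLimitExceeded` instead.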
1755#[derive(Copy, Clone, Debug, Eq, PartialEq)]
1758pub enum TruncateDirection {
1759 DiscardFront,
1762 DiscardBack,
1765 NoTruncating,
1767}
1768
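/// A [`Serializer`] for a buffer which, unlike the plain buffer
/// implementation, is allowed to truncate the body in the direction given by
/// its [`TruncateDirection`] in order to satisfy a maximum body length.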
1769#[derive(Copy, Clone, Debug, Eq, PartialEq)]
1781pub struct TruncatingSerializer<B> {
1782 buffer: B,
1783 direction: TruncateDirection,
1784}
1785
1786impl<B> TruncatingSerializer<B> {
1787 pub fn new(buffer: B, direction: TruncateDirection) -> TruncatingSerializer<B> {
1789 TruncatingSerializer { buffer, direction }
1790 }
1791
1792 pub fn buffer(&self) -> &B {
1794 &self.buffer
1795 }
1796
1797 pub fn buffer_mut(&mut self) -> &mut B {
1799 &mut self.buffer
1800 }
1801}
1802
1803impl<B: GrowBuffer + ShrinkBuffer> Serializer for TruncatingSerializer<B> {
1804 type Buffer = B;
1805
1806 fn serialize<BB: GrowBufferMut, P: BufferProvider<B, BB>>(
1807 mut self,
1808 outer: PacketConstraints,
1809 provider: P,
1810 ) -> Result<BB, (SerializeError<P::Error>, Self)> {
1811 let original_len = self.buffer.len();
1812 let excess_bytes = if original_len > outer.max_body_len {
1813 Some(original_len - outer.max_body_len)
1814 } else {
1815 None
1816 };
1817 if let Some(excess_bytes) = excess_bytes {
1818 match self.direction {
1819 TruncateDirection::DiscardFront => self.buffer.shrink_front(excess_bytes),
1820 TruncateDirection::DiscardBack => self.buffer.shrink_back(excess_bytes),
1821 TruncateDirection::NoTruncating => {
1822 return Err((SerializeError::SizeLimitExceeded, self));
1823 }
1824 }
1825 }
1826
1827 let padding = outer.min_body_len().saturating_sub(self.buffer.len());
1828
1829 debug_assert!(self.buffer.len() + padding <= outer.max_body_len());
1833 match provider.reuse_or_realloc(
1834 self.buffer,
1835 outer.header_len(),
1836 padding + outer.footer_len(),
1837 ) {
1838 Ok(buffer) => Ok(buffer),
1839 Err((err, mut buffer)) => {
1840 if let Some(excess_bytes) = excess_bytes {
1844 match self.direction {
1845 TruncateDirection::DiscardFront => buffer.grow_front(excess_bytes),
1846 TruncateDirection::DiscardBack => buffer.grow_back(excess_bytes),
1847 TruncateDirection::NoTruncating => unreachable!(),
1848 }
1849 }
1850
1851 Err((
1852 SerializeError::Alloc(err),
1853 TruncatingSerializer { buffer, direction: self.direction },
1854 ))
1855 }
1856 }
1857 }
1858
1859 fn serialize_new_buf<BB: GrowBufferMut, A: LayoutBufferAlloc<BB>>(
1860 &self,
1861 outer: PacketConstraints,
1862 alloc: A,
1863 ) -> Result<BB, SerializeError<A::Error>> {
1864 let truncated_size = cmp::min(self.buffer.len(), outer.max_body_len());
1865 let discarded_bytes = self.buffer.len() - truncated_size;
1866 let padding = outer.min_body_len().saturating_sub(truncated_size);
1867 let tail_size = padding + outer.footer_len();
1868 let mut buffer = alloc.layout_alloc(outer.header_len(), truncated_size, tail_size)?;
1869 buffer.with_bytes_mut(|mut dst| {
1870 self.buffer.with_bytes(|src| {
1871 let src = match (discarded_bytes > 0, self.direction) {
1872 (false, _) => src,
1873 (true, TruncateDirection::DiscardFront) => src.slice(discarded_bytes..),
1874 (true, TruncateDirection::DiscardBack) => src.slice(..truncated_size),
1875 (true, TruncateDirection::NoTruncating) => {
1876 return Err(SerializeError::SizeLimitExceeded);
1877 }
1878 };
1879 dst.copy_from(&src);
1880 Ok(())
1881 })
1882 })?;
1883 buffer.grow_back_zero(padding);
1884 Ok(buffer)
1885 }
1886}
1887
1888impl<I: Serializer, O: PacketBuilder> Serializer for Nested<I, O> {
1889 type Buffer = I::Buffer;
1890
1891 #[inline]
1892 fn serialize<B: GrowBufferMut, P: BufferProvider<I::Buffer, B>>(
1893 self,
1894 outer: PacketConstraints,
1895 provider: P,
1896 ) -> Result<B, (SerializeError<P::Error>, Self)> {
1897 let Some(outer) = self.outer.constraints().try_encapsulate(&outer) else {
1898 return Err((SerializeError::SizeLimitExceeded, self));
1899 };
1900
1901 match self.inner.serialize(outer, provider) {
1902 Ok(mut buf) => {
1903 buf.serialize(&self.outer);
1904 Ok(buf)
1905 }
1906 Err((err, inner)) => Err((err, self.outer.wrap_body(inner))),
1907 }
1908 }
1909
1910 #[inline]
1911 fn serialize_new_buf<B: GrowBufferMut, A: LayoutBufferAlloc<B>>(
1912 &self,
1913 outer: PacketConstraints,
1914 alloc: A,
1915 ) -> Result<B, SerializeError<A::Error>> {
1916 let Some(outer) = self.outer.constraints().try_encapsulate(&outer) else {
1917 return Err(SerializeError::SizeLimitExceeded);
1918 };
1919
1920 let mut buf = self.inner.serialize_new_buf(outer, alloc)?;
1921 GrowBufferMut::serialize(&mut buf, &self.outer);
1922 Ok(buf)
1923 }
1924}
1925
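/// A [`PacketBuilder`] that can also write its header given only the length of
/// the body, without access to the body bytes; used by [`PartialSerializer`].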
1926pub trait PartialPacketBuilder: PacketBuilder {
1928 fn partial_serialize(&self, body_len: usize, buffer: &mut [u8]);
1937}
1938
1939impl PartialPacketBuilder for () {
1940 fn partial_serialize(&self, _body_len: usize, _buffer: &mut [u8]) {}
1941}
1942
1943#[derive(Debug, Eq, PartialEq)]
1945pub struct PartialSerializeResult {
1946 pub bytes_written: usize,
1948
1949 pub total_size: usize,
1951}
1952
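/// A serializer that can write a prefix of the serialized packet (typically
/// its headers) into a fixed-size buffer and report the total size the full
/// serialization would have, without allocating.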
1953pub trait PartialSerializer {
1958 fn partial_serialize(
1968 &self,
1969 outer: PacketConstraints,
1970 buffer: &mut [u8],
1971 ) -> Result<PartialSerializeResult, SerializeError<Never>>;
1972}
1973
1974impl<B: GrowBuffer + ShrinkBuffer> PartialSerializer for B {
1975 fn partial_serialize(
1976 &self,
1977 _outer: PacketConstraints,
1978 _buffer: &mut [u8],
1979 ) -> Result<PartialSerializeResult, SerializeError<Never>> {
1980 Ok(PartialSerializeResult { bytes_written: 0, total_size: self.len() })
1981 }
1982}
1983
1984impl<B: GrowBuffer + ShrinkBuffer> PartialSerializer for TruncatingSerializer<B> {
1985 fn partial_serialize(
1986 &self,
1987 outer: PacketConstraints,
1988 _buffer: &mut [u8],
1989 ) -> Result<PartialSerializeResult, SerializeError<Never>> {
1990 let total_size =
1991 cmp::max(outer.min_body_len(), cmp::min(self.buffer().len(), outer.max_body_len()));
1992 Ok(PartialSerializeResult { bytes_written: 0, total_size })
1993 }
1994}
1995
1996impl<I: InnerPacketBuilder, B: GrowBuffer + ShrinkBuffer> PartialSerializer
1997 for InnerSerializer<I, B>
1998{
1999 fn partial_serialize(
2000 &self,
2001 outer: PacketConstraints,
2002 _buffer: &mut [u8],
2003 ) -> Result<PartialSerializeResult, SerializeError<Never>> {
2004 Ok(PartialSerializeResult {
2005 bytes_written: 0,
2006 total_size: cmp::max(self.inner().bytes_len(), outer.min_body_len()),
2007 })
2008 }
2009}
2010
2011impl<A: Serializer + PartialSerializer, B: Serializer + PartialSerializer> PartialSerializer
2012 for EitherSerializer<A, B>
2013{
2014 fn partial_serialize(
2015 &self,
2016 outer: PacketConstraints,
2017 buffer: &mut [u8],
2018 ) -> Result<PartialSerializeResult, SerializeError<Never>> {
2019 match self {
2020 EitherSerializer::A(s) => s.partial_serialize(outer, buffer),
2021 EitherSerializer::B(s) => s.partial_serialize(outer, buffer),
2022 }
2023 }
2024}
2025
2026impl<I: PartialSerializer, O: PartialPacketBuilder> PartialSerializer for Nested<I, O> {
2027 fn partial_serialize(
2028 &self,
2029 outer: PacketConstraints,
2030 buffer: &mut [u8],
2031 ) -> Result<PartialSerializeResult, SerializeError<Never>> {
2032 let header_constraints = self.outer.constraints();
2033 let Some(constraints) = outer.try_encapsulate(&header_constraints) else {
2034 return Err(SerializeError::SizeLimitExceeded);
2035 };
2036
2037 let header_len = header_constraints.header_len();
2038 let inner_buf = buffer.get_mut(header_len..).unwrap_or(&mut []);
2039 let mut result = self.inner.partial_serialize(constraints, inner_buf)?;
2040 if header_len <= buffer.len() {
2041 self.outer.partial_serialize(result.total_size, &mut buffer[..header_len]);
2042 result.bytes_written += header_len;
2043 }
2044 result.total_size += header_len + header_constraints.footer_len();
2045 Ok(result)
2046 }
2047}
2048
2049mod sealed {
2050 use super::*;
2051
2052 pub trait DynamicSerializerInner {
2057 fn serialize_dyn_alloc(
2067 &self,
2068 outer: PacketConstraints,
2069 alloc: &mut dyn DynamicBufferAlloc,
2070 ) -> Result<(usize, usize), SerializeError<DynAllocError>>;
2071 }
2072
2073 pub trait DynamicBufferAlloc {
2079 fn alloc(
2097 &mut self,
2098 prefix: usize,
2099 body: usize,
2100 suffix: usize,
2101 ) -> Result<Buf<&mut [u8]>, DynAllocError>;
2102 }
2103
2104 pub struct DynAllocError;
2107}
2108
2109use sealed::{DynAllocError, DynamicBufferAlloc, DynamicSerializerInner};
2110
2111fn dyn_serialize_new_buf<B: GrowBufferMut, A: LayoutBufferAlloc<B>>(
2112 serializer: &dyn DynamicSerializerInner,
2113 outer: PacketConstraints,
2114 alloc: A,
2115) -> Result<B, SerializeError<A::Error>> {
2116 enum Adapter<A: LayoutBufferAlloc<B>, B> {
2117 Empty,
2118 Alloc(A),
2119 Buffer(B),
2120 Error(A::Error),
2121 }
2122
2123 impl<A: LayoutBufferAlloc<B>, B: GrowBufferMut> DynamicBufferAlloc for Adapter<A, B> {
2124 fn alloc(
2125 &mut self,
2126 prefix: usize,
2127 body: usize,
2128 suffix: usize,
2129 ) -> Result<Buf<&mut [u8]>, DynAllocError> {
2130 let alloc = match core::mem::replace(self, Self::Empty) {
2131 Self::Alloc(a) => a,
2132 _ => panic!("unexpected alloc state"),
2133 };
2134
2135 let buffer = match alloc.layout_alloc(prefix, body, suffix) {
2136 Ok(b) => b,
2137 Err(e) => {
2138 *self = Self::Error(e);
2139 return Err(DynAllocError);
2140 }
2141 };
2142 *self = Self::Buffer(buffer);
2143 let buffer = match self {
2144 Self::Buffer(b) => b.with_all_contents_mut(|b| match b.try_into_contiguous() {
2145 Ok(b) => b,
2146 Err(_) => todo!(
2147 "https://fxbug.dev/428952155: support dyn serialize fragmented buffers"
2148 ),
2149 }),
2150 _ => unreachable!(),
2152 };
            // Compute the body end before `buffer` is moved into `Buf::new`.
            let body_end = buffer.len() - suffix;
            Ok(Buf::new(buffer, prefix..body_end))
2154 }
2155 }
2156
2157 let mut adapter = Adapter::Alloc(alloc);
2158 let (prefix, suffix) = match serializer.serialize_dyn_alloc(outer, &mut adapter) {
2159 Ok(b) => b,
2160 Err(SerializeError::SizeLimitExceeded) => {
2161 return Err(SerializeError::SizeLimitExceeded);
2162 }
2163 Err(SerializeError::Alloc(DynAllocError)) => match adapter {
2164 Adapter::Error(e) => {
2165 return Err(SerializeError::Alloc(e));
2166 }
2167 _ => {
2168 unreachable!();
2169 }
2170 },
2171 };
2172
2173 let mut buffer = match adapter {
2174 Adapter::Buffer(b) => b,
2175 _ => unreachable!("unexpected alloc state"),
2176 };
2177 buffer.grow_front(buffer.prefix_len().checked_sub(prefix).unwrap_or_else(|| {
2178 panic!("failed to grow buffer front; want: {} got: {}", prefix, buffer.prefix_len())
2179 }));
2180 buffer.grow_back(buffer.suffix_len().checked_sub(suffix).unwrap_or_else(|| {
2181 panic!("failed to grow buffer back; want: {} got: {}", suffix, buffer.suffix_len())
2182 }));
2183 Ok(buffer)
2184}
2185
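/// A type-erased, borrowed [`Serializer`].
///
/// `DynSerializer` lets serializers be passed around as trait objects. Its
/// `Buffer` type is [`EmptyBuf`], so serializing through it always allocates a
/// fresh output buffer from the provider rather than reusing an input buffer.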
2186#[derive(Copy, Clone)]
2191pub struct DynSerializer<'a>(&'a dyn DynamicSerializerInner);
2192
2193impl<'a> DynSerializer<'a> {
2194 pub fn new<S: Serializer>(s: &'a S) -> Self {
2196 Self::new_dyn(s)
2197 }
2198
2199 pub fn new_dyn(s: &'a dyn DynamicSerializer) -> Self {
2201 Self(s)
2202 }
2203}
2204
2205impl Serializer for DynSerializer<'_> {
2206 type Buffer = EmptyBuf;
2207
2208 fn serialize<B: GrowBufferMut, P: BufferProvider<Self::Buffer, B>>(
2209 self,
2210 outer: PacketConstraints,
2211 provider: P,
2212 ) -> Result<B, (SerializeError<P::Error>, Self)> {
2213 struct Adapter<S, P>(P, PhantomData<S>);
2214
2215 impl<S, B, P> LayoutBufferAlloc<B> for Adapter<S, P>
2216 where
2217 P: BufferProvider<S, B>,
2218 {
2219 type Error = P::Error;
2220
2221 fn layout_alloc(
2222 self,
2223 prefix: usize,
2224 body: usize,
2225 suffix: usize,
2226 ) -> Result<B, Self::Error> {
2227 let Self(provider, PhantomData) = self;
2228 provider.alloc_no_reuse(prefix, body, suffix)
2229 }
2230 }
2231
2232 let Self(serializer) = self;
2233 match dyn_serialize_new_buf(serializer, outer, Adapter(provider, PhantomData)) {
2234 Ok(b) => Ok(b),
2235 Err(e) => Err((e, self)),
2236 }
2237 }
2238
2239 fn serialize_new_buf<B: GrowBufferMut, A: LayoutBufferAlloc<B>>(
2240 &self,
2241 outer: PacketConstraints,
2242 alloc: A,
2243 ) -> Result<B, SerializeError<A::Error>> {
2244 let Self(serializer) = self;
2245 dyn_serialize_new_buf(*serializer, outer, alloc)
2246 }
2247}
2248
2249impl<O> DynamicSerializerInner for O
2250where
2251 O: Serializer,
2252{
2253 fn serialize_dyn_alloc(
2254 &self,
2255 outer: PacketConstraints,
2256 alloc: &mut dyn DynamicBufferAlloc,
2257 ) -> Result<(usize, usize), SerializeError<DynAllocError>> {
2258 struct Adapter<'a>(&'a mut dyn DynamicBufferAlloc);
2259 impl<'a> LayoutBufferAlloc<Buf<&'a mut [u8]>> for Adapter<'a> {
2260 type Error = DynAllocError;
2261
2262 fn layout_alloc(
2263 self,
2264 prefix: usize,
2265 body: usize,
2266 suffix: usize,
2267 ) -> Result<Buf<&'a mut [u8]>, Self::Error> {
2268 let Self(inner) = self;
2269 inner.alloc(prefix, body, suffix)
2270 }
2271 }
2272 self.serialize_new_buf(outer, Adapter(alloc))
2273 .map(|buffer| (buffer.prefix_len(), buffer.suffix_len()))
2274 }
2275}
2276
2277pub trait DynamicSerializer: DynamicSerializerInner {}
2292impl<O> DynamicSerializer for O where O: DynamicSerializerInner {}
2293
2294#[cfg(test)]
2295mod tests {
2296 use super::*;
2297 use crate::BufferMut;
2298 use std::fmt::Debug;
2299 use test_case::test_case;
2300 use test_util::{assert_geq, assert_leq};
2301
2302 #[derive(Copy, Clone, Debug, Eq, PartialEq)]
2308 struct DummyPacketBuilder {
2309 header_len: usize,
2310 footer_len: usize,
2311 min_body_len: usize,
2312 max_body_len: usize,
2313 header_byte: u8,
2314 footer_byte: u8,
2315 }
2316
2317 impl DummyPacketBuilder {
2318 fn new(
2319 header_len: usize,
2320 footer_len: usize,
2321 min_body_len: usize,
2322 max_body_len: usize,
2323 ) -> DummyPacketBuilder {
2324 DummyPacketBuilder {
2325 header_len,
2326 footer_len,
2327 min_body_len,
2328 max_body_len,
2329 header_byte: 0xFF,
2330 footer_byte: 0xFE,
2331 }
2332 }
2333 }
2334
2335 impl PacketBuilder for DummyPacketBuilder {
2336 fn constraints(&self) -> PacketConstraints {
2337 PacketConstraints::new(
2338 self.header_len,
2339 self.footer_len,
2340 self.min_body_len,
2341 self.max_body_len,
2342 )
2343 }
2344
2345 fn serialize(&self, target: &mut SerializeTarget<'_>, body: FragmentedBytesMut<'_, '_>) {
2346 assert_eq!(target.header.len(), self.header_len);
2347 assert_eq!(target.footer.len(), self.footer_len);
2348 assert!(body.len() >= self.min_body_len);
2349 assert!(body.len() <= self.max_body_len);
2350 target.header.fill(self.header_byte);
2351 target.footer.fill(self.footer_byte);
2352 }
2353 }
2354
2355 impl PartialPacketBuilder for DummyPacketBuilder {
2356 fn partial_serialize(&self, _body_len: usize, buffer: &mut [u8]) {
2357 buffer.fill(self.header_byte)
2358 }
2359 }
2360
2361 impl InnerPacketBuilder for DummyPacketBuilder {
2362 fn bytes_len(&self) -> usize {
2363 self.header_len
2364 }
2365
2366 fn serialize(&self, buffer: &mut [u8]) {
2367 assert_eq!(buffer.len(), self.header_len);
2368 buffer.fill(self.header_byte);
2369 }
2370 }
2371
2372 #[derive(Copy, Clone, Debug, Eq, PartialEq)]
2374 struct SerializerVerifier {
2375 inner_len: Option<usize>,
2378
2379 truncating: bool,
2382 }
2383
2384 impl SerializerVerifier {
2385 fn new<S: Serializer>(serializer: &S, truncating: bool) -> Self {
2386 let inner_len = serializer
2387 .serialize_new_buf(PacketConstraints::UNCONSTRAINED, new_buf_vec)
2388 .map(|buf| buf.len())
2389 .inspect_err(|err| assert!(err.is_size_limit_exceeded()))
2390 .ok();
2391 Self { inner_len, truncating }
2392 }
2393
2394 fn verify_result<B: GrowBufferMut, A>(
2395 &self,
2396 result: Result<&B, &SerializeError<A>>,
2397 outer: PacketConstraints,
2398 ) {
2399 let should_exceed_size_limit = match self.inner_len {
2400 Some(inner_len) => outer.max_body_len() < inner_len && !self.truncating,
2401 None => true,
2402 };
2403
2404 match result {
2405 Ok(buf) => {
2406 assert_geq!(buf.prefix_len(), outer.header_len());
2407 assert_geq!(buf.suffix_len(), outer.footer_len());
2408 assert_leq!(buf.len(), outer.max_body_len());
2409
2410 let padding = outer.min_body_len().saturating_sub(buf.len());
2415 assert_leq!(padding + outer.footer_len(), buf.suffix_len());
2416
2417 assert!(!should_exceed_size_limit);
2418 }
2419 Err(err) => {
2420 if should_exceed_size_limit {
2423 assert!(err.is_size_limit_exceeded());
2424 } else {
2425 assert!(err.is_alloc());
2426 }
2427 }
2428 }
2429 }
2430 }
2431
2432 #[derive(Copy, Clone, Debug, Eq, PartialEq)]
2441 struct VerifyingSerializer<S> {
2442 ser: S,
2443 verifier: SerializerVerifier,
2444 }
2445
2446 impl<S: Serializer + Debug + Clone + Eq> Serializer for VerifyingSerializer<S>
2447 where
2448 S::Buffer: ReusableBuffer,
2449 {
2450 type Buffer = S::Buffer;
2451
2452 fn serialize<B: GrowBufferMut, P: BufferProvider<Self::Buffer, B>>(
2453 self,
2454 outer: PacketConstraints,
2455 provider: P,
2456 ) -> Result<B, (SerializeError<P::Error>, Self)> {
2457 let Self { ser, verifier } = self;
2458 let orig = ser.clone();
2459
2460 let result = ser.serialize(outer, provider).map_err(|(err, ser)| {
2461 assert_eq!(ser, orig);
2464 (err, Self { ser, verifier })
2465 });
2466
2467 verifier.verify_result(result.as_ref().map_err(|(err, _ser)| err), outer);
2468
2469 result
2470 }
2471
2472 fn serialize_new_buf<B: GrowBufferMut, A: LayoutBufferAlloc<B>>(
2473 &self,
2474 outer: PacketConstraints,
2475 alloc: A,
2476 ) -> Result<B, SerializeError<A::Error>> {
2477 let res = self.ser.serialize_new_buf(outer, alloc);
2478 self.verifier.verify_result(res.as_ref(), outer);
2479 res
2480 }
2481 }
2482
2483 trait SerializerExt: Serializer {
2484 fn into_verifying(self, truncating: bool) -> VerifyingSerializer<Self>
2485 where
2486 Self::Buffer: ReusableBuffer,
2487 {
2488 let verifier = SerializerVerifier::new(&self, truncating);
2489 VerifyingSerializer { ser: self, verifier }
2490 }
2491
2492 fn wrap_in_verifying<B: PacketBuilder>(
2493 self,
2494 outer: B,
2495 truncating: bool,
2496 ) -> VerifyingSerializer<Nested<Self, B>>
2497 where
2498 Self::Buffer: ReusableBuffer,
2499 {
2500 self.wrap_in(outer).into_verifying(truncating)
2501 }
2502
2503 fn with_size_limit_verifying(
2504 self,
2505 limit: usize,
2506 truncating: bool,
2507 ) -> VerifyingSerializer<Nested<Self, LimitedSizePacketBuilder>>
2508 where
2509 Self::Buffer: ReusableBuffer,
2510 {
2511 self.with_size_limit(limit).into_verifying(truncating)
2512 }
2513 }
2514
2515 impl<S: Serializer> SerializerExt for S {}
2516
2517 #[test]
2518 fn test_either_into_inner() {
2519 fn ret_either(a: u32, b: u32, c: bool) -> Either<u32, u32> {
2520 if c { Either::A(a) } else { Either::B(b) }
2521 }
2522
2523 assert_eq!(ret_either(1, 2, true).into_inner(), 1);
2524 assert_eq!(ret_either(1, 2, false).into_inner(), 2);
2525 }
2526
2527 #[test]
2528 fn test_either_unwrap_success() {
2529 assert_eq!(Either::<u16, u32>::A(5).unwrap_a(), 5);
2530 assert_eq!(Either::<u16, u32>::B(10).unwrap_b(), 10);
2531 }
2532
2533 #[test]
2534 #[should_panic]
2535 fn test_either_unwrap_a_panic() {
2536 let _: u16 = Either::<u16, u32>::B(10).unwrap_a();
2537 }
2538
2539 #[test]
2540 #[should_panic]
2541 fn test_either_unwrap_b_panic() {
2542 let _: u32 = Either::<u16, u32>::A(5).unwrap_b();
2543 }
2544
2545 #[test_case(Buf::new((0..100).collect(), ..); "entire buf")]
2546 #[test_case(Buf::new((0..100).collect(), 0..0); "empty range")]
2547 #[test_case(Buf::new((0..100).collect(), ..50); "prefix")]
2548 #[test_case(Buf::new((0..100).collect(), 50..); "suffix")]
2549 #[test_case(Buf::new((0..100).collect(), 25..75); "middle")]
2550 fn test_buf_into_inner(buf: Buf<Vec<u8>>) {
2551 assert_eq!(buf.clone().as_ref(), buf.into_inner());
2552 }
2553
2554 #[test]
2555 fn test_packet_constraints() {
2556 use PacketConstraints as PC;
2557
2558 assert!(PC::try_new(0, 0, 0, 0).is_some());
2562 assert!(PC::try_new(usize::MAX / 2, usize::MAX / 2, 0, 0).is_some());
2564 assert_eq!(PC::try_new(usize::MAX, 1, 0, 0), None);
2566 assert_eq!(PC::try_new(0, 0, 1, 0), None);
2568
2569 let pc = PC::new(10, 10, 0, usize::MAX);
2573 assert_eq!(pc.try_encapsulate(&pc).unwrap(), PC::new(20, 20, 0, usize::MAX - 20));
2577
2578 let inner = PC::new(10, 10, 0, usize::MAX);
2589 let outer = PC::new(0, 0, 10, usize::MAX);
2590 assert_eq!(inner.try_encapsulate(&outer).unwrap(), PC::new(10, 10, 0, usize::MAX - 20));
2591
2592 let inner = PC::new(usize::MAX, 0, 0, usize::MAX);
2596 let outer = PC::new(1, 0, 0, usize::MAX);
2597 assert_eq!(inner.try_encapsulate(&outer), None);
2598
2599 let inner = PC::new(0, usize::MAX, 0, usize::MAX);
2603 let outer = PC::new(0, 1, 0, usize::MAX);
2604 assert_eq!(inner.try_encapsulate(&outer), None);
2605
2606 let one_fifth_max = (usize::MAX / 5) + 1;
2613 let inner = PC::new(one_fifth_max, one_fifth_max, one_fifth_max, usize::MAX);
2614 let outer = PC::new(one_fifth_max, one_fifth_max, 0, usize::MAX);
2615 assert_eq!(inner.try_encapsulate(&outer), None);
2616
2617 let inner = PC::new(10, 10, 0, usize::MAX);
2622 let outer = PC::new(0, 0, 0, 10);
2623 assert_eq!(inner.try_encapsulate(&outer), None);
2624
2625 let inner = PC::new(0, 0, 10, usize::MAX);
2631 let outer = PC::new(0, 0, 0, 5);
2632 assert_eq!(inner.try_encapsulate(&outer), None);
2633 }
2634
2635 #[test]
2636 fn test_inner_serializer() {
2637 const INNER: &[u8] = &[0, 1, 2, 3, 4, 5, 6, 7, 8, 9];
2638
2639 fn concat<'a, I: IntoIterator<Item = &'a &'a [u8]>>(slices: I) -> Vec<u8> {
2640 let mut v = Vec::new();
2641 for slc in slices.into_iter() {
2642 v.extend_from_slice(slc);
2643 }
2644 v
2645 }
2646
2647 let buf = INNER.into_serializer().serialize_vec_outer().unwrap();
2649 assert_eq!(buf.as_ref(), INNER);
2650
2651 let buf = INNER
2654 .into_serializer()
2655 .into_verifying(false)
2656 .wrap_in(DummyPacketBuilder::new(0, 0, 20, usize::MAX))
2657 .serialize_vec_outer()
2658 .unwrap();
2659 assert_eq!(buf.as_ref(), concat(&[INNER, vec![0; 10].as_ref()]).as_slice());
2660
2661 let buf = INNER
2665 .into_serializer()
2666 .into_verifying(false)
2667 .wrap_in(DummyPacketBuilder::new(10, 10, 0, usize::MAX))
2668 .serialize_vec_outer()
2669 .unwrap();
2670 assert_eq!(
2671 buf.as_ref(),
2672 concat(&[vec![0xFF; 10].as_ref(), INNER, vec![0xFE; 10].as_ref()]).as_slice()
2673 );
2674
2675 assert_eq!(
2677 INNER
2678 .into_serializer()
2679 .into_verifying(false)
2680 .wrap_in(DummyPacketBuilder::new(0, 0, 0, 9))
2681 .serialize_vec_outer()
2682 .unwrap_err()
2683 .0,
2684 SerializeError::SizeLimitExceeded
2685 );
2686
2687 assert_eq!(
2691 INNER
2692 .into_serializer_with(Buf::new(vec![0xFF], ..))
2693 .into_verifying(false)
2694 .serialize_vec_outer()
2695 .unwrap()
2696 .as_ref(),
2697 INNER
2698 );
2699 }
2700
    #[test]
    fn test_buffer_serializer_and_inner_serializer() {
        fn verify_buffer_serializer<B: BufferMut + Debug>(
            buffer: B,
            header_len: usize,
            footer_len: usize,
            min_body_len: usize,
        ) {
            let old_body = buffer.to_flattened_vec();
            let serializer =
                DummyPacketBuilder::new(header_len, footer_len, min_body_len, usize::MAX)
                    .wrap_body(buffer);

            let buffer0 = serializer
                .serialize_new_buf(PacketConstraints::UNCONSTRAINED, new_buf_vec)
                .unwrap();
            verify(buffer0, &old_body, header_len, footer_len, min_body_len);

            let buffer = serializer.serialize_vec_outer().unwrap();
            verify(buffer, &old_body, header_len, footer_len, min_body_len);
        }

        fn verify_inner_packet_builder_serializer(
            body: &[u8],
            header_len: usize,
            footer_len: usize,
            min_body_len: usize,
        ) {
            let buffer = DummyPacketBuilder::new(header_len, footer_len, min_body_len, usize::MAX)
                .wrap_body(body.into_serializer())
                .serialize_vec_outer()
                .unwrap();
            verify(buffer, body, header_len, footer_len, min_body_len);
        }

        fn verify<B: Buffer>(
            buffer: B,
            body: &[u8],
            header_len: usize,
            footer_len: usize,
            min_body_len: usize,
        ) {
            let flat = buffer.to_flattened_vec();
            let header_bytes = &flat[..header_len];
            let body_bytes = &flat[header_len..header_len + body.len()];
            let padding_len = min_body_len.saturating_sub(body.len());
            let padding_bytes =
                &flat[header_len + body.len()..header_len + body.len() + padding_len];
            let total_body_len = body.len() + padding_len;
            let footer_bytes = &flat[header_len + total_body_len..];
            assert_eq!(
                buffer.len() - total_body_len,
                header_len + footer_len,
                "buffer.len()({}) - total_body_len({}) != header_len({}) + footer_len({})",
                buffer.len(),
                total_body_len,
                header_len,
                footer_len,
            );

            assert!(
                header_bytes.iter().all(|b| *b == 0xFF),
                "header_bytes {:?} are not filled with 0xFF's",
                header_bytes,
            );
            assert_eq!(body_bytes, body);
            assert!(
                padding_bytes.iter().all(|b| *b == 0),
                "padding_bytes {:?} are not filled with 0s",
                padding_bytes,
            );
            assert!(
                footer_bytes.iter().all(|b| *b == 0xFE),
                "footer_bytes {:?} are not filled with 0xFE's",
                footer_bytes,
            );
        }

        for buf_len in 0..8 {
            for range_start in 0..buf_len {
                for range_end in range_start..buf_len {
                    for prefix in 0..8 {
                        for suffix in 0..8 {
                            for min_body in 0..8 {
                                let mut vec = vec![0; buf_len];
                                // Fill the buffer with distinct byte values so
                                // that misplaced bytes are detectable.
                                #[allow(clippy::needless_range_loop)]
                                for i in 0..vec.len() {
                                    vec[i] = i as u8;
                                }
                                verify_buffer_serializer(
                                    Buf::new(vec.as_mut_slice(), range_start..range_end),
                                    prefix,
                                    suffix,
                                    min_body,
                                );
                                if range_start == 0 {
                                    verify_inner_packet_builder_serializer(
                                        &vec.as_slice()[range_start..range_end],
                                        prefix,
                                        suffix,
                                        min_body,
                                    );
                                }
                            }
                        }
                    }
                }
            }
        }
    }

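    // An outer packet builder with a minimum body length pads the inner
    // packet's body with zeroes before appending its own footer.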
    #[test]
    fn test_min_body_len() {
        let body = &[1, 2];

        let inner = DummyPacketBuilder::new(2, 2, 0, usize::MAX);
        // The outer packet requires a minimum body of 8 bytes, while the inner
        // packet (header + body + footer) is only 6 bytes long.
        let outer = DummyPacketBuilder::new(2, 2, 8, usize::MAX);
        let buf = body
            .into_serializer()
            .into_verifying(false)
            .wrap_in_verifying(inner, false)
            .wrap_in_verifying(outer, false)
            .serialize_vec_outer()
            .unwrap();
        assert_eq!(buf.prefix_len(), 0);
        assert_eq!(buf.suffix_len(), 0);
        assert_eq!(
            buf.as_ref(),
            &[
                0xFF, 0xFF, // outer header
                0xFF, 0xFF, // inner header
                1, 2, // body
                0xFE, 0xFE, // inner footer
                0, 0, // padding to the outer packet's minimum body length
                0xFE, 0xFE, // outer footer
            ]
        );
    }

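    // A size limit constrains the total size of everything it wraps. Limits
    // compose with packet builders, and an unsatisfiable limit at any level
    // causes serialization to fail.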
    #[test]
    fn test_size_limit() {
        fn test<S: Serializer + Clone + Debug + Eq>(ser: S)
        where
            S::Buffer: ReusableBuffer,
        {
            // Both serializers passed to `test` below produce a 1-byte body;
            // `pb` adds a 1-byte header and a 1-byte footer, so the full
            // packet is 3 bytes.
            let pb = DummyPacketBuilder::new(1, 1, 0, usize::MAX);

            // A limit equal to the total packet size is satisfied.
            assert!(
                ser.clone()
                    .wrap_in_verifying(pb, false)
                    .with_size_limit_verifying(3, false)
                    .serialize_vec_outer()
                    .is_ok()
            );
            // A limit larger than the total packet size is satisfied.
            assert!(
                ser.clone()
                    .wrap_in_verifying(pb, false)
                    .with_size_limit_verifying(4, false)
                    .serialize_vec_outer()
                    .is_ok()
            );
            // Nested limits are both satisfied: the body fits in 1 byte and
            // the whole packet fits in 3 bytes.
            assert!(
                ser.clone()
                    .with_size_limit_verifying(1, false)
                    .wrap_in_verifying(pb, false)
                    .with_size_limit_verifying(3, false)
                    .serialize_vec_outer()
                    .is_ok()
            );
            // An inner limit of 0 cannot hold the 1-byte body.
            assert!(
                ser.clone()
                    .with_size_limit_verifying(0, false)
                    .wrap_in_verifying(pb, false)
                    .serialize_vec_outer()
                    .is_err()
            );
            // An outer limit of 1 cannot hold the 3-byte packet.
            assert!(
                ser.clone()
                    .wrap_in_verifying(pb, false)
                    .with_size_limit_verifying(1, false)
                    .serialize_vec_outer()
                    .is_err()
            );
        }

        test(DummyPacketBuilder::new(1, 0, 0, usize::MAX).into_serializer().into_verifying(false));
        test(Buf::new(vec![0], ..).into_verifying(false));
    }

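    // A `TruncatingSerializer` discards bytes from the front or back of its
    // body to satisfy size limits; with `NoTruncating` it behaves like an
    // ordinary buffer serializer and fails instead of truncating.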
    #[test]
    fn test_truncating_serializer() {
        fn verify_result<S: Serializer + Debug>(ser: S, expected: &[u8])
        where
            S::Buffer: ReusableBuffer + AsRef<[u8]>,
        {
            let buf = ser.serialize_new_buf(PacketConstraints::UNCONSTRAINED, new_buf_vec).unwrap();
            assert_eq!(buf.as_ref(), &expected[..]);
            let buf = ser.serialize_vec_outer().unwrap();
            assert_eq!(buf.as_ref(), &expected[..]);
        }

        let body = vec![0, 1, 2, 3, 4, 5, 6, 7, 8, 9];
        // Discarding from the front keeps the last 4 bytes.
        let ser =
            TruncatingSerializer::new(Buf::new(body.clone(), ..), TruncateDirection::DiscardFront)
                .into_verifying(true)
                .with_size_limit_verifying(4, true);
        verify_result(ser, &[6, 7, 8, 9]);

        // Discarding from the back keeps the first 7 bytes.
        let ser =
            TruncatingSerializer::new(Buf::new(body.clone(), ..), TruncateDirection::DiscardBack)
                .into_verifying(true)
                .with_size_limit_verifying(7, true);
        verify_result(ser, &[0, 1, 2, 3, 4, 5, 6]);

        // With `NoTruncating`, a body larger than the size limit is an error.
        let ser =
            TruncatingSerializer::new(Buf::new(body.clone(), ..), TruncateDirection::NoTruncating)
                .into_verifying(false)
                .with_size_limit_verifying(5, true);
        assert!(ser.clone().serialize_vec_outer().is_err());
        assert!(ser.serialize_new_buf(PacketConstraints::UNCONSTRAINED, new_buf_vec).is_err());
        assert!(ser.serialize_vec_outer().is_err());

        // A failed serialization must return the original serializer
        // unmodified alongside the error.
        fn test_serialization_failure<S: Serializer + Clone + Eq + Debug>(
            ser: S,
            err: SerializeError<BufferTooShortError>,
        ) where
            S::Buffer: ReusableBuffer + Debug,
        {
            let (e, new_ser) = DummyPacketBuilder::new(2, 2, 0, 1)
                .wrap_body(ser.clone())
                .serialize_no_alloc_outer()
                .unwrap_err();
            assert_eq!(err, e);
            assert_eq!(new_ser.into_inner(), ser);
        }

        let body = Buf::new(vec![1, 2], ..);
        test_serialization_failure(
            TruncatingSerializer::new(body.clone(), TruncateDirection::DiscardFront)
                .into_verifying(true),
            SerializeError::Alloc(BufferTooShortError),
        );
        test_serialization_failure(
            TruncatingSerializer::new(body.clone(), TruncateDirection::DiscardFront)
                .into_verifying(true),
            SerializeError::Alloc(BufferTooShortError),
        );
        test_serialization_failure(
            TruncatingSerializer::new(body.clone(), TruncateDirection::NoTruncating)
                .into_verifying(false),
            SerializeError::SizeLimitExceeded,
        );
    }

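    // `try_reuse_buffer` reuses the provided buffer when it already has enough
    // prefix and suffix space, or when the body can be shifted within it by
    // copying at most `max_copy_bytes` bytes; otherwise it returns the
    // original buffer as the error value.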
    #[test]
    fn test_try_reuse_buffer() {
        fn test_expect_success(
            body_range: Range<usize>,
            prefix: usize,
            suffix: usize,
            max_copy_bytes: usize,
        ) {
            let mut bytes = [0, 1, 2, 3, 4, 5, 6, 7, 8, 9];
            let buffer = Buf::new(&mut bytes[..], body_range);
            let body = buffer.as_ref().to_vec();
            let buffer = try_reuse_buffer(buffer, prefix, suffix, max_copy_bytes).unwrap();
            assert_eq!(buffer.as_ref(), body.as_slice());
            assert!(buffer.prefix_len() >= prefix);
            assert!(buffer.suffix_len() >= suffix);
        }

        fn test_expect_failure(
            body_range: Range<usize>,
            prefix: usize,
            suffix: usize,
            max_copy_bytes: usize,
        ) {
            let mut bytes = [0, 1, 2, 3, 4, 5, 6, 7, 8, 9];
            let buffer = Buf::new(&mut bytes[..], body_range.clone());
            let mut bytes = [0, 1, 2, 3, 4, 5, 6, 7, 8, 9];
            let orig = Buf::new(&mut bytes[..], body_range.clone());
            let buffer = try_reuse_buffer(buffer, prefix, suffix, max_copy_bytes).unwrap_err();
            assert_eq!(buffer, orig);
        }

        // No prefix or suffix is required.
        test_expect_success(0..10, 0, 0, 0);
        // The buffer already has the required prefix and suffix.
        test_expect_success(1..9, 1, 1, 0);
        // The body must be shifted, which is allowed when the copy limit
        // covers the whole 9-byte body...
        test_expect_success(0..9, 1, 0, 9);
        test_expect_success(1..10, 0, 1, 9);
        // ...but not when the limit is one byte short.
        test_expect_failure(0..9, 1, 0, 8);
        test_expect_failure(1..10, 0, 1, 8);
    }

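    // `MaybeReuseBufferProvider` yields `Either::A` when the existing buffer
    // can be reused in place and `Either::B` when a new one is allocated.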
    #[test]
    fn test_maybe_reuse_buffer_provider() {
        fn test_expect(body_range: Range<usize>, prefix: usize, suffix: usize, expect_a: bool) {
            let mut bytes = [0, 1, 2, 3, 4, 5, 6, 7, 8, 9];
            let buffer = Buf::new(&mut bytes[..], body_range);
            let body = buffer.as_ref().to_vec();
            let buffer = BufferProvider::reuse_or_realloc(
                MaybeReuseBufferProvider(new_buf_vec),
                buffer,
                prefix,
                suffix,
            )
            .unwrap();
            match &buffer {
                Either::A(_) if expect_a => {}
                Either::B(_) if !expect_a => {}
                Either::A(_) => panic!("expected Either::B variant"),
                Either::B(_) => panic!("expected Either::A variant"),
            }
            let bytes: &[u8] = buffer.as_ref();
            assert_eq!(bytes, body.as_slice());
            assert!(buffer.prefix_len() >= prefix);
            assert!(buffer.suffix_len() >= suffix);
        }

        // The existing buffer is reused (`Either::A`).
        fn test_expect_reuse(body_range: Range<usize>, prefix: usize, suffix: usize) {
            test_expect(body_range, prefix, suffix, true);
        }

        // A new buffer is allocated (`Either::B`).
        fn test_expect_realloc(body_range: Range<usize>, prefix: usize, suffix: usize) {
            test_expect(body_range, prefix, suffix, false);
        }

        // The buffer is large enough to provide the prefix and suffix,
        // possibly after shifting the body within it...
        test_expect_reuse(0..10, 0, 0);
        test_expect_reuse(1..9, 1, 1);
        test_expect_reuse(0..9, 1, 0);
        test_expect_reuse(1..10, 0, 1);
        // ...but a 10-byte buffer cannot hold a 9-byte body plus two extra
        // bytes of prefix and suffix, so a new buffer is allocated.
        test_expect_realloc(0..9, 1, 1);
        test_expect_realloc(1..10, 1, 1);
    }

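    // `NoReuseBufferProvider` always copies into a freshly allocated buffer
    // with exactly the requested prefix and suffix.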
    #[test]
    fn test_no_reuse_buffer_provider() {
        #[track_caller]
        fn test_expect(body_range: Range<usize>, prefix: usize, suffix: usize) {
            let mut bytes = [0, 1, 2, 3, 4, 5, 6, 7, 8, 9];
            let internal_buffer: Buf<&mut [u8]> = Buf::new(&mut bytes[..], body_range);
            let body = internal_buffer.as_ref().to_vec();
            let buffer: Buf<Vec<u8>> = BufferProvider::reuse_or_realloc(
                NoReuseBufferProvider(new_buf_vec),
                internal_buffer,
                prefix,
                suffix,
            )
            .unwrap();
            let bytes: &[u8] = buffer.as_ref();
            assert_eq!(bytes, body.as_slice());
            assert_eq!(buffer.prefix_len(), prefix);
            assert_eq!(buffer.suffix_len(), suffix);
        }
        test_expect(0..10, 0, 0);
        test_expect(1..9, 1, 1);
        test_expect(0..9, 10, 10);
        test_expect(1..10, 15, 15);
    }

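    // A test-only scatter/gather buffer: its body is split across three
    // fragments, namely the slice of `data` before `mid`, the wrapped `inner`
    // buffer, and the slice of `data` after `mid`. The bytes of `data` outside
    // `range` serve as prefix and suffix growth space.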
    struct ScatterGatherBuf<B> {
        data: Vec<u8>,
        mid: usize,
        range: Range<usize>,
        inner: B,
    }

    impl<B: BufferMut> FragmentedBuffer for ScatterGatherBuf<B> {
        fn len(&self) -> usize {
            self.inner.len() + (self.range.end - self.range.start)
        }

        fn with_bytes<'a, R, F>(&'a self, f: F) -> R
        where
            F: for<'b> FnOnce(FragmentedBytes<'b, 'a>) -> R,
        {
            let (_, rest) = self.data.split_at(self.range.start);
            let (prefix_b, rest) = rest.split_at(self.mid - self.range.start);
            let (suffix_b, _) = rest.split_at(self.range.end - self.mid);
            let mut bytes = [prefix_b, self.inner.as_ref(), suffix_b];
            f(FragmentedBytes::new(&mut bytes[..]))
        }
    }

    impl<B: BufferMut> FragmentedBufferMut for ScatterGatherBuf<B> {
        fn with_bytes_mut<'a, R, F>(&'a mut self, f: F) -> R
        where
            F: for<'b> FnOnce(FragmentedBytesMut<'b, 'a>) -> R,
        {
            let (_, rest) = self.data.split_at_mut(self.range.start);
            let (prefix_b, rest) = rest.split_at_mut(self.mid - self.range.start);
            let (suffix_b, _) = rest.split_at_mut(self.range.end - self.mid);
            let mut bytes = [prefix_b, self.inner.as_mut(), suffix_b];
            f(FragmentedBytesMut::new(&mut bytes[..]))
        }
    }

    impl<B: BufferMut> GrowBuffer for ScatterGatherBuf<B> {
        fn with_parts<'a, O, F>(&'a self, f: F) -> O
        where
            F: for<'b> FnOnce(&'a [u8], FragmentedBytes<'b, 'a>, &'a [u8]) -> O,
        {
            let (prefix, rest) = self.data.split_at(self.range.start);
            let (prefix_b, rest) = rest.split_at(self.mid - self.range.start);
            let (suffix_b, suffix) = rest.split_at(self.range.end - self.mid);
            let mut bytes = [prefix_b, self.inner.as_ref(), suffix_b];
            f(prefix, bytes.as_fragmented_byte_slice(), suffix)
        }

        fn prefix_len(&self) -> usize {
            self.range.start
        }

        fn suffix_len(&self) -> usize {
            self.data.len() - self.range.end
        }

        fn grow_front(&mut self, n: usize) {
            self.range.start -= n;
        }

        fn grow_back(&mut self, n: usize) {
            self.range.end += n;
            assert!(self.range.end <= self.data.len());
        }
    }

    impl<B: BufferMut> GrowBufferMut for ScatterGatherBuf<B> {
        fn with_parts_mut<'a, O, F>(&'a mut self, f: F) -> O
        where
            F: for<'b> FnOnce(&'a mut [u8], FragmentedBytesMut<'b, 'a>, &'a mut [u8]) -> O,
        {
            let (prefix, rest) = self.data.split_at_mut(self.range.start);
            let (prefix_b, rest) = rest.split_at_mut(self.mid - self.range.start);
            let (suffix_b, suffix) = rest.split_at_mut(self.range.end - self.mid);
            let mut bytes = [prefix_b, self.inner.as_mut(), suffix_b];
            f(prefix, bytes.as_fragmented_byte_slice(), suffix)
        }

        fn with_all_contents_mut<'a, O, F>(&'a mut self, _f: F) -> O
        where
            F: for<'b> FnOnce(FragmentedBytesMut<'b, 'a>) -> O,
        {
            unimplemented!()
        }
    }

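    // A `BufferProvider` that wraps the body in a `ScatterGatherBuf`, placing
    // the requested prefix and suffix space in a separate allocation so that
    // the serialized packet ends up fragmented across buffers.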
    struct ScatterGatherProvider;

    impl<B: BufferMut> BufferProvider<B, ScatterGatherBuf<B>> for ScatterGatherProvider {
        type Error = Never;

        fn alloc_no_reuse(
            self,
            _prefix: usize,
            _body: usize,
            _suffix: usize,
        ) -> Result<ScatterGatherBuf<B>, Self::Error> {
            unimplemented!("not used in tests")
        }

        fn reuse_or_realloc(
            self,
            buffer: B,
            prefix: usize,
            suffix: usize,
        ) -> Result<ScatterGatherBuf<B>, (Self::Error, B)> {
            let inner = buffer;
            let data = vec![0; prefix + suffix];
            let range = Range { start: prefix, end: prefix };
            let mid = prefix;
            Ok(ScatterGatherBuf { inner, data, range, mid })
        }
    }

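    // Serializing through `ScatterGatherProvider` writes the header and footer
    // into fragments that are separate from the original body buffer.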
    #[test]
    fn test_scatter_gather_serialize() {
        let buf = Buf::new(vec![10, 20, 30, 40, 50], ..);
        // A 3-byte header (0xFF) and a 2-byte footer (0xFE) around the body.
        let pb = DummyPacketBuilder::new(3, 2, 0, usize::MAX);
        let ser = pb.wrap_body(buf);
        let result = ser.serialize_outer(ScatterGatherProvider {}).unwrap();
        let flattened = result.to_flattened_vec();
        assert_eq!(&flattened[..], &[0xFF, 0xFF, 0xFF, 10, 20, 30, 40, 50, 0xFE, 0xFE]);
    }

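    // `DynSerializer` type-erases a serializer behind a reference; serializing
    // through it must produce the same bytes as serializing the concrete value
    // directly.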
    #[test]
    fn dyn_serialize() {
        let body = Buf::new(vec![10, 20, 30, 40, 50], ..);
        let header1 = DummyPacketBuilder {
            header_len: 5,
            footer_len: 0,
            min_body_len: 0,
            max_body_len: usize::MAX,
            header_byte: 0xAA,
            footer_byte: 0xBB,
        };
        let header2 = DummyPacketBuilder {
            header_len: 3,
            footer_len: 2,
            min_body_len: 0,
            max_body_len: usize::MAX,
            header_byte: 0xCC,
            footer_byte: 0xDD,
        };
        // A fully concrete serializer.
        let ser1 = body.clone().wrap_in(header1).wrap_in(header2);
        // Type-erase the inner serializer before the outer wrap.
        let ser2 = body.wrap_in(header1);
        let ser2 = DynSerializer::new(&ser2).wrap_in(header2);
        // Type-erase the fully wrapped serializer.
        let ser3 = ser1.clone();
        let ser3 = DynSerializer::new(&ser3);
        // Type-erase a serializer that already contains a `DynSerializer`.
        let ser4 = DynSerializer::new(&ser2);

        fn serialize(s: impl Serializer<Buffer: ReusableBuffer>) -> Vec<u8> {
            s.serialize_vec(PacketConstraints::UNCONSTRAINED)
                .map_err(|(e, _)| e)
                .unwrap()
                .unwrap_b()
                .into_inner()
        }

        fn serialize_new(s: impl Serializer) -> Vec<u8> {
            s.serialize_new_buf(PacketConstraints::UNCONSTRAINED, new_buf_vec).unwrap().into_inner()
        }

        // All four serializers must produce identical bytes via both
        // serialization paths.
        let expect = serialize(ser1.clone());
        assert_eq!(serialize(ser2), expect);
        assert_eq!(serialize(ser3), expect);
        assert_eq!(serialize(ser4), expect);
        assert_eq!(serialize_new(ser1), expect);
        assert_eq!(serialize_new(ser2), expect);
        assert_eq!(serialize_new(ser3), expect);
        assert_eq!(serialize_new(ser4), expect);
    }
}