// fidl_next_codec/encoder.rs
use core::marker::PhantomData;
use core::mem::MaybeUninit;
use core::slice::from_mut;

use crate::{Chunk, Encode, EncodeError, Slot, WireU64, ZeroPadding, CHUNK_SIZE};

/// An encoder which keeps track of the handles it encodes.
pub trait InternalHandleEncoder {
    /// Returns the number of handles encoded so far.
    #[doc(hidden)]
    fn __internal_handle_count(&self) -> usize;
}

/// An encoder for FIDL messages.
pub trait Encoder: InternalHandleEncoder {
    /// Returns the number of bytes written to the encoder.
    fn bytes_written(&self) -> usize;

    /// Writes `len` bytes of zeroes to the end of the encoder.
    fn write_zeroes(&mut self, len: usize);

    /// Writes `bytes` to the end of the encoder.
    fn write(&mut self, bytes: &[u8]);

    /// Overwrites previously-written bytes starting at `pos` with `bytes`.
    fn rewrite(&mut self, pos: usize, bytes: &[u8]);
}

impl InternalHandleEncoder for Vec<Chunk> {
    #[inline]
    fn __internal_handle_count(&self) -> usize {
        0
    }
}

impl Encoder for Vec<Chunk> {
    #[inline]
    fn bytes_written(&self) -> usize {
        self.len() * CHUNK_SIZE
    }

    #[inline]
    fn write_zeroes(&mut self, len: usize) {
        let count = len.div_ceil(CHUNK_SIZE);
        self.reserve(count);
        // SAFETY: `reserve` guarantees capacity for at least `count` more chunks
        // past the current length, and zeroed memory is a valid chunk value.
        let ptr = unsafe { self.as_mut_ptr().add(self.len()) };
        unsafe {
            ptr.write_bytes(0, count);
        }
        unsafe {
            self.set_len(self.len() + count);
        }
    }

    #[inline]
    fn write(&mut self, bytes: &[u8]) {
        if bytes.is_empty() {
            return;
        }

        let count = bytes.len().div_ceil(CHUNK_SIZE);
        self.reserve(count);

        // Zero the last new chunk first so that any padding bytes past the end
        // of `bytes` are zeroed.
        unsafe {
            self.as_mut_ptr().add(self.len() + count - 1).write(WireU64(0));
        }
        let ptr = unsafe { self.as_mut_ptr().add(self.len()).cast::<u8>() };

        // SAFETY: the reserved chunks provide at least `bytes.len()` writable
        // bytes starting at `ptr`, and `bytes` cannot overlap the vector's spare
        // capacity.
        unsafe {
            ptr.copy_from_nonoverlapping(bytes.as_ptr(), bytes.len());
        }

        // SAFETY: the `count` new chunks are now fully initialized.
        unsafe {
            self.set_len(self.len() + count);
        }
    }

    #[inline]
    fn rewrite(&mut self, pos: usize, bytes: &[u8]) {
        assert!(pos + bytes.len() <= self.bytes_written());

        // SAFETY: the assertion above keeps `pos + bytes.len()` within the
        // initialized bytes of the vector.
        let ptr = unsafe { self.as_mut_ptr().cast::<u8>().add(pos) };
        unsafe {
            ptr.copy_from_nonoverlapping(bytes.as_ptr(), bytes.len());
        }
    }
}

/// Extension methods for [`Encoder`].
pub trait EncoderExt {
    /// Pre-allocates space in the encoder for `len` slots of `T`.
    fn preallocate<T>(&mut self, len: usize) -> Preallocated<'_, Self, T>;

    /// Encodes a slice of values and writes them into the encoder.
    fn encode_next_slice<T: Encode<Self>>(&mut self, values: &mut [T]) -> Result<(), EncodeError>;

    /// Encodes a value and writes it into the encoder.
    fn encode_next<T: Encode<Self>>(&mut self, value: &mut T) -> Result<(), EncodeError>;
}

impl<E: Encoder + ?Sized> EncoderExt for E {
    fn preallocate<T>(&mut self, len: usize) -> Preallocated<'_, Self, T> {
        let pos = self.bytes_written();

        // Zero out the slots now; they are rewritten by `Preallocated::write_next`
        // as each value is encoded.
        self.write_zeroes(len * size_of::<T>());

        Preallocated {
            encoder: self,
            pos,
            #[cfg(debug_assertions)]
            remaining: len,
            _phantom: PhantomData,
        }
    }

    fn encode_next_slice<T: Encode<Self>>(&mut self, values: &mut [T]) -> Result<(), EncodeError> {
        let mut slots = self.preallocate::<T::Encoded>(values.len());

        // A single backing buffer is reused as the scratch slot for every value
        // in the slice.
        let mut backing = MaybeUninit::<T::Encoded>::uninit();
        unsafe {
            <T::Encoded as ZeroPadding>::zero_padding(backing.as_mut_ptr());
        }
        let mut slot = unsafe { Slot::new_unchecked(backing.as_mut_ptr()) };
        for value in values {
            value.encode(slots.encoder, slot.as_mut())?;
            slots.write_next(slot.as_mut());
        }

        Ok(())
    }

    fn encode_next<T: Encode<Self>>(&mut self, value: &mut T) -> Result<(), EncodeError> {
        self.encode_next_slice(from_mut(value))
    }
}

/// A pre-allocated block of slots in an encoder.
pub struct Preallocated<'a, E: ?Sized, T> {
    /// The encoder the slots were allocated in.
    pub encoder: &'a mut E,
    pos: usize,
    #[cfg(debug_assertions)]
    remaining: usize,
    _phantom: PhantomData<T>,
}

impl<E: Encoder + ?Sized, T> Preallocated<'_, E, T> {
    /// Writes into the next pre-allocated slot in the encoder.
    pub fn write_next(&mut self, slot: Slot<'_, T>) {
        #[cfg(debug_assertions)]
        {
            assert!(self.remaining > 0, "attempted to write more slots than preallocated");
            self.remaining -= 1;
        }

        self.encoder.rewrite(self.pos, slot.as_bytes());
        self.pos += size_of::<T>();
    }
}
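
// The test below is an illustrative sketch, not part of the original file: it
// exercises only the `Encoder` behavior defined above for `Vec<Chunk>`, namely
// that writes are padded out to whole chunks and that `rewrite` patches bytes
// in place without growing the buffer.
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn vec_chunk_encoder_pads_writes_to_chunk_boundaries() {
        let mut encoder = Vec::<Chunk>::new();

        // Writing a few bytes occupies one full chunk; the remaining bytes of
        // that chunk are zeroed.
        encoder.write(&[1, 2, 3]);
        assert_eq!(encoder.bytes_written(), CHUNK_SIZE);

        // `rewrite` patches already-written bytes without changing the length.
        encoder.rewrite(1, &[0xff]);
        assert_eq!(encoder.bytes_written(), CHUNK_SIZE);

        // `write_zeroes` appends at least one whole zeroed chunk.
        encoder.write_zeroes(1);
        assert_eq!(encoder.bytes_written(), 2 * CHUNK_SIZE);
    }
}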