// netlink_packet_utils/nla.rs

use crate::traits::{Emitable, Parseable};
use crate::DecodeError;
use byteorder::{ByteOrder, NativeEndian};
use core::ops::Range;
use thiserror::Error;

type Field = Range<usize>;

/// Flag set in an attribute's type field when its value holds nested
/// attributes.
pub const NLA_F_NESTED: u16 = 0x8000;
/// Flag set in an attribute's type field when its value is in network byte
/// order.
pub const NLA_F_NET_BYTEORDER: u16 = 0x4000;
/// Mask that clears the two flag bits, leaving only the attribute type.
pub const NLA_TYPE_MASK: u16 = !(NLA_F_NET_BYTEORDER | NLA_F_NESTED);
/// Attributes are padded to 4-byte boundaries.
pub const NLA_ALIGNTO: usize = 4;
/// An attribute header is 4 bytes: a `u16` length followed by a `u16` type.
pub const NLA_HEADER_SIZE: usize = 4;

#[derive(Debug, Error)]
pub enum NlaError {
    #[error("buffer has length {buffer_len}, but an NLA header is {} bytes", TYPE.end)]
    BufferTooSmall { buffer_len: usize },

    #[error("buffer has length {buffer_len}, but the NLA is {nla_len} bytes")]
    LengthMismatch { buffer_len: usize, nla_len: u16 },

    #[error("NLA has invalid length: {nla_len} (should be at least {} bytes)", TYPE.end)]
    InvalidLength { nla_len: u16 },
}

/// Round a length up to the nearest multiple of [`NLA_ALIGNTO`].
#[macro_export]
macro_rules! nla_align {
    ($len: expr) => {
        ($len + NLA_ALIGNTO - 1) & !(NLA_ALIGNTO - 1)
    };
}
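
// For example, `nla_align!(1)` is 4, `nla_align!(13)` is 16, and
// `nla_align!(16)` stays 16 (see `test_align` below). The expression can
// overflow for lengths near `usize::MAX`, which `test_align_overflow`
// exercises.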

const LENGTH: Field = 0..2;
const TYPE: Field = 2..4;
#[allow(non_snake_case)]
fn VALUE(length: usize) -> Field {
    TYPE.end..TYPE.end + length
}
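
// On the wire, an attribute is laid out as:
//
//     bytes 0..2   length (u16, native endian; covers the header and the
//                  value but not the padding)
//     bytes 2..4   type   (u16, native endian; the top two bits carry the
//                  NLA_F_NESTED and NLA_F_NET_BYTEORDER flags)
//     bytes 4..    value  (`length - 4` bytes, padded to a multiple of
//                  NLA_ALIGNTO)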

/// A raw netlink attribute (NLA) wrapped in a byte buffer.
#[derive(Debug, PartialEq, Eq, Clone, Copy)]
pub struct NlaBuffer<T: AsRef<[u8]>> {
    buffer: T,
}

impl<T: AsRef<[u8]>> NlaBuffer<T> {
    pub fn new(buffer: T) -> NlaBuffer<T> {
        NlaBuffer { buffer }
    }

    /// Wrap `buffer`, checking first that it is large enough to hold the
    /// attribute it claims to contain.
    pub fn new_checked(buffer: T) -> Result<NlaBuffer<T>, NlaError> {
        let buffer = Self::new(buffer);
        buffer.check_buffer_length()?;
        Ok(buffer)
    }

    pub fn check_buffer_length(&self) -> Result<(), NlaError> {
        let len = self.buffer.as_ref().len();
        if len < TYPE.end {
            Err(NlaError::BufferTooSmall { buffer_len: len })
        } else if len < self.length() as usize {
            Err(NlaError::LengthMismatch {
                buffer_len: len,
                nla_len: self.length(),
            })
        } else if (self.length() as usize) < TYPE.end {
            Err(NlaError::InvalidLength { nla_len: self.length() })
        } else {
            Ok(())
        }
    }

    /// Consume the buffer and return the underlying storage.
    pub fn into_inner(self) -> T {
        self.buffer
    }

    /// Return a reference to the underlying storage.
    pub fn inner(&self) -> &T {
        &self.buffer
    }

    /// Return a mutable reference to the underlying storage.
    pub fn inner_mut(&mut self) -> &mut T {
        &mut self.buffer
    }

    /// Return the attribute type, with the flag bits masked off.
    pub fn kind(&self) -> u16 {
        let data = self.buffer.as_ref();
        NativeEndian::read_u16(&data[TYPE]) & NLA_TYPE_MASK
    }

    /// Return `true` if the `NLA_F_NESTED` flag is set.
    pub fn nested_flag(&self) -> bool {
        let data = self.buffer.as_ref();
        (NativeEndian::read_u16(&data[TYPE]) & NLA_F_NESTED) != 0
    }

    /// Return `true` if the `NLA_F_NET_BYTEORDER` flag is set.
    pub fn network_byte_order_flag(&self) -> bool {
        let data = self.buffer.as_ref();
        (NativeEndian::read_u16(&data[TYPE]) & NLA_F_NET_BYTEORDER) != 0
    }

    /// Return the attribute length: the header plus the value, excluding
    /// padding.
    pub fn length(&self) -> u16 {
        let data = self.buffer.as_ref();
        NativeEndian::read_u16(&data[LENGTH])
    }

    /// Return the length of the value only. This underflows (panicking in
    /// debug builds) if the length field is smaller than the header, so
    /// validate untrusted buffers with [`NlaBuffer::new_checked`] first.
    pub fn value_length(&self) -> usize {
        self.length() as usize - TYPE.end
    }
}

impl<T: AsRef<[u8]> + AsMut<[u8]>> NlaBuffer<T> {
    /// Set the attribute type, masking off the flag bits.
    pub fn set_kind(&mut self, kind: u16) {
        let data = self.buffer.as_mut();
        NativeEndian::write_u16(&mut data[TYPE], kind & NLA_TYPE_MASK)
    }

    /// Set the `NLA_F_NESTED` flag. The raw type field is read back rather
    /// than `self.kind()`, which masks the flags and would drop an already
    /// set `NLA_F_NET_BYTEORDER` bit.
    pub fn set_nested_flag(&mut self) {
        let data = self.buffer.as_mut();
        let raw_kind = NativeEndian::read_u16(&data[TYPE]);
        NativeEndian::write_u16(&mut data[TYPE], raw_kind | NLA_F_NESTED)
    }

    /// Set the `NLA_F_NET_BYTEORDER` flag, likewise preserving any flag
    /// that is already set.
    pub fn set_network_byte_order_flag(&mut self) {
        let data = self.buffer.as_mut();
        let raw_kind = NativeEndian::read_u16(&data[TYPE]);
        NativeEndian::write_u16(&mut data[TYPE], raw_kind | NLA_F_NET_BYTEORDER)
    }

    /// Set the attribute length (header plus value, excluding padding).
    pub fn set_length(&mut self, length: u16) {
        let data = self.buffer.as_mut();
        NativeEndian::write_u16(&mut data[LENGTH], length)
    }
}

impl<'buffer, T: AsRef<[u8]> + ?Sized> NlaBuffer<&'buffer T> {
    /// Return the attribute value (the bytes right after the header).
    pub fn value(&self) -> &[u8] {
        &self.buffer.as_ref()[VALUE(self.value_length())]
    }
}

impl<'buffer, T: AsRef<[u8]> + AsMut<[u8]> + ?Sized> NlaBuffer<&'buffer mut T> {
    /// Return the attribute value as a mutable slice.
    pub fn value_mut(&mut self) -> &mut [u8] {
        let range = VALUE(self.value_length());
        &mut self.buffer.as_mut()[range]
    }
}
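
// A minimal usage sketch (hypothetical bytes, shown for a little-endian
// host): wrap a raw attribute and read it back.
//
//     let raw: &[u8] = &[0x06, 0x00, 0x01, 0x00, 0xaa, 0xbb];
//     let nla = NlaBuffer::new_checked(raw).unwrap();
//     assert_eq!(nla.kind(), 1);
//     assert_eq!(nla.value(), &[0xaa, 0xbb]);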

/// A generic attribute that stores its kind and raw value, used as a
/// fallback when a more specific representation is not available.
#[derive(Debug, PartialEq, Eq, Clone)]
pub struct DefaultNla {
    kind: u16,
    value: Vec<u8>,
}

impl DefaultNla {
    pub fn new(kind: u16, value: Vec<u8>) -> Self {
        Self { kind, value }
    }
}

impl Nla for DefaultNla {
    fn value_len(&self) -> usize {
        self.value.len()
    }

    fn kind(&self) -> u16 {
        self.kind
    }

    fn emit_value(&self, buffer: &mut [u8]) {
        buffer.copy_from_slice(self.value.as_slice());
    }
}

impl<'buffer, T: AsRef<[u8]> + ?Sized> Parseable<NlaBuffer<&'buffer T>> for DefaultNla {
    type Error = DecodeError;

    fn parse(buf: &NlaBuffer<&'buffer T>) -> Result<Self, Self::Error> {
        // `NlaBuffer::kind()` masks the flag bits off, so fold them back
        // into the stored kind; `Nla::is_nested` and
        // `Nla::is_network_byteorder` recover them from there.
        let mut kind = buf.kind();

        if buf.network_byte_order_flag() {
            kind |= NLA_F_NET_BYTEORDER;
        }

        if buf.nested_flag() {
            kind |= NLA_F_NESTED;
        }

        Ok(DefaultNla {
            kind,
            value: buf.value().to_vec(),
        })
    }
}

/// A typed netlink attribute: implementors report their kind and value
/// length, and write their value into a caller-provided buffer.
pub trait Nla {
    fn value_len(&self) -> usize;
    fn kind(&self) -> u16;
    fn emit_value(&self, buffer: &mut [u8]);

    #[inline]
    fn is_nested(&self) -> bool {
        (self.kind() & NLA_F_NESTED) != 0
    }

    #[inline]
    fn is_network_byteorder(&self) -> bool {
        (self.kind() & NLA_F_NET_BYTEORDER) != 0
    }
}
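
// Implementing the three required methods above is all a type needs in
// order to be serialized as an attribute: the blanket `Emitable` impl below
// derives `buffer_len` and `emit` from them. See `emit_custom_nla` in the
// tests for a minimal sketch.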

impl<T: Nla> Emitable for T {
    fn buffer_len(&self) -> usize {
        nla_align!(self.value_len()) + NLA_HEADER_SIZE
    }

    fn emit(&self, buffer: &mut [u8]) {
        let mut buffer = NlaBuffer::new(buffer);
        buffer.set_kind(self.kind());

        if self.is_network_byteorder() {
            buffer.set_network_byte_order_flag()
        }

        if self.is_nested() {
            buffer.set_nested_flag()
        }

        // The length must be set before `value_mut()` is called, since it
        // is used to locate the value; it covers the header and the value
        // but not the padding.
        buffer.set_length(self.value_len() as u16 + NLA_HEADER_SIZE as u16);

        self.emit_value(buffer.value_mut());

        // Zero out the alignment padding that follows the value.
        let padding = nla_align!(self.value_len()) - self.value_len();
        for i in 0..padding {
            buffer.inner_mut()[NLA_HEADER_SIZE + self.value_len() + i] = 0;
        }
    }
}

// Emit a slice of attributes back to back, as they are packed in netlink
// messages.
impl<'a, T: Nla> Emitable for &'a [T] {
    fn buffer_len(&self) -> usize {
        self.iter().fold(0, |acc, nla| {
            assert_eq!(nla.buffer_len() % NLA_ALIGNTO, 0);
            acc + nla.buffer_len()
        })
    }

    fn emit(&self, buffer: &mut [u8]) {
        let mut start = 0;
        let mut end: usize;
        for nla in self.iter() {
            let attr_len = nla.buffer_len();
            assert_eq!(nla.buffer_len() % NLA_ALIGNTO, 0);
            end = start + attr_len;
            nla.emit(&mut buffer[start..end]);
            start = end;
        }
    }
}
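
// These asserts are defensive: for any `T: Nla`, `buffer_len` comes from
// the blanket impl above and is `nla_align!(value_len) + NLA_HEADER_SIZE`,
// which is always a multiple of `NLA_ALIGNTO`.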

/// An iterator over the attributes packed in a byte buffer, yielding one
/// [`NlaBuffer`] per attribute.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub struct NlasIterator<T> {
    position: usize,
    buffer: T,
}

impl<T> NlasIterator<T> {
    pub fn new(buffer: T) -> Self {
        NlasIterator { position: 0, buffer }
    }
}

impl<'buffer, T: AsRef<[u8]> + ?Sized + 'buffer> Iterator for NlasIterator<&'buffer T> {
    type Item = Result<NlaBuffer<&'buffer [u8]>, NlaError>;

    fn next(&mut self) -> Option<Self::Item> {
        if self.position >= self.buffer.as_ref().len() {
            return None;
        }

        match NlaBuffer::new_checked(&self.buffer.as_ref()[self.position..]) {
            Ok(nla_buffer) => {
                // Advance to the next attribute, accounting for padding.
                self.position += nla_align!(nla_buffer.length() as usize);
                Some(Ok(nla_buffer))
            }
            Err(e) => {
                // An invalid attribute leaves no reliable way to find the
                // next one, so yield the error and stop iterating.
                self.position = self.buffer.as_ref().len();
                Some(Err(e))
            }
        }
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn network_byteorder() {
        // An attribute with the NLA_F_NET_BYTEORDER flag (0x4000) set on
        // its kind (6): the length (8) and kind are native endian (little
        // endian on the hosts this test assumes), while the value
        // 0x00000e10 is big endian.
        static TEST_ATTRIBUTE: &[u8] = &[0x08, 0x00, 0x06, 0x40, 0x00, 0x00, 0x0e, 0x10];
        let buffer = NlaBuffer::new(TEST_ATTRIBUTE);
        let buffer_is_net = buffer.network_byte_order_flag();
        let buffer_is_nest = buffer.nested_flag();

        let nla = DefaultNla::parse(&buffer).unwrap();
        let mut emitted_buffer = vec![0; nla.buffer_len()];

        nla.emit(&mut emitted_buffer);

        let attr_is_net = nla.is_network_byteorder();
        let attr_is_nest = nla.is_nested();

        let emit = NlaBuffer::new(emitted_buffer);
        let emit_is_net = emit.network_byte_order_flag();
        let emit_is_nest = emit.nested_flag();

        assert_eq!([buffer_is_net, buffer_is_nest], [attr_is_net, attr_is_nest]);
        assert_eq!([attr_is_net, attr_is_nest], [emit_is_net, emit_is_nest]);
    }
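
    // A minimal sketch of iterating a buffer that holds two consecutive
    // attributes (hypothetical kinds and values; lengths are written in
    // native endianness so the test is portable).
    #[test]
    fn nlas_iterator() {
        let mut bytes = vec![0u8; 16];
        // First NLA: kind 1, 2-byte value, padded to 8 bytes.
        NativeEndian::write_u16(&mut bytes[0..2], 6);
        NativeEndian::write_u16(&mut bytes[2..4], 1);
        bytes[4..6].copy_from_slice(&[0xaa, 0xbb]);
        // Second NLA: kind 2, 4-byte value, 8 bytes with no padding.
        NativeEndian::write_u16(&mut bytes[8..10], 8);
        NativeEndian::write_u16(&mut bytes[10..12], 2);
        bytes[12..16].copy_from_slice(&[0x01, 0x02, 0x03, 0x04]);

        let nlas: Vec<_> = NlasIterator::new(bytes.as_slice())
            .map(|nla| nla.unwrap())
            .collect();
        assert_eq!(nlas.len(), 2);
        assert_eq!(nlas[0].kind(), 1);
        assert_eq!(nlas[0].value(), &[0xaa, 0xbb]);
        assert_eq!(nlas[1].kind(), 2);
        assert_eq!(nlas[1].value(), &[0x01, 0x02, 0x03, 0x04]);
    }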

    // `usize::MAX` on 64-bit targets, returned from a function rather than
    // written inline so the overflow in `test_align_overflow` happens at
    // run time instead of being rejected at compile time.
    fn get_len() -> usize {
        18_446_744_073_709_551_615
    }

    #[test]
    fn test_align() {
        assert_eq!(nla_align!(13), 16);
        assert_eq!(nla_align!(16), 16);
        assert_eq!(nla_align!(0), 0);
        assert_eq!(nla_align!(1), 4);
        assert_eq!(nla_align!(get_len() - 4), usize::MAX - 3);
    }

    #[test]
    #[should_panic]
    fn test_align_overflow() {
        assert_eq!(nla_align!(get_len() - 3), usize::MAX);
    }
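
    // A minimal sketch of a custom `Nla` implementor (hypothetical kind and
    // payload): a u32 value emitted in native byte order. The blanket
    // `Emitable` impl supplies `buffer_len` and `emit`.
    struct U32Nla(u32);

    impl Nla for U32Nla {
        fn value_len(&self) -> usize {
            4
        }
        fn kind(&self) -> u16 {
            2
        }
        fn emit_value(&self, buffer: &mut [u8]) {
            NativeEndian::write_u32(buffer, self.0)
        }
    }

    #[test]
    fn emit_custom_nla() {
        let nla = U32Nla(3600);
        // 4-byte header plus a 4-byte value, already aligned.
        assert_eq!(nla.buffer_len(), 8);

        let mut buf = vec![0; nla.buffer_len()];
        nla.emit(&mut buf);

        let parsed = NlaBuffer::new_checked(buf.as_slice()).unwrap();
        assert_eq!(parsed.kind(), 2);
        assert_eq!(parsed.value_length(), 4);
        assert_eq!(NativeEndian::read_u32(parsed.value()), 3600);
    }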
}