// ctr/flavors/ctr64.rs — CTR mode flavors with a 64-bit counter.
1use super::CtrFlavor;
3use cipher::{
4 generic_array::{ArrayLength, GenericArray},
5 typenum::{PartialDiv, PartialQuot, Unsigned, U8},
6};
7
8#[cfg(feature = "zeroize")]
9use cipher::zeroize::{Zeroize, ZeroizeOnDrop};
10
// Counter/nonce words are 64 bits (8 bytes) wide.
type ChunkSize = U8;
// Number of 64-bit words in a block of `B` bytes (i.e. `B / 8`).
type Chunks<B> = PartialQuot<B, ChunkSize>;
// Word size in bytes as a `usize`, for byte-slice indexing.
const CS: usize = ChunkSize::USIZE;
14
/// Counter/nonce state shared by the 64-bit CTR flavors ([`Ctr64BE`]
/// and [`Ctr64LE`]).
#[derive(Clone)]
pub struct CtrNonce64<N: ArrayLength<u64>> {
    // Current 64-bit counter; starts at 0 (see `from_nonce`) and is
    // incremented by `next_block`.
    ctr: u64,
    // Initial block split into 64-bit words. The counter word's byte
    // order is handled by the flavor; the other words are stored
    // native-endian so they round-trip via `to_ne_bytes`.
    nonce: GenericArray<u64, N>,
}
20
/// With the `zeroize` feature enabled, wipe the counter and nonce from
/// memory when the state is dropped, so key-stream-related material does
/// not linger on the stack/heap.
#[cfg(feature = "zeroize")]
impl<N: ArrayLength<u64>> Drop for CtrNonce64<N> {
    fn drop(&mut self) {
        self.ctr.zeroize();
        self.nonce.zeroize();
    }
}
28
// Safe to assert: the `Drop` impl above zeroizes all fields.
#[cfg(feature = "zeroize")]
impl<N: ArrayLength<u64>> ZeroizeOnDrop for CtrNonce64<N> {}
31
/// 64-bit big-endian counter flavor: the counter occupies the last
/// 64 bits of the block in big-endian byte order.
pub enum Ctr64BE {}
34
35impl<B> CtrFlavor<B> for Ctr64BE
36where
37 B: ArrayLength<u8> + PartialDiv<ChunkSize>,
38 Chunks<B>: ArrayLength<u64>,
39{
40 type CtrNonce = CtrNonce64<Chunks<B>>;
41 type Backend = u64;
42 const NAME: &'static str = "64BE";
43
44 #[inline]
45 fn remaining(cn: &Self::CtrNonce) -> Option<usize> {
46 (core::u64::MAX - cn.ctr).try_into().ok()
47 }
48
49 #[inline(always)]
50 fn current_block(cn: &Self::CtrNonce) -> GenericArray<u8, B> {
51 let mut block = GenericArray::<u8, B>::default();
52 for i in 0..Chunks::<B>::USIZE {
53 let t = if i == Chunks::<B>::USIZE - 1 {
54 cn.ctr.wrapping_add(cn.nonce[i]).to_be_bytes()
55 } else {
56 cn.nonce[i].to_ne_bytes()
57 };
58 block[CS * i..][..CS].copy_from_slice(&t);
59 }
60 block
61 }
62
63 #[inline]
64 fn next_block(cn: &mut Self::CtrNonce) -> GenericArray<u8, B> {
65 let block = Self::current_block(cn);
66 cn.ctr = cn.ctr.wrapping_add(1);
67 block
68 }
69
70 #[inline]
71 fn from_nonce(block: &GenericArray<u8, B>) -> Self::CtrNonce {
72 let mut nonce = GenericArray::<u64, Chunks<B>>::default();
73 for i in 0..Chunks::<B>::USIZE {
74 let chunk = block[CS * i..][..CS].try_into().unwrap();
75 nonce[i] = if i == Chunks::<B>::USIZE - 1 {
76 u64::from_be_bytes(chunk)
77 } else {
78 u64::from_ne_bytes(chunk)
79 }
80 }
81 let ctr = 0;
82 Self::CtrNonce { ctr, nonce }
83 }
84
85 #[inline]
86 fn as_backend(cn: &Self::CtrNonce) -> Self::Backend {
87 cn.ctr
88 }
89
90 #[inline]
91 fn set_from_backend(cn: &mut Self::CtrNonce, v: Self::Backend) {
92 cn.ctr = v;
93 }
94}
95
/// 64-bit little-endian counter flavor: the counter occupies the first
/// 64 bits of the block in little-endian byte order.
pub enum Ctr64LE {}
98
99impl<B> CtrFlavor<B> for Ctr64LE
100where
101 B: ArrayLength<u8> + PartialDiv<ChunkSize>,
102 Chunks<B>: ArrayLength<u64>,
103{
104 type CtrNonce = CtrNonce64<Chunks<B>>;
105 type Backend = u64;
106 const NAME: &'static str = "64LE";
107
108 #[inline]
109 fn remaining(cn: &Self::CtrNonce) -> Option<usize> {
110 (core::u64::MAX - cn.ctr).try_into().ok()
111 }
112
113 #[inline(always)]
114 fn current_block(cn: &Self::CtrNonce) -> GenericArray<u8, B> {
115 let mut block = GenericArray::<u8, B>::default();
116 for i in 0..Chunks::<B>::USIZE {
117 let t = if i == 0 {
118 cn.ctr.wrapping_add(cn.nonce[i]).to_le_bytes()
119 } else {
120 cn.nonce[i].to_ne_bytes()
121 };
122 block[CS * i..][..CS].copy_from_slice(&t);
123 }
124 block
125 }
126
127 #[inline]
128 fn next_block(cn: &mut Self::CtrNonce) -> GenericArray<u8, B> {
129 let block = Self::current_block(cn);
130 cn.ctr = cn.ctr.wrapping_add(1);
131 block
132 }
133
134 #[inline]
135 fn from_nonce(block: &GenericArray<u8, B>) -> Self::CtrNonce {
136 let mut nonce = GenericArray::<u64, Chunks<B>>::default();
137 for i in 0..Chunks::<B>::USIZE {
138 let chunk = block[CS * i..][..CS].try_into().unwrap();
139 nonce[i] = if i == 0 {
140 u64::from_le_bytes(chunk)
141 } else {
142 u64::from_ne_bytes(chunk)
143 }
144 }
145 let ctr = 0;
146 Self::CtrNonce { ctr, nonce }
147 }
148
149 #[inline]
150 fn as_backend(cn: &Self::CtrNonce) -> Self::Backend {
151 cn.ctr
152 }
153
154 #[inline]
155 fn set_from_backend(cn: &mut Self::CtrNonce, v: Self::Backend) {
156 cn.ctr = v;
157 }
158}