use core::{
    borrow::Borrow,
    cmp::Ordering,
    iter::FusedIterator,
    marker::PhantomData,
    ops::{Bound, RangeBounds},
    ptr::addr_of_mut,
};

use crate::{
    alloc::vec::Vec,
    collections::btree_map::{
        entries_to_height, ArchivedBTreeMap, InnerNode, LeafNode, Node,
        NodeKind,
    },
    seal::Seal,
    RelPtr,
};
19
20impl<K, V, const E: usize> ArchivedBTreeMap<K, V, E> {
21 pub fn iter(&self) -> Iter<'_, K, V, E> {
23 let this = (self as *const Self).cast_mut();
24 Iter {
25 inner: unsafe { RawIter::new(this) },
26 _phantom: PhantomData,
27 }
28 }
29
30 pub fn iter_seal(this: Seal<'_, Self>) -> IterSeal<'_, K, V, E> {
32 let this = unsafe { Seal::unseal_unchecked(this) as *mut Self };
33 IterSeal {
34 inner: unsafe { RawIter::new(this) },
35 _phantom: PhantomData,
36 }
37 }
38
39 pub fn keys(&self) -> Keys<'_, K, V, E> {
41 let this = (self as *const Self).cast_mut();
42 Keys {
43 inner: unsafe { RawIter::new(this) },
44 _phantom: PhantomData,
45 }
46 }
47
48 pub fn values(&self) -> Values<'_, K, V, E> {
50 let this = (self as *const Self).cast_mut();
51 Values {
52 inner: unsafe { RawIter::new(this) },
53 _phantom: PhantomData,
54 }
55 }
56
57 pub fn values_seal(this: Seal<'_, Self>) -> ValuesSeal<'_, K, V, E> {
59 let this = unsafe { Seal::unseal_unchecked(this) as *mut Self };
60 ValuesSeal {
61 inner: unsafe { RawIter::new(this) },
62 _phantom: PhantomData,
63 }
64 }
65
66 pub fn range<Q, R>(&self, range: R) -> Range<'_, K, V, E>
68 where
69 Q: Ord + ?Sized,
70 R: RangeBounds<Q>,
71 K: Borrow<Q> + Ord,
72 {
73 self.range_with(range, |q, k| q.cmp(k.borrow()))
74 }
75
76 pub fn range_with<Q, R, C>(&self, range: R, cmp: C) -> Range<'_, K, V, E>
81 where
82 Q: Ord + ?Sized,
83 R: RangeBounds<Q>,
84 C: Fn(&Q, &K) -> Ordering,
85 K: Ord,
86 {
87 let this = (self as *const Self).cast_mut();
88 Range {
89 inner: unsafe { RawRangeIter::new(this, range, cmp) },
90 _phantom: PhantomData,
91 }
92 }
93
94 pub fn range_seal<Q, R>(
96 this: Seal<'_, Self>,
97 range: R,
98 ) -> RangeSeal<'_, K, V, E>
99 where
100 Q: Ord + ?Sized,
101 R: RangeBounds<Q>,
102 K: Borrow<Q> + Ord,
103 {
104 Self::range_seal_with(this, range, |q, k| q.cmp(k.borrow()))
105 }
106
107 pub fn range_seal_with<Q, R, C>(
112 this: Seal<'_, Self>,
113 range: R,
114 cmp: C,
115 ) -> RangeSeal<'_, K, V, E>
116 where
117 Q: Ord + ?Sized,
118 R: RangeBounds<Q>,
119 C: Fn(&Q, &K) -> Ordering,
120 K: Ord,
121 {
122 let this = unsafe { Seal::unseal_unchecked(this) as *mut Self };
123 RangeSeal {
124 inner: unsafe { RawRangeIter::new(this, range, cmp) },
125 _phantom: PhantomData,
126 }
127 }
128}
129
/// Implements `ExactSizeIterator` and `FusedIterator` for each of the given
/// map iterator types, which all delegate to the exact-sized, fused
/// `RawIter`.
macro_rules! impl_iter_traits {
    ($($iter_ty:ident),*) => {
        $(
            impl<'a, K, V, const E: usize> ExactSizeIterator
                for $iter_ty<'a, K, V, E>
            {
            }

            impl<'a, K, V, const E: usize> FusedIterator
                for $iter_ty<'a, K, V, E>
            {
            }
        )*
    };
}
145
146pub struct Iter<'a, K, V, const E: usize> {
151 inner: RawIter<K, V, E>,
152 _phantom: PhantomData<&'a ArchivedBTreeMap<K, V, E>>,
153}
154
155impl<'a, K, V, const E: usize> Iterator for Iter<'a, K, V, E> {
156 type Item = (&'a K, &'a V);
157
158 fn next(&mut self) -> Option<Self::Item> {
159 self.inner
160 .next()
161 .map(|(k, v)| (unsafe { &*k }, unsafe { &*v }))
162 }
163
164 fn size_hint(&self) -> (usize, Option<usize>) {
165 self.inner.size_hint()
166 }
167}
168
169pub struct IterSeal<'a, K, V, const E: usize> {
174 inner: RawIter<K, V, E>,
175 _phantom: PhantomData<Seal<'a, ArchivedBTreeMap<K, V, E>>>,
176}
177
178impl<'a, K, V, const E: usize> Iterator for IterSeal<'a, K, V, E> {
179 type Item = (&'a K, Seal<'a, V>);
180
181 fn next(&mut self) -> Option<Self::Item> {
182 self.inner
183 .next()
184 .map(|(k, v)| (unsafe { &*k }, Seal::new(unsafe { &mut *v })))
185 }
186
187 fn size_hint(&self) -> (usize, Option<usize>) {
188 self.inner.size_hint()
189 }
190}
191
192pub struct Keys<'a, K, V, const E: usize> {
197 inner: RawIter<K, V, E>,
198 _phantom: PhantomData<&'a ArchivedBTreeMap<K, V, E>>,
199}
200
201impl<'a, K, V, const E: usize> Iterator for Keys<'a, K, V, E> {
202 type Item = &'a K;
203
204 fn next(&mut self) -> Option<Self::Item> {
205 self.inner.next().map(|(k, _)| unsafe { &*k })
206 }
207
208 fn size_hint(&self) -> (usize, Option<usize>) {
209 self.inner.size_hint()
210 }
211}
212
213pub struct Values<'a, K, V, const E: usize> {
218 inner: RawIter<K, V, E>,
219 _phantom: PhantomData<&'a ArchivedBTreeMap<K, V, E>>,
220}
221
222impl<'a, K, V, const E: usize> Iterator for Values<'a, K, V, E> {
223 type Item = &'a V;
224
225 fn next(&mut self) -> Option<Self::Item> {
226 self.inner.next().map(|(_, v)| unsafe { &*v })
227 }
228
229 fn size_hint(&self) -> (usize, Option<usize>) {
230 self.inner.size_hint()
231 }
232}
233
234pub struct ValuesSeal<'a, K, V, const E: usize> {
239 inner: RawIter<K, V, E>,
240 _phantom: PhantomData<Seal<'a, ArchivedBTreeMap<K, V, E>>>,
241}
242
243impl<'a, K, V, const E: usize> Iterator for ValuesSeal<'a, K, V, E> {
244 type Item = Seal<'a, V>;
245
246 fn next(&mut self) -> Option<Self::Item> {
247 self.inner
248 .next()
249 .map(|(_, v)| Seal::new(unsafe { &mut *v }))
250 }
251
252 fn size_hint(&self) -> (usize, Option<usize>) {
253 self.inner.size_hint()
254 }
255}
256
257struct RawIter<K, V, const E: usize> {
258 remaining: usize,
259 stack: Vec<(*mut Node<K, V, E>, usize)>,
260}
261
262impl<K, V, const E: usize> RawIter<K, V, E> {
263 unsafe fn new(map: *mut ArchivedBTreeMap<K, V, E>) -> Self {
264 let remaining = unsafe { (*map).len.to_native() as usize };
265 let mut stack = Vec::new();
266 if remaining != 0 {
267 stack.reserve(entries_to_height::<E>(remaining) as usize);
268 let mut current =
269 unsafe { RelPtr::as_ptr_raw(addr_of_mut!((*map).root)) };
270 loop {
271 stack.push((current, 0));
272 let kind = unsafe { (*current).kind };
273 match kind {
274 NodeKind::Inner => {
275 let inner = current.cast::<InnerNode<K, V, E>>();
276 let lesser =
277 unsafe { addr_of_mut!((*inner).lesser_nodes[0]) };
278 current = unsafe { RelPtr::as_ptr_raw(lesser) };
279 }
280 NodeKind::Leaf => break,
281 }
282 }
283 }
284
285 Self { remaining, stack }
286 }
287}
288
289impl<K, V, const E: usize> Iterator for RawIter<K, V, E> {
290 type Item = (*mut K, *mut V);
291
292 fn next(&mut self) -> Option<Self::Item> {
293 let (current, i) = self.stack.pop()?;
294 self.remaining -= 1;
295
296 let k = unsafe { addr_of_mut!((*current).keys[i]).cast::<K>() };
297 let v = unsafe { addr_of_mut!((*current).values[i]).cast::<V>() };
298 let next_i = i + 1;
299
300 let kind = unsafe { (*current).kind };
302 match kind {
303 NodeKind::Inner => {
304 let inner = current.cast::<InnerNode<K, V, E>>();
305 let next = if next_i < E {
306 self.stack.push((current, next_i));
308
309 unsafe { addr_of_mut!((*inner).lesser_nodes[next_i]) }
311 } else {
312 unsafe { addr_of_mut!((*inner).greater_node) }
314 };
315
316 let next_is_invalid = unsafe { RelPtr::is_invalid_raw(next) };
317 if !next_is_invalid {
318 let mut current = unsafe { RelPtr::as_ptr_raw(next) };
320 loop {
321 self.stack.push((current, 0));
322 let kind = unsafe { (*current).kind };
323 match kind {
324 NodeKind::Inner => {
325 let inner =
326 current.cast::<InnerNode<K, V, E>>();
327 let lesser = unsafe {
328 addr_of_mut!((*inner).lesser_nodes[0])
329 };
330 current = unsafe { RelPtr::as_ptr_raw(lesser) };
331 }
332 NodeKind::Leaf => break,
333 }
334 }
335 }
336 }
337 NodeKind::Leaf => {
338 let leaf = current.cast::<LeafNode<K, V, E>>();
339 let len = unsafe { (*leaf).len.to_native() as usize };
340 if next_i < len {
341 self.stack.push((current, next_i));
342 }
343 }
344 }
345
346 Some((k, v))
347 }
348
349 #[inline]
350 fn size_hint(&self) -> (usize, Option<usize>) {
351 let remaining = self.remaining;
352 (remaining, Some(remaining))
353 }
354}
355
356impl<K, V, const E: usize> ExactSizeIterator for RawIter<K, V, E> {}
357
358impl_iter_traits!(Iter, IterSeal, Keys, Values, ValuesSeal);
359
360pub struct Range<'a, K, V, const E: usize> {
362 inner: RawRangeIter<K, V, E>,
363 _phantom: PhantomData<&'a ArchivedBTreeMap<K, V, E>>,
364}
365
366impl<'a, K, V, const E: usize> Iterator for Range<'a, K, V, E> {
367 type Item = (&'a K, &'a V);
368
369 fn next(&mut self) -> Option<Self::Item> {
370 self.inner
371 .next()
372 .map(|(k, v)| (unsafe { &*k }, unsafe { &*v }))
373 }
374
375 fn size_hint(&self) -> (usize, Option<usize>) {
376 (0, None)
377 }
378}
379
380pub struct RangeSeal<'a, K, V, const E: usize> {
382 inner: RawRangeIter<K, V, E>,
383 _phantom: PhantomData<Seal<'a, ArchivedBTreeMap<K, V, E>>>,
384}
385
386impl<'a, K, V, const E: usize> Iterator for RangeSeal<'a, K, V, E> {
387 type Item = (&'a K, Seal<'a, V>);
388
389 fn next(&mut self) -> Option<Self::Item> {
390 self.inner
391 .next()
392 .map(|(k, v)| (unsafe { &*k }, Seal::new(unsafe { &mut *v })))
393 }
394
395 fn size_hint(&self) -> (usize, Option<usize>) {
396 (0, None)
397 }
398}
399
400impl<'a, K, V, const E: usize> FusedIterator for Range<'a, K, V, E> {}
401impl<'a, K, V, const E: usize> FusedIterator for RangeSeal<'a, K, V, E> {}
402
403struct RawRangeIter<K, V, const E: usize> {
404 stack: Vec<(*mut Node<K, V, E>, usize)>,
405 end_key: Option<*mut K>,
406}
407
408impl<K, V, const E: usize> RawRangeIter<K, V, E> {
409 unsafe fn new<Q, R, C>(
410 map: *mut ArchivedBTreeMap<K, V, E>,
411 range: R,
412 cmp: C,
413 ) -> Self
414 where
415 Q: Ord + ?Sized,
416 R: RangeBounds<Q>,
417 C: Fn(&Q, &K) -> Ordering,
418 K: Ord,
419 {
420 let len = unsafe { (*map).len.to_native() as usize };
421 if len == 0 {
422 return Self {
423 stack: Vec::new(),
424 end_key: None,
425 };
426 }
427
428 let mut stack =
429 Vec::with_capacity(entries_to_height::<E>(len) as usize);
430
431 unsafe { Self::init_stack_for_lower(map, &range, &mut stack, &cmp) };
432
433 let end_key = unsafe { Self::find_first_past_upper(map, &range, &cmp) };
434
435 if let Some(ek) = end_key {
438 if let Some((node, idx)) = stack.last() {
439 let k =
440 unsafe { addr_of_mut!((*(*node)).keys[*idx]).cast::<K>() };
441 if k == ek {
442 stack.clear();
443 }
444 }
445 }
446
447 Self { stack, end_key }
448 }
449
450 fn key_satisfies_lower<Q, C>(key: &K, lower: Bound<&Q>, cmp: &C) -> bool
451 where
452 Q: Ord + ?Sized,
453 C: Fn(&Q, &K) -> Ordering,
454 {
455 match lower {
456 Bound::Unbounded => true,
457 Bound::Included(lb) => cmp(lb, key).is_le(),
458 Bound::Excluded(lb) => cmp(lb, key).is_lt(),
459 }
460 }
461
462 unsafe fn init_stack_for_lower<Q, R, C>(
463 map: *mut ArchivedBTreeMap<K, V, E>,
464 range: &R,
465 stack: &mut Vec<(*mut Node<K, V, E>, usize)>,
466 cmp: &C,
467 ) where
468 Q: Ord + ?Sized,
469 R: RangeBounds<Q>,
470 C: Fn(&Q, &K) -> Ordering,
471 K: Ord,
472 {
473 let lower = range.start_bound();
474
475 if matches!(lower, Bound::Unbounded) {
476 let mut current =
477 unsafe { RelPtr::as_ptr_raw(addr_of_mut!((*map).root)) };
478 loop {
479 stack.push((current, 0));
480 let kind = unsafe { (*current).kind };
481 match kind {
482 NodeKind::Inner => {
483 let inner = current.cast::<InnerNode<K, V, E>>();
484 let lesser =
485 unsafe { addr_of_mut!((*inner).lesser_nodes[0]) };
486 if unsafe { RelPtr::is_invalid_raw(lesser) } {
487 break;
488 }
489 current = unsafe { RelPtr::as_ptr_raw(lesser) };
490 }
491 NodeKind::Leaf => break,
492 }
493 }
494 return;
495 }
496
497 let mut current =
498 unsafe { RelPtr::as_ptr_raw(addr_of_mut!((*map).root)) };
499 'descend: loop {
500 match unsafe { (*current).kind } {
501 NodeKind::Inner => {
502 for i in 0..E {
503 let k_ptr = unsafe {
504 addr_of_mut!((*current).keys[i]).cast::<K>()
505 };
506 let k_ref = unsafe { &*k_ptr };
507 if Self::key_satisfies_lower(k_ref, lower, cmp) {
508 stack.push((current, i));
509 let inner = current.cast::<InnerNode<K, V, E>>();
510 let lesser = unsafe {
511 addr_of_mut!((*inner).lesser_nodes[i])
512 };
513 if unsafe { RelPtr::is_invalid_raw(lesser) } {
514 break 'descend;
515 } else {
516 current = unsafe { RelPtr::as_ptr_raw(lesser) };
517 continue 'descend;
518 }
519 }
520 }
521 let inner = current.cast::<InnerNode<K, V, E>>();
522 let greater =
523 unsafe { addr_of_mut!((*inner).greater_node) };
524 if unsafe { RelPtr::is_invalid_raw(greater) } {
525 break;
526 } else {
527 current = unsafe { RelPtr::as_ptr_raw(greater) };
528 }
529 }
530 NodeKind::Leaf => {
531 let leaf = current.cast::<LeafNode<K, V, E>>();
532 let len = unsafe { (*leaf).len.to_native() as usize };
533 for i in 0..len {
534 let k_ptr = unsafe {
535 addr_of_mut!((*current).keys[i]).cast::<K>()
536 };
537 let k_ref = unsafe { &*k_ptr };
538 if Self::key_satisfies_lower(k_ref, lower, cmp) {
539 stack.push((current, i));
540 break 'descend;
541 }
542 }
543 break;
544 }
545 }
546 }
547 }
548
549 fn key_is_past_upper<Q, C>(key: &K, upper: Bound<&Q>, cmp: &C) -> bool
550 where
551 Q: Ord + ?Sized,
552 C: Fn(&Q, &K) -> Ordering,
553 {
554 match upper {
555 Bound::Unbounded => false,
556 Bound::Included(ub) => cmp(ub, key).is_lt(),
557 Bound::Excluded(ub) => cmp(ub, key).is_le(),
558 }
559 }
560
561 unsafe fn find_first_past_upper<Q, R, C>(
562 map: *mut ArchivedBTreeMap<K, V, E>,
563 range: &R,
564 cmp: &C,
565 ) -> Option<*mut K>
566 where
567 Q: Ord + ?Sized,
568 R: RangeBounds<Q> + ?Sized,
569 C: Fn(&Q, &K) -> Ordering,
570 K: Ord,
571 {
572 let upper = range.end_bound();
573
574 match upper {
575 Bound::Unbounded => None,
576 Bound::Included(_) | Bound::Excluded(_) => {
577 let mut current =
578 unsafe { RelPtr::as_ptr_raw(addr_of_mut!((*map).root)) };
579 let mut candidate = None;
580 'search: loop {
581 match unsafe { (*current).kind } {
582 NodeKind::Inner => {
583 for i in 0..E {
584 let k_ptr = unsafe {
585 addr_of_mut!((*current).keys[i]).cast::<K>()
586 };
587 let k_ref = unsafe { &*k_ptr };
588 if Self::key_is_past_upper(k_ref, upper, cmp) {
589 candidate = Some(k_ptr);
590 let inner =
591 current.cast::<InnerNode<K, V, E>>();
592 let lesser = unsafe {
593 addr_of_mut!((*inner).lesser_nodes[i])
594 };
595 if unsafe { RelPtr::is_invalid_raw(lesser) }
596 {
597 break 'search;
598 } else {
599 current = unsafe {
600 RelPtr::as_ptr_raw(lesser)
601 };
602 continue 'search;
603 }
604 }
605 }
606 let inner = current.cast::<InnerNode<K, V, E>>();
607 let greater =
608 unsafe { addr_of_mut!((*inner).greater_node) };
609 if unsafe { RelPtr::is_invalid_raw(greater) } {
610 break;
611 } else {
612 current =
613 unsafe { RelPtr::as_ptr_raw(greater) };
614 }
615 }
616 NodeKind::Leaf => {
617 let leaf = current.cast::<LeafNode<K, V, E>>();
618 let len =
619 unsafe { (*leaf).len.to_native() as usize };
620 for i in 0..len {
621 let k_ptr = unsafe {
622 addr_of_mut!((*current).keys[i]).cast::<K>()
623 };
624 let k_ref = unsafe { &*k_ptr };
625 if Self::key_is_past_upper(k_ref, upper, cmp) {
626 return Some(k_ptr);
627 }
628 }
629 break;
630 }
631 }
632 }
633 candidate
634 }
635 }
636 }
637}
638
639impl<K, V, const E: usize> Iterator for RawRangeIter<K, V, E> {
640 type Item = (*mut K, *mut V);
641
642 fn next(&mut self) -> Option<Self::Item> {
643 let (current, i) = self.stack.pop()?;
644
645 let k = unsafe { addr_of_mut!((*current).keys[i]).cast::<K>() };
646 if let Some(end) = self.end_key {
647 if k == end {
648 self.stack.clear();
649 return None;
650 }
651 }
652
653 let v = unsafe { addr_of_mut!((*current).values[i]).cast::<V>() };
654 let next_i = i + 1;
655
656 match unsafe { (*current).kind } {
657 NodeKind::Inner => {
658 let inner = current.cast::<InnerNode<K, V, E>>();
659 let next = if next_i < E {
660 self.stack.push((current, next_i));
661 unsafe { addr_of_mut!((*inner).lesser_nodes[next_i]) }
662 } else {
663 unsafe { addr_of_mut!((*inner).greater_node) }
664 };
665
666 if !unsafe { RelPtr::is_invalid_raw(next) } {
667 let mut current = unsafe { RelPtr::as_ptr_raw(next) };
668 loop {
669 self.stack.push((current, 0));
670 match unsafe { (*current).kind } {
671 NodeKind::Inner => {
672 let inner =
673 current.cast::<InnerNode<K, V, E>>();
674 let lesser = unsafe {
675 addr_of_mut!((*inner).lesser_nodes[0])
676 };
677 current = unsafe { RelPtr::as_ptr_raw(lesser) };
678 }
679 NodeKind::Leaf => break,
680 }
681 }
682 }
683 }
684 NodeKind::Leaf => {
685 let leaf = current.cast::<LeafNode<K, V, E>>();
686 let len = unsafe { (*leaf).len.to_native() as usize };
687 if next_i < len {
688 self.stack.push((current, next_i));
689 }
690 }
691 }
692
693 Some((k, v))
694 }
695}