1use core::fmt::{Debug, Error, Formatter};
6use core::hash::{Hash, Hasher};
7use core::mem::{size_of, MaybeUninit};
8use core::ops::*;
9
10use crate::types::{BitOps, Bits, BitsImpl};
11
/// A compact fixed-size array of bits, indexed from `0` to `SIZE - 1`.
///
/// The backing store is chosen at the type level by `BitsImpl<SIZE>`;
/// judging by the conversions below it is a primitive unsigned integer for
/// small sizes (`u32`, `u64`, `u128`) and a `[u128; N]` array for larger ones.
pub struct Bitmap<const SIZE: usize>
where
    BitsImpl<{ SIZE }>: Bits,
{
    // Raw backing store; concrete type depends on SIZE.
    pub(crate) data: <BitsImpl<{ SIZE }> as Bits>::Store,
}
25
26impl<const SIZE: usize> Clone for Bitmap<{ SIZE }>
27where
28 BitsImpl<{ SIZE }>: Bits,
29{
30 fn clone(&self) -> Self {
31 Bitmap::from_value(self.data)
32 }
33}
34
// A bitmap is a plain value wrapper around its store, so it is trivially copyable.
impl<const SIZE: usize> Copy for Bitmap<{ SIZE }> where BitsImpl<{ SIZE }>: Bits {}
36
37impl<const SIZE: usize> Default for Bitmap<{ SIZE }>
38where
39 BitsImpl<{ SIZE }>: Bits,
40{
41 fn default() -> Self {
42 Bitmap {
43 data: <BitsImpl<SIZE> as Bits>::Store::default(),
44 }
45 }
46}
47
48impl<const SIZE: usize> PartialEq for Bitmap<{ SIZE }>
49where
50 BitsImpl<{ SIZE }>: Bits,
51{
52 fn eq(&self, other: &Self) -> bool {
53 self.data == other.data
54 }
55}
56
// Equality on the raw store is a full equivalence relation, so `Eq` holds.
impl<const SIZE: usize> Eq for Bitmap<{ SIZE }> where BitsImpl<{ SIZE }>: Bits {}
58
59impl<const SIZE: usize> Hash for Bitmap<{ SIZE }>
60where
61 BitsImpl<{ SIZE }>: Bits,
62 <BitsImpl<{ SIZE }> as Bits>::Store: Hash,
63{
64 fn hash<H: Hasher>(&self, state: &mut H) {
65 self.as_value().hash(state)
66 }
67}
68
69impl<const SIZE: usize> PartialOrd for Bitmap<{ SIZE }>
70where
71 BitsImpl<{ SIZE }>: Bits,
72 <BitsImpl<{ SIZE }> as Bits>::Store: PartialOrd,
73{
74 fn partial_cmp(&self, other: &Self) -> Option<core::cmp::Ordering> {
75 self.as_value().partial_cmp(other.as_value())
76 }
77}
78
79impl<const SIZE: usize> Ord for Bitmap<{ SIZE }>
80where
81 BitsImpl<{ SIZE }>: Bits,
82 <BitsImpl<{ SIZE }> as Bits>::Store: Ord,
83{
84 fn cmp(&self, other: &Self) -> core::cmp::Ordering {
85 self.as_value().cmp(other.as_value())
86 }
87}
88
#[cfg(feature = "std")]
impl<const SIZE: usize> Debug for Bitmap<{ SIZE }>
where
    BitsImpl<{ SIZE }>: Bits,
{
    /// Formats the bitmap as the hex rendering of its backing store
    /// (requires `std`, since `to_hex` allocates).
    fn fmt(&self, f: &mut Formatter<'_>) -> Result<(), Error> {
        let hex = <BitsImpl<SIZE> as Bits>::Store::to_hex(&self.data);
        f.write_str(&hex)
    }
}
102
impl<const SIZE: usize> AsRef<[u8]> for Bitmap<{ SIZE }>
where
    BitsImpl<{ SIZE }>: Bits,
{
    /// Views the backing store as a raw byte slice.
    ///
    /// NOTE(review): byte order follows the platform's native endianness of
    /// the underlying integer store, so the result is not portable across
    /// architectures — presumably intentional, paired with `TryFrom<&[u8]>`.
    fn as_ref(&self) -> &[u8] {
        // SAFETY: the store is a primitive unsigned integer (or array of
        // them), which has no padding and no invalid byte patterns, so it may
        // be reinterpreted as `size_of::<Store>()` bytes; the borrow of
        // `self` keeps the memory alive for the slice's lifetime.
        unsafe {
            core::slice::from_raw_parts(
                &self.data as *const _ as *const u8,
                size_of::<<BitsImpl<SIZE> as Bits>::Store>(),
            )
        }
    }
}
116
impl<const SIZE: usize> AsMut<[u8]> for Bitmap<{ SIZE }>
where
    BitsImpl<{ SIZE }>: Bits,
{
    /// Views the backing store as a mutable raw byte slice.
    ///
    /// NOTE(review): writing through this slice edits the store in native
    /// endianness; for a partially used store (e.g. `SIZE = 61` in a `u64`)
    /// nothing prevents setting the unused high bits — callers must keep the
    /// tail bits clear. TODO confirm against the `types` module invariants.
    fn as_mut(&mut self) -> &mut [u8] {
        // SAFETY: same layout argument as `as_ref` — the integer store has no
        // padding and any byte pattern is a valid value; the exclusive borrow
        // of `self` guarantees unique access for the slice's lifetime.
        unsafe {
            core::slice::from_raw_parts_mut(
                &mut self.data as *mut _ as *mut u8,
                size_of::<<BitsImpl<SIZE> as Bits>::Store>(),
            )
        }
    }
}
130
impl<const SIZE: usize> TryFrom<&[u8]> for Bitmap<{ SIZE }>
where
    BitsImpl<{ SIZE }>: Bits,
{
    // No diagnostic payload: failure can only mean a length mismatch.
    type Error = ();

    /// Reconstructs a bitmap from raw bytes as produced by `as_bytes`
    /// (native endianness).
    ///
    /// Returns `Err(())` unless `value` is exactly the size of the
    /// backing store.
    fn try_from(value: &[u8]) -> Result<Self, Self::Error> {
        if value.len() == size_of::<<BitsImpl<SIZE> as Bits>::Store>() {
            let mut data: MaybeUninit<<BitsImpl<SIZE> as Bits>::Store> = MaybeUninit::uninit();
            let data_ptr: *mut u8 = data.as_mut_ptr().cast();
            // SAFETY: the length check above guarantees the copy fills all
            // `size_of::<Store>()` bytes of `data`, and any byte pattern is a
            // valid integer value, so `assume_init` is sound.
            Ok(unsafe {
                data_ptr.copy_from_nonoverlapping(value.as_ptr(), value.len());
                Self {
                    data: data.assume_init(),
                }
            })
        } else {
            Err(())
        }
    }
}
152
153#[cfg(not(feature = "std"))]
154impl<const SIZE: usize> Debug for Bitmap<{ SIZE }>
155where
156 BitsImpl<{ SIZE }>: Bits,
157{
158 fn fmt(&self, f: &mut Formatter<'_>) -> Result<(), Error> {
159 write!(f, "Bitmap<{}> {{ ... }}", SIZE)
160 }
161}
162
impl<const SIZE: usize> Bitmap<{ SIZE }>
where
    BitsImpl<SIZE>: Bits,
{
    /// Constructs an empty bitmap with every bit cleared.
    #[inline]
    pub fn new() -> Self {
        Self::default()
    }

    /// Constructs a bitmap with the lowest `bits` bits set.
    ///
    /// In debug builds, panics if `bits > SIZE`.
    #[inline]
    pub fn mask(bits: usize) -> Self {
        debug_assert!(bits <= SIZE);
        Self {
            data: <BitsImpl<SIZE> as Bits>::Store::make_mask(bits),
        }
    }

    /// Wraps a raw backing store value in a bitmap.
    #[inline]
    pub fn from_value(data: <BitsImpl<SIZE> as Bits>::Store) -> Self {
        Self { data }
    }

    /// Consumes the bitmap, returning the raw backing store.
    #[inline]
    pub fn into_value(self) -> <BitsImpl<SIZE> as Bits>::Store {
        self.data
    }

    /// Borrows the raw backing store.
    #[inline]
    pub fn as_value(&self) -> &<BitsImpl<SIZE> as Bits>::Store {
        &self.data
    }

    /// Views the bitmap as raw bytes in native endianness
    /// (same as the `AsRef<[u8]>` impl).
    #[inline]
    pub fn as_bytes(&self) -> &[u8] {
        AsRef::<[u8]>::as_ref(self)
    }

    /// Returns the number of set bits in the bitmap.
    #[inline]
    pub fn len(self) -> usize {
        <BitsImpl<SIZE> as Bits>::Store::len(&self.data)
    }

    /// Tests if no bit is set.
    #[inline]
    pub fn is_empty(self) -> bool {
        self.first_index().is_none()
    }

    /// Tests if every bit in `0..SIZE` is set.
    #[inline]
    pub fn is_full(self) -> bool {
        self.first_false_index().is_none()
    }

    /// Returns the value of the bit at `index`.
    ///
    /// In debug builds, panics if `index >= SIZE`.
    #[inline]
    pub fn get(self, index: usize) -> bool {
        debug_assert!(index < SIZE);
        <BitsImpl<SIZE> as Bits>::Store::get(&self.data, index)
    }

    /// Sets the bit at `index` to `value`, returning the bit's
    /// previous value.
    ///
    /// In debug builds, panics if `index >= SIZE`.
    #[inline]
    pub fn set(&mut self, index: usize, value: bool) -> bool {
        debug_assert!(index < SIZE);
        <BitsImpl<SIZE> as Bits>::Store::set(&mut self.data, index, value)
    }

    /// Returns the index of the lowest set bit, or `None` if empty.
    #[inline]
    pub fn first_index(self) -> Option<usize> {
        <BitsImpl<SIZE> as Bits>::Store::first_index(&self.data)
    }

    /// Returns the index of the highest set bit, or `None` if empty.
    #[inline]
    pub fn last_index(self) -> Option<usize> {
        <BitsImpl<SIZE> as Bits>::Store::last_index(&self.data)
    }

    /// Returns the index of the next set bit strictly after `index`.
    #[inline]
    pub fn next_index(self, index: usize) -> Option<usize> {
        <BitsImpl<SIZE> as Bits>::Store::next_index(&self.data, index)
    }

    /// Returns the index of the nearest set bit strictly before `index`.
    #[inline]
    pub fn prev_index(self, index: usize) -> Option<usize> {
        <BitsImpl<SIZE> as Bits>::Store::prev_index(&self.data, index)
    }

    /// Returns the index of the lowest cleared bit.
    // The `corrected_*` variants exclude the spare store bits at
    // `SIZE..` when SIZE doesn't fill the backing word.
    #[inline]
    pub fn first_false_index(self) -> Option<usize> {
        <BitsImpl<SIZE> as Bits>::corrected_first_false_index(&self.data)
    }

    /// Returns the index of the highest cleared bit below `SIZE`.
    #[inline]
    pub fn last_false_index(self) -> Option<usize> {
        <BitsImpl<SIZE> as Bits>::corrected_last_false_index(&self.data)
    }

    /// Returns the index of the next cleared bit strictly after `index`.
    #[inline]
    pub fn next_false_index(self, index: usize) -> Option<usize> {
        <BitsImpl<SIZE> as Bits>::corrected_next_false_index(&self.data, index)
    }

    /// Returns the index of the nearest cleared bit strictly before `index`.
    // NOTE(review): this is the only false-index method without a
    // `corrected_` variant — presumably safe because the search runs
    // strictly downward from an in-range index, so the spare tail bits
    // can never be reported. Verify against the `types` module.
    #[inline]
    pub fn prev_false_index(self, index: usize) -> Option<usize> {
        <BitsImpl<SIZE> as Bits>::Store::prev_false_index(&self.data, index)
    }

    /// Flips every bit in the bitmap in place.
    #[inline]
    pub fn invert(&mut self) {
        <BitsImpl<SIZE> as Bits>::Store::invert(&mut self.data);
    }
}
294
295impl<'a, const SIZE: usize> IntoIterator for &'a Bitmap<{ SIZE }>
296where
297 BitsImpl<{ SIZE }>: Bits,
298{
299 type Item = usize;
300 type IntoIter = Iter<'a, { SIZE }>;
301
302 fn into_iter(self) -> Self::IntoIter {
303 Iter {
304 head: None,
305 tail: Some(SIZE + 1),
306 data: self,
307 }
308 }
309}
310
311impl<const SIZE: usize> BitAnd for Bitmap<{ SIZE }>
312where
313 BitsImpl<{ SIZE }>: Bits,
314{
315 type Output = Self;
316 fn bitand(mut self, rhs: Self) -> Self::Output {
317 <BitsImpl<SIZE> as Bits>::Store::bit_and(&mut self.data, &rhs.data);
318 self
319 }
320}
321
322impl<const SIZE: usize> BitOr for Bitmap<{ SIZE }>
323where
324 BitsImpl<{ SIZE }>: Bits,
325{
326 type Output = Self;
327 fn bitor(mut self, rhs: Self) -> Self::Output {
328 <BitsImpl<SIZE> as Bits>::Store::bit_or(&mut self.data, &rhs.data);
329 self
330 }
331}
332
333impl<const SIZE: usize> BitXor for Bitmap<{ SIZE }>
334where
335 BitsImpl<{ SIZE }>: Bits,
336{
337 type Output = Self;
338 fn bitxor(mut self, rhs: Self) -> Self::Output {
339 <BitsImpl<SIZE> as Bits>::Store::bit_xor(&mut self.data, &rhs.data);
340 self
341 }
342}
343
344impl<const SIZE: usize> Not for Bitmap<{ SIZE }>
345where
346 BitsImpl<{ SIZE }>: Bits,
347{
348 type Output = Self;
349 fn not(mut self) -> Self::Output {
350 <BitsImpl<SIZE> as Bits>::Store::invert(&mut self.data);
351 self
352 }
353}
354
355impl<const SIZE: usize> BitAndAssign for Bitmap<{ SIZE }>
356where
357 BitsImpl<{ SIZE }>: Bits,
358{
359 fn bitand_assign(&mut self, rhs: Self) {
360 <BitsImpl<SIZE> as Bits>::Store::bit_and(&mut self.data, &rhs.data);
361 }
362}
363
364impl<const SIZE: usize> BitOrAssign for Bitmap<{ SIZE }>
365where
366 BitsImpl<{ SIZE }>: Bits,
367{
368 fn bitor_assign(&mut self, rhs: Self) {
369 <BitsImpl<SIZE> as Bits>::Store::bit_or(&mut self.data, &rhs.data);
370 }
371}
372
373impl<const SIZE: usize> BitXorAssign for Bitmap<{ SIZE }>
374where
375 BitsImpl<{ SIZE }>: Bits,
376{
377 fn bitxor_assign(&mut self, rhs: Self) {
378 <BitsImpl<SIZE> as Bits>::Store::bit_xor(&mut self.data, &rhs.data);
379 }
380}
381
382impl From<[u128; 2]> for Bitmap<256> {
383 fn from(data: [u128; 2]) -> Self {
384 Bitmap { data }
385 }
386}
387
388impl From<[u128; 3]> for Bitmap<384> {
389 fn from(data: [u128; 3]) -> Self {
390 Bitmap { data }
391 }
392}
393
394impl From<[u128; 4]> for Bitmap<512> {
395 fn from(data: [u128; 4]) -> Self {
396 Bitmap { data }
397 }
398}
399
400impl From<[u128; 5]> for Bitmap<640> {
401 fn from(data: [u128; 5]) -> Self {
402 Bitmap { data }
403 }
404}
405
406impl From<[u128; 6]> for Bitmap<768> {
407 fn from(data: [u128; 6]) -> Self {
408 Bitmap { data }
409 }
410}
411
412impl From<[u128; 7]> for Bitmap<896> {
413 fn from(data: [u128; 7]) -> Self {
414 Bitmap { data }
415 }
416}
417
418impl From<[u128; 8]> for Bitmap<1024> {
419 fn from(data: [u128; 8]) -> Self {
420 Bitmap { data }
421 }
422}
423
424impl From<Bitmap<256>> for [u128; 2] {
425 fn from(bitmap: Bitmap<256>) -> Self {
426 bitmap.into_value()
427 }
428}
429
430impl From<Bitmap<384>> for [u128; 3] {
431 fn from(bitmap: Bitmap<384>) -> Self {
432 bitmap.into_value()
433 }
434}
435
436impl From<Bitmap<512>> for [u128; 4] {
437 fn from(bitmap: Bitmap<512>) -> Self {
438 bitmap.into_value()
439 }
440}
441
442impl From<Bitmap<640>> for [u128; 5] {
443 fn from(bitmap: Bitmap<640>) -> Self {
444 bitmap.into_value()
445 }
446}
447
448impl From<Bitmap<768>> for [u128; 6] {
449 fn from(bitmap: Bitmap<768>) -> Self {
450 bitmap.into_value()
451 }
452}
453
454impl From<Bitmap<896>> for [u128; 7] {
455 fn from(bitmap: Bitmap<896>) -> Self {
456 bitmap.into_value()
457 }
458}
459
460impl From<Bitmap<1024>> for [u128; 8] {
461 fn from(bitmap: Bitmap<1024>) -> Self {
462 bitmap.into_value()
463 }
464}
465
/// A double-ended iterator over the indices of the set bits in a `Bitmap`.
#[derive(Clone, Debug)]
pub struct Iter<'a, const SIZE: usize>
where
    BitsImpl<SIZE>: Bits,
{
    // Last index yielded from the front: `None` before the first `next`,
    // `Some(SIZE + 1)` once the forward side is exhausted.
    head: Option<usize>,
    // Cursor for the back: `Some(SIZE + 1)` means "not started", then the
    // last index yielded from the back; `None` once exhausted.
    tail: Option<usize>,
    // The bitmap being iterated; borrowed, so iteration is non-consuming.
    data: &'a Bitmap<{ SIZE }>,
}
491
impl<'a, const SIZE: usize> Iterator for Iter<'a, SIZE>
where
    BitsImpl<{ SIZE }>: Bits,
{
    type Item = usize;

    /// Yields the next set bit from the front, stopping once the front
    /// cursor would pass the position already consumed from the back.
    fn next(&mut self) -> Option<Self::Item> {
        let result;

        match self.head {
            None => {
                // Not started yet: begin at the lowest set bit, if any.
                result = self.data.first_index();
            }
            Some(index) => {
                if index >= SIZE {
                    // Forward side already marked exhausted.
                    result = None
                } else {
                    result = self.data.next_index(index);
                }
            }
        }

        if let Some(index) = result {
            if let Some(tail) = self.tail {
                // Crossed the back cursor: the ranges have met, so the
                // whole iterator is done.
                if tail < index {
                    self.head = Some(SIZE + 1);
                    self.tail = None;
                    return None;
                }
            } else {
                // Back side already consumed everything remaining.
                self.head = Some(SIZE + 1);
                return None;
            }

            self.head = Some(index);
        } else {
            // No set bits ahead: mark the forward side exhausted.
            self.head = Some(SIZE + 1);
        }

        result
    }
}
535
impl<'a, const SIZE: usize> DoubleEndedIterator for Iter<'a, SIZE>
where
    BitsImpl<{ SIZE }>: Bits,
{
    /// Yields the next set bit from the back, stopping once the back
    /// cursor would pass the position already consumed from the front.
    fn next_back(&mut self) -> Option<Self::Item> {
        let result;

        match self.tail {
            None => {
                // Back side already exhausted.
                result = None;
            }
            Some(index) => {
                if index >= SIZE {
                    // `Some(SIZE + 1)` sentinel: not started, so begin at
                    // the highest set bit, if any.
                    result = self.data.last_index();
                } else {
                    result = self.data.prev_index(index);
                }
            }
        }

        if let Some(index) = result {
            if let Some(head) = self.head {
                // Crossed the front cursor: mark both sides exhausted.
                if head > index {
                    self.head = Some(SIZE + 1);
                    self.tail = None;
                    return None;
                }
            }

            self.tail = Some(index);
        } else {
            // No set bits behind: mark the back side exhausted.
            self.tail = None;
        }

        result
    }
}
573
/// SIMD access for x86/x86_64: loads of the bitmap's store into SSE2/AVX
/// vector registers, and conversions back.
#[cfg(any(target_arch = "x86", target_arch = "x86_64"))]
#[allow(clippy::cast_ptr_alignment)]
mod x86_arch {
    use super::*;
    #[cfg(target_arch = "x86")]
    use core::arch::x86::*;
    #[cfg(target_arch = "x86_64")]
    use core::arch::x86_64::*;

    impl Bitmap<128> {
        /// Loads the 128-bit store into an `__m128i`.
        ///
        /// # Safety
        /// The caller must ensure SSE2 is available. The unaligned load
        /// (`_mm_loadu_si128`) is used deliberately: the bitmap carries no
        /// 16-byte alignment guarantee (see the alignment tests below).
        #[target_feature(enable = "sse2")]
        pub unsafe fn load_m128i(&self) -> __m128i {
            _mm_loadu_si128(&self.data as *const _ as *const __m128i)
        }
    }

    impl Bitmap<256> {
        /// Loads the 256-bit store as two `__m128i` lanes, low half first.
        ///
        /// # Safety
        /// The caller must ensure SSE2 is available.
        #[target_feature(enable = "sse2")]
        pub unsafe fn load_m128i(&self) -> [__m128i; 2] {
            let ptr = &self.data as *const _ as *const __m128i;
            [_mm_loadu_si128(ptr), _mm_loadu_si128(ptr.add(1))]
        }

        /// Loads the 256-bit store into an `__m256i`.
        ///
        /// # Safety
        /// The caller must ensure AVX is available.
        #[target_feature(enable = "avx")]
        pub unsafe fn load_m256i(&self) -> __m256i {
            _mm256_loadu_si256(&self.data as *const _ as *const __m256i)
        }
    }

    impl Bitmap<512> {
        /// Loads the 512-bit store as four `__m128i` lanes, low half first.
        ///
        /// # Safety
        /// The caller must ensure SSE2 is available.
        #[target_feature(enable = "sse2")]
        pub unsafe fn load_m128i(&self) -> [__m128i; 4] {
            let ptr = &self.data as *const _ as *const __m128i;
            [
                _mm_loadu_si128(ptr),
                _mm_loadu_si128(ptr.add(1)),
                _mm_loadu_si128(ptr.add(2)),
                _mm_loadu_si128(ptr.add(3)),
            ]
        }

        /// Loads the 512-bit store as two `__m256i` lanes, low half first.
        ///
        /// # Safety
        /// The caller must ensure AVX is available.
        #[target_feature(enable = "avx")]
        pub unsafe fn load_m256i(&self) -> [__m256i; 2] {
            let ptr = &self.data as *const _ as *const __m256i;
            [_mm256_loadu_si256(ptr), _mm256_loadu_si256(ptr.add(1))]
        }
    }

    impl Bitmap<768> {
        /// Loads the 768-bit store as six `__m128i` lanes, low half first.
        ///
        /// # Safety
        /// The caller must ensure SSE2 is available.
        #[target_feature(enable = "sse2")]
        pub unsafe fn load_m128i(&self) -> [__m128i; 6] {
            let ptr = &self.data as *const _ as *const __m128i;
            [
                _mm_loadu_si128(ptr),
                _mm_loadu_si128(ptr.add(1)),
                _mm_loadu_si128(ptr.add(2)),
                _mm_loadu_si128(ptr.add(3)),
                _mm_loadu_si128(ptr.add(4)),
                _mm_loadu_si128(ptr.add(5)),
            ]
        }

        /// Loads the 768-bit store as three `__m256i` lanes, low half first.
        ///
        /// # Safety
        /// The caller must ensure AVX is available.
        #[target_feature(enable = "avx")]
        pub unsafe fn load_m256i(&self) -> [__m256i; 3] {
            let ptr = &self.data as *const _ as *const __m256i;
            [
                _mm256_loadu_si256(ptr),
                _mm256_loadu_si256(ptr.add(1)),
                _mm256_loadu_si256(ptr.add(2)),
            ]
        }
    }

    impl Bitmap<1024> {
        /// Loads the 1024-bit store as eight `__m128i` lanes, low half first.
        ///
        /// # Safety
        /// The caller must ensure SSE2 is available.
        #[target_feature(enable = "sse2")]
        pub unsafe fn load_m128i(&self) -> [__m128i; 8] {
            let ptr = &self.data as *const _ as *const __m128i;
            [
                _mm_loadu_si128(ptr),
                _mm_loadu_si128(ptr.add(1)),
                _mm_loadu_si128(ptr.add(2)),
                _mm_loadu_si128(ptr.add(3)),
                _mm_loadu_si128(ptr.add(4)),
                _mm_loadu_si128(ptr.add(5)),
                _mm_loadu_si128(ptr.add(6)),
                _mm_loadu_si128(ptr.add(7)),
            ]
        }

        /// Loads the 1024-bit store as four `__m256i` lanes, low half first.
        ///
        /// # Safety
        /// The caller must ensure AVX is available.
        #[target_feature(enable = "avx")]
        pub unsafe fn load_m256i(&self) -> [__m256i; 4] {
            let ptr = &self.data as *const _ as *const __m256i;
            [
                _mm256_loadu_si256(ptr),
                _mm256_loadu_si256(ptr.add(1)),
                _mm256_loadu_si256(ptr.add(2)),
                _mm256_loadu_si256(ptr.add(3)),
            ]
        }
    }

    impl From<__m128i> for Bitmap<128> {
        fn from(data: __m128i) -> Self {
            Self {
                // SAFETY: `__m128i` and the 128-bit store are both 16 plain
                // bytes; any bit pattern is valid for either.
                data: unsafe { core::mem::transmute(data) },
            }
        }
    }

    impl From<__m256i> for Bitmap<256> {
        fn from(data: __m256i) -> Self {
            Self {
                // SAFETY: `__m256i` and the 256-bit store are both 32 plain
                // bytes; any bit pattern is valid for either.
                data: unsafe { core::mem::transmute(data) },
            }
        }
    }

    impl From<Bitmap<128>> for __m128i {
        fn from(data: Bitmap<128>) -> Self {
            // SAFETY(review): assumes SSE2 is available — guaranteed on
            // x86_64, but TODO confirm the baseline on 32-bit x86 targets.
            unsafe { data.load_m128i() }
        }
    }

    impl From<Bitmap<256>> for __m256i {
        fn from(data: Bitmap<256>) -> Self {
            // SAFETY(review): assumes AVX is available at runtime — this is
            // not checked here; verify the crate's documented requirements.
            unsafe { data.load_m256i() }
        }
    }

    #[cfg(test)]
    mod test {
        use super::*;

        // Forces the bitmap to a misaligned offset (after a single byte) to
        // prove the unaligned loads are correct regardless of placement.
        struct AlignmentTester<const B: usize>
        where
            BitsImpl<B>: Bits,
        {
            _byte: u8,
            bits: Bitmap<B>,
        }

        #[test]
        fn load_128() {
            let mut t: AlignmentTester<128> = AlignmentTester {
                _byte: 0,
                bits: Bitmap::new(),
            };
            t.bits.set(5, true);
            let m = unsafe { t.bits.load_m128i() };
            let mut bits: Bitmap<128> = m.into();
            // Round trip preserved the single set bit.
            assert!(bits.set(5, false));
            assert!(bits.is_empty());
        }

        #[test]
        fn load_256() {
            let mut t: AlignmentTester<256> = AlignmentTester {
                _byte: 0,
                bits: Bitmap::new(),
            };
            t.bits.set(5, true);
            let m = unsafe { t.bits.load_m256i() };
            let mut bits: Bitmap<256> = m.into();
            // Round trip preserved the single set bit.
            assert!(bits.set(5, false));
            assert!(bits.is_empty());
        }
    }
}
742
#[cfg(test)]
mod test {
    use super::*;
    use proptest::collection::btree_set;
    use proptest::proptest;

    proptest! {
        // Property: a bitmap built from an arbitrary set of indices agrees
        // with that set for get/first/last and both iteration directions.
        // SIZE = 61 exercises a store with unused trailing bits.
        #[test]
        fn get_set_and_iter_61(bits in btree_set(0..61usize, 0..61)) {
            let mut bitmap = Bitmap::<61>::new();
            for i in &bits {
                bitmap.set(*i, true);
            }
            for i in 0..61 {
                assert_eq!(bitmap.get(i), bits.contains(&i));
            }
            assert_eq!(bitmap.first_index(), bits.clone().into_iter().next());
            assert_eq!(bitmap.last_index(), bits.clone().into_iter().next_back());
            assert!(bitmap.into_iter().eq(bits.clone().into_iter()));
            assert!(bitmap.into_iter().rev().eq(bits.into_iter().rev()));
        }

        // Same property for an exactly word-sized store (u64).
        #[test]
        fn get_set_and_iter_64(bits in btree_set(0..64usize, 0..64)) {
            let mut bitmap = Bitmap::<64>::new();
            for i in &bits {
                bitmap.set(*i, true);
            }
            for i in 0..64 {
                assert_eq!(bitmap.get(i), bits.contains(&i));
            }
            assert_eq!(bitmap.first_index(), bits.clone().into_iter().next());
            assert_eq!(bitmap.last_index(), bits.clone().into_iter().next_back());
            assert!(bitmap.into_iter().eq(bits.clone().into_iter()));
            assert!(bitmap.into_iter().rev().eq(bits.into_iter().rev()));
        }

        // Same property for an array-backed store ([u128; 8]).
        #[test]
        fn get_set_and_iter_1024(bits in btree_set(0..1024usize, 0..1024)) {
            let mut bitmap = Bitmap::<1024>::new();
            for i in &bits {
                bitmap.set(*i, true);
            }
            for i in 0..1024 {
                assert_eq!(bitmap.get(i), bits.contains(&i));
            }
            assert_eq!(bitmap.first_index(), bits.clone().into_iter().next());
            assert_eq!(bitmap.last_index(), bits.clone().into_iter().next_back());
            assert!(bitmap.into_iter().eq(bits.clone().into_iter()));
            assert!(bitmap.into_iter().rev().eq(bits.into_iter().rev()));
        }

        // Round trip: as_bytes -> TryFrom<&[u8]> reproduces the bitmap.
        #[test]
        fn convert_1024(bits in btree_set(0..1024usize, 0..1024)) {
            let mut bitmap = Bitmap::<1024>::new();
            for i in &bits {
                bitmap.set(*i, true);
            }
            let new_bitmap: Bitmap<1024> = TryFrom::try_from(bitmap.as_bytes()).expect("Unable to convert bitmap!");
            assert_eq!(new_bitmap, bitmap);
        }

        // mask() must not panic for any in-range bit count, including the
        // full-width case (which is easy to get wrong with `1 << bits`).
        #[test]
        fn mask(shift in 0..=128usize) {
            if shift <= 32 {
                Bitmap::<32>::mask(shift);
            }

            if shift <= 64 {
                Bitmap::<64>::mask(shift);
            }

            if shift <= 128 {
                Bitmap::<128>::mask(shift);
            }
        }
    }

    // Full-width masks equal the all-ones value of the backing integer.
    #[test]
    fn mask_limits() {
        assert_eq!(Bitmap::<32>::mask(32).into_value(), u32::MAX);
        assert_eq!(Bitmap::<64>::mask(64).into_value(), u64::MAX);
        assert_eq!(Bitmap::<128>::mask(128).into_value(), u128::MAX);
        assert!(Bitmap::<32>::mask(32).is_full());
        assert!(Bitmap::<64>::mask(64).is_full());
        assert!(Bitmap::<128>::mask(128).is_full());
    }
}