1use core::fmt;
3#[cfg(target_arch="x86_64")]
4use core::arch::{asm, x86_64::{_bittest64,_bittestandcomplement64,_bittestandreset64,_bittestandset64}};
5#[cfg(target_arch="x86")]
6use core::arch::{asm, x86::{_bittest,_bittestandcomplement,_bittestandreset,_bittestandset}};
7
/// Error returned when a bit position lies outside a bitmap's capacity.
#[derive(PartialEq, Eq, Clone, Copy, Debug)]
pub struct OutOfBitmapError
{
    // The offending bit position that was requested.
    position:usize,
    // The bitmap's capacity in bits (exclusive upper bound for positions).
    limit:usize
}

impl OutOfBitmapError
{
    /// Creates a new error recording the out-of-range `position` and the
    /// bitmap's bit `limit`.
    pub const fn new(position:usize,limit:usize)->Self
    {
        Self
        {
            position,
            limit
        }
    }
}

impl fmt::Display for OutOfBitmapError
{
    /// Formats the error as a human-readable message,
    /// e.g. `bit 70 is out of bitmap's limit 64`.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result
    {
        write!(f,"bit {} is out of bitmap's limit {}",self.position,self.limit)
    }
}
34
/// Zero-sized handle that reinterprets caller-provided memory as a bitmap
/// of `N` bits. The type itself stores nothing; every operation casts
/// `&self` back to the raw storage address the view was created from.
pub struct RefBitmap<const N:usize>;

impl<'a,const N:usize> RefBitmap<N>
{
    /// Builds a shared bitmap view over the memory at `ptr`.
    ///
    /// # Safety
    /// `ptr` must point to memory that is readable, stays valid for the
    /// whole lifetime `'a`, and is large enough to hold `N` bits.
    pub unsafe fn from_raw_ptr(ptr:*const usize)->&'a Self
    {
        // SAFETY: the caller guarantees `ptr` is valid for `'a`; the target
        // type is zero-sized, so forming the reference itself reads nothing.
        unsafe {&*(ptr as *const Self)}
    }

    /// Builds an exclusive bitmap view over the memory at `ptr`.
    ///
    /// # Safety
    /// `ptr` must point to writable memory, not aliased elsewhere for the
    /// lifetime `'a`, large enough to hold `N` bits.
    pub unsafe fn from_raw_mut_ptr(ptr:*mut usize)->&'a mut Self
    {
        // SAFETY: upheld by the caller as documented above.
        unsafe {&mut *(ptr as *mut Self)}
    }
}
66
67impl<const N:usize> RefBitmap<N>
68{
69 pub fn test(&self,position:usize)->Result<bool,OutOfBitmapError>
84 {
85 if position<N
86 {
87 #[cfg(target_arch="x86_64")]
88 {
89 let bmp:*const i64=(&raw const *self).cast();
91 unsafe
92 {
93 Ok(_bittest64(bmp,position as i64)!=0)
94 }
95 }
96 #[cfg(target_arch="x86")]
97 {
98 let bmp:*const i32=(&raw const *self).cast();
100 unsafe
101 {
102 Ok(_bittest(bmp,position as i32)!=0)
103 }
104 }
105 #[cfg(all(not(target_arch="x86_64"),not(target_arch="x86")))]
106 {
107 let bmp:*const u32=(&raw const *self).cast();
109 let i=position>>5;
110 let j=position&0x1F;
111 unsafe
112 {
113 Ok((bmp.add(i).read_unaligned()&(1<<j))!=0)
114 }
115 }
116 }
117 else
118 {
119 Err(OutOfBitmapError::new(position,N))
120 }
121 }
122
123 pub fn set(&mut self,position:usize)->Result<bool,OutOfBitmapError>
136 {
137 if position<N
138 {
139 #[cfg(target_arch="x86_64")]
140 {
141 let bmp:*mut i64=(&raw mut *self).cast();
143 unsafe
144 {
145 Ok(_bittestandset64(bmp,position as i64)!=0)
146 }
147 }
148 #[cfg(target_arch="x86")]
149 {
150 let bmp:*mut i32=(&raw mut *self).cast();
152 unsafe
153 {
154 Ok(_bittestandset(bmp,position as i32)!=0)
155 }
156 }
157 #[cfg(all(not(target_arch="x86_64"),not(target_arch="x86")))]
158 {
159 let bmp:*mut u32=(&raw mut *self).cast();
161 let i=position>>5;
162 let j=position&0x1F;
163 let v=1<<j;
164 unsafe
165 {
166 let old=bmp.add(i).read_unaligned();
167 bmp.add(i).write_unaligned(old|v);
168 Ok((old&v)!=0)
169 }
170 }
171 }
172 else
173 {
174 Err(OutOfBitmapError::new(position,N))
175 }
176 }
177
178 pub fn reset(&mut self,position:usize)->Result<bool,OutOfBitmapError>
191 {
192 if position<N
193 {
194 #[cfg(target_arch="x86_64")]
195 {
196 let bmp:*mut i64=(&raw mut *self).cast();
198 unsafe
199 {
200 Ok(_bittestandreset64(bmp,position as i64)!=0)
201 }
202 }
203 #[cfg(target_arch="x86")]
204 {
205 let bmp:*mut i32=(&raw mut *self).cast();
207 unsafe
208 {
209 Ok(_bittestandreset(bmp,position as i32)!=0)
210 }
211 }
212 #[cfg(all(not(target_arch="x86_64"),not(target_arch="x86")))]
213 {
214 let bmp:*mut u32=(&raw mut *self).cast();
216 let i=position>>5;
217 let j=position&0x1F;
218 let v=1<<j;
219 unsafe
220 {
221 let old=bmp.add(i).read_unaligned();
222 bmp.add(i).write_unaligned(old&!v);
223 Ok((old&v)!=0)
224 }
225 }
226 }
227 else
228 {
229 Err(OutOfBitmapError::new(position,N))
230 }
231 }
232
233 pub fn complement(&mut self,position:usize)->Result<bool,OutOfBitmapError>
246 {
247 if position<N
248 {
249 #[cfg(target_arch="x86_64")]
250 {
251 let bmp:*mut i64=(&raw mut *self).cast();
253 unsafe
254 {
255 Ok(_bittestandcomplement64(bmp,position as i64)!=0)
256 }
257 }
258 #[cfg(target_arch="x86")]
259 {
260 let bmp:*mut i32=(&raw mut *self).cast();
262 unsafe
263 {
264 Ok(_bittestandcomplement(bmp,position as i32)!=0)
265 }
266 }
267 #[cfg(all(not(target_arch="x86_64"),not(target_arch="x86")))]
268 {
269 let bmp:*mut u32=(&raw mut *self).cast();
271 let i=position>>5;
272 let j=position&0x1F;
273 let v=1<<j;
274 unsafe
275 {
276 let old=bmp.add(i).read_unaligned();
277 bmp.add(i).write_unaligned(old^v);
278 Ok((old&v)!=0)
279 }
280 }
281 }
282 else
283 {
284 Err(OutOfBitmapError::new(position,N))
285 }
286 }
287
288 pub fn assign(&mut self,position:usize,value:bool)->Result<bool,OutOfBitmapError>
302 {
303 if position<N
304 {
305 #[cfg(target_arch="x86_64")]
306 {
307 let bmp:*mut i64=(&raw mut *self).cast();
309 unsafe
310 {
311 if value
312 {
313 Ok(_bittestandset64(bmp,position as i64)!=0)
314 }
315 else
316 {
317 Ok(_bittestandreset64(bmp,position as i64)!=0)
318 }
319 }
320 }
321 #[cfg(target_arch="x86")]
322 {
323 let bmp:*mut i32=(&raw mut *self).cast();
325 unsafe
326 {
327 if value
328 {
329 Ok(_bittestandset(bmp,position as i32)!=0)
330 }
331 else
332 {
333 Ok(_bittestandreset(bmp,position as i32)!=0)
334 }
335 }
336 }
337 #[cfg(all(not(target_arch="x86_64"),not(target_arch="x86")))]
338 {
339 let bmp:*mut u32=(&raw mut *self).cast();
341 let i=position>>5;
342 let j=position&0x1F;
343 let v=1<<j;
344 unsafe
345 {
346 let old=bmp.add(i).read_unaligned();
347 bmp.add(i).write_unaligned(if value {old|v} else {old&!v});
348 Ok((old&v)!=0)
349 }
350 }
351 }
352 else
353 {
354 Err(OutOfBitmapError::new(position,N))
355 }
356 }
357
    /// Scans the bitmap from bit 0 upwards for the first cleared (0) bit.
    ///
    /// Returns `Some(position)` of the lowest cleared bit, or `None` when
    /// all `N` bits are set.
    pub fn search_cleared_forward(&self)->Option<usize>
    {
        #[cfg(target_arch="x86_64")]
        {
            let bmp:*const u64=(&raw const *self).cast();
            // Number of 64-bit words that cover N bits (rounded up).
            let lim=(N>>6)+if (N&0x3F)!=0 {1} else {0};
            for i in 0..lim
            {
                let j:u64;
                let b:u8;
                // SAFETY: `i` stays below ceil(N/64), inside the storage the
                // caller promised when constructing this view.
                // NOTE(review): the final iteration performs a full 8-byte
                // load even when N is not a multiple of 64 — callers must
                // pad the backing buffer to a whole word; confirm.
                unsafe
                {
                    // Invert the word so BSF finds the lowest *cleared* bit.
                    // BSF sets ZF when its source is zero (word all ones);
                    // SETZ captures ZF into `b`.
                    asm!
                    (
                        "mov {v},qword ptr [{p}]",
                        "not {v}",
                        "bsf {r},{v}",
                        "setz {zf}",
                        p=in(reg) bmp.add(i),
                        v=out(reg) _,
                        r=out(reg) j,
                        zf=out(reg_byte) b
                    );
                }
                if b==0
                {
                    let pos=(i<<6)+j as usize;
                    // A hit at or past N can only occur in the last, partial
                    // word, and only when every valid bit below it is set —
                    // so the whole search is exhausted and None is correct.
                    return if pos<N {Some(pos)} else {None};
                }
            }
            None
        }
        #[cfg(target_arch="x86")]
        {
            let bmp:*const u32=(&raw const *self).cast();
            // Number of 32-bit words that cover N bits (rounded up).
            let lim=(N>>5)+if (N&0x1F)!=0 {1} else {0};
            for i in 0..lim
            {
                let j:u32;
                let b:u8;
                // SAFETY: `i` stays below ceil(N/32), inside the storage.
                unsafe
                {
                    // Same scheme as the 64-bit arm, on 32-bit words.
                    asm!
                    (
                        "mov {v},dword ptr [{p}]",
                        "not {v}",
                        "bsf {r},{v}",
                        "setz {zf}",
                        p=in(reg) bmp.add(i),
                        v=out(reg) _,
                        r=out(reg) j,
                        zf=out(reg_byte) b
                    );
                }
                if b==0
                {
                    let pos=(i<<5)+j as usize;
                    // Hit past N ⇒ only the last word's tail matched; done.
                    return if pos<N {Some(pos)} else {None};
                }
            }
            None
        }
        #[cfg(all(not(target_arch="x86_64"),not(target_arch="x86")))]
        {
            // Portable fallback: probe every bit through `test`.
            for i in 0..N
            {
                if self.test(i)==Ok(false)
                {
                    return Some(i);
                }
            }
            None
        }
    }
443
    /// Scans the bitmap from bit 0 upwards for the first set (1) bit.
    ///
    /// Returns `Some(position)` of the lowest set bit, or `None` when all
    /// `N` bits are cleared.
    pub fn search_set_forward(&self)->Option<usize>
    {
        #[cfg(target_arch="x86_64")]
        {
            let bmp:*const u64=(&raw const *self).cast();
            // Number of 64-bit words that cover N bits (rounded up).
            let lim=(N>>6)+if (N&0x3F)!=0 {1} else {0};
            for i in 0..lim
            {
                let j:u64;
                let b:u8;
                // SAFETY: `i` stays below ceil(N/64), inside the storage the
                // caller promised when constructing this view.
                unsafe
                {
                    // BSF finds the lowest set bit; it sets ZF when the word
                    // is zero, which SETZ captures into `b`.
                    asm!
                    (
                        "mov {v},qword ptr [{p}]",
                        "bsf {r},{v}",
                        "setz {zf}",
                        p=in(reg) bmp.add(i),
                        v=out(reg) _,
                        r=out(reg) j,
                        zf=out(reg_byte) b
                    );
                }
                if b==0
                {
                    let pos=(i<<6)+j as usize;
                    // A hit at or past N means all valid bits in this (last)
                    // word are clear, and earlier words were already empty.
                    return if pos<N {Some(pos)} else {None};
                }
            }
            None
        }
        #[cfg(target_arch="x86")]
        {
            let bmp:*const u32=(&raw const *self).cast();
            // Number of 32-bit words that cover N bits (rounded up).
            let lim=(N>>5)+if (N&0x1F)!=0 {1} else {0};
            for i in 0..lim
            {
                let j:u32;
                let b:u8;
                // SAFETY: `i` stays below ceil(N/32), inside the storage.
                unsafe
                {
                    // BSF straight from memory; ZF flags a zero word.
                    asm!
                    (
                        "bsf {r},dword ptr [{p}]",
                        "setz {zf}",
                        p=in(reg) bmp.add(i),
                        r=out(reg) j,
                        zf=out(reg_byte) b
                    );
                }
                if b==0
                {
                    let pos=(i<<5)+j as usize;
                    // Hit past N ⇒ only garbage tail bits were set; done.
                    return if pos<N {Some(pos)} else {None};
                }
            }
            None
        }
        #[cfg(all(not(target_arch="x86_64"),not(target_arch="x86")))]
        {
            // Portable fallback: probe every bit through `test`.
            for i in 0..N
            {
                if self.test(i)==Ok(true)
                {
                    return Some(i);
                }
            }
            None
        }
    }
525
526 pub fn search_cleared_backward(&self)->Option<usize>
538 {
539 #[cfg(target_arch="x86_64")]
540 {
541 let bmp:*const u64=(&raw const *self).cast();
542 let lim=(N>>6)+if (N&0x3F)!=0 {1} else {0};
543 for i in (0..lim).rev()
544 {
545 let j:u64;
546 let b:u8;
547 unsafe
548 {
549 asm!
550 (
551 "mov {v},qword ptr [{p}]",
552 "not {v}",
553 "bsr {r},{v}",
554 "setz {zf}",
555 p=in(reg) bmp.add(i),
556 v=out(reg) _,
557 r=out(reg) j,
558 zf=out(reg_byte) b
559 );
560 }
561 if b==0
562 {
563 let pos=(i<<6)+j as usize;
564 return if pos<N {Some(pos)} else {None};
565 }
566 }
567 None
568 }
569 #[cfg(target_arch="x86")]
570 {
571 let bmp:*const u32=(&raw const *self).cast();
572 let lim=(N>>5)+if (N&0x1F)!=0 {1} else {0};
573 for i in (0..lim).rev()
574 {
575 let j:u32;
576 let b:u8;
577 unsafe
578 {
579 asm!
580 (
581 "mov {v},dword ptr [{p}]",
582 "not {v}",
583 "bsr {r},{v}",
584 "setz {zf}",
585 p=in(reg) bmp.add(i),
586 v=out(reg) _,
587 r=out(reg) j,
588 zf=out(reg_byte) b
589 );
590 }
591 if b==0
592 {
593 let pos=(i<<5)+j as usize;
594 return if pos<N {Some(pos)} else {None};
595 }
596 }
597 None
598 }
599 #[cfg(all(not(target_arch="x86_64"),not(target_arch="x86")))]
600 {
601 for i in (0..N).rev()
602 {
603 if self.test(i)==Ok(false)
604 {
605 return Some(i);
606 }
607 }
608 None
609 }
610 }
611
612 pub fn search_set_backward(&self)->Option<usize>
624 {
625 #[cfg(target_arch="x86_64")]
626 {
627 let bmp:*const u64=(&raw const *self).cast();
628 let lim=(N>>6)+if (N&0x3F)!=0 {1} else {0};
629 for i in (0..lim).rev()
630 {
631 let j:u64;
632 let b:u8;
633 unsafe
634 {
635 asm!
636 (
637 "mov {v},qword ptr [{p}]",
638 "bsr {r},{v}",
639 "setz {zf}",
640 p=in(reg) bmp.add(i),
641 v=out(reg) _,
642 r=out(reg) j,
643 zf=out(reg_byte) b
644 );
645 }
646 if b==0
647 {
648 let pos=(i<<6)+j as usize;
649 return if pos<N {Some(pos)} else {None};
650 }
651 }
652 None
653 }
654 #[cfg(target_arch="x86")]
655 {
656 let bmp:*const u32=(&raw const *self).cast();
657 let lim=(N>>5)+if (N&0x1F)!=0 {1} else {0};
658 for i in (0..lim).rev()
659 {
660 let j:u32;
661 let b:u8;
662 unsafe
663 {
664 asm!
665 (
666 "bsr {r},dword ptr [{p}]",
667 "setz {zf}",
668 p=in(reg) bmp.add(i),
669 r=out(reg) j,
670 zf=out(reg_byte) b
671 );
672 }
673 if b==0
674 {
675 let pos=(i<<5)+j as usize;
676 return if pos<N {Some(pos)} else {None};
677 }
678 }
679 None
680 }
681 #[cfg(all(not(target_arch="x86_64"),not(target_arch="x86")))]
682 {
683 for i in (0..N).rev()
684 {
685 if self.test(i)==Ok(true)
686 {
687 return Some(i);
688 }
689 }
690 None
691 }
692 }
693}