1use core::fmt;
3#[cfg(target_arch="x86_64")]
4use core::arch::{asm, x86_64::{_bittest64,_bittestandcomplement64,_bittestandreset64,_bittestandset64}};
5#[cfg(target_arch="x86")]
6use core::arch::{asm, x86::{_bittest,_bittestandcomplement,_bittestandreset,_bittestandset}};
7
/// Error returned when a requested bit position lies outside a bitmap's
/// `N`-bit range.
#[derive(PartialEq, Debug)]
pub struct OutOfBitmapError
{
    /// The offending (rejected) bit position.
    position:usize,
    /// The bitmap's capacity in bits — the exclusive upper bound for
    /// valid positions.
    limit:usize
}
14
15impl OutOfBitmapError
16{
17 pub const fn new(position:usize,limit:usize)->Self
18 {
19 Self
20 {
21 position,
22 limit
23 }
24 }
25}
26
27impl fmt::Display for OutOfBitmapError
28{
29 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result
30 {
31 write!(f,"bit {} is out of bitmap's limit {}",self.position,self.limit)
32 }
33}
34
35pub struct RefBitmap<const N:usize>;
39
impl<'a,const N:usize> RefBitmap<N>
{
    /// Reinterprets `ptr` as a shared reference to an `N`-bit bitmap.
    ///
    /// # Safety
    /// `ptr` must be non-null and point to memory that holds at least `N`
    /// bits, remains valid and readable for the whole lifetime `'a`, and is
    /// not mutated through any other path while the reference lives.
    /// (`Self` is zero-sized, so all bounds information comes from `N`.)
    pub unsafe fn from_raw_ptr(ptr:*const usize)->&'a Self
    {
        unsafe
        {
            &*ptr.cast()
        }
    }

    /// Reinterprets `ptr` as an exclusive reference to an `N`-bit bitmap.
    ///
    /// # Safety
    /// `ptr` must be non-null and point to memory that holds at least `N`
    /// bits, remains valid, readable and writable for the whole lifetime
    /// `'a`, and is not accessed through any other path while the reference
    /// lives.
    pub unsafe fn from_raw_mut_ptr(ptr:*mut usize)->&'a mut Self
    {
        unsafe
        {
            &mut *ptr.cast()
        }
    }
}
66
67impl<const N:usize> RefBitmap<N>
68{
69 pub fn test(&self,position:usize)->Result<bool,OutOfBitmapError>
84 {
85 if position<N
86 {
87 #[cfg(target_arch="x86_64")]
88 {
89 let bmp:*const i64=(&raw const *self).cast();
91 unsafe
92 {
93 Ok(_bittest64(bmp,position as i64)!=0)
94 }
95 }
96 #[cfg(target_arch="x86")]
97 {
98 let bmp:*const i32=(&raw const *self).cast();
100 unsafe
101 {
102 Ok(_bittest(bmp,position as i32)!=0)
103 }
104 }
105 #[cfg(all(not(target_arch="x86_64"),not(target_arch="x86")))]
106 {
107 let bmp:*const u32=(&raw const *self).cast();
109 let i=position>>5;
110 let j=position&0x1F;
111 unsafe
112 {
113 Ok((bmp.add(i).read_unaligned()&(1<<j))!=0)
114 }
115 }
116 }
117 else
118 {
119 Err(OutOfBitmapError::new(position,N))
120 }
121 }
122
123 pub fn set(&mut self,position:usize)->Result<bool,OutOfBitmapError>
136 {
137 if position<N
138 {
139 #[cfg(target_arch="x86_64")]
140 {
141 let bmp:*mut i64=(&raw mut *self).cast();
143 unsafe
144 {
145 Ok(_bittestandset64(bmp,position as i64)!=0)
146 }
147 }
148 #[cfg(target_arch="x86")]
149 {
150 let bmp:*mut i32=(&raw mut *self).cast();
152 unsafe
153 {
154 Ok(_bittestandset(bmp,position as i32)!=0)
155 }
156 }
157 #[cfg(all(not(target_arch="x86_64"),not(target_arch="x86")))]
158 {
159 let bmp:*mut u32=(&raw mut *self).cast();
161 let i=position>>5;
162 let j=position&0x1F;
163 let v=1<<j;
164 unsafe
165 {
166 let old=bmp.add(i).read_unaligned();
167 bmp.add(i).write_unaligned(old|v);
168 Ok((old&v)!=0)
169 }
170 }
171 }
172 else
173 {
174 Err(OutOfBitmapError::new(position,N))
175 }
176 }
177
178 pub fn reset(&mut self,position:usize)->Result<bool,OutOfBitmapError>
191 {
192 if position<N
193 {
194 #[cfg(target_arch="x86_64")]
195 {
196 let bmp:*mut i64=(&raw mut *self).cast();
198 unsafe
199 {
200 Ok(_bittestandreset64(bmp,position as i64)!=0)
201 }
202 }
203 #[cfg(target_arch="x86")]
204 {
205 let bmp:*mut i32=(&raw mut *self).cast();
207 unsafe
208 {
209 Ok(_bittestandreset(bmp,position as i32)!=0)
210 }
211 }
212 #[cfg(all(not(target_arch="x86_64"),not(target_arch="x86")))]
213 {
214 let bmp:*mut u32=(&raw mut *self).cast();
216 let i=position>>5;
217 let j=position&0x1F;
218 let v=1<<j;
219 unsafe
220 {
221 let old=bmp.add(i).read_unaligned();
222 bmp.add(i).write_unaligned(old&!v);
223 Ok((old&v)!=0)
224 }
225 }
226 }
227 else
228 {
229 Err(OutOfBitmapError::new(position,N))
230 }
231 }
232
233 pub fn complement(&mut self,position:usize)->Result<bool,OutOfBitmapError>
246 {
247 if position<N
248 {
249 #[cfg(target_arch="x86_64")]
250 {
251 let bmp:*mut i64=(&raw mut *self).cast();
253 unsafe
254 {
255 Ok(_bittestandcomplement64(bmp,position as i64)!=0)
256 }
257 }
258 #[cfg(target_arch="x86")]
259 {
260 let bmp:*mut i32=(&raw mut *self).cast();
262 unsafe
263 {
264 Ok(_bittestandcomplement(bmp,position as i32)!=0)
265 }
266 }
267 #[cfg(all(not(target_arch="x86_64"),not(target_arch="x86")))]
268 {
269 let bmp:*mut u32=(&raw mut *self).cast();
271 let i=position>>5;
272 let j=position&0x1F;
273 let v=1<<j;
274 unsafe
275 {
276 let old=bmp.add(i).read_unaligned();
277 let r=(old&v)!=0;
278 bmp.add(i).write_unaligned(if r {old&!v} else {old|v});
279 Ok(r)
280 }
281 }
282 }
283 else
284 {
285 Err(OutOfBitmapError::new(position,N))
286 }
287 }
288
289 pub fn assign(&mut self,position:usize,value:bool)->Result<bool,OutOfBitmapError>
303 {
304 if position<N
305 {
306 #[cfg(target_arch="x86_64")]
307 {
308 let bmp:*mut i64=(&raw mut *self).cast();
310 unsafe
311 {
312 if value
313 {
314 Ok(_bittestandset64(bmp,position as i64)!=0)
315 }
316 else
317 {
318 Ok(_bittestandreset64(bmp,position as i64)!=0)
319 }
320 }
321 }
322 #[cfg(target_arch="x86")]
323 {
324 let bmp:*mut i32=(&raw mut *self).cast();
326 unsafe
327 {
328 if value
329 {
330 Ok(_bittestandset(bmp,position as i32)!=0)
331 }
332 else
333 {
334 Ok(_bittestandreset(bmp,position as i32)!=0)
335 }
336 }
337 }
338 #[cfg(all(not(target_arch="x86_64"),not(target_arch="x86")))]
339 {
340 let bmp:*mut u32=(&raw mut *self).cast();
342 let i=position>>5;
343 let j=position&0x1F;
344 let v=1<<j;
345 unsafe
346 {
347 let old=bmp.add(i).read_unaligned();
348 bmp.add(i).write_unaligned(if value {old|v} else {old&!v});
349 Ok((old&v)!=0)
350 }
351 }
352 }
353 else
354 {
355 Err(OutOfBitmapError::new(position,N))
356 }
357 }
358
359 pub fn search_cleared_forward(&self)->Option<usize>
371 {
372 #[cfg(target_arch="x86_64")]
373 {
374 let bmp:*const u64=(&raw const *self).cast();
375 let lim=(N>>6)+if (N&0x3F)!=0 {1} else {0};
376 for i in 0..lim
377 {
378 let j:u64;
379 let b:u8;
380 unsafe
381 {
382 asm!
383 (
384 "mov {v},qword ptr [{p}]",
385 "not {v}",
386 "bsf {r},{v}",
387 "setz {zf}",
388 p=in(reg) bmp.add(i),
389 v=out(reg) _,
390 r=out(reg) j,
391 zf=out(reg_byte) b
392 );
393 }
394 if b==0
395 {
396 let pos=(i<<6)+j as usize;
397 return if pos<N {Some(pos)} else {None};
398 }
399 }
400 None
401 }
402 #[cfg(target_arch="x86")]
403 {
404 let bmp:*const u32=(&raw const *self).cast();
405 let lim=(N>>5)+if (N&0x1F)!=0 {1} else {0};
406 for i in 0..lim
407 {
408 let j:u32;
409 let b:u8;
410 unsafe
411 {
412 asm!
413 (
414 "mov {v},dword ptr [{p}]",
415 "not {v}",
416 "bsf {r},{v}",
417 "setz {zf}",
418 p=in(reg) bmp.add(i),
419 v=out(reg) _,
420 r=out(reg) j,
421 zf=out(reg_byte) b
422 );
423 }
424 if b==0
425 {
426 let pos=(i<<5)+j as usize;
427 return if pos<N {Some(pos)} else {None};
428 }
429 }
430 None
431 }
432 #[cfg(all(not(target_arch="x86_64"),not(target_arch="x86")))]
433 {
434 for i in 0..N
435 {
436 if self.test(i)==Ok(false)
437 {
438 return Some(i);
439 }
440 }
441 None
442 }
443 }
444
445 pub fn search_set_forward(&self)->Option<usize>
457 {
458 #[cfg(target_arch="x86_64")]
459 {
460 let bmp:*const u64=(&raw const *self).cast();
461 let lim=(N>>6)+if (N&0x3F)!=0 {1} else {0};
462 for i in 0..lim
463 {
464 let j:u64;
465 let b:u8;
466 unsafe
467 {
468 asm!
469 (
470 "mov {v},qword ptr [{p}]",
471 "bsf {r},{v}",
472 "setz {zf}",
473 p=in(reg) bmp.add(i),
474 v=out(reg) _,
475 r=out(reg) j,
476 zf=out(reg_byte) b
477 );
478 }
479 if b==0
480 {
481 let pos=(i<<6)+j as usize;
482 return if pos<N {Some(pos)} else {None};
483 }
484 }
485 None
486 }
487 #[cfg(target_arch="x86")]
488 {
489 let bmp:*const u32=(&raw const *self).cast();
490 let lim=(N>>5)+if (N&0x1F)!=0 {1} else {0};
491 for i in 0..lim
492 {
493 let j:u32;
494 let b:u8;
495 unsafe
496 {
497 asm!
498 (
499 "bsf {r},dword ptr [{p}]",
500 "setz {zf}",
501 p=in(reg) bmp.add(i),
502 r=out(reg) j,
503 zf=out(reg_byte) b
504 );
505 }
506 if b==0
507 {
508 let pos=(i<<5)+j as usize;
509 return if pos<N {Some(pos)} else {None};
510 }
511 }
512 None
513 }
514 #[cfg(all(not(target_arch="x86_64"),not(target_arch="x86")))]
515 {
516 for i in 0..N
517 {
518 if self.test(i)==Ok(true)
519 {
520 return Some(i);
521 }
522 }
523 None
524 }
525 }
526
527 pub fn search_cleared_backward(&self)->Option<usize>
539 {
540 #[cfg(target_arch="x86_64")]
541 {
542 let bmp:*const u64=(&raw const *self).cast();
543 let lim=(N>>6)+if (N&0x3F)!=0 {1} else {0};
544 for i in (0..lim).rev()
545 {
546 let j:u64;
547 let b:u8;
548 unsafe
549 {
550 asm!
551 (
552 "mov {v},qword ptr [{p}]",
553 "not {v}",
554 "bsr {r},{v}",
555 "setz {zf}",
556 p=in(reg) bmp.add(i),
557 v=out(reg) _,
558 r=out(reg) j,
559 zf=out(reg_byte) b
560 );
561 }
562 if b==0
563 {
564 let pos=(i<<6)+j as usize;
565 return if pos<N {Some(pos)} else {None};
566 }
567 }
568 None
569 }
570 #[cfg(target_arch="x86")]
571 {
572 let bmp:*const u32=(&raw const *self).cast();
573 let lim=(N>>5)+if (N&0x1F)!=0 {1} else {0};
574 for i in (0..lim).rev()
575 {
576 let j:u32;
577 let b:u8;
578 unsafe
579 {
580 asm!
581 (
582 "mov {v},dword ptr [{p}]",
583 "not {v}",
584 "bsr {r},{v}",
585 "setz {zf}",
586 p=in(reg) bmp.add(i),
587 v=out(reg) _,
588 r=out(reg) j,
589 zf=out(reg_byte) b
590 );
591 }
592 if b==0
593 {
594 let pos=(i<<5)+j as usize;
595 return if pos<N {Some(pos)} else {None};
596 }
597 }
598 None
599 }
600 #[cfg(all(not(target_arch="x86_64"),not(target_arch="x86")))]
601 {
602 for i in (0..N).rev()
603 {
604 if self.test(i)==Ok(false)
605 {
606 return Some(i);
607 }
608 }
609 None
610 }
611 }
612
613 pub fn search_set_backward(&self)->Option<usize>
625 {
626 #[cfg(target_arch="x86_64")]
627 {
628 let bmp:*const u64=(&raw const *self).cast();
629 let lim=(N>>6)+if (N&0x3F)!=0 {1} else {0};
630 for i in (0..lim).rev()
631 {
632 let j:u64;
633 let b:u8;
634 unsafe
635 {
636 asm!
637 (
638 "mov {v},qword ptr [{p}]",
639 "bsr {r},{v}",
640 "setz {zf}",
641 p=in(reg) bmp.add(i),
642 v=out(reg) _,
643 r=out(reg) j,
644 zf=out(reg_byte) b
645 );
646 }
647 if b==0
648 {
649 let pos=(i<<6)+j as usize;
650 return if pos<N {Some(pos)} else {None};
651 }
652 }
653 None
654 }
655 #[cfg(target_arch="x86")]
656 {
657 let bmp:*const u32=(&raw const *self).cast();
658 let lim=(N>>5)+if (N&0x1F)!=0 {1} else {0};
659 for i in (0..lim).rev()
660 {
661 let j:u32;
662 let b:u8;
663 unsafe
664 {
665 asm!
666 (
667 "bsr {r},dword ptr [{p}]",
668 "setz {zf}",
669 p=in(reg) bmp.add(i),
670 r=out(reg) j,
671 zf=out(reg_byte) b
672 );
673 }
674 if b==0
675 {
676 let pos=(i<<5)+j as usize;
677 return if pos<N {Some(pos)} else {None};
678 }
679 }
680 None
681 }
682 #[cfg(all(not(target_arch="x86_64"),not(target_arch="x86")))]
683 {
684 for i in (0..N).rev()
685 {
686 if self.test(i)==Ok(true)
687 {
688 return Some(i);
689 }
690 }
691 None
692 }
693 }
694}