use crate::arena::*;
use crate::atom_table::*;
use crate::forms::*;
use crate::machine::machine_indices::*;
use crate::machine::partial_string::PartialString;
use crate::machine::streams::*;
use crate::parser::ast::Fixnum;

use std::cmp::Ordering;
use std::convert::TryFrom;
use std::fmt;
use std::mem;
use std::ops::{Add, Sub, SubAssign};

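/// Tag values stored in the high six bits of a `HeapCellValue`, identifying
/// the kind of term a cell encodes: structures (`Str`), lists (`Lis`),
/// heap/stack/attributed variables, partial strings, and immediate constants.
/// `Cons` is `0b0` so that cells holding raw arena pointers, whose high bits
/// do not form a valid 6-bit tag, are classified as `Cons` by
/// `HeapCellValue::get_tag`.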
#[derive(BitfieldSpecifier, Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord)]
#[bits = 6]
pub enum HeapCellValueTag {
    Str = 0b000011,
    Lis = 0b000101,
    Var = 0b000111,
    StackVar = 0b001001,
    AttrVar = 0b001011,
    PStrLoc = 0b001101,
    PStrOffset = 0b001111,
    Cons = 0b0,
    F64 = 0b010001,
    Fixnum = 0b010011,
    Char = 0b010101,
    Atom = 0b010111,
    PStr = 0b011001,
    CStr = 0b011011,
    CutPoint = 0b011111,
}

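/// A combined view over cell tags: the `HeapCellValueTag` discriminants plus
/// the `Trailed*` discriminants of `TrailEntryTag`, so heap cells and trail
/// entries can be inspected through a single 6-bit tag space.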
#[derive(BitfieldSpecifier, Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord)]
#[bits = 6]
pub enum HeapCellValueView {
    Str = 0b000011,
    Lis = 0b000101,
    Var = 0b000111,
    StackVar = 0b001001,
    AttrVar = 0b001011,
    PStrLoc = 0b001101,
    PStrOffset = 0b001111,
    Cons = 0b0,
    F64 = 0b010001,
    Fixnum = 0b010011,
    Char = 0b010101,
    Atom = 0b010111,
    PStr = 0b011001,
    CStr = 0b011011,
    CutPoint = 0b011111,
    TrailedHeapVar = 0b101111,
    TrailedStackVar = 0b101011,
    TrailedAttrVar = 0b100001,
    TrailedAttrVarListLink = 0b100011,
    TrailedAttachedValue = 0b100101,
    TrailedBlackboardEntry = 0b100111,
    TrailedBlackboardOffset = 0b110011,
}

#[derive(BitfieldSpecifier, Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord)]
#[bits = 1]
pub enum ConsPtrMaskTag {
    Cons = 0b0,
}

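/// A tagged arena pointer: the low 61 bits hold the address of an
/// `ArenaHeader`, followed by a forwarding bit (`f`), a mark bit (`m`) and a
/// one-bit `Cons` tag.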
#[bitfield]
#[repr(u64)]
#[derive(Copy, Clone, Debug)]
pub struct ConsPtr {
    ptr: B61,
    f: bool,
    m: bool,
    tag: ConsPtrMaskTag,
}

impl ConsPtr {
    #[inline(always)]
    pub fn build_with(ptr: *const ArenaHeader, tag: ConsPtrMaskTag) -> Self {
        ConsPtr::new()
            .with_ptr(ptr as *const u8 as u64)
            .with_f(false)
            .with_m(false)
            .with_tag(tag)
    }

    #[cfg(target_pointer_width = "32")]
    #[inline(always)]
    pub fn as_ptr(self) -> *mut u8 {
        let bytes = self.into_bytes();
        // the pointer occupies the low-order bytes of the bitfield.
        let raw_ptr_bytes = [bytes[0], bytes[1], bytes[2], bytes[3]];
        unsafe { mem::transmute(raw_ptr_bytes) }
    }

    #[cfg(target_pointer_width = "64")]
    #[inline(always)]
    pub fn as_ptr(self) -> *mut u8 {
        self.ptr() as *mut _
    }

    #[inline(always)]
    pub fn get_tag(self) -> ConsPtrMaskTag {
        self.tag()
    }
}

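/// Tags for `Ref`, distinguishing unbound heap cells, stack cells and
/// attributed variables. The discriminants coincide with the corresponding
/// `HeapCellValueTag` values, so a `Ref` can be reinterpreted directly as a
/// `HeapCellValue` (see `Ref::as_heap_cell_value`).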
#[derive(BitfieldSpecifier, Copy, Clone, Debug)]
#[bits = 6]
pub(crate) enum RefTag {
    HeapCell = 0b000111,
    StackCell = 0b001001,
    AttrVar = 0b001011,
}

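/// A reference to a variable binding site: a 56-bit heap or stack index
/// packed with mark/forwarding bits and a `RefTag`. The `Ord` impl orders
/// heap and attributed variables before stack variables, then by index.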
#[bitfield]
#[repr(u64)]
#[derive(Copy, Clone, Debug, Hash, PartialEq, Eq)]
pub struct Ref {
    val: B56,
    #[allow(unused)]
    m: bool,
    #[allow(unused)]
    f: bool,
    tag: RefTag,
}

impl Ord for Ref {
    fn cmp(&self, rhs: &Ref) -> Ordering {
        match self.get_tag() {
            RefTag::HeapCell | RefTag::AttrVar => match rhs.get_tag() {
                RefTag::StackCell => Ordering::Less,
                _ => self.get_value().cmp(&rhs.get_value()),
            },
            RefTag::StackCell => match rhs.get_tag() {
                RefTag::StackCell => self.get_value().cmp(&rhs.get_value()),
                _ => Ordering::Greater,
            },
        }
    }
}

impl PartialOrd for Ref {
    fn partial_cmp(&self, rhs: &Self) -> Option<Ordering> {
        Some(self.cmp(rhs))
    }
}

impl Ref {
    #[inline(always)]
    pub(crate) fn build_with(tag: RefTag, value: u64) -> Self {
        Ref::new().with_tag(tag).with_val(value)
    }

    #[inline(always)]
    pub(crate) fn get_tag(self) -> RefTag {
        self.tag()
    }

    #[inline(always)]
    pub(crate) fn get_value(self) -> u64 {
        self.val()
    }

    #[inline(always)]
    pub(crate) fn as_heap_cell_value(self) -> HeapCellValue {
        HeapCellValue::from_bytes(self.into_bytes())
    }

    #[inline(always)]
    pub(crate) fn heap_cell(h: usize) -> Self {
        Ref::build_with(RefTag::HeapCell, h as u64)
    }

    #[inline(always)]
    pub(crate) fn stack_cell(h: usize) -> Self {
        Ref::build_with(RefTag::StackCell, h as u64)
    }

    #[inline(always)]
    pub(crate) fn attr_var(h: usize) -> Self {
        Ref::build_with(RefTag::AttrVar, h as u64)
    }
}

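/// The kinds of bindings recorded on the trail so they can be undone on
/// backtracking: plain variable bindings (`Ref`), attributed-variable list
/// links, and blackboard entries/offsets keyed by atom.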
#[derive(Debug, Clone, Copy)]
pub enum TrailRef {
    Ref(Ref),
    AttrVarListLink(usize, usize),
    BlackboardEntry(Atom),
    BlackboardOffset(Atom, HeapCellValue),
}

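/// Tags for packed trail entries. The discriminants sit in the 6-bit tag
/// space above the `HeapCellValueTag` values and match the `Trailed*`
/// variants of `HeapCellValueView`.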
#[allow(clippy::enum_variant_names)]
#[derive(BitfieldSpecifier, Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord)]
#[bits = 6]
pub(crate) enum TrailEntryTag {
    TrailedHeapVar = 0b101111,
    TrailedStackVar = 0b101011,
    TrailedAttrVar = 0b100001,
    TrailedAttrVarListLink = 0b100011,
    TrailedAttachedValue = 0b100101,
    TrailedBlackboardEntry = 0b100111,
    TrailedBlackboardOffset = 0b110011,
}

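/// A packed trail record: a 56-bit value (typically a heap or stack index)
/// plus a `TrailEntryTag`. `get_tag` falls back to `TrailedAttachedValue`
/// when the tag bits do not name a valid `TrailEntryTag`, i.e. when the
/// entry carries an attached cell rather than a tagged index.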
#[bitfield]
#[derive(Copy, Clone, Debug)]
#[repr(u64)]
pub(crate) struct TrailEntry {
    val: B56,
    #[allow(unused)]
    f: bool,
    #[allow(unused)]
    m: bool,
    #[allow(unused)]
    tag: TrailEntryTag,
}

impl TrailEntry {
    #[inline(always)]
    pub(crate) fn build_with(tag: TrailEntryTag, value: u64) -> Self {
        TrailEntry::new()
            .with_tag(tag)
            .with_m(false)
            .with_f(false)
            .with_val(value)
    }

    #[inline(always)]
    pub(crate) fn get_tag(self) -> TrailEntryTag {
        match self.tag_or_err() {
            Ok(tag) => tag,
            Err(_) => TrailEntryTag::TrailedAttachedValue,
        }
    }

    #[inline]
    pub(crate) fn get_value(self) -> u64 {
        self.val()
    }
}

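/// A single 64-bit heap cell: a 56-bit payload (`val`), a forwarding bit
/// (`f`), a mark bit (`m`) and a 6-bit `HeapCellValueTag`. Cells tagged
/// `Cons` reinterpret the entire word as a `ConsPtr`, so the full 61-bit
/// arena pointer is preserved.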
#[bitfield]
#[repr(u64)]
#[derive(Copy, Clone, Hash, PartialEq, Eq)]
pub struct HeapCellValue {
    val: B56,
    f: bool,
    m: bool,
    tag: HeapCellValueTag,
}

impl fmt::Debug for HeapCellValue {
    fn fmt(&self, f: &mut std::fmt::Formatter) -> fmt::Result {
        match self.get_tag() {
            HeapCellValueTag::F64 => f
                .debug_struct("HeapCellValue")
                .field("tag", &HeapCellValueTag::F64)
                .field("offset", &self.get_value())
                .field("m", &self.m())
                .field("f", &self.f())
                .finish(),
            HeapCellValueTag::Cons => {
                let cons_ptr = ConsPtr::from_bytes(self.into_bytes());

                f.debug_struct("HeapCellValue")
                    .field("tag", &HeapCellValueTag::Cons)
                    .field("ptr", &cons_ptr.ptr())
                    .field("m", &cons_ptr.m())
                    .field("f", &cons_ptr.f())
                    .finish()
            }
            HeapCellValueTag::Atom => {
                let (name, arity) = cell_as_atom_cell!(self).get_name_and_arity();

                f.debug_struct("HeapCellValue")
                    .field("tag", &HeapCellValueTag::Atom)
                    .field("name", &name.as_str())
                    .field("arity", &arity)
                    .field("m", &self.m())
                    .field("f", &self.f())
                    .finish()
            }
            HeapCellValueTag::PStr => {
                let (name, _) = cell_as_atom_cell!(self).get_name_and_arity();

                f.debug_struct("HeapCellValue")
                    .field("tag", &HeapCellValueTag::PStr)
                    .field("contents", &name.as_str())
                    .field("m", &self.m())
                    .field("f", &self.f())
                    .finish()
            }
            tag => f
                .debug_struct("HeapCellValue")
                .field("tag", &tag)
                .field("value", &self.get_value())
                .field("m", &self.get_mark_bit())
                .field("f", &self.get_forwarding_bit())
                .finish(),
        }
    }
}

impl<T: ArenaAllocated> From<TypedArenaPtr<T>> for HeapCellValue {
    #[inline]
    fn from(arena_ptr: TypedArenaPtr<T>) -> HeapCellValue {
        HeapCellValue::from(arena_ptr.header_ptr() as u64)
    }
}

impl From<F64Ptr> for HeapCellValue {
    #[inline]
    fn from(f64_ptr: F64Ptr) -> HeapCellValue {
        HeapCellValue::build_with(HeapCellValueTag::F64, f64_ptr.as_offset().to_u64())
    }
}

impl From<ConsPtr> for HeapCellValue {
    #[inline(always)]
    fn from(cons_ptr: ConsPtr) -> HeapCellValue {
        HeapCellValue::from_bytes(
            ConsPtr::from(cons_ptr.as_ptr() as u64)
                .with_tag(ConsPtrMaskTag::Cons)
                .with_m(false)
                .into_bytes(),
        )
    }
}

impl From<(Number, &mut Arena)> for HeapCellValue {
    #[inline(always)]
    fn from((n, arena): (Number, &mut Arena)) -> HeapCellValue {
        use ordered_float::OrderedFloat;

        match n {
            Number::Float(OrderedFloat(n)) => HeapCellValue::from(float_alloc!(n, arena)),
            Number::Integer(n) => HeapCellValue::from(n),
            Number::Rational(n) => HeapCellValue::from(n),
            Number::Fixnum(n) => fixnum_as_cell!(n),
        }
    }
}

impl HeapCellValue {
    #[inline(always)]
    pub fn build_with(tag: HeapCellValueTag, value: u64) -> Self {
        HeapCellValue::new()
            .with_tag(tag)
            .with_val(value)
            .with_m(false)
            .with_f(false)
    }

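    /// Returns true iff this cell terminates a string on the heap: the empty
    /// list atom `[]`, a complete string (`CStr`), or a bound variable /
    /// partial-string reference that ultimately resolves to one of those.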
    #[inline]
    pub fn is_string_terminator(mut self, heap: &[HeapCellValue]) -> bool {
        use crate::machine::heap::*;

        loop {
            return read_heap_cell!(self,
                (HeapCellValueTag::Atom, (name, arity)) => {
                    name == atom!("[]") && arity == 0
                }
                (HeapCellValueTag::CStr) => {
                    true
                }
                (HeapCellValueTag::PStrLoc, h) => {
                    self = heap[h];
                    continue;
                }
                (HeapCellValueTag::AttrVar | HeapCellValueTag::Var, h) => {
                    let cell = heap_bound_store(heap, heap_bound_deref(heap, heap[h]));

                    if cell.is_var() {
                        return false;
                    }

                    self = cell;
                    continue;
                }
                (HeapCellValueTag::PStrOffset, pstr_offset) => {
                    heap[pstr_offset].get_tag() == HeapCellValueTag::CStr
                }
                _ => {
                    false
                }
            );
        }
    }

    #[inline]
    pub fn is_ref(self) -> bool {
        matches!(
            self.get_tag(),
            HeapCellValueTag::Str
                | HeapCellValueTag::Lis
                | HeapCellValueTag::Var
                | HeapCellValueTag::StackVar
                | HeapCellValueTag::AttrVar
                | HeapCellValueTag::PStrLoc
                | HeapCellValueTag::PStrOffset
        )
    }

    #[inline]
    pub fn as_char(self) -> Option<char> {
        read_heap_cell!(self,
            (HeapCellValueTag::Char, c) => {
                Some(c)
            }
            (HeapCellValueTag::Atom, (name, arity)) => {
                if arity > 0 {
                    return None;
                }

                name.as_char()
            }
            _ => {
                None
            }
        )
    }

    #[inline]
    pub fn is_constant(self) -> bool {
        match self.get_tag() {
            HeapCellValueTag::Cons
            | HeapCellValueTag::F64
            | HeapCellValueTag::Fixnum
            | HeapCellValueTag::CutPoint
            | HeapCellValueTag::Char
            | HeapCellValueTag::CStr => true,
            HeapCellValueTag::Atom => cell_as_atom_cell!(self).get_arity() == 0,
            _ => false,
        }
    }

    #[inline(always)]
    pub fn is_stack_var(self) -> bool {
        self.get_tag() == HeapCellValueTag::StackVar
    }

    #[inline]
    pub fn is_compound(self, heap: &[HeapCellValue]) -> bool {
        match self.get_tag() {
            HeapCellValueTag::Str => {
                cell_as_atom_cell!(heap[self.get_value() as usize]).get_arity() > 0
            }
            HeapCellValueTag::Lis
            | HeapCellValueTag::CStr
            | HeapCellValueTag::PStr
            | HeapCellValueTag::PStrLoc
            | HeapCellValueTag::PStrOffset => true,
            HeapCellValueTag::Atom => cell_as_atom_cell!(self).get_arity() > 0,
            _ => false,
        }
    }

    #[inline]
    pub fn is_var(self) -> bool {
        read_heap_cell!(self,
            (HeapCellValueTag::Var | HeapCellValueTag::AttrVar | HeapCellValueTag::StackVar) => {
                true
            }
            _ => {
                false
            }
        )
    }

    #[inline]
    pub(crate) fn as_var(self) -> Option<Ref> {
        read_heap_cell!(self,
            (HeapCellValueTag::Var, h) => {
                Some(Ref::heap_cell(h))
            }
            (HeapCellValueTag::AttrVar, h) => {
                Some(Ref::attr_var(h))
            }
            (HeapCellValueTag::StackVar, s) => {
                Some(Ref::stack_cell(s))
            }
            _ => {
                None
            }
        )
    }

    #[inline]
    pub fn get_value(self) -> u64 {
        self.val()
    }

    #[inline]
    pub fn set_value(&mut self, val: u64) {
        self.set_val(val);
    }

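    /// Reads the 6-bit tag. If the bits do not name a valid
    /// `HeapCellValueTag`, the cell is taken to hold an arena pointer (whose
    /// high bits, including a `ConsPtr`'s mark and forwarding bits, overlap
    /// the tag field) and `Cons` is returned.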
    #[inline]
    pub fn get_tag(self) -> HeapCellValueTag {
        match self.tag_or_err() {
            Ok(tag) => tag,
            Err(_) => match ConsPtr::from_bytes(self.into_bytes()).tag() {
                ConsPtrMaskTag::Cons => HeapCellValueTag::Cons,
            },
        }
    }

    #[inline]
    pub fn to_atom(self) -> Option<Atom> {
        match self.tag() {
            HeapCellValueTag::Atom => Some(Atom::from(self.val() << 3)),
            _ => None,
        }
    }

    #[inline]
    pub fn to_pstr(self) -> Option<PartialString> {
        match self.tag() {
            HeapCellValueTag::PStr => Some(PartialString::from(Atom::from(self.val() << 3))),
            _ => None,
        }
    }

    #[inline]
    pub fn to_fixnum(self) -> Option<Fixnum> {
        match self.get_tag() {
            HeapCellValueTag::Fixnum => Some(Fixnum::from_bytes(self.into_bytes())),
            _ => None,
        }
    }

    #[cfg(target_pointer_width = "32")]
    #[inline]
    pub fn from_raw_ptr_bytes(ptr_bytes: [u8; 4]) -> Self {
        HeapCellValue::from_bytes([
            ptr_bytes[0],
            ptr_bytes[1],
            ptr_bytes[2],
            ptr_bytes[3],
            0,
            0,
            0,
            0,
        ])
    }

    #[cfg(target_pointer_width = "64")]
    #[inline]
    pub fn from_raw_ptr_bytes(ptr_bytes: [u8; 8]) -> Self {
        HeapCellValue::from_bytes(ptr_bytes)
    }

    #[inline]
    #[cfg(target_pointer_width = "32")]
    pub fn to_raw_ptr_bytes(self) -> [u8; 4] {
        let bytes = self.into_bytes();
        [bytes[0], bytes[1], bytes[2], bytes[3]]
    }

    #[inline]
    #[cfg(target_pointer_width = "64")]
    pub fn to_raw_ptr_bytes(self) -> [u8; 8] {
        self.into_bytes()
    }

    #[inline]
    pub fn to_untyped_arena_ptr_bytes(self) -> [u8; 8] {
        self.into_bytes()
    }

    #[inline]
    pub fn to_untyped_arena_ptr(self) -> Option<UntypedArenaPtr> {
        match self.get_tag() {
            HeapCellValueTag::Cons => Some(UntypedArenaPtr::from_bytes(
                self.to_untyped_arena_ptr_bytes(),
            )),
            _ => None,
        }
    }

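    // The mark (`m`) and forwarding (`f`) bits are used during heap
    // traversal (e.g. by the garbage collector). For `Cons` cells they live
    // inside the `ConsPtr` layout, so the accessors below reinterpret the
    // cell before reading or writing them.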
    #[inline(always)]
    pub fn get_forwarding_bit(self) -> bool {
        match self.get_tag() {
            HeapCellValueTag::Cons => ConsPtr::from_bytes(self.into_bytes()).f(),
            _ => self.f(),
        }
    }

    #[inline(always)]
    pub fn set_forwarding_bit(&mut self, f: bool) {
        match self.get_tag() {
            HeapCellValueTag::Cons => {
                let value = ConsPtr::from_bytes(self.into_bytes()).with_f(f);
                *self = HeapCellValue::from_bytes(value.into_bytes());
            }
            _ => self.set_f(f),
        }
    }

    #[inline(always)]
    pub fn get_mark_bit(self) -> bool {
        match self.get_tag() {
            HeapCellValueTag::Cons => ConsPtr::from_bytes(self.into_bytes()).m(),
            _ => self.m(),
        }
    }

    #[inline(always)]
    pub fn set_mark_bit(&mut self, m: bool) {
        match self.get_tag() {
            HeapCellValueTag::Cons => {
                let value = ConsPtr::from_bytes(self.into_bytes()).with_m(m);
                *self = HeapCellValue::from_bytes(value.into_bytes());
            }
            _ => self.set_m(m),
        }
    }

    pub fn order_category(self, heap: &[HeapCellValue]) -> Option<TermOrderCategory> {
        match Number::try_from(self).ok() {
            Some(Number::Integer(_)) | Some(Number::Fixnum(_)) | Some(Number::Rational(_)) => {
                Some(TermOrderCategory::Integer)
            }
            Some(Number::Float(_)) => Some(TermOrderCategory::FloatingPoint),
            None => match self.get_tag() {
                HeapCellValueTag::Var | HeapCellValueTag::StackVar | HeapCellValueTag::AttrVar => {
                    Some(TermOrderCategory::Variable)
                }
                HeapCellValueTag::Char => Some(TermOrderCategory::Atom),
                HeapCellValueTag::Atom => Some(if cell_as_atom_cell!(self).get_arity() > 0 {
                    TermOrderCategory::Compound
                } else {
                    TermOrderCategory::Atom
                }),
                HeapCellValueTag::Lis | HeapCellValueTag::PStrLoc | HeapCellValueTag::CStr => {
                    Some(TermOrderCategory::Compound)
                }
                HeapCellValueTag::Str => {
                    let value = heap[self.get_value() as usize];
                    let arity = cell_as_atom_cell!(value).get_arity();

                    if arity == 0 {
                        Some(TermOrderCategory::Atom)
                    } else {
                        Some(TermOrderCategory::Compound)
                    }
                }
                _ => None,
            },
        }
    }

    #[inline(always)]
    pub fn is_protected(self, e: usize) -> bool {
        read_heap_cell!(self,
            (HeapCellValueTag::StackVar, s) => {
                s < e
            }
            _ => {
                true
            }
        )
    }
}

const_assert!(mem::size_of::<HeapCellValue>() == 8);

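/// A type-erased pointer to an arena allocation: a 61-bit address of an
/// `ArenaHeader` plus a mark bit. The concrete allocation type is recovered
/// from the header's `ArenaHeaderTag` (see `get_tag`).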
#[bitfield]
#[repr(u64)]
#[derive(Copy, Clone, Debug, Hash, PartialEq, Eq)]
pub struct UntypedArenaPtr {
    #[allow(unused)]
    ptr: B61,
    m: bool,
    #[allow(unused)]
    padding: B2,
}

impl UntypedArenaPtr {
    #[inline(always)]
    pub fn build_with(ptr: usize) -> Self {
        UntypedArenaPtr::new().with_ptr(ptr as u64)
    }
}

const_assert!(mem::size_of::<UntypedArenaPtr>() == 8);

impl From<*const ArenaHeader> for UntypedArenaPtr {
    #[inline]
    fn from(ptr: *const ArenaHeader) -> UntypedArenaPtr {
        UntypedArenaPtr::build_with(ptr as usize)
    }
}

impl From<*const IndexPtr> for UntypedArenaPtr {
    #[inline]
    fn from(ptr: *const IndexPtr) -> UntypedArenaPtr {
        UntypedArenaPtr::build_with(ptr as usize)
    }
}

impl From<UntypedArenaPtr> for *const ArenaHeader {
    #[inline]
    fn from(ptr: UntypedArenaPtr) -> *const ArenaHeader {
        ptr.get_ptr() as *const ArenaHeader
    }
}

impl UntypedArenaPtr {
    #[inline]
    pub fn set_mark_bit(&mut self, m: bool) {
        self.set_m(m);
    }

    #[cfg(target_pointer_width = "32")]
    #[inline]
    pub fn get_ptr(self) -> *const u8 {
        let bytes = self.into_bytes();
        let raw_ptr_bytes = [bytes[0], bytes[1], bytes[2], bytes[3]];
        unsafe { mem::transmute(raw_ptr_bytes) }
    }

    #[cfg(target_pointer_width = "64")]
    #[inline]
    pub fn get_ptr(self) -> *const u8 {
        self.ptr() as *const u8
    }

    #[inline]
    pub fn get_tag(self) -> ArenaHeaderTag {
        unsafe {
            let header = *(self.get_ptr() as *const ArenaHeader);
            header.get_tag()
        }
    }

    #[inline]
    pub fn payload_offset(self) -> *const u8 {
        unsafe { self.get_ptr().add(mem::size_of::<ArenaHeader>()) }
    }

    #[inline]
    pub fn get_mark_bit(self) -> bool {
        self.m()
    }
}

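// Adding to or subtracting from a cell shifts its payload by the given
// amount, but only for tags whose payload is a heap address (Str, Lis,
// PStrOffset, PStrLoc, Var, AttrVar); constants pass through unchanged.
// Presumably this is relied on when relocating heap sections, since only
// address-bearing cells need adjustment.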
impl Add<usize> for HeapCellValue {
    type Output = HeapCellValue;

    fn add(self, rhs: usize) -> Self::Output {
        match self.get_tag() {
            tag @ HeapCellValueTag::Str
            | tag @ HeapCellValueTag::Lis
            | tag @ HeapCellValueTag::PStrOffset
            | tag @ HeapCellValueTag::PStrLoc
            | tag @ HeapCellValueTag::Var
            | tag @ HeapCellValueTag::AttrVar => {
                HeapCellValue::build_with(tag, (self.get_value() as usize + rhs) as u64)
            }
            _ => self,
        }
    }
}

impl Sub<usize> for HeapCellValue {
    type Output = HeapCellValue;

    fn sub(self, rhs: usize) -> Self::Output {
        match self.get_tag() {
            tag @ HeapCellValueTag::Str
            | tag @ HeapCellValueTag::Lis
            | tag @ HeapCellValueTag::PStrOffset
            | tag @ HeapCellValueTag::PStrLoc
            | tag @ HeapCellValueTag::Var
            | tag @ HeapCellValueTag::AttrVar => {
                HeapCellValue::build_with(tag, (self.get_value() as usize - rhs) as u64)
            }
            _ => self,
        }
    }
}

impl SubAssign<usize> for HeapCellValue {
    #[inline(always)]
    fn sub_assign(&mut self, rhs: usize) {
        *self = *self - rhs;
    }
}

impl Sub<i64> for HeapCellValue {
    type Output = HeapCellValue;

    fn sub(self, rhs: i64) -> Self::Output {
        if rhs < 0 {
            match self.get_tag() {
                tag @ HeapCellValueTag::Str
                | tag @ HeapCellValueTag::Lis
                | tag @ HeapCellValueTag::PStrOffset
                | tag @ HeapCellValueTag::PStrLoc
                | tag @ HeapCellValueTag::Var
                | tag @ HeapCellValueTag::AttrVar => {
                    HeapCellValue::build_with(tag, self.get_value() + rhs.unsigned_abs())
                }
                _ => self,
            }
        } else {
            self.sub(rhs as usize)
        }
    }
}
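
// A minimal usage sketch, not part of the original module: it exercises only
// the public constructors and accessors defined above. The module name
// `heap_cell_value_sketch` and the chosen heap index are illustrative.
#[cfg(test)]
mod heap_cell_value_sketch {
    use super::*;

    #[test]
    fn var_cell_round_trip() {
        // build an unbound heap variable cell pointing at heap index 42.
        let mut cell = HeapCellValue::build_with(HeapCellValueTag::Var, 42);

        assert_eq!(cell.get_tag(), HeapCellValueTag::Var);
        assert_eq!(cell.get_value(), 42);
        assert!(cell.is_var());
        assert_eq!(cell.as_var(), Some(Ref::heap_cell(42)));

        // the mark bit starts clear and toggles independently of the payload.
        assert!(!cell.get_mark_bit());
        cell.set_mark_bit(true);
        assert!(cell.get_mark_bit());
        assert_eq!(cell.get_value(), 42);
    }
}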