//! The proof-checking [`Context`] and its parts: [`Stack`], [`Heap`], and the
//! backing [`Store`].

pub mod heap;
pub mod stack;
pub mod store;

use core::fmt::{self, Display, Formatter};

use crate::error::Kind;
use crate::table::Sort;
use crate::KResult;
use crate::Table;
use crate::Var;
pub use heap::Heap;
pub use stack::Stack;
pub use store::{PackedPtr, Ptr, Store};

/// Mutable working state used while checking proofs: the proof and unify
/// stacks and heaps, the hypothesis stack, the backing [`Store`], and
/// `next_bv`, the bit reserved for the next bound variable.
#[derive(Debug, Default, Clone, Eq, PartialEq, Ord, PartialOrd, Hash)]
pub struct Context<S: Store> {
    pub(crate) proof_stack: Stack,
    pub(crate) proof_heap: Heap,
    pub(crate) unify_stack: Stack,
    pub(crate) unify_heap: Heap,
    pub(crate) hyp_stack: Stack,
    pub(crate) store: S,
    pub(crate) next_bv: u64,
}

impl<S: Store> Context<S> {
    pub fn proof_stack(&self) -> &Stack {
        &self.proof_stack
    }

    pub fn proof_heap(&self) -> &Heap {
        &self.proof_heap
    }

    pub fn unify_stack(&self) -> &Stack {
        &self.unify_stack
    }

    pub fn unify_heap(&self) -> &Heap {
        &self.unify_heap
    }

    pub fn store(&self) -> &S {
        &self.store
    }

    pub fn hyp_stack(&self) -> &Stack {
        &self.hyp_stack
    }

    /// Clears every stack and heap while leaving the store untouched.
    pub fn clear_except_store(&mut self) {
        self.proof_stack.clear();
        self.proof_heap.clear();
        self.unify_stack.clear();
        self.unify_heap.clear();
        self.hyp_stack.clear();
    }

    /// Checks a single binder against the table: its sort index must be below
    /// `current_sort` and name a sort in the table, a bound variable must not
    /// live in a strict sort and must depend on exactly the fresh bit in `bv`
    /// (which is then advanced), and a regular variable may only depend on
    /// bits already allocated to earlier bound variables.
    pub fn binder_check<T: Table>(
        table: &T,
        ty: &T::Var,
        current_sort: u8,
        bv: &mut u64,
    ) -> KResult {
        let idx = ty.sort_idx();

        if idx >= current_sort {
            return Err(Kind::SortOutOfRange);
        }

        let sort = table.sort(idx).ok_or(Kind::InvalidSort)?;
        let deps = ty.dependencies();

        if ty.is_bound() {
            if sort.is_strict() {
                return Err(Kind::SortIsStrict);
            }

            // A bound variable owns exactly the next free bit.
            if deps != *bv {
                return Err(Kind::BindDep);
            }

            *bv *= 2;
        } else if (deps & !(*bv - 1)) != 0 {
            // A regular variable may only depend on already-allocated bits.
            return Err(Kind::BindDep);
        }

        Ok(())
    }

    /// Runs `binder_check` on each binder in order, allocates each one as a
    /// variable in the store, and pushes the resulting pointer onto the proof
    /// heap. On success, `next_bv` holds the bit for the next bound variable.
    pub fn allocate_binders<T: Table<Var = S::Var>>(
        &mut self,
        table: &T,
        current_sort: u8,
        binders: &[T::Var],
    ) -> KResult {
        let mut next_bv = 1;

        for (i, ty) in binders.iter().enumerate() {
            Self::binder_check(table, ty, current_sort, &mut next_bv)?;

            let ptr = self.store.alloc_var(*ty, i as u16);
            self.proof_heap.push(ptr);
        }

        self.next_bv = next_bv;

        Ok(())
    }
}

impl<S: Store> Display for Context<S> {
    fn fmt(&self, f: &mut Formatter) -> fmt::Result {
        let display = self.proof_stack.to_display(&self.store);
        writeln!(f, " Stack:\n{}", display)?;
        let display = self.unify_stack.to_display(&self.store);
        writeln!(f, " UStack:\n{}", display)?;
        let display = self.hyp_stack.to_display(&self.store);
        writeln!(f, " HStack:\n{}", display)
    }
}
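
// A minimal, self-contained sketch (not part of the original module) of the
// bound-variable bitmask discipline that `binder_check` enforces, written
// against plain `u64` masks so it needs none of the crate's `Table`/`Var`
// types. `bv` is always the single bit reserved for the next bound variable.
#[cfg(test)]
mod bv_mask_sketch {
    /// Mirrors the two dependency checks from `binder_check` on raw masks:
    /// returns `true` when the binder would be accepted.
    fn accepts(deps: u64, is_bound: bool, bv: &mut u64) -> bool {
        if is_bound {
            // A bound variable must depend on exactly its own fresh bit.
            if deps != *bv {
                return false;
            }
            *bv *= 2; // hand the next bit to the next bound variable
            true
        } else {
            // A regular variable may only depend on bits already handed out.
            (deps & !(*bv - 1)) == 0
        }
    }

    #[test]
    fn bound_variables_take_consecutive_bits() {
        let mut bv = 1u64;
        assert!(accepts(0b001, true, &mut bv)); // first bound var owns bit 0
        assert!(accepts(0b010, true, &mut bv)); // second bound var owns bit 1
        assert!(accepts(0b011, false, &mut bv)); // regular var may use both
        assert!(!accepts(0b100, false, &mut bv)); // bit 2 was never allocated
        assert!(!accepts(0b001, true, &mut bv)); // bound var reusing an old bit
    }
}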