/*!
Rust bindings to [Lean 4](https://github.com/leanprover/lean4)'s C API.

Functions and comments are manually translated from those in the [`lean.h` header](https://github.com/leanprover/lean4/blob/master/src/include/lean/lean.h) provided with Lean 4.
*/
#![allow(non_upper_case_globals)]
#![allow(non_camel_case_types)]
#![allow(non_snake_case)]
#![allow(clippy::missing_safety_doc)]
#![no_std]

use core::ffi::*;
use core::sync::atomic::{AtomicI32, AtomicI64, Ordering};
use memoffset::raw_field;
use static_assertions::const_assert;

pub mod alloc;
pub mod array;
pub mod closure;
pub mod constructor;
pub mod dbg;
pub mod external;
pub mod init;
pub mod int;
pub mod io;
pub mod libuv;
pub mod nat;
pub mod panic;
pub mod primitive;
pub mod sarray;
pub mod share_common;
pub mod string;
pub mod task;
pub mod thread;
pub mod thunk;

pub use array::*;
pub use closure::*;
pub use constructor::*;
pub use dbg::*;
pub use external::*;
pub use init::*;
pub use int::*;
pub use io::*;
pub use libuv::*;
pub use nat::*;
pub use primitive::*;
pub use sarray::*;
pub use share_common::*;
pub use string::*;
pub use task::*;
pub use thread::*;
pub use thunk::*;

pub const LEAN_SMALL_ALLOCATOR: bool = cfg!(feature = "small_allocator");
pub const LEAN_CLOSURE_MAX_ARGS: u32 = 16;
pub const LEAN_OBJECT_SIZE_DELTA: usize = 8;
pub const LEAN_MAX_SMALL_OBJECT_SIZE: u32 = 4096;
pub const LeanMaxCtorTag: u8 = 244;
pub const LeanClosure: u8 = 245;
pub const LeanArray: u8 = 246;
pub const LeanStructArray: u8 = 247;
pub const LeanScalarArray: u8 = 248;
pub const LeanString: u8 = 249;
pub const LeanMPZ: u8 = 250;
pub const LeanThunk: u8 = 251;
pub const LeanTask: u8 = 252;
pub const LeanRef: u8 = 253;
pub const LeanExternal: u8 = 254;
pub const LeanReserved: u8 = 255;
pub const LEAN_MAX_CTOR_FIELDS: u32 = 256;
pub const LEAN_MAX_CTOR_SCALARS_SIZE: u32 = 1024;

const_assert! {
    layout_compat::<c_int, i32>() ||
    layout_compat::<c_int, i64>()
}

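/// Whether types `A` and `B` have the same size and alignment.
///
/// # Examples
/// ```rust
/// # use lean_sys::*;
/// assert!(layout_compat::<u32, i32>());
/// assert!(!layout_compat::<u8, u16>());
/// ```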
#[inline(always)]
pub const fn layout_compat<A, B>() -> bool {
    core::mem::size_of::<A>() == core::mem::size_of::<B>()
        && core::mem::align_of::<A>() == core::mem::align_of::<B>()
}

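/// Perform an atomic load of a `c_int` through a raw pointer, dispatching to the
/// atomic integer type of matching width.
///
/// # Examples
/// A minimal sketch reading a local value back through its address:
/// ```rust
/// # use lean_sys::*;
/// # use core::ffi::c_int;
/// # use core::sync::atomic::Ordering;
/// let x: c_int = 7;
/// assert_eq!(unsafe { c_int_load(&x, Ordering::Relaxed) }, 7);
/// ```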
#[inline(always)]
pub unsafe fn c_int_load(p: *const c_int, order: Ordering) -> c_int {
    match c_int::BITS {
        32 => (*(p as *const AtomicI32)).load(order) as c_int,
        64 => (*(p as *const AtomicI64)).load(order) as c_int,
        _ => unreachable!(),
    }
}

mod types;
pub use types::*;

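/// Whether `tag` is one of the array or string tags ([`LeanArray`], [`LeanStructArray`],
/// [`LeanScalarArray`], [`LeanString`]).
///
/// # Examples
/// ```rust
/// # use lean_sys::*;
/// assert!(lean_is_big_object_tag(LeanArray));
/// assert!(lean_is_big_object_tag(LeanString));
/// assert!(!lean_is_big_object_tag(LeanClosure));
/// ```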
#[inline]
pub fn lean_is_big_object_tag(tag: u8) -> bool {
    matches!(
        tag,
        LeanArray | LeanStructArray | LeanScalarArray | LeanString
    )
}

/// Whether this Lean object is a scalar
///
/// # Examples
/// ```rust
/// # use lean_sys::*;
/// unsafe { lean_initialize_runtime_module(); }
/// let arr = unsafe { lean_mk_empty_array() };
/// assert!(lean_is_scalar(lean_box(5)));
/// assert!(!lean_is_scalar(arr));
/// ```
#[inline(always)]
pub fn lean_is_scalar(obj: *const lean_object) -> bool {
    (obj as usize & 1) == 1
}

/// Box a `usize` into a `lean_object`
///
/// # Examples
/// ```rust
/// # use lean_sys::*;
/// for i in [0, 5, 658948, 1 << (usize::BITS - 2)] {
///     assert_eq!(lean_unbox(lean_box(i)), i);
/// }
/// ```
#[inline(always)]
pub const fn lean_box(n: usize) -> *mut lean_object {
    ((n << 1) | 1) as *mut lean_object
}

/// Unbox a `lean_object` into a `usize`
///
/// See [`lean_box`] for usage examples.
#[inline(always)]
pub fn lean_unbox(o: *const lean_object) -> usize {
    o as usize >> 1
}

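/// Round `v` up to the next multiple of `a`.
///
/// # Examples
/// ```rust
/// # use lean_sys::*;
/// assert_eq!(lean_align(13, 8), 16);
/// assert_eq!(lean_align(16, 8), 16);
/// assert_eq!(lean_align(1, 8), 8);
/// ```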
#[inline]
pub const fn lean_align(v: usize, a: usize) -> usize {
    (v / a).wrapping_mul(a) + a.wrapping_mul((v % a != 0) as usize)
}

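/// Map an object size (already aligned to [`LEAN_OBJECT_SIZE_DELTA`]) to its slot index
/// in the small-object allocator.
///
/// # Examples
/// ```rust
/// # use lean_sys::*;
/// assert_eq!(lean_get_slot_idx(8), 0);
/// assert_eq!(lean_get_slot_idx(16), 1);
/// assert_eq!(lean_get_slot_idx(32), 3);
/// ```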
#[inline]
pub fn lean_get_slot_idx(sz: c_uint) -> c_uint {
    debug_assert_ne!(sz, 0);
    debug_assert_eq!(lean_align(sz as usize, LEAN_OBJECT_SIZE_DELTA), sz as usize);
    sz / (LEAN_OBJECT_SIZE_DELTA as c_uint) - 1
}

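/// Allocate `sz` bytes for a Lean object, using Lean's small-object allocator when the
/// `small_allocator` feature is enabled and a `malloc`-backed fallback otherwise.
///
/// # Examples
/// A minimal sketch, assuming the runtime has already been initialized on this thread:
/// ```rust
/// # use lean_sys::*;
/// unsafe { lean_initialize_runtime_module(); }
/// let o = unsafe { lean_alloc_small_object(24) };
/// assert!(unsafe { lean_small_object_size(o) } >= 24);
/// unsafe { lean_free_small_object(o) };
/// ```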
#[inline]
pub unsafe fn lean_alloc_small_object(sz: c_uint) -> *mut lean_object {
    if LEAN_SMALL_ALLOCATOR {
        let sz = lean_align(sz as usize, LEAN_OBJECT_SIZE_DELTA) as c_uint;
        let slot_idx = lean_get_slot_idx(sz);
        debug_assert!(sz <= LEAN_MAX_SMALL_OBJECT_SIZE);
        lean_alloc_small(sz, slot_idx) as *mut _
    } else {
        lean_inc_heartbeat();
        let mem = libc::malloc(core::mem::size_of::<usize>() + sz as usize) as *mut usize;
        if mem.is_null() {
            lean_internal_panic_out_of_memory()
        }
        *mem = sz as usize;
        mem.add(1) as *mut _
    }
}

#[inline]
pub unsafe fn lean_alloc_ctor_memory(sz: c_uint) -> *mut lean_object {
    if LEAN_SMALL_ALLOCATOR {
        let sz1 = lean_align(sz as usize, LEAN_OBJECT_SIZE_DELTA) as c_uint;
        let slot_idx = lean_get_slot_idx(sz1);
        debug_assert!(sz1 <= LEAN_MAX_SMALL_OBJECT_SIZE);
        let r = lean_alloc_small(sz1, slot_idx);
        if sz1 > sz {
            /* Initialize last word.
            In our runtime `lean_object_byte_size` is always
            a multiple of the machine word size for constructors.

            By setting the last word to 0, we make sure the sharing
            maximizer procedures at `maxsharing.cpp` and `compact.cpp` are
            not affected by uninitialized data at the (sz1 - sz) last bytes.
            Otherwise, we may mistakenly assume that two structurally equal
            objects are not identical because of this uninitialized memory. */
            let end = (r as *mut u8).add(sz1 as usize) as *mut usize;
            *end.sub(1) = 0;
        }
        r as *mut lean_object
    } else {
        lean_alloc_small_object(sz)
    }
}

#[inline(always)]
pub unsafe fn lean_small_object_size(o: *mut lean_object) -> c_uint {
    if LEAN_SMALL_ALLOCATOR {
        lean_small_mem_size(o as *mut _)
    } else {
        *(o as *mut usize).sub(1) as c_uint
    }
}

#[inline(always)]
pub unsafe fn lean_free_small_object(o: *mut lean_object) {
    if LEAN_SMALL_ALLOCATOR {
        lean_free_small(o as *mut _)
    } else {
        libc::free((o as *mut usize).sub(1) as *mut _)
    }
}

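/// Read the tag byte from the header of a heap-allocated Lean object.
///
/// # Examples
/// A minimal sketch, assuming the runtime has been initialized:
/// ```rust
/// # use lean_sys::*;
/// unsafe { lean_initialize_runtime_module(); }
/// let arr = unsafe { lean_mk_empty_array() };
/// assert_eq!(unsafe { lean_ptr_tag(arr) }, LeanArray);
/// unsafe { lean_dec(arr); }
/// ```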
#[inline(always)]
pub unsafe fn lean_ptr_tag(o: *const lean_object) -> u8 {
    *raw_field!(o, lean_object, m_tag)
}

#[inline(always)]
pub unsafe fn lean_ptr_other(o: *const lean_object) -> c_uint {
    *raw_field!(o, lean_object, m_other) as c_uint
}

/// Whether an object is multi-threaded
#[inline(always)]
pub unsafe fn lean_is_mt(o: *const lean_object) -> bool {
    relaxed_rc_load(o) < 0
}

/// Whether an object is single-threaded
#[inline(always)]
pub unsafe fn lean_is_st(o: *const lean_object) -> bool {
    relaxed_rc_load(o) > 0
}

/// Perform a relaxed load of the reference counter of `o`, which must be a pointer to a valid [`lean_object`]
#[inline(always)]
pub unsafe fn relaxed_rc_load(o: *const lean_object) -> c_int {
    c_int_load(lean_get_rc_mt_addr(o as *mut _), Ordering::Relaxed)
}

/* We never update the reference counter of objects stored in compact regions and allocated at initialization time. */
#[inline(always)]
pub unsafe fn lean_is_persistent(o: *const lean_object) -> bool {
    relaxed_rc_load(o) == 0
}

/// Whether an object is reference counted
#[inline(always)]
pub unsafe fn lean_has_rc(o: *const lean_object) -> bool {
    relaxed_rc_load(o) != 0
}

#[inline(always)]
pub unsafe fn lean_get_rc_mt_addr(o: *mut lean_object) -> *mut c_int {
    raw_field!(o, lean_object, m_rc) as *mut c_int
}

#[inline]
pub unsafe fn lean_inc_ref(o: *mut lean_object) {
    if lean_is_st(o) {
        *(raw_field!(o, lean_object, m_rc) as *mut c_int) += 1
    } else if *raw_field!(o, lean_object, m_rc) != 0 {
        lean_inc_ref_cold(o)
    }
}

#[inline]
pub unsafe fn lean_inc_ref_n(o: *mut lean_object, n: usize) {
    if lean_is_st(o) {
        *(raw_field!(o, lean_object, m_rc) as *mut c_int) += n as c_int
    } else if *raw_field!(o, lean_object, m_rc) != 0 {
        lean_inc_ref_n_cold(o, n as c_uint)
    }
}

#[inline]
pub unsafe fn lean_dec_ref(o: *mut lean_object) {
    if relaxed_rc_load(o) > 1 {
        *(raw_field!(o, lean_object, m_rc) as *mut c_int) -= 1
    } else if *raw_field!(o, lean_object, m_rc) != 0 {
        lean_dec_ref_cold(o)
    }
}

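/// Increment the reference count of `o`, doing nothing if `o` is a boxed scalar.
///
/// # Examples
/// Boxed scalars are not heap allocated, so incrementing and decrementing them is a
/// no-op and needs no runtime initialization:
/// ```rust
/// # use lean_sys::*;
/// unsafe { lean_inc(lean_box(5)) };
/// unsafe { lean_dec(lean_box(5)) };
/// ```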
#[inline]
pub unsafe fn lean_inc(o: *mut lean_object) {
    if !lean_is_scalar(o) {
        lean_inc_ref(o)
    }
}

#[inline]
pub unsafe fn lean_inc_n(o: *mut lean_object, n: usize) {
    if !lean_is_scalar(o) {
        lean_inc_ref_n(o, n)
    }
}

#[inline]
pub unsafe fn lean_dec(o: *mut lean_object) {
    if !lean_is_scalar(o) {
        lean_dec_ref(o)
    }
}

#[inline(always)]
pub unsafe fn lean_is_ctor(o: *const lean_object) -> bool {
    lean_ptr_tag(o) <= LeanMaxCtorTag
}

#[inline(always)]
pub unsafe fn lean_is_closure(o: *const lean_object) -> bool {
    lean_ptr_tag(o) == LeanClosure
}

#[inline(always)]
pub unsafe fn lean_is_array(o: *const lean_object) -> bool {
    lean_ptr_tag(o) == LeanArray
}

#[inline(always)]
pub unsafe fn lean_is_sarray(o: *const lean_object) -> bool {
    lean_ptr_tag(o) == LeanScalarArray
}

#[inline(always)]
pub unsafe fn lean_is_string(o: *const lean_object) -> bool {
    lean_ptr_tag(o) == LeanString
}

#[inline(always)]
pub unsafe fn lean_is_mpz(o: *const lean_object) -> bool {
    lean_ptr_tag(o) == LeanMPZ
}

#[inline(always)]
pub unsafe fn lean_is_thunk(o: *const lean_object) -> bool {
    lean_ptr_tag(o) == LeanThunk
}

#[inline(always)]
pub unsafe fn lean_is_task(o: *const lean_object) -> bool {
    lean_ptr_tag(o) == LeanTask
}

#[inline(always)]
pub unsafe fn lean_is_external(o: *const lean_object) -> bool {
    lean_ptr_tag(o) == LeanExternal
}

#[inline(always)]
pub unsafe fn lean_is_ref(o: *const lean_object) -> bool {
    lean_ptr_tag(o) == LeanRef
}

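/// The tag of `o`: the unboxed value itself for boxed scalars, otherwise the header tag byte.
///
/// # Examples
/// ```rust
/// # use lean_sys::*;
/// assert_eq!(unsafe { lean_obj_tag(lean_box(2)) }, 2);
/// ```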
#[inline(always)]
pub unsafe fn lean_obj_tag(o: *const lean_object) -> c_uint {
    if lean_is_scalar(o) {
        lean_unbox(o) as c_uint
    } else {
        lean_ptr_tag(o) as c_uint
    }
}

#[inline(always)]
pub unsafe fn lean_to_ctor(o: *mut lean_object) -> *mut lean_ctor_object {
    debug_assert!(lean_is_ctor(o));
    o as *mut lean_ctor_object
}

#[inline(always)]
pub unsafe fn lean_to_closure(o: *mut lean_object) -> *mut lean_closure_object {
    debug_assert!(lean_is_closure(o));
    o as *mut lean_closure_object
}

#[inline(always)]
pub unsafe fn lean_to_array(o: *mut lean_object) -> *mut lean_array_object {
    debug_assert!(lean_is_array(o));
    o as *mut lean_array_object
}

#[inline(always)]
pub unsafe fn lean_to_sarray(o: *mut lean_object) -> *mut lean_sarray_object {
    debug_assert!(lean_is_sarray(o));
    o as *mut lean_sarray_object
}

#[inline(always)]
pub unsafe fn lean_to_string(o: *mut lean_object) -> *mut lean_string_object {
    debug_assert!(lean_is_string(o));
    o as *mut lean_string_object
}

#[inline(always)]
pub unsafe fn lean_to_thunk(o: *mut lean_object) -> *mut lean_thunk_object {
    debug_assert!(lean_is_thunk(o));
    o as *mut lean_thunk_object
}

#[inline(always)]
pub unsafe fn lean_to_task(o: *mut lean_object) -> *mut lean_task_object {
    debug_assert!(lean_is_task(o));
    o as *mut lean_task_object
}

#[inline(always)]
pub unsafe fn lean_to_ref(o: *mut lean_object) -> *mut lean_ref_object {
    debug_assert!(lean_is_ref(o));
    o as *mut lean_ref_object
}

#[inline(always)]
pub unsafe fn lean_to_external(o: *mut lean_object) -> *mut lean_external_object {
    debug_assert!(lean_is_external(o));
    o as *mut lean_external_object
}

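/// Whether the caller holds the only reference to `o` (single-threaded and reference count exactly 1).
///
/// # Examples
/// A minimal sketch, assuming the runtime has been initialized; the freshly created array is
/// exclusive until a second reference is taken:
/// ```rust
/// # use lean_sys::*;
/// unsafe { lean_initialize_runtime_module(); }
/// let arr = unsafe { lean_mk_empty_array() };
/// assert!(unsafe { lean_is_exclusive(arr) });
/// unsafe { lean_inc(arr) };
/// assert!(unsafe { lean_is_shared(arr) });
/// unsafe { lean_dec(arr) };
/// unsafe { lean_dec(arr) };
/// ```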
#[inline]
pub unsafe fn lean_is_exclusive(o: *mut lean_object) -> bool {
    //TODO: likely... or, why not just relaxed_rc_load(o) == 1?
    if lean_is_st(o) {
        *raw_field!(o, lean_object, m_rc) == 1
    } else {
        false
    }
}

#[inline]
pub unsafe fn lean_is_exclusive_obj(o: *mut lean_object) -> u8 {
    lean_is_exclusive(o) as u8
}

#[inline]
pub unsafe fn lean_is_shared(o: *mut lean_object) -> bool {
    //TODO: likely... or, why not just relaxed_rc_load(o) > 1?
    if lean_is_st(o) {
        *raw_field!(o, lean_object, m_rc) > 1
    } else {
        false
    }
}

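/// Initialize the header of a freshly allocated, heap-backed (single-threaded) object:
/// reference count 1, the given tag and "other" byte, and a zero `m_cs_sz` field.
///
/// # Examples
/// A minimal sketch, assuming the runtime has been initialized; the 16-byte allocation is
/// illustrative (room for the 8-byte header plus one word) and not tied to any particular object kind:
/// ```rust
/// # use lean_sys::*;
/// unsafe { lean_initialize_runtime_module(); }
/// let o = unsafe { lean_alloc_small_object(16) };
/// unsafe { lean_set_st_header(o, LeanRef as core::ffi::c_uint, 0) };
/// assert_eq!(unsafe { lean_ptr_tag(o) }, LeanRef);
/// assert!(unsafe { lean_is_exclusive(o) });
/// unsafe { lean_free_small_object(o) };
/// ```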
#[inline]
pub unsafe fn lean_set_st_header(o: *mut lean_object, tag: c_uint, other: c_uint) {
    *lean_get_rc_mt_addr(o) = 1;
    (raw_field!(o, lean_object, m_cs_sz) as *mut u16).write(0);
    (raw_field!(o, lean_object, m_other) as *mut u8).write(other as u8);
    (raw_field!(o, lean_object, m_tag) as *mut u8).write(tag as u8);
}

/** Remark: we don't need a reference counter for objects that are not stored in the heap.
Thus, we use the area to store the object size for small objects. */
#[inline]
pub unsafe fn lean_set_non_heap_header(o: *mut lean_object, sz: usize, tag: c_uint, other: c_uint) {
    debug_assert!(sz > 0);
    debug_assert!(sz < (1 << 16));
    debug_assert!(sz == 1 || !lean_is_big_object_tag(tag as u8));
    *lean_get_rc_mt_addr(o) = 0;
    (raw_field!(o, lean_object, m_cs_sz) as *mut u16).write(sz as u16);
    (raw_field!(o, lean_object, m_other) as *mut u8).write(other as u8);
    (raw_field!(o, lean_object, m_tag) as *mut u8).write(tag as u8);
}

/** `lean_set_non_heap_header` for (potentially) big objects such as arrays and strings. */
#[inline(always)]
pub unsafe fn lean_set_non_heap_header_for_big(o: *mut lean_object, tag: c_uint, other: c_uint) {
    lean_set_non_heap_header(o, 1, tag, other)
}

extern "C" {
    pub fn lean_set_exit_on_panic(flag: bool);
    /// Enable/disable panic messages
    pub fn lean_set_panic_messages(flag: bool);
    pub fn lean_panic_fn(default_val: *mut lean_object, msg: *mut lean_object);
    pub fn lean_internal_panic(msg: *const u8) -> !;
    pub fn lean_internal_panic_out_of_memory() -> !;
    pub fn lean_internal_panic_unreachable() -> !;
    pub fn lean_internal_panic_rc_overflow() -> !;
    pub fn lean_alloc_small(sz: c_uint, slot_idx: c_uint) -> *mut c_void;
    pub fn lean_free_small(p: *mut c_void);
    pub fn lean_small_mem_size(p: *mut c_void) -> c_uint;
    pub fn lean_inc_heartbeat();
    pub fn lean_alloc_object(sz: usize) -> *mut lean_object;
    pub fn lean_free_object(o: *mut lean_object);
    /** The object size may be slightly bigger for constructor objects.
    The runtime does not track the size of the scalar size area.
    All constructor objects are "small", and allocated into pages.
    We retrieve their size by accessing the page header. The size of
    small objects is a multiple of `LEAN_OBJECT_SIZE_DELTA` */
    pub fn lean_object_byte_size(o: *mut lean_object) -> usize;
    /** Returns the size of the salient part of an object's storage,
    i.e. the parts that contribute to the value representation;
    padding or unused capacity is excluded. Operations that read
    from an object's storage must only access these parts, since
    the non-salient parts may not be initialized. */
    pub fn lean_object_data_byte_size(o: *mut lean_object) -> usize;
    #[cold]
    pub fn lean_inc_ref_cold(o: *mut lean_object);
    #[cold]
    pub fn lean_inc_ref_n_cold(o: *mut lean_object, n: c_uint);
    #[cold]
    pub fn lean_dec_ref_cold(o: *mut lean_object);
    pub fn lean_mark_mt(o: *mut lean_object);
    pub fn lean_mark_persistent(o: *mut lean_object);
}