torg_mask/lib.rs
//! LLM logit masking for TØR-G constrained decoding.
//!
//! This crate provides utilities for integrating TØR-G's
//! `valid_next_tokens()` with LLM inference engines.
//!
//! # Overview
//!
//! TØR-G graphs are constructed by feeding tokens to a `Builder`.
//! At each step, only certain tokens are valid (returned by
//! `Builder::valid_next_tokens()`). This crate provides:
//!
//! - **Token mapping**: Bidirectional mapping between TØR-G tokens and LLM vocab IDs
//! - **Logit masking**: Efficiently mask invalid tokens in logit vectors
//! - **Constrained decoding**: High-level orchestrator for decode loops
//!
//! # Example
//!
//! ```
//! use torg_mask::{ConstrainedDecoder, MaskGenerator, TokenMapping};
//!
//! // Create a token mapping (in production, map to actual LLM vocab IDs)
//! let mapping = TokenMapping::sequential(256);
//! let generator = MaskGenerator::new(mapping.clone(), 1000);
//! let mut decoder = ConstrainedDecoder::new(generator);
//!
//! // Simulate a decode loop
//! let token_ids = [
//!     5,  // InputDecl
//!     9,  // Id(0)
//!     3,  // NodeStart
//!     10, // Id(1)
//!     0,  // Or
//!     9,  // Id(0)
//!     7,  // True
//!     4,  // NodeEnd
//!     6,  // OutputDecl
//!     10, // Id(1)
//! ];
//!
//! for &id in &token_ids {
//!     let mask = decoder.next_mask();
//!     assert!(mask.is_allowed(id));
//!     decoder.feed_token(id).unwrap();
//! }
//!
//! let graph = decoder.finish().unwrap();
//! assert_eq!(graph.nodes.len(), 1);
//! ```
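//!
//! # Applying a mask to logits
//!
//! A minimal sketch of using a mask at sampling time. Only
//! `LogitMask::is_allowed` is taken from this crate; the flat `f32` logits
//! buffer, the greedy-sampling step, the `sample_constrained` helper name,
//! and the exact integer type accepted by `is_allowed` are assumptions
//! standing in for whatever your inference engine provides.
//!
//! ```ignore
//! use torg_mask::LogitMask;
//!
//! /// Set the logits of disallowed vocab IDs to -inf so they can never be
//! /// sampled, then pick the highest-scoring remaining ID (greedy decoding).
//! fn sample_constrained(logits: &mut [f32], mask: &LogitMask) -> usize {
//!     for (id, logit) in logits.iter_mut().enumerate() {
//!         if !mask.is_allowed(id) {
//!             *logit = f32::NEG_INFINITY;
//!         }
//!     }
//!     logits
//!         .iter()
//!         .enumerate()
//!         .max_by(|a, b| a.1.total_cmp(b.1))
//!         .map(|(id, _)| id)
//!         .unwrap()
//! }
//!
//! // In a decode loop this slots in between masking and feeding back:
//! //     let mask = decoder.next_mask();
//! //     let id = sample_constrained(&mut logits, &mask);
//! //     decoder.feed_token(id)?;
//! ```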

pub mod decoder;
pub mod generator;
pub mod mapping;
pub mod mask;

pub use decoder::{ConstrainedDecoder, DecodeError};
pub use generator::MaskGenerator;
pub use mapping::{TokenMapping, TokenMappingBuilder};
pub use mask::LogitMask;

// Re-export torg_core for convenience
pub use torg_core;