base_d/
lib.rs

//! # base-d
//!
//! A universal, multi-dictionary encoding library for Rust.
//!
//! Encode binary data using numerous dictionaries including RFC standards, ancient scripts,
//! emoji, playing cards, and more. Supports three encoding modes: radix (true base
//! conversion), RFC 4648 chunked encoding, and direct byte-range mapping.
//!
//! ## Quick Start
//!
//! ```
//! use base_d::{DictionaryRegistry, Dictionary, encode, decode};
//!
//! # fn main() -> Result<(), Box<dyn std::error::Error>> {
//! // Load built-in dictionaries
//! let config = DictionaryRegistry::load_default()?;
//! let base64_config = config.get_dictionary("base64").unwrap();
//!
//! // Create dictionary
//! let chars: Vec<char> = base64_config.chars.chars().collect();
//! let padding = base64_config.padding.as_ref().and_then(|s| s.chars().next());
//! let mut builder = Dictionary::builder()
//!     .chars(chars)
//!     .mode(base64_config.effective_mode());
//! if let Some(p) = padding {
//!     builder = builder.padding(p);
//! }
//! let dictionary = builder.build()?;
//!
//! // Encode and decode
//! let data = b"Hello, World!";
//! let encoded = encode(data, &dictionary);
//! let decoded = decode(&encoded, &dictionary)?;
//! assert_eq!(data, &decoded[..]);
//! # Ok(())
//! # }
//! ```
//!
//! ## Features
//!
//! - **33 Built-in Dictionaries**: RFC standards, emoji, ancient scripts, and more
//! - **3 Encoding Modes**: Radix, chunked (RFC-compliant), byte-range
//! - **Streaming Support**: Memory-efficient processing for large files
//! - **Custom Dictionaries**: Define your own via TOML configuration
//! - **User Configuration**: Load dictionaries from `~/.config/base-d/dictionaries.toml`
//! - **SIMD Acceleration**: AVX2/SSSE3 on x86_64, NEON on aarch64 (enabled by default)
//!
//! ## Cargo Features
//!
//! - `simd` (default): Enable SIMD acceleration for encoding/decoding.
//!   Disable with `--no-default-features` for scalar-only builds.
//!
//! ## Encoding Modes
//!
//! ### Radix Base Conversion
//!
//! True base conversion treating data as a large number. Works with any dictionary size.
//!
//! ```
//! use base_d::{Dictionary, EncodingMode, encode};
//!
//! # fn main() -> Result<(), Box<dyn std::error::Error>> {
//! let chars: Vec<char> = "😀😁😂🤣😃😄😅😆".chars().collect();
//! let dictionary = Dictionary::builder()
//!     .chars(chars)
//!     .mode(EncodingMode::Radix)
//!     .build()?;
//!
//! let encoded = encode(b"Hi", &dictionary);
//! # Ok(())
//! # }
//! ```
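//!
//! Conceptually, radix mode treats the whole input as one large integer and repeatedly
//! divides it by the dictionary size to produce digit indices. A minimal standalone
//! sketch of that idea (plain arithmetic, not the library's internal implementation):
//!
//! ```
//! // b"Hi" is the big-endian integer 0x4869 = 18537; in base 8 its digits are
//! // 4, 4, 1, 5, 1, so a base-8 rendering of these two bytes is five digits long.
//! let mut n = u32::from(u16::from_be_bytes(*b"Hi"));
//! let mut digits = Vec::new();
//! while n > 0 {
//!     digits.push((n % 8) as u8);
//!     n /= 8;
//! }
//! digits.reverse();
//! assert_eq!(digits, vec![4, 4, 1, 5, 1]);
//! ```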
//!
//! ### Chunked Mode (RFC 4648)
//!
//! Fixed-size bit groups, compatible with standard base64/base32.
//!
//! ```
//! use base_d::{Dictionary, EncodingMode, encode};
//!
//! # fn main() -> Result<(), Box<dyn std::error::Error>> {
//! let chars: Vec<char> = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/"
//!     .chars().collect();
//! let dictionary = Dictionary::builder()
//!     .chars(chars)
//!     .mode(EncodingMode::Chunked)
//!     .padding('=')
//!     .build()?;
//!
//! let encoded = encode(b"Hello", &dictionary);
//! assert_eq!(encoded, "SGVsbG8=");
//! # Ok(())
//! # }
//! ```
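//!
//! In chunked base64, every 3 input bytes form a 24-bit group that is split into four
//! 6-bit dictionary indices. A standalone sketch of that bit slicing (illustration only,
//! not the library's implementation):
//!
//! ```
//! // "Hel" = 0x48 0x65 0x6C packs into one 24-bit group; its four 6-bit
//! // slices are 18, 6, 21, 44, which select 'S', 'G', 'V', 's' in base64.
//! let bytes = b"Hel";
//! let group = (u32::from(bytes[0]) << 16) | (u32::from(bytes[1]) << 8) | u32::from(bytes[2]);
//! let indices = [(group >> 18) & 63, (group >> 12) & 63, (group >> 6) & 63, group & 63];
//! assert_eq!(indices, [18, 6, 21, 44]);
//! ```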
//!
//! ### Byte Range Mode
//!
//! Direct 1:1 byte-to-character mapping over a contiguous codepoint range: one output
//! character per input byte, with no encoding overhead.
//!
//! ```
//! use base_d::{Dictionary, EncodingMode, encode};
//!
//! # fn main() -> Result<(), Box<dyn std::error::Error>> {
//! let dictionary = Dictionary::builder()
//!     .mode(EncodingMode::ByteRange)
//!     .start_codepoint(127991)  // U+1F3F7
//!     .build()?;
//!
//! let data = b"Hi";
//! let encoded = encode(data, &dictionary);
//! assert_eq!(encoded.chars().count(), 2);  // 1:1 mapping
//! # Ok(())
//! # }
//! ```
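//!
//! Conceptually, the mapping is plain codepoint arithmetic: byte value `b` becomes the
//! character at `start_codepoint + b`, and decoding subtracts the offset. A standalone
//! sketch of the idea (not the library's internal code):
//!
//! ```
//! let start: u32 = 127991; // U+1F3F7, the same start codepoint as above
//! let byte = b'H';
//! let ch = char::from_u32(start + u32::from(byte)).unwrap();
//! assert_eq!(ch as u32 - start, u32::from(byte)); // reversing the offset recovers the byte
//! ```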
//!
//! ## Streaming
//!
//! For large files, use streaming to avoid loading the entire file into memory:
//!
//! ```no_run
//! use base_d::{DictionaryRegistry, StreamingEncoder};
//! use std::fs::File;
//!
//! # fn main() -> Result<(), Box<dyn std::error::Error>> {
//! let config = DictionaryRegistry::load_default()?;
//! let dictionary_config = config.get_dictionary("base64").unwrap();
//!
//! // ... create dictionary from config
//! # let chars: Vec<char> = dictionary_config.chars.chars().collect();
//! # let padding = dictionary_config.padding.as_ref().and_then(|s| s.chars().next());
//! # let mut builder = base_d::Dictionary::builder().chars(chars).mode(dictionary_config.effective_mode());
//! # if let Some(p) = padding { builder = builder.padding(p); }
//! # let dictionary = builder.build()?;
//!
//! let mut input = File::open("large_file.bin")?;
//! let output = File::create("encoded.txt")?;
//!
//! let mut encoder = StreamingEncoder::new(&dictionary, output);
//! encoder.encode(&mut input)?;
//! # Ok(())
//! # }
//! ```
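//!
//! Decoding a stream is the mirror image. The sketch below assumes `StreamingDecoder`
//! follows the same constructor-plus-`decode` shape as `StreamingEncoder`; check its
//! item docs for the exact signatures.
//!
//! ```ignore
//! use base_d::StreamingDecoder;
//! use std::fs::File;
//!
//! // `dictionary` built as in the encoding example above
//! let mut input = File::open("encoded.txt")?;
//! let output = File::create("decoded.bin")?;
//!
//! let mut decoder = StreamingDecoder::new(&dictionary, output);
//! decoder.decode(&mut input)?;
//! ```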

mod core;
mod encoders;
mod features;

#[cfg(feature = "simd")]
mod simd;

pub mod bench;
pub mod convenience;
pub mod prelude;

pub use convenience::{
    CompressEncodeResult, HashEncodeResult, compress_encode, compress_encode_with, hash_encode,
    hash_encode_with,
};
pub use crate::core::config::{
    CompressionConfig, DictionaryConfig, DictionaryRegistry, EncodingMode, Settings,
};
pub use crate::core::dictionary::{Dictionary, DictionaryBuilder};
pub use encoders::algorithms::{DecodeError, DictionaryNotFoundError, find_closest_dictionary};
pub use encoders::streaming::{StreamingDecoder, StreamingEncoder};

// Expose schema encoding functions for CLI
pub use encoders::algorithms::schema::{
    SchemaCompressionAlgo, decode_fiche, decode_fiche_path, decode_schema, encode_fiche,
    encode_fiche_ascii, encode_fiche_light, encode_fiche_minified, encode_fiche_path,
    encode_fiche_readable, encode_markdown_fiche, encode_markdown_fiche_ascii,
    encode_markdown_fiche_light, encode_markdown_fiche_markdown, encode_markdown_fiche_readable,
    encode_schema,
};

// Expose fiche auto-detection
pub use encoders::algorithms::schema::fiche_analyzer::{DetectedMode, detect_fiche_mode};

/// Schema encoding types and traits for building custom frontends
///
/// This module provides the intermediate representation (IR) layer for schema encoding,
/// allowing library users to implement custom parsers (YAML, CSV, TOML, etc.) and
/// serializers that leverage the binary encoding backend.
///
/// # Architecture
///
/// The schema encoding pipeline has three layers:
///
/// 1. **Input layer**: Parse custom formats into IR
///    - Implement `InputParser` trait
///    - Reference: `JsonParser`
///
/// 2. **Binary layer**: Pack/unpack IR to/from binary
///    - `pack()` - IR to binary bytes
///    - `unpack()` - Binary bytes to IR
///    - `encode_framed()` - Binary to display96 with delimiters
///    - `decode_framed()` - Display96 to binary
///
/// 3. **Output layer**: Serialize IR to custom formats
///    - Implement `OutputSerializer` trait
///    - Reference: `JsonSerializer`
///
/// # Example: Custom CSV Parser
///
/// ```ignore
/// use base_d::schema::{
///     InputParser, IntermediateRepresentation, SchemaHeader, FieldDef,
///     FieldType, SchemaValue, SchemaError, pack, encode_framed,
/// };
///
/// struct CsvParser;
///
/// impl InputParser for CsvParser {
///     type Error = SchemaError;
///
///     fn parse(input: &str) -> Result<IntermediateRepresentation, Self::Error> {
///         // Parse CSV headers
///         let lines: Vec<&str> = input.lines().collect();
///         let headers: Vec<&str> = lines[0].split(',').collect();
///
///         // Infer types and build fields
///         let fields: Vec<FieldDef> = headers.iter()
///             .map(|h| FieldDef::new(h.to_string(), FieldType::String))
///             .collect();
///
///         // Parse rows
///         let row_count = lines.len() - 1;
///         let mut values = Vec::new();
///         for line in &lines[1..] {
///             for cell in line.split(',') {
///                 values.push(SchemaValue::String(cell.to_string()));
///             }
///         }
///
///         let header = SchemaHeader::new(row_count, fields);
///         IntermediateRepresentation::new(header, values)
///     }
/// }
///
/// // Encode CSV to schema format
/// let csv = "name,age\nalice,30\nbob,25";
/// let ir = CsvParser::parse(csv)?;
/// let binary = pack(&ir);
/// let encoded = encode_framed(&binary);
/// ```
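///
/// Decoding reverses the pipeline: `decode_framed` recovers the binary payload, `unpack`
/// rebuilds the IR, and an `OutputSerializer` implementation (such as `JsonSerializer`)
/// renders it. The argument and return types in this sketch are assumptions made to
/// mirror the encode side above; consult the item docs for the real signatures.
///
/// ```ignore
/// use base_d::schema::{decode_framed, unpack};
///
/// let binary = decode_framed(&encoded)?;
/// let ir = unpack(&binary)?;
/// // Render `ir` with an OutputSerializer implementation, e.g. JsonSerializer.
/// ```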
///
/// # IR Structure
///
/// The `IntermediateRepresentation` consists of two parts (a small construction sketch
/// follows the list):
///
/// * **Header**: Schema metadata
///   - Field definitions (name + type)
///   - Row count
///   - Optional root key
///   - Optional null bitmap
///
/// * **Values**: Flat array in row-major order
///   - `[row0_field0, row0_field1, row1_field0, row1_field1, ...]`
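///
/// For the two-row CSV from the parser example, the IR could be built roughly like this
/// (an `ignore` sketch reusing the constructors shown above; field typing and value
/// conversion are simplified):
///
/// ```ignore
/// use base_d::schema::{FieldDef, FieldType, IntermediateRepresentation, SchemaHeader, SchemaValue};
///
/// let fields = vec![
///     FieldDef::new("name".to_string(), FieldType::String),
///     FieldDef::new("age".to_string(), FieldType::String),
/// ];
/// let header = SchemaHeader::new(2, fields); // 2 rows
/// let values = vec![
///     SchemaValue::String("alice".into()), SchemaValue::String("30".into()), // row 0
///     SchemaValue::String("bob".into()), SchemaValue::String("25".into()),   // row 1
/// ];
/// let ir = IntermediateRepresentation::new(header, values);
/// ```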
///
/// # Type System
///
/// Supported field types:
///
/// * `U64` - Unsigned 64-bit integer
/// * `I64` - Signed 64-bit integer
/// * `F64` - 64-bit floating point
/// * `String` - UTF-8 string
/// * `Bool` - Boolean
/// * `Null` - Null value
/// * `Array(T)` - Homogeneous array of type T
/// * `Any` - Mixed-type values
///
/// # Compression
///
/// Optional compression algorithms:
///
/// * `SchemaCompressionAlgo::Brotli` - Best ratio
/// * `SchemaCompressionAlgo::Lz4` - Fastest
/// * `SchemaCompressionAlgo::Zstd` - Balanced
///
/// # See Also
///
/// * [SCHEMA.md](../SCHEMA.md) - Full format specification
/// * `encode_schema()` / `decode_schema()` - High-level JSON functions
pub mod schema {
    pub use crate::encoders::algorithms::schema::{
        // IR types
        FieldDef,
        FieldType,
        IntermediateRepresentation,
        SchemaHeader,
        SchemaValue,
        // Traits
        InputParser,
        OutputSerializer,
        // Reference implementations
        JsonParser,
        JsonSerializer,
        // Compression
        SchemaCompressionAlgo,
        // Errors
        SchemaError,
        // Binary layer
        decode_framed,
        encode_framed,
        pack,
        unpack,
        // High-level API
        decode_schema,
        encode_schema,
    };
}

pub use features::{
    CompressionAlgorithm, DictionaryDetector, DictionaryMatch, HashAlgorithm, XxHashConfig,
    compress, decompress, detect_dictionary, hash, hash_with_config,
};

/// Encodes binary data using the specified dictionary.
///
/// Automatically selects the appropriate encoding strategy based on the
/// dictionary's mode (Radix, Chunked, or ByteRange).
///
/// # Arguments
///
/// * `data` - The binary data to encode
/// * `dictionary` - The dictionary to use for encoding
///
/// # Returns
///
/// A string containing the encoded data
///
/// # Examples
///
/// ```
/// use base_d::{Dictionary, EncodingMode};
///
/// # fn main() -> Result<(), Box<dyn std::error::Error>> {
/// let chars: Vec<char> = "01".chars().collect();
/// let dictionary = Dictionary::builder()
///     .chars(chars)
///     .mode(EncodingMode::Radix)
///     .build()?;
/// let encoded = base_d::encode(b"Hi", &dictionary);
/// # Ok(())
/// # }
/// ```
pub fn encode(data: &[u8], dictionary: &Dictionary) -> String {
    match dictionary.mode() {
        EncodingMode::Radix => encoders::algorithms::radix::encode(data, dictionary),
        EncodingMode::Chunked => encoders::algorithms::chunked::encode_chunked(data, dictionary),
        EncodingMode::ByteRange => {
            encoders::algorithms::byte_range::encode_byte_range(data, dictionary)
        }
    }
}

/// Decodes a string back to binary data using the specified dictionary.
///
/// Automatically selects the appropriate decoding strategy based on the
/// dictionary's mode (Radix, Chunked, or ByteRange).
///
/// # Arguments
///
/// * `encoded` - The encoded string to decode
/// * `dictionary` - The dictionary used for encoding
///
/// # Returns
///
/// A `Result` containing the decoded binary data, or a `DecodeError` if
/// the input is invalid
///
/// # Errors
///
/// Returns `DecodeError` if:
/// - The input contains invalid characters
/// - The input is empty
/// - The padding is invalid (for chunked mode)
///
/// # Examples
///
/// ```
/// use base_d::{Dictionary, EncodingMode, encode, decode};
///
/// # fn main() -> Result<(), Box<dyn std::error::Error>> {
/// let chars: Vec<char> = "01".chars().collect();
/// let dictionary = Dictionary::builder()
///     .chars(chars)
///     .mode(EncodingMode::Radix)
///     .build()?;
/// let data = b"Hi";
/// let encoded = encode(data, &dictionary);
/// let decoded = decode(&encoded, &dictionary)?;
/// assert_eq!(data, &decoded[..]);
/// # Ok(())
/// # }
/// ```
pub fn decode(encoded: &str, dictionary: &Dictionary) -> Result<Vec<u8>, DecodeError> {
    match dictionary.mode() {
        EncodingMode::Radix => encoders::algorithms::radix::decode(encoded, dictionary),
        EncodingMode::Chunked => encoders::algorithms::chunked::decode_chunked(encoded, dictionary),
        EncodingMode::ByteRange => {
            encoders::algorithms::byte_range::decode_byte_range(encoded, dictionary)
        }
    }
}

#[cfg(test)]
mod tests;