// base_d/lib.rs
1//! # base-d
2//!
3//! A universal, multi-dictionary encoding library for Rust.
4//!
5//! Encode binary data using numerous dictionaries including RFC standards, ancient scripts,
6//! emoji, playing cards, and more. Supports three encoding modes: radix (true base
7//! conversion), RFC 4648 chunked encoding, and direct byte-range mapping.
8//!
9//! ## Quick Start
10//!
11//! ```
12//! use base_d::{DictionaryRegistry, Dictionary, encode, decode};
13//!
14//! # fn main() -> Result<(), Box<dyn std::error::Error>> {
15//! // Load built-in dictionaries
16//! let config = DictionaryRegistry::load_default()?;
17//! let base64_config = config.get_dictionary("base64").unwrap();
18//!
19//! // Create dictionary
20//! let chars: Vec<char> = base64_config.chars.chars().collect();
21//! let padding = base64_config.padding.as_ref().and_then(|s| s.chars().next());
//! let mut builder = Dictionary::builder()
//!     .chars(chars)
//!     .mode(base64_config.effective_mode());
//! if let Some(p) = padding {
//!     builder = builder.padding(p);
//! }
//! let dictionary = builder.build()?;
29//!
30//! // Encode and decode
31//! let data = b"Hello, World!";
32//! let encoded = encode(data, &dictionary);
33//! let decoded = decode(&encoded, &dictionary)?;
34//! assert_eq!(data, &decoded[..]);
35//! # Ok(())
36//! # }
37//! ```
38//!
39//! ## Features
40//!
41//! - **33 Built-in Dictionaries**: RFC standards, emoji, ancient scripts, and more
42//! - **3 Encoding Modes**: Radix, chunked (RFC-compliant), byte-range
43//! - **Streaming Support**: Memory-efficient processing for large files
44//! - **Custom Dictionaries**: Define your own via TOML configuration
45//! - **User Configuration**: Load dictionaries from `~/.config/base-d/dictionaries.toml`
46//! - **SIMD Acceleration**: AVX2/SSSE3 on x86_64, NEON on aarch64 (enabled by default)
47//!
48//! ## Cargo Features
49//!
50//! - `simd` (default): Enable SIMD acceleration for encoding/decoding.
51//! Disable with `--no-default-features` for scalar-only builds.
52//!
53//! ## Encoding Modes
54//!
55//! ### Radix Base Conversion
56//!
57//! True base conversion treating data as a large number. Works with any dictionary size.
58//!
59//! ```
60//! use base_d::{Dictionary, EncodingMode, encode};
61//!
62//! # fn main() -> Result<(), Box<dyn std::error::Error>> {
63//! let chars: Vec<char> = "😀😁😂🤣😃😄😅😆".chars().collect();
//! let dictionary = Dictionary::builder()
//!     .chars(chars)
//!     .mode(EncodingMode::Radix)
//!     .build()?;
68//!
69//! let encoded = encode(b"Hi", &dictionary);
70//! # Ok(())
71//! # }
72//! ```
73//!
74//! ### Chunked Mode (RFC 4648)
75//!
76//! Fixed-size bit groups, compatible with standard base64/base32.
77//!
78//! ```
79//! use base_d::{Dictionary, EncodingMode, encode};
80//!
81//! # fn main() -> Result<(), Box<dyn std::error::Error>> {
//! let chars: Vec<char> = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/"
//!     .chars().collect();
//! let dictionary = Dictionary::builder()
//!     .chars(chars)
//!     .mode(EncodingMode::Chunked)
//!     .padding('=')
//!     .build()?;
89//!
90//! let encoded = encode(b"Hello", &dictionary);
91//! assert_eq!(encoded, "SGVsbG8=");
92//! # Ok(())
93//! # }
94//! ```
95//!
96//! ### Byte Range Mode
97//!
98//! Direct 1:1 byte-to-emoji mapping. Zero encoding overhead.
99//!
100//! ```
101//! use base_d::{Dictionary, EncodingMode, encode};
102//!
103//! # fn main() -> Result<(), Box<dyn std::error::Error>> {
//! let dictionary = Dictionary::builder()
//!     .mode(EncodingMode::ByteRange)
//!     .start_codepoint(127991) // U+1F3F7
//!     .build()?;
108//!
109//! let data = b"Hi";
110//! let encoded = encode(data, &dictionary);
111//! assert_eq!(encoded.chars().count(), 2); // 1:1 mapping
112//! # Ok(())
113//! # }
114//! ```
115//!
116//! ## Streaming
117//!
118//! For large files, use streaming to avoid loading entire file into memory:
119//!
120//! ```no_run
121//! use base_d::{DictionaryRegistry, StreamingEncoder};
122//! use std::fs::File;
123//!
124//! # fn main() -> Result<(), Box<dyn std::error::Error>> {
125//! let config = DictionaryRegistry::load_default()?;
126//! let dictionary_config = config.get_dictionary("base64").unwrap();
127//!
128//! // ... create dictionary from config
129//! # let chars: Vec<char> = dictionary_config.chars.chars().collect();
130//! # let padding = dictionary_config.padding.as_ref().and_then(|s| s.chars().next());
131//! # let mut builder = base_d::Dictionary::builder().chars(chars).mode(dictionary_config.effective_mode());
132//! # if let Some(p) = padding { builder = builder.padding(p); }
133//! # let dictionary = builder.build()?;
134//!
135//! let mut input = File::open("large_file.bin")?;
136//! let output = File::create("encoded.txt")?;
137//!
138//! let mut encoder = StreamingEncoder::new(&dictionary, output);
139//! encoder.encode(&mut input)?;
140//! # Ok(())
141//! # }
142//! ```
143
144mod core;
145mod encoders;
146mod features;
147
148#[cfg(feature = "simd")]
149mod simd;
150
151#[cfg(feature = "wasm")]
152pub mod wasm;
153
154pub mod bench;
155pub mod convenience;
156pub mod prelude;
157pub mod wordlists;
158
159pub use convenience::{
160 CompressEncodeResult, HashEncodeResult, compress_encode, compress_encode_with, hash_encode,
161 hash_encode_with,
162};
163pub use core::alternating_dictionary::AlternatingWordDictionary;
164pub use core::config::{
165 CompressionConfig, DictionaryConfig, DictionaryRegistry, DictionaryType, EncodingMode, Settings,
166};
167pub use core::dictionary::{Dictionary, DictionaryBuilder};
168pub use core::word_dictionary::{WordDictionary, WordDictionaryBuilder};
169pub use encoders::algorithms::{DecodeError, DictionaryNotFoundError, find_closest_dictionary};
170
171/// Word-based encoding using radix conversion.
172///
173/// Same mathematical approach as character-based radix encoding,
174/// but outputs words joined by a delimiter instead of concatenated characters.
175pub mod word {
176 pub use crate::encoders::algorithms::word::{decode, encode};
177}
178
179/// Alternating word-based encoding for PGP-style biometric word lists.
180///
181/// Provides direct 1:1 byte-to-word mapping where the dictionary selection
182/// alternates based on byte position (e.g., even/odd bytes use different dictionaries).
183pub mod word_alternating {
184 pub use crate::encoders::algorithms::word_alternating::{decode, encode};
185}
186pub use encoders::streaming::{StreamingDecoder, StreamingEncoder};
187
188// Expose schema encoding functions for CLI
189pub use encoders::algorithms::schema::{
190 SchemaCompressionAlgo, decode_fiche, decode_fiche_path, decode_schema, encode_fiche,
191 encode_fiche_ascii, encode_fiche_light, encode_fiche_minified, encode_fiche_path,
192 encode_fiche_readable, encode_markdown_fiche, encode_markdown_fiche_ascii,
193 encode_markdown_fiche_light, encode_markdown_fiche_markdown, encode_markdown_fiche_readable,
194 encode_schema,
195};
196
197// Expose fiche auto-detection
198pub use encoders::algorithms::schema::fiche_analyzer::{DetectedMode, detect_fiche_mode};
199
200/// Schema encoding types and traits for building custom frontends
201///
202/// This module provides the intermediate representation (IR) layer for schema encoding,
203/// allowing library users to implement custom parsers (YAML, CSV, TOML, etc.) and
204/// serializers that leverage the binary encoding backend.
205///
206/// # Architecture
207///
208/// The schema encoding pipeline has three layers:
209///
210/// 1. **Input layer**: Parse custom formats into IR
211/// - Implement `InputParser` trait
212/// - Reference: `JsonParser`
213///
214/// 2. **Binary layer**: Pack/unpack IR to/from binary
215/// - `pack()` - IR to binary bytes
216/// - `unpack()` - Binary bytes to IR
217/// - `encode_framed()` - Binary to display96 with delimiters
218/// - `decode_framed()` - Display96 to binary
219///
220/// 3. **Output layer**: Serialize IR to custom formats
221/// - Implement `OutputSerializer` trait
222/// - Reference: `JsonSerializer`
223///
224/// # Example: Custom CSV Parser
225///
226/// ```ignore
227/// use base_d::schema::{
228/// InputParser, IntermediateRepresentation, SchemaHeader, FieldDef,
229/// FieldType, SchemaValue, SchemaError, pack, encode_framed,
230/// };
231///
232/// struct CsvParser;
233///
234/// impl InputParser for CsvParser {
235/// type Error = SchemaError;
236///
237/// fn parse(input: &str) -> Result<IntermediateRepresentation, Self::Error> {
238/// // Parse CSV headers
239/// let lines: Vec<&str> = input.lines().collect();
240/// let headers: Vec<&str> = lines[0].split(',').collect();
241///
242/// // Infer types and build fields
243/// let fields: Vec<FieldDef> = headers.iter()
244/// .map(|h| FieldDef::new(h.to_string(), FieldType::String))
245/// .collect();
246///
247/// // Parse rows
248/// let row_count = lines.len() - 1;
249/// let mut values = Vec::new();
250/// for line in &lines[1..] {
251/// for cell in line.split(',') {
252/// values.push(SchemaValue::String(cell.to_string()));
253/// }
254/// }
255///
256/// let header = SchemaHeader::new(row_count, fields);
257/// IntermediateRepresentation::new(header, values)
258/// }
259/// }
260///
261/// // Encode CSV to schema format
262/// let csv = "name,age\nalice,30\nbob,25";
263/// let ir = CsvParser::parse(csv)?;
264/// let binary = pack(&ir);
265/// let encoded = encode_framed(&binary);
266/// ```
267///
268/// # IR Structure
269///
270/// The `IntermediateRepresentation` consists of:
271///
272/// * **Header**: Schema metadata
273/// - Field definitions (name + type)
274/// - Row count
275/// - Optional root key
276/// - Optional null bitmap
277///
278/// * **Values**: Flat array in row-major order
279/// - `[row0_field0, row0_field1, row1_field0, row1_field1, ...]`
280///
281/// # Type System
282///
283/// Supported field types:
284///
285/// * `U64` - Unsigned 64-bit integer
286/// * `I64` - Signed 64-bit integer
287/// * `F64` - 64-bit floating point
288/// * `String` - UTF-8 string
289/// * `Bool` - Boolean
290/// * `Null` - Null value
291/// * `Array(T)` - Homogeneous array of type T
292/// * `Any` - Mixed-type values
293///
294/// # Compression
295///
296/// Optional compression algorithms:
297///
298/// * `SchemaCompressionAlgo::Brotli` - Best ratio
299/// * `SchemaCompressionAlgo::Lz4` - Fastest
300/// * `SchemaCompressionAlgo::Zstd` - Balanced
301///
302/// # See Also
303///
304/// * [SCHEMA.md](../SCHEMA.md) - Full format specification
305/// * `encode_schema()` / `decode_schema()` - High-level JSON functions
306pub mod schema {
307 pub use crate::encoders::algorithms::schema::{
308 // IR types
309 FieldDef,
310 FieldType,
311 // Traits
312 InputParser,
313 IntermediateRepresentation,
314 // Reference implementations
315 JsonParser,
316 JsonSerializer,
317 OutputSerializer,
318 // Compression
319 SchemaCompressionAlgo,
320 // Errors
321 SchemaError,
322 SchemaHeader,
323 SchemaValue,
324 // Binary layer
325 decode_framed,
326 // High-level API
327 decode_schema,
328 encode_framed,
329 encode_schema,
330 pack,
331 unpack,
332 };
333}
334pub use features::{
335 CompressionAlgorithm, DictionaryDetector, DictionaryMatch, HashAlgorithm, XxHashConfig,
336 compress, decompress, detect_dictionary, hash, hash_with_config,
337};
338
339/// Encodes binary data using the specified dictionary.
340///
341/// Automatically selects the appropriate encoding strategy based on the
342/// dictionary's mode (Radix, Chunked, or ByteRange).
343///
344/// # Arguments
345///
346/// * `data` - The binary data to encode
347/// * `dictionary` - The dictionary to use for encoding
348///
349/// # Returns
350///
351/// A string containing the encoded data
352///
353/// # Examples
354///
355/// ```
356/// use base_d::{Dictionary, EncodingMode};
357///
358/// # fn main() -> Result<(), Box<dyn std::error::Error>> {
359/// let chars: Vec<char> = "01".chars().collect();
360/// let dictionary = Dictionary::builder()
361/// .chars(chars)
362/// .mode(EncodingMode::Radix)
363/// .build()?;
364/// let encoded = base_d::encode(b"Hi", &dictionary);
365/// # Ok(())
366/// # }
367/// ```
368pub fn encode(data: &[u8], dictionary: &Dictionary) -> String {
369 match dictionary.mode() {
370 EncodingMode::Radix => encoders::algorithms::radix::encode(data, dictionary),
371 EncodingMode::Chunked => encoders::algorithms::chunked::encode_chunked(data, dictionary),
372 EncodingMode::ByteRange => {
373 encoders::algorithms::byte_range::encode_byte_range(data, dictionary)
374 }
375 }
376}
377
378/// Decodes a string back to binary data using the specified dictionary.
379///
380/// Automatically selects the appropriate decoding strategy based on the
381/// dictionary's mode (Radix, Chunked, or ByteRange).
382///
383/// # Arguments
384///
385/// * `encoded` - The encoded string to decode
386/// * `dictionary` - The dictionary used for encoding
387///
388/// # Returns
389///
390/// A `Result` containing the decoded binary data, or a `DecodeError` if
391/// the input is invalid
392///
393/// # Errors
394///
395/// Returns `DecodeError` if:
396/// - The input contains invalid characters
397/// - The input is empty
398/// - The padding is invalid (for chunked mode)
399///
400/// # Examples
401///
402/// ```
403/// use base_d::{Dictionary, EncodingMode, encode, decode};
404///
405/// # fn main() -> Result<(), Box<dyn std::error::Error>> {
406/// let chars: Vec<char> = "01".chars().collect();
407/// let dictionary = Dictionary::builder()
408/// .chars(chars)
409/// .mode(EncodingMode::Radix)
410/// .build()?;
411/// let data = b"Hi";
412/// let encoded = encode(data, &dictionary);
413/// let decoded = decode(&encoded, &dictionary)?;
414/// assert_eq!(data, &decoded[..]);
415/// # Ok(())
416/// # }
417/// ```
418pub fn decode(encoded: &str, dictionary: &Dictionary) -> Result<Vec<u8>, DecodeError> {
419 match dictionary.mode() {
420 EncodingMode::Radix => encoders::algorithms::radix::decode(encoded, dictionary),
421 EncodingMode::Chunked => encoders::algorithms::chunked::decode_chunked(encoded, dictionary),
422 EncodingMode::ByteRange => {
423 encoders::algorithms::byte_range::decode_byte_range(encoded, dictionary)
424 }
425 }
426}
427
428#[cfg(test)]
429mod tests;