[package]
name = "lfm"
version = "0.1.1"
edition = "2024"
rust-version = "1.95"
description = "Rust ONNX inference for LiquidAI LFM2.5-VL (vision-language) models — implements the engine-agnostic llmtask::Task contract via llguidance for schema-constrained sampling"
license = "MIT OR Apache-2.0"
repository = "https://github.com/findit-ai/lfm"
homepage = "https://github.com/findit-ai/lfm"
documentation = "https://docs.rs/lfm"
keywords = ["vlm", "onnx", "vision-language", "lfm", "constrained-decoding"]
categories = ["api-bindings", "computer-vision", "multimedia::images"]
include = [
"src/**/*.rs",
"examples/**/*.rs",
"benches/**/*.rs",
"models/*.json",
"models/*.jinja",
"build.rs",
"Cargo.toml",
"README.md",
"CHANGELOG.md",
"LICENSE-*",
]

[dependencies]
llmtask = { version = "0.1", features = ["json", "regex"] }
ort = { version = "2.0.0-rc.12", optional = true }
tokenizers = { version = "0.23", optional = true, default-features = false, features = ["fancy-regex"] }
llguidance = { version = "1.7", optional = true }
toktrie = { version = "1.7", optional = true }
toktrie_hf_tokenizers = { version = "1.7", optional = true }
minijinja = { version = "2", optional = true, default-features = false, features = ["builtins", "json", "macros", "serde"] }
image = { version = "0.25", default-features = false }
smol_str = "0.3"
thiserror = "2"
tracing = "0.1"
serde = { version = "1", features = ["derive"] }
serde_json = "1"
fast_image_resize = "6.0.0"

[dev-dependencies]
serde_json = "1"

[target.'cfg(not(target_arch = "wasm32"))'.dev-dependencies]
criterion = "0.8"

[features]
default = ["inference", "bundled", "decoders"]
inference = ["dep:ort", "dep:tokenizers", "dep:llguidance", "dep:toktrie", "dep:toktrie_hf_tokenizers", "dep:minijinja"]
bundled = ["inference", "decoders"]
decoders = ["image/jpeg", "image/png"]
serde = ["smol_str/serde", "llmtask/serde"]
cuda = ["inference", "ort/cuda"]
tensorrt = ["inference", "ort/tensorrt"]
directml = ["inference", "ort/directml"]
rocm = ["inference", "ort/rocm"]
coreml = ["inference", "ort/coreml"]
integration = ["inference"]

[[test]]
name = "integration"
path = "tests/integration.rs"
required-features = ["integration"]

[[example]]
name = "smoke"
required-features = ["bundled", "inference", "decoders"]

[[example]]
name = "scene_analysis"
required-features = ["bundled", "inference", "decoders"]

[[example]]
name = "preprocess_only"

[[bench]]
name = "bench_preproc"
harness = false

[[bench]]
name = "bench_tile_grid"
harness = false

[[bench]]
name = "bench_chat_template"
harness = false
required-features = ["inference"]

[profile.bench]
opt-level = 3
debug = false
codegen-units = 1
lto = "thin"
incremental = false
debug-assertions = false
overflow-checks = false
rpath = false

[package.metadata.docs.rs]
features = ["inference", "bundled", "decoders", "serde"]
rustdoc-args = ["--cfg", "docsrs"]

[lints.rust]
rust_2018_idioms = "warn"
single_use_lifetimes = "warn"
unexpected_cfgs = { level = "warn", check-cfg = ['cfg(docsrs)', 'cfg(tarpaulin)'] }