[package]
# Crate identity first, then remaining manifest keys grouped by purpose.
name = "lfm"
version = "0.1.1"
edition = "2024"
rust-version = "1.95"
build = "build.rs"
# Target auto-discovery is disabled; every lib/bin/example/test/bench
# target is declared explicitly below.
autolib = false
autobins = false
autoexamples = false
autotests = false
autobenches = false
description = "Rust ONNX inference for LiquidAI LFM2.5-VL (vision-language) models — implements the engine-agnostic llmtask::Task contract via llguidance for schema-constrained sampling"
documentation = "https://docs.rs/lfm"
homepage = "https://github.com/findit-ai/lfm"
repository = "https://github.com/findit-ai/lfm"
readme = "README.md"
license = "MIT OR Apache-2.0"
keywords = ["vlm", "onnx", "vision-language", "lfm", "constrained-decoding"]
categories = ["api-bindings", "computer-vision", "multimedia::images"]
# Explicit allow-list of what ships in the published .crate archive.
include = [
    "src/**/*.rs",
    "examples/**/*.rs",
    "benches/**/*.rs",
    "models/*.json",
    "models/*.jinja",
    "build.rs",
    "Cargo.toml",
    "README.md",
    "CHANGELOG.md",
    "LICENSE-*",
]
# docs.rs build configuration: enable the full feature stack so all
# feature-gated items appear in the rendered documentation, and set the
# `docsrs` cfg (paired with the check-cfg entry under [lints]).
[package.metadata.docs.rs]
features = ["inference", "bundled", "decoders", "serde"]
rustdoc-args = ["--cfg", "docsrs"]
[features]
default = ["inference", "bundled", "decoders"]

# Core inference stack: pulls in the ONNX runtime, tokenizer, and
# constrained-decoding dependencies.
inference = [
    "dep:ort",
    "dep:tokenizers",
    "dep:llguidance",
    "dep:toktrie",
    "dep:toktrie_hf_tokenizers",
    "dep:minijinja",
]
bundled = ["inference", "decoders"]
decoders = ["image/jpeg", "image/png"]
integration = ["inference"]
serde = ["smol_str/serde", "llmtask/serde"]

# Hardware execution-provider backends, each layered on `inference`.
coreml = ["inference", "ort/coreml"]
cuda = ["inference", "ort/cuda"]
directml = ["inference", "ort/directml"]
rocm = ["inference", "ort/rocm"]
tensorrt = ["inference", "ort/tensorrt"]
# NOTE(review): both values match Cargo's defaults for a crate named
# "lfm"; kept explicit because target auto-discovery is disabled above.
[lib]
name = "lfm"
path = "src/lib.rs"
# Examples. `preprocess_only` builds with any feature set; the other two
# need the full default stack (model + tokenizer + image decoders).
[[example]]
name = "preprocess_only"
path = "examples/preprocess_only.rs"

[[example]]
name = "scene_analysis"
path = "examples/scene_analysis.rs"
required-features = ["bundled", "inference", "decoders"]

[[example]]
name = "smoke"
path = "examples/smoke.rs"
required-features = ["bundled", "inference", "decoders"]
# Benchmarks use a custom harness (criterion-style), so libtest's
# harness is disabled on each target.
[[bench]]
name = "bench_chat_template"
path = "benches/bench_chat_template.rs"
harness = false
required-features = ["inference"]

[[bench]]
name = "bench_preproc"
path = "benches/bench_preproc.rs"
harness = false

[[bench]]
name = "bench_tile_grid"
path = "benches/bench_tile_grid.rs"
harness = false
# Dependencies, alphabetical. Optional entries are activated via
# `dep:` entries in [features] (primarily the `inference` feature).
[dependencies]
fast_image_resize = "6.0.0"
image = { version = "0.25", default-features = false }
llguidance = { version = "1.7", optional = true }
llmtask = { version = "0.1", features = ["json", "regex"] }
minijinja = { version = "2", default-features = false, features = ["builtins", "json", "macros", "serde"], optional = true }
ort = { version = "2.0.0-rc.12", optional = true }
serde = { version = "1", features = ["derive"] }
serde_json = "1"
smol_str = "0.3"
thiserror = "2"
tokenizers = { version = "0.23", default-features = false, features = ["fancy-regex"], optional = true }
toktrie = { version = "1.7", optional = true }
toktrie_hf_tokenizers = { version = "1.7", optional = true }
tracing = "0.1"
[dev-dependencies]
serde_json = "1"
# criterion is native-only: it cannot run on wasm32 targets.
[target.'cfg(not(target_arch = "wasm32"))'.dev-dependencies]
criterion = "0.8"
[lints.rust]
rust_2018_idioms = "warn"
single_use_lifetimes = "warn"
# `docsrs` is set via rustdoc-args above; `tarpaulin` is set by the
# coverage tool. Registering both keeps unexpected_cfgs quiet for them.
unexpected_cfgs = { level = "warn", priority = 0, check-cfg = ["cfg(docsrs)", "cfg(tarpaulin)"] }
# Tuned-release settings for benchmark builds: the meaningful deviations
# from stock release are thin LTO and a single codegen unit, which widen
# the optimizer's view for cross-crate inlining (slower builds, faster
# benchmarks). The remaining keys pin release-like behavior explicitly.
[profile.bench]
opt-level = 3
lto = "thin"
codegen-units = 1
debug = 0
debug-assertions = false
rpath = false
overflow-checks = false
incremental = false