[package]
name = "embedd"
version.workspace = true
edition.workspace = true
rust-version.workspace = true
license.workspace = true
authors.workspace = true
description = "Embedding interfaces + local backends (Candle/HF)."
repository = "https://github.com/arclabs561/embedd"
homepage = "https://github.com/arclabs561/embedd"
documentation = "https://docs.rs/embedd"
readme = "README.md"
keywords = ["embeddings", "nlp", "inference", "candle"]
categories = ["science", "text-processing"]

# Sorted alphabetically per Cargo convention. Most backends are optional
# and gated behind the matching entry in [features].
[dependencies]
anyhow.workspace = true
burn = { workspace = true, optional = true }
candle-core = { workspace = true, optional = true }
candle-nn = { workspace = true, optional = true }
candle-transformers = { workspace = true, optional = true }
clap = { workspace = true, optional = true, features = ["derive"] }
fastembed = { workspace = true, optional = true }
hf-hub = { workspace = true, optional = true }
innr = "0.1.8"
once_cell.workspace = true
ort = { workspace = true, optional = true }
qdrant-client = { workspace = true, optional = true }
reqwest = { workspace = true, optional = true }
serde = { workspace = true, optional = true, features = ["derive"] }
serde_json.workspace = true
tokenizers = { workspace = true, optional = true }
tokio = { workspace = true, optional = true }
ureq = { workspace = true, optional = true }

# Sorted alphabetically. tokio gains rt/macros here so async tests can
# use #[tokio::test] even when the main crate's tokio feature is off.
[dev-dependencies]
assert_cmd = "2.0"
criterion.workspace = true
predicates = "3.0"
proptest.workspace = true
tokio = { workspace = true, features = ["rt", "macros"] }

[[bench]]
name = "embedd_bench"
# criterion (see [dev-dependencies]) supplies its own main; disable libtest's.
harness = false

[features]
default = []
serde = ["dep:serde"]
candle-hf = [
    "dep:candle-core",
    "dep:candle-nn",
    "dep:candle-transformers",
    "dep:hf-hub",
    "dep:tokenizers",
]
openai = ["dep:ureq", "dep:serde"]
tei = ["dep:ureq"]
hf-inference = ["dep:ureq", "dep:serde"]
fastembed = ["dep:fastembed"]
ort-tokenizers = ["dep:ort", "dep:tokenizers"]
burn-backend = ["dep:burn"]
siglip = []
qdrant = ["dep:qdrant-client", "dep:tokio"]
async-openai = ["dep:reqwest", "dep:tokio", "dep:serde"]
async-tei = ["dep:reqwest", "dep:tokio"]
async-hf-inference = ["dep:reqwest", "dep:tokio", "dep:serde"]
cli = ["dep:clap"]
# Every backend/serialization feature, sorted alphabetically.
# NOTE(review): "cli" is not included in "all" — presumably intentional
# (the binary is opt-in); confirm before adding it.
all = [
    "async-hf-inference",
    "async-openai",
    "async-tei",
    "burn-backend",
    "candle-hf",
    "fastembed",
    "hf-inference",
    "openai",
    "ort-tokenizers",
    "qdrant",
    "serde",
    "siglip",
    "tei",
]

# All examples exercise the fastembed backend and are skipped unless
# that feature is enabled (cargo run --example ... --features fastembed).
[[example]]
name = "hello_embed"
required-features = ["fastembed"]

[[example]]
name = "semantic_search"
required-features = ["fastembed"]

[[example]]
name = "sparse_retrieval"
required-features = ["fastembed"]

[[example]]
name = "batched_embed"
required-features = ["fastembed"]

# Command-line frontend; only built when the "cli" feature (dep:clap) is on.
[[bin]]
name = "embedd"
path = "src/bin/embedd.rs"
required-features = ["cli"]