tllama 0.1.1

Lightweight Local LLM Inference Engine

[[bin]]
name = "tllama"
path = "src/main.rs"

[dependencies.actix-web]
optional = true
version = "4.11.0"

[dependencies.anyhow]
version = "1.0.100"

[dependencies.async-trait]
version = "0.1.89"

[dependencies.clap]
features = ["derive", "env"]
version = "4.5.47"

[dependencies.colored]
optional = true
version = "3.0.0"

[dependencies.ctrlc]
features = ["termination"]
optional = true
version = "3.5.0"

[dependencies.dirs]
version = "6.0.0"

[dependencies.encoding_rs]
version = "0.8.35"

[dependencies.gotpl]
optional = true
version = "0.2.3"

[dependencies.gtmpl-moyan]
optional = true
version = "0.7.1"

[dependencies.lazy_static]
version = "1.5.0"

[dependencies.llama-cpp-2]
default-features = false
features = ["openmp"]
optional = true
version = "0.1.121"

[dependencies.minijinja]
optional = true
version = "0.22.0"

[dependencies.rustyline]
optional = true
version = "17.0.1"

[dependencies.serde]
features = ["derive"]
version = "1.0.225"

[dependencies.serde_json]
version = "1.0.145"

[dependencies.tempfile]
optional = true
version = "3.23.0"

[dependencies.tokio]
features = ["rt-multi-thread", "macros"]
version = "1.47.1"

[dependencies.tokio-stream]
optional = true
version = "0.1.17"

[dependencies.tracing]
version = "0.1.41"

[dependencies.tracing-subscriber]
features = ["env-filter"]
version = "0.3.20"

[dependencies.uuid]
features = ["v4"]
optional = true
version = "1.18.1"

[dependencies.walkdir]
version = "2.5.0"

[features]
api = ["actix-web", "tokio-stream", "uuid"]
chat = ["rustyline", "colored", "ctrlc"]
default = ["tpl-gtmpl", "engine-llama-cpp", "api", "chat"]
engine-hf = ["tempfile"]
engine-llama-cpp = ["llama-cpp-2"]
hw-cuda = ["llama-cpp-2/cuda"]
hw-metal = ["llama-cpp-2/metal"]
hw-native = ["llama-cpp-2/native"]
hw-vulkan = ["llama-cpp-2/vulkan"]
tpl-gotpl = ["gotpl"]
tpl-gtmpl = ["gtmpl-moyan"]
tpl-minijinja = ["minijinja"]
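# Downstream usage sketch (an assumption, not part of this manifest): a consumer
# crate could disable the default features and pick a backend, hardware target,
# and template engine explicitly. The "hw-*" features only forward the matching
# llama-cpp-2 backend feature, so "engine-llama-cpp" is enabled alongside them here.
#
# [dependencies.tllama]
# version = "0.1.1"
# default-features = false
# features = ["engine-llama-cpp", "hw-cuda", "tpl-minijinja", "api"]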

[lib]
name = "tllama"
path = "src/lib.rs"

[package]
authors = ["moyan <moyan@moyanjdc.top>"]
autobenches = false
autobins = false
autoexamples = false
autolib = false
autotests = false
build = false
description = "Lightweight Local LLM Inference Engine"
documentation = "https://docs.rs/crate/tllama"
edition = "2024"
include = ["Cargo.toml", "src/**/*", "/README.md", "/LICENSE"]
license = "MIT"
name = "tllama"
readme = "README.md"
repository = "https://github.com/moyanj/tllama"
version = "0.1.1"