[package]
edition = "2021"
name = "llama-cpp-v3"
version = "0.1.6"
build = false
autolib = false
autobins = false
autoexamples = false
autotests = false
autobenches = false
description = "Safe and ergonomic Rust wrapper for llama.cpp with dynamic loading"
readme = "README.md"
license = "MIT"
repository = "https://github.com/enlila/llama-cpp-v3"

[lib]
name = "llama_cpp_v3"
path = "src/lib.rs"

[[bin]]
name = "chat_template"
path = "src/bin/chat_template.rs"

[[bin]]
name = "simple"
path = "src/bin/simple.rs"

[dependencies.dirs]
version = "5"

[dependencies.llama-cpp-sys-v3]
version = "0.1.6"

[dependencies.serde_json]
version = "1.0.149"

[dependencies.thiserror]
version = "2"

[dependencies.ureq]
version = "3"
features = ["json"]

[dependencies.zip]
version = "2"