llama-server 0.1.1

Download, embed, and run llama.cpp in your Rust projects
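To make the one-line description concrete, here is a minimal sketch of the kind of workflow this crate automates: spawning a downloaded llama.cpp llama-server binary and talking to it over HTTP. This is not this crate's API (which is not shown on this page); it only uses the tokio and reqwest crates listed in the manifest below. The binary path, model file, port, and the /health and /completion endpoints of llama.cpp's server are assumptions for illustration, and a downstream project would need tokio's "macros", "rt-multi-thread", and "time" features enabled for it to compile.

use std::time::Duration;

#[tokio::main]
async fn main() -> Result<(), Box<dyn std::error::Error>> {
    // Assume a llama.cpp `llama-server` binary and a GGUF model have already
    // been downloaded; both paths here are placeholders.
    let mut server = tokio::process::Command::new("./llama-server")
        .args(["--model", "model.gguf", "--port", "8080"])
        .kill_on_drop(true)
        .spawn()?;

    // Poll the server until it reports itself healthy.
    let client = reqwest::Client::new();
    loop {
        tokio::time::sleep(Duration::from_millis(500)).await;

        if let Ok(response) = client.get("http://localhost:8080/health").send().await {
            if response.status().is_success() {
                break;
            }
        }
    }

    // Request a completion over the HTTP API exposed by llama.cpp.
    let body = client
        .post("http://localhost:8080/completion")
        .header("Content-Type", "application/json")
        .body(r#"{ "prompt": "Hello", "n_predict": 16 }"#)
        .send()
        .await?
        .text()
        .await?;

    println!("{body}");

    server.kill().await?;

    Ok(())
}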
# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO
#
# When uploading crates to the registry Cargo will automatically
# "normalize" Cargo.toml files for maximal compatibility
# with all versions of Cargo and also rewrite `path` dependencies
# to registry (e.g., crates.io) dependencies.
#
# If you are reading this file be aware that the original Cargo.toml
# will likely look very different (and much more reasonable).
# See Cargo.toml.orig for the original contents.

[package]
edition = "2024"
name = "llama-server"
version = "0.1.1"
build = false
autolib = false
autobins = false
autoexamples = false
autotests = false
autobenches = false
description = "Download, embed, and run llama.cpp in your Rust projects"
readme = "README.md"
license = "MIT"
repository = "https://github.com/hecrj/llama-server"

[lib]
name = "llama_server"
path = "src/lib.rs"

[dependencies.bitflags]
version = "2"

[dependencies.directories]
version = "6"

[dependencies.futures]
version = "0.3"

[dependencies.reqwest]
version = "0.13"
features = ["json"]

[dependencies.serde]
version = "1"
features = ["derive"]

[dependencies.sipper]
version = "0.1"

[dependencies.tokio]
version = "1"
features = [
    "rt",
    "fs",
    "io-util",
    "process",
]

[dependencies.zip]
version = "7"

[dev-dependencies.tokio]
version = "1"
features = ["macros"]

[lints.rust]
missing_docs = "deny"
unsafe_code = "deny"
unused_results = "deny"

[lints.rust.rust_2018_idioms]
level = "deny"
priority = -1
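
For reference, a downstream project would pull this crate in with a dependency entry like the one below. The crate name and version requirement come from the manifest above; everything else about the consuming project is assumed.

[dependencies]
llama-server = "0.1"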