# llama-cpp-v3 0.1.6
#
# Safe and ergonomic Rust wrapper for llama.cpp with dynamic loading.
# Documentation
# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO
#
# When uploading crates to the registry Cargo will automatically
# "normalize" Cargo.toml files for maximal compatibility
# with all versions of Cargo and also rewrite `path` dependencies
# to registry (e.g., crates.io) dependencies.
#
# If you are reading this file be aware that the original Cargo.toml
# will likely look very different (and much more reasonable).
# See Cargo.toml.orig for the original contents.

# Crate metadata. Auto-discovery of targets is disabled (this is a
# Cargo-normalized manifest); every target is declared explicitly below.
[package]
name = "llama-cpp-v3"
version = "0.1.6"
autobenches = false
autobins = false
autoexamples = false
autolib = false
autotests = false
build = false
edition = "2021"
license = "MIT"
readme = "README.md"
repository = "https://github.com/enlila/llama-cpp-v3"
description = "Safe and ergonomic Rust wrapper for llama.cpp with dynamic loading"

# Library target, declared explicitly because autolib is disabled above.
[lib]
name = "llama_cpp_v3"
path = "src/lib.rs"

# Binary target (autobins is disabled, so each binary is listed explicitly).
[[bin]]
name = "chat_template"
path = "src/bin/chat_template.rs"

# Binary target (autobins is disabled, so each binary is listed explicitly).
[[bin]]
name = "simple"
path = "src/bin/simple.rs"

# Dependencies, sorted alphabetically. Plain SemVer strings use Cargo's
# implicit-caret semantics; multi-key specs use inline tables.
[dependencies]
dirs = "5"
llama-cpp-sys-v3 = "0.1.6"
serde_json = "1.0.149"
thiserror = "2"
ureq = { version = "3", features = ["json"] }
zip = "2"