[package]
edition = "2021"
name = "rusty-genius-cortex"
version = "0.1.2"
authors = ["Timothy Meade"]
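# No build script; target auto-discovery is disabled, so the only target is the
# explicit [lib] entry below.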
build = false
publish = true
autolib = false
autobins = false
autoexamples = false
autotests = false
autobenches = false
description = "Inference engine interaction layer for rusty-genius"
readme = "README.md"
keywords = [
    "ai",
    "llm",
    "llama-cpp",
    "gguf",
    "inference",
]
categories = [
    "science",
    "algorithms",
    "asynchronous",
]
license = "MIT"
repository = "https://github.com/tmzt/rusty-genius"
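
# `real-engine` activates the optional `llama-cpp-2` dependency; each GPU backend
# feature forwards to the matching llama-cpp-2 backend feature and implies
# `real-engine`. With no features enabled the crate builds without llama.cpp.
# Typical builds (backend availability depends on the host toolchain):
#
#   cargo build --features cuda          # NVIDIA GPUs
#   cargo build --features metal         # Apple GPUs
#   cargo build --features vulkan        # Vulkan-capable GPUs
#   cargo build --features real-engine   # llama.cpp with its default (CPU) backend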
[features]
cuda = [
    "llama-cpp-2/cuda",
    "real-engine",
]
default = []
metal = [
    "llama-cpp-2/metal",
    "real-engine",
]
real-engine = ["dep:llama-cpp-2"]
vulkan = [
    "llama-cpp-2/vulkan",
    "real-engine",
]
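
# Explicit library target; the name is the package name with hyphens converted
# to underscores, matching Cargo's default.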
[lib]
name = "rusty_genius_cortex"
path = "src/lib.rs"

[dependencies.anyhow]
version = "1.0"

[dependencies.async-std]
version = "1.12"

[dependencies.async-trait]
version = "0.1"

[dependencies.futures]
version = "0.3"
[dependencies.llama-cpp-2]
version = "=0.1.132"
features = ["sampler"]
optional = true

[dependencies.rusty-genius-core]
version = "0.1.2"