# minillm 0.1.1
#
# A mini inference engine for running transformer language models
# Documentation: https://docs.rs/minillm
[[bin]]
name = "minillm"
path = "src/main.rs"

[dependencies.dotenv]
version = "0.15.0"

[dependencies.hf-hub]
version = "0.4.3"

[dependencies.ndarray]
version = "0.16.1"

[dependencies.safetensors]
version = "0.6.2"

[dependencies.serde]
features = ["derive"]
version = "1.0.225"

[dependencies.serde_json]
version = "1.0.145"

[dependencies.tokenizers]
features = ["http"]
version = "0.22.1"

[[example]]
name = "basic_generation"
path = "examples/basic_generation.rs"

[[example]]
name = "interactive_chat"
path = "examples/interactive_chat.rs"

[[example]]
name = "tokenization"
path = "examples/tokenization.rs"

[lib]
name = "minillm"
path = "src/lib.rs"

[package]
authors = ["BM Monjur Morshed"]
autobenches = false
autobins = false
autoexamples = false
autolib = false
autotests = false
build = false
categories = ["algorithms", "science"]
description = "A mini inference engine for running transformer language models"
documentation = "https://docs.rs/minillm"
edition = "2021"
exclude = ["target/", ".git/", ".gitignore"]
homepage = "https://github.com/bmqube/minillm"
keywords = ["llm", "transformer", "inference", "gpt", "ai"]
license = "MIT"
name = "minillm"
readme = "README.md"
repository = "https://github.com/bmqube/minillm"
version = "0.1.1"