# Native CLI entry point. Auto-discovery is disabled in [package]
# (autobins = false), so the target is declared explicitly.
[[bin]]
name = "inference-lab"
path = "src/main.rs"
# The binary depends on the optional CLI-only crates (clap, env_logger,
# tabled, colored). Gate it on the `cli` feature so that library-only
# builds (e.g. `--no-default-features` for the wasm cdylib) skip the
# binary instead of failing to compile against missing dependencies.
# NOTE(review): assumes src/main.rs actually uses those crates — confirm.
required-features = ["cli"]
# Runtime dependencies, alphabetical, one inline spec per crate.
# Crates marked `optional = true` are activated by the `cli` feature.
[dependencies]
clap = { version = "4.5", features = ["derive"], optional = true }
colored = { version = "2.1", optional = true }
console_error_panic_hook = "0.1"
env_logger = { version = "0.11", optional = true }
# `js` feature: route entropy through the browser/Node crypto APIs on wasm.
getrandom = { version = "0.2", features = ["js"] }
js-sys = "0.3"
log = "0.4"
ordered-float = "4.0"
rand = "0.8"
rand_distr = "0.4"
serde = { version = "1.0", features = ["derive"] }
serde-wasm-bindgen = "0.6"
serde_json = "1.0"
tabled = { version = "0.16", optional = true }
toml = "0.8"
wasm-bindgen = "0.2"
[features]
# Terminal-facing extras for the native binary: pulls in the four
# optional dependencies declared above. Note the bare names also expose
# implicit per-dependency features (e.g. `clap`); switching to `dep:`
# syntax would remove those from the public feature set.
cli = ["clap", "env_logger", "tabled", "colored"]
# CLI is on by default; build with `--no-default-features` for a
# library-only (e.g. wasm) build.
default = ["cli"]
[lib]
# cdylib: the wasm-bindgen artifact consumed from JavaScript.
# rlib: lets the native [[bin]] target and tests link the crate normally.
crate-type = ["cdylib", "rlib"]
name = "inference_lab"
# Explicit path because target auto-discovery is disabled in [package].
path = "src/lib.rs"
[package]
name = "inference-lab"
version = "0.1.0"
authors = ["Doubleword"]
# All target auto-discovery is switched off; the [[bin]] and [lib]
# targets are declared explicitly elsewhere in this manifest.
autobenches = false
autobins = false
autoexamples = false
autolib = false
autotests = false
build = false
categories = ["simulation", "wasm"]
edition = "2021"
# Keep generated wasm output and npm artifacts out of the published crate.
exclude = ["pkg/", ".npmrc", "node_modules/"]
keywords = ["llm", "inference", "simulator", "performance"]
license = "MIT"
readme = "README.md"
repository = "https://github.com/doublewordai/inference-lab"
description = "High-performance LLM inference simulator for analyzing serving systems"
[profile.release]
# "s": optimize for size rather than speed — presumably to keep the
# shipped wasm cdylib small; confirm speed is not a concern before changing.
opt-level = "s"