# token-count 0.4.0
#
# Count tokens for LLM models using exact tokenization
# Documentation: see the repository README / docs.rs
# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO
#
# When uploading crates to the registry Cargo will automatically
# "normalize" Cargo.toml files for maximal compatibility
# with all versions of Cargo and also rewrite `path` dependencies
# to registry (e.g., crates.io) dependencies.
#
# If you are reading this file be aware that the original Cargo.toml
# will likely look very different (and much more reasonable).
# See Cargo.toml.orig for the original contents.

# Package metadata. The auto* discovery flags are all disabled because every
# target (lib, bin, tests, bench) is declared explicitly further down.
[package]
name = "token-count"
version = "0.4.0"
edition = "2021"
rust-version = "1.85.0"
authors = ["Shaun Burdick <hello@burdick.dev>"]
build = false
autolib = false
autobins = false
autoexamples = false
autotests = false
autobenches = false
homepage = "https://github.com/shaunburdick/token-count"
repository = "https://github.com/shaunburdick/token-count"
license = "MIT"
readme = "README.md"
keywords = ["tokenizer", "llm", "gpt", "cli", "tokens"]
categories = ["command-line-utilities", "text-processing"]
description = "Count tokens for LLM models using exact tokenization"

# Library target, declared explicitly because `autolib = false` in [package].
[lib]
name = "token_count"
path = "src/lib.rs"

# CLI binary target, declared explicitly because `autobins = false`.
[[bin]]
name = "token-count"
path = "src/main.rs"

# Integration test targets, enumerated explicitly because `autotests = false`
# disables Cargo's automatic discovery of files under tests/.
[[test]]
name = "claude_api"
path = "tests/claude_api.rs"

[[test]]
name = "claude_estimation"
path = "tests/claude_estimation.rs"

[[test]]
name = "cli_basic"
path = "tests/cli_basic.rs"

[[test]]
name = "end_to_end"
path = "tests/end_to_end.rs"

[[test]]
name = "error_handling"
path = "tests/error_handling.rs"

[[test]]
name = "file_input"
path = "tests/file_input.rs"

[[test]]
name = "google_tokenization"
path = "tests/google_tokenization.rs"

[[test]]
name = "help_version"
path = "tests/help_version.rs"

[[test]]
name = "input_limits"
path = "tests/input_limits.rs"

[[test]]
name = "model_aliases"
path = "tests/model_aliases.rs"

[[test]]
name = "output_tests"
path = "tests/output_tests.rs"

[[test]]
name = "performance"
path = "tests/performance.rs"

[[test]]
name = "tokenizer_tests"
path = "tests/tokenizer_tests.rs"

[[test]]
name = "verbosity"
path = "tests/verbosity.rs"

# Benchmark target. `harness = false` disables the default libtest harness so
# the benchmark binary supplies its own `main` (Criterion is a dev-dependency).
[[bench]]
name = "tokenization"
path = "benches/tokenization.rs"
harness = false

# Runtime dependencies, collapsed from Cargo's normalized dotted-header form
# into a single [dependencies] table with inline-table specs. Each entry keeps
# the exact same version requirement and feature set as before.
[dependencies]
anyhow = { version = "1.0.102" }
clap = { version = "4.6", features = ["derive"] }
gemini-tokenizer = { version = "0.2.0" }
reqwest = { version = "0.12", features = ["json", "rustls-tls"] }
serde = { version = "1.0.149", features = ["derive"] }
serde_json = { version = "1.0.149" }
strsim = { version = "0.11" }
thiserror = { version = "1.0" }
tiktoken-rs = { version = "0.9.1" }
tokio = { version = "1", features = ["rt", "macros"] }

# Test/bench-only dependencies, in the same compact inline-table form as
# [dependencies]; version requirements and features are unchanged.
[dev-dependencies]
assert_cmd = { version = "2.0" }
criterion = { version = "0.5", features = ["html_reports"] }
mockito = { version = "1.0" }
predicates = { version = "3.0" }
tempfile = { version = "3.0" }

# Release profile tuned for a small, fast final binary at the cost of longer
# compile times.
[profile.release]
opt-level = 3
# Full cross-crate link-time optimization.
lto = "fat"
# Single codegen unit maximizes the optimizer's scope (no parallel codegen).
codegen-units = 1
# Strip debug symbols from the shipped binary to reduce its size.
strip = true