# rust_tokenizers 8.1.0
#
# High performance tokenizers for Rust
# Documentation
[build-dependencies.protobuf-codegen-pure]
optional = true
version = "2"

[dependencies.csv]
version = "1"

[dependencies.hashbrown]
version = "0.13"

[dependencies.itertools]
version = "0.10"

[dependencies.lazy_static]
version = "1"

[dependencies.protobuf]
version = "2"

[dependencies.rayon]
version = "1"

[dependencies.regex]
version = "1"

[dependencies.serde]
features = ["derive"]
version = "1"

[dependencies.serde_json]
version = "1"

[dependencies.thiserror]
version = "1"

[dependencies.unicode-normalization]
version = "0.1"

[dependencies.unicode-normalization-alignments]
version = "0.1.12"

[dev-dependencies.anyhow]
version = "1"

[dev-dependencies.cached-path]
version = "0.6"

[dev-dependencies.dirs]
version = "4"

[dev-dependencies.tempfile]
version = "3"

[features]
proto-compile = ["protobuf-codegen-pure"]

[lib]
crate-type = ["lib"]
name = "rust_tokenizers"
path = "src/lib.rs"

[package]
authors = ["Guillaume Becquin <guillaume.becquin@gmail.com>"]
build = "build.rs"
description = "High performance tokenizers for Rust"
edition = "2018"
keywords = ["nlp", "machine-learning", "tokenizer"]
license = "Apache-2.0"
name = "rust_tokenizers"
readme = "README.md"
repository = "https://github.com/guillaume-be/rust-tokenizers"
version = "8.1.0"