rust_tokenizers 6.2.3

High-performance tokenizers for Rust
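A minimal usage sketch (a hypothetical example, not taken from the crate's documentation; `vocab.txt` is a placeholder path you must supply, and `anyhow` — one of this crate's own dev-dependencies — is used here only for brevity of error handling):

    use rust_tokenizers::tokenizer::{BertTokenizer, Tokenizer, TruncationStrategy};

    fn main() -> anyhow::Result<()> {
        // Load a BERT WordPiece vocabulary from a local file.
        // Arguments: vocab path, lower_case, strip_accents.
        let tokenizer = BertTokenizer::from_file("vocab.txt", true, true)?;

        // Encode a single sequence, truncating to at most 128 tokens.
        let input = tokenizer.encode(
            "High-performance tokenizers for Rust",
            None,
            128,
            &TruncationStrategy::LongestFirst,
            0,
        );
        println!("{:?}", input.token_ids);
        Ok(())
    }

The Cargo.toml manifest for this release follows.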
[build-dependencies.protobuf-codegen-pure]
optional = true
version = "2.22.1"

[dependencies.csv]
version = "1.1.6"

[dependencies.hashbrown]
version = "0.9.1"

[dependencies.itertools]
version = "0.10.0"

[dependencies.lazy_static]
version = "1.4.0"

[dependencies.protobuf]
version = "= 2.22.1"

[dependencies.rayon]
version = "1.5.0"

[dependencies.regex]
version = "1.4.5"

[dependencies.serde]
features = ["derive"]
version = "1.0.125"

[dependencies.serde_json]
version = "1.0.64"

[dependencies.thiserror]
version = "1.0.24"

[dependencies.unicode-normalization]
version = "0.1.17"

[dependencies.unicode-normalization-alignments]
version = "0.1.12"

[dev-dependencies.anyhow]
version = "1.0.38"

[dev-dependencies.dirs]
version = "3.0.1"

[dev-dependencies.reqwest]
features = ["blocking"]
version = "0.11.2"

[dev-dependencies.tempfile]
version = "3.2.0"

[features]
proto-compile = ["protobuf-codegen-pure"]
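The proto-compile feature enables the optional protobuf-codegen-pure build dependency, which `build = "build.rs"` below can then use to regenerate protobuf bindings; the exact pin `protobuf = "= 2.22.1"` keeps the runtime crate in lockstep with the code generator, as the two must match. A hedged sketch of how such a feature gate typically looks in a build script — the proto file and output paths are assumptions, not the crate's actual layout:

    // build.rs (sketch): run codegen only when `proto-compile` is active.
    #[cfg(feature = "proto-compile")]
    fn compile_protos() {
        protobuf_codegen_pure::Codegen::new()
            .out_dir("src/generated")                      // assumed output dir
            .inputs(&["proto/sentencepiece_model.proto"])  // assumed proto file
            .include("proto")                              // assumed include dir
            .run()
            .expect("protobuf code generation failed");
    }

    // No-op when the feature (and thus the optional dependency) is absent.
    #[cfg(not(feature = "proto-compile"))]
    fn compile_protos() {}

    fn main() {
        compile_protos();
    }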

[lib]
crate-type = ["lib"]
name = "rust_tokenizers"
path = "src/lib.rs"

[package]
authors = ["Guillaume Becquin <guillaume.becquin@gmail.com>"]
build = "build.rs"
description = "High performance tokenizers for Rust"
edition = "2018"
keywords = ["nlp", "machine-learning", "tokenizer"]
license = "Apache-2.0"
name = "rust_tokenizers"
readme = "README.md"
repository = "https://github.com/guillaume-be/rust-tokenizers"
version = "6.2.3"