# rust_tokenizers 1.0.0
#
# High performance tokenizers for Rust.
# Documentation: see README.md and the repository linked below.
# Crate metadata. Key order follows Cargo convention:
# `name`, `version` first, remaining keys alphabetical, `description` last.
[package]
name = "rust_tokenizers"
version = "1.0.0"
authors = ["Guillaume Becquin <guillaume.becquin@gmail.com>"]
edition = "2018"
license = "Apache-2.0"
readme = "README.md"
repository = "https://github.com/guillaume-be/rust-tokenizers"
description = "High performance tokenizers for Rust"



# Runtime dependencies, sorted alphabetically per Cargo convention.
# Versions are plain SemVer requirements (implicit caret).
[dependencies]
csv = "1.1"
itertools = "0.8.2"
lazy_static = "1.4.0"
rayon = "1.2.1"
regex = "1.3.1"
serde_json = "1.0.44"
unicode-normalization = "0.1.8"



# Test-only dependencies (not shipped with the library).
[dev-dependencies]
tempfile = "3.1.0"



# Library target. NOTE(review): `name`, `path`, and `crate-type` all match
# Cargo's defaults for this package, so this section is kept for explicitness.
[lib]
name = "rust_tokenizers"
path = "src/lib.rs"
crate-type = ["lib"]



# Companion binary target built alongside the library.
[[bin]]
name = "rust_tokenizers_bin"
path = "src/main.rs"