# great-tokenizer 0.2.0
#
# A tokenizer.
# Documentation: https://docs.rs/great-tokenizer/latest/great_tokenizer/
# Runtime dependencies, sorted alphabetically. Multi-key specs use the
# conventional inline-table form per Cargo style; plain SemVer strings
# get the implicit caret requirement.
[dependencies]
anyhow = { version = "1.0.86", features = ["backtrace", "std"] }
derive_more = { version = "1.0.0", features = ["full"] }
regex = "1.10.6"
serde = "1.0.208"
serde_derive = "1.0.208"
serde_json = "1.0.125"
thiserror = "1.0.63"
unicode-segmentation = "1.11.0"

# Library target. The crate name uses an underscore because Rust
# identifiers cannot contain hyphens; the package name in [package]
# below keeps the kebab-case form.
[lib]
name = "great_tokenizer"
path = "src/lib.rs"

# Package metadata, ordered per the Rust style guide: name and version
# first, remaining keys alphabetical, description last. Target
# auto-discovery is disabled and build-script probing is off, so only
# the explicitly declared [lib] target exists.
[package]
name = "great-tokenizer"
version = "0.2.0"
autobenches = false
autobins = false
autoexamples = false
autotests = false
build = false
documentation = "https://docs.rs/great-tokenizer/latest/great_tokenizer/"
edition = "2021"
homepage = "https://docs.rs/great-tokenizer/latest/great_tokenizer/"
license = "MIT"
readme = "README.md"
repository = "https://github.com/daiyan1145/tokenizer"
description = "A tokenizer"