# text-tokenizer 0.5.2
#
# Custom text tokenizer documentation
# This file is automatically @generated by Cargo.
# It is not intended for manual editing.
version = 3

[[package]]
name = "lazy_static"
version = "1.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bbd2bcb4c963f2ddae06a2efc7e9f3591312473c50c6685e1f298068316e66fe"

[[package]]
name = "opt_struct"
version = "0.1.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e63e6f4ff92296606bd2534f49e25ec5340e924e1292514da2eefbdd2b4d9591"

[[package]]
name = "text-parsing"
version = "0.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "267d97ddef0f7f34ef00f9a58a1c33b5d60e40e2877bd9de69c464c84db190cf"
dependencies = [
 "opt_struct",
 "unicode-properties",
]

[[package]]
name = "text-tokenizer"
version = "0.5.2"
dependencies = [
 "lazy_static",
 "text-parsing",
 "unicode-properties",
 "unicode-segmentation",
]

[[package]]
name = "unicode-properties"
version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e4259d9d4425d9f0661581b804cb85fe66a4c631cadd8f490d1c13a35d5d9291"

[[package]]
name = "unicode-segmentation"
version = "1.11.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d4c87d22b6e3f4a18d4d40ef354e97c90fcb14dd91d7dc0aa9d8a1172ebf7202"