# text-tokenizer 0.6.4: Custom text tokenizer
# This file is automatically @generated by Cargo.
# It is not intended for manual editing.
version = 4

[[package]]
name = "lazy_static"
version = "1.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bbd2bcb4c963f2ddae06a2efc7e9f3591312473c50c6685e1f298068316e66fe"

[[package]]
name = "opt_struct"
version = "0.1.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6558f38f6119d51108988a3754de1373b1b7366081f358e70abf12828a535243"

[[package]]
name = "text-parsing"
version = "0.6.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bb908cc87ad92e6f2ff1de383a7933d7772e2b179359062f9de3c114cef6e428"
dependencies = [
 "opt_struct",
 "unicode-properties",
]

[[package]]
name = "text-tokenizer"
version = "0.6.4"
dependencies = [
 "lazy_static",
 "text-parsing",
 "unicode-properties",
 "unicode-segmentation",
]
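# The package above is the workspace's own crate: it carries no `source` or
# `checksum` fields because Cargo builds it from the local path rather than
# fetching it from the crates.io registry.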

[[package]]
name = "unicode-properties"
version = "0.1.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e70f2a8b45122e719eb623c01822704c4e0907e7e426a05927e1a1cfff5b75d0"

[[package]]
name = "unicode-segmentation"
version = "1.12.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f6ccf251212114b54433ec949fd6a7841275f9ada20dddd2f29e9ceea4501493"
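
# A minimal sketch (an assumption; the real manifest lives in Cargo.toml, not
# in this generated file) of the [dependencies] section that would resolve to
# the registry packages pinned above. Only the crate names and resolved
# versions come from this lock file; the version requirements are illustrative:
#
# [dependencies]
# lazy_static = "1.5"
# text-parsing = "0.6"
# unicode-properties = "0.1"
# unicode-segmentation = "1.12"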