text-tokenizer 0.5.8

Custom text tokenizer
# This file is automatically @generated by Cargo.
# It is not intended for manual editing.
version = 4

[[package]]
name = "lazy_static"
version = "1.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bbd2bcb4c963f2ddae06a2efc7e9f3591312473c50c6685e1f298068316e66fe"

[[package]]
name = "opt_struct"
version = "0.1.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "72315ccdac8169475909cd07e24ad3bcccab97d182643bc1f262f2b2fe6b077b"

[[package]]
name = "text-parsing"
version = "0.6.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a8b59b83a4a147965bb618b2f9cb3147ca48f057d8e27ffe7ee4b1eb42e7ece1"
dependencies = [
 "opt_struct",
 "unicode-properties",
]

[[package]]
name = "text-tokenizer"
version = "0.5.8"
dependencies = [
 "lazy_static",
 "text-parsing",
 "unicode-properties",
 "unicode-segmentation",
]

[[package]]
name = "unicode-properties"
version = "0.1.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e70f2a8b45122e719eb623c01822704c4e0907e7e426a05927e1a1cfff5b75d0"

[[package]]
name = "unicode-segmentation"
version = "1.12.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f6ccf251212114b54433ec949fd6a7841275f9ada20dddd2f29e9ceea4501493"