# qtransformers-core 0.1.0
#
# Quantum-inspired attention mechanisms for transformer models.
# Documentation: https://docs.rs/qtransformers-core
[build-dependencies]
# Emits the cfg flags/link args pyo3 needs at compile time.
pyo3-build-config = "0.22"

[dependencies]
ndarray = "0.15"
numpy = "0.22"
# NOTE(review): pyo3's `extension-module` (build a Python extension) and
# `auto-initialize` (embed an interpreter) features are normally mutually
# exclusive — confirm this combination actually builds.
pyo3 = { version = "0.22", features = ["extension-module", "auto-initialize"] }
rand = "0.8"
rayon = "1.7"

[lib]
# `cdylib` produces the loadable Python extension module; `rlib` presumably
# keeps the crate linkable from Rust (tests/benches) — confirm both are needed.
crate-type = ["cdylib", "rlib"]
# Python module name: underscores, since `-` is not valid in a Python import.
name = "qtransformers_core"
path = "src/lib.rs"

[package]
# Cargo style: `name` then `version` first, remaining keys alphabetical,
# `description` last.
name = "qtransformers-core"
version = "0.1.0"
authors = ["Lokesh Kumar <lkumar94@gmail.com>"]
# Target auto-discovery is disabled and `build = false`; this matches the
# normalized manifest layout `cargo publish` emits — TODO confirm this file
# is meant to be hand-edited at all.
autobenches = false
autobins = false
autoexamples = false
autolib = false
autotests = false
build = false
categories = ["science", "algorithms"]
documentation = "https://docs.rs/qtransformers-core"
edition = "2021"
homepage = "https://github.com/kumarlokesh/q-transformers"
# crates.io allows at most five keywords; all five slots are used.
keywords = ["quantum", "attention", "transformer", "machine-learning", "nlp"]
license = "MIT"
readme = "README.md"
repository = "https://github.com/kumarlokesh/q-transformers"
description = "Quantum-inspired attention mechanisms for transformer models."