// tract_transformers/lib.rs
1pub mod ops;
2mod rewriter;
3use std::collections::HashSet;
4
5use rewriter::*;
6use tract_nnef::internal::*;
7
// Register each transformer-specific graph rewrite so it can be selected by
// name (e.g. from the CLI or the transform API). The `detect_*` transforms
// pattern-match generic graphs into the fused ops below; `unfold_kv_cache`
// reverses the KV-cache fusion; `transformers_detect_all` presumably bundles
// the detection passes — TODO confirm against `rewriter` module.
register_simple_model_transform!("detect_apply_rope", ApplyRopeTransform);
register_simple_model_transform!("detect_scaled_masked_softmax", ScaledMaskedSoftmaxTransform);
register_simple_model_transform!("detect_kv_cache", KeyValueCacheTransform);
register_simple_model_transform!(
    "detect_sdpa_kv_cache_broadcast",
    SdpaFuseKvCacheBroadcastTransform
);
register_simple_model_transform!("unfold_kv_cache", UnfoldKeyValueCacheTransform);
register_simple_model_transform!("transformers_detect_all", TransformersTransform);
17
/// Registers every tract-transformers operator (apply_rope,
/// scaled_masked_softmax, sdpa, dyn_kv_cache) into `registry` so NNEF
/// models using the `tract_transformers` extension can be (de)serialized.
pub fn register(registry: &mut Registry) {
    ops::apply_rope::register(registry);
    ops::scaled_masked_softmax::register(registry);
    ops::sdpa::register(registry);
    ops::dyn_kv_cache::register(registry);
}
24
/// Extension trait adding tract-transformers support to an NNEF framework
/// instance, mirroring the `enable_…`/`with_…` pair used elsewhere in tract.
pub trait WithTractTransformers {
    /// Enables the tract-transformers registry in place.
    fn enable_tract_transformers(&mut self);
    /// Builder-style variant: consumes `self` and returns it with the
    /// tract-transformers registry enabled.
    fn with_tract_transformers(self) -> Self;
}
29
impl WithTractTransformers for tract_nnef::framework::Nnef {
    fn enable_tract_transformers(&mut self) {
        // The transformers registry builds on tract-core ops, so make sure
        // the core registry is enabled first.
        self.enable_tract_core();
        self.registries.push(tract_transformers_registry());
    }

    // Consuming wrapper over `enable_tract_transformers` for builder chains.
    fn with_tract_transformers(mut self) -> Self {
        self.enable_tract_transformers();
        self
    }
}
41
42pub fn tract_transformers_registry() -> Registry {
43    let mut reg = Registry::new("tract_transformers")
44        .with_doc("Extension `tract_transformers` extends NNEF with operators")
45        .with_doc("for transformer networks.")
46        .with_doc("")
47        .with_doc("Add `extension tract_transformers` to `graph.nnef`");
48
49    register(&mut reg);
50    reg
51}
52
/// Infers the (Batch, Sequence, Past-sequence) symbols of a causal-LLM model
/// from its input shapes: B appears in both token and KV-cache shapes, S only
/// in the token shape, P only in the KV-cache shape.
///
/// Returns `(b, s, p)`, each `None` when the corresponding symbol could not
/// be identified. Errors if no integer-typed (token) input exists.
pub fn figure_out_causal_llm_b_s_p(
    model: &TypedModel,
) -> TractResult<(Option<Symbol>, Option<Symbol>, Option<Symbol>)> {
    // expectations:
    // - one input is for tokens, so integer dt (i64 ?) and typically of shape S or 1,S, or B,S
    // - other inputs are kv cache, some kind of float. shape features both S and P, and B if B is present in tokens
    let token_input = model
        .inputs
        .iter()
        .position(|i| model.outlet_fact(*i).unwrap().datum_type.is_integer())
        .context("No token input found")?;
    // Symbols occurring anywhere in the token input's shape (via its volume).
    let tokens_symbols = model.input_fact(token_input)?.shape.volume().symbols();
    // Prefer the first float input as the KV-cache representative.
    // NOTE(review): assumes the first float input is a KV-cache tensor —
    // a model with other float inputs could mislead this; confirm upstream.
    let kv_symbols = if let Some(kv_input) =
        model.inputs.iter().position(|i| model.outlet_fact(*i).unwrap().datum_type.is_float())
    {
        model.input_fact(kv_input)?.shape.volume().symbols()
    } else {
        // Look for KVCache Op
        // No float input: the cache must be internal state. Scan nodes for the
        // first stateful op exposing an initial tensor fact and use its shape
        // symbols. Assumes all cache states share the same symbols — only the
        // first match is used.
        let dummy_session_state = TurnState::default();
        let mut symbols = HashSet::new();
        for node in &model.nodes {
            if let Some((_, fact)) =
                node.op.state(&dummy_session_state, 0)?.and_then(|state| state.init_tensor_fact())
            {
                symbols = fact.shape.volume().symbols();
                break;
            }
        }
        symbols
    };

    // Set algebra on the two symbol sets:
    //   B = tokens ∩ kv, S = tokens \ B, P = kv \ B.
    let b = tokens_symbols.intersection(&kv_symbols).cloned().collect::<HashSet<_>>();
    let s = tokens_symbols.difference(&b).cloned().collect::<HashSet<_>>();
    let p = kv_symbols.difference(&b).cloned().collect::<HashSet<_>>();
    // If a set has several candidates, an arbitrary one is returned
    // (HashSet iteration order) — presumably each set has at most one element.
    Ok((b.into_iter().next(), s.into_iter().next(), p.into_iter().next()))
}
89
90pub fn memory_arena_hints_for_causal_llm(model: &TypedModel) -> TractResult<SymbolValues> {
91    let (b, s, p) = figure_out_causal_llm_b_s_p(model)?;
92    let mut values = SymbolValues::default()
93        .with(&s.context("Could not determine sequence_len (S)")?, 1024)
94        .with(&p.context("Could not determine past_sequence_len (P)")?, 0);
95    if let Some(b) = b {
96        values = values.with(&b, 1);
97    }
98    Ok(values)
99}