// tract_core/ops/matmul/de_block_quant.rs

use tract_linalg::block_quant::{BlockQuant, BlockQuantFact, BlockQuantValue, Q4_0};

use crate::internal::*;
use crate::ops::einsum::einsum_matmul::EinSumMatMul;
use crate::ops::konst::Const;
use crate::transform::ModelTransform;

/// Model transform that rewrites constant, rank-2 einsum/matmul weights into
/// block-quantized (Q4_0) opaque constants so downstream kernels can consume
/// them directly. Stateless marker type; all logic lives in the rewrite rule.
#[derive(Debug)]
pub struct BlockQuantTransform;

11impl ModelTransform for BlockQuantTransform {
12    fn name(&self) -> StaticName {
13        "BlockQuantTransform".into()
14    }
15
16    fn transform(&self, model: &mut TypedModel) -> TractResult<()> {
17        crate::ops::einsum::einsum_matmul::detect_all(model)?;
18        Rewriter::<()>::default()
19            .with_rule_for("block_quant_einsum_weights", block_quant_einsum_weights)
20            .rewrite(&(), model)?;
21        crate::ops::einsum::einsum_matmul::flatten_all(model)?;
22        Ok(())
23    }
24}
25
26fn block_quant_einsum_weights(
27    _ctx: &(),
28    model: &TypedModel,
29    node: &TypedNode,
30    prefix: &str,
31    op: &EinSumMatMul,
32) -> TractResult<Option<TypedModelPatch>> {
33    if node.inputs.len() != 2 {
34        return Ok(None);
35    }
36    for (slot, fact) in model.node_input_facts(node.id)?.iter().enumerate() {
37        let Some(a) = fact.konst.as_ref() else { continue };
38        if a.rank() != 2 {
39            continue;
40        };
41        if op.k_axis().inputs[slot][0] == 0 {
42            let mut patch = TypedModelPatch::default();
43            let mut taps = patch.taps(model, &node.inputs)?;
44            taps[slot] = patch.wire_node(
45                format!("{}.t_{}", &node.name, slot),
46                AxisOp::Move(1, 0),
47                &[taps[slot]],
48            )?[0];
49            let mut new_op = op.clone();
50            new_op.op.axes = op
51                .op
52                .axes
53                .clone()
54                .remove_axis_occurency(InOut::In(slot), 0)?
55                .with_extra_axis_occurency(op.k_axis, InOut::In(slot), 1)?;
56            let output = patch.wire_node(prefix, new_op, &taps)?;
57            patch.shunt_outside(model, node.id.into(), output[0])?;
58            return Ok(Some(patch));
59        }
60        let format = Q4_0;
61        let mut patch = TypedModelPatch::default();
62        let weights = if a.datum_type() == f16::datum_type() {
63            format.quant_f16(a.as_slice::<f16>()?)?
64        } else {
65            format.quant_f32(a.cast_to::<f32>()?.as_slice::<f32>()?)?
66        };
67        let name = &model.node(node.inputs[0].node).name;
68        let fact = BlockQuantFact::new(Box::new(format), a.shape().into());
69        let value = BlockQuantValue { fact: fact.clone(), value: Arc::new(weights) };
70        let weights = patch.wire_node(
71            format!("{name}.bq"),
72            Const::new_with_opaque_fact(rctensor0(Opaque(Arc::new(value))), Box::new(fact))?,
73            &[],
74        )?;
75        let tap = patch.tap_model(model, node.inputs[1])?;
76        let wire = patch.wire_node(prefix, op.op.clone(), &[weights[0], tap])?;
77        patch.shunt_outside(model, node.id.into(), wire[0])?;
78        return Ok(Some(patch));
79    }
80    Ok(None)
81}