//! qudit_tensor — crate root (`lib.rs`).

1mod bytecode;
2mod cpu;
3mod network;
4mod tree;
5
6pub use bytecode::Bytecode;
7pub use cpu::PinnedTNVM;
8pub use cpu::TNVM;
9pub use cpu::TNVMResult;
10pub use cpu::TNVMReturnType;
11pub use network::QuditCircuitTensorNetworkBuilder;
12pub use network::QuditTensor;
13pub use network::QuditTensorNetwork;
15pub fn compile_network(network: QuditTensorNetwork) -> Bytecode {
16    let optimal_path = network.solve_for_path();
17
18    // println!("{:?}", optimal_path);
19    let tree = network.path_to_ttgt_tree(optimal_path);
20    // println!("{:?}", tree);
21    crate::bytecode::BytecodeGenerator::new().generate(tree)
22}
#[cfg(test)]
mod tests {
    use crate::bytecode::BytecodeGenerator;
    use qudit_core::{ParamInfo, Radices};
    use qudit_expr::GRADIENT;
    use qudit_expr::TensorExpression;

    use super::*;

    /// End-to-end smoke test: builds a two-qubit network (a ZZ-parity
    /// tensor followed by a classically controlled U3), compiles it to
    /// bytecode, and evaluates both the function and gradient passes.
    /// No value assertions — it checks the pipeline runs to completion.
    #[test]
    fn test_projective_measurement() {
        // Single-qubit U3 rotation, parameterized by (a, b, c).
        let u3 = TensorExpression::new(
            "U3(a, b, c) {
            [
                [cos(a/2), ~e^(c*i)*sin(a/2)],
                [e^(b*i)*sin(a/2), e^(i*(b+c))*cos(a/2)],
            ]
        }",
        );
        // U3 on one branch of a size-2 control index, identity elsewhere
        // (presumably modeling a classically controlled gate — the exact
        // stacking convention lives in `stack_with_identity`).
        let classically_controlled_u3 = u3.stack_with_identity(&[1], 2);
        // Rank-3 tensor whose leading index selects the even-parity
        // (|00>, |11>) or odd-parity (|01>, |10>) projector on two qubits.
        let zz = TensorExpression::new(
            "ZZParity() {
            [
                [
                    [ 1, 0, 0, 0 ], 
                    [ 0, 0, 0, 0 ],
                    [ 0, 0, 0, 0 ],
                    [ 0, 0, 0, 1 ],
                ],
                [
                    [ 0, 0, 0, 0 ], 
                    [ 0, 1, 0, 0 ],
                    [ 0, 0, 1, 0 ],
                    [ 0, 0, 0, 0 ],
                ],
            ]
        }",
        );

        // Two-qubit network: the parity tensor on wires {0, 1} and the
        // controlled U3 (parameters 0..=2) on wire 0, both sharing the
        // classical output wire "a".
        let network = QuditCircuitTensorNetworkBuilder::new(Radices::new([2, 2]), None)
            .prepend_expression(
                zz,
                ParamInfo::empty(),
                vec![0, 1],
                vec![0, 1],
                vec!["a".to_string()],
            )
            .prepend_expression(
                classically_controlled_u3,
                ParamInfo::parameterized(vec![0, 1, 2]),
                vec![0],
                vec![0],
                vec!["a".to_string()],
            )
            .build();

        // Mirrors `compile_network`, kept inline so the intermediates
        // can be printed when the test fails.
        let optimal_path = network.solve_for_path();
        println!("Optimal Path: {:?}", optimal_path.path);
        let tree = network.path_to_ttgt_tree(optimal_path);
        println!("Expression Tree: {:?}", tree);
        let code = BytecodeGenerator::new().generate(tree);
        println!("Bytecode: \n{:?}", code);

        // Evaluate function + gradient at an arbitrary parameter point.
        let params = [1.7, 1.7, 1.7];
        let mut tnvm = TNVM::<qudit_core::c64, GRADIENT>::new(&code, None);
        let out = tnvm.evaluate::<GRADIENT>(&params);
        let out_fn = out.get_fn_result().unpack_tensor3d();
        println!("{:?}", out_fn);
        let out_grad = out.get_grad_result().unpack_tensor4d();
        println!("{:?}", out_grad);
    }
}
104}