pub struct Graph { /* private fields */ }Expand description
A minimal compute-graph container for an ML runtime intermediate representation (IR).
A Graph owns a set of Nodes indexed by NodeId. Each node encodes:
- an operation kind (OpKind),
- a list of input dependencies (by NodeId), and
- the inferred output tensor shape.
This type currently supports constructing a graph via:
- Graph::input_node for source nodes, and
- op constructors like Graph::add, Graph::matmul, and Graph::relu.
Output nodes must be designated explicitly via Graph::set_output_node.
§Examples
let mut g = Graph::new();
let x = g.input_node(vec![2, 3]);
let y = g.relu(x).unwrap();
g.set_output_node(y).unwrap();
assert_eq!(g.outputs().len(), 1);
Implementations§
Source§impl Graph
impl Graph
Sourcepub fn new() -> Self
pub fn new() -> Self
Creates an empty graph with no nodes, inputs, or outputs.
§Examples
let g = Graph::new();
assert_eq!(g.num_nodes(), 0);
assert!(g.inputs().is_empty());
assert!(g.outputs().is_empty());Examples found in repository?
78fn main() {
79 // Build a tiny graph:
80 //
81 // out = add(a, b)
82 //
83 // The graph describes *what* should be computed, not how.
84 let mut graph = Graph::new();
85
86 let a = graph.input_node(vec![2, 2]);
87 let b = graph.input_node(vec![2, 2]);
88 let out = graph
89 .add(a, b)
90 .expect("Adding valid input nodes should succeed");
91
92 graph
93 .set_output_node(out)
94 .expect("Setting output node should succeed");
95
96 // Create a custom registry.
97 //
98 // Start from an empty registry and explicitly register only the kernel(s)
99 // needed by this graph.
100 let mut registry = KernelRegistry::new();
101
102 // Register our custom Add kernel.
103 //
104 // `register(...)` returns the previous mapping if one existed.
105 let old = registry.register(OpKind::Add, Box::new(CustomAddKernel));
106 assert!(
107 old.is_none(),
108 "First Add registration should not replace an existing kernel"
109 );
110
111 // Construct the executor with the custom registry.
112 let exec = Executor::new(registry);
113
114 // Bind runtime inputs.
115 //
116 // These are ordinary tensors supplied for the graph input nodes.
117 let a_tensor = Tensor::from_vec(vec![2, 2], vec![1.0, 2.0, 3.0, 4.0])
118 .expect("Tensor construction should succeed");
119 let b_tensor = Tensor::from_vec(vec![2, 2], vec![10.0, 20.0, 30.0, 40.0])
120 .expect("Tensor construction should succeed");
121
122 // Execute the graph.
123 //
124 // During execution:
125 // - the executor validates input bindings,
126 // - walks the graph in topological order,
127 // - sees an `OpKind::Add` node,
128 // - looks up `OpKind::Add` in the registry,
129 // - dispatches to `CustomAddKernel::compute(...)`.
130 let outputs = exec
131 .execute(&graph, vec![(a, a_tensor), (b, b_tensor)])
132 .expect("Execution should succeed");
133
134 let result = outputs
135 .get(&out)
136 .expect("Declared output should be present in executor results");
137
138 println!("Computed output for node {:?}: {:?}", out, result);
139}More examples
46fn main() {
47 let mut graph = Graph::new();
48
49 // Declare graph inputs.
50 //
51 // The shapes here establish the legal runtime tensor shapes:
52 // a: [2, 3]
53 // b: [3, 2]
54 // c: [2, 2]
55 let a = graph.input_node(vec![2, 3]);
56 let b = graph.input_node(vec![3, 2]);
57 let c = graph.input_node(vec![2, 2]);
58
59 // Build intermediate operations.
60 //
61 // `relu(a)` preserves shape [2, 3].
62 let ra = graph.relu(a).expect("Valid ReLU operation should succeed");
63
64 // `relu(b)` preserves shape [3, 2].
65 let rb = graph.relu(b).expect("Valid ReLU operation should succeed");
66
67 // `matmul(ra, rb)` combines [2, 3] x [3, 2] -> [2, 2].
68 //
69 // This is a good example of graph-level validation preventing malformed graphs
70 // before execution ever begins.
71 let mm = graph
72 .matmul(ra, rb)
73 .expect("Valid matmul operation should succeed");
74
75 // `add(mm, c)` adds two [2, 2] tensors and also yields [2, 2].
76 let out = graph
77 .add(mm, c)
78 .expect("Valid add operation should succeed");
79
80 graph
81 .set_output_node(out)
82 .expect("Setting output node should succeed");
83
84 // Bind concrete runtime values.
85 //
86 // These values are only examples; the graph structure is independent of them.
87 // The same graph can be executed many times with different input tensors.
88 let a_tensor = Tensor::from_vec(vec![2, 3], vec![-1.0, 2.0, -3.0, 4.0, -5.0, 6.0])
89 .expect("Tensor construction should succeed");
90
91 let b_tensor = Tensor::from_vec(vec![3, 2], vec![-7.0, 8.0, 9.0, -10.0, 11.0, 12.0])
92 .expect("Tensor construction should succeed");
93
94 let c_tensor = Tensor::from_vec(vec![2, 2], vec![0.5, 1.5, 2.5, 3.5])
95 .expect("Tensor construction should succeed");
96
97 let exec = Executor::new(KernelRegistry::default());
98
99 let outputs = exec
100 .execute(&graph, vec![(a, a_tensor), (b, b_tensor), (c, c_tensor)])
101 .expect("Execution should succeed");
102
103 let result = outputs
104 .get(&out)
105 .expect("Declared output should be present in executor results");
106
107 println!("Computed output for node {:?}: {:?}", out, result);
108}29fn main() {
30 // Build the graph:
31 //
32 // a ----\
33 // add ---> out
34 // b ----/
35 //
36 // Here `a` and `b` are graph input nodes. They do not yet have runtime values;
37 // they only declare that the graph expects tensors of shape [2, 2].
38 let mut graph = Graph::new();
39
40 let a = graph.input_node(vec![2, 2]);
41 let b = graph.input_node(vec![2, 2]);
42
43 // Add an operation node.
44 //
45 // `graph.add(a, b)` does not perform arithmetic immediately. It adds a new node to
46 // the graph describing a future Add operation whose inputs are `a` and `b`.
47 //
48 // Shape validation happens here at graph-construction time. Since both inputs are
49 // [2, 2], the resulting Add node is also [2, 2].
50 let out = graph
51 .add(a, b)
52 .expect("Adding valid input nodes should succeed");
53
54 // Mark the node as an output. The executor will return a tensor for every node
55 // designated as a graph output.
56 graph
57 .set_output_node(out)
58 .expect("Setting output node should succeed");
59
60 // Create runtime tensors for the graph input nodes.
61 //
62 // These must match the shapes declared by the corresponding input nodes.
63 let a_tensor = Tensor::from_vec(vec![2, 2], vec![1.0, 2.0, 3.0, 4.0])
64 .expect("Tensor construction should succeed");
65 let b_tensor = Tensor::from_vec(vec![2, 2], vec![10.0, 20.0, 30.0, 40.0])
66 .expect("Tensor construction should succeed");
67
68 // Construct an executor with the default kernel registry.
69 //
70 // The registry determines which kernel implementation is used for each `OpKind`.
71 // In this example, the default registry is expected to contain an Add kernel.
72 let exec = Executor::new(KernelRegistry::default());
73
74 // Execute the graph.
75 //
76 // The bindings are `(NodeId, Tensor)` pairs. Each input node in the graph must be
77 // bound exactly once at runtime.
78 //
79 // Internally, the executor:
80 // 1. validates the bindings,
81 // 2. topologically orders the graph,
82 // 3. executes non-input nodes using registered kernels,
83 // 4. returns tensors for all declared output nodes.
84 let outputs = exec
85 .execute(&graph, vec![(a, a_tensor), (b, b_tensor)])
86 .expect("Execution should succeed");
87
88 let result = outputs
89 .get(&out)
90 .expect("Declared output should be present in executor results");
91
92 println!("Computed output for node {:?}: {:?}", out, result);
93}46fn main() {
47 let mut graph = Graph::new();
48
49 let input_width = 3;
50 let hidden_widths = [2, 2];
51 let output_width = 2;
52 let shape = vec![1, 4];
53
54 // Create the input layer.
55 //
56 // Each input node declares that execution must provide a [1, 4] tensor for it.
57 let mut current_layer: Vec<NodeId> = (0..input_width)
58 .map(|_| graph.input_node(shape.clone()))
59 .collect();
60
61 let input_ids = current_layer.clone();
62
63 // Build hidden layers iteratively.
64 //
65 // This loop is only in graph construction. The resulting graph is still a
66 // feedforward DAG with no cycles.
67 for &layer_width in &hidden_widths {
68 let mut next_layer = Vec::with_capacity(layer_width);
69
70 for _ in 0..layer_width {
71 // Start accumulation with the first node of the current layer.
72 let mut acc = current_layer[0];
73
74 // Repeatedly add the remaining nodes from the current layer.
75 //
76 // Each `graph.add(...)` creates a new intermediate node.
77 for &node in &current_layer[1..] {
78 acc = graph
79 .add(acc, node)
80 .expect("Adding nodes in a layer should succeed");
81 }
82
83 // Apply a nonlinearity at the end of the layer computation.
84 let out = graph
85 .relu(acc)
86 .expect("Applying ReLU after accumulation should succeed");
87
88 next_layer.push(out);
89 }
90
91 current_layer = next_layer;
92 }
93
94 // Build the output layer the same way.
95 let mut output_ids = Vec::with_capacity(output_width);
96
97 for _ in 0..output_width {
98 let mut acc = current_layer[0];
99
100 for &node in &current_layer[1..] {
101 acc = graph
102 .add(acc, node)
103 .expect("Adding nodes in the output layer should succeed");
104 }
105
106 let out = graph
107 .relu(acc)
108 .expect("Applying ReLU in the output layer should succeed");
109
110 graph
111 .set_output_node(out)
112 .expect("Setting output node should succeed");
113
114 output_ids.push(out);
115 }
116
117 // Bind concrete input tensors.
118 //
119 // As in the smaller examples, bindings are keyed by input-node `NodeId`.
120 // The graph can be re-used with different runtime values.
121 let bindings = vec![
122 (
123 input_ids[0],
124 Tensor::from_vec(vec![1, 4], vec![1.0, -2.0, 3.0, -4.0])
125 .expect("Tensor construction should succeed"),
126 ),
127 (
128 input_ids[1],
129 Tensor::from_vec(vec![1, 4], vec![0.5, 1.5, -2.5, 3.5])
130 .expect("Tensor construction should succeed"),
131 ),
132 (
133 input_ids[2],
134 Tensor::from_vec(vec![1, 4], vec![10.0, -20.0, 30.0, -40.0])
135 .expect("Tensor construction should succeed"),
136 ),
137 ];
138
139 let exec = Executor::new(KernelRegistry::default());
140 let outputs = exec
141 .execute(&graph, bindings)
142 .expect("Execution should succeed");
143
144 for out in output_ids {
145 let tensor = outputs
146 .get(&out)
147 .expect("Declared output should be present in executor results");
148
149 println!("Computed output for node {:?}: {:?}", out, tensor);
150 }
151}Sourcepub fn input_node(&mut self, shape: Vec<usize>) -> NodeId
pub fn input_node(&mut self, shape: Vec<usize>) -> NodeId
Creates a new input node with the given tensor shape and returns its NodeId.
Input nodes have no dependencies and an output shape equal to shape.
§Panics
Panics if a node ID collision is detected (an invariant violation indicating too many nodes have been allocated or ID generation is broken).
§Examples
let mut g = Graph::new();
let x = g.input_node(vec![2, 3]);
assert!(g.node(x).is_ok());
assert_eq!(g.num_nodes(), 1);Examples found in repository?
78fn main() {
79 // Build a tiny graph:
80 //
81 // out = add(a, b)
82 //
83 // The graph describes *what* should be computed, not how.
84 let mut graph = Graph::new();
85
86 let a = graph.input_node(vec![2, 2]);
87 let b = graph.input_node(vec![2, 2]);
88 let out = graph
89 .add(a, b)
90 .expect("Adding valid input nodes should succeed");
91
92 graph
93 .set_output_node(out)
94 .expect("Setting output node should succeed");
95
96 // Create a custom registry.
97 //
98 // Start from an empty registry and explicitly register only the kernel(s)
99 // needed by this graph.
100 let mut registry = KernelRegistry::new();
101
102 // Register our custom Add kernel.
103 //
104 // `register(...)` returns the previous mapping if one existed.
105 let old = registry.register(OpKind::Add, Box::new(CustomAddKernel));
106 assert!(
107 old.is_none(),
108 "First Add registration should not replace an existing kernel"
109 );
110
111 // Construct the executor with the custom registry.
112 let exec = Executor::new(registry);
113
114 // Bind runtime inputs.
115 //
116 // These are ordinary tensors supplied for the graph input nodes.
117 let a_tensor = Tensor::from_vec(vec![2, 2], vec![1.0, 2.0, 3.0, 4.0])
118 .expect("Tensor construction should succeed");
119 let b_tensor = Tensor::from_vec(vec![2, 2], vec![10.0, 20.0, 30.0, 40.0])
120 .expect("Tensor construction should succeed");
121
122 // Execute the graph.
123 //
124 // During execution:
125 // - the executor validates input bindings,
126 // - walks the graph in topological order,
127 // - sees an `OpKind::Add` node,
128 // - looks up `OpKind::Add` in the registry,
129 // - dispatches to `CustomAddKernel::compute(...)`.
130 let outputs = exec
131 .execute(&graph, vec![(a, a_tensor), (b, b_tensor)])
132 .expect("Execution should succeed");
133
134 let result = outputs
135 .get(&out)
136 .expect("Declared output should be present in executor results");
137
138 println!("Computed output for node {:?}: {:?}", out, result);
139}More examples
46fn main() {
47 let mut graph = Graph::new();
48
49 // Declare graph inputs.
50 //
51 // The shapes here establish the legal runtime tensor shapes:
52 // a: [2, 3]
53 // b: [3, 2]
54 // c: [2, 2]
55 let a = graph.input_node(vec![2, 3]);
56 let b = graph.input_node(vec![3, 2]);
57 let c = graph.input_node(vec![2, 2]);
58
59 // Build intermediate operations.
60 //
61 // `relu(a)` preserves shape [2, 3].
62 let ra = graph.relu(a).expect("Valid ReLU operation should succeed");
63
64 // `relu(b)` preserves shape [3, 2].
65 let rb = graph.relu(b).expect("Valid ReLU operation should succeed");
66
67 // `matmul(ra, rb)` combines [2, 3] x [3, 2] -> [2, 2].
68 //
69 // This is a good example of graph-level validation preventing malformed graphs
70 // before execution ever begins.
71 let mm = graph
72 .matmul(ra, rb)
73 .expect("Valid matmul operation should succeed");
74
75 // `add(mm, c)` adds two [2, 2] tensors and also yields [2, 2].
76 let out = graph
77 .add(mm, c)
78 .expect("Valid add operation should succeed");
79
80 graph
81 .set_output_node(out)
82 .expect("Setting output node should succeed");
83
84 // Bind concrete runtime values.
85 //
86 // These values are only examples; the graph structure is independent of them.
87 // The same graph can be executed many times with different input tensors.
88 let a_tensor = Tensor::from_vec(vec![2, 3], vec![-1.0, 2.0, -3.0, 4.0, -5.0, 6.0])
89 .expect("Tensor construction should succeed");
90
91 let b_tensor = Tensor::from_vec(vec![3, 2], vec![-7.0, 8.0, 9.0, -10.0, 11.0, 12.0])
92 .expect("Tensor construction should succeed");
93
94 let c_tensor = Tensor::from_vec(vec![2, 2], vec![0.5, 1.5, 2.5, 3.5])
95 .expect("Tensor construction should succeed");
96
97 let exec = Executor::new(KernelRegistry::default());
98
99 let outputs = exec
100 .execute(&graph, vec![(a, a_tensor), (b, b_tensor), (c, c_tensor)])
101 .expect("Execution should succeed");
102
103 let result = outputs
104 .get(&out)
105 .expect("Declared output should be present in executor results");
106
107 println!("Computed output for node {:?}: {:?}", out, result);
108}29fn main() {
30 // Build the graph:
31 //
32 // a ----\
33 // add ---> out
34 // b ----/
35 //
36 // Here `a` and `b` are graph input nodes. They do not yet have runtime values;
37 // they only declare that the graph expects tensors of shape [2, 2].
38 let mut graph = Graph::new();
39
40 let a = graph.input_node(vec![2, 2]);
41 let b = graph.input_node(vec![2, 2]);
42
43 // Add an operation node.
44 //
45 // `graph.add(a, b)` does not perform arithmetic immediately. It adds a new node to
46 // the graph describing a future Add operation whose inputs are `a` and `b`.
47 //
48 // Shape validation happens here at graph-construction time. Since both inputs are
49 // [2, 2], the resulting Add node is also [2, 2].
50 let out = graph
51 .add(a, b)
52 .expect("Adding valid input nodes should succeed");
53
54 // Mark the node as an output. The executor will return a tensor for every node
55 // designated as a graph output.
56 graph
57 .set_output_node(out)
58 .expect("Setting output node should succeed");
59
60 // Create runtime tensors for the graph input nodes.
61 //
62 // These must match the shapes declared by the corresponding input nodes.
63 let a_tensor = Tensor::from_vec(vec![2, 2], vec![1.0, 2.0, 3.0, 4.0])
64 .expect("Tensor construction should succeed");
65 let b_tensor = Tensor::from_vec(vec![2, 2], vec![10.0, 20.0, 30.0, 40.0])
66 .expect("Tensor construction should succeed");
67
68 // Construct an executor with the default kernel registry.
69 //
70 // The registry determines which kernel implementation is used for each `OpKind`.
71 // In this example, the default registry is expected to contain an Add kernel.
72 let exec = Executor::new(KernelRegistry::default());
73
74 // Execute the graph.
75 //
76 // The bindings are `(NodeId, Tensor)` pairs. Each input node in the graph must be
77 // bound exactly once at runtime.
78 //
79 // Internally, the executor:
80 // 1. validates the bindings,
81 // 2. topologically orders the graph,
82 // 3. executes non-input nodes using registered kernels,
83 // 4. returns tensors for all declared output nodes.
84 let outputs = exec
85 .execute(&graph, vec![(a, a_tensor), (b, b_tensor)])
86 .expect("Execution should succeed");
87
88 let result = outputs
89 .get(&out)
90 .expect("Declared output should be present in executor results");
91
92 println!("Computed output for node {:?}: {:?}", out, result);
93}46fn main() {
47 let mut graph = Graph::new();
48
49 let input_width = 3;
50 let hidden_widths = [2, 2];
51 let output_width = 2;
52 let shape = vec![1, 4];
53
54 // Create the input layer.
55 //
56 // Each input node declares that execution must provide a [1, 4] tensor for it.
57 let mut current_layer: Vec<NodeId> = (0..input_width)
58 .map(|_| graph.input_node(shape.clone()))
59 .collect();
60
61 let input_ids = current_layer.clone();
62
63 // Build hidden layers iteratively.
64 //
65 // This loop is only in graph construction. The resulting graph is still a
66 // feedforward DAG with no cycles.
67 for &layer_width in &hidden_widths {
68 let mut next_layer = Vec::with_capacity(layer_width);
69
70 for _ in 0..layer_width {
71 // Start accumulation with the first node of the current layer.
72 let mut acc = current_layer[0];
73
74 // Repeatedly add the remaining nodes from the current layer.
75 //
76 // Each `graph.add(...)` creates a new intermediate node.
77 for &node in &current_layer[1..] {
78 acc = graph
79 .add(acc, node)
80 .expect("Adding nodes in a layer should succeed");
81 }
82
83 // Apply a nonlinearity at the end of the layer computation.
84 let out = graph
85 .relu(acc)
86 .expect("Applying ReLU after accumulation should succeed");
87
88 next_layer.push(out);
89 }
90
91 current_layer = next_layer;
92 }
93
94 // Build the output layer the same way.
95 let mut output_ids = Vec::with_capacity(output_width);
96
97 for _ in 0..output_width {
98 let mut acc = current_layer[0];
99
100 for &node in &current_layer[1..] {
101 acc = graph
102 .add(acc, node)
103 .expect("Adding nodes in the output layer should succeed");
104 }
105
106 let out = graph
107 .relu(acc)
108 .expect("Applying ReLU in the output layer should succeed");
109
110 graph
111 .set_output_node(out)
112 .expect("Setting output node should succeed");
113
114 output_ids.push(out);
115 }
116
117 // Bind concrete input tensors.
118 //
119 // As in the smaller examples, bindings are keyed by input-node `NodeId`.
120 // The graph can be re-used with different runtime values.
121 let bindings = vec![
122 (
123 input_ids[0],
124 Tensor::from_vec(vec![1, 4], vec![1.0, -2.0, 3.0, -4.0])
125 .expect("Tensor construction should succeed"),
126 ),
127 (
128 input_ids[1],
129 Tensor::from_vec(vec![1, 4], vec![0.5, 1.5, -2.5, 3.5])
130 .expect("Tensor construction should succeed"),
131 ),
132 (
133 input_ids[2],
134 Tensor::from_vec(vec![1, 4], vec![10.0, -20.0, 30.0, -40.0])
135 .expect("Tensor construction should succeed"),
136 ),
137 ];
138
139 let exec = Executor::new(KernelRegistry::default());
140 let outputs = exec
141 .execute(&graph, bindings)
142 .expect("Execution should succeed");
143
144 for out in output_ids {
145 let tensor = outputs
146 .get(&out)
147 .expect("Declared output should be present in executor results");
148
149 println!("Computed output for node {:?}: {:?}", out, tensor);
150 }
151}Sourcepub fn matmul(
&mut self,
left: NodeId,
right: NodeId,
) -> Result<NodeId, GraphError>
pub fn matmul( &mut self, left: NodeId, right: NodeId, ) -> Result<NodeId, GraphError>
Adds a matrix multiplication node left × right.
Shape rule (2-D):
- left.shape = [m, k]
- right.shape = [k, n]
- output shape is [m, n]
§Errors
Returns GraphError::InvalidNodeId if either left or right does not exist
in this graph.
Returns GraphError::ShapeMismatch if the inner dimensions do not match.
§Examples
let mut g = Graph::new();
let a = g.input_node(vec![2, 3]);
let b = g.input_node(vec![3, 4]);
let c = g.matmul(a, b).unwrap();
assert!(g.node(c).is_ok());
assert_eq!(g.num_nodes(), 3);
// Mismatched inner dimension: [2,3] x [2,4] is invalid
let bad = g.input_node(vec![2, 4]);
assert!(matches!(g.matmul(a, bad).unwrap_err(), GraphError::ShapeMismatch));Examples found in repository?
46fn main() {
47 let mut graph = Graph::new();
48
49 // Declare graph inputs.
50 //
51 // The shapes here establish the legal runtime tensor shapes:
52 // a: [2, 3]
53 // b: [3, 2]
54 // c: [2, 2]
55 let a = graph.input_node(vec![2, 3]);
56 let b = graph.input_node(vec![3, 2]);
57 let c = graph.input_node(vec![2, 2]);
58
59 // Build intermediate operations.
60 //
61 // `relu(a)` preserves shape [2, 3].
62 let ra = graph.relu(a).expect("Valid ReLU operation should succeed");
63
64 // `relu(b)` preserves shape [3, 2].
65 let rb = graph.relu(b).expect("Valid ReLU operation should succeed");
66
67 // `matmul(ra, rb)` combines [2, 3] x [3, 2] -> [2, 2].
68 //
69 // This is a good example of graph-level validation preventing malformed graphs
70 // before execution ever begins.
71 let mm = graph
72 .matmul(ra, rb)
73 .expect("Valid matmul operation should succeed");
74
75 // `add(mm, c)` adds two [2, 2] tensors and also yields [2, 2].
76 let out = graph
77 .add(mm, c)
78 .expect("Valid add operation should succeed");
79
80 graph
81 .set_output_node(out)
82 .expect("Setting output node should succeed");
83
84 // Bind concrete runtime values.
85 //
86 // These values are only examples; the graph structure is independent of them.
87 // The same graph can be executed many times with different input tensors.
88 let a_tensor = Tensor::from_vec(vec![2, 3], vec![-1.0, 2.0, -3.0, 4.0, -5.0, 6.0])
89 .expect("Tensor construction should succeed");
90
91 let b_tensor = Tensor::from_vec(vec![3, 2], vec![-7.0, 8.0, 9.0, -10.0, 11.0, 12.0])
92 .expect("Tensor construction should succeed");
93
94 let c_tensor = Tensor::from_vec(vec![2, 2], vec![0.5, 1.5, 2.5, 3.5])
95 .expect("Tensor construction should succeed");
96
97 let exec = Executor::new(KernelRegistry::default());
98
99 let outputs = exec
100 .execute(&graph, vec![(a, a_tensor), (b, b_tensor), (c, c_tensor)])
101 .expect("Execution should succeed");
102
103 let result = outputs
104 .get(&out)
105 .expect("Declared output should be present in executor results");
106
107 println!("Computed output for node {:?}: {:?}", out, result);
108}Sourcepub fn add(&mut self, left: NodeId, right: NodeId) -> Result<NodeId, GraphError>
pub fn add(&mut self, left: NodeId, right: NodeId) -> Result<NodeId, GraphError>
Adds an elementwise addition node left + right.
Shape rule:
shape(left) == shape(right)
§Errors
Returns GraphError::InvalidNodeId if either input does not exist in this graph.
Returns GraphError::ShapeMismatch if the shapes differ.
§Examples
let mut g = Graph::new();
let a = g.input_node(vec![2, 3]);
let b = g.input_node(vec![2, 3]);
let c = g.add(a, b).unwrap();
assert!(g.node(c).is_ok());
let d = g.input_node(vec![2, 4]);
assert!(matches!(g.add(a, d).unwrap_err(), GraphError::ShapeMismatch));Examples found in repository?
78fn main() {
79 // Build a tiny graph:
80 //
81 // out = add(a, b)
82 //
83 // The graph describes *what* should be computed, not how.
84 let mut graph = Graph::new();
85
86 let a = graph.input_node(vec![2, 2]);
87 let b = graph.input_node(vec![2, 2]);
88 let out = graph
89 .add(a, b)
90 .expect("Adding valid input nodes should succeed");
91
92 graph
93 .set_output_node(out)
94 .expect("Setting output node should succeed");
95
96 // Create a custom registry.
97 //
98 // Start from an empty registry and explicitly register only the kernel(s)
99 // needed by this graph.
100 let mut registry = KernelRegistry::new();
101
102 // Register our custom Add kernel.
103 //
104 // `register(...)` returns the previous mapping if one existed.
105 let old = registry.register(OpKind::Add, Box::new(CustomAddKernel));
106 assert!(
107 old.is_none(),
108 "First Add registration should not replace an existing kernel"
109 );
110
111 // Construct the executor with the custom registry.
112 let exec = Executor::new(registry);
113
114 // Bind runtime inputs.
115 //
116 // These are ordinary tensors supplied for the graph input nodes.
117 let a_tensor = Tensor::from_vec(vec![2, 2], vec![1.0, 2.0, 3.0, 4.0])
118 .expect("Tensor construction should succeed");
119 let b_tensor = Tensor::from_vec(vec![2, 2], vec![10.0, 20.0, 30.0, 40.0])
120 .expect("Tensor construction should succeed");
121
122 // Execute the graph.
123 //
124 // During execution:
125 // - the executor validates input bindings,
126 // - walks the graph in topological order,
127 // - sees an `OpKind::Add` node,
128 // - looks up `OpKind::Add` in the registry,
129 // - dispatches to `CustomAddKernel::compute(...)`.
130 let outputs = exec
131 .execute(&graph, vec![(a, a_tensor), (b, b_tensor)])
132 .expect("Execution should succeed");
133
134 let result = outputs
135 .get(&out)
136 .expect("Declared output should be present in executor results");
137
138 println!("Computed output for node {:?}: {:?}", out, result);
139}More examples
46fn main() {
47 let mut graph = Graph::new();
48
49 // Declare graph inputs.
50 //
51 // The shapes here establish the legal runtime tensor shapes:
52 // a: [2, 3]
53 // b: [3, 2]
54 // c: [2, 2]
55 let a = graph.input_node(vec![2, 3]);
56 let b = graph.input_node(vec![3, 2]);
57 let c = graph.input_node(vec![2, 2]);
58
59 // Build intermediate operations.
60 //
61 // `relu(a)` preserves shape [2, 3].
62 let ra = graph.relu(a).expect("Valid ReLU operation should succeed");
63
64 // `relu(b)` preserves shape [3, 2].
65 let rb = graph.relu(b).expect("Valid ReLU operation should succeed");
66
67 // `matmul(ra, rb)` combines [2, 3] x [3, 2] -> [2, 2].
68 //
69 // This is a good example of graph-level validation preventing malformed graphs
70 // before execution ever begins.
71 let mm = graph
72 .matmul(ra, rb)
73 .expect("Valid matmul operation should succeed");
74
75 // `add(mm, c)` adds two [2, 2] tensors and also yields [2, 2].
76 let out = graph
77 .add(mm, c)
78 .expect("Valid add operation should succeed");
79
80 graph
81 .set_output_node(out)
82 .expect("Setting output node should succeed");
83
84 // Bind concrete runtime values.
85 //
86 // These values are only examples; the graph structure is independent of them.
87 // The same graph can be executed many times with different input tensors.
88 let a_tensor = Tensor::from_vec(vec![2, 3], vec![-1.0, 2.0, -3.0, 4.0, -5.0, 6.0])
89 .expect("Tensor construction should succeed");
90
91 let b_tensor = Tensor::from_vec(vec![3, 2], vec![-7.0, 8.0, 9.0, -10.0, 11.0, 12.0])
92 .expect("Tensor construction should succeed");
93
94 let c_tensor = Tensor::from_vec(vec![2, 2], vec![0.5, 1.5, 2.5, 3.5])
95 .expect("Tensor construction should succeed");
96
97 let exec = Executor::new(KernelRegistry::default());
98
99 let outputs = exec
100 .execute(&graph, vec![(a, a_tensor), (b, b_tensor), (c, c_tensor)])
101 .expect("Execution should succeed");
102
103 let result = outputs
104 .get(&out)
105 .expect("Declared output should be present in executor results");
106
107 println!("Computed output for node {:?}: {:?}", out, result);
108}29fn main() {
30 // Build the graph:
31 //
32 // a ----\
33 // add ---> out
34 // b ----/
35 //
36 // Here `a` and `b` are graph input nodes. They do not yet have runtime values;
37 // they only declare that the graph expects tensors of shape [2, 2].
38 let mut graph = Graph::new();
39
40 let a = graph.input_node(vec![2, 2]);
41 let b = graph.input_node(vec![2, 2]);
42
43 // Add an operation node.
44 //
45 // `graph.add(a, b)` does not perform arithmetic immediately. It adds a new node to
46 // the graph describing a future Add operation whose inputs are `a` and `b`.
47 //
48 // Shape validation happens here at graph-construction time. Since both inputs are
49 // [2, 2], the resulting Add node is also [2, 2].
50 let out = graph
51 .add(a, b)
52 .expect("Adding valid input nodes should succeed");
53
54 // Mark the node as an output. The executor will return a tensor for every node
55 // designated as a graph output.
56 graph
57 .set_output_node(out)
58 .expect("Setting output node should succeed");
59
60 // Create runtime tensors for the graph input nodes.
61 //
62 // These must match the shapes declared by the corresponding input nodes.
63 let a_tensor = Tensor::from_vec(vec![2, 2], vec![1.0, 2.0, 3.0, 4.0])
64 .expect("Tensor construction should succeed");
65 let b_tensor = Tensor::from_vec(vec![2, 2], vec![10.0, 20.0, 30.0, 40.0])
66 .expect("Tensor construction should succeed");
67
68 // Construct an executor with the default kernel registry.
69 //
70 // The registry determines which kernel implementation is used for each `OpKind`.
71 // In this example, the default registry is expected to contain an Add kernel.
72 let exec = Executor::new(KernelRegistry::default());
73
74 // Execute the graph.
75 //
76 // The bindings are `(NodeId, Tensor)` pairs. Each input node in the graph must be
77 // bound exactly once at runtime.
78 //
79 // Internally, the executor:
80 // 1. validates the bindings,
81 // 2. topologically orders the graph,
82 // 3. executes non-input nodes using registered kernels,
83 // 4. returns tensors for all declared output nodes.
84 let outputs = exec
85 .execute(&graph, vec![(a, a_tensor), (b, b_tensor)])
86 .expect("Execution should succeed");
87
88 let result = outputs
89 .get(&out)
90 .expect("Declared output should be present in executor results");
91
92 println!("Computed output for node {:?}: {:?}", out, result);
93}46fn main() {
47 let mut graph = Graph::new();
48
49 let input_width = 3;
50 let hidden_widths = [2, 2];
51 let output_width = 2;
52 let shape = vec![1, 4];
53
54 // Create the input layer.
55 //
56 // Each input node declares that execution must provide a [1, 4] tensor for it.
57 let mut current_layer: Vec<NodeId> = (0..input_width)
58 .map(|_| graph.input_node(shape.clone()))
59 .collect();
60
61 let input_ids = current_layer.clone();
62
63 // Build hidden layers iteratively.
64 //
65 // This loop is only in graph construction. The resulting graph is still a
66 // feedforward DAG with no cycles.
67 for &layer_width in &hidden_widths {
68 let mut next_layer = Vec::with_capacity(layer_width);
69
70 for _ in 0..layer_width {
71 // Start accumulation with the first node of the current layer.
72 let mut acc = current_layer[0];
73
74 // Repeatedly add the remaining nodes from the current layer.
75 //
76 // Each `graph.add(...)` creates a new intermediate node.
77 for &node in &current_layer[1..] {
78 acc = graph
79 .add(acc, node)
80 .expect("Adding nodes in a layer should succeed");
81 }
82
83 // Apply a nonlinearity at the end of the layer computation.
84 let out = graph
85 .relu(acc)
86 .expect("Applying ReLU after accumulation should succeed");
87
88 next_layer.push(out);
89 }
90
91 current_layer = next_layer;
92 }
93
94 // Build the output layer the same way.
95 let mut output_ids = Vec::with_capacity(output_width);
96
97 for _ in 0..output_width {
98 let mut acc = current_layer[0];
99
100 for &node in &current_layer[1..] {
101 acc = graph
102 .add(acc, node)
103 .expect("Adding nodes in the output layer should succeed");
104 }
105
106 let out = graph
107 .relu(acc)
108 .expect("Applying ReLU in the output layer should succeed");
109
110 graph
111 .set_output_node(out)
112 .expect("Setting output node should succeed");
113
114 output_ids.push(out);
115 }
116
117 // Bind concrete input tensors.
118 //
119 // As in the smaller examples, bindings are keyed by input-node `NodeId`.
120 // The graph can be re-used with different runtime values.
121 let bindings = vec![
122 (
123 input_ids[0],
124 Tensor::from_vec(vec![1, 4], vec![1.0, -2.0, 3.0, -4.0])
125 .expect("Tensor construction should succeed"),
126 ),
127 (
128 input_ids[1],
129 Tensor::from_vec(vec![1, 4], vec![0.5, 1.5, -2.5, 3.5])
130 .expect("Tensor construction should succeed"),
131 ),
132 (
133 input_ids[2],
134 Tensor::from_vec(vec![1, 4], vec![10.0, -20.0, 30.0, -40.0])
135 .expect("Tensor construction should succeed"),
136 ),
137 ];
138
139 let exec = Executor::new(KernelRegistry::default());
140 let outputs = exec
141 .execute(&graph, bindings)
142 .expect("Execution should succeed");
143
144 for out in output_ids {
145 let tensor = outputs
146 .get(&out)
147 .expect("Declared output should be present in executor results");
148
149 println!("Computed output for node {:?}: {:?}", out, tensor);
150 }
151}Sourcepub fn relu(&mut self, input: NodeId) -> Result<NodeId, GraphError>
pub fn relu(&mut self, input: NodeId) -> Result<NodeId, GraphError>
Adds a ReLU node relu(input).
ReLU preserves shape: shape(output) == shape(input).
§Errors
Returns GraphError::InvalidNodeId if input does not exist in this graph.
§Examples
let mut g = Graph::new();
let x = g.input_node(vec![2, 3]);
let y = g.relu(x).unwrap();
assert!(g.node(y).is_ok());
// Using a NodeId from another graph is invalid
let mut other = Graph::new();
let foreign = other.input_node(vec![2, 3]);
assert!(matches!(g.relu(foreign).unwrap_err(), GraphError::InvalidNodeId));Examples found in repository?
46fn main() {
47 let mut graph = Graph::new();
48
49 // Declare graph inputs.
50 //
51 // The shapes here establish the legal runtime tensor shapes:
52 // a: [2, 3]
53 // b: [3, 2]
54 // c: [2, 2]
55 let a = graph.input_node(vec![2, 3]);
56 let b = graph.input_node(vec![3, 2]);
57 let c = graph.input_node(vec![2, 2]);
58
59 // Build intermediate operations.
60 //
61 // `relu(a)` preserves shape [2, 3].
62 let ra = graph.relu(a).expect("Valid ReLU operation should succeed");
63
64 // `relu(b)` preserves shape [3, 2].
65 let rb = graph.relu(b).expect("Valid ReLU operation should succeed");
66
67 // `matmul(ra, rb)` combines [2, 3] x [3, 2] -> [2, 2].
68 //
69 // This is a good example of graph-level validation preventing malformed graphs
70 // before execution ever begins.
71 let mm = graph
72 .matmul(ra, rb)
73 .expect("Valid matmul operation should succeed");
74
75 // `add(mm, c)` adds two [2, 2] tensors and also yields [2, 2].
76 let out = graph
77 .add(mm, c)
78 .expect("Valid add operation should succeed");
79
80 graph
81 .set_output_node(out)
82 .expect("Setting output node should succeed");
83
84 // Bind concrete runtime values.
85 //
86 // These values are only examples; the graph structure is independent of them.
87 // The same graph can be executed many times with different input tensors.
88 let a_tensor = Tensor::from_vec(vec![2, 3], vec![-1.0, 2.0, -3.0, 4.0, -5.0, 6.0])
89 .expect("Tensor construction should succeed");
90
91 let b_tensor = Tensor::from_vec(vec![3, 2], vec![-7.0, 8.0, 9.0, -10.0, 11.0, 12.0])
92 .expect("Tensor construction should succeed");
93
94 let c_tensor = Tensor::from_vec(vec![2, 2], vec![0.5, 1.5, 2.5, 3.5])
95 .expect("Tensor construction should succeed");
96
97 let exec = Executor::new(KernelRegistry::default());
98
99 let outputs = exec
100 .execute(&graph, vec![(a, a_tensor), (b, b_tensor), (c, c_tensor)])
101 .expect("Execution should succeed");
102
103 let result = outputs
104 .get(&out)
105 .expect("Declared output should be present in executor results");
106
107 println!("Computed output for node {:?}: {:?}", out, result);
108}More examples
46fn main() {
47 let mut graph = Graph::new();
48
49 let input_width = 3;
50 let hidden_widths = [2, 2];
51 let output_width = 2;
52 let shape = vec![1, 4];
53
54 // Create the input layer.
55 //
56 // Each input node declares that execution must provide a [1, 4] tensor for it.
57 let mut current_layer: Vec<NodeId> = (0..input_width)
58 .map(|_| graph.input_node(shape.clone()))
59 .collect();
60
61 let input_ids = current_layer.clone();
62
63 // Build hidden layers iteratively.
64 //
65 // This loop is only in graph construction. The resulting graph is still a
66 // feedforward DAG with no cycles.
67 for &layer_width in &hidden_widths {
68 let mut next_layer = Vec::with_capacity(layer_width);
69
70 for _ in 0..layer_width {
71 // Start accumulation with the first node of the current layer.
72 let mut acc = current_layer[0];
73
74 // Repeatedly add the remaining nodes from the current layer.
75 //
76 // Each `graph.add(...)` creates a new intermediate node.
77             for &node in &current_layer[1..] {
78 acc = graph
79 .add(acc, node)
80 .expect("Adding nodes in a layer should succeed");
81 }
82
83 // Apply a nonlinearity at the end of the layer computation.
84 let out = graph
85 .relu(acc)
86 .expect("Applying ReLU after accumulation should succeed");
87
88 next_layer.push(out);
89 }
90
91 current_layer = next_layer;
92 }
93
94 // Build the output layer the same way.
95 let mut output_ids = Vec::with_capacity(output_width);
96
97 for _ in 0..output_width {
98 let mut acc = current_layer[0];
99
100         for &node in &current_layer[1..] {
101 acc = graph
102 .add(acc, node)
103 .expect("Adding nodes in the output layer should succeed");
104 }
105
106 let out = graph
107 .relu(acc)
108 .expect("Applying ReLU in the output layer should succeed");
109
110 graph
111 .set_output_node(out)
112 .expect("Setting output node should succeed");
113
114 output_ids.push(out);
115 }
116
117 // Bind concrete input tensors.
118 //
119 // As in the smaller examples, bindings are keyed by input-node `NodeId`.
120 // The graph can be re-used with different runtime values.
121 let bindings = vec![
122 (
123 input_ids[0],
124 Tensor::from_vec(vec![1, 4], vec![1.0, -2.0, 3.0, -4.0])
125 .expect("Tensor construction should succeed"),
126 ),
127 (
128 input_ids[1],
129 Tensor::from_vec(vec![1, 4], vec![0.5, 1.5, -2.5, 3.5])
130 .expect("Tensor construction should succeed"),
131 ),
132 (
133 input_ids[2],
134 Tensor::from_vec(vec![1, 4], vec![10.0, -20.0, 30.0, -40.0])
135 .expect("Tensor construction should succeed"),
136 ),
137 ];
138
139 let exec = Executor::new(KernelRegistry::default());
140 let outputs = exec
141 .execute(&graph, bindings)
142 .expect("Execution should succeed");
143
144 for out in output_ids {
145 let tensor = outputs
146 .get(&out)
147 .expect("Declared output should be present in executor results");
148
149 println!("Computed output for node {:?}: {:?}", out, tensor);
150 }
151}Sourcepub fn set_output_node(&mut self, node: NodeId) -> Result<(), GraphError>
pub fn set_output_node(&mut self, node: NodeId) -> Result<(), GraphError>
Marks node as an output node.
Graphs must have at least one output node to be meaningful for execution, and may have multiple outputs. This method does not create a new node or execute anything; it only records the provided node ID as an output.
§Errors
Returns GraphError::InvalidNodeId if node does not exist in this graph.
§Examples
let mut g = Graph::new();
let x = g.input_node(vec![2, 3]);
let y = g.relu(x).expect("No error should occur in the construction of this ReLU");
assert!(g.outputs().is_empty());
g.set_output_node(y).expect("We are passing a valid output node");
assert_eq!(g.outputs().contains(&y), true);
assert_eq!(g.outputs().len(), 1);
// A NodeId from another graph is invalid
let mut other = Graph::new();
let foreign = other.input_node(vec![2, 3]);
assert!(matches!(g.set_output_node(foreign).unwrap_err(), GraphError::InvalidNodeId));Examples found in repository?
78fn main() {
79 // Build a tiny graph:
80 //
81 // out = add(a, b)
82 //
83 // The graph describes *what* should be computed, not how.
84 let mut graph = Graph::new();
85
86 let a = graph.input_node(vec![2, 2]);
87 let b = graph.input_node(vec![2, 2]);
88 let out = graph
89 .add(a, b)
90 .expect("Adding valid input nodes should succeed");
91
92 graph
93 .set_output_node(out)
94 .expect("Setting output node should succeed");
95
96 // Create a custom registry.
97 //
98 // Start from an empty registry and explicitly register only the kernel(s)
99 // needed by this graph.
100 let mut registry = KernelRegistry::new();
101
102 // Register our custom Add kernel.
103 //
104 // `register(...)` returns the previous mapping if one existed.
105 let old = registry.register(OpKind::Add, Box::new(CustomAddKernel));
106 assert!(
107 old.is_none(),
108 "First Add registration should not replace an existing kernel"
109 );
110
111 // Construct the executor with the custom registry.
112 let exec = Executor::new(registry);
113
114 // Bind runtime inputs.
115 //
116 // These are ordinary tensors supplied for the graph input nodes.
117 let a_tensor = Tensor::from_vec(vec![2, 2], vec![1.0, 2.0, 3.0, 4.0])
118 .expect("Tensor construction should succeed");
119 let b_tensor = Tensor::from_vec(vec![2, 2], vec![10.0, 20.0, 30.0, 40.0])
120 .expect("Tensor construction should succeed");
121
122 // Execute the graph.
123 //
124 // During execution:
125 // - the executor validates input bindings,
126 // - walks the graph in topological order,
127 // - sees an `OpKind::Add` node,
128 // - looks up `OpKind::Add` in the registry,
129 // - dispatches to `CustomAddKernel::compute(...)`.
130 let outputs = exec
131 .execute(&graph, vec![(a, a_tensor), (b, b_tensor)])
132 .expect("Execution should succeed");
133
134 let result = outputs
135 .get(&out)
136 .expect("Declared output should be present in executor results");
137
138 println!("Computed output for node {:?}: {:?}", out, result);
139}More examples
46fn main() {
47 let mut graph = Graph::new();
48
49 // Declare graph inputs.
50 //
51 // The shapes here establish the legal runtime tensor shapes:
52 // a: [2, 3]
53 // b: [3, 2]
54 // c: [2, 2]
55 let a = graph.input_node(vec![2, 3]);
56 let b = graph.input_node(vec![3, 2]);
57 let c = graph.input_node(vec![2, 2]);
58
59 // Build intermediate operations.
60 //
61 // `relu(a)` preserves shape [2, 3].
62 let ra = graph.relu(a).expect("Valid ReLU operation should succeed");
63
64 // `relu(b)` preserves shape [3, 2].
65 let rb = graph.relu(b).expect("Valid ReLU operation should succeed");
66
67 // `matmul(ra, rb)` combines [2, 3] x [3, 2] -> [2, 2].
68 //
69 // This is a good example of graph-level validation preventing malformed graphs
70 // before execution ever begins.
71 let mm = graph
72 .matmul(ra, rb)
73 .expect("Valid matmul operation should succeed");
74
75 // `add(mm, c)` adds two [2, 2] tensors and also yields [2, 2].
76 let out = graph
77 .add(mm, c)
78 .expect("Valid add operation should succeed");
79
80 graph
81 .set_output_node(out)
82 .expect("Setting output node should succeed");
83
84 // Bind concrete runtime values.
85 //
86 // These values are only examples; the graph structure is independent of them.
87 // The same graph can be executed many times with different input tensors.
88 let a_tensor = Tensor::from_vec(vec![2, 3], vec![-1.0, 2.0, -3.0, 4.0, -5.0, 6.0])
89 .expect("Tensor construction should succeed");
90
91 let b_tensor = Tensor::from_vec(vec![3, 2], vec![-7.0, 8.0, 9.0, -10.0, 11.0, 12.0])
92 .expect("Tensor construction should succeed");
93
94 let c_tensor = Tensor::from_vec(vec![2, 2], vec![0.5, 1.5, 2.5, 3.5])
95 .expect("Tensor construction should succeed");
96
97 let exec = Executor::new(KernelRegistry::default());
98
99 let outputs = exec
100 .execute(&graph, vec![(a, a_tensor), (b, b_tensor), (c, c_tensor)])
101 .expect("Execution should succeed");
102
103 let result = outputs
104 .get(&out)
105 .expect("Declared output should be present in executor results");
106
107 println!("Computed output for node {:?}: {:?}", out, result);
108}29fn main() {
30 // Build the graph:
31 //
32 // a ----\
33 // add ---> out
34 // b ----/
35 //
36 // Here `a` and `b` are graph input nodes. They do not yet have runtime values;
37 // they only declare that the graph expects tensors of shape [2, 2].
38 let mut graph = Graph::new();
39
40 let a = graph.input_node(vec![2, 2]);
41 let b = graph.input_node(vec![2, 2]);
42
43 // Add an operation node.
44 //
45 // `graph.add(a, b)` does not perform arithmetic immediately. It adds a new node to
46 // the graph describing a future Add operation whose inputs are `a` and `b`.
47 //
48 // Shape validation happens here at graph-construction time. Since both inputs are
49 // [2, 2], the resulting Add node is also [2, 2].
50 let out = graph
51 .add(a, b)
52 .expect("Adding valid input nodes should succeed");
53
54 // Mark the node as an output. The executor will return a tensor for every node
55 // designated as a graph output.
56 graph
57 .set_output_node(out)
58 .expect("Setting output node should succeed");
59
60 // Create runtime tensors for the graph input nodes.
61 //
62 // These must match the shapes declared by the corresponding input nodes.
63 let a_tensor = Tensor::from_vec(vec![2, 2], vec![1.0, 2.0, 3.0, 4.0])
64 .expect("Tensor construction should succeed");
65 let b_tensor = Tensor::from_vec(vec![2, 2], vec![10.0, 20.0, 30.0, 40.0])
66 .expect("Tensor construction should succeed");
67
68 // Construct an executor with the default kernel registry.
69 //
70 // The registry determines which kernel implementation is used for each `OpKind`.
71 // In this example, the default registry is expected to contain an Add kernel.
72 let exec = Executor::new(KernelRegistry::default());
73
74 // Execute the graph.
75 //
76 // The bindings are `(NodeId, Tensor)` pairs. Each input node in the graph must be
77 // bound exactly once at runtime.
78 //
79 // Internally, the executor:
80 // 1. validates the bindings,
81 // 2. topologically orders the graph,
82 // 3. executes non-input nodes using registered kernels,
83 // 4. returns tensors for all declared output nodes.
84 let outputs = exec
85 .execute(&graph, vec![(a, a_tensor), (b, b_tensor)])
86 .expect("Execution should succeed");
87
88 let result = outputs
89 .get(&out)
90 .expect("Declared output should be present in executor results");
91
92 println!("Computed output for node {:?}: {:?}", out, result);
93}46fn main() {
47 let mut graph = Graph::new();
48
49 let input_width = 3;
50 let hidden_widths = [2, 2];
51 let output_width = 2;
52 let shape = vec![1, 4];
53
54 // Create the input layer.
55 //
56 // Each input node declares that execution must provide a [1, 4] tensor for it.
57 let mut current_layer: Vec<NodeId> = (0..input_width)
58 .map(|_| graph.input_node(shape.clone()))
59 .collect();
60
61 let input_ids = current_layer.clone();
62
63 // Build hidden layers iteratively.
64 //
65 // This loop is only in graph construction. The resulting graph is still a
66 // feedforward DAG with no cycles.
67 for &layer_width in &hidden_widths {
68 let mut next_layer = Vec::with_capacity(layer_width);
69
70 for _ in 0..layer_width {
71 // Start accumulation with the first node of the current layer.
72 let mut acc = current_layer[0];
73
74 // Repeatedly add the remaining nodes from the current layer.
75 //
76 // Each `graph.add(...)` creates a new intermediate node.
77             for &node in &current_layer[1..] {
78 acc = graph
79 .add(acc, node)
80 .expect("Adding nodes in a layer should succeed");
81 }
82
83 // Apply a nonlinearity at the end of the layer computation.
84 let out = graph
85 .relu(acc)
86 .expect("Applying ReLU after accumulation should succeed");
87
88 next_layer.push(out);
89 }
90
91 current_layer = next_layer;
92 }
93
94 // Build the output layer the same way.
95 let mut output_ids = Vec::with_capacity(output_width);
96
97 for _ in 0..output_width {
98 let mut acc = current_layer[0];
99
100         for &node in &current_layer[1..] {
101 acc = graph
102 .add(acc, node)
103 .expect("Adding nodes in the output layer should succeed");
104 }
105
106 let out = graph
107 .relu(acc)
108 .expect("Applying ReLU in the output layer should succeed");
109
110 graph
111 .set_output_node(out)
112 .expect("Setting output node should succeed");
113
114 output_ids.push(out);
115 }
116
117 // Bind concrete input tensors.
118 //
119 // As in the smaller examples, bindings are keyed by input-node `NodeId`.
120 // The graph can be re-used with different runtime values.
121 let bindings = vec![
122 (
123 input_ids[0],
124 Tensor::from_vec(vec![1, 4], vec![1.0, -2.0, 3.0, -4.0])
125 .expect("Tensor construction should succeed"),
126 ),
127 (
128 input_ids[1],
129 Tensor::from_vec(vec![1, 4], vec![0.5, 1.5, -2.5, 3.5])
130 .expect("Tensor construction should succeed"),
131 ),
132 (
133 input_ids[2],
134 Tensor::from_vec(vec![1, 4], vec![10.0, -20.0, 30.0, -40.0])
135 .expect("Tensor construction should succeed"),
136 ),
137 ];
138
139 let exec = Executor::new(KernelRegistry::default());
140 let outputs = exec
141 .execute(&graph, bindings)
142 .expect("Execution should succeed");
143
144 for out in output_ids {
145 let tensor = outputs
146 .get(&out)
147 .expect("Declared output should be present in executor results");
148
149 println!("Computed output for node {:?}: {:?}", out, tensor);
150 }
151}Sourcepub fn node(&self, id: NodeId) -> Result<&Node, GraphError>
pub fn node(&self, id: NodeId) -> Result<&Node, GraphError>
Returns a shared reference to the node with the given NodeId.
§Errors
Returns GraphError::InvalidNodeId if the node is not present in this graph.
§Examples
let mut g = Graph::new();
let x = g.input_node(vec![1, 1]);
assert!(g.node(x).is_ok());
// A NodeId from another graph is invalid
let mut other = Graph::new();
let foreign = other.input_node(vec![1, 1]);
assert!(matches!(g.node(foreign).unwrap_err(), GraphError::InvalidNodeId));Sourcepub fn num_nodes(&self) -> usize
pub fn num_nodes(&self) -> usize
Returns the total number of nodes stored in this graph.
§Examples
let mut g = Graph::new();
assert_eq!(g.num_nodes(), 0);
let x = g.input_node(vec![2, 3]);
let y = g.relu(x).unwrap();
assert_eq!(g.num_nodes(), 2);Sourcepub fn nodes(&self) -> impl Iterator<Item = &Node>
pub fn nodes(&self) -> impl Iterator<Item = &Node>
Returns an iterator over all nodes stored in this graph.
Every inserted node is appended to this list
(including op nodes created by Graph::add, Graph::matmul, and Graph::relu).
§Examples
let mut g = Graph::new();
let a = g.input_node(vec![2, 3]);
let b = g.input_node(vec![2, 3]);
let c = g.add(a, b).unwrap();
// Includes both inputs and the derived node.
for node in g.nodes() {
assert!([a, b, c].contains(&node.id));
}
Sourcepub fn inputs(&self) -> &[NodeId]
pub fn inputs(&self) -> &[NodeId]
Returns the list of nodes recorded as inputs.
§Examples
let mut g = Graph::new();
let a = g.input_node(vec![2, 3]);
let b = g.input_node(vec![2, 3]);
let c = g.add(a, b).unwrap();
// Only includes both inputs.
assert_eq!(g.inputs(), &[a, b]);Sourcepub fn outputs(&self) -> &HashSet<NodeId>
pub fn outputs(&self) -> &HashSet<NodeId>
Returns the set of nodes marked as outputs via Graph::set_output_node.
§Examples
let mut g = Graph::new();
let x = g.input_node(vec![2, 3]);
let y = g.relu(x).expect("No error should occur in the construction of this ReLU");
assert!(g.outputs().is_empty());
g.set_output_node(y).expect("We are passing a valid output node");
assert!(g.outputs().contains(&y));
assert_eq!(g.outputs().len(), 1);Sourcepub fn topo_sort(&self) -> Result<Vec<NodeId>, GraphError>
pub fn topo_sort(&self) -> Result<Vec<NodeId>, GraphError>
Computes a deterministic topological execution order (Kahn’s Algorithm) of all nodes in the graph.
Topological ordering guarantees that every node appears after all of its dependencies. This ordering is required for correct execution of the compute graph, since kernels must not execute before their input tensors are available.
The returned order includes every node in the graph exactly once.
§Determinism
Determinism is guaranteed by enforcing a stable tie-breaking rule when multiple
nodes are ready for execution. Nodes with zero remaining dependencies are processed
in ascending NodeId order.
This ensures:
- Reproducible execution across runs
- Independence from hash seed randomization
- Stable ordering suitable for debugging and testing
§Returns
A vector of NodeId representing the execution order.
The order satisfies the invariant:
For every node N:
all inputs(N) appear before N in the returned vector
§Errors
Returns GraphError::CycleDetected if the graph contains a cycle. Under normal API usage this
cannot occur, because Graph methods never allow a cycle to be created.
Cycles violate compute graph semantics because no valid execution order exists.
§Complexity
Time complexity: O(V + E) Space complexity: O(V + E)
where:
- V = number of nodes
- E = number of edges (dependencies)
§Examples
let mut g = Graph::new();
let a = g.input_node(vec![2, 3]);
let b = g.relu(a).unwrap();
let c = g.relu(b).unwrap();
let order = g.topo_sort().unwrap();
let pos = |id| order.iter().position(|&x| x == id).unwrap();
assert!(pos(a) < pos(b));
assert!(pos(b) < pos(c));