pub struct Executor { /* private fields */ }

Expand description
Executes graphs using kernels registered in a KernelRegistry.
An Executor owns a kernel registry and uses it to evaluate graph nodes
according to each node’s OpKind. Non-input nodes are executed in
deterministic topological order, and intermediate tensors are stored internally
until all requested graph outputs have been produced.
§Examples
let exec = Executor::new(KernelRegistry::default());

Implementations§
Source§impl Executor
impl Executor
Sourcepub fn new(registry: KernelRegistry) -> Self
pub fn new(registry: KernelRegistry) -> Self
Creates a new executor backed by the provided kernel registry.
The registry determines which kernel implementation will be used for each operation kind encountered during execution.
§Examples
let registry = KernelRegistry::default();
let exec = Executor::new(registry);

Examples found in repository?
78fn main() {
79 // Build a tiny graph:
80 //
81 // out = add(a, b)
82 //
83 // The graph describes *what* should be computed, not how.
84 let mut graph = Graph::new();
85
86 let a = graph.input_node(vec![2, 2]);
87 let b = graph.input_node(vec![2, 2]);
88 let out = graph
89 .add(a, b)
90 .expect("Adding valid input nodes should succeed");
91
92 graph
93 .set_output_node(out)
94 .expect("Setting output node should succeed");
95
96 // Create a custom registry.
97 //
98 // Start from an empty registry and explicitly register only the kernel(s)
99 // needed by this graph.
100 let mut registry = KernelRegistry::new();
101
102 // Register our custom Add kernel.
103 //
104 // `register(...)` returns the previous mapping if one existed.
105 let old = registry.register(OpKind::Add, Box::new(CustomAddKernel));
106 assert!(
107 old.is_none(),
108 "First Add registration should not replace an existing kernel"
109 );
110
111 // Construct the executor with the custom registry.
112 let exec = Executor::new(registry);
113
114 // Bind runtime inputs.
115 //
116 // These are ordinary tensors supplied for the graph input nodes.
117 let a_tensor = Tensor::from_vec(vec![2, 2], vec![1.0, 2.0, 3.0, 4.0])
118 .expect("Tensor construction should succeed");
119 let b_tensor = Tensor::from_vec(vec![2, 2], vec![10.0, 20.0, 30.0, 40.0])
120 .expect("Tensor construction should succeed");
121
122 // Execute the graph.
123 //
124 // During execution:
125 // - the executor validates input bindings,
126 // - walks the graph in topological order,
127 // - sees an `OpKind::Add` node,
128 // - looks up `OpKind::Add` in the registry,
129 // - dispatches to `CustomAddKernel::compute(...)`.
130 let outputs = exec
131 .execute(&graph, vec![(a, a_tensor), (b, b_tensor)])
132 .expect("Execution should succeed");
133
134 let result = outputs
135 .get(&out)
136 .expect("Declared output should be present in executor results");
137
138 println!("Computed output for node {:?}: {:?}", out, result);
139}

More examples
46fn main() {
47 let mut graph = Graph::new();
48
49 // Declare graph inputs.
50 //
51 // The shapes here establish the legal runtime tensor shapes:
52 // a: [2, 3]
53 // b: [3, 2]
54 // c: [2, 2]
55 let a = graph.input_node(vec![2, 3]);
56 let b = graph.input_node(vec![3, 2]);
57 let c = graph.input_node(vec![2, 2]);
58
59 // Build intermediate operations.
60 //
61 // `relu(a)` preserves shape [2, 3].
62 let ra = graph.relu(a).expect("Valid ReLU operation should succeed");
63
64 // `relu(b)` preserves shape [3, 2].
65 let rb = graph.relu(b).expect("Valid ReLU operation should succeed");
66
67 // `matmul(ra, rb)` combines [2, 3] x [3, 2] -> [2, 2].
68 //
69 // This is a good example of graph-level validation preventing malformed graphs
70 // before execution ever begins.
71 let mm = graph
72 .matmul(ra, rb)
73 .expect("Valid matmul operation should succeed");
74
75 // `add(mm, c)` adds two [2, 2] tensors and also yields [2, 2].
76 let out = graph
77 .add(mm, c)
78 .expect("Valid add operation should succeed");
79
80 graph
81 .set_output_node(out)
82 .expect("Setting output node should succeed");
83
84 // Bind concrete runtime values.
85 //
86 // These values are only examples; the graph structure is independent of them.
87 // The same graph can be executed many times with different input tensors.
88 let a_tensor = Tensor::from_vec(vec![2, 3], vec![-1.0, 2.0, -3.0, 4.0, -5.0, 6.0])
89 .expect("Tensor construction should succeed");
90
91 let b_tensor = Tensor::from_vec(vec![3, 2], vec![-7.0, 8.0, 9.0, -10.0, 11.0, 12.0])
92 .expect("Tensor construction should succeed");
93
94 let c_tensor = Tensor::from_vec(vec![2, 2], vec![0.5, 1.5, 2.5, 3.5])
95 .expect("Tensor construction should succeed");
96
97 let exec = Executor::new(KernelRegistry::default());
98
99 let outputs = exec
100 .execute(&graph, vec![(a, a_tensor), (b, b_tensor), (c, c_tensor)])
101 .expect("Execution should succeed");
102
103 let result = outputs
104 .get(&out)
105 .expect("Declared output should be present in executor results");
106
107 println!("Computed output for node {:?}: {:?}", out, result);
108}

29fn main() {
30 // Build the graph:
31 //
32 // a ----\
33 // add ---> out
34 // b ----/
35 //
36 // Here `a` and `b` are graph input nodes. They do not yet have runtime values;
37 // they only declare that the graph expects tensors of shape [2, 2].
38 let mut graph = Graph::new();
39
40 let a = graph.input_node(vec![2, 2]);
41 let b = graph.input_node(vec![2, 2]);
42
43 // Add an operation node.
44 //
45 // `graph.add(a, b)` does not perform arithmetic immediately. It adds a new node to
46 // the graph describing a future Add operation whose inputs are `a` and `b`.
47 //
48 // Shape validation happens here at graph-construction time. Since both inputs are
49 // [2, 2], the resulting Add node is also [2, 2].
50 let out = graph
51 .add(a, b)
52 .expect("Adding valid input nodes should succeed");
53
54 // Mark the node as an output. The executor will return a tensor for every node
55 // designated as a graph output.
56 graph
57 .set_output_node(out)
58 .expect("Setting output node should succeed");
59
60 // Create runtime tensors for the graph input nodes.
61 //
62 // These must match the shapes declared by the corresponding input nodes.
63 let a_tensor = Tensor::from_vec(vec![2, 2], vec![1.0, 2.0, 3.0, 4.0])
64 .expect("Tensor construction should succeed");
65 let b_tensor = Tensor::from_vec(vec![2, 2], vec![10.0, 20.0, 30.0, 40.0])
66 .expect("Tensor construction should succeed");
67
68 // Construct an executor with the default kernel registry.
69 //
70 // The registry determines which kernel implementation is used for each `OpKind`.
71 // In this example, the default registry is expected to contain an Add kernel.
72 let exec = Executor::new(KernelRegistry::default());
73
74 // Execute the graph.
75 //
76 // The bindings are `(NodeId, Tensor)` pairs. Each input node in the graph must be
77 // bound exactly once at runtime.
78 //
79 // Internally, the executor:
80 // 1. validates the bindings,
81 // 2. topologically orders the graph,
82 // 3. executes non-input nodes using registered kernels,
83 // 4. returns tensors for all declared output nodes.
84 let outputs = exec
85 .execute(&graph, vec![(a, a_tensor), (b, b_tensor)])
86 .expect("Execution should succeed");
87
88 let result = outputs
89 .get(&out)
90 .expect("Declared output should be present in executor results");
91
92 println!("Computed output for node {:?}: {:?}", out, result);
93}

46fn main() {
47 let mut graph = Graph::new();
48
49 let input_width = 3;
50 let hidden_widths = [2, 2];
51 let output_width = 2;
52 let shape = vec![1, 4];
53
54 // Create the input layer.
55 //
56 // Each input node declares that execution must provide a [1, 4] tensor for it.
57 let mut current_layer: Vec<NodeId> = (0..input_width)
58 .map(|_| graph.input_node(shape.clone()))
59 .collect();
60
61 let input_ids = current_layer.clone();
62
63 // Build hidden layers iteratively.
64 //
65 // This loop is only in graph construction. The resulting graph is still a
66 // feedforward DAG with no cycles.
67 for &layer_width in &hidden_widths {
68 let mut next_layer = Vec::with_capacity(layer_width);
69
70 for _ in 0..layer_width {
71 // Start accumulation with the first node of the current layer.
72 let mut acc = current_layer[0];
73
74 // Repeatedly add the remaining nodes from the current layer.
75 //
76 // Each `graph.add(...)` creates a new intermediate node.
77        for &node in &current_layer[1..] {
78 acc = graph
79 .add(acc, node)
80 .expect("Adding nodes in a layer should succeed");
81 }
82
83 // Apply a nonlinearity at the end of the layer computation.
84 let out = graph
85 .relu(acc)
86 .expect("Applying ReLU after accumulation should succeed");
87
88 next_layer.push(out);
89 }
90
91 current_layer = next_layer;
92 }
93
94 // Build the output layer the same way.
95 let mut output_ids = Vec::with_capacity(output_width);
96
97 for _ in 0..output_width {
98 let mut acc = current_layer[0];
99
100        for &node in &current_layer[1..] {
101 acc = graph
102 .add(acc, node)
103 .expect("Adding nodes in the output layer should succeed");
104 }
105
106 let out = graph
107 .relu(acc)
108 .expect("Applying ReLU in the output layer should succeed");
109
110 graph
111 .set_output_node(out)
112 .expect("Setting output node should succeed");
113
114 output_ids.push(out);
115 }
116
117 // Bind concrete input tensors.
118 //
119 // As in the smaller examples, bindings are keyed by input-node `NodeId`.
120 // The graph can be re-used with different runtime values.
121 let bindings = vec![
122 (
123 input_ids[0],
124 Tensor::from_vec(vec![1, 4], vec![1.0, -2.0, 3.0, -4.0])
125 .expect("Tensor construction should succeed"),
126 ),
127 (
128 input_ids[1],
129 Tensor::from_vec(vec![1, 4], vec![0.5, 1.5, -2.5, 3.5])
130 .expect("Tensor construction should succeed"),
131 ),
132 (
133 input_ids[2],
134 Tensor::from_vec(vec![1, 4], vec![10.0, -20.0, 30.0, -40.0])
135 .expect("Tensor construction should succeed"),
136 ),
137 ];
138
139 let exec = Executor::new(KernelRegistry::default());
140 let outputs = exec
141 .execute(&graph, bindings)
142 .expect("Execution should succeed");
143
144 for out in output_ids {
145 let tensor = outputs
146 .get(&out)
147 .expect("Declared output should be present in executor results");
148
149 println!("Computed output for node {:?}: {:?}", out, tensor);
150 }
151}

Source

pub fn execute(
&self,
graph: &Graph,
inputs: Vec<(NodeId, Tensor)>,
) -> Result<HashMap<NodeId, Tensor>, ExecutionError>
pub fn execute( &self, graph: &Graph, inputs: Vec<(NodeId, Tensor)>, ) -> Result<HashMap<NodeId, Tensor>, ExecutionError>
Executes graph using the provided input bindings.
Each binding is a (NodeId, Tensor) pair supplying the runtime value for a
graph input node. Execution proceeds in deterministic topological order:
- Validate the graph topology.
- Validate input bindings.
- Execute every non-input node using the corresponding registered kernel.
- Return a map containing the tensors for all graph output nodes.
The returned map is keyed by output NodeId. Output order is not part of
the contract.
§Binding Rules
The inputs vector must satisfy all of the following:
- every bound node must exist in graph,
- every bound node must be an OpKind::Input node,
- every graph input node must appear exactly once, and
- every bound tensor shape must match the input node’s declared shape.
§Errors
Returns:
- ExecutionError::GraphError if topological traversal or graph lookup fails,
- ExecutionError::DuplicateBinding if the same input node is bound more than once,
- ExecutionError::InvalidBindingNode if a binding references a node not in the graph,
- ExecutionError::BindingToNonInputNode if a binding targets a non-input node,
- ExecutionError::InputShapeMismatch if a bound tensor has the wrong shape,
- ExecutionError::MissingInput if any graph input node is not bound,
- ExecutionError::KernelNotFound if no kernel is registered for an op,
- ExecutionError::KernelExecutionFailed if a kernel returns an error during execution,
- ExecutionError::InternalError if an internal invariant is violated.
§Examples
let mut g = Graph::new();
let x = g.input_node(vec![2, 2]);
let y = g.relu(x).expect("Valid ReLU operation should succeed");
g.set_output_node(y).expect("Valid output node should succeed");
let x_tensor = Tensor::zeros(vec![2, 2]).expect("Tensor allocation should succeed");
let exec = Executor::new(KernelRegistry::default());
let outputs = exec
.execute(&g, vec![(x, x_tensor)])
.expect("Execution should succeed");
assert!(outputs.contains_key(&y));

Examples found in repository?
78fn main() {
79 // Build a tiny graph:
80 //
81 // out = add(a, b)
82 //
83 // The graph describes *what* should be computed, not how.
84 let mut graph = Graph::new();
85
86 let a = graph.input_node(vec![2, 2]);
87 let b = graph.input_node(vec![2, 2]);
88 let out = graph
89 .add(a, b)
90 .expect("Adding valid input nodes should succeed");
91
92 graph
93 .set_output_node(out)
94 .expect("Setting output node should succeed");
95
96 // Create a custom registry.
97 //
98 // Start from an empty registry and explicitly register only the kernel(s)
99 // needed by this graph.
100 let mut registry = KernelRegistry::new();
101
102 // Register our custom Add kernel.
103 //
104 // `register(...)` returns the previous mapping if one existed.
105 let old = registry.register(OpKind::Add, Box::new(CustomAddKernel));
106 assert!(
107 old.is_none(),
108 "First Add registration should not replace an existing kernel"
109 );
110
111 // Construct the executor with the custom registry.
112 let exec = Executor::new(registry);
113
114 // Bind runtime inputs.
115 //
116 // These are ordinary tensors supplied for the graph input nodes.
117 let a_tensor = Tensor::from_vec(vec![2, 2], vec![1.0, 2.0, 3.0, 4.0])
118 .expect("Tensor construction should succeed");
119 let b_tensor = Tensor::from_vec(vec![2, 2], vec![10.0, 20.0, 30.0, 40.0])
120 .expect("Tensor construction should succeed");
121
122 // Execute the graph.
123 //
124 // During execution:
125 // - the executor validates input bindings,
126 // - walks the graph in topological order,
127 // - sees an `OpKind::Add` node,
128 // - looks up `OpKind::Add` in the registry,
129 // - dispatches to `CustomAddKernel::compute(...)`.
130 let outputs = exec
131 .execute(&graph, vec![(a, a_tensor), (b, b_tensor)])
132 .expect("Execution should succeed");
133
134 let result = outputs
135 .get(&out)
136 .expect("Declared output should be present in executor results");
137
138 println!("Computed output for node {:?}: {:?}", out, result);
139}

More examples
46fn main() {
47 let mut graph = Graph::new();
48
49 // Declare graph inputs.
50 //
51 // The shapes here establish the legal runtime tensor shapes:
52 // a: [2, 3]
53 // b: [3, 2]
54 // c: [2, 2]
55 let a = graph.input_node(vec![2, 3]);
56 let b = graph.input_node(vec![3, 2]);
57 let c = graph.input_node(vec![2, 2]);
58
59 // Build intermediate operations.
60 //
61 // `relu(a)` preserves shape [2, 3].
62 let ra = graph.relu(a).expect("Valid ReLU operation should succeed");
63
64 // `relu(b)` preserves shape [3, 2].
65 let rb = graph.relu(b).expect("Valid ReLU operation should succeed");
66
67 // `matmul(ra, rb)` combines [2, 3] x [3, 2] -> [2, 2].
68 //
69 // This is a good example of graph-level validation preventing malformed graphs
70 // before execution ever begins.
71 let mm = graph
72 .matmul(ra, rb)
73 .expect("Valid matmul operation should succeed");
74
75 // `add(mm, c)` adds two [2, 2] tensors and also yields [2, 2].
76 let out = graph
77 .add(mm, c)
78 .expect("Valid add operation should succeed");
79
80 graph
81 .set_output_node(out)
82 .expect("Setting output node should succeed");
83
84 // Bind concrete runtime values.
85 //
86 // These values are only examples; the graph structure is independent of them.
87 // The same graph can be executed many times with different input tensors.
88 let a_tensor = Tensor::from_vec(vec![2, 3], vec![-1.0, 2.0, -3.0, 4.0, -5.0, 6.0])
89 .expect("Tensor construction should succeed");
90
91 let b_tensor = Tensor::from_vec(vec![3, 2], vec![-7.0, 8.0, 9.0, -10.0, 11.0, 12.0])
92 .expect("Tensor construction should succeed");
93
94 let c_tensor = Tensor::from_vec(vec![2, 2], vec![0.5, 1.5, 2.5, 3.5])
95 .expect("Tensor construction should succeed");
96
97 let exec = Executor::new(KernelRegistry::default());
98
99 let outputs = exec
100 .execute(&graph, vec![(a, a_tensor), (b, b_tensor), (c, c_tensor)])
101 .expect("Execution should succeed");
102
103 let result = outputs
104 .get(&out)
105 .expect("Declared output should be present in executor results");
106
107 println!("Computed output for node {:?}: {:?}", out, result);
108}

29fn main() {
30 // Build the graph:
31 //
32 // a ----\
33 // add ---> out
34 // b ----/
35 //
36 // Here `a` and `b` are graph input nodes. They do not yet have runtime values;
37 // they only declare that the graph expects tensors of shape [2, 2].
38 let mut graph = Graph::new();
39
40 let a = graph.input_node(vec![2, 2]);
41 let b = graph.input_node(vec![2, 2]);
42
43 // Add an operation node.
44 //
45 // `graph.add(a, b)` does not perform arithmetic immediately. It adds a new node to
46 // the graph describing a future Add operation whose inputs are `a` and `b`.
47 //
48 // Shape validation happens here at graph-construction time. Since both inputs are
49 // [2, 2], the resulting Add node is also [2, 2].
50 let out = graph
51 .add(a, b)
52 .expect("Adding valid input nodes should succeed");
53
54 // Mark the node as an output. The executor will return a tensor for every node
55 // designated as a graph output.
56 graph
57 .set_output_node(out)
58 .expect("Setting output node should succeed");
59
60 // Create runtime tensors for the graph input nodes.
61 //
62 // These must match the shapes declared by the corresponding input nodes.
63 let a_tensor = Tensor::from_vec(vec![2, 2], vec![1.0, 2.0, 3.0, 4.0])
64 .expect("Tensor construction should succeed");
65 let b_tensor = Tensor::from_vec(vec![2, 2], vec![10.0, 20.0, 30.0, 40.0])
66 .expect("Tensor construction should succeed");
67
68 // Construct an executor with the default kernel registry.
69 //
70 // The registry determines which kernel implementation is used for each `OpKind`.
71 // In this example, the default registry is expected to contain an Add kernel.
72 let exec = Executor::new(KernelRegistry::default());
73
74 // Execute the graph.
75 //
76 // The bindings are `(NodeId, Tensor)` pairs. Each input node in the graph must be
77 // bound exactly once at runtime.
78 //
79 // Internally, the executor:
80 // 1. validates the bindings,
81 // 2. topologically orders the graph,
82 // 3. executes non-input nodes using registered kernels,
83 // 4. returns tensors for all declared output nodes.
84 let outputs = exec
85 .execute(&graph, vec![(a, a_tensor), (b, b_tensor)])
86 .expect("Execution should succeed");
87
88 let result = outputs
89 .get(&out)
90 .expect("Declared output should be present in executor results");
91
92 println!("Computed output for node {:?}: {:?}", out, result);
93}

46fn main() {
47 let mut graph = Graph::new();
48
49 let input_width = 3;
50 let hidden_widths = [2, 2];
51 let output_width = 2;
52 let shape = vec![1, 4];
53
54 // Create the input layer.
55 //
56 // Each input node declares that execution must provide a [1, 4] tensor for it.
57 let mut current_layer: Vec<NodeId> = (0..input_width)
58 .map(|_| graph.input_node(shape.clone()))
59 .collect();
60
61 let input_ids = current_layer.clone();
62
63 // Build hidden layers iteratively.
64 //
65 // This loop is only in graph construction. The resulting graph is still a
66 // feedforward DAG with no cycles.
67 for &layer_width in &hidden_widths {
68 let mut next_layer = Vec::with_capacity(layer_width);
69
70 for _ in 0..layer_width {
71 // Start accumulation with the first node of the current layer.
72 let mut acc = current_layer[0];
73
74 // Repeatedly add the remaining nodes from the current layer.
75 //
76 // Each `graph.add(...)` creates a new intermediate node.
77        for &node in &current_layer[1..] {
78 acc = graph
79 .add(acc, node)
80 .expect("Adding nodes in a layer should succeed");
81 }
82
83 // Apply a nonlinearity at the end of the layer computation.
84 let out = graph
85 .relu(acc)
86 .expect("Applying ReLU after accumulation should succeed");
87
88 next_layer.push(out);
89 }
90
91 current_layer = next_layer;
92 }
93
94 // Build the output layer the same way.
95 let mut output_ids = Vec::with_capacity(output_width);
96
97 for _ in 0..output_width {
98 let mut acc = current_layer[0];
99
100        for &node in &current_layer[1..] {
101 acc = graph
102 .add(acc, node)
103 .expect("Adding nodes in the output layer should succeed");
104 }
105
106 let out = graph
107 .relu(acc)
108 .expect("Applying ReLU in the output layer should succeed");
109
110 graph
111 .set_output_node(out)
112 .expect("Setting output node should succeed");
113
114 output_ids.push(out);
115 }
116
117 // Bind concrete input tensors.
118 //
119 // As in the smaller examples, bindings are keyed by input-node `NodeId`.
120 // The graph can be re-used with different runtime values.
121 let bindings = vec![
122 (
123 input_ids[0],
124 Tensor::from_vec(vec![1, 4], vec![1.0, -2.0, 3.0, -4.0])
125 .expect("Tensor construction should succeed"),
126 ),
127 (
128 input_ids[1],
129 Tensor::from_vec(vec![1, 4], vec![0.5, 1.5, -2.5, 3.5])
130 .expect("Tensor construction should succeed"),
131 ),
132 (
133 input_ids[2],
134 Tensor::from_vec(vec![1, 4], vec![10.0, -20.0, 30.0, -40.0])
135 .expect("Tensor construction should succeed"),
136 ),
137 ];
138
139 let exec = Executor::new(KernelRegistry::default());
140 let outputs = exec
141 .execute(&graph, bindings)
142 .expect("Execution should succeed");
143
144 for out in output_ids {
145 let tensor = outputs
146 .get(&out)
147 .expect("Declared output should be present in executor results");
148
149 println!("Computed output for node {:?}: {:?}", out, tensor);
150 }
151}