tract_onnx/prost/onnx.rs
/// Attributes
///
/// A named attribute containing either singular float, integer, string, graph,
/// and tensor values, or repeated float, integer, string, graph, and tensor values.
/// An AttributeProto MUST contain the name field, and *only one* of the
/// following content fields, effectively enforcing a C/C++ union equivalent.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct AttributeProto {
    /// The name field MUST be present for this version of the IR.
    ///
    /// namespace Attribute
    #[prost(string, tag="1")]
    pub name: ::prost::alloc::string::String,
    /// If ref_attr_name is not empty, ref_attr_name is the attribute name in the parent function.
    /// In this case, this AttributeProto does not contain data, and it's a reference to an attribute
    /// in the parent scope.
    /// NOTE: This should ONLY be used in a function (sub-graph). It's invalid to be used in the main graph.
    #[prost(string, tag="21")]
    pub ref_attr_name: ::prost::alloc::string::String,
    /// A human-readable documentation for this attribute. Markdown is allowed.
    #[prost(string, tag="13")]
    pub doc_string: ::prost::alloc::string::String,
    /// The type field MUST be present for this version of the IR.
    /// For 0.0.1 versions of the IR, this field was not defined, and
    /// implementations needed to use has_field heuristics to determine
    /// which value field was in use. For IR_VERSION 0.0.2 or later, this
    /// field MUST be set and match the f|i|s|t|... field in use. This
    /// change was made to accommodate proto3 implementations.
    ///
    /// discriminator that indicates which field below is in use
    #[prost(enumeration="attribute_proto::AttributeType", tag="20")]
    pub r#type: i32,
    /// Exactly ONE of the following fields must be present for this version of the IR.
    ///
    /// float
    #[prost(float, tag="2")]
    pub f: f32,
    /// int
    #[prost(int64, tag="3")]
    pub i: i64,
    /// UTF-8 string
    #[prost(bytes="vec", tag="4")]
    pub s: ::prost::alloc::vec::Vec<u8>,
    /// tensor value
    #[prost(message, optional, tag="5")]
    pub t: ::core::option::Option<TensorProto>,
    /// graph
    #[prost(message, optional, tag="6")]
    pub g: ::core::option::Option<GraphProto>,
    /// sparse tensor value
    #[prost(message, optional, tag="22")]
    pub sparse_tensor: ::core::option::Option<SparseTensorProto>,
    // Do not use the field below; it's deprecated.
    // optional ValueProto v = 12; // value - subsumes everything but graph

    /// list of floats
    #[prost(float, repeated, tag="7")]
    pub floats: ::prost::alloc::vec::Vec<f32>,
    /// list of ints
    #[prost(int64, repeated, tag="8")]
    pub ints: ::prost::alloc::vec::Vec<i64>,
    /// list of UTF-8 strings
    #[prost(bytes="vec", repeated, tag="9")]
    pub strings: ::prost::alloc::vec::Vec<::prost::alloc::vec::Vec<u8>>,
    /// list of tensors
    #[prost(message, repeated, tag="10")]
    pub tensors: ::prost::alloc::vec::Vec<TensorProto>,
    /// list of graphs
    #[prost(message, repeated, tag="11")]
    pub graphs: ::prost::alloc::vec::Vec<GraphProto>,
    /// list of sparse tensors
    #[prost(message, repeated, tag="23")]
    pub sparse_tensors: ::prost::alloc::vec::Vec<SparseTensorProto>,
    /// list of type protos
    #[prost(message, repeated, tag="15")]
    pub type_protos: ::prost::alloc::vec::Vec<TypeProto>,
}
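// --- Illustration only; not part of the generated bindings. ---
// A minimal sketch of reading a scalar attribute while honoring the `type`
// discriminator documented above: per the union semantics, a value field is
// only meaningful when the discriminator names it. The helper name
// `example_attr_as_i64` is hypothetical.
pub fn example_attr_as_i64(attr: &AttributeProto) -> Option<i64> {
    // Trust `attr.i` only when the discriminator says the INT field is in use.
    if attr.r#type == attribute_proto::AttributeType::Int as i32 {
        Some(attr.i)
    } else {
        None
    }
}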
/// Nested message and enum types in `AttributeProto`.
pub mod attribute_proto {
    /// Note: this enum is structurally identical to the OpSchema::AttrType
    /// enum defined in schema.h. If you rev one, you likely need to rev the other.
    #[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord, ::prost::Enumeration)]
    #[repr(i32)]
    pub enum AttributeType {
        Undefined = 0,
        Float = 1,
        Int = 2,
        String = 3,
        Tensor = 4,
        Graph = 5,
        SparseTensor = 11,
        TypeProto = 13,
        Floats = 6,
        Ints = 7,
        Strings = 8,
        Tensors = 9,
        Graphs = 10,
        SparseTensors = 12,
        TypeProtos = 14,
    }
    impl AttributeType {
        /// String value of the enum field names used in the ProtoBuf definition.
        ///
        /// The values are not transformed in any way and thus are considered stable
        /// (if the ProtoBuf definition does not change) and safe for programmatic use.
        pub fn as_str_name(&self) -> &'static str {
            match self {
                AttributeType::Undefined => "UNDEFINED",
                AttributeType::Float => "FLOAT",
                AttributeType::Int => "INT",
                AttributeType::String => "STRING",
                AttributeType::Tensor => "TENSOR",
                AttributeType::Graph => "GRAPH",
                AttributeType::SparseTensor => "SPARSE_TENSOR",
                AttributeType::TypeProto => "TYPE_PROTO",
                AttributeType::Floats => "FLOATS",
                AttributeType::Ints => "INTS",
                AttributeType::Strings => "STRINGS",
                AttributeType::Tensors => "TENSORS",
                AttributeType::Graphs => "GRAPHS",
                AttributeType::SparseTensors => "SPARSE_TENSORS",
                AttributeType::TypeProtos => "TYPE_PROTOS",
            }
        }
    }
}
/// Defines information on a value, including the name, the type, and
/// the shape of the value.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct ValueInfoProto {
    /// This field MUST be present in this version of the IR.
    ///
    /// namespace Value
    #[prost(string, tag="1")]
    pub name: ::prost::alloc::string::String,
    /// This field MUST be present in this version of the IR.
    #[prost(message, optional, tag="2")]
    pub r#type: ::core::option::Option<TypeProto>,
    /// A human-readable documentation for this value. Markdown is allowed.
    #[prost(string, tag="3")]
    pub doc_string: ::prost::alloc::string::String,
}
/// Nodes
///
/// Computation graphs are made up of a DAG of nodes, which represent what is
/// commonly called a "layer" or "pipeline stage" in machine learning frameworks.
///
/// For example, it can be a node of type "Conv" that takes in an image, a filter
/// tensor and a bias tensor, and produces the convolved output.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct NodeProto {
    /// namespace Value
    #[prost(string, repeated, tag="1")]
    pub input: ::prost::alloc::vec::Vec<::prost::alloc::string::String>,
    /// namespace Value
    #[prost(string, repeated, tag="2")]
    pub output: ::prost::alloc::vec::Vec<::prost::alloc::string::String>,
    /// An optional identifier for this node in a graph.
    /// This field MAY be absent in this version of the IR.
    ///
    /// namespace Node
    #[prost(string, tag="3")]
    pub name: ::prost::alloc::string::String,
    /// The symbolic identifier of the Operator to execute.
    ///
    /// namespace Operator
    #[prost(string, tag="4")]
    pub op_type: ::prost::alloc::string::String,
    /// The domain of the OperatorSet that specifies the operator named by op_type.
    ///
    /// namespace Domain
    #[prost(string, tag="7")]
    pub domain: ::prost::alloc::string::String,
    /// Additional named attributes.
    #[prost(message, repeated, tag="5")]
    pub attribute: ::prost::alloc::vec::Vec<AttributeProto>,
    /// A human-readable documentation for this node. Markdown is allowed.
    #[prost(string, tag="6")]
    pub doc_string: ::prost::alloc::string::String,
}
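// --- Illustration only; not part of the generated bindings. ---
// A hedged sketch of building a NodeProto by hand: an "Add" node wiring two
// value names to one output. All value/node names here are made up;
// `..Default::default()` relies on the Default impl that the
// `::prost::Message` derive provides for every message type.
pub fn example_add_node() -> NodeProto {
    NodeProto {
        input: vec!["x".to_string(), "y".to_string()],
        output: vec!["sum".to_string()],
        name: "add_0".to_string(),
        op_type: "Add".to_string(),
        // The empty domain selects the default ONNX operator set.
        domain: String::new(),
        ..Default::default()
    }
}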
/// Models
///
/// ModelProto is a top-level file/container format for bundling an ML model and
/// associating its computation graph with metadata.
///
/// The semantics of the model are described by the associated GraphProto.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct ModelProto {
    /// The version of the IR this model targets. See Version enum above.
    /// This field MUST be present.
    #[prost(int64, tag="1")]
    pub ir_version: i64,
    /// The OperatorSets this model relies on.
    /// All ModelProtos MUST have at least one entry that
    /// specifies which version of the ONNX OperatorSet is
    /// being imported.
    ///
    /// All nodes in the ModelProto's graph will bind against the
    /// same-domain/same-op_type operator with the HIGHEST version
    /// in the referenced operator sets.
    #[prost(message, repeated, tag="8")]
    pub opset_import: ::prost::alloc::vec::Vec<OperatorSetIdProto>,
    /// The name of the framework or tool used to generate this model.
    /// This field SHOULD be present to indicate which implementation/tool/framework
    /// emitted the model.
    #[prost(string, tag="2")]
    pub producer_name: ::prost::alloc::string::String,
    /// The version of the framework or tool used to generate this model.
    /// This field SHOULD be present to indicate which implementation/tool/framework
    /// emitted the model.
    #[prost(string, tag="3")]
    pub producer_version: ::prost::alloc::string::String,
    /// Domain name of the model.
    /// We use reverse domain names as name space indicators. For example:
    /// `com.facebook.fair` or `com.microsoft.cognitiveservices`
    ///
    /// Together with `model_version` and GraphProto.name, this forms the unique identity of
    /// the graph.
    #[prost(string, tag="4")]
    pub domain: ::prost::alloc::string::String,
    /// The version of the graph encoded. See Version enum below.
    #[prost(int64, tag="5")]
    pub model_version: i64,
    /// A human-readable documentation for this model. Markdown is allowed.
    #[prost(string, tag="6")]
    pub doc_string: ::prost::alloc::string::String,
    /// The parameterized graph that is evaluated to execute the model.
    #[prost(message, optional, tag="7")]
    pub graph: ::core::option::Option<GraphProto>,
    /// Named metadata values; keys should be distinct.
    #[prost(message, repeated, tag="14")]
    pub metadata_props: ::prost::alloc::vec::Vec<StringStringEntryProto>,
    /// Training-specific information. Sequentially executing all stored
    /// `TrainingInfoProto.algorithm`s and assigning their outputs following
    /// the corresponding `TrainingInfoProto.update_binding`s is one training
    /// iteration. Similarly, to initialize the model
    /// (as if training hasn't happened), the user should sequentially execute
    /// all stored `TrainingInfoProto.initialization`s and assign their outputs
    /// using `TrainingInfoProto.initialization_binding`s.
    ///
    /// If this field is empty, the training behavior of the model is undefined.
    #[prost(message, repeated, tag="20")]
    pub training_info: ::prost::alloc::vec::Vec<TrainingInfoProto>,
    /// A list of function protos local to the model.
    ///
    /// The name of a function ("FunctionProto.name") should be unique within the domain ("FunctionProto.domain").
    /// In case of any conflicts, the behavior (whether the model-local functions are given higher priority,
    /// or standard operator sets are given higher priority, or this is treated as an error) is defined by
    /// the runtimes.
    ///
    /// The operator sets imported by a FunctionProto should be compatible with the ones
    /// imported by the ModelProto and other model-local FunctionProtos.
    /// For example, if the same operator set, say 'A', is imported by a FunctionProto and the ModelProto,
    /// or by two FunctionProtos, then the versions of the operator set may differ, but
    /// the operator schema returned for the (op_type, domain, version) combination
    /// must be the same for every node in the function body.
    ///
    /// One FunctionProto can reference other FunctionProtos in the model; however, recursive references
    /// are not allowed.
    #[prost(message, repeated, tag="25")]
    pub functions: ::prost::alloc::vec::Vec<FunctionProto>,
}
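// --- Illustration only; not part of the generated bindings. ---
// A minimal sketch of decoding a serialized model and locating the version of
// the default ONNX operator set it imports (the empty domain).
// `prost::Message::decode` is the standard prost entry point; the rest is
// plain field access. Taking the max mirrors the HIGHEST-version binding rule
// documented on `opset_import` above.
pub fn example_decode_model(
    bytes: &[u8],
) -> Result<(i64, Option<i64>), ::prost::DecodeError> {
    let model = <ModelProto as ::prost::Message>::decode(bytes)?;
    let onnx_opset = model
        .opset_import
        .iter()
        .filter(|o| o.domain.is_empty())
        .map(|o| o.version)
        .max();
    Ok((model.ir_version, onnx_opset))
}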
/// StringStringEntryProto follows the pattern for cross-proto-version maps.
/// See <https://developers.google.com/protocol-buffers/docs/proto3#maps>
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct StringStringEntryProto {
    #[prost(string, tag="1")]
    pub key: ::prost::alloc::string::String,
    #[prost(string, tag="2")]
    pub value: ::prost::alloc::string::String,
}
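// --- Illustration only; not part of the generated bindings. ---
// The cross-proto-version map pattern above is just a repeated key/value
// pair; a sketch of collapsing it into a std HashMap. Duplicate keys, which
// the spec disallows anyway, would silently overwrite earlier entries here.
pub fn example_entries_to_map(
    entries: &[StringStringEntryProto],
) -> std::collections::HashMap<String, String> {
    entries
        .iter()
        .map(|e| (e.key.clone(), e.value.clone()))
        .collect()
}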
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct TensorAnnotation {
    #[prost(string, optional, tag="1")]
    pub tensor_name: ::core::option::Option<::prost::alloc::string::String>,
    /// <key, value> pairs to annotate the tensor specified by <tensor_name> above.
    /// The keys used in the mapping below must be pre-defined in the ONNX spec.
    /// For example, for the 8-bit linear quantization case, 'SCALE_TENSOR' and 'ZERO_POINT_TENSOR'
    /// will be pre-defined as quantization parameter keys.
    #[prost(message, repeated, tag="2")]
    pub quant_parameter_tensor_names: ::prost::alloc::vec::Vec<StringStringEntryProto>,
}
/// Graphs
///
/// A graph defines the computational logic of a model and is comprised of a parameterized
/// list of nodes that form a directed acyclic graph based on their inputs and outputs.
/// This is the equivalent of the "network" or "graph" in many deep learning
/// frameworks.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct GraphProto {
    /// The nodes in the graph, sorted topologically.
    #[prost(message, repeated, tag="1")]
    pub node: ::prost::alloc::vec::Vec<NodeProto>,
    /// The name of the graph.
    ///
    /// namespace Graph
    #[prost(string, tag="2")]
    pub name: ::prost::alloc::string::String,
    /// A list of named tensor values, used to specify constant inputs of the graph.
    /// Each initializer (both TensorProto as well as SparseTensorProto) MUST have a name.
    /// The name MUST be unique across both initializer and sparse_initializer,
    /// but the name MAY also appear in the input list.
    #[prost(message, repeated, tag="5")]
    pub initializer: ::prost::alloc::vec::Vec<TensorProto>,
    /// Initializers (see above) stored in sparse format.
    #[prost(message, repeated, tag="15")]
    pub sparse_initializer: ::prost::alloc::vec::Vec<SparseTensorProto>,
    /// A human-readable documentation for this graph. Markdown is allowed.
    #[prost(string, tag="10")]
    pub doc_string: ::prost::alloc::string::String,
    /// The inputs and outputs of the graph.
    #[prost(message, repeated, tag="11")]
    pub input: ::prost::alloc::vec::Vec<ValueInfoProto>,
    #[prost(message, repeated, tag="12")]
    pub output: ::prost::alloc::vec::Vec<ValueInfoProto>,
    /// Information for the values in the graph. The ValueInfoProto.name values
    /// must be distinct. It is optional for a value to appear in the value_info list.
    #[prost(message, repeated, tag="13")]
    pub value_info: ::prost::alloc::vec::Vec<ValueInfoProto>,
    /// This field carries information to indicate the mapping between a tensor and its
    /// quantization parameter tensors. For example:
    /// for tensor 'a', it may have {'SCALE_TENSOR', 'a_scale'} and {'ZERO_POINT_TENSOR', 'a_zero_point'} annotated,
    /// which means tensor 'a_scale' and tensor 'a_zero_point' are the scale and zero point of tensor 'a' in the model.
    #[prost(message, repeated, tag="14")]
    pub quantization_annotation: ::prost::alloc::vec::Vec<TensorAnnotation>,
}
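// --- Illustration only; not part of the generated bindings. ---
// A sketch of one consequence of the naming rules above: under standard ONNX
// IR semantics, an input whose name also appears in `initializer` has a
// default value and is effectively optional at run time, so the remaining
// inputs are the ones a caller must feed. Sparse initializers are ignored
// here for brevity.
pub fn example_required_inputs(graph: &GraphProto) -> Vec<String> {
    let with_default: std::collections::HashSet<&str> =
        graph.initializer.iter().map(|t| t.name.as_str()).collect();
    graph
        .input
        .iter()
        .map(|v| v.name.clone())
        .filter(|n| !with_default.contains(n.as_str()))
        .collect()
}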
/// Training information
/// TrainingInfoProto stores information for training a model.
/// In particular, this defines two functionalities: an initialization-step
/// and a training-algorithm-step. Initialization resets the model
/// back to its original state as if no training has been performed.
/// The training algorithm improves the model based on input data.
///
/// The semantics of the initialization-step is that the initializers
/// in ModelProto.graph and in TrainingInfoProto.algorithm are first
/// initialized as specified by the initializers in the graph, and then
/// updated by the "initialization_binding" in every instance in
/// ModelProto.training_info.
///
/// The field "algorithm" defines a computation graph which represents a
/// training algorithm's step. After the execution of a
/// TrainingInfoProto.algorithm, the initializers specified by "update_binding"
/// may be immediately updated. If the targeted training algorithm contains
/// consecutive update steps (such as block coordinate descent methods),
/// the user needs to create a TrainingInfoProto for each step.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct TrainingInfoProto {
    /// This field describes a graph to compute the initial tensors
    /// upon starting the training process. The initialization graph has no input
    /// and can have multiple outputs. Usually, trainable tensors in neural
    /// networks are randomly initialized. To achieve that, for each tensor,
    /// the user can put a random number operator such as RandomNormal or
    /// RandomUniform in TrainingInfoProto.initialization.node and assign its
    /// random output to the specific tensor using "initialization_binding".
    /// This graph can also set the initializers in "algorithm" in the same
    /// TrainingInfoProto; a use case is resetting the number of training
    /// iterations to zero.
    ///
    /// By default, this field is an empty graph and its evaluation does not
    /// produce any output. Thus, no initializer would be changed by default.
    #[prost(message, optional, tag="1")]
    pub initialization: ::core::option::Option<GraphProto>,
    /// This field represents a training algorithm step. Given required inputs,
    /// it computes outputs to update initializers in its own or the inference graph's
    /// initializer lists. In general, this field contains loss node, gradient node,
    /// optimizer node, and increment of iteration count.
    ///
    /// An execution of the training algorithm step is performed by executing the
    /// graph obtained by combining the inference graph (namely "ModelProto.graph")
    /// and the "algorithm" graph. That is, the actual
    /// input/initializer/output/node/value_info/sparse_initializer list of
    /// the training graph is the concatenation of
    /// "ModelProto.graph.input/initializer/output/node/value_info/sparse_initializer"
    /// and "algorithm.input/initializer/output/node/value_info/sparse_initializer"
    /// in that order. This combined graph must satisfy the normal ONNX conditions.
    /// Now, let's provide a visualization of graph combination for clarity.
    /// Let the inference graph (i.e., "ModelProto.graph") be
    ///    tensor_a, tensor_b -> MatMul -> tensor_c -> Sigmoid -> tensor_d
    /// and the "algorithm" graph be
    ///    tensor_d -> Add -> tensor_e
    /// The combination process results in
    ///    tensor_a, tensor_b -> MatMul -> tensor_c -> Sigmoid -> tensor_d -> Add -> tensor_e
    ///
    /// Notice that an input of a node in the "algorithm" graph may reference the
    /// output of a node in the inference graph (but not the other way round). Also, an inference
    /// node cannot reference inputs of "algorithm". With these restrictions, the inference graph
    /// can always be run independently without training information.
    ///
    /// By default, this field is an empty graph and its evaluation does not
    /// produce any output. Evaluating the default training step never
    /// updates any initializers.
    #[prost(message, optional, tag="2")]
    pub algorithm: ::core::option::Option<GraphProto>,
    /// This field specifies the bindings from the outputs of "initialization" to
    /// some initializers in "ModelProto.graph.initializer" and
    /// the "algorithm.initializer" in the same TrainingInfoProto.
    /// See "update_binding" below for details.
    ///
    /// By default, this field is empty and no initializer would be changed
    /// by the execution of "initialization".
    #[prost(message, repeated, tag="3")]
    pub initialization_binding: ::prost::alloc::vec::Vec<StringStringEntryProto>,
    /// Gradient-based training is usually an iterative procedure. In one gradient
    /// descent iteration, we apply
    ///
    ///    x = x - r * g
    ///
    /// where "x" is the optimized tensor, "r" stands for learning rate, and "g" is
    /// the gradient of "x" with respect to a chosen loss. To avoid adding assignments
    /// into the training graph, we split the update equation into
    ///
    ///    y = x - r * g
    ///    x = y
    ///
    /// The user needs to save "y = x - r * g" into TrainingInfoProto.algorithm. To
    /// tell that "y" should be assigned to "x", the field "update_binding" may
    /// contain a key-value pair of strings, "x" (key of StringStringEntryProto)
    /// and "y" (value of StringStringEntryProto).
    /// For a neural network with multiple trainable (mutable) tensors, there can
    /// be multiple key-value pairs in "update_binding".
    ///
    /// The initializers appearing as keys in "update_binding" are considered
    /// mutable variables. This implies some behaviors
    /// as described below.
    ///
    ///  1. We have only unique keys in all "update_binding"s so that two
    ///     variables may not have the same name. This ensures that one
    ///     variable is assigned at most once.
    ///  2. The keys must appear in names of "ModelProto.graph.initializer" or
    ///     "TrainingInfoProto.algorithm.initializer".
    ///  3. The values must be output names of "algorithm" or "ModelProto.graph.output".
    ///  4. Mutable variables are initialized to the value specified by the
    ///     corresponding initializer, and then potentially updated by
    ///     "initialization_binding"s and "update_binding"s in "TrainingInfoProto"s.
    ///
    /// This field usually contains names of trainable tensors
    /// (in ModelProto.graph), optimizer states such as momentums in advanced
    /// stochastic gradient methods (in TrainingInfoProto.algorithm),
    /// and the number of training iterations (in TrainingInfoProto.algorithm).
    ///
    /// By default, this field is empty and no initializer would be changed
    /// by the execution of "algorithm".
    #[prost(message, repeated, tag="4")]
    pub update_binding: ::prost::alloc::vec::Vec<StringStringEntryProto>,
}
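// --- Illustration only; not part of the generated bindings. ---
// A sketch of the `x = y` assignment pattern described above: for the update
// rule y = x - r * g stored in `algorithm`, `update_binding` carries the pair
// ("x", "y"). The tensor names are made up.
pub fn example_update_binding() -> StringStringEntryProto {
    StringStringEntryProto {
        key: "x".to_string(),   // the mutable initializer being trained
        value: "y".to_string(), // the `algorithm` output assigned back to it
    }
}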
/// Tensors
///
/// A serialized tensor value.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct TensorProto {
    /// The shape of the tensor.
    #[prost(int64, repeated, tag="1")]
    pub dims: ::prost::alloc::vec::Vec<i64>,
    /// The data type of the tensor.
    #[prost(enumeration="tensor_proto::DataType", tag="2")]
    pub data_type: i32,
    #[prost(message, optional, tag="3")]
    pub segment: ::core::option::Option<tensor_proto::Segment>,
    // Tensor content must be organized in row-major order.
    //
    // Depending on the data_type field, exactly one of the fields below with
    // name ending in _data is used to store the elements of the tensor.

    /// For float and complex64 values.
    /// Complex64 tensors are encoded as a single array of floats,
    /// with the real components appearing in odd numbered positions,
    /// and the corresponding imaginary component appearing in the
    /// subsequent even numbered position. (e.g., [1.0 + 2.0i, 3.0 + 4.0i]
    /// is encoded as [1.0, 2.0, 3.0, 4.0].)
    /// When this field is present, the data_type field MUST be FLOAT or COMPLEX64.
    #[prost(float, repeated, tag="4")]
    pub float_data: ::prost::alloc::vec::Vec<f32>,
    /// For int32, uint8, int8, uint16, int16, bool, and float16 values.
    /// float16 values must be bit-wise converted to a uint16_t prior
    /// to writing to the buffer.
    /// When this field is present, the data_type field MUST be
    /// INT32, INT16, INT8, UINT16, UINT8, BOOL, or FLOAT16.
    #[prost(int32, repeated, tag="5")]
    pub int32_data: ::prost::alloc::vec::Vec<i32>,
    /// For strings.
    /// Each element of string_data is a UTF-8 encoded Unicode
    /// string. No trailing null, no leading BOM. The protobuf "string"
    /// scalar type is not used to match ML community conventions.
    /// When this field is present, the data_type field MUST be STRING.
    #[prost(bytes="vec", repeated, tag="6")]
    pub string_data: ::prost::alloc::vec::Vec<::prost::alloc::vec::Vec<u8>>,
    /// For int64.
    /// When this field is present, the data_type field MUST be INT64.
    #[prost(int64, repeated, tag="7")]
    pub int64_data: ::prost::alloc::vec::Vec<i64>,
    /// Optionally, a name for the tensor.
    ///
    /// namespace Value
    #[prost(string, tag="8")]
    pub name: ::prost::alloc::string::String,
    /// A human-readable documentation for this tensor. Markdown is allowed.
    #[prost(string, tag="12")]
    pub doc_string: ::prost::alloc::string::String,
    /// Serializations can either use one of the fields above, or use this
    /// raw bytes field. The only exception is the string case, where one is
    /// required to store the content in the repeated bytes string_data field.
    ///
    /// When this raw_data field is used to store a tensor value, elements MUST
    /// be stored in fixed-width, little-endian order.
    /// Floating-point data types MUST be stored in IEEE 754 format.
    /// Complex64 elements must be written as two consecutive FLOAT values, real component first.
    /// Complex128 elements must be written as two consecutive DOUBLE values, real component first.
    /// Boolean type MUST be written one byte per tensor element (00000001 for true, 00000000 for false).
    ///
    /// Note: the advantage of the specific fields rather than the raw_data field is
    /// that in some cases (e.g. int data), protobuf does a better packing via
    /// variable-length storage, and may lead to a smaller binary footprint.
    /// When this field is present, the data_type field MUST NOT be STRING or UNDEFINED.
    #[prost(bytes="vec", tag="9")]
    pub raw_data: ::prost::alloc::vec::Vec<u8>,
    /// For double.
    /// Complex128 tensors are encoded as a single array of doubles,
    /// with the real components appearing in odd numbered positions,
    /// and the corresponding imaginary component appearing in the
    /// subsequent even numbered position. (e.g., [1.0 + 2.0i, 3.0 + 4.0i]
    /// is encoded as [1.0, 2.0, 3.0, 4.0].)
    /// When this field is present, the data_type field MUST be DOUBLE or COMPLEX128.
    #[prost(double, repeated, tag="10")]
    pub double_data: ::prost::alloc::vec::Vec<f64>,
    /// For uint64 and uint32 values.
    /// When this field is present, the data_type field MUST be
    /// UINT32 or UINT64.
    #[prost(uint64, repeated, tag="11")]
    pub uint64_data: ::prost::alloc::vec::Vec<u64>,
    /// If this value is not set, data is stored in raw_data (if set), otherwise in the type-specific field.
    #[prost(enumeration="tensor_proto::DataLocation", optional, tag="14")]
    pub data_location: ::core::option::Option<i32>,
    /// Data can be stored inside the protobuf file using type-specific fields or raw_data.
    /// Alternatively, raw bytes data can be stored in an external file, using the external_data field.
    /// external_data stores key-value pairs describing the data location. Recognized keys are:
    /// - "location" (required) - POSIX filesystem path relative to the directory where the ONNX
    ///   protobuf model was stored
    /// - "offset" (optional) - position of the byte at which the stored data begins. Integer stored as string.
    ///   Offset values SHOULD be multiples of 4096 (page size) to enable mmap support.
    /// - "length" (optional) - number of bytes containing data. Integer stored as string.
    /// - "checksum" (optional) - SHA1 digest of the file specified under the 'location' key.
    #[prost(message, repeated, tag="13")]
    pub external_data: ::prost::alloc::vec::Vec<StringStringEntryProto>,
}
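// --- Illustration only; not part of the generated bindings. ---
// A hedged sketch of reading FLOAT tensor elements, which (per the comments
// above) live either in `float_data` or, fixed-width little-endian, in
// `raw_data`. Returns None for other data types or a ragged raw buffer;
// external data and segments are ignored for brevity.
pub fn example_float_elements(t: &TensorProto) -> Option<Vec<f32>> {
    if t.data_type != tensor_proto::DataType::Float as i32 {
        return None;
    }
    if !t.raw_data.is_empty() {
        if t.raw_data.len() % 4 != 0 {
            return None;
        }
        Some(
            t.raw_data
                .chunks_exact(4)
                .map(|b| f32::from_le_bytes([b[0], b[1], b[2], b[3]]))
                .collect(),
        )
    } else {
        Some(t.float_data.clone())
    }
}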
/// Nested message and enum types in `TensorProto`.
pub mod tensor_proto {
    /// For very large tensors, we may want to store them in chunks, in which
    /// case the following fields will specify the segment that is stored in
    /// the current TensorProto.
    #[derive(Clone, PartialEq, ::prost::Message)]
    pub struct Segment {
        #[prost(int64, tag="1")]
        pub begin: i64,
        #[prost(int64, tag="2")]
        pub end: i64,
    }
    #[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord, ::prost::Enumeration)]
    #[repr(i32)]
    pub enum DataType {
        Undefined = 0,
        /// Basic types.
        ///
        /// float
        Float = 1,
        /// uint8_t
        Uint8 = 2,
        /// int8_t
        Int8 = 3,
        /// uint16_t
        Uint16 = 4,
        /// int16_t
        Int16 = 5,
        /// int32_t
        Int32 = 6,
        /// int64_t
        Int64 = 7,
        /// string
        String = 8,
        /// bool
        Bool = 9,
        /// IEEE754 half-precision floating-point format (16 bits wide).
        /// This format has 1 sign bit, 5 exponent bits, and 10 mantissa bits.
        Float16 = 10,
        Double = 11,
        Uint32 = 12,
        Uint64 = 13,
        /// complex with float32 real and imaginary components
        Complex64 = 14,
        /// complex with float64 real and imaginary components
        Complex128 = 15,
        /// Non-IEEE floating-point format based on IEEE754 single-precision
        /// floating-point number truncated to 16 bits.
        /// This format has 1 sign bit, 8 exponent bits, and 7 mantissa bits.
        Bfloat16 = 16,
    }
    impl DataType {
        /// String value of the enum field names used in the ProtoBuf definition.
        ///
        /// The values are not transformed in any way and thus are considered stable
        /// (if the ProtoBuf definition does not change) and safe for programmatic use.
        pub fn as_str_name(&self) -> &'static str {
            match self {
                DataType::Undefined => "UNDEFINED",
                DataType::Float => "FLOAT",
                DataType::Uint8 => "UINT8",
                DataType::Int8 => "INT8",
                DataType::Uint16 => "UINT16",
                DataType::Int16 => "INT16",
                DataType::Int32 => "INT32",
                DataType::Int64 => "INT64",
                DataType::String => "STRING",
                DataType::Bool => "BOOL",
                DataType::Float16 => "FLOAT16",
                DataType::Double => "DOUBLE",
                DataType::Uint32 => "UINT32",
                DataType::Uint64 => "UINT64",
                DataType::Complex64 => "COMPLEX64",
                DataType::Complex128 => "COMPLEX128",
                DataType::Bfloat16 => "BFLOAT16",
            }
        }
    }
    /// Location of the data for this tensor. MUST be one of:
    /// - DEFAULT - data stored inside the protobuf message. Data is stored in raw_data (if set) otherwise in type-specified field.
    /// - EXTERNAL - data stored in an external location as described by external_data field.
    #[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord, ::prost::Enumeration)]
    #[repr(i32)]
    pub enum DataLocation {
        Default = 0,
        External = 1,
    }
    impl DataLocation {
        /// String value of the enum field names used in the ProtoBuf definition.
        ///
        /// The values are not transformed in any way and thus are considered stable
        /// (if the ProtoBuf definition does not change) and safe for programmatic use.
        pub fn as_str_name(&self) -> &'static str {
            match self {
                DataLocation::Default => "DEFAULT",
                DataLocation::External => "EXTERNAL",
            }
        }
    }
}
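// --- Illustration only; not part of the generated bindings. ---
// A sketch of the DEFAULT/EXTERNAL rule documented on `DataLocation`: an
// absent `data_location` means DEFAULT (inline data), while EXTERNAL means
// the bytes live wherever `external_data` points.
pub fn example_is_external(t: &TensorProto) -> bool {
    t.data_location == Some(tensor_proto::DataLocation::External as i32)
}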
/// A serialized sparse-tensor value.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct SparseTensorProto {
    /// The sequence of non-default values is encoded as a tensor of shape \[NNZ\].
    /// The default value is zero for numeric tensors, and the empty string for string tensors.
    /// values must have a non-empty name, which serves as the name of the SparseTensorProto
    /// when used in the sparse_initializer list.
    #[prost(message, optional, tag="1")]
    pub values: ::core::option::Option<TensorProto>,
    /// The indices of the non-default values, which may be stored in one of two formats.
    /// (a) Indices can be a tensor of shape \[NNZ, rank\] with the \[i,j\]-th value
    /// corresponding to the j-th index of the i-th value (in the values tensor).
    /// (b) Indices can be a tensor of shape \[NNZ\], in which case the i-th value
    /// must be the linearized index of the i-th value (in the values tensor).
    /// The linearized index can be converted into an index tuple (k_1,...,k_rank)
    /// using the shape provided below.
    /// The indices must appear in ascending order without duplication.
    /// In the first format, the ordering is lexicographic ordering:
    /// e.g., index-value \[1,4\] must appear before \[2,1\].
    #[prost(message, optional, tag="2")]
    pub indices: ::core::option::Option<TensorProto>,
    /// The shape of the underlying dense tensor: \[dim_1, dim_2, ..., dim_rank\]
    #[prost(int64, repeated, tag="3")]
    pub dims: ::prost::alloc::vec::Vec<i64>,
}
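// --- Illustration only; not part of the generated bindings. ---
// A sketch of format (b) above: converting one linearized (row-major) index
// back into an index tuple (k_1, ..., k_rank) using `dims`. Dimensions are
// assumed positive. E.g., for dims [2, 3], linear index 4 yields [1, 1].
pub fn example_unlinearize(mut linear: i64, dims: &[i64]) -> Vec<i64> {
    let mut index = vec![0i64; dims.len()];
    // Peel off the fastest-varying (last) axis first.
    for (k, &d) in dims.iter().enumerate().rev() {
        index[k] = linear % d;
        linear /= d;
    }
    index
}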
/// Defines a tensor shape. A dimension can be either an integer value
/// or a symbolic variable. A symbolic variable represents an unknown
/// dimension.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct TensorShapeProto {
    #[prost(message, repeated, tag="1")]
    pub dim: ::prost::alloc::vec::Vec<tensor_shape_proto::Dimension>,
}
/// Nested message and enum types in `TensorShapeProto`.
pub mod tensor_shape_proto {
    #[derive(Clone, PartialEq, ::prost::Message)]
    pub struct Dimension {
        /// Standard denotation can optionally be used to denote tensor
        /// dimensions with standard semantic descriptions to ensure
        /// that operations are applied to the correct axis of a tensor.
        /// Refer to <https://github.com/onnx/onnx/blob/master/docs/DimensionDenotation.md#denotation-definition>
        /// for pre-defined dimension denotations.
        #[prost(string, tag="3")]
        pub denotation: ::prost::alloc::string::String,
        #[prost(oneof="dimension::Value", tags="1, 2")]
        pub value: ::core::option::Option<dimension::Value>,
    }
    /// Nested message and enum types in `Dimension`.
    pub mod dimension {
        #[derive(Clone, PartialEq, ::prost::Oneof)]
        pub enum Value {
            #[prost(int64, tag="1")]
            DimValue(i64),
            /// namespace Shape
            #[prost(string, tag="2")]
            DimParam(::prost::alloc::string::String),
        }
    }
}
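// --- Illustration only; not part of the generated bindings. ---
// A sketch of reading a dimension: a concrete extent yields Some(i64), while
// a symbolic (named) dimension or an unset one yields None.
pub fn example_concrete_dim(dim: &tensor_shape_proto::Dimension) -> Option<i64> {
    match &dim.value {
        Some(tensor_shape_proto::dimension::Value::DimValue(v)) => Some(*v),
        _ => None, // DimParam (symbolic) or unset
    }
}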
/// Types
///
/// The standard ONNX data types.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct TypeProto {
    /// An optional denotation can be used to denote the whole
    /// type with a standard semantic description as to what is
    /// stored inside. Refer to <https://github.com/onnx/onnx/blob/master/docs/TypeDenotation.md#type-denotation-definition>
    /// for pre-defined type denotations.
    #[prost(string, tag="6")]
    pub denotation: ::prost::alloc::string::String,
    #[prost(oneof="type_proto::Value", tags="1")]
    pub value: ::core::option::Option<type_proto::Value>,
}
/// Nested message and enum types in `TypeProto`.
pub mod type_proto {
    #[derive(Clone, PartialEq, ::prost::Message)]
    pub struct Tensor {
        /// This field MUST NOT have the value of UNDEFINED.
        /// This field MUST be present for this version of the IR.
        #[prost(enumeration="super::tensor_proto::DataType", tag="1")]
        pub elem_type: i32,
        #[prost(message, optional, tag="2")]
        pub shape: ::core::option::Option<super::TensorShapeProto>,
    }
    #[derive(Clone, PartialEq, ::prost::Oneof)]
    pub enum Value {
        /// The type of a tensor.
        #[prost(message, tag="1")]
        TensorType(Tensor),
    }
}
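// --- Illustration only; not part of the generated bindings. ---
// A sketch of unpacking a TypeProto into its element type and optional shape;
// the `Value` oneof currently has the single `TensorType` variant.
pub fn example_tensor_type(t: &TypeProto) -> Option<(i32, Option<&TensorShapeProto>)> {
    match &t.value {
        Some(type_proto::Value::TensorType(tt)) => Some((tt.elem_type, tt.shape.as_ref())),
        None => None,
    }
}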
/// Operator Sets
///
/// OperatorSets are uniquely identified by a (domain, opset_version) pair.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct OperatorSetIdProto {
    /// The domain of the operator set being identified.
    /// The empty string ("") or absence of this field implies the operator
    /// set that is defined as part of the ONNX specification.
    /// This field MUST be present in this version of the IR when referring to any other operator set.
    #[prost(string, tag="1")]
    pub domain: ::prost::alloc::string::String,
    /// The version of the operator set being identified.
    /// This field MUST be present in this version of the IR.
    #[prost(int64, tag="2")]
    pub version: i64,
}
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct FunctionProto {
    /// The name of the function, similar in usage to op_type in OperatorProto.
    /// Combined with FunctionProto.domain, this forms the unique identity of
    /// the FunctionProto.
    #[prost(string, optional, tag="1")]
    pub name: ::core::option::Option<::prost::alloc::string::String>,
    /// The inputs and outputs of the function.
    #[prost(string, repeated, tag="4")]
    pub input: ::prost::alloc::vec::Vec<::prost::alloc::string::String>,
    #[prost(string, repeated, tag="5")]
    pub output: ::prost::alloc::vec::Vec<::prost::alloc::string::String>,
    /// The attributes of the function.
    #[prost(string, repeated, tag="6")]
    pub attribute: ::prost::alloc::vec::Vec<::prost::alloc::string::String>,
    /// The nodes in the function.
    #[prost(message, repeated, tag="7")]
    pub node: ::prost::alloc::vec::Vec<NodeProto>,
    /// A human-readable documentation for this function. Markdown is allowed.
    #[prost(string, optional, tag="8")]
    pub doc_string: ::core::option::Option<::prost::alloc::string::String>,
    // The OperatorSets this function body (graph) relies on.
    //
    // All nodes in the function body (graph) will bind against the
    // same-domain/same-op_type operator with the HIGHEST version
    // in the referenced operator sets. This means at most one version can be relied
    // upon for one domain.
    //
    // The operator sets imported by FunctionProto should be compatible with the ones
    // imported by ModelProto. For example, if the same operator set, say 'A', is imported by
    // a FunctionProto and the ModelProto, then the versions of the operator set may differ, but
    // the operator schema returned for the (op_type, domain, version) combination
    // must be the same for both versions.

    #[prost(message, repeated, tag="9")]
    pub opset_import: ::prost::alloc::vec::Vec<OperatorSetIdProto>,
    /// The domain which this function belongs to. Combined with FunctionProto.name, this forms the unique identity of
    /// the FunctionProto.
    #[prost(string, optional, tag="10")]
    pub domain: ::core::option::Option<::prost::alloc::string::String>,
}
// Overview
//
// ONNX is an open specification that is comprised of the following components:
//
// 1) A definition of an extensible computation graph model.
// 2) Definitions of standard data types.
// 3) Definitions of built-in operators.
//
// This document describes the syntax of models and their computation graphs,
// as well as the standard data types. Together, they are referred to as the ONNX
// Intermediate Representation, or 'IR' for short.
//
// The normative semantic specification of the ONNX IR is found in docs/IR.md.
// Definitions of the built-in neural network operators may be found in docs/Operators.md.

// Notes
//
// Release
//
// We are still in the very early stage of defining ONNX. The current
// version of ONNX is a starting point. While we are actively working
// towards a complete spec, we would like to get the community involved
// by sharing our working version of ONNX.
//
// Protobuf compatibility
//
// To simplify framework compatibility, ONNX is defined using the subset of protobuf
// that is compatible with both protobuf v2 and v3. This means that we do not use any
// protobuf features that are only available in one of the two versions.
//
// Here are the most notable contortions we have to carry out to work around
// these limitations:
//
//   - No 'map' (added in protobuf 3.0). We instead represent mappings as lists
//     of key-value pairs, where order does not matter and duplicates
//     are not allowed.

/// Versioning
///
/// ONNX versioning is specified in docs/IR.md and elaborated on in docs/Versioning.md
///
/// To be compatible with both proto2 and proto3, we will use a version number
/// that is not defined by the default value but an explicit enum number.
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord, ::prost::Enumeration)]
#[repr(i32)]
pub enum Version {
    /// proto3 requires the first enum value to be zero.
    /// We add this just to appease the compiler.
    StartVersion = 0,
    /// The version field is always serialized and we will use it to store the
    /// version that the graph is generated from. This helps us set up version
    /// control.
    /// For the IR, we are using simple numbers starting with 0x00000001,
    /// which was the version we published on Oct 10, 2017.
    IrVersion20171010 = 1,
    /// IR_VERSION 2 published on Oct 30, 2017
    /// - Added type discriminator to AttributeProto to support proto3 users
    IrVersion20171030 = 2,
    /// IR VERSION 3 published on Nov 3, 2017
    /// - For operator versioning:
    ///   - Added new message OperatorSetIdProto
    ///   - Added opset_import in ModelProto
    /// - For vendor extensions, added domain in NodeProto
    IrVersion2017113 = 3,
    /// IR VERSION 4 published on Jan 22, 2019
    /// - Relax constraint that initializers should be a subset of graph inputs
    /// - Add type BFLOAT16
    IrVersion2019122 = 4,
    /// IR VERSION 5 published on March 18, 2019
    /// - Add message TensorAnnotation.
    /// - Add quantization annotation in GraphProto to map a tensor to its scale and zero point quantization parameters.
    IrVersion2019318 = 5,
    /// IR VERSION 6 published on Sep 19, 2019
    /// - Add support for sparse tensor constants stored in model.
    ///   - Add message SparseTensorProto
    ///   - Add sparse initializers
    IrVersion2019919 = 6,
    /// IR VERSION 7 published on May 8, 2020
    /// - Add support to allow a function body graph to rely on multiple external operator sets.
    /// - Add a list to promote inference graph's initializers to global and
    ///   mutable variables. Global variables are visible in all graphs of the
    ///   stored models.
    /// - Add message TrainingInfoProto to store initialization
    ///   method and training algorithm. The execution of TrainingInfoProto
    ///   can modify the values of mutable variables.
    /// - Implicitly add inference graph into each TrainingInfoProto's algorithm.
    IrVersion202058 = 7,
    /// IR VERSION 8 published on <TBD>
    /// - Introduce TypeProto.SparseTensor
    /// - Introduce TypeProto.Optional
    /// - Added a list of FunctionProtos local to the model
    /// - Deprecated since_version and operator status from FunctionProto
    IrVersion = 8,
}
impl Version {
    /// String value of the enum field names used in the ProtoBuf definition.
    ///
    /// The values are not transformed in any way and thus are considered stable
    /// (if the ProtoBuf definition does not change) and safe for programmatic use.
    pub fn as_str_name(&self) -> &'static str {
        match self {
            Version::StartVersion => "_START_VERSION",
            Version::IrVersion20171010 => "IR_VERSION_2017_10_10",
            Version::IrVersion20171030 => "IR_VERSION_2017_10_30",
            Version::IrVersion2017113 => "IR_VERSION_2017_11_3",
            Version::IrVersion2019122 => "IR_VERSION_2019_1_22",
            Version::IrVersion2019318 => "IR_VERSION_2019_3_18",
            Version::IrVersion2019919 => "IR_VERSION_2019_9_19",
            Version::IrVersion202058 => "IR_VERSION_2020_5_8",
            Version::IrVersion => "IR_VERSION",
        }
    }
}
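// --- Illustration only; not part of the generated bindings. ---
// A sketch of a hedged IR-version check: `ModelProto.ir_version` is a plain
// i64, compared here against the range of `Version` variants this file
// defines (1 through the newest, `Version::IrVersion`).
pub fn example_ir_version_supported(ir_version: i64) -> bool {
    ir_version >= Version::IrVersion20171010 as i64 && ir_version <= Version::IrVersion as i64
}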
/// Operator/function status.
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord, ::prost::Enumeration)]
#[repr(i32)]
pub enum OperatorStatus {
    Experimental = 0,
    Stable = 1,
}
impl OperatorStatus {
    /// String value of the enum field names used in the ProtoBuf definition.
    ///
    /// The values are not transformed in any way and thus are considered stable
    /// (if the ProtoBuf definition does not change) and safe for programmatic use.
    pub fn as_str_name(&self) -> &'static str {
        match self {
            OperatorStatus::Experimental => "EXPERIMENTAL",
            OperatorStatus::Stable => "STABLE",
        }
    }
}