// tract_tensorflow/prost/tensorflow.rs — prost-generated bindings for TensorFlow protobufs.
/// Protocol buffer representing a handle to a tensorflow resource. Handles are
/// not valid across executions, but can be serialized back and forth from within
/// a single run.
// Generated message struct: encode/decode impls come from the `::prost::Message`
// derive, driven by the `#[prost(...)]` field attributes below. Do not edit by hand.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct ResourceHandleProto {
    /// Unique name for the device containing the resource.
    #[prost(string, tag="1")]
    pub device: ::prost::alloc::string::String,
    /// Container in which this resource is placed.
    #[prost(string, tag="2")]
    pub container: ::prost::alloc::string::String,
    /// Unique name of this resource.
    #[prost(string, tag="3")]
    pub name: ::prost::alloc::string::String,
    /// Hash code for the type of the resource. Is only valid in the same device
    /// and in the same execution.
    #[prost(uint64, tag="4")]
    pub hash_code: u64,
    /// For debug-only, the name of the type pointed to by this handle, if
    /// available.
    #[prost(string, tag="5")]
    pub maybe_type_name: ::prost::alloc::string::String,
}
/// Dimensions of a tensor.
// Generated prost message. Note the field tags start at 2 — tag 1 is presumably
// reserved in the upstream .proto; TODO confirm against tensor_shape.proto.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct TensorShapeProto {
    /// Dimensions of the tensor, such as {"input", 30}, {"output", 40}
    /// for a 30 x 40 2D tensor. If an entry has size -1, this
    /// corresponds to a dimension of unknown size. The names are
    /// optional.
    ///
    /// The order of entries in "dim" matters: It indicates the layout of the
    /// values in the tensor in-memory representation.
    ///
    /// The first entry in "dim" is the outermost dimension used to layout the
    /// values, the last entry is the innermost dimension. This matches the
    /// in-memory layout of RowMajor Eigen tensors.
    ///
    /// If "dim.size()" > 0, "unknown_rank" must be false.
    #[prost(message, repeated, tag="2")]
    pub dim: ::prost::alloc::vec::Vec<tensor_shape_proto::Dim>,
    /// If true, the number of dimensions in the shape is unknown.
    ///
    /// If true, "dim.size()" must be 0.
    #[prost(bool, tag="3")]
    pub unknown_rank: bool,
}
/// Nested message and enum types in `TensorShapeProto`.
// prost maps protobuf nested types to a snake_case child module of the parent
// message; `Dim` here corresponds to `TensorShapeProto.Dim` in the .proto.
pub mod tensor_shape_proto {
    /// One dimension of the tensor.
    #[derive(Clone, PartialEq, ::prost::Message)]
    pub struct Dim {
        /// Size of the tensor in that dimension.
        /// This value must be >= -1, but values of -1 are reserved for "unknown"
        /// shapes (values of -1 mean "unknown" dimension). Certain wrappers
        /// that work with TensorShapeProto may fail at runtime when deserializing
        /// a TensorShapeProto containing a dim value of -1.
        #[prost(int64, tag="1")]
        pub size: i64,
        /// Optional name of the tensor dimension.
        #[prost(string, tag="2")]
        pub name: ::prost::alloc::string::String,
    }
}
/// LINT.IfChange
// The discriminant values below are the protobuf enum numbers and therefore the
// wire values; they must never be renumbered. Message fields declared with
// `enumeration="DataType"` store the raw `i32` rather than this enum.
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord, ::prost::Enumeration)]
#[repr(i32)]
pub enum DataType {
    /// Not a legal value for DataType. Used to indicate a DataType field
    /// has not been set.
    DtInvalid = 0,
    /// Data types that all computation devices are expected to be
    /// capable to support.
    DtFloat = 1,
    DtDouble = 2,
    DtInt32 = 3,
    DtUint8 = 4,
    DtInt16 = 5,
    DtInt8 = 6,
    DtString = 7,
    /// Single-precision complex
    DtComplex64 = 8,
    DtInt64 = 9,
    DtBool = 10,
    /// Quantized int8
    DtQint8 = 11,
    /// Quantized uint8
    DtQuint8 = 12,
    /// Quantized int32
    DtQint32 = 13,
    /// Float32 truncated to 16 bits. Only for cast ops.
    DtBfloat16 = 14,
    /// Quantized int16
    DtQint16 = 15,
    /// Quantized uint16
    DtQuint16 = 16,
    DtUint16 = 17,
    /// Double-precision complex
    DtComplex128 = 18,
    DtHalf = 19,
    DtResource = 20,
    /// Arbitrary C++ data types
    DtVariant = 21,
    DtUint32 = 22,
    DtUint64 = 23,
    /// Do not use! These are only for parameters. Every enum above
    /// should have a corresponding value below (verified by types_test).
    // "Ref" variants are the non-ref value plus 100, mirroring TensorFlow's
    // reference-type encoding.
    DtFloatRef = 101,
    DtDoubleRef = 102,
    DtInt32Ref = 103,
    DtUint8Ref = 104,
    DtInt16Ref = 105,
    DtInt8Ref = 106,
    DtStringRef = 107,
    DtComplex64Ref = 108,
    DtInt64Ref = 109,
    DtBoolRef = 110,
    DtQint8Ref = 111,
    DtQuint8Ref = 112,
    DtQint32Ref = 113,
    DtBfloat16Ref = 114,
    DtQint16Ref = 115,
    DtQuint16Ref = 116,
    DtUint16Ref = 117,
    DtComplex128Ref = 118,
    DtHalfRef = 119,
    DtResourceRef = 120,
    DtVariantRef = 121,
    DtUint32Ref = 122,
    DtUint64Ref = 123,
}
132impl DataType {
133 /// String value of the enum field names used in the ProtoBuf definition.
134 ///
135 /// The values are not transformed in any way and thus are considered stable
136 /// (if the ProtoBuf definition does not change) and safe for programmatic use.
137 pub fn as_str_name(&self) -> &'static str {
138 match self {
139 DataType::DtInvalid => "DT_INVALID",
140 DataType::DtFloat => "DT_FLOAT",
141 DataType::DtDouble => "DT_DOUBLE",
142 DataType::DtInt32 => "DT_INT32",
143 DataType::DtUint8 => "DT_UINT8",
144 DataType::DtInt16 => "DT_INT16",
145 DataType::DtInt8 => "DT_INT8",
146 DataType::DtString => "DT_STRING",
147 DataType::DtComplex64 => "DT_COMPLEX64",
148 DataType::DtInt64 => "DT_INT64",
149 DataType::DtBool => "DT_BOOL",
150 DataType::DtQint8 => "DT_QINT8",
151 DataType::DtQuint8 => "DT_QUINT8",
152 DataType::DtQint32 => "DT_QINT32",
153 DataType::DtBfloat16 => "DT_BFLOAT16",
154 DataType::DtQint16 => "DT_QINT16",
155 DataType::DtQuint16 => "DT_QUINT16",
156 DataType::DtUint16 => "DT_UINT16",
157 DataType::DtComplex128 => "DT_COMPLEX128",
158 DataType::DtHalf => "DT_HALF",
159 DataType::DtResource => "DT_RESOURCE",
160 DataType::DtVariant => "DT_VARIANT",
161 DataType::DtUint32 => "DT_UINT32",
162 DataType::DtUint64 => "DT_UINT64",
163 DataType::DtFloatRef => "DT_FLOAT_REF",
164 DataType::DtDoubleRef => "DT_DOUBLE_REF",
165 DataType::DtInt32Ref => "DT_INT32_REF",
166 DataType::DtUint8Ref => "DT_UINT8_REF",
167 DataType::DtInt16Ref => "DT_INT16_REF",
168 DataType::DtInt8Ref => "DT_INT8_REF",
169 DataType::DtStringRef => "DT_STRING_REF",
170 DataType::DtComplex64Ref => "DT_COMPLEX64_REF",
171 DataType::DtInt64Ref => "DT_INT64_REF",
172 DataType::DtBoolRef => "DT_BOOL_REF",
173 DataType::DtQint8Ref => "DT_QINT8_REF",
174 DataType::DtQuint8Ref => "DT_QUINT8_REF",
175 DataType::DtQint32Ref => "DT_QINT32_REF",
176 DataType::DtBfloat16Ref => "DT_BFLOAT16_REF",
177 DataType::DtQint16Ref => "DT_QINT16_REF",
178 DataType::DtQuint16Ref => "DT_QUINT16_REF",
179 DataType::DtUint16Ref => "DT_UINT16_REF",
180 DataType::DtComplex128Ref => "DT_COMPLEX128_REF",
181 DataType::DtHalfRef => "DT_HALF_REF",
182 DataType::DtResourceRef => "DT_RESOURCE_REF",
183 DataType::DtVariantRef => "DT_VARIANT_REF",
184 DataType::DtUint32Ref => "DT_UINT32_REF",
185 DataType::DtUint64Ref => "DT_UINT64_REF",
186 }
187 }
188}
/// Protocol buffer representing a tensor.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct TensorProto {
    // Raw i32 holding a `DataType` enum value; use DataType to interpret it.
    #[prost(enumeration="DataType", tag="1")]
    pub dtype: i32,
    /// Shape of the tensor. TODO(touts): sort out the 0-rank issues.
    #[prost(message, optional, tag="2")]
    pub tensor_shape: ::core::option::Option<TensorShapeProto>,
    // Only one of the representations below is set, one of "tensor_contents" and
    // the "xxx_val" attributes. We are not using oneof because as oneofs cannot
    // contain repeated fields it would require another extra set of messages.

    /// Version number.
    ///
    /// In version 0, if the "repeated xxx" representations contain only one
    /// element, that element is repeated to fill the shape. This makes it easy
    /// to represent a constant Tensor with a single value.
    #[prost(int32, tag="3")]
    pub version_number: i32,
    /// Serialized raw tensor content from either Tensor::AsProtoTensorContent or
    /// memcpy in tensorflow::grpc::EncodeTensorToByteBuffer. This representation
    /// can be used for all tensor types. The purpose of this representation is to
    /// reduce serialization overhead during RPC call by avoiding serialization of
    /// many repeated small items.
    #[prost(bytes="vec", tag="4")]
    pub tensor_content: ::prost::alloc::vec::Vec<u8>,
    // Type specific representations that make it easy to create tensor protos in
    // all languages. Only the representation corresponding to "dtype" can
    // be set. The values hold the flattened representation of the tensor in
    // row major order.

    /// DT_HALF, DT_BFLOAT16. Note that since protobuf has no int16 type, we'll
    /// have some pointless zero padding for each value here.
    #[prost(int32, repeated, tag="13")]
    pub half_val: ::prost::alloc::vec::Vec<i32>,
    /// DT_FLOAT.
    #[prost(float, repeated, tag="5")]
    pub float_val: ::prost::alloc::vec::Vec<f32>,
    /// DT_DOUBLE.
    #[prost(double, repeated, tag="6")]
    pub double_val: ::prost::alloc::vec::Vec<f64>,
    /// DT_INT32, DT_INT16, DT_INT8, DT_UINT8.
    #[prost(int32, repeated, tag="7")]
    pub int_val: ::prost::alloc::vec::Vec<i32>,
    /// DT_STRING
    #[prost(bytes="vec", repeated, tag="8")]
    pub string_val: ::prost::alloc::vec::Vec<::prost::alloc::vec::Vec<u8>>,
    /// DT_COMPLEX64. scomplex_val(2*i) and scomplex_val(2*i+1) are real
    /// and imaginary parts of i-th single precision complex.
    #[prost(float, repeated, tag="9")]
    pub scomplex_val: ::prost::alloc::vec::Vec<f32>,
    /// DT_INT64
    #[prost(int64, repeated, tag="10")]
    pub int64_val: ::prost::alloc::vec::Vec<i64>,
    /// DT_BOOL
    #[prost(bool, repeated, tag="11")]
    pub bool_val: ::prost::alloc::vec::Vec<bool>,
    /// DT_COMPLEX128. dcomplex_val(2*i) and dcomplex_val(2*i+1) are real
    /// and imaginary parts of i-th double precision complex.
    #[prost(double, repeated, tag="12")]
    pub dcomplex_val: ::prost::alloc::vec::Vec<f64>,
    /// DT_RESOURCE
    #[prost(message, repeated, tag="14")]
    pub resource_handle_val: ::prost::alloc::vec::Vec<ResourceHandleProto>,
    /// DT_VARIANT
    #[prost(message, repeated, tag="15")]
    pub variant_val: ::prost::alloc::vec::Vec<VariantTensorDataProto>,
    /// DT_UINT32
    #[prost(uint32, repeated, tag="16")]
    pub uint32_val: ::prost::alloc::vec::Vec<u32>,
    /// DT_UINT64
    #[prost(uint64, repeated, tag="17")]
    pub uint64_val: ::prost::alloc::vec::Vec<u64>,
}
/// Protocol buffer representing the serialization format of DT_VARIANT tensors.
// Generated prost message; referenced by `TensorProto::variant_val`.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct VariantTensorDataProto {
    /// Name of the type of objects being serialized.
    #[prost(string, tag="1")]
    pub type_name: ::prost::alloc::string::String,
    /// Portions of the object that are not Tensors.
    #[prost(bytes="vec", tag="2")]
    pub metadata: ::prost::alloc::vec::Vec<u8>,
    /// Tensors contained within objects being serialized.
    #[prost(message, repeated, tag="3")]
    pub tensors: ::prost::alloc::vec::Vec<TensorProto>,
}
/// Protocol buffer representing the value for an attr used to configure an Op.
/// Comment indicates the corresponding attr type. Only the field matching the
/// attr type may be filled.
// The whole message is a single oneof; `value` is None when no variant was set.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct AttrValue {
    #[prost(oneof="attr_value::Value", tags="2, 3, 4, 5, 6, 7, 8, 1, 10, 9")]
    pub value: ::core::option::Option<attr_value::Value>,
}
/// Nested message and enum types in `AttrValue`.
pub mod attr_value {
    /// LINT.IfChange
    // Repeated counterpart of every `Value` variant; used for "list(...)" attrs
    // because protobuf oneofs cannot contain repeated fields.
    #[derive(Clone, PartialEq, ::prost::Message)]
    pub struct ListValue {
        /// "list(string)"
        #[prost(bytes="vec", repeated, tag="2")]
        pub s: ::prost::alloc::vec::Vec<::prost::alloc::vec::Vec<u8>>,
        /// "list(int)"
        #[prost(int64, repeated, tag="3")]
        pub i: ::prost::alloc::vec::Vec<i64>,
        /// "list(float)"
        #[prost(float, repeated, tag="4")]
        pub f: ::prost::alloc::vec::Vec<f32>,
        /// "list(bool)"
        #[prost(bool, repeated, tag="5")]
        pub b: ::prost::alloc::vec::Vec<bool>,
        /// "list(type)"
        // Raw i32 values of `super::DataType`.
        #[prost(enumeration="super::DataType", repeated, tag="6")]
        pub r#type: ::prost::alloc::vec::Vec<i32>,
        /// "list(shape)"
        #[prost(message, repeated, tag="7")]
        pub shape: ::prost::alloc::vec::Vec<super::TensorShapeProto>,
        /// "list(tensor)"
        #[prost(message, repeated, tag="8")]
        pub tensor: ::prost::alloc::vec::Vec<super::TensorProto>,
        /// "list(attr)"
        #[prost(message, repeated, tag="9")]
        pub func: ::prost::alloc::vec::Vec<super::NameAttrList>,
    }
    // Oneof payload for `AttrValue::value`; exactly one variant is set.
    #[derive(Clone, PartialEq, ::prost::Oneof)]
    pub enum Value {
        /// "string"
        #[prost(bytes, tag="2")]
        S(::prost::alloc::vec::Vec<u8>),
        /// "int"
        #[prost(int64, tag="3")]
        I(i64),
        /// "float"
        #[prost(float, tag="4")]
        F(f32),
        /// "bool"
        #[prost(bool, tag="5")]
        B(bool),
        /// "type"
        #[prost(enumeration="super::DataType", tag="6")]
        Type(i32),
        /// "shape"
        #[prost(message, tag="7")]
        Shape(super::TensorShapeProto),
        /// "tensor"
        #[prost(message, tag="8")]
        Tensor(super::TensorProto),
        /// any "list(...)"
        #[prost(message, tag="1")]
        List(ListValue),
        /// "func" represents a function. func.name is a function's name or
        /// a primitive op's name. func.attr.first is the name of an attr
        /// defined for that function. func.attr.second is the value for
        /// that attr in the instantiation.
        #[prost(message, tag="10")]
        Func(super::NameAttrList),
        /// This is a placeholder only used in nodes defined inside a
        /// function. It indicates the attr value will be supplied when
        /// the function is instantiated. For example, let us suppose a
        /// node "N" in function "FN". "N" has an attr "A" with value
        /// placeholder = "foo". When FN is instantiated with attr "foo"
        /// set to "bar", the instantiated node N's attr A will have been
        /// given the value "bar".
        #[prost(string, tag="9")]
        Placeholder(::prost::alloc::string::String),
    }
}
/// A list of attr names and their values. The whole list is attached
/// with a string name. E.g., MatMul\[T=float\].
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct NameAttrList {
    // Function or op name the attrs apply to.
    #[prost(string, tag="1")]
    pub name: ::prost::alloc::string::String,
    // Attr name -> value; protobuf map fields become HashMaps in prost.
    #[prost(map="string, message", tag="2")]
    pub attr: ::std::collections::HashMap<::prost::alloc::string::String, AttrValue>,
}
// A single node (op instance) in a GraphDef.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct NodeDef {
    /// The name given to this operator. Used for naming inputs,
    /// logging, visualization, etc. Unique within a single GraphDef.
    /// Must match the regexp "\[A-Za-z0-9.][A-Za-z0-9_./\]*".
    #[prost(string, tag="1")]
    pub name: ::prost::alloc::string::String,
    /// The operation name. There may be custom parameters in attrs.
    /// Op names starting with an underscore are reserved for internal use.
    #[prost(string, tag="2")]
    pub op: ::prost::alloc::string::String,
    /// Each input is "node:src_output" with "node" being a string name and
    /// "src_output" indicating which output tensor to use from "node". If
    /// "src_output" is 0 the ":0" suffix can be omitted. Regular inputs
    /// may optionally be followed by control inputs that have the format
    /// "^node".
    #[prost(string, repeated, tag="3")]
    pub input: ::prost::alloc::vec::Vec<::prost::alloc::string::String>,
    /// A (possibly partial) specification for the device on which this
    /// node should be placed.
    /// The expected syntax for this string is as follows:
    ///
    /// DEVICE_SPEC ::= PARTIAL_SPEC
    ///
    /// PARTIAL_SPEC ::= ("/" CONSTRAINT) *
    /// CONSTRAINT ::= ("job:" JOB_NAME)
    /// | ("replica:" \[1-9][0-9\]*)
    /// | ("task:" \[1-9][0-9\]*)
    /// | ("device:" \[A-Za-z\]* ":" (\[1-9][0-9\]* | "*") )
    ///
    /// Valid values for this string include:
    /// * "/job:worker/replica:0/task:1/device:GPU:3" (full specification)
    /// * "/job:worker/device:GPU:3" (partial specification)
    /// * "" (no specification)
    ///
    /// If the constraints do not resolve to a single device (or if this
    /// field is empty or not present), the runtime will attempt to
    /// choose a device automatically.
    #[prost(string, tag="4")]
    pub device: ::prost::alloc::string::String,
    /// Operation-specific graph-construction-time configuration.
    /// Note that this should include all attrs defined in the
    /// corresponding OpDef, including those with a value matching
    /// the default -- this allows the default to change and makes
    /// NodeDefs easier to interpret on their own. However, if
    /// an attr with a default is not specified in this list, the
    /// default will be used.
    /// The "names" (keys) must match the regexp "\[a-z][a-z0-9_\]+" (and
    /// one of the names from the corresponding OpDef's attr field).
    /// The values must have a type matching the corresponding OpDef
    /// attr's type field.
    /// TODO(josh11b): Add some examples here showing best practices.
    #[prost(map="string, message", tag="5")]
    pub attr: ::std::collections::HashMap<::prost::alloc::string::String, AttrValue>,
}
/// Defines an operation. A NodeDef in a GraphDef specifies an Op by
/// using the "op" field which should match the name of a OpDef.
/// LINT.IfChange
// Field tags are non-contiguous (1-6, 8, 16-19) — tags were presumably added
// over time upstream; preserve them exactly for wire compatibility.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct OpDef {
    /// Op names starting with an underscore are reserved for internal use.
    /// Names should be CamelCase and match the regexp "\[A-Z][a-zA-Z0-9_\]*".
    #[prost(string, tag="1")]
    pub name: ::prost::alloc::string::String,
    /// Description of the input(s).
    #[prost(message, repeated, tag="2")]
    pub input_arg: ::prost::alloc::vec::Vec<op_def::ArgDef>,
    /// Description of the output(s).
    #[prost(message, repeated, tag="3")]
    pub output_arg: ::prost::alloc::vec::Vec<op_def::ArgDef>,
    #[prost(message, repeated, tag="4")]
    pub attr: ::prost::alloc::vec::Vec<op_def::AttrDef>,
    /// Optional deprecation based on GraphDef versions.
    #[prost(message, optional, tag="8")]
    pub deprecation: ::core::option::Option<OpDeprecation>,
    /// One-line human-readable description of what the Op does.
    #[prost(string, tag="5")]
    pub summary: ::prost::alloc::string::String,
    /// Additional, longer human-readable description of what the Op does.
    #[prost(string, tag="6")]
    pub description: ::prost::alloc::string::String,
    // -------------------------------------------------------------------------
    // Which optimizations this operation can participate in.

    /// True if the operation is commutative ("op(a,b) == op(b,a)" for all inputs)
    #[prost(bool, tag="18")]
    pub is_commutative: bool,
    /// If is_aggregate is true, then this operation accepts N >= 2
    /// inputs and produces 1 output all of the same type. Should be
    /// associative and commutative, and produce output with the same
    /// shape as the input. The optimizer may replace an aggregate op
    /// taking input from multiple devices with a tree of aggregate ops
    /// that aggregate locally within each device (and possibly within
    /// groups of nearby devices) before communicating.
    /// TODO(josh11b): Implement that optimization.
    ///
    /// for things like add
    #[prost(bool, tag="16")]
    pub is_aggregate: bool,
    // Other optimizations go here, like
    // can_alias_input, rewrite_when_output_unused, partitioning_strategy, etc.

    // -------------------------------------------------------------------------
    // Optimization constraints.

    /// Ops are marked as stateful if their behavior depends on some state beyond
    /// their input tensors (e.g. variable reading op) or if they have
    /// a side-effect (e.g. printing or asserting ops). Equivalently, stateless ops
    /// must always produce the same output for the same input and have
    /// no side-effects.
    ///
    /// By default Ops may be moved between devices. Stateful ops should
    /// either not be moved, or should only be moved if that state can also
    /// be moved (e.g. via some sort of save / restore).
    /// Stateful ops are guaranteed to never be optimized away by Common
    /// Subexpression Elimination (CSE).
    ///
    /// for things like variables, queue
    #[prost(bool, tag="17")]
    pub is_stateful: bool,
    // -------------------------------------------------------------------------
    // Non-standard options.

    /// By default, all inputs to an Op must be initialized Tensors. Ops
    /// that may initialize tensors for the first time should set this
    /// field to true, to allow the Op to take an uninitialized Tensor as
    /// input.
    ///
    /// for Assign, etc.
    #[prost(bool, tag="19")]
    pub allows_uninitialized_input: bool,
}
/// Nested message and enum types in `OpDef`.
pub mod op_def {
    /// For describing inputs and outputs.
    #[derive(Clone, PartialEq, ::prost::Message)]
    pub struct ArgDef {
        /// Name for the input/output. Should match the regexp "\[a-z][a-z0-9_\]*".
        #[prost(string, tag="1")]
        pub name: ::prost::alloc::string::String,
        /// Human readable description.
        #[prost(string, tag="2")]
        pub description: ::prost::alloc::string::String,
        /// Describes the type of one or more tensors that are accepted/produced
        /// by this input/output arg. The only legal combinations are:
        /// * For a single tensor: either the "type" field is set or the
        /// "type_attr" field is set to the name of an attr with type "type".
        /// * For a sequence of tensors with the same type: the "number_attr"
        /// field will be set to the name of an attr with type "int", and
        /// either the "type" or "type_attr" field will be set as for
        /// single tensors.
        /// * For a sequence of tensors, the "type_list_attr" field will be set
        /// to the name of an attr with type "list(type)".
        // Raw i32 holding a `DataType` value.
        #[prost(enumeration="super::DataType", tag="3")]
        pub r#type: i32,
        /// if specified, attr must have type "type"
        #[prost(string, tag="4")]
        pub type_attr: ::prost::alloc::string::String,
        /// if specified, attr must have type "int"
        #[prost(string, tag="5")]
        pub number_attr: ::prost::alloc::string::String,
        /// If specified, attr must have type "list(type)", and none of
        /// type, type_attr, and number_attr may be specified.
        #[prost(string, tag="6")]
        pub type_list_attr: ::prost::alloc::string::String,
        /// For inputs: if true, the inputs are required to be refs.
        /// By default, inputs can be either refs or non-refs.
        /// For outputs: if true, outputs are refs, otherwise they are not.
        #[prost(bool, tag="16")]
        pub is_ref: bool,
    }
    /// Description of the graph-construction-time configuration of this
    /// Op. That is to say, this describes the attr fields that will
    /// be specified in the NodeDef.
    #[derive(Clone, PartialEq, ::prost::Message)]
    pub struct AttrDef {
        /// A descriptive name for the argument. May be used, e.g. by the
        /// Python client, as a keyword argument name, and so should match
        /// the regexp "\[a-z][a-z0-9_\]+".
        #[prost(string, tag="1")]
        pub name: ::prost::alloc::string::String,
        /// One of the type names from attr_value.proto ("string", "list(string)",
        /// "int", etc.).
        #[prost(string, tag="2")]
        pub r#type: ::prost::alloc::string::String,
        /// A reasonable default for this attribute if the user does not supply
        /// a value. If not specified, the user must supply a value.
        #[prost(message, optional, tag="3")]
        pub default_value: ::core::option::Option<super::AttrValue>,
        /// Human-readable description.
        #[prost(string, tag="4")]
        pub description: ::prost::alloc::string::String,
        // TODO(josh11b): bool is_optional?

        // --- Constraints ---
        // These constraints are only in effect if specified. Default is no
        // constraints.

        /// For type == "int", this is a minimum value. For "list(___)"
        /// types, this is the minimum length.
        // `has_minimum` distinguishes "minimum == 0" from "no minimum set",
        // since proto3 scalar fields have no presence information.
        #[prost(bool, tag="5")]
        pub has_minimum: bool,
        #[prost(int64, tag="6")]
        pub minimum: i64,
        /// The set of allowed values. Has type that is the "list" version
        /// of the "type" field above (uses the "list" field of AttrValue).
        /// If type == "type" or "list(type)" above, then the "type" field
        /// of "allowed_values.list" has the set of allowed DataTypes.
        /// If type == "string" or "list(string)", then the "s" field of
        /// "allowed_values.list" has the set of allowed strings.
        #[prost(message, optional, tag="7")]
        pub allowed_values: ::core::option::Option<super::AttrValue>,
    }
}
/// Information about version-dependent deprecation of an op
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct OpDeprecation {
    /// First GraphDef version at which the op is disallowed.
    #[prost(int32, tag="1")]
    pub version: i32,
    /// Explanation of why it was deprecated and what to use instead.
    #[prost(string, tag="2")]
    pub explanation: ::prost::alloc::string::String,
}
/// A collection of OpDefs
// Thin wrapper message so a set of OpDefs can be serialized as a unit.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct OpList {
    #[prost(message, repeated, tag="1")]
    pub op: ::prost::alloc::vec::Vec<OpDef>,
}
/// A library is a set of named functions.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct FunctionDefLibrary {
    // Function bodies defined by this library.
    #[prost(message, repeated, tag="1")]
    pub function: ::prost::alloc::vec::Vec<FunctionDef>,
    // Gradient mappings (function name -> gradient function name).
    #[prost(message, repeated, tag="2")]
    pub gradient: ::prost::alloc::vec::Vec<GradientDef>,
}
/// A function can be instantiated when the runtime can bind every attr
/// with a value. When a GraphDef has a call to a function, it must
/// have binding for every attr defined in the signature.
///
/// TODO(zhifengc):
/// * device spec, etc.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct FunctionDef {
    /// The definition of the function's name, arguments, return values,
    /// attrs etc.
    #[prost(message, optional, tag="1")]
    pub signature: ::core::option::Option<OpDef>,
    /// Attributes specific to this function definition.
    #[prost(map="string, message", tag="5")]
    pub attr: ::std::collections::HashMap<::prost::alloc::string::String, AttrValue>,
    // NOTE: field id 2 deleted on Jan 11, 2016, GraphDef version 21.

    // In both of the following fields, there is the need to specify an
    // output that is used as either the input to another node (in
    // `node_def`) or as a return value of the function (in `ret`).
    // Unlike the NodeDefs in GraphDef, we need to be able to specify a
    // list in some cases (instead of just single outputs). Also, we
    // need to be able to deal with lists of unknown length (so the
    // output index may not be known at function definition time). So
    // we use the following format instead:
    // * "fun_in" where "fun_in" is the name of a function input arg in
    // the `signature` field above. This represents that input, whether
    // it is a single tensor or a list.
    // * "fun_in:0" gives the first element of a function input arg (a
    // non-list input is considered a list of length 1 for these
    // purposes).
    // * "node:out" where "node" is the name of a node in `node_def` and
    // "out" is the name one of its op's output arguments (the name
    // comes from the OpDef of the node's op). This represents that
    // node's output, whether it is a single tensor or a list.
    // Note: We enforce that an op's output arguments are never
    // renamed in the backwards-compatibility test.
    // * "node:out:0" gives the first element of a node output arg (a
    // non-list output is considered a list of length 1 for these
    // purposes).
    //
    // NOT CURRENTLY SUPPORTED (but may be in the future):
    // * "node:out:-1" gives last element in a node output list
    // * "node:out:1:" gives a list with all but the first element in a
    // node output list
    // * "node:out::-1" gives a list with all but the last element in a
    // node output list

    // The body of the function. Unlike the NodeDefs in a GraphDef, attrs
    // may have values of type `placeholder` and the `input` field uses
    // the "output" format above.

    /// By convention, "op" in node_def is resolved by consulting with a
    /// user-defined library first. If not resolved, "func" is assumed to
    /// be a builtin op.
    #[prost(message, repeated, tag="3")]
    pub node_def: ::prost::alloc::vec::Vec<NodeDef>,
    /// A mapping from the output arg names from `signature` to the
    /// outputs from `node_def` that should be returned by the function.
    #[prost(map="string, string", tag="4")]
    pub ret: ::std::collections::HashMap<::prost::alloc::string::String, ::prost::alloc::string::String>,
}
/// GradientDef defines the gradient function of a function defined in
/// a function library.
///
/// A gradient function g (specified by gradient_func) for a function f
/// (specified by function_name) must follow the following:
///
/// The function 'f' must be a numerical function which takes N inputs
/// and produces M outputs. Its gradient function 'g', which is a
/// function taking N + M inputs and produces N outputs.
///
/// I.e. if we have
/// (y1, y2, ..., y_M) = f(x1, x2, ..., x_N),
/// then, g is
/// (dL/dx1, dL/dx2, ..., dL/dx_N) = g(x1, x2, ..., x_N,
/// dL/dy1, dL/dy2, ..., dL/dy_M),
/// where L is a scalar-value function of (x1, x2, ..., xN) (e.g., the
/// loss function). dL/dx_i is the partial derivative of L with respect
/// to x_i.
// Both fields are names resolved against the enclosing FunctionDefLibrary.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct GradientDef {
    /// The function name.
    #[prost(string, tag="1")]
    pub function_name: ::prost::alloc::string::String,
    /// The gradient function's name.
    #[prost(string, tag="2")]
    pub gradient_func: ::prost::alloc::string::String,
}
/// Version information for a piece of serialized data
///
/// There are different types of versions for each type of data
/// (GraphDef, etc.), but they all have the same common shape
/// described here.
///
/// Each consumer has "consumer" and "min_producer" versions (specified
/// elsewhere). A consumer is allowed to consume this data if
///
/// producer >= min_producer
/// consumer >= min_consumer
/// consumer not in bad_consumers
///
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct VersionDef {
    /// The version of the code that produced this data.
    #[prost(int32, tag="1")]
    pub producer: i32,
    /// Any consumer below this version is not allowed to consume this data.
    #[prost(int32, tag="2")]
    pub min_consumer: i32,
    /// Specific consumer versions which are disallowed (e.g. due to bugs).
    #[prost(int32, repeated, tag="3")]
    pub bad_consumers: ::prost::alloc::vec::Vec<i32>,
}
/// Represents the graph of operations
// Top-level message for a serialized TensorFlow graph: nodes, versioning
// metadata, and an optional function library.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct GraphDef {
    #[prost(message, repeated, tag="1")]
    pub node: ::prost::alloc::vec::Vec<NodeDef>,
    /// Compatibility versions of the graph. See core/public/version.h for version
    /// history. The GraphDef version is distinct from the TensorFlow version, and
    /// each release of TensorFlow will support a range of GraphDef versions.
    #[prost(message, optional, tag="4")]
    pub versions: ::core::option::Option<VersionDef>,
    /// Deprecated single version field; use versions above instead. Since all
    /// GraphDef changes before "versions" was introduced were forward
    /// compatible, this field is entirely ignored.
    #[deprecated]
    #[prost(int32, tag="3")]
    pub version: i32,
    /// EXPERIMENTAL. DO NOT USE OR DEPEND ON THIS YET.
    ///
    /// "library" provides user-defined functions.
    ///
    /// Naming:
    /// * library.function.name are in a flat namespace.
    /// NOTE: We may need to change it to be hierarchical to support
    /// different orgs. E.g.,
    /// { "/google/nn", { ... }},
    /// { "/google/vision", { ... }}
    /// { "/org_foo/module_bar", { ... }}
    /// map<string, FunctionDefLib> named_lib;
    /// * If node\[i\].op is the name of one function in "library",
    /// node\[i\] is deemed as a function call. Otherwise, node\[i\].op
    /// must be a primitive operation supported by the runtime.
    ///
    ///
    /// Function call semantics:
    ///
    /// * The callee may start execution as soon as some of its inputs
    /// are ready. The caller may want to use Tuple() mechanism to
    /// ensure all inputs are ready in the same time.
    ///
    /// * The consumer of return values may start executing as soon as
    /// the return values the consumer depends on are ready. The
    /// consumer may want to use Tuple() mechanism to ensure the
    /// consumer does not start until all return values of the callee
    /// function are ready.
    #[prost(message, optional, tag="2")]
    pub library: ::core::option::Option<FunctionDefLibrary>,
}
/// Protocol buffer representing a Variable.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct VariableDef {
    /// Name of the variable tensor.
    #[prost(string, tag="1")]
    pub variable_name: ::prost::alloc::string::String,
    /// Name of the tensor holding the variable's initial value.
    #[prost(string, tag="6")]
    pub initial_value_name: ::prost::alloc::string::String,
    /// Name of the initializer op.
    #[prost(string, tag="2")]
    pub initializer_name: ::prost::alloc::string::String,
    /// Name of the snapshot tensor.
    #[prost(string, tag="3")]
    pub snapshot_name: ::prost::alloc::string::String,
    /// Support for saving variables as slices of a larger variable.
    #[prost(message, optional, tag="4")]
    pub save_slice_info_def: ::core::option::Option<SaveSliceInfoDef>,
    /// Whether to represent this as a ResourceVariable.
    #[prost(bool, tag="5")]
    pub is_resource: bool,
    /// Whether this variable should be trained.
    #[prost(bool, tag="7")]
    pub trainable: bool,
    /// Indicates when a distributed variable will be synced.
    // Stored as the raw i32 wire value of `VariableSynchronization`; may hold
    // values not covered by the enum if produced by a newer writer.
    #[prost(enumeration="VariableSynchronization", tag="8")]
    pub synchronization: i32,
    /// Indicates how a distributed variable will be aggregated.
    // Raw i32 wire value of `VariableAggregation` (same caveat as above).
    #[prost(enumeration="VariableAggregation", tag="9")]
    pub aggregation: i32,
}
/// Describes how a variable is stored as a slice of a larger variable,
/// so partitioned variables can be saved/restored piecewise.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct SaveSliceInfoDef {
    /// Name of the full variable of which this is a slice.
    #[prost(string, tag="1")]
    pub full_name: ::prost::alloc::string::String,
    /// Shape of the full variable.
    // Presumably one entry per dimension; `var_offset`/`var_shape` below are
    // parallel arrays of the same rank — TODO confirm against TF docs.
    #[prost(int64, repeated, tag="2")]
    pub full_shape: ::prost::alloc::vec::Vec<i64>,
    /// Offset of this variable into the full variable.
    #[prost(int64, repeated, tag="3")]
    pub var_offset: ::prost::alloc::vec::Vec<i64>,
    /// Shape of this variable.
    #[prost(int64, repeated, tag="4")]
    pub var_shape: ::prost::alloc::vec::Vec<i64>,
}
/// Indicates when a distributed variable will be synced.
// Carried in `VariableDef::synchronization` / `SavedVariable::synchronization`
// as a raw i32.
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord, ::prost::Enumeration)]
#[repr(i32)]
pub enum VariableSynchronization {
    /// `AUTO`: Indicates that the synchronization will be determined by the
    /// current `DistributionStrategy` (eg. With `MirroredStrategy` this would be
    /// `ON_WRITE`).
    Auto = 0,
    /// `NONE`: Indicates that there will only be one copy of the variable, so
    /// there is no need to sync.
    None = 1,
    /// `ON_WRITE`: Indicates that the variable will be updated across devices
    /// every time it is written.
    OnWrite = 2,
    /// `ON_READ`: Indicates that the variable will be aggregated across devices
    /// when it is read (eg. when checkpointing or when evaluating an op that uses
    /// the variable).
    OnRead = 3,
}
830impl VariableSynchronization {
831 /// String value of the enum field names used in the ProtoBuf definition.
832 ///
833 /// The values are not transformed in any way and thus are considered stable
834 /// (if the ProtoBuf definition does not change) and safe for programmatic use.
835 pub fn as_str_name(&self) -> &'static str {
836 match self {
837 VariableSynchronization::Auto => "VARIABLE_SYNCHRONIZATION_AUTO",
838 VariableSynchronization::None => "VARIABLE_SYNCHRONIZATION_NONE",
839 VariableSynchronization::OnWrite => "VARIABLE_SYNCHRONIZATION_ON_WRITE",
840 VariableSynchronization::OnRead => "VARIABLE_SYNCHRONIZATION_ON_READ",
841 }
842 }
843}
/// Indicates how a distributed variable will be aggregated.
// Carried in `VariableDef::aggregation` / `SavedVariable::aggregation` as a
// raw i32.
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord, ::prost::Enumeration)]
#[repr(i32)]
pub enum VariableAggregation {
    /// `NONE`: This is the default, giving an error if you use a
    /// variable-update operation with multiple replicas.
    None = 0,
    /// `SUM`: Add the updates across replicas.
    Sum = 1,
    /// `MEAN`: Take the arithmetic mean ("average") of the updates across
    /// replicas.
    Mean = 2,
    /// `ONLY_FIRST_REPLICA`: This is for when every replica is performing the same
    /// update, but we only want to perform the update once. Used, e.g., for the
    /// global step counter.
    OnlyFirstReplica = 3,
}
861impl VariableAggregation {
862 /// String value of the enum field names used in the ProtoBuf definition.
863 ///
864 /// The values are not transformed in any way and thus are considered stable
865 /// (if the ProtoBuf definition does not change) and safe for programmatic use.
866 pub fn as_str_name(&self) -> &'static str {
867 match self {
868 VariableAggregation::None => "VARIABLE_AGGREGATION_NONE",
869 VariableAggregation::Sum => "VARIABLE_AGGREGATION_SUM",
870 VariableAggregation::Mean => "VARIABLE_AGGREGATION_MEAN",
871 VariableAggregation::OnlyFirstReplica => "VARIABLE_AGGREGATION_ONLY_FIRST_REPLICA",
872 }
873 }
874}
875// A TensorBundle addition which saves extra information about the objects which
876// own variables, allowing for more robust checkpoint loading into modified
877// programs.
878
/// Flattened graph of trackable objects stored in a checkpoint; an object's
/// id is its index into `nodes` (see `ObjectReference.node_id`).
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct TrackableObjectGraph {
    #[prost(message, repeated, tag="1")]
    pub nodes: ::prost::alloc::vec::Vec<trackable_object_graph::TrackableObject>,
}
/// Nested message and enum types in `TrackableObjectGraph`.
pub mod trackable_object_graph {
    /// One node of the object graph: its outgoing edges, serialized tensors,
    /// and slot variables.
    #[derive(Clone, PartialEq, ::prost::Message)]
    pub struct TrackableObject {
        /// Objects which this object depends on.
        #[prost(message, repeated, tag="1")]
        pub children: ::prost::alloc::vec::Vec<trackable_object::ObjectReference>,
        /// Serialized data specific to this object.
        #[prost(message, repeated, tag="2")]
        pub attributes: ::prost::alloc::vec::Vec<trackable_object::SerializedTensor>,
        /// Slot variables owned by this object.
        #[prost(message, repeated, tag="3")]
        pub slot_variables: ::prost::alloc::vec::Vec<trackable_object::SlotVariableReference>,
    }
    /// Nested message and enum types in `TrackableObject`.
    pub mod trackable_object {
        /// A named, directed edge from one object in the graph to another.
        #[derive(Clone, PartialEq, ::prost::Message)]
        pub struct ObjectReference {
            /// An index into `TrackableObjectGraph.nodes`, indicating the object
            /// being referenced.
            #[prost(int32, tag="1")]
            pub node_id: i32,
            /// A user-provided name for the edge.
            #[prost(string, tag="2")]
            pub local_name: ::prost::alloc::string::String,
        }
        /// A tensor value owned by an object, as stored in the checkpoint.
        #[derive(Clone, PartialEq, ::prost::Message)]
        pub struct SerializedTensor {
            /// A name for the Tensor. Simple variables have only one
            /// `SerializedTensor` named "VARIABLE_VALUE" by convention. This value may
            /// be restored on object creation as an optimization.
            #[prost(string, tag="1")]
            pub name: ::prost::alloc::string::String,
            /// The full name of the variable/tensor, if applicable. Used to allow
            /// name-based loading of checkpoints which were saved using an
            /// object-based API. Should match the checkpoint key which would have been
            /// assigned by tf.train.Saver.
            #[prost(string, tag="2")]
            pub full_name: ::prost::alloc::string::String,
            /// The generated name of the Tensor in the checkpoint.
            #[prost(string, tag="3")]
            pub checkpoint_key: ::prost::alloc::string::String,
            /// Whether checkpoints should be considered as matching even without this
            /// value restored. Used for non-critical values which don't affect the
            /// TensorFlow graph, such as layer configurations.
            #[prost(bool, tag="4")]
            pub optional_restore: bool,
        }
        /// Links an optimizer slot variable to the variable it was created for.
        #[derive(Clone, PartialEq, ::prost::Message)]
        pub struct SlotVariableReference {
            /// An index into `TrackableObjectGraph.nodes`, indicating the
            /// variable object this slot was created for.
            #[prost(int32, tag="1")]
            pub original_variable_node_id: i32,
            /// The name of the slot (e.g. "m"/"v").
            #[prost(string, tag="2")]
            pub slot_name: ::prost::alloc::string::String,
            /// An index into `TrackableObjectGraph.nodes`, indicating the
            /// `Object` with the value of the slot variable.
            #[prost(int32, tag="3")]
            pub slot_variable_node_id: i32,
        }
    }
}
/// `StructuredValue` represents a dynamically typed value representing various
/// data structures that are inspired by Python data structures typically used in
/// TensorFlow functions as inputs and outputs.
///
/// For example when saving a Layer there may be a `training` argument. If the
/// user passes a boolean True/False, that switches between two concrete
/// TensorFlow functions. In order to switch between them in the same way after
/// loading the SavedModel, we need to represent "True" and "False".
///
/// A more advanced example might be a function which takes a list of
/// dictionaries mapping from strings to Tensors. In order to map from
/// user-specified arguments `[{"a": tf.constant(1.)}, {"q": tf.constant(3.)}]`
/// after load to the right saved TensorFlow function, we need to represent the
/// nested structure and the strings, recording that we have a trace for anything
/// matching `[{"a": tf.TensorSpec(None, tf.float32)}, {"q": tf.TensorSpec([],
/// tf.float64)}]` as an example.
///
/// Likewise functions may return nested structures of Tensors, for example
/// returning a dictionary mapping from strings to Tensors. In order for the
/// loaded function to return the same structure we need to serialize it.
///
/// This is an ergonomic aid for working with loaded SavedModels, not a promise
/// to serialize all possible function signatures. For example we do not expect
/// to pickle generic Python objects, and ideally we'd stay language-agnostic.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct StructuredValue {
    /// The kind of value.
    // Tag ranges group related variants: 1 = none, 11-14 = scalar primitives,
    // 31-34 = tensor/type metadata, 51-54 = containers.
    #[prost(oneof="structured_value::Kind", tags="1, 11, 12, 13, 14, 31, 32, 33, 34, 51, 52, 53, 54")]
    pub kind: ::core::option::Option<structured_value::Kind>,
}
/// Nested message and enum types in `StructuredValue`.
pub mod structured_value {
    /// The kind of value.
    #[derive(Clone, PartialEq, ::prost::Oneof)]
    pub enum Kind {
        /// Represents None.
        #[prost(message, tag="1")]
        NoneValue(super::NoneValue),
        /// Represents a double-precision floating-point value (a Python `float`).
        #[prost(double, tag="11")]
        Float64Value(f64),
        /// Represents a signed integer value, limited to 64 bits.
        /// Larger values from Python's arbitrary-precision integers are unsupported.
        #[prost(sint64, tag="12")]
        Int64Value(i64),
        /// Represents a string of Unicode characters stored in a Python `str`.
        /// In Python 3, this is exactly what type `str` is.
        /// In Python 2, this is the UTF-8 encoding of the characters.
        /// For strings with ASCII characters only (as often used in TensorFlow code)
        /// there is effectively no difference between the language versions.
        /// The obsolescent `unicode` type of Python 2 is not supported here.
        #[prost(string, tag="13")]
        StringValue(::prost::alloc::string::String),
        /// Represents a boolean value.
        #[prost(bool, tag="14")]
        BoolValue(bool),
        /// Represents a TensorShape.
        #[prost(message, tag="31")]
        TensorShapeValue(super::TensorShapeProto),
        /// Represents an enum value for dtype.
        // Raw i32 wire value of `DataType`.
        #[prost(enumeration="super::DataType", tag="32")]
        TensorDtypeValue(i32),
        /// Represents a value for tf.TensorSpec.
        #[prost(message, tag="33")]
        TensorSpecValue(super::TensorSpecProto),
        /// Represents a value for tf.TypeSpec.
        // Boxed: `TypeSpecProto` itself contains a `StructuredValue`, so the
        // indirection breaks the otherwise infinitely-sized recursive type.
        #[prost(message, tag="34")]
        TypeSpecValue(::prost::alloc::boxed::Box<super::TypeSpecProto>),
        /// Represents a list of `Value`.
        #[prost(message, tag="51")]
        ListValue(super::ListValue),
        /// Represents a tuple of `Value`.
        #[prost(message, tag="52")]
        TupleValue(super::TupleValue),
        /// Represents a dict `Value`.
        #[prost(message, tag="53")]
        DictValue(super::DictValue),
        /// Represents Python's namedtuple.
        #[prost(message, tag="54")]
        NamedTupleValue(super::NamedTupleValue),
    }
}
/// Represents None.
// Deliberately empty: the mere presence of this message in the oneof encodes
// Python's `None`.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct NoneValue {
}
/// Represents a Python list.
// Structurally identical to `TupleValue`; the distinct message type is what
// records list-vs-tuple on the wire.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct ListValue {
    /// The elements, in order.
    #[prost(message, repeated, tag="1")]
    pub values: ::prost::alloc::vec::Vec<StructuredValue>,
}
/// Represents a Python tuple.
// Structurally identical to `ListValue`; only the message type differs.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct TupleValue {
    /// The elements, in order.
    #[prost(message, repeated, tag="1")]
    pub values: ::prost::alloc::vec::Vec<StructuredValue>,
}
/// Represents a Python dict keyed by `str`.
/// The comment on Unicode from Value.string_value applies analogously.
// Note: a HashMap does not preserve the original dict's insertion order;
// order-sensitive structures use `NamedTupleValue` instead.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct DictValue {
    #[prost(map="string, message", tag="1")]
    pub fields: ::std::collections::HashMap<::prost::alloc::string::String, StructuredValue>,
}
/// Represents a (key, value) pair.
// Used by `NamedTupleValue` to keep fields ordered (unlike a map field).
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct PairValue {
    #[prost(string, tag="1")]
    pub key: ::prost::alloc::string::String,
    // `Option` because prost models all message-typed fields as optional.
    #[prost(message, optional, tag="2")]
    pub value: ::core::option::Option<StructuredValue>,
}
/// Represents Python's namedtuple.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct NamedTupleValue {
    /// The namedtuple class name.
    #[prost(string, tag="1")]
    pub name: ::prost::alloc::string::String,
    /// The fields as an ordered list of pairs (a map would lose field order).
    #[prost(message, repeated, tag="2")]
    pub values: ::prost::alloc::vec::Vec<PairValue>,
}
/// A protobuf to tf.TensorSpec.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct TensorSpecProto {
    /// Optional name of the spec.
    #[prost(string, tag="1")]
    pub name: ::prost::alloc::string::String,
    /// Expected shape of the tensor; `None` if no shape was recorded.
    #[prost(message, optional, tag="2")]
    pub shape: ::core::option::Option<TensorShapeProto>,
    /// Expected dtype, as the raw i32 wire value of `DataType`.
    #[prost(enumeration="DataType", tag="3")]
    pub dtype: i32,
}
/// Represents a tf.TypeSpec
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct TypeSpecProto {
    /// Which TypeSpec subclass this describes, as the raw i32 wire value of
    /// `type_spec_proto::TypeSpecClass`.
    #[prost(enumeration="type_spec_proto::TypeSpecClass", tag="1")]
    pub type_spec_class: i32,
    /// The value returned by TypeSpec._serialize().
    // Boxed to break the recursion TypeSpecProto -> StructuredValue ->
    // TypeSpecProto.
    #[prost(message, optional, boxed, tag="2")]
    pub type_state: ::core::option::Option<::prost::alloc::boxed::Box<StructuredValue>>,
    /// This is currently redundant with the type_spec_class enum, and is only
    /// used for error reporting. In particular, if you use an older binary to
    /// load a newer model, and the model uses a TypeSpecClass that the older
    /// binary doesn't support, then this lets us display a useful error message.
    #[prost(string, tag="3")]
    pub type_spec_class_name: ::prost::alloc::string::String,
}
1094/// Nested message and enum types in `TypeSpecProto`.
1095pub mod type_spec_proto {
1096 #[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord, ::prost::Enumeration)]
1097 #[repr(i32)]
1098 pub enum TypeSpecClass {
1099 Unknown = 0,
1100 /// tf.SparseTensorSpec
1101 SparseTensorSpec = 1,
1102 /// tf.IndexedSlicesSpec
1103 IndexedSlicesSpec = 2,
1104 /// tf.RaggedTensorSpec
1105 RaggedTensorSpec = 3,
1106 /// tf.TensorArraySpec
1107 TensorArraySpec = 4,
1108 /// tf.data.DatasetSpec
1109 DataDatasetSpec = 5,
1110 /// IteratorSpec from data/ops/iterator_ops.py
1111 DataIteratorSpec = 6,
1112 /// tf.OptionalSpec
1113 OptionalSpec = 7,
1114 /// PerReplicaSpec from distribute/values.py
1115 PerReplicaSpec = 8,
1116 /// tf.VariableSpec
1117 VariableSpec = 9,
1118 }
1119 impl TypeSpecClass {
1120 /// String value of the enum field names used in the ProtoBuf definition.
1121 ///
1122 /// The values are not transformed in any way and thus are considered stable
1123 /// (if the ProtoBuf definition does not change) and safe for programmatic use.
1124 pub fn as_str_name(&self) -> &'static str {
1125 match self {
1126 TypeSpecClass::Unknown => "UNKNOWN",
1127 TypeSpecClass::SparseTensorSpec => "SPARSE_TENSOR_SPEC",
1128 TypeSpecClass::IndexedSlicesSpec => "INDEXED_SLICES_SPEC",
1129 TypeSpecClass::RaggedTensorSpec => "RAGGED_TENSOR_SPEC",
1130 TypeSpecClass::TensorArraySpec => "TENSOR_ARRAY_SPEC",
1131 TypeSpecClass::DataDatasetSpec => "DATA_DATASET_SPEC",
1132 TypeSpecClass::DataIteratorSpec => "DATA_ITERATOR_SPEC",
1133 TypeSpecClass::OptionalSpec => "OPTIONAL_SPEC",
1134 TypeSpecClass::PerReplicaSpec => "PER_REPLICA_SPEC",
1135 TypeSpecClass::VariableSpec => "VARIABLE_SPEC",
1136 }
1137 }
1138 }
1139}
1140// A SavedObjectGraph is part of object-based SavedModels in TF 2.0. It
1141// describes the directed graph of Python objects (or equivalent in other
1142// languages) that make up a model, with nodes\[0\] at the root.
1143
1144// SavedObjectGraph shares some structure with TrackableObjectGraph, but
1145// SavedObjectGraph belongs to the MetaGraph and contains pointers to functions
1146// and type information, while TrackableObjectGraph lives in the checkpoint
1147// and contains pointers only to variable values.
1148
/// Object graph of a TF 2.0 object-based SavedModel; lives in the MetaGraph
/// (unlike `TrackableObjectGraph`, which lives in the checkpoint).
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct SavedObjectGraph {
    /// Flattened list of objects in the object graph.
    ///
    /// The position of the object in this list indicates its id.
    /// Nodes\[0\] is considered the root node.
    #[prost(message, repeated, tag="1")]
    pub nodes: ::prost::alloc::vec::Vec<SavedObject>,
    /// Information about captures and output structures in concrete functions.
    /// Referenced from SavedBareConcreteFunction and SavedFunction.
    #[prost(map="string, message", tag="2")]
    pub concrete_functions: ::std::collections::HashMap<::prost::alloc::string::String, SavedConcreteFunction>,
}
/// One node of a `SavedObjectGraph`; reuses the edge/slot-variable message
/// types from `trackable_object_graph`.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct SavedObject {
    /// Objects which this object depends on: named edges in the dependency
    /// graph.
    ///
    /// Note: currently only valid if kind == "user_object".
    #[prost(message, repeated, tag="1")]
    pub children: ::prost::alloc::vec::Vec<trackable_object_graph::trackable_object::ObjectReference>,
    /// Slot variables owned by this object. This describes the three-way
    /// (optimizer, variable, slot variable) relationship; none of the three
    /// depend on the others directly.
    ///
    /// Note: currently only valid if kind == "user_object".
    // Tag 2 is intentionally unused here (reserved in the .proto).
    #[prost(message, repeated, tag="3")]
    pub slot_variables: ::prost::alloc::vec::Vec<trackable_object_graph::trackable_object::SlotVariableReference>,
    /// What this object is: user object, asset, function, variable, etc.
    #[prost(oneof="saved_object::Kind", tags="4, 5, 6, 7, 8, 9, 10")]
    pub kind: ::core::option::Option<saved_object::Kind>,
}
/// Nested message and enum types in `SavedObject`.
pub mod saved_object {
    /// The concrete kind of a `SavedObject`; each variant wraps the matching
    /// `Saved*` payload message.
    #[derive(Clone, PartialEq, ::prost::Oneof)]
    pub enum Kind {
        #[prost(message, tag="4")]
        UserObject(super::SavedUserObject),
        #[prost(message, tag="5")]
        Asset(super::SavedAsset),
        #[prost(message, tag="6")]
        Function(super::SavedFunction),
        #[prost(message, tag="7")]
        Variable(super::SavedVariable),
        #[prost(message, tag="8")]
        BareConcreteFunction(super::SavedBareConcreteFunction),
        #[prost(message, tag="9")]
        Constant(super::SavedConstant),
        #[prost(message, tag="10")]
        Resource(super::SavedResource),
    }
}
/// A SavedUserObject is an object (in the object-oriented language of the
/// TensorFlow program) of some user- or framework-defined class other than
/// those handled specifically by the other kinds of SavedObjects.
///
/// This object cannot be evaluated as a tensor, and therefore cannot be bound
/// to an input of a function.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct SavedUserObject {
    /// Corresponds to a registration of the type to use in the loading program.
    #[prost(string, tag="1")]
    pub identifier: ::prost::alloc::string::String,
    /// Version information from the producer of this SavedUserObject.
    #[prost(message, optional, tag="2")]
    pub version: ::core::option::Option<VersionDef>,
    /// Initialization-related metadata.
    // Opaque to this binding; format is defined by the producer — do not parse
    // without consulting the TF saved_object_graph.proto docs.
    #[prost(string, tag="3")]
    pub metadata: ::prost::alloc::string::String,
}
/// A SavedAsset points to an asset in the MetaGraph.
///
/// When bound to a function this object evaluates to a tensor with the absolute
/// filename. Users should not depend on a particular part of the filename to
/// remain stable (e.g. basename could be changed).
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct SavedAsset {
    /// Index into `MetaGraphDef.asset_file_def[]` that describes the Asset.
    ///
    /// Only the field `AssetFileDef.filename` is used. Other fields, such as
    /// `AssetFileDef.tensor_info`, MUST be ignored.
    #[prost(int32, tag="1")]
    pub asset_file_def_index: i32,
}
/// A function with multiple signatures, possibly with non-Tensor arguments.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct SavedFunction {
    /// Keys into `SavedObjectGraph.concrete_functions`, one per traced
    /// signature.
    #[prost(string, repeated, tag="1")]
    pub concrete_functions: ::prost::alloc::vec::Vec<::prost::alloc::string::String>,
    /// The Python-level signature description (see `FunctionSpec`).
    #[prost(message, optional, tag="2")]
    pub function_spec: ::core::option::Option<FunctionSpec>,
}
/// Stores low-level information about a concrete function. Referenced in either
/// a SavedFunction or a SavedBareConcreteFunction.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct SavedConcreteFunction {
    /// Bound inputs to the function. The SavedObjects identified by the node ids
    /// given here are appended as extra inputs to the caller-supplied inputs.
    /// The only types of SavedObjects valid here are SavedVariable, SavedResource
    /// and SavedAsset.
    // Tag 1 is intentionally unused here (reserved in the .proto).
    #[prost(int32, repeated, tag="2")]
    pub bound_inputs: ::prost::alloc::vec::Vec<i32>,
    /// Input in canonicalized form that was received to create this concrete
    /// function.
    #[prost(message, optional, tag="3")]
    pub canonicalized_input_signature: ::core::option::Option<StructuredValue>,
    /// Output that was the return value of this function after replacing all
    /// Tensors with TensorSpecs. This can be an arbitrary nested function and will
    /// be used to reconstruct the full structure from pure tensors.
    #[prost(message, optional, tag="4")]
    pub output_signature: ::core::option::Option<StructuredValue>,
}
/// A concrete function saved on its own, without a wrapping `SavedFunction`.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct SavedBareConcreteFunction {
    /// Identifies a SavedConcreteFunction.
    // Key into `SavedObjectGraph.concrete_functions`.
    #[prost(string, tag="1")]
    pub concrete_function_name: ::prost::alloc::string::String,
    /// A sequence of unique strings, one per Tensor argument.
    #[prost(string, repeated, tag="2")]
    pub argument_keywords: ::prost::alloc::vec::Vec<::prost::alloc::string::String>,
    /// The prefix of `argument_keywords` which may be identified by position.
    #[prost(int64, tag="3")]
    pub allowed_positional_arguments: i64,
}
/// A constant tensor, referenced by the op that holds its value.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct SavedConstant {
    /// An Operation name for a ConstantOp in this SavedObjectGraph's MetaGraph.
    #[prost(string, tag="1")]
    pub operation: ::prost::alloc::string::String,
}
/// Represents a Variable that is initialized by loading the contents from the
/// checkpoint.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct SavedVariable {
    /// Element type, as the raw i32 wire value of `DataType`.
    #[prost(enumeration="DataType", tag="1")]
    pub dtype: i32,
    /// Static shape, if known.
    #[prost(message, optional, tag="2")]
    pub shape: ::core::option::Option<TensorShapeProto>,
    /// Whether the variable participates in training.
    #[prost(bool, tag="3")]
    pub trainable: bool,
    /// Raw i32 wire value of `VariableSynchronization`.
    #[prost(enumeration="VariableSynchronization", tag="4")]
    pub synchronization: i32,
    /// Raw i32 wire value of `VariableAggregation`.
    #[prost(enumeration="VariableAggregation", tag="5")]
    pub aggregation: i32,
    /// Name of the variable.
    #[prost(string, tag="6")]
    pub name: ::prost::alloc::string::String,
}
/// Represents `FunctionSpec` used in `Function`. This represents a
/// function that has been wrapped as a TensorFlow `Function`.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct FunctionSpec {
    /// Full arg spec from inspect.getfullargspec().
    #[prost(message, optional, tag="1")]
    pub fullargspec: ::core::option::Option<StructuredValue>,
    /// Whether this represents a class method.
    #[prost(bool, tag="2")]
    pub is_method: bool,
    /// The input signature, if specified.
    // Tags 3-4 are intentionally unused here (reserved in the .proto).
    #[prost(message, optional, tag="5")]
    pub input_signature: ::core::option::Option<StructuredValue>,
}
/// A SavedResource represents a TF object that holds state during its lifetime.
/// An object of this type can have a reference to a:
/// create_resource() and an initialize() function.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct SavedResource {
    /// A device specification indicating a required placement for the resource
    /// creation function, e.g. "CPU". An empty string allows the user to select a
    /// device.
    #[prost(string, tag="1")]
    pub device: ::prost::alloc::string::String,
}
/// Protocol buffer representing the configuration of a Saver.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct SaverDef {
    /// The name of the tensor in which to specify the filename when saving or
    /// restoring a model checkpoint.
    #[prost(string, tag="1")]
    pub filename_tensor_name: ::prost::alloc::string::String,
    /// The operation to run when saving a model checkpoint.
    #[prost(string, tag="2")]
    pub save_tensor_name: ::prost::alloc::string::String,
    /// The operation to run when restoring a model checkpoint.
    #[prost(string, tag="3")]
    pub restore_op_name: ::prost::alloc::string::String,
    /// Maximum number of checkpoints to keep. If 0, no checkpoints are deleted.
    #[prost(int32, tag="4")]
    pub max_to_keep: i32,
    /// Shard the save files, one per device that has Variable nodes.
    #[prost(bool, tag="5")]
    pub sharded: bool,
    /// How often to keep an additional checkpoint. If not specified, only the last
    /// "max_to_keep" checkpoints are kept; if specified, in addition to keeping
    /// the last "max_to_keep" checkpoints, an additional checkpoint will be kept
    /// for every n hours of training.
    #[prost(float, tag="6")]
    pub keep_checkpoint_every_n_hours: f32,
    /// On-disk checkpoint format, as the raw i32 wire value of
    /// `saver_def::CheckpointFormatVersion`.
    #[prost(enumeration="saver_def::CheckpointFormatVersion", tag="7")]
    pub version: i32,
}
1348/// Nested message and enum types in `SaverDef`.
1349pub mod saver_def {
1350 /// A version number that identifies a different on-disk checkpoint format.
1351 /// Usually, each subclass of BaseSaverBuilder works with a particular
1352 /// version/format. However, it is possible that the same builder may be
1353 /// upgraded to support a newer checkpoint format in the future.
1354 #[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord, ::prost::Enumeration)]
1355 #[repr(i32)]
1356 pub enum CheckpointFormatVersion {
1357 /// Internal legacy format.
1358 Legacy = 0,
1359 /// Deprecated format: tf.Saver() which works with tensorflow::table::Table.
1360 V1 = 1,
1361 /// Current format: more efficient.
1362 V2 = 2,
1363 }
1364 impl CheckpointFormatVersion {
1365 /// String value of the enum field names used in the ProtoBuf definition.
1366 ///
1367 /// The values are not transformed in any way and thus are considered stable
1368 /// (if the ProtoBuf definition does not change) and safe for programmatic use.
1369 pub fn as_str_name(&self) -> &'static str {
1370 match self {
1371 CheckpointFormatVersion::Legacy => "LEGACY",
1372 CheckpointFormatVersion::V1 => "V1",
1373 CheckpointFormatVersion::V2 => "V2",
1374 }
1375 }
1376 }
1377}
/// NOTE: This protocol buffer is evolving, and will go through revisions in the
/// coming months.
///
/// Protocol buffer containing the following which are necessary to restart
/// training, run inference. It can be used to serialize/de-serialize memory
/// objects necessary for running computation in a graph when crossing the
/// process boundary. It can be used for long term storage of graphs,
/// cross-language execution of graphs, etc.
/// MetaInfoDef
/// GraphDef
/// SaverDef
/// CollectionDef
/// TensorInfo
/// SignatureDef
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct MetaGraphDef {
    /// Meta information about this graph (version strings, tags, ...).
    #[prost(message, optional, tag="1")]
    pub meta_info_def: ::core::option::Option<meta_graph_def::MetaInfoDef>,
    /// GraphDef.
    #[prost(message, optional, tag="2")]
    pub graph_def: ::core::option::Option<GraphDef>,
    /// SaverDef.
    #[prost(message, optional, tag="3")]
    pub saver_def: ::core::option::Option<SaverDef>,
    /// collection_def: Map from collection name to collections.
    /// See CollectionDef section for details.
    #[prost(map="string, message", tag="4")]
    pub collection_def: ::std::collections::HashMap<::prost::alloc::string::String, CollectionDef>,
    /// signature_def: Map from user supplied key for a signature to a single
    /// SignatureDef.
    #[prost(map="string, message", tag="5")]
    pub signature_def: ::std::collections::HashMap<::prost::alloc::string::String, SignatureDef>,
    /// Asset file def to be used with the defined graph.
    #[prost(message, repeated, tag="6")]
    pub asset_file_def: ::prost::alloc::vec::Vec<AssetFileDef>,
    /// Extra information about the structure of functions and stateful objects.
    #[prost(message, optional, tag="7")]
    pub object_graph_def: ::core::option::Option<SavedObjectGraph>,
}
/// Nested message and enum types in `MetaGraphDef`.
pub mod meta_graph_def {
    /// Meta information regarding the graph to be exported. To be used by users
    /// of this protocol buffer to encode information regarding their meta graph.
    #[derive(Clone, PartialEq, ::prost::Message)]
    pub struct MetaInfoDef {
        /// User specified Version string. Can be the name of the model and revision,
        /// steps this model has been trained to, etc.
        #[prost(string, tag="1")]
        pub meta_graph_version: ::prost::alloc::string::String,
        /// A copy of the OpDefs used by the producer of this graph_def.
        /// Descriptions and Ops not used in graph_def are stripped out.
        #[prost(message, optional, tag="2")]
        pub stripped_op_list: ::core::option::Option<super::OpList>,
        /// A serialized protobuf. Can be the time this meta graph is created, or
        /// modified, or name of the model.
        #[prost(message, optional, tag="3")]
        pub any_info: ::core::option::Option<::prost_types::Any>,
        /// User supplied tag(s) on the meta_graph and included graph_def.
        ///
        /// MetaGraphDefs should be tagged with their capabilities or use-cases.
        /// Examples: "train", "serve", "gpu", "tpu", etc.
        /// These tags enable loaders to access the MetaGraph(s) appropriate for a
        /// specific use-case or runtime environment.
        #[prost(string, repeated, tag="4")]
        pub tags: ::prost::alloc::vec::Vec<::prost::alloc::string::String>,
        /// The __version__ string of the tensorflow build used to write this graph.
        /// This will be populated by the framework, which will overwrite any user
        /// supplied value.
        #[prost(string, tag="5")]
        pub tensorflow_version: ::prost::alloc::string::String,
        /// The __git_version__ string of the tensorflow build used to write this
        /// graph. This will be populated by the framework, which will overwrite any
        /// user supplied value.
        #[prost(string, tag="6")]
        pub tensorflow_git_version: ::prost::alloc::string::String,
        /// A flag to denote whether default-valued attrs have been stripped from
        /// the nodes in this graph_def.
        #[prost(bool, tag="7")]
        pub stripped_default_attrs: bool,
    }
}
/// CollectionDef should cover most collections.
/// To add a user-defined collection, do one of the following:
/// 1. For simple data types, such as string, int, float:
///       tf.add_to_collection("your_collection_name", your_simple_value)
///    strings will be stored as bytes_list.
///
/// 2. For Protobuf types, there are three ways to add them:
///    1) tf.add_to_collection("your_collection_name",
///         your_proto.SerializeToString())
///
///       collection_def {
///         key: "user_defined_bytes_collection"
///         value {
///           bytes_list {
///             value: "queue_name: \"test_queue\"\n"
///           }
///         }
///       }
///
///  or
///
///    2) tf.add_to_collection("your_collection_name", str(your_proto))
///
///       collection_def {
///         key: "user_defined_string_collection"
///         value {
///          bytes_list {
///             value: "\n\ntest_queue"
///           }
///         }
///       }
///
///  or
///
///    3) any_buf = any_pb2.Any()
///       tf.add_to_collection("your_collection_name",
///         any_buf.Pack(your_proto))
///
///       collection_def {
///         key: "user_defined_any_collection"
///         value {
///           any_list {
///             value {
///               type_url: "type.googleapis.com/tensorflow.QueueRunnerDef"
///               value: "\n\ntest_queue"
///             }
///           }
///         }
///       }
///
/// 3. For Python objects, implement to_proto() and from_proto(), and register
///    them in the following manner:
///       ops.register_proto_function("your_collection_name",
///          proto_type,
///          to_proto=YourPythonObject.to_proto,
///          from_proto=YourPythonObject.from_proto)
///    These functions will be invoked to serialize and de-serialize the
///    collection. For example,
///       ops.register_proto_function(ops.GraphKeys.GLOBAL_VARIABLES,
///          proto_type=variable_pb2.VariableDef,
///          to_proto=Variable.to_proto,
///          from_proto=Variable.from_proto)
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct CollectionDef {
    /// The concrete payload of this collection: exactly one of the typed lists
    /// declared in `collection_def::Kind` (tags 1-5). `None` when the oneof
    /// was not set in the decoded message.
    #[prost(oneof="collection_def::Kind", tags="1, 2, 3, 4, 5")]
    pub kind: ::core::option::Option<collection_def::Kind>,
}
/// Nested message and enum types in `CollectionDef`.
pub mod collection_def {
    /// NodeList is used for collecting nodes in graph. For example
    /// collection_def {
    ///   key: "summaries"
    ///   value {
    ///     node_list {
    ///       value: "input_producer/ScalarSummary:0"
    ///       value: "shuffle_batch/ScalarSummary:0"
    ///       value: "ImageSummary:0"
    ///     }
    ///   }
    /// }
    #[derive(Clone, PartialEq, ::prost::Message)]
    pub struct NodeList {
        #[prost(string, repeated, tag="1")]
        pub value: ::prost::alloc::vec::Vec<::prost::alloc::string::String>,
    }
    /// BytesList is used for collecting strings and serialized protobufs. For
    /// example:
    /// collection_def {
    ///   key: "trainable_variables"
    ///   value {
    ///     bytes_list {
    ///       value: "\n\017conv1/weights:0\022\024conv1/weights/Assign
    ///              \032\024conv1/weights/read:0"
    ///       value: "\n\016conv1/biases:0\022\023conv1/biases/Assign\032
    ///              \023conv1/biases/read:0"
    ///     }
    ///   }
    /// }
    #[derive(Clone, PartialEq, ::prost::Message)]
    pub struct BytesList {
        #[prost(bytes="vec", repeated, tag="1")]
        pub value: ::prost::alloc::vec::Vec<::prost::alloc::vec::Vec<u8>>,
    }
    /// Int64List is used for collecting int, int64 and long values.
    #[derive(Clone, PartialEq, ::prost::Message)]
    pub struct Int64List {
        #[prost(int64, repeated, tag="1")]
        pub value: ::prost::alloc::vec::Vec<i64>,
    }
    /// FloatList is used for collecting float values.
    #[derive(Clone, PartialEq, ::prost::Message)]
    pub struct FloatList {
        #[prost(float, repeated, tag="1")]
        pub value: ::prost::alloc::vec::Vec<f32>,
    }
    /// AnyList is used for collecting Any protos.
    #[derive(Clone, PartialEq, ::prost::Message)]
    pub struct AnyList {
        #[prost(message, repeated, tag="1")]
        pub value: ::prost::alloc::vec::Vec<::prost_types::Any>,
    }
    /// The oneof payload for `CollectionDef.kind`; each variant's prost tag
    /// matches the `tags="1, 2, 3, 4, 5"` list on that field.
    #[derive(Clone, PartialEq, ::prost::Oneof)]
    pub enum Kind {
        #[prost(message, tag="1")]
        NodeList(NodeList),
        #[prost(message, tag="2")]
        BytesList(BytesList),
        #[prost(message, tag="3")]
        Int64List(Int64List),
        #[prost(message, tag="4")]
        FloatList(FloatList),
        #[prost(message, tag="5")]
        AnyList(AnyList),
    }
}
/// Information about a Tensor necessary for feeding or retrieval.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct TensorInfo {
    /// Element type of the tensor, stored as the raw i32 value of the
    /// `DataType` enum (prost generates `enumeration` fields as open i32s).
    #[prost(enumeration="DataType", tag="2")]
    pub dtype: i32,
    /// The static shape should be recorded here, to the extent that it can
    /// be known in advance. In the case of a SparseTensor, this field describes
    /// the logical shape of the represented tensor (aka dense_shape).
    #[prost(message, optional, tag="3")]
    pub tensor_shape: ::core::option::Option<TensorShapeProto>,
    /// How the tensor is located in the graph: a plain tensor name (tag 1),
    /// a COO sparse triple (tag 4), or a composite tensor (tag 5).
    /// `None` when the oneof was not set in the decoded message.
    #[prost(oneof="tensor_info::Encoding", tags="1, 4, 5")]
    pub encoding: ::core::option::Option<tensor_info::Encoding>,
}
/// Nested message and enum types in `TensorInfo`.
pub mod tensor_info {
    /// For sparse tensors, The COO encoding stores a triple of values, indices,
    /// and shape.
    #[derive(Clone, PartialEq, ::prost::Message)]
    pub struct CooSparse {
        /// The shape of the values Tensor is \[?\]. Its dtype must be the dtype of
        /// the SparseTensor as a whole, given in the enclosing TensorInfo.
        #[prost(string, tag="1")]
        pub values_tensor_name: ::prost::alloc::string::String,
        /// The indices Tensor must have dtype int64 and shape \[?, ?\].
        #[prost(string, tag="2")]
        pub indices_tensor_name: ::prost::alloc::string::String,
        /// The dynamic logical shape represented by the SparseTensor is recorded in
        /// the Tensor referenced here. It must have dtype int64 and shape \[?\].
        #[prost(string, tag="3")]
        pub dense_shape_tensor_name: ::prost::alloc::string::String,
    }
    /// Generic encoding for composite tensors.
    #[derive(Clone, PartialEq, ::prost::Message)]
    pub struct CompositeTensor {
        /// The serialized TypeSpec for the composite tensor.
        #[prost(message, optional, tag="1")]
        pub type_spec: ::core::option::Option<super::TypeSpecProto>,
        /// A TensorInfo for each flattened component tensor.
        #[prost(message, repeated, tag="2")]
        pub components: ::prost::alloc::vec::Vec<super::TensorInfo>,
    }
    /// The oneof payload for `TensorInfo.encoding`; prost tags match the
    /// `tags="1, 4, 5"` list on that field.
    #[derive(Clone, PartialEq, ::prost::Oneof)]
    pub enum Encoding {
        /// For dense `Tensor`s, the name of the tensor in the graph.
        #[prost(string, tag="1")]
        Name(::prost::alloc::string::String),
        /// There are many possible encodings of sparse matrices
        /// (<https://en.wikipedia.org/wiki/Sparse_matrix>). Currently, TensorFlow
        /// uses only the COO encoding. This is supported and documented in the
        /// SparseTensor Python class.
        #[prost(message, tag="4")]
        CooSparse(CooSparse),
        /// Generic encoding for CompositeTensors.
        #[prost(message, tag="5")]
        CompositeTensor(CompositeTensor),
    }
}
/// SignatureDef defines the signature of a computation supported by a TensorFlow
/// graph.
///
/// For example, a model with two loss computations, sharing a single input,
/// might have the following signature_def map.
///
/// Note that across the two SignatureDefs "loss_A" and "loss_B", the input key,
/// output key, and method_name are identical, and will be used by system(s) that
/// implement or rely upon this particular loss method. The output tensor names
/// differ, demonstrating how different outputs can exist for the same method.
///
/// signature_def {
///   key: "loss_A"
///   value {
///     inputs {
///       key: "input"
///       value {
///         name: "input:0"
///         dtype: DT_STRING
///         tensor_shape: ...
///       }
///     }
///     outputs {
///       key: "loss_output"
///       value {
///         name: "loss_output_A:0"
///         dtype: DT_FLOAT
///         tensor_shape: ...
///       }
///     }
///   }
///   ...
///   method_name: "some/package/compute_loss"
/// }
/// signature_def {
///   key: "loss_B"
///   value {
///     inputs {
///       key: "input"
///       value {
///         name: "input:0"
///         dtype: DT_STRING
///         tensor_shape: ...
///       }
///     }
///     outputs {
///       key: "loss_output"
///       value {
///         name: "loss_output_B:0"
///         dtype: DT_FLOAT
///         tensor_shape: ...
///       }
///     }
///   }
///   ...
///   method_name: "some/package/compute_loss"
/// }
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct SignatureDef {
    /// Named input parameters. Keyed by user-supplied input name; iteration
    /// order is unspecified (`std::collections::HashMap`).
    #[prost(map="string, message", tag="1")]
    pub inputs: ::std::collections::HashMap<::prost::alloc::string::String, TensorInfo>,
    /// Named output parameters. Keyed by user-supplied output name; iteration
    /// order is unspecified (`std::collections::HashMap`).
    #[prost(map="string, message", tag="2")]
    pub outputs: ::std::collections::HashMap<::prost::alloc::string::String, TensorInfo>,
    /// Extensible method_name information enabling third-party users to mark a
    /// SignatureDef as supporting a particular method. This enables producers and
    /// consumers of SignatureDefs, e.g. a model definition library and a serving
    /// library to have a clear hand-off regarding the semantics of a computation.
    ///
    /// Note that multiple SignatureDefs in a single MetaGraphDef may have the same
    /// method_name. This is commonly used to support multi-headed computation,
    /// where a single graph computation may return multiple results.
    #[prost(string, tag="3")]
    pub method_name: ::prost::alloc::string::String,
}
/// An asset file def for a single file or a set of sharded files with the same
/// name.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct AssetFileDef {
    /// The tensor to bind the asset filename to. `None` when the message field
    /// was absent from the decoded proto.
    #[prost(message, optional, tag="1")]
    pub tensor_info: ::core::option::Option<TensorInfo>,
    /// The filename within an assets directory. Note: does not include the path
    /// prefix, i.e. directories. For an asset at /tmp/path/vocab.txt, the filename
    /// would be "vocab.txt".
    #[prost(string, tag="2")]
    pub filename: ::prost::alloc::string::String,
}
/// SavedModel is the high level serialization format for TensorFlow Models.
/// See [todo: doc links, similar to session_bundle] for more information.
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct SavedModel {
    /// The schema version of the SavedModel instance. Used for versioning when
    /// making future changes to the specification/implementation. Initial value
    /// at release will be 1.
    #[prost(int64, tag="1")]
    pub saved_model_schema_version: i64,
    /// One or more MetaGraphs. A SavedModel on disk may bundle several
    /// MetaGraphDefs distinguished by their `tags` (see `MetaInfoDef.tags`).
    #[prost(message, repeated, tag="2")]
    pub meta_graphs: ::prost::alloc::vec::Vec<MetaGraphDef>,
}