// objc2_metal_performance_shaders_graph/generated/MPSGraphRNNOps.rs
1//! This file has been automatically generated by `objc2`'s `header-translator`.
2//! DO NOT EDIT
3use core::ffi::*;
4use core::ptr::NonNull;
5use objc2::__framework_prelude::*;
6use objc2_foundation::*;
7
8use crate::*;
9
10/// The activation modes for RNN operations.
11///
12/// See also [Apple's documentation](https://developer.apple.com/documentation/metalperformanceshadersgraph/mpsgraphrnnactivation?language=objc)
// NS_ENUM
// Modeled as a newtype over `NSUInteger` (rather than a Rust `enum`) so that
// unknown values received from Objective-C remain representable.
#[repr(transparent)]
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord)]
pub struct MPSGraphRNNActivation(pub NSUInteger);
impl MPSGraphRNNActivation {
    /// Defines a pass through activation.
    #[doc(alias = "MPSGraphRNNActivationNone")]
    pub const None: Self = Self(0);
    /// Defines a ReLU activation.
    #[doc(alias = "MPSGraphRNNActivationRelu")]
    pub const Relu: Self = Self(1);
    /// Defines a Tanh activation.
    #[doc(alias = "MPSGraphRNNActivationTanh")]
    pub const Tanh: Self = Self(2);
    /// Defines a Sigmoid activation.
    #[doc(alias = "MPSGraphRNNActivationSigmoid")]
    pub const Sigmoid: Self = Self(3);
    /// Defines a Hard sigmoid activation.
    #[doc(alias = "MPSGraphRNNActivationHardSigmoid")]
    pub const HardSigmoid: Self = Self(4);
}
34
// SAFETY: `MPSGraphRNNActivation` is `#[repr(transparent)]` over `NSUInteger`,
// so it shares `NSUInteger`'s Objective-C type encoding.
unsafe impl Encode for MPSGraphRNNActivation {
    const ENCODING: Encoding = NSUInteger::ENCODING;
}
38
// SAFETY: A reference to this type is encoded as a pointer to its value encoding.
unsafe impl RefEncode for MPSGraphRNNActivation {
    const ENCODING_REF: Encoding = Encoding::Pointer(&Self::ENCODING);
}
42
extern_class!(
    /// The class that defines the parameters for a single gate RNN operation.
    ///
    /// Use this descriptor with the following ``MPSGraph`` methods:
    /// - ``MPSGraph/singleGateRNNWithSourceTensor:recurrentWeight:initState:descriptor:name:``
    /// - ``MPSGraph/singleGateRNNWithSourceTensor:recurrentWeight:inputWeight:bias:initState:descriptor:name:``
    /// - ``MPSGraph/singleGateRNNWithSourceTensor:recurrentWeight:inputWeight:bias:initState:mask:descriptor:name:``
    /// - ``MPSGraph/singleGateRNNGradientsWithSourceTensor:recurrentWeight:sourceGradient:zState:initState:descriptor:name:``
    /// - ``MPSGraph/singleGateRNNGradientsWithSourceTensor:recurrentWeight:sourceGradient:zState:inputWeight:bias:initState:descriptor:name:``
    /// - ``MPSGraph/singleGateRNNGradientsWithSourceTensor:recurrentWeight:sourceGradient:zState:inputWeight:bias:initState:mask:descriptor:name:``
    /// - ``MPSGraph/singleGateRNNGradientsWithSourceTensor:recurrentWeight:sourceGradient:zState:stateGradient:inputWeight:bias:initState:mask:descriptor:name:``
    ///
    /// Create instances with the associated `descriptor()` class method rather
    /// than configuring fields directly; all configuration goes through the
    /// generated property accessors.
    ///
    /// See also [Apple's documentation](https://developer.apple.com/documentation/metalperformanceshadersgraph/mpsgraphsinglegaternndescriptor?language=objc)
    #[unsafe(super(MPSGraphObject, NSObject))]
    #[derive(Debug, PartialEq, Eq, Hash)]
    #[cfg(feature = "MPSGraphCore")]
    pub struct MPSGraphSingleGateRNNDescriptor;
);
61
// The Objective-C class conforms to `NSCopying`; declared here so the
// descriptor can be copied via `NSCopying::copy`.
#[cfg(feature = "MPSGraphCore")]
extern_conformance!(
    unsafe impl NSCopying for MPSGraphSingleGateRNNDescriptor {}
);
66
// Copying a descriptor yields another object of the same class.
#[cfg(feature = "MPSGraphCore")]
unsafe impl CopyingHelper for MPSGraphSingleGateRNNDescriptor {
    type Result = Self;
}
71
// Standard `NSObject` protocol conformance (isEqual:, hash, description, ...).
#[cfg(feature = "MPSGraphCore")]
extern_conformance!(
    unsafe impl NSObjectProtocol for MPSGraphSingleGateRNNDescriptor {}
);
76
#[cfg(feature = "MPSGraphCore")]
impl MPSGraphSingleGateRNNDescriptor {
    // Getter/setter pairs bridging the Objective-C properties of the
    // descriptor. All methods are `unsafe` because they send messages to the
    // underlying Objective-C object.
    extern_methods!(
        /// A parameter that defines time direction of the input sequence.
        ///
        /// If set to `YES` then the input sequence is passed in reverse time order to the layer.
        /// Note: Ignored when `bidirectional = YES`.
        /// Default value: `NO`.
        #[unsafe(method(reverse))]
        #[unsafe(method_family = none)]
        pub unsafe fn reverse(&self) -> bool;

        /// Setter for [`reverse`][Self::reverse].
        #[unsafe(method(setReverse:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setReverse(&self, reverse: bool);

        /// A parameter that defines a bidirectional RNN layer.
        ///
        /// If set to `YES` then the input sequence is traversed in both directions and the two results
        /// are concatenated together on the channel-axis.
        /// Default value: `NO`.
        #[unsafe(method(bidirectional))]
        #[unsafe(method_family = none)]
        pub unsafe fn bidirectional(&self) -> bool;

        /// Setter for [`bidirectional`][Self::bidirectional].
        #[unsafe(method(setBidirectional:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setBidirectional(&self, bidirectional: bool);

        /// A parameter that makes the RNN layer support training.
        ///
        /// If set to `YES` then the layer will produce training state tensor as a secondary output.
        /// Default value: `NO`.
        #[unsafe(method(training))]
        #[unsafe(method_family = none)]
        pub unsafe fn training(&self) -> bool;

        /// Setter for [`training`][Self::training].
        #[unsafe(method(setTraining:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setTraining(&self, training: bool);

        /// A parameter that defines the activation function to use with the RNN operation.
        ///
        /// Default value: `MPSGraphRNNActivationRelu`.
        #[unsafe(method(activation))]
        #[unsafe(method_family = none)]
        pub unsafe fn activation(&self) -> MPSGraphRNNActivation;

        /// Setter for [`activation`][Self::activation].
        #[unsafe(method(setActivation:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setActivation(&self, activation: MPSGraphRNNActivation);

        /// Creates a single gate RNN descriptor with default values.
        #[unsafe(method(descriptor))]
        #[unsafe(method_family = none)]
        pub unsafe fn descriptor() -> Option<Retained<Self>>;
    );
}
139
/// Methods declared on superclass `NSObject`.
#[cfg(feature = "MPSGraphCore")]
impl MPSGraphSingleGateRNNDescriptor {
    extern_methods!(
        // Plain `-init`; prefer the `descriptor()` class method, which sets
        // the documented default values.
        #[unsafe(method(init))]
        #[unsafe(method_family = init)]
        pub unsafe fn init(this: Allocated<Self>) -> Retained<Self>;

        // `+new` — equivalent to `alloc` followed by `init`.
        #[unsafe(method(new))]
        #[unsafe(method_family = new)]
        pub unsafe fn new() -> Retained<Self>;
    );
}
153
extern_class!(
    /// The class that defines the parameters for a long short-term memory (LSTM) operation.
    ///
    /// Use this descriptor with the following ``MPSGraph`` methods:
    /// - ``MPSGraph/LSTMWithSourceTensor:recurrentWeight:initState:initCell:descriptor:name:``
    /// - ``MPSGraph/LSTMWithSourceTensor:recurrentWeight:inputWeight:bias:initState:initCell:descriptor:name:``
    /// - ``MPSGraph/LSTMWithSourceTensor:recurrentWeight:inputWeight:bias:initState:initCell:mask:peephole:descriptor:name:``
    /// - ``MPSGraph/LSTMGradientsWithSourceTensor:recurrentWeight:sourceGradient:zState:cellOutputFwd:descriptor:name:``
    /// - ``MPSGraph/LSTMGradientsWithSourceTensor:recurrentWeight:sourceGradient:zState:cellOutputFwd:inputWeight:bias:initState:initCell:descriptor:name:``
    /// - ``MPSGraph/LSTMGradientsWithSourceTensor:recurrentWeight:sourceGradient:zState:cellOutputFwd:inputWeight:bias:initState:initCell:mask:descriptor:name:``
    /// - ``MPSGraph/LSTMGradientsWithSourceTensor:recurrentWeight:sourceGradient:zState:cellOutputFwd:stateGradient:cellGradient:inputWeight:bias:initState:initCell:mask:peephole:descriptor:name:``
    ///
    /// Create instances with the associated `descriptor()` class method and
    /// configure them through the generated property accessors.
    ///
    /// See also [Apple's documentation](https://developer.apple.com/documentation/metalperformanceshadersgraph/mpsgraphlstmdescriptor?language=objc)
    #[unsafe(super(MPSGraphObject, NSObject))]
    #[derive(Debug, PartialEq, Eq, Hash)]
    #[cfg(feature = "MPSGraphCore")]
    pub struct MPSGraphLSTMDescriptor;
);
172
// The Objective-C class conforms to `NSCopying`; declared here so the
// descriptor can be copied via `NSCopying::copy`.
#[cfg(feature = "MPSGraphCore")]
extern_conformance!(
    unsafe impl NSCopying for MPSGraphLSTMDescriptor {}
);
177
// Copying a descriptor yields another object of the same class.
#[cfg(feature = "MPSGraphCore")]
unsafe impl CopyingHelper for MPSGraphLSTMDescriptor {
    type Result = Self;
}
182
// Standard `NSObject` protocol conformance (isEqual:, hash, description, ...).
#[cfg(feature = "MPSGraphCore")]
extern_conformance!(
    unsafe impl NSObjectProtocol for MPSGraphLSTMDescriptor {}
);
187
#[cfg(feature = "MPSGraphCore")]
impl MPSGraphLSTMDescriptor {
    // Getter/setter pairs bridging the Objective-C properties of the
    // descriptor. All methods are `unsafe` because they send messages to the
    // underlying Objective-C object.
    extern_methods!(
        /// A parameter that defines time direction of the input sequence.
        ///
        /// If set to `YES` then the input sequence is passed in reverse time order to the layer.
        /// Note: Ignored when `bidirectional = YES`.
        /// Default value: `NO`.
        #[unsafe(method(reverse))]
        #[unsafe(method_family = none)]
        pub unsafe fn reverse(&self) -> bool;

        /// Setter for [`reverse`][Self::reverse].
        #[unsafe(method(setReverse:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setReverse(&self, reverse: bool);

        /// A parameter that defines a bidirectional LSTM layer.
        ///
        /// If set to `YES` then the input sequence is traversed in both directions and the two results
        /// are concatenated together on the channel-axis.
        /// Default value: `NO`.
        #[unsafe(method(bidirectional))]
        #[unsafe(method_family = none)]
        pub unsafe fn bidirectional(&self) -> bool;

        /// Setter for [`bidirectional`][Self::bidirectional].
        #[unsafe(method(setBidirectional:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setBidirectional(&self, bidirectional: bool);

        /// A parameter that controls whether or not to return the output cell from the LSTM layer.
        ///
        /// If set to `YES` then this layer will produce the internal cell of the LSTM unit as secondary output.
        /// Default value: `NO`.
        #[unsafe(method(produceCell))]
        #[unsafe(method_family = none)]
        pub unsafe fn produceCell(&self) -> bool;

        /// Setter for [`produceCell`][Self::produceCell].
        #[unsafe(method(setProduceCell:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setProduceCell(&self, produce_cell: bool);

        /// A parameter that enables the LSTM layer to support training.
        ///
        /// If set to `YES` then the layer will produce training state tensor as a secondary output.
        /// Default value: `NO`.
        #[unsafe(method(training))]
        #[unsafe(method_family = none)]
        pub unsafe fn training(&self) -> bool;

        /// Setter for [`training`][Self::training].
        #[unsafe(method(setTraining:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setTraining(&self, training: bool);

        /// A parameter that controls the internal order of the LSTM gates.
        ///
        /// If set to `YES` then the layer will use the gate-ordering `[ i, z, f, o ]` instead of default `[ i, f, z, o ]`.
        /// Default value: `NO`
        #[unsafe(method(forgetGateLast))]
        #[unsafe(method_family = none)]
        pub unsafe fn forgetGateLast(&self) -> bool;

        /// Setter for [`forgetGateLast`][Self::forgetGateLast].
        #[unsafe(method(setForgetGateLast:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setForgetGateLast(&self, forget_gate_last: bool);

        /// A parameter that defines the activation function used with the input gate of the LSTM operation.
        ///
        /// Default value: `MPSGraphRNNActivationSigmoid`.
        #[unsafe(method(inputGateActivation))]
        #[unsafe(method_family = none)]
        pub unsafe fn inputGateActivation(&self) -> MPSGraphRNNActivation;

        /// Setter for [`inputGateActivation`][Self::inputGateActivation].
        #[unsafe(method(setInputGateActivation:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setInputGateActivation(&self, input_gate_activation: MPSGraphRNNActivation);

        /// A parameter that defines the activation function used with the forget gate of the LSTM operation.
        ///
        /// Default value: `MPSGraphRNNActivationSigmoid`.
        #[unsafe(method(forgetGateActivation))]
        #[unsafe(method_family = none)]
        pub unsafe fn forgetGateActivation(&self) -> MPSGraphRNNActivation;

        /// Setter for [`forgetGateActivation`][Self::forgetGateActivation].
        #[unsafe(method(setForgetGateActivation:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setForgetGateActivation(&self, forget_gate_activation: MPSGraphRNNActivation);

        /// A parameter that defines the activation function used with the cell gate of the LSTM operation.
        ///
        /// Default value: `MPSGraphRNNActivationTanh`.
        #[unsafe(method(cellGateActivation))]
        #[unsafe(method_family = none)]
        pub unsafe fn cellGateActivation(&self) -> MPSGraphRNNActivation;

        /// Setter for [`cellGateActivation`][Self::cellGateActivation].
        #[unsafe(method(setCellGateActivation:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setCellGateActivation(&self, cell_gate_activation: MPSGraphRNNActivation);

        /// A parameter that defines the activation function used with the output gate of the LSTM operation.
        ///
        /// Default value: `MPSGraphRNNActivationSigmoid`.
        #[unsafe(method(outputGateActivation))]
        #[unsafe(method_family = none)]
        pub unsafe fn outputGateActivation(&self) -> MPSGraphRNNActivation;

        /// Setter for [`outputGateActivation`][Self::outputGateActivation].
        #[unsafe(method(setOutputGateActivation:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setOutputGateActivation(&self, output_gate_activation: MPSGraphRNNActivation);

        /// A parameter that defines the activation function used with the current cell value of the LSTM operation.
        ///
        /// Default value: `MPSGraphRNNActivationTanh`.
        #[unsafe(method(activation))]
        #[unsafe(method_family = none)]
        pub unsafe fn activation(&self) -> MPSGraphRNNActivation;

        /// Setter for [`activation`][Self::activation].
        #[unsafe(method(setActivation:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setActivation(&self, activation: MPSGraphRNNActivation);

        /// Creates an LSTM descriptor with default values.
        #[unsafe(method(descriptor))]
        #[unsafe(method_family = none)]
        pub unsafe fn descriptor() -> Option<Retained<Self>>;
    );
}
324
/// Methods declared on superclass `NSObject`.
#[cfg(feature = "MPSGraphCore")]
impl MPSGraphLSTMDescriptor {
    extern_methods!(
        // Plain `-init`; prefer the `descriptor()` class method, which sets
        // the documented default values.
        #[unsafe(method(init))]
        #[unsafe(method_family = init)]
        pub unsafe fn init(this: Allocated<Self>) -> Retained<Self>;

        // `+new` — equivalent to `alloc` followed by `init`.
        #[unsafe(method(new))]
        #[unsafe(method_family = new)]
        pub unsafe fn new() -> Retained<Self>;
    );
}
338
extern_class!(
    /// The class that defines the parameters for a gated recurrent unit (GRU) operation.
    ///
    /// Use this descriptor with the following ``MPSGraph`` methods:
    /// - ``MPSGraph/GRUWithSourceTensor:recurrentWeight:inputWeight:bias:descriptor:name:``
    /// - ``MPSGraph/GRUWithSourceTensor:recurrentWeight:inputWeight:bias:initState:descriptor:name:``
    /// - ``MPSGraph/GRUWithSourceTensor:recurrentWeight:inputWeight:bias:initState:mask:secondaryBias:descriptor:name:``
    /// - ``MPSGraph/GRUGradientsWithSourceTensor:recurrentWeight:sourceGradient:zState:outputFwd:inputWeight:bias:descriptor:name:``
    /// - ``MPSGraph/GRUGradientsWithSourceTensor:recurrentWeight:sourceGradient:zState:outputFwd:inputWeight:bias:initState:descriptor:name:``
    /// - ``MPSGraph/GRUGradientsWithSourceTensor:recurrentWeight:sourceGradient:zState:outputFwd:stateGradient:inputWeight:bias:initState:mask:secondaryBias:descriptor:name:``
    ///
    /// Create instances with the associated `descriptor()` class method and
    /// configure them through the generated property accessors.
    ///
    /// See also [Apple's documentation](https://developer.apple.com/documentation/metalperformanceshadersgraph/mpsgraphgrudescriptor?language=objc)
    #[unsafe(super(MPSGraphObject, NSObject))]
    #[derive(Debug, PartialEq, Eq, Hash)]
    #[cfg(feature = "MPSGraphCore")]
    pub struct MPSGraphGRUDescriptor;
);
356
// The Objective-C class conforms to `NSCopying`; declared here so the
// descriptor can be copied via `NSCopying::copy`.
#[cfg(feature = "MPSGraphCore")]
extern_conformance!(
    unsafe impl NSCopying for MPSGraphGRUDescriptor {}
);
361
// Copying a descriptor yields another object of the same class.
#[cfg(feature = "MPSGraphCore")]
unsafe impl CopyingHelper for MPSGraphGRUDescriptor {
    type Result = Self;
}
366
// Standard `NSObject` protocol conformance (isEqual:, hash, description, ...).
#[cfg(feature = "MPSGraphCore")]
extern_conformance!(
    unsafe impl NSObjectProtocol for MPSGraphGRUDescriptor {}
);
371
#[cfg(feature = "MPSGraphCore")]
impl MPSGraphGRUDescriptor {
    // Getter/setter pairs bridging the Objective-C properties of the
    // descriptor. All methods are `unsafe` because they send messages to the
    // underlying Objective-C object.
    extern_methods!(
        /// A parameter that defines the time direction of the input sequence.
        ///
        /// If set to `YES` then the input sequence is passed in reverse time order to the layer.
        /// Note: Ignored when `bidirectional = YES`.
        /// Default value: `NO`.
        #[unsafe(method(reverse))]
        #[unsafe(method_family = none)]
        pub unsafe fn reverse(&self) -> bool;

        /// Setter for [`reverse`][Self::reverse].
        #[unsafe(method(setReverse:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setReverse(&self, reverse: bool);

        /// A parameter that defines a bidirectional GRU layer.
        ///
        /// If set to `YES` then the input sequence is traversed in both directions and the two results
        /// are concatenated together on the channel-axis.
        /// Default value: `NO`.
        #[unsafe(method(bidirectional))]
        #[unsafe(method_family = none)]
        pub unsafe fn bidirectional(&self) -> bool;

        /// Setter for [`bidirectional`][Self::bidirectional].
        #[unsafe(method(setBidirectional:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setBidirectional(&self, bidirectional: bool);

        /// A parameter that enables the GRU layer to support training.
        ///
        /// If set to `YES` then the layer will produce training state tensor as a secondary output.
        /// Default value: `NO`.
        #[unsafe(method(training))]
        #[unsafe(method_family = none)]
        pub unsafe fn training(&self) -> bool;

        /// Setter for [`training`][Self::training].
        #[unsafe(method(setTraining:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setTraining(&self, training: bool);

        /// A parameter that controls the internal order of the GRU gates.
        ///
        /// If set to `YES` then the layer will use the gate-ordering `[ r, z, o ]` instead of default `[ z, r, o ]`.
        /// Default value: `NO`.
        #[unsafe(method(resetGateFirst))]
        #[unsafe(method_family = none)]
        pub unsafe fn resetGateFirst(&self) -> bool;

        /// Setter for [`resetGateFirst`][Self::resetGateFirst].
        #[unsafe(method(setResetGateFirst:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setResetGateFirst(&self, reset_gate_first: bool);

        /// A parameter that chooses between two variants for the reset gate computation.
        ///
        /// If set to `YES` then the layer will compute the intermediate value as `c[t] = ( b + (h[t-1] m ) R^T) r[t]`.
        /// Otherwise it's computed as `c[t] = (h[t-1] r[t] m) R^T`.
        /// Default value: `NO`.
        #[unsafe(method(resetAfter))]
        #[unsafe(method_family = none)]
        pub unsafe fn resetAfter(&self) -> bool;

        /// Setter for [`resetAfter`][Self::resetAfter].
        #[unsafe(method(setResetAfter:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setResetAfter(&self, reset_after: bool);

        /// A parameter that chooses between two variants for the final output computation.
        ///
        /// If set to `YES` then the layer will compute the final value as `h[t] = z[t] h[t-1] + (1-z[t]) o[t]`.
        /// Otherwise it's computed as `h[t] = (1-z[t]) h[t-1] + z[t] o[t]`.
        /// Default value: `NO`.
        #[unsafe(method(flipZ))]
        #[unsafe(method_family = none)]
        pub unsafe fn flipZ(&self) -> bool;

        /// Setter for [`flipZ`][Self::flipZ].
        #[unsafe(method(setFlipZ:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setFlipZ(&self, flip_z: bool);

        /// A parameter that defines the activation function to use with the update-gate of the GRU operation.
        ///
        /// Default value: `MPSGraphRNNActivationSigmoid`.
        #[unsafe(method(updateGateActivation))]
        #[unsafe(method_family = none)]
        pub unsafe fn updateGateActivation(&self) -> MPSGraphRNNActivation;

        /// Setter for [`updateGateActivation`][Self::updateGateActivation].
        #[unsafe(method(setUpdateGateActivation:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setUpdateGateActivation(&self, update_gate_activation: MPSGraphRNNActivation);

        /// A parameter that defines the activation function to use with the reset-gate of the GRU operation.
        ///
        /// Default value: `MPSGraphRNNActivationSigmoid`.
        #[unsafe(method(resetGateActivation))]
        #[unsafe(method_family = none)]
        pub unsafe fn resetGateActivation(&self) -> MPSGraphRNNActivation;

        /// Setter for [`resetGateActivation`][Self::resetGateActivation].
        #[unsafe(method(setResetGateActivation:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setResetGateActivation(&self, reset_gate_activation: MPSGraphRNNActivation);

        /// A parameter that defines the activation function to use with the output-gate of the GRU operation.
        ///
        /// Default value: `MPSGraphRNNActivationTanh`.
        #[unsafe(method(outputGateActivation))]
        #[unsafe(method_family = none)]
        pub unsafe fn outputGateActivation(&self) -> MPSGraphRNNActivation;

        /// Setter for [`outputGateActivation`][Self::outputGateActivation].
        #[unsafe(method(setOutputGateActivation:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setOutputGateActivation(&self, output_gate_activation: MPSGraphRNNActivation);

        /// Creates a GRU descriptor with default values.
        #[unsafe(method(descriptor))]
        #[unsafe(method_family = none)]
        pub unsafe fn descriptor() -> Option<Retained<Self>>;
    );
}
499
/// Methods declared on superclass `NSObject`.
#[cfg(feature = "MPSGraphCore")]
impl MPSGraphGRUDescriptor {
    extern_methods!(
        // Plain `-init`; prefer the `descriptor()` class method, which sets
        // the documented default values.
        #[unsafe(method(init))]
        #[unsafe(method_family = init)]
        pub unsafe fn init(this: Allocated<Self>) -> Retained<Self>;

        // `+new` — equivalent to `alloc` followed by `init`.
        #[unsafe(method(new))]
        #[unsafe(method_family = new)]
        pub unsafe fn new() -> Retained<Self>;
    );
}
513
514/// MPSGraphRNNOps.
515#[cfg(all(feature = "MPSGraph", feature = "MPSGraphCore"))]
516impl MPSGraph {
517 extern_methods!(
518 #[cfg(feature = "MPSGraphTensor")]
519 /// Creates a single-gate RNN operation and returns the value and optionally the training state tensor.
520 ///
521 /// This operation returns tensors `h` and optionally `z` that are defined recursively as follows:
522 /// ```md
523 /// for t = 0 to T-1
524 /// z[t] = x[t] W^T + (h[t-1]m) R^T + b
525 /// h[t] = activation( z[t] ), where
526 /// ```
527 /// `W` is optional `inputWeight`, `R` is `recurrentWeight`, `b` is `bias`, `m` is optional `mask`,
528 /// `x[t]` is `source` `h[t]` is the first output, `z[t]` is the second output (optional) and `h[-1]` is `initState`.
529 /// See ``MPSGraphSingleGateRNNDescriptor`` for different `activation` options.
530 ///
531 /// - Parameters:
532 /// - source: A tensor that contains the source data `x[t]` with the data layout [T,N,I].
533 /// In case `inputWeight = nil` and `bidirectional = NO` then the layout is [T,N,H] and
534 /// for `inputWeight = nil` and `bidirectional = YES` the layout is [T,N,2H].
535 /// - recurrentWeight: A tensor containing the recurrent weights `R`. For `bidirectional` the layout is [2,H,H] and otherwise it is [H,H].
536 /// - inputWeight: A tensor containing the input weights matrix `W` - optional, if missing the operation assumes a diagonal unit-matrix.
537 /// For `bidirectional` the layout is [2H,I] and otherwise it is [H,I].
538 /// - bias: A tensor containing the bias `b` - optional, if missing the operation assumes zeroes. For `bidirectional` the layout is [2H] and otherwise it is [H].
539 /// - initState: The initial internal state of the RNN `h[-1]` - optional, if missing the operation assumes zeroes. For `bidirectional` the layout is [N,2H] and otherwise it is [N,H].
540 /// - mask: A tensor containing the mask `m` - optional, if missing the operation assumes ones. This is useful for dropout support.
541 /// - descriptor: A descriptor that defines the parameters for the RNN operation.
542 /// - name: The name for the operation.
543 /// - Returns: A valid MPSGraphTensor array of size 1 or 2, depending on value of `descriptor.training`. The layout of the both outputs is [T,N,H] or [T,N,2H] for bidirectional.
544 #[unsafe(method(singleGateRNNWithSourceTensor:recurrentWeight:inputWeight:bias:initState:mask:descriptor:name:))]
545 #[unsafe(method_family = none)]
546 pub unsafe fn singleGateRNNWithSourceTensor_recurrentWeight_inputWeight_bias_initState_mask_descriptor_name(
547 &self,
548 source: &MPSGraphTensor,
549 recurrent_weight: &MPSGraphTensor,
550 input_weight: Option<&MPSGraphTensor>,
551 bias: Option<&MPSGraphTensor>,
552 init_state: Option<&MPSGraphTensor>,
553 mask: Option<&MPSGraphTensor>,
554 descriptor: &MPSGraphSingleGateRNNDescriptor,
555 name: Option<&NSString>,
556 ) -> Retained<NSArray<MPSGraphTensor>>;
557
558 #[cfg(feature = "MPSGraphTensor")]
559 /// Creates a single-gate RNN operation and returns the value and optionally the training state tensor.
560 ///
561 /// This operation returns tensors `h` and optionally `z` that are defined recursively as follows:
562 /// ```md
563 /// for t = 0 to T-1
564 /// z[t] = x[t] W^T + (h[t-1]m) R^T + b
565 /// h[t] = activation( z[t] ), where
566 /// ```
567 /// `W` is optional `inputWeight`, `R` is `recurrentWeight`, `b` is `bias`, `m` is optional `mask`,
568 /// `x[t]` is `source` `h[t]` is the first output, `z[t]` is the second output (optional) and `h[-1]` is `initState`.
569 /// See ``MPSGraphSingleGateRNNDescriptor`` for different `activation` options.
570 ///
571 /// - Parameters:
572 /// - source: A tensor that contains the source data `x[t]` with the data layout [T,N,I].
573 /// In case `inputWeight = nil` and `bidirectional = NO` then the layout is [T,N,H] and
574 /// for `inputWeight = nil` and `bidirectional = YES` the layout is [T,N,2H].
575 /// - recurrentWeight: A tensor containing the recurrent weights `R`. For `bidirectional` the layout is [2,H,H] and otherwise it is [H,H].
576 /// - inputWeight: A tensor containing the input weights matrix `W` - optional, if missing the operation assumes a diagonal unit-matrix.
577 /// For `bidirectional` the layout is [2H,I] and otherwise it is [H,I].
578 /// - bias: A tensor containing the bias `b` - optional, if missing the operation assumes zeroes. For `bidirectional` the layout is [2H] and otherwise it is [H].
579 /// - initState: The initial internal state of the RNN `h[-1]` - optional, if missing the operation assumes zeroes. For `bidirectional` the layout is [N,2H] and otherwise it is [N,H].
580 /// - descriptor: A descriptor that defines the parameters for the RNN operation.
581 /// - name: The name for the operation.
582 /// - Returns: A valid MPSGraphTensor array of size 1 or 2, depending on value of `descriptor.training`. The layout of the both outputs is [T,N,H] or [T,N,2H] for bidirectional.
583 #[unsafe(method(singleGateRNNWithSourceTensor:recurrentWeight:inputWeight:bias:initState:descriptor:name:))]
584 #[unsafe(method_family = none)]
585 pub unsafe fn singleGateRNNWithSourceTensor_recurrentWeight_inputWeight_bias_initState_descriptor_name(
586 &self,
587 source: &MPSGraphTensor,
588 recurrent_weight: &MPSGraphTensor,
589 input_weight: Option<&MPSGraphTensor>,
590 bias: Option<&MPSGraphTensor>,
591 init_state: Option<&MPSGraphTensor>,
592 descriptor: &MPSGraphSingleGateRNNDescriptor,
593 name: Option<&NSString>,
594 ) -> Retained<NSArray<MPSGraphTensor>>;
595
596 #[cfg(feature = "MPSGraphTensor")]
597 /// Creates a single-gate RNN operation and returns the value and optionally the training state tensor.
598 ///
599 /// This operation returns tensors `h` and optionally `z` that are defined recursively as follows:
600 /// ```md
601 /// for t = 0 to T-1
602 /// z[t] = x[t] W^T + (h[t-1]m) R^T + b
603 /// h[t] = activation( z[t] ), where
604 /// ```
605 /// `W` is optional `inputWeight`, `R` is `recurrentWeight`, `b` is `bias`, `m` is optional `mask`,
606 /// `x[t]` is `source` `h[t]` is the first output, `z[t]` is the second output (optional) and `h[-1]` is `initState`.
607 /// See ``MPSGraphSingleGateRNNDescriptor`` for different `activation` options.
608 ///
609 /// - Parameters:
610 /// - source: A tensor that contains the source data `x[t]` with the data layout [T,N,I].
611 /// In case `inputWeight = nil` and `bidirectional = NO` then the layout is [T,N,H] and
612 /// for `inputWeight = nil` and `bidirectional = YES` the layout is [T,N,2H].
613 /// - recurrentWeight: A tensor containing the recurrent weights `R`. For `bidirectional` the layout is [2,H,H] and otherwise it is [H,H].
614 /// - initState: The initial internal state of the RNN `h[-1]` - optional, if missing the operation assumes zeroes. For `bidirectional` the layout is [N,2H] and otherwise it is [N,H].
615 /// - descriptor: A descriptor that defines the parameters for the RNN operation.
616 /// - name: The name for the operation.
617 /// - Returns: A valid MPSGraphTensor array of size 1 or 2, depending on value of `descriptor.training`. The layout of the both outputs is [T,N,H] or [T,N,2H] for bidirectional.
618 #[unsafe(method(singleGateRNNWithSourceTensor:recurrentWeight:initState:descriptor:name:))]
619 #[unsafe(method_family = none)]
620 pub unsafe fn singleGateRNNWithSourceTensor_recurrentWeight_initState_descriptor_name(
621 &self,
622 source: &MPSGraphTensor,
623 recurrent_weight: &MPSGraphTensor,
624 init_state: Option<&MPSGraphTensor>,
625 descriptor: &MPSGraphSingleGateRNNDescriptor,
626 name: Option<&NSString>,
627 ) -> Retained<NSArray<MPSGraphTensor>>;
628
629 #[cfg(feature = "MPSGraphTensor")]
630 /// Creates a single-gate RNN gradient operation and returns the gradient tensor values.
631 ///
632 /// For details of this operation and parameters, refer to documentation of
633 /// ``MPSGraph/singleGateRNNWithSourceTensor:recurrentWeight:inputWeight:bias:initState:mask:descriptor:name:``.
634 ///
635 /// - Parameters:
636 /// - source: A tensor that contains the source data `x[t]` with the data layout [T,N,I].
637 /// In case `inputWeight = nil` and `bidirectional = NO` then the layout is [T,N,H] and
638 /// for `inputWeight = nil` and `bidirectional = YES` the layout is [T,N,2H].
639 /// - recurrentWeight: A tensor containing the recurrent weights `R`. For `bidirectional` the layout is [2,H,H] and otherwise it is [H,H].
640 /// Note: For `bidirectional` this tensor must have a static shape.
641 /// - sourceGradient: The input gradient, that is the gradient of a tensor with respect to the first output of the forward pass.
642 /// - zState: The second output of
643 /// ``MPSGraph/singleGateRNNWithSourceTensor:recurrentWeight:inputWeight:bias:initState:mask:descriptor:name:``
644 /// with `descriptor.training = YES`.
645 /// - stateGradient: The input gradient coming from the future timestep - optional, if missing the operation assumes zeroes.
646 /// - inputWeight: A tensor containing the input weights matrix `W` - optional, if missing the operation assumes a diagonal unit-matrix.
647 /// For `bidirectional` the layout is [2H,I] and otherwise it is [H,I].
648 /// - bias: A tensor containing the bias `b` - optional, if missing the operation assumes zeroes. For `bidirectional` the layout is [2H] and otherwise it is [H].
649 /// - initState: The initial internal state of the RNN `h[-1]` - optional, if missing the operation assumes zeroes. For `bidirectional` the layout is [N,2H] and otherwise it is [N,H].
650 /// - mask: A tensor containing the mask `m` - optional, if missing the operation assumes ones. This is useful for dropout support.
651 /// - descriptor: A descriptor that defines the parameters for the RNN operation.
652 /// - name: The name for the operation.
653 /// - Returns: A valid `MPSGraphTensor` array containing gradients for each input tensor, except for `sourceGradient` and `mask`.
654 /// In case an input is `nil`, no gradient will be returned for it.
655 /// The order of the gradients will be: for `source`, for `recurrentWeight`, for `inputWeight`, for `bias` and finally for `initState`.
656 #[unsafe(method(singleGateRNNGradientsWithSourceTensor:recurrentWeight:sourceGradient:zState:stateGradient:inputWeight:bias:initState:mask:descriptor:name:))]
657 #[unsafe(method_family = none)]
658 pub unsafe fn singleGateRNNGradientsWithSourceTensor_recurrentWeight_sourceGradient_zState_stateGradient_inputWeight_bias_initState_mask_descriptor_name(
659 &self,
660 source: &MPSGraphTensor,
661 recurrent_weight: &MPSGraphTensor,
662 source_gradient: &MPSGraphTensor,
663 z_state: &MPSGraphTensor,
664 state_gradient: Option<&MPSGraphTensor>,
665 input_weight: Option<&MPSGraphTensor>,
666 bias: Option<&MPSGraphTensor>,
667 init_state: Option<&MPSGraphTensor>,
668 mask: Option<&MPSGraphTensor>,
669 descriptor: &MPSGraphSingleGateRNNDescriptor,
670 name: Option<&NSString>,
671 ) -> Retained<NSArray<MPSGraphTensor>>;
672
#[cfg(feature = "MPSGraphTensor")]
/// Creates a single-gate RNN gradient operation and returns the gradient tensor values.
///
/// For details of this operation and parameters, refer to documentation of
/// ``MPSGraph/singleGateRNNWithSourceTensor:recurrentWeight:inputWeight:bias:initState:mask:descriptor:name:``.
///
/// Note: This is the convenience variant without the `stateGradient` input;
/// the operation assumes zeroes for the gradient coming from the future timestep.
///
/// - Parameters:
/// - source: A tensor that contains the source data `x[t]` with the data layout [T,N,I].
/// In case `inputWeight = nil` and `bidirectional = NO` then the layout is [T,N,H] and
/// for `inputWeight = nil` and `bidirectional = YES` the layout is [T,N,2H].
/// - recurrentWeight: A tensor containing the recurrent weights `R`. For `bidirectional` the layout is [2,H,H] and otherwise it is [H,H].
/// Note: For `bidirectional` this tensor must have a static shape.
/// - sourceGradient: The input gradient, that is the gradient of a tensor with respect to the first output of the forward pass.
/// - zState: The second output of
/// ``MPSGraph/singleGateRNNWithSourceTensor:recurrentWeight:inputWeight:bias:initState:mask:descriptor:name:``
/// with `descriptor.training = YES`.
/// - inputWeight: A tensor containing the input weights matrix `W` - optional, if missing the operation assumes a diagonal unit-matrix.
/// For `bidirectional` the layout is [2H,I] and otherwise it is [H,I].
/// - bias: A tensor containing the bias `b` - optional, if missing the operation assumes zeroes. For `bidirectional` the layout is [2H] and otherwise it is [H].
/// - initState: The initial internal state of the RNN `h[-1]` - optional, if missing the operation assumes zeroes. For `bidirectional` the layout is [N,2H] and otherwise it is [N,H].
/// - mask: A tensor containing the mask `m` - optional, if missing the operation assumes ones. This is useful for dropout support.
/// - descriptor: A descriptor that defines the parameters for the RNN operation.
/// - name: The name for the operation.
/// - Returns: A valid `MPSGraphTensor` array containing gradients for each input tensor, except for `sourceGradient` and `mask`.
/// In case an input is `nil`, no gradient will be returned for it.
/// The order of the gradients will be: for `source`, for `recurrentWeight`, for `inputWeight`, for `bias` and finally for `initState`.
#[unsafe(method(singleGateRNNGradientsWithSourceTensor:recurrentWeight:sourceGradient:zState:inputWeight:bias:initState:mask:descriptor:name:))]
#[unsafe(method_family = none)]
pub unsafe fn singleGateRNNGradientsWithSourceTensor_recurrentWeight_sourceGradient_zState_inputWeight_bias_initState_mask_descriptor_name(
    &self,
    source: &MPSGraphTensor,
    recurrent_weight: &MPSGraphTensor,
    source_gradient: &MPSGraphTensor,
    z_state: &MPSGraphTensor,
    input_weight: Option<&MPSGraphTensor>,
    bias: Option<&MPSGraphTensor>,
    init_state: Option<&MPSGraphTensor>,
    mask: Option<&MPSGraphTensor>,
    descriptor: &MPSGraphSingleGateRNNDescriptor,
    name: Option<&NSString>,
) -> Retained<NSArray<MPSGraphTensor>>;
714
#[cfg(feature = "MPSGraphTensor")]
/// Creates a single-gate RNN gradient operation and returns the gradient tensor values.
///
/// For details of this operation and parameters, refer to documentation of
/// ``MPSGraph/singleGateRNNWithSourceTensor:recurrentWeight:inputWeight:bias:initState:mask:descriptor:name:``.
///
/// Note: This is the convenience variant without the `stateGradient` and `mask` inputs;
/// the operation assumes zeroes for the future-timestep gradient and ones for the mask.
///
/// - Parameters:
/// - source: A tensor that contains the source data `x[t]` with the data layout [T,N,I].
/// In case `inputWeight = nil` and `bidirectional = NO` then the layout is [T,N,H] and
/// for `inputWeight = nil` and `bidirectional = YES` the layout is [T,N,2H].
/// - recurrentWeight: A tensor containing the recurrent weights `R`. For `bidirectional` the layout is [2,H,H] and otherwise it is [H,H].
/// Note: For `bidirectional` this tensor must have a static shape.
/// - sourceGradient: The input gradient, that is the gradient of a tensor with respect to the first output of the forward pass.
/// - zState: The second output of
/// ``MPSGraph/singleGateRNNWithSourceTensor:recurrentWeight:inputWeight:bias:initState:mask:descriptor:name:``
/// with `descriptor.training = YES`.
/// - inputWeight: A tensor containing the input weights matrix `W` - optional, if missing the operation assumes a diagonal unit-matrix.
/// For `bidirectional` the layout is [2H,I] and otherwise it is [H,I].
/// - bias: A tensor containing the bias `b` - optional, if missing the operation assumes zeroes. For `bidirectional` the layout is [2H] and otherwise it is [H].
/// - initState: The initial internal state of the RNN `h[-1]` - optional, if missing the operation assumes zeroes. For `bidirectional` the layout is [N,2H] and otherwise it is [N,H].
/// - mask: A tensor containing the mask `m` - optional, if missing the operation assumes ones. This is useful for dropout support.
/// - descriptor: A descriptor that defines the parameters for the RNN operation.
/// - name: The name for the operation.
/// - Returns: A valid `MPSGraphTensor` array containing gradients for each input tensor, except for `sourceGradient` and `mask`.
/// In case an input is `nil`, no gradient will be returned for it.
/// The order of the gradients will be: for `source`, for `recurrentWeight`, for `inputWeight`, for `bias` and finally for `initState`.
#[unsafe(method(singleGateRNNGradientsWithSourceTensor:recurrentWeight:sourceGradient:zState:inputWeight:bias:initState:mask:descriptor:name:))]
#[unsafe(method_family = none)]
pub unsafe fn singleGateRNNGradientsWithSourceTensor_recurrentWeight_sourceGradient_zState_inputWeight_bias_initState_mask_descriptor_name(
    &self,
    source: &MPSGraphTensor,
    recurrent_weight: &MPSGraphTensor,
    source_gradient: &MPSGraphTensor,
    z_state: &MPSGraphTensor,
    input_weight: Option<&MPSGraphTensor>,
    bias: Option<&MPSGraphTensor>,
    init_state: Option<&MPSGraphTensor>,
    mask: Option<&MPSGraphTensor>,
    descriptor: &MPSGraphSingleGateRNNDescriptor,
    name: Option<&NSString>,
) -> Retained<NSArray<MPSGraphTensor>>;
754
#[cfg(feature = "MPSGraphTensor")]
/// Creates a single-gate RNN gradient operation and returns the gradient tensor values.
///
/// For details of this operation and parameters, refer to documentation of
/// ``MPSGraph/singleGateRNNWithSourceTensor:recurrentWeight:inputWeight:bias:initState:mask:descriptor:name:``.
///
/// Note: This is the convenience variant without the `stateGradient`, `inputWeight`, `bias`
/// and `mask` inputs; the operation assumes their documented default values
/// (zeroes, diagonal unit-matrix, zeroes and ones, respectively).
///
/// - Parameters:
/// - source: A tensor that contains the source data `x[t]` with the data layout [T,N,I].
/// In case `inputWeight = nil` and `bidirectional = NO` then the layout is [T,N,H] and
/// for `inputWeight = nil` and `bidirectional = YES` the layout is [T,N,2H].
/// - recurrentWeight: A tensor containing the recurrent weights `R`. For `bidirectional` the layout is [2,H,H] and otherwise it is [H,H].
/// Note: For `bidirectional` this tensor must have a static shape.
/// - sourceGradient: The input gradient, that is the gradient of a tensor with respect to the first output of the forward pass.
/// - zState: The second output of
/// ``MPSGraph/singleGateRNNWithSourceTensor:recurrentWeight:inputWeight:bias:initState:mask:descriptor:name:``
/// with `descriptor.training = YES`.
/// - initState: The initial internal state of the RNN `h[-1]` - optional, if missing the operation assumes zeroes. For `bidirectional` the layout is [N,2H] and otherwise it is [N,H].
/// - descriptor: A descriptor that defines the parameters for the RNN operation.
/// - name: The name for the operation.
/// - Returns: A valid `MPSGraphTensor` array containing gradients for each input tensor, except for `sourceGradient` and `mask`.
/// In case an input is `nil`, no gradient will be returned for it.
/// The order of the gradients will be: for `source`, for `recurrentWeight`, for `inputWeight`, for `bias` and finally for `initState`.
#[unsafe(method(singleGateRNNGradientsWithSourceTensor:recurrentWeight:sourceGradient:zState:initState:descriptor:name:))]
#[unsafe(method_family = none)]
pub unsafe fn singleGateRNNGradientsWithSourceTensor_recurrentWeight_sourceGradient_zState_initState_descriptor_name(
    &self,
    source: &MPSGraphTensor,
    recurrent_weight: &MPSGraphTensor,
    source_gradient: &MPSGraphTensor,
    z_state: &MPSGraphTensor,
    init_state: Option<&MPSGraphTensor>,
    descriptor: &MPSGraphSingleGateRNNDescriptor,
    name: Option<&NSString>,
) -> Retained<NSArray<MPSGraphTensor>>;
789
#[cfg(feature = "MPSGraphTensor")]
/// Creates an LSTM operation and returns the value tensor and optionally the cell state tensor and the training state tensor.
///
/// This operation returns tensors `h` and optionally `c` and optionally `z` that are defined recursively as follows:
/// ```md
/// for t = 0 to T-1
/// z[t] = [i, f, z, o][t] = f( (h[t-1] m) R^T + x'[t] + p c[t-1] )
/// x'[t] = x[t] W^T + b
/// c[t] = f[t]c[t-1] + i[t]z[t]
/// h[t] = o[t]g(c[t]), where
/// ```
/// `W` is optional `inputWeight`, `R` is `recurrentWeight`, `b` is optional `bias`, `m` is optional `mask`,
/// `x[t]` is `source` `h[t]` is the first output, `c[t]` is the second output (optional),
/// `z[t]` is either the second or third output (optional), `c[-1]` is `initCell` and `h[-1]` is `initState`.
/// `p` is an optional peephole vector.
/// See ``MPSGraphLSTMDescriptor`` for different `activation` options for `f()` and `g()`.
///
/// - Parameters:
/// - source: A tensor containing the source data `x[t]` with the data layout [T,N,I]. In case `inputWeight = nil` and `bidirectional = NO` then the layout is [T,N,4H] and for `inputWeight = nil` and `bidirectional = YES` the layout is [T,N,8H].
/// - recurrentWeight: A tensor containing the recurrent weights `R`. For `bidirectional` the layout is [2,4H,H] and otherwise it is [4H,H].
/// - inputWeight: A tensor containing the input weights matrix `W` - optional, if missing the operation assumes a diagonal unit-matrix. For `bidirectional` the layout is [8H,I] and otherwise it is [4H,I].
/// - bias: A tensor containing the bias `b` - optional, if missing the operation assumes zeroes. For `bidirectional` the layout is [8H] and otherwise it is [4H].
/// - initState: The initial internal state of the LSTM `h[-1]` - optional, if missing the operation assumes zeroes.
/// For `bidirectional` the layout is [N,2H] and otherwise it is [N,H].
/// - initCell: The initial internal cell of the LSTM `c[-1]` - optional, if missing the operation assumes zeroes.
/// For `bidirectional` the layout is [N,2H] and otherwise it is [N,H].
/// - mask: A tensor containing the mask `m` - optional, if missing the operation assumes ones. Useful for dropout.
/// - peephole: A tensor containing the peephole vector `v` - optional, if missing the operation assumes zeroes. Shape is [4H], ie. a vector for each gate, or [2,4H] for bidirectional.
/// - descriptor: A descriptor that defines the parameters for the LSTM operation.
/// - name: The name for the operation.
/// - Returns: A valid `MPSGraphTensor` array of size 1 or 2 or 3, depending on values of `descriptor.produceCell` and `descriptor.training`.
/// The layout of the both state and cell outputs are [T,N,H] or [T,N,2H] for bidirectional, and the layout of the trainingState output is [T,N,4H] or [T,N,8H] for bidirectional.
#[unsafe(method(LSTMWithSourceTensor:recurrentWeight:inputWeight:bias:initState:initCell:mask:peephole:descriptor:name:))]
#[unsafe(method_family = none)]
pub unsafe fn LSTMWithSourceTensor_recurrentWeight_inputWeight_bias_initState_initCell_mask_peephole_descriptor_name(
    &self,
    source: &MPSGraphTensor,
    recurrent_weight: &MPSGraphTensor,
    input_weight: Option<&MPSGraphTensor>,
    bias: Option<&MPSGraphTensor>,
    init_state: Option<&MPSGraphTensor>,
    init_cell: Option<&MPSGraphTensor>,
    mask: Option<&MPSGraphTensor>,
    peephole: Option<&MPSGraphTensor>,
    descriptor: &MPSGraphLSTMDescriptor,
    name: Option<&NSString>,
) -> Retained<NSArray<MPSGraphTensor>>;
837
#[cfg(feature = "MPSGraphTensor")]
/// Creates an LSTM operation and returns the value tensor and optionally the cell state tensor and the training state tensor.
///
/// This operation returns tensors `h` and optionally `c` and optionally `z` that are defined recursively as follows:
/// ```md
/// for t = 0 to T-1
/// z[t] = [i, f, z, o][t] = f( (h[t-1] m) R^T + x'[t] + p c[t-1] )
/// x'[t] = x[t] W^T + b
/// c[t] = f[t]c[t-1] + i[t]z[t]
/// h[t] = o[t]g(c[t]), where
/// ```
/// `W` is optional `inputWeight`, `R` is `recurrentWeight`, `b` is optional `bias`, `m` is optional `mask`,
/// `x[t]` is `source` `h[t]` is the first output, `c[t]` is the second output (optional),
/// `z[t]` is either the second or third output (optional), `c[-1]` is `initCell` and `h[-1]` is `initState`.
/// `p` is an optional peephole vector.
/// See ``MPSGraphLSTMDescriptor`` for different `activation` options for `f()` and `g()`.
///
/// - Parameters:
/// - source: A tensor containing the source data `x[t]` with the data layout [T,N,I]. In case `inputWeight = nil` and `bidirectional = NO` then the layout is [T,N,4H] and for `inputWeight = nil` and `bidirectional = YES` the layout is [T,N,8H].
/// - recurrentWeight: A tensor containing the recurrent weights `R`. For `bidirectional` the layout is [2,4H,H] and otherwise it is [4H,H].
/// - inputWeight: A tensor containing the input weights matrix `W` - optional, if missing the operation assumes a diagonal unit-matrix. For `bidirectional` the layout is [8H,I] and otherwise it is [4H,I].
/// - bias: A tensor containing the bias `b` - optional, if missing the operation assumes zeroes. For `bidirectional` the layout is [8H] and otherwise it is [4H].
/// - initState: The initial internal state of the LSTM `h[-1]` - optional, if missing the operation assumes zeroes.
/// For `bidirectional` the layout is [N,2H] and otherwise it is [N,H].
/// - initCell: The initial internal cell of the LSTM `c[-1]` - optional, if missing the operation assumes zeroes.
/// For `bidirectional` the layout is [N,2H] and otherwise it is [N,H].
/// - descriptor: A descriptor that defines the parameters for the LSTM operation.
/// - name: The name for the operation.
/// - Returns: A valid `MPSGraphTensor` array of size 1 or 2 or 3, depending on values of `descriptor.produceCell` and `descriptor.training`.
/// The layout of the both state and cell outputs are [T,N,H] or [T,N,2H] for bidirectional, and the layout of the trainingState output is [T,N,4H] or [T,N,8H] for bidirectional.
#[unsafe(method(LSTMWithSourceTensor:recurrentWeight:inputWeight:bias:initState:initCell:descriptor:name:))]
#[unsafe(method_family = none)]
pub unsafe fn LSTMWithSourceTensor_recurrentWeight_inputWeight_bias_initState_initCell_descriptor_name(
    &self,
    source: &MPSGraphTensor,
    recurrent_weight: &MPSGraphTensor,
    input_weight: Option<&MPSGraphTensor>,
    bias: Option<&MPSGraphTensor>,
    init_state: Option<&MPSGraphTensor>,
    init_cell: Option<&MPSGraphTensor>,
    descriptor: &MPSGraphLSTMDescriptor,
    name: Option<&NSString>,
) -> Retained<NSArray<MPSGraphTensor>>;
881
#[cfg(feature = "MPSGraphTensor")]
/// Creates an LSTM operation and returns the value tensor and optionally the cell state tensor and the training state tensor.
///
/// This operation returns tensors `h` and optionally `c` and optionally `z` that are defined recursively as follows:
/// ```md
/// for t = 0 to T-1
/// z[t] = [i, f, z, o][t] = f( (h[t-1] m) R^T + x'[t] + p c[t-1] )
/// x'[t] = x[t] W^T + b
/// c[t] = f[t]c[t-1] + i[t]z[t]
/// h[t] = o[t]g(c[t]), where
/// ```
/// `W` is optional `inputWeight`, `R` is `recurrentWeight`, `b` is optional `bias`, `m` is optional `mask`,
/// `x[t]` is `source` `h[t]` is the first output, `c[t]` is the second output (optional),
/// `z[t]` is either the second or third output (optional), `c[-1]` is `initCell` and `h[-1]` is `initState`.
/// `p` is an optional peephole vector.
/// See ``MPSGraphLSTMDescriptor`` for different `activation` options for `f()` and `g()`.
///
/// - Parameters:
/// - source: A tensor containing the source data `x[t]` with the data layout [T,N,I]. In case `inputWeight = nil` and `bidirectional = NO` then the layout is [T,N,4H] and for `inputWeight = nil` and `bidirectional = YES` the layout is [T,N,8H].
/// - recurrentWeight: A tensor containing the recurrent weights `R`. For `bidirectional` the layout is [2,4H,H] and otherwise it is [4H,H].
/// - initState: The initial internal state of the LSTM `h[-1]` - optional, if missing the operation assumes zeroes.
/// For `bidirectional` the layout is [N,2H] and otherwise it is [N,H].
/// - initCell: The initial internal cell of the LSTM `c[-1]` - optional, if missing the operation assumes zeroes.
/// For `bidirectional` the layout is [N,2H] and otherwise it is [N,H].
/// - descriptor: A descriptor that defines the parameters for the LSTM operation.
/// - name: The name for the operation.
/// - Returns: A valid `MPSGraphTensor` array of size 1 or 2 or 3, depending on values of `descriptor.produceCell` and `descriptor.training`.
/// The layout of the both state and cell outputs are [T,N,H] or [T,N,2H] for bidirectional, and the layout of the trainingState output is [T,N,4H] or [T,N,8H] for bidirectional.
#[unsafe(method(LSTMWithSourceTensor:recurrentWeight:initState:initCell:descriptor:name:))]
#[unsafe(method_family = none)]
pub unsafe fn LSTMWithSourceTensor_recurrentWeight_initState_initCell_descriptor_name(
    &self,
    source: &MPSGraphTensor,
    recurrent_weight: &MPSGraphTensor,
    init_state: Option<&MPSGraphTensor>,
    init_cell: Option<&MPSGraphTensor>,
    descriptor: &MPSGraphLSTMDescriptor,
    name: Option<&NSString>,
) -> Retained<NSArray<MPSGraphTensor>>;
921
#[cfg(feature = "MPSGraphTensor")]
/// Creates an LSTM gradient operation and returns the gradient tensor values.
///
/// For details of this operation and parameters, refer to documentation of
/// ``MPSGraph/LSTMWithSourceTensor:recurrentWeight:inputWeight:bias:initState:initCell:mask:peephole:descriptor:name:``.
///
/// - Parameters:
/// - source: A tensor containing the source data `x[t]` with the data layout [T,N,I]. In case `inputWeight = nil` and `bidirectional = NO` then the layout is [T,N,4H] and for `inputWeight = nil` and `bidirectional = YES` the layout is [T,N,8H].
/// - recurrentWeight: A tensor containing the recurrent weights `R`. For `bidirectional` the layout is [2,4H,H] and otherwise it is [4H,H].
/// - sourceGradient: The input gradient, that is the gradient of a tensor with respect to the first output of the forward pass.
/// - zState: The third output of
/// ``MPSGraph/LSTMWithSourceTensor:recurrentWeight:inputWeight:bias:initState:initCell:descriptor:name:``
/// with `descriptor.training = YES`.
/// - cellOutputFwd: The second output of
/// ``MPSGraph/LSTMWithSourceTensor:recurrentWeight:inputWeight:bias:initState:initCell:descriptor:name:``
/// with `descriptor.training = YES` or `descriptor.produceCell = YES`.
/// - stateGradient: The input gradient for state coming from the future timestep - optional, if missing the operation assumes zeroes.
/// - cellGradient: Input gradient for cell coming from the future timestep - optional, if missing the operation assumes zeroes.
/// - inputWeight: A tensor containing the input weights matrix `W` - optional, if missing the operation assumes a diagonal unit-matrix. For `bidirectional` the layout is [8H,I] and otherwise it is [4H,I].
/// - bias: A tensor containing the bias `b` - optional, if missing the operation assumes zeroes. For `bidirectional` the layout is [8H] and otherwise it is [4H].
/// - initState: The initial internal state of the LSTM `h[-1]` - optional, if missing the operation assumes zeroes.
/// For `bidirectional` the layout is [N,2H] and otherwise it is [N,H].
/// - initCell: The initial internal cell of the LSTM `c[-1]` - optional, if missing the operation assumes zeroes.
/// For `bidirectional` the layout is [N,2H] and otherwise it is [N,H].
/// - mask: A tensor containing the mask `m` - optional, if missing the operation assumes ones. Useful for dropout.
/// - peephole: A tensor containing the peephole vector `v` - optional, if missing the operation assumes zeroes. Shape is [4H], ie. a vector for each gate, or [2,4H] for bidirectional.
/// - descriptor: A descriptor that defines the parameters for the LSTM operation.
/// - name: The name for the operation.
/// - Returns: A valid `MPSGraphTensor` array containing gradients for each input tensor, except for `sourceGradient` and `mask`.
/// In case an input is nil, no gradient will be returned for it.
/// The order of the gradients will be: for `source`, for `recurrentWeight`, for `inputWeight`, for `bias`, for `peephole`, for `initState` and for `initCell`.
#[unsafe(method(LSTMGradientsWithSourceTensor:recurrentWeight:sourceGradient:zState:cellOutputFwd:stateGradient:cellGradient:inputWeight:bias:initState:initCell:mask:peephole:descriptor:name:))]
#[unsafe(method_family = none)]
pub unsafe fn LSTMGradientsWithSourceTensor_recurrentWeight_sourceGradient_zState_cellOutputFwd_stateGradient_cellGradient_inputWeight_bias_initState_initCell_mask_peephole_descriptor_name(
    &self,
    source: &MPSGraphTensor,
    recurrent_weight: &MPSGraphTensor,
    source_gradient: &MPSGraphTensor,
    z_state: &MPSGraphTensor,
    cell_output_fwd: &MPSGraphTensor,
    state_gradient: Option<&MPSGraphTensor>,
    cell_gradient: Option<&MPSGraphTensor>,
    input_weight: Option<&MPSGraphTensor>,
    bias: Option<&MPSGraphTensor>,
    init_state: Option<&MPSGraphTensor>,
    init_cell: Option<&MPSGraphTensor>,
    mask: Option<&MPSGraphTensor>,
    peephole: Option<&MPSGraphTensor>,
    descriptor: &MPSGraphLSTMDescriptor,
    name: Option<&NSString>,
) -> Retained<NSArray<MPSGraphTensor>>;
973
#[cfg(feature = "MPSGraphTensor")]
/// Creates an LSTM gradient operation and returns the gradient tensor values.
///
/// For details of this operation and parameters, refer to documentation of
/// ``MPSGraph/LSTMWithSourceTensor:recurrentWeight:inputWeight:bias:initState:initCell:mask:peephole:descriptor:name:``.
///
/// - Parameters:
/// - source: A tensor containing the source data `x[t]` with the data layout [T,N,I]. In case `inputWeight = nil` and `bidirectional = NO` then the layout is [T,N,4H] and for `inputWeight = nil` and `bidirectional = YES` the layout is [T,N,8H].
/// - recurrentWeight: A tensor containing the recurrent weights `R`. For `bidirectional` the layout is [2,4H,H] and otherwise it is [4H,H].
/// - sourceGradient: The input gradient, that is the gradient of a tensor with respect to the first output of the forward pass.
/// - zState: The third output of
/// ``MPSGraph/LSTMWithSourceTensor:recurrentWeight:inputWeight:bias:initState:initCell:descriptor:name:``
/// with `descriptor.training = YES`.
/// - cellOutputFwd: The second output of
/// ``MPSGraph/LSTMWithSourceTensor:recurrentWeight:inputWeight:bias:initState:initCell:descriptor:name:``
/// with `descriptor.training = YES` or `descriptor.produceCell = YES`.
/// - inputWeight: A tensor containing the input weights matrix `W` - optional, if missing the operation assumes a diagonal unit-matrix. For `bidirectional` the layout is [8H,I] and otherwise it is [4H,I].
/// - bias: A tensor containing the bias `b` - optional, if missing the operation assumes zeroes. For `bidirectional` the layout is [8H] and otherwise it is [4H].
/// - initState: The initial internal state of the LSTM `h[-1]` - optional, if missing the operation assumes zeroes.
/// For `bidirectional` the layout is [N,2H] and otherwise it is [N,H].
/// - initCell: The initial internal cell of the LSTM `c[-1]` - optional, if missing the operation assumes zeroes.
/// For `bidirectional` the layout is [N,2H] and otherwise it is [N,H].
/// - mask: A tensor containing the mask `m` - optional, if missing the operation assumes ones. Useful for dropout.
/// - descriptor: A descriptor that defines the parameters for the LSTM operation.
/// - name: The name for the operation.
/// - Returns: A valid `MPSGraphTensor` array containing gradients for each input tensor, except for `sourceGradient` and `mask`.
/// In case an input is nil, no gradient will be returned for it.
/// The order of the gradients will be: for `source`, for `recurrentWeight`, for `inputWeight`, for `bias`, for `initState` and for `initCell`.
#[unsafe(method(LSTMGradientsWithSourceTensor:recurrentWeight:sourceGradient:zState:cellOutputFwd:inputWeight:bias:initState:initCell:mask:descriptor:name:))]
#[unsafe(method_family = none)]
pub unsafe fn LSTMGradientsWithSourceTensor_recurrentWeight_sourceGradient_zState_cellOutputFwd_inputWeight_bias_initState_initCell_mask_descriptor_name(
    &self,
    source: &MPSGraphTensor,
    recurrent_weight: &MPSGraphTensor,
    source_gradient: &MPSGraphTensor,
    z_state: &MPSGraphTensor,
    cell_output_fwd: &MPSGraphTensor,
    input_weight: Option<&MPSGraphTensor>,
    bias: Option<&MPSGraphTensor>,
    init_state: Option<&MPSGraphTensor>,
    init_cell: Option<&MPSGraphTensor>,
    mask: Option<&MPSGraphTensor>,
    descriptor: &MPSGraphLSTMDescriptor,
    name: Option<&NSString>,
) -> Retained<NSArray<MPSGraphTensor>>;
1019
#[cfg(feature = "MPSGraphTensor")]
/// Creates an LSTM gradient operation and returns the gradient tensor values.
///
/// For details of this operation and parameters, refer to documentation of
/// ``MPSGraph/LSTMWithSourceTensor:recurrentWeight:inputWeight:bias:initState:initCell:mask:peephole:descriptor:name:``.
///
/// - Parameters:
/// - source: A tensor containing the source data `x[t]` with the data layout [T,N,I]. In case `inputWeight = nil` and `bidirectional = NO` then the layout is [T,N,4H] and for `inputWeight = nil` and `bidirectional = YES` the layout is [T,N,8H].
/// - recurrentWeight: A tensor containing the recurrent weights `R`. For `bidirectional` the layout is [2,4H,H] and otherwise it is [4H,H].
/// - sourceGradient: The input gradient, that is the gradient of a tensor with respect to the first output of the forward pass.
/// - zState: The third output of
/// ``MPSGraph/LSTMWithSourceTensor:recurrentWeight:inputWeight:bias:initState:initCell:descriptor:name:``
/// with `descriptor.training = YES`.
/// - cellOutputFwd: The second output of
/// ``MPSGraph/LSTMWithSourceTensor:recurrentWeight:inputWeight:bias:initState:initCell:descriptor:name:``
/// with `descriptor.training = YES` or `descriptor.produceCell = YES`.
/// - inputWeight: A tensor containing the input weights matrix `W` - optional, if missing the operation assumes a diagonal unit-matrix. For `bidirectional` the layout is [8H,I] and otherwise it is [4H,I].
/// - bias: A tensor containing the bias `b` - optional, if missing the operation assumes zeroes. For `bidirectional` the layout is [8H] and otherwise it is [4H].
/// - initState: The initial internal state of the LSTM `h[-1]` - optional, if missing the operation assumes zeroes.
/// For `bidirectional` the layout is [N,2H] and otherwise it is [N,H].
/// - initCell: The initial internal cell of the LSTM `c[-1]` - optional, if missing the operation assumes zeroes.
/// For `bidirectional` the layout is [N,2H] and otherwise it is [N,H].
/// - descriptor: A descriptor that defines the parameters for the LSTM operation.
/// - name: The name for the operation.
/// - Returns: A valid `MPSGraphTensor` array containing gradients for each input tensor, except for `sourceGradient` and `mask`.
/// In case an input is nil, no gradient will be returned for it.
/// The order of the gradients will be: for `source`, for `recurrentWeight`, for `inputWeight`, for `bias`, for `initState` and for `initCell`.
#[unsafe(method(LSTMGradientsWithSourceTensor:recurrentWeight:sourceGradient:zState:cellOutputFwd:inputWeight:bias:initState:initCell:descriptor:name:))]
#[unsafe(method_family = none)]
pub unsafe fn LSTMGradientsWithSourceTensor_recurrentWeight_sourceGradient_zState_cellOutputFwd_inputWeight_bias_initState_initCell_descriptor_name(
    &self,
    source: &MPSGraphTensor,
    recurrent_weight: &MPSGraphTensor,
    source_gradient: &MPSGraphTensor,
    z_state: &MPSGraphTensor,
    cell_output_fwd: &MPSGraphTensor,
    input_weight: Option<&MPSGraphTensor>,
    bias: Option<&MPSGraphTensor>,
    init_state: Option<&MPSGraphTensor>,
    init_cell: Option<&MPSGraphTensor>,
    descriptor: &MPSGraphLSTMDescriptor,
    name: Option<&NSString>,
) -> Retained<NSArray<MPSGraphTensor>>;
1063
#[cfg(feature = "MPSGraphTensor")]
/// Creates an LSTM gradient operation and returns the gradient tensor values.
///
/// For details of this operation and parameters, refer to documentation of
/// ``MPSGraph/LSTMWithSourceTensor:recurrentWeight:inputWeight:bias:initState:initCell:mask:peephole:descriptor:name:``.
///
/// Note: This is the convenience variant that omits all optional inputs
/// (`stateGradient`, `cellGradient`, `inputWeight`, `bias`, `initState`, `initCell`,
/// `mask` and `peephole`); the operation assumes their documented default values.
///
/// - Parameters:
/// - source: A tensor containing the source data `x[t]` with the data layout [T,N,I]. In case `inputWeight = nil` and `bidirectional = NO` then the layout is [T,N,4H] and for `inputWeight = nil` and `bidirectional = YES` the layout is [T,N,8H].
/// - recurrentWeight: A tensor containing the recurrent weights `R`. For `bidirectional` the layout is [2,4H,H] and otherwise it is [4H,H].
/// - sourceGradient: The input gradient, that is the gradient of a tensor with respect to the first output of the forward pass.
/// - zState: The third output of
/// ``MPSGraph/LSTMWithSourceTensor:recurrentWeight:inputWeight:bias:initState:initCell:descriptor:name:``
/// with `descriptor.training = YES`.
/// - cellOutputFwd: The second output of
/// ``MPSGraph/LSTMWithSourceTensor:recurrentWeight:inputWeight:bias:initState:initCell:descriptor:name:``
/// with `descriptor.training = YES` or `descriptor.produceCell = YES`.
/// - descriptor: A descriptor that defines the parameters for the LSTM operation.
/// - name: The name for the operation.
/// - Returns: A valid `MPSGraphTensor` array containing gradients for each input tensor, except for `sourceGradient` and `mask`.
/// In case an input is nil, no gradient will be returned for it.
/// The order of the gradients will be: for `source`, for `recurrentWeight`, for `inputWeight`, for `bias`, for `initState` and for `initCell`.
#[unsafe(method(LSTMGradientsWithSourceTensor:recurrentWeight:sourceGradient:zState:cellOutputFwd:descriptor:name:))]
#[unsafe(method_family = none)]
pub unsafe fn LSTMGradientsWithSourceTensor_recurrentWeight_sourceGradient_zState_cellOutputFwd_descriptor_name(
    &self,
    source: &MPSGraphTensor,
    recurrent_weight: &MPSGraphTensor,
    source_gradient: &MPSGraphTensor,
    z_state: &MPSGraphTensor,
    cell_output_fwd: &MPSGraphTensor,
    descriptor: &MPSGraphLSTMDescriptor,
    name: Option<&NSString>,
) -> Retained<NSArray<MPSGraphTensor>>;
1097
        #[cfg(feature = "MPSGraphTensor")]
        /// Creates a GRU operation and returns the value and optionally the training state tensor.
        ///
        /// This operation returns tensors `h` and optionally `z` that are defined recursively as follows:
        /// ```md
        /// for t = 0 to T-1
        /// z[t] = fz( (h[t-1] m) R^T + x[t] W^T + b ),
        /// r[t] = fr( (h[t-1] m) R^T + x[t] W^T + b ),
        /// c[t] = (h[t-1] r[t] m) R^T
        /// o[t] = fo( c[t] + x[t] W^T + b )
        /// h[t] = z[t]h[t-1] + (1-z[t])o[t]
        /// ```
        /// If `resetAfter = YES` then `c[t]` is replaced by
        /// ```md
        /// c[t] = ( (h[t-1] m) R^T + b2 ) r[t]
        /// ```
        /// If `flipZ = YES` then `h[t]` is replaced by
        /// ```md
        /// h[t] = (1-z[t])h[t-1] + z[t]o[t].
        /// ```
        /// `W` is optional `inputWeight`, `R` is `recurrentWeight`, `b` is optional `bias`, `m` is optional `mask`,
        /// `x[t]` is `source`, `h[t]` is the first output, `z[t]` is the second output (optional) and `h[-1]` is `initState`.
        /// `b2` is the optional `secondaryBias` vector, only used when `resetAfter = YES`.
        /// See ``MPSGraphGRUDescriptor`` for different `activation` options for `f()`.
        ///
        /// - Parameters:
        /// - source: A tensor containing the source data `x[t]` with the data layout [T,N,I]. In case `inputWeight = nil` and `bidirectional = NO` then the layout is [T,N,3H] and for `inputWeight = nil` and `bidirectional = YES` the layout is [T,N,6H].
        /// - recurrentWeight: A tensor containing the recurrent weights `R`. For `bidirectional` the layout is [2,3H,H] and otherwise it is [3H,H].
        /// - inputWeight: A tensor containing the input weights matrix `W` - optional, if missing the operation assumes a diagonal unit-matrix.
        /// For `bidirectional` the layout is [6H,I] and otherwise it is [3H,I].
        /// - bias: A tensor containing the bias `b` - optional, if missing the operation assumes zeroes. For `bidirectional` the layout is [6H] and otherwise it is [3H].
        /// - initState: The initial internal state of the GRU `h[-1]` - optional, if missing the operation assumes zeroes. For `bidirectional` the layout is [N,2H] and otherwise it is [N,H].
        /// - mask: A tensor containing the mask `m` - optional, if missing the operation assumes ones. Useful for dropout.
        /// - secondaryBias: A tensor containing the secondary bias vector `b2` - optional, if missing the operation assumes zeroes. Only used with `resetAfter = YES`. Shape is [H], i.e. a vector for each gate, or [2H] for bidirectional.
        /// - descriptor: A descriptor that defines the parameters for the GRU operation.
        /// - name: The name for the operation.
        /// - Returns: A valid `MPSGraphTensor` array of size 1 or 2 depending on value of `descriptor.training`.
        /// The layout of the state output is [T,N,H] or [T,N,2H] for bidirectional,
        /// and the layout of the `trainingState` output is [T,N,3H] or [T,N,6H] for bidirectional.
        #[unsafe(method(GRUWithSourceTensor:recurrentWeight:inputWeight:bias:initState:mask:secondaryBias:descriptor:name:))]
        #[unsafe(method_family = none)]
        pub unsafe fn GRUWithSourceTensor_recurrentWeight_inputWeight_bias_initState_mask_secondaryBias_descriptor_name(
            &self,
            source: &MPSGraphTensor,
            recurrent_weight: &MPSGraphTensor,
            input_weight: Option<&MPSGraphTensor>,
            bias: Option<&MPSGraphTensor>,
            init_state: Option<&MPSGraphTensor>,
            mask: Option<&MPSGraphTensor>,
            secondary_bias: Option<&MPSGraphTensor>,
            descriptor: &MPSGraphGRUDescriptor,
            name: Option<&NSString>,
        ) -> Retained<NSArray<MPSGraphTensor>>;
1151
        #[cfg(feature = "MPSGraphTensor")]
        /// Creates a GRU operation and returns the value and optionally the training state tensor.
        ///
        /// This operation returns tensors `h` and optionally `z` that are defined recursively as follows:
        /// ```md
        /// for t = 0 to T-1
        /// z[t] = fz( (h[t-1] m) R^T + x[t] W^T + b ),
        /// r[t] = fr( (h[t-1] m) R^T + x[t] W^T + b ),
        /// c[t] = (h[t-1] r[t] m) R^T
        /// o[t] = fo( c[t] + x[t] W^T + b )
        /// h[t] = z[t]h[t-1] + (1-z[t])o[t]
        /// ```
        /// If `resetAfter = YES` then `c[t]` is replaced by
        /// ```md
        /// c[t] = ( (h[t-1] m) R^T + b2 ) r[t]
        /// ```
        /// If `flipZ = YES` then `h[t]` is replaced by
        /// ```md
        /// h[t] = (1-z[t])h[t-1] + z[t]o[t].
        /// ```
        /// `W` is optional `inputWeight`, `R` is `recurrentWeight`, `b` is optional `bias`, `m` is optional `mask`,
        /// `x[t]` is `source`, `h[t]` is the first output, `z[t]` is the second output (optional) and `h[-1]` is `initState`.
        /// `b2` is the optional `secondaryBias` vector, only used when `resetAfter = YES`.
        /// See ``MPSGraphGRUDescriptor`` for different `activation` options for `f()`.
        ///
        /// - Parameters:
        /// - source: A tensor containing the source data `x[t]` with the data layout [T,N,I]. In case `inputWeight = nil` and `bidirectional = NO` then the layout is [T,N,3H] and for `inputWeight = nil` and `bidirectional = YES` the layout is [T,N,6H].
        /// - recurrentWeight: A tensor containing the recurrent weights `R`. For `bidirectional` the layout is [2,3H,H] and otherwise it is [3H,H].
        /// - inputWeight: A tensor containing the input weights matrix `W` - optional, if missing the operation assumes a diagonal unit-matrix.
        /// For `bidirectional` the layout is [6H,I] and otherwise it is [3H,I].
        /// - bias: A tensor containing the bias `b` - optional, if missing the operation assumes zeroes. For `bidirectional` the layout is [6H] and otherwise it is [3H].
        /// - initState: The initial internal state of the GRU `h[-1]` - optional, if missing the operation assumes zeroes. For `bidirectional` the layout is [N,2H] and otherwise it is [N,H].
        /// - descriptor: A descriptor that defines the parameters for the GRU operation.
        /// - name: The name for the operation.
        /// - Returns: A valid `MPSGraphTensor` array of size 1 or 2 depending on value of `descriptor.training`.
        /// The layout of the state output is [T,N,H] or [T,N,2H] for bidirectional,
        /// and the layout of the `trainingState` output is [T,N,3H] or [T,N,6H] for bidirectional.
        #[unsafe(method(GRUWithSourceTensor:recurrentWeight:inputWeight:bias:initState:descriptor:name:))]
        #[unsafe(method_family = none)]
        pub unsafe fn GRUWithSourceTensor_recurrentWeight_inputWeight_bias_initState_descriptor_name(
            &self,
            source: &MPSGraphTensor,
            recurrent_weight: &MPSGraphTensor,
            input_weight: Option<&MPSGraphTensor>,
            bias: Option<&MPSGraphTensor>,
            init_state: Option<&MPSGraphTensor>,
            descriptor: &MPSGraphGRUDescriptor,
            name: Option<&NSString>,
        ) -> Retained<NSArray<MPSGraphTensor>>;
1201
        #[cfg(feature = "MPSGraphTensor")]
        /// Creates a GRU operation and returns the value and optionally the training state tensor.
        ///
        /// This operation returns tensors `h` and optionally `z` that are defined recursively as follows:
        /// ```md
        /// for t = 0 to T-1
        /// z[t] = fz( (h[t-1] m) R^T + x[t] W^T + b ),
        /// r[t] = fr( (h[t-1] m) R^T + x[t] W^T + b ),
        /// c[t] = (h[t-1] r[t] m) R^T
        /// o[t] = fo( c[t] + x[t] W^T + b )
        /// h[t] = z[t]h[t-1] + (1-z[t])o[t]
        /// ```
        /// If `resetAfter = YES` then `c[t]` is replaced by
        /// ```md
        /// c[t] = ( (h[t-1] m) R^T + b2 ) r[t]
        /// ```
        /// If `flipZ = YES` then `h[t]` is replaced by
        /// ```md
        /// h[t] = (1-z[t])h[t-1] + z[t]o[t].
        /// ```
        /// `W` is optional `inputWeight`, `R` is `recurrentWeight`, `b` is optional `bias`, `m` is optional `mask`,
        /// `x[t]` is `source`, `h[t]` is the first output, `z[t]` is the second output (optional) and `h[-1]` is `initState`.
        /// `b2` is the optional `secondaryBias` vector, only used when `resetAfter = YES`.
        /// See ``MPSGraphGRUDescriptor`` for different `activation` options for `f()`.
        ///
        /// - Parameters:
        /// - source: A tensor containing the source data `x[t]` with the data layout [T,N,I]. In case `inputWeight = nil` and `bidirectional = NO` then the layout is [T,N,3H] and for `inputWeight = nil` and `bidirectional = YES` the layout is [T,N,6H].
        /// - recurrentWeight: A tensor containing the recurrent weights `R`. For `bidirectional` the layout is [2,3H,H] and otherwise it is [3H,H].
        /// - inputWeight: A tensor containing the input weights matrix `W` - optional, if missing the operation assumes a diagonal unit-matrix.
        /// For `bidirectional` the layout is [6H,I] and otherwise it is [3H,I].
        /// - bias: A tensor containing the bias `b` - optional, if missing the operation assumes zeroes. For `bidirectional` the layout is [6H] and otherwise it is [3H].
        /// - descriptor: A descriptor that defines the parameters for the GRU operation.
        /// - name: The name for the operation.
        /// - Returns: A valid `MPSGraphTensor` array of size 1 or 2 depending on value of `descriptor.training`.
        /// The layout of the state output is [T,N,H] or [T,N,2H] for bidirectional,
        /// and the layout of the `trainingState` output is [T,N,3H] or [T,N,6H] for bidirectional.
        #[unsafe(method(GRUWithSourceTensor:recurrentWeight:inputWeight:bias:descriptor:name:))]
        #[unsafe(method_family = none)]
        pub unsafe fn GRUWithSourceTensor_recurrentWeight_inputWeight_bias_descriptor_name(
            &self,
            source: &MPSGraphTensor,
            recurrent_weight: &MPSGraphTensor,
            input_weight: Option<&MPSGraphTensor>,
            bias: Option<&MPSGraphTensor>,
            descriptor: &MPSGraphGRUDescriptor,
            name: Option<&NSString>,
        ) -> Retained<NSArray<MPSGraphTensor>>;
1249
        #[cfg(feature = "MPSGraphTensor")]
        /// Creates a GRU gradient operation and returns the gradient tensor values.
        ///
        /// For details of this operation and parameters, refer to documentation of
        /// ``MPSGraph/GRUWithSourceTensor:recurrentWeight:inputWeight:bias:initState:mask:secondaryBias:descriptor:name:``.
        ///
        /// - Parameters:
        /// - source: A tensor containing the source data `x[t]` with the data layout [T,N,I]. In case `inputWeight = nil` and `bidirectional = NO` then the layout is [T,N,3H] and for `inputWeight = nil` and `bidirectional = YES` the layout is [T,N,6H].
        /// - recurrentWeight: A tensor containing the recurrent weights `R`. For `bidirectional` the layout is [2,3H,H] and otherwise it is [3H,H].
        /// - sourceGradient: The input gradient, that is the gradient of a tensor with respect to the first output of the forward pass.
        /// - zState: The second output of
        /// ``MPSGraph/GRUWithSourceTensor:recurrentWeight:inputWeight:bias:initState:descriptor:name:``
        /// with `descriptor.training = YES`.
        /// - outputFwd: The first output of
        /// ``MPSGraph/GRUWithSourceTensor:recurrentWeight:inputWeight:bias:initState:descriptor:name:``
        /// with `descriptor.training = YES`.
        /// - stateGradient: The input gradient for state coming from the future timestep - optional, if missing the operation assumes zeroes.
        /// - inputWeight: A tensor containing the input weights matrix `W` - optional, if missing the operation assumes a diagonal unit-matrix.
        /// For `bidirectional` the layout is [6H,I] and otherwise it is [3H,I].
        /// - bias: A tensor containing the bias `b` - optional, if missing the operation assumes zeroes. For `bidirectional` the layout is [6H] and otherwise it is [3H].
        /// - initState: The initial internal state of the GRU `h[-1]` - optional, if missing the operation assumes zeroes. For `bidirectional` the layout is [N,2H] and otherwise it is [N,H].
        /// - mask: A tensor containing the mask `m` - optional, if missing the operation assumes ones. Useful for dropout.
        /// - secondaryBias: A tensor containing the secondary bias vector `b2` - optional, if missing the operation assumes zeroes. Only used with `resetAfter = YES`. Shape is [H], i.e. a vector for each gate, or [2H] for bidirectional.
        /// - descriptor: A descriptor that defines the parameters for the GRU operation.
        /// - name: The name for the operation.
        /// - Returns: A valid `MPSGraphTensor` array containing gradients for each input tensor, except for `sourceGradient` and `mask`.
        /// In case an input is nil, no gradient will be returned for it.
        /// The order of the gradients will be: for `source`, for `recurrentWeight`, for `inputWeight`, for `bias`, for `initState` and for `secondaryBias`.
        #[unsafe(method(GRUGradientsWithSourceTensor:recurrentWeight:sourceGradient:zState:outputFwd:stateGradient:inputWeight:bias:initState:mask:secondaryBias:descriptor:name:))]
        #[unsafe(method_family = none)]
        pub unsafe fn GRUGradientsWithSourceTensor_recurrentWeight_sourceGradient_zState_outputFwd_stateGradient_inputWeight_bias_initState_mask_secondaryBias_descriptor_name(
            &self,
            source: &MPSGraphTensor,
            recurrent_weight: &MPSGraphTensor,
            source_gradient: &MPSGraphTensor,
            z_state: &MPSGraphTensor,
            output_fwd: &MPSGraphTensor,
            state_gradient: Option<&MPSGraphTensor>,
            input_weight: Option<&MPSGraphTensor>,
            bias: Option<&MPSGraphTensor>,
            init_state: Option<&MPSGraphTensor>,
            mask: Option<&MPSGraphTensor>,
            secondary_bias: Option<&MPSGraphTensor>,
            descriptor: &MPSGraphGRUDescriptor,
            name: Option<&NSString>,
        ) -> Retained<NSArray<MPSGraphTensor>>;
1296
        #[cfg(feature = "MPSGraphTensor")]
        /// Creates a GRU gradient operation and returns the gradient tensor values.
        ///
        /// For details of this operation and parameters, refer to documentation of
        /// ``MPSGraph/GRUWithSourceTensor:recurrentWeight:inputWeight:bias:initState:mask:secondaryBias:descriptor:name:``.
        ///
        /// - Parameters:
        /// - source: A tensor containing the source data `x[t]` with the data layout [T,N,I]. In case `inputWeight = nil` and `bidirectional = NO` then the layout is [T,N,3H] and for `inputWeight = nil` and `bidirectional = YES` the layout is [T,N,6H].
        /// - recurrentWeight: A tensor containing the recurrent weights `R`. For `bidirectional` the layout is [2,3H,H] and otherwise it is [3H,H].
        /// - sourceGradient: The input gradient, that is the gradient of a tensor with respect to the first output of the forward pass.
        /// - zState: The second output of
        /// ``MPSGraph/GRUWithSourceTensor:recurrentWeight:inputWeight:bias:initState:descriptor:name:``
        /// with `descriptor.training = YES`.
        /// - outputFwd: The first output of
        /// ``MPSGraph/GRUWithSourceTensor:recurrentWeight:inputWeight:bias:initState:descriptor:name:``
        /// with `descriptor.training = YES`.
        /// - inputWeight: A tensor containing the input weights matrix `W` - optional, if missing the operation assumes a diagonal unit-matrix.
        /// For `bidirectional` the layout is [6H,I] and otherwise it is [3H,I].
        /// - bias: A tensor containing the bias `b` - optional, if missing the operation assumes zeroes. For `bidirectional` the layout is [6H] and otherwise it is [3H].
        /// - initState: The initial internal state of the GRU `h[-1]` - optional, if missing the operation assumes zeroes. For `bidirectional` the layout is [N,2H] and otherwise it is [N,H].
        /// - descriptor: A descriptor that defines the parameters for the GRU operation.
        /// - name: The name for the operation.
        /// - Returns: A valid `MPSGraphTensor` array containing gradients for each input tensor, except for `sourceGradient` and `mask` (this variant takes no `mask` input).
        /// In case an input is nil, no gradient will be returned for it.
        /// The order of the gradients will be: for `source`, for `recurrentWeight`, for `inputWeight`, for `bias` and for `initState`.
        #[unsafe(method(GRUGradientsWithSourceTensor:recurrentWeight:sourceGradient:zState:outputFwd:inputWeight:bias:initState:descriptor:name:))]
        #[unsafe(method_family = none)]
        pub unsafe fn GRUGradientsWithSourceTensor_recurrentWeight_sourceGradient_zState_outputFwd_inputWeight_bias_initState_descriptor_name(
            &self,
            source: &MPSGraphTensor,
            recurrent_weight: &MPSGraphTensor,
            source_gradient: &MPSGraphTensor,
            z_state: &MPSGraphTensor,
            output_fwd: &MPSGraphTensor,
            input_weight: Option<&MPSGraphTensor>,
            bias: Option<&MPSGraphTensor>,
            init_state: Option<&MPSGraphTensor>,
            descriptor: &MPSGraphGRUDescriptor,
            name: Option<&NSString>,
        ) -> Retained<NSArray<MPSGraphTensor>>;
1337
        #[cfg(feature = "MPSGraphTensor")]
        /// Creates a GRU gradient operation and returns the gradient tensor values.
        ///
        /// For details of this operation and parameters, refer to documentation of
        /// ``MPSGraph/GRUWithSourceTensor:recurrentWeight:inputWeight:bias:initState:mask:secondaryBias:descriptor:name:``.
        ///
        /// - Parameters:
        /// - source: A tensor containing the source data `x[t]` with the data layout [T,N,I]. In case `inputWeight = nil` and `bidirectional = NO` then the layout is [T,N,3H] and for `inputWeight = nil` and `bidirectional = YES` the layout is [T,N,6H].
        /// - recurrentWeight: A tensor containing the recurrent weights `R`. For `bidirectional` the layout is [2,3H,H] and otherwise it is [3H,H].
        /// - sourceGradient: The input gradient, that is the gradient of a tensor with respect to the first output of the forward pass.
        /// - zState: The second output of
        /// ``MPSGraph/GRUWithSourceTensor:recurrentWeight:inputWeight:bias:initState:descriptor:name:``
        /// with `descriptor.training = YES`.
        /// - outputFwd: The first output of
        /// ``MPSGraph/GRUWithSourceTensor:recurrentWeight:inputWeight:bias:initState:descriptor:name:``
        /// with `descriptor.training = YES`.
        /// - inputWeight: A tensor containing the input weights matrix `W` - optional, if missing the operation assumes a diagonal unit-matrix.
        /// For `bidirectional` the layout is [6H,I] and otherwise it is [3H,I].
        /// - bias: A tensor containing the bias `b` - optional, if missing the operation assumes zeroes. For `bidirectional` the layout is [6H] and otherwise it is [3H].
        /// - descriptor: A descriptor that defines the parameters for the GRU operation.
        /// - name: The name for the operation.
        /// - Returns: A valid `MPSGraphTensor` array containing gradients for each input tensor, except for `sourceGradient` and `mask` (this variant takes no `mask` input).
        /// In case an input is nil, no gradient will be returned for it.
        /// The order of the gradients will be: for `source`, for `recurrentWeight`, for `inputWeight` and for `bias`.
        #[unsafe(method(GRUGradientsWithSourceTensor:recurrentWeight:sourceGradient:zState:outputFwd:inputWeight:bias:descriptor:name:))]
        #[unsafe(method_family = none)]
        pub unsafe fn GRUGradientsWithSourceTensor_recurrentWeight_sourceGradient_zState_outputFwd_inputWeight_bias_descriptor_name(
            &self,
            source: &MPSGraphTensor,
            recurrent_weight: &MPSGraphTensor,
            source_gradient: &MPSGraphTensor,
            z_state: &MPSGraphTensor,
            output_fwd: &MPSGraphTensor,
            input_weight: Option<&MPSGraphTensor>,
            bias: Option<&MPSGraphTensor>,
            descriptor: &MPSGraphGRUDescriptor,
            name: Option<&NSString>,
        ) -> Retained<NSArray<MPSGraphTensor>>;
1376 );
1377}