google_cloud_speech_v2/model.rs
// Copyright 2025 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     https://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
// Code generated by sidekick. DO NOT EDIT.

#![allow(rustdoc::redundant_explicit_links)]
#![allow(rustdoc::broken_intra_doc_links)]
#![no_implicit_prelude]
extern crate async_trait;
extern crate bytes;
extern crate gax;
extern crate gaxi;
extern crate lazy_static;
extern crate location;
extern crate longrunning;
extern crate lro;
extern crate reqwest;
extern crate rpc;
extern crate serde;
extern crate serde_json;
extern crate serde_with;
extern crate std;
extern crate tracing;
extern crate wkt;

mod debug;
mod deserialize;
mod serialize;

/// Request message for the
/// [CreateRecognizer][google.cloud.speech.v2.Speech.CreateRecognizer] method.
///
/// [google.cloud.speech.v2.Speech.CreateRecognizer]: crate::client::Speech::create_recognizer
#[derive(Clone, Default, PartialEq)]
#[non_exhaustive]
pub struct CreateRecognizerRequest {
    /// Required. The Recognizer to create.
    pub recognizer: std::option::Option<crate::model::Recognizer>,

    /// If set, validate the request and preview the Recognizer, but do not
    /// actually create it.
    pub validate_only: bool,

    /// The ID to use for the Recognizer, which will become the final component of
    /// the Recognizer's resource name.
    ///
    /// This value should be 4-63 characters, and valid characters
    /// are /[a-z][0-9]-/.
    pub recognizer_id: std::string::String,

    /// Required. The project and location where this Recognizer will be created.
    /// The expected format is `projects/{project}/locations/{location}`.
    pub parent: std::string::String,

    pub(crate) _unknown_fields: serde_json::Map<std::string::String, serde_json::Value>,
}

impl CreateRecognizerRequest {
    pub fn new() -> Self {
        std::default::Default::default()
    }

    /// Sets the value of [recognizer][crate::model::CreateRecognizerRequest::recognizer].
    ///
    /// # Example
    /// ```ignore,no_run
    /// # use google_cloud_speech_v2::model::CreateRecognizerRequest;
    /// use google_cloud_speech_v2::model::Recognizer;
    /// let x = CreateRecognizerRequest::new().set_recognizer(Recognizer::default()/* use setters */);
    /// ```
    pub fn set_recognizer<T>(mut self, v: T) -> Self
    where
        T: std::convert::Into<crate::model::Recognizer>,
    {
        self.recognizer = std::option::Option::Some(v.into());
        self
    }

    /// Sets or clears the value of [recognizer][crate::model::CreateRecognizerRequest::recognizer].
    ///
    /// # Example
    /// ```ignore,no_run
    /// # use google_cloud_speech_v2::model::CreateRecognizerRequest;
    /// use google_cloud_speech_v2::model::Recognizer;
    /// let x = CreateRecognizerRequest::new().set_or_clear_recognizer(Some(Recognizer::default()/* use setters */));
    /// let x = CreateRecognizerRequest::new().set_or_clear_recognizer(None::<Recognizer>);
    /// ```
    pub fn set_or_clear_recognizer<T>(mut self, v: std::option::Option<T>) -> Self
    where
        T: std::convert::Into<crate::model::Recognizer>,
    {
        self.recognizer = v.map(|x| x.into());
        self
    }

    /// Sets the value of [validate_only][crate::model::CreateRecognizerRequest::validate_only].
    ///
    /// # Example
    /// ```ignore,no_run
    /// # use google_cloud_speech_v2::model::CreateRecognizerRequest;
    /// let x = CreateRecognizerRequest::new().set_validate_only(true);
    /// ```
    pub fn set_validate_only<T: std::convert::Into<bool>>(mut self, v: T) -> Self {
        self.validate_only = v.into();
        self
    }

    /// Sets the value of [recognizer_id][crate::model::CreateRecognizerRequest::recognizer_id].
    ///
    /// # Example
    /// ```ignore,no_run
    /// # use google_cloud_speech_v2::model::CreateRecognizerRequest;
    /// let x = CreateRecognizerRequest::new().set_recognizer_id("example");
    /// ```
    pub fn set_recognizer_id<T: std::convert::Into<std::string::String>>(mut self, v: T) -> Self {
        self.recognizer_id = v.into();
        self
    }

    /// Sets the value of [parent][crate::model::CreateRecognizerRequest::parent].
    ///
    /// # Example
    /// ```ignore,no_run
    /// # use google_cloud_speech_v2::model::CreateRecognizerRequest;
    /// let x = CreateRecognizerRequest::new().set_parent("example");
    /// ```
    pub fn set_parent<T: std::convert::Into<std::string::String>>(mut self, v: T) -> Self {
        self.parent = v.into();
        self
    }
}

impl wkt::message::Message for CreateRecognizerRequest {
    fn typename() -> &'static str {
        "type.googleapis.com/google.cloud.speech.v2.CreateRecognizerRequest"
    }
}
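
// A minimal sketch (not part of the generated message definitions above) showing how
// the builder-style setters on `CreateRecognizerRequest` compose. The resource name
// and the recognizer ID used here are placeholder assumptions, not values required by
// the API.
#[cfg(test)]
mod create_recognizer_request_sketch {
    #[test]
    fn builder_style_setters_compose() {
        let req = crate::model::CreateRecognizerRequest::new()
            .set_parent("projects/my-project/locations/us-central1")
            .set_recognizer_id("my-recognizer")
            .set_validate_only(true);
        std::assert_eq!(req.parent, "projects/my-project/locations/us-central1");
        std::assert_eq!(req.recognizer_id, "my-recognizer");
        std::assert!(req.validate_only);
        // `recognizer` stays `None` until `set_recognizer` or `set_or_clear_recognizer` is used.
        std::assert!(req.recognizer.is_none());
    }
}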

/// Represents the metadata of a long-running operation.
#[derive(Clone, Default, PartialEq)]
#[non_exhaustive]
pub struct OperationMetadata {
    /// The time the operation was created.
    pub create_time: std::option::Option<wkt::Timestamp>,

    /// The time the operation was last updated.
    pub update_time: std::option::Option<wkt::Timestamp>,

    /// The resource path for the target of the operation.
    pub resource: std::string::String,

    /// The method that triggered the operation.
    pub method: std::string::String,

    /// The [KMS key
    /// name](https://cloud.google.com/kms/docs/resource-hierarchy#keys) with which
    /// the content of the Operation is encrypted. The expected format is
    /// `projects/{project}/locations/{location}/keyRings/{key_ring}/cryptoKeys/{crypto_key}`.
    pub kms_key_name: std::string::String,

    /// The [KMS key version
    /// name](https://cloud.google.com/kms/docs/resource-hierarchy#key_versions)
    /// with which content of the Operation is encrypted. The expected format is
    /// `projects/{project}/locations/{location}/keyRings/{key_ring}/cryptoKeys/{crypto_key}/cryptoKeyVersions/{crypto_key_version}`.
    pub kms_key_version_name: std::string::String,

    /// The percent progress of the Operation. Values can range from 0-100. If the
    /// value is 100, then the operation is finished.
    pub progress_percent: i32,

    /// The request that spawned the Operation.
    pub request: std::option::Option<crate::model::operation_metadata::Request>,

    /// Specific metadata per RPC.
    pub metadata: std::option::Option<crate::model::operation_metadata::Metadata>,

    pub(crate) _unknown_fields: serde_json::Map<std::string::String, serde_json::Value>,
}

impl OperationMetadata {
    pub fn new() -> Self {
        std::default::Default::default()
    }

    /// Sets the value of [create_time][crate::model::OperationMetadata::create_time].
    ///
    /// # Example
    /// ```ignore,no_run
    /// # use google_cloud_speech_v2::model::OperationMetadata;
    /// use wkt::Timestamp;
    /// let x = OperationMetadata::new().set_create_time(Timestamp::default()/* use setters */);
    /// ```
    pub fn set_create_time<T>(mut self, v: T) -> Self
    where
        T: std::convert::Into<wkt::Timestamp>,
    {
        self.create_time = std::option::Option::Some(v.into());
        self
    }

    /// Sets or clears the value of [create_time][crate::model::OperationMetadata::create_time].
    ///
    /// # Example
    /// ```ignore,no_run
    /// # use google_cloud_speech_v2::model::OperationMetadata;
    /// use wkt::Timestamp;
    /// let x = OperationMetadata::new().set_or_clear_create_time(Some(Timestamp::default()/* use setters */));
    /// let x = OperationMetadata::new().set_or_clear_create_time(None::<Timestamp>);
    /// ```
    pub fn set_or_clear_create_time<T>(mut self, v: std::option::Option<T>) -> Self
    where
        T: std::convert::Into<wkt::Timestamp>,
    {
        self.create_time = v.map(|x| x.into());
        self
    }

    /// Sets the value of [update_time][crate::model::OperationMetadata::update_time].
    ///
    /// # Example
    /// ```ignore,no_run
    /// # use google_cloud_speech_v2::model::OperationMetadata;
    /// use wkt::Timestamp;
    /// let x = OperationMetadata::new().set_update_time(Timestamp::default()/* use setters */);
    /// ```
    pub fn set_update_time<T>(mut self, v: T) -> Self
    where
        T: std::convert::Into<wkt::Timestamp>,
    {
        self.update_time = std::option::Option::Some(v.into());
        self
    }

    /// Sets or clears the value of [update_time][crate::model::OperationMetadata::update_time].
    ///
    /// # Example
    /// ```ignore,no_run
    /// # use google_cloud_speech_v2::model::OperationMetadata;
    /// use wkt::Timestamp;
    /// let x = OperationMetadata::new().set_or_clear_update_time(Some(Timestamp::default()/* use setters */));
    /// let x = OperationMetadata::new().set_or_clear_update_time(None::<Timestamp>);
    /// ```
    pub fn set_or_clear_update_time<T>(mut self, v: std::option::Option<T>) -> Self
    where
        T: std::convert::Into<wkt::Timestamp>,
    {
        self.update_time = v.map(|x| x.into());
        self
    }

    /// Sets the value of [resource][crate::model::OperationMetadata::resource].
    ///
    /// # Example
    /// ```ignore,no_run
    /// # use google_cloud_speech_v2::model::OperationMetadata;
    /// let x = OperationMetadata::new().set_resource("example");
    /// ```
    pub fn set_resource<T: std::convert::Into<std::string::String>>(mut self, v: T) -> Self {
        self.resource = v.into();
        self
    }

    /// Sets the value of [method][crate::model::OperationMetadata::method].
    ///
    /// # Example
    /// ```ignore,no_run
    /// # use google_cloud_speech_v2::model::OperationMetadata;
    /// let x = OperationMetadata::new().set_method("example");
    /// ```
    pub fn set_method<T: std::convert::Into<std::string::String>>(mut self, v: T) -> Self {
        self.method = v.into();
        self
    }

    /// Sets the value of [kms_key_name][crate::model::OperationMetadata::kms_key_name].
    ///
    /// # Example
    /// ```ignore,no_run
    /// # use google_cloud_speech_v2::model::OperationMetadata;
    /// let x = OperationMetadata::new().set_kms_key_name("example");
    /// ```
    pub fn set_kms_key_name<T: std::convert::Into<std::string::String>>(mut self, v: T) -> Self {
        self.kms_key_name = v.into();
        self
    }

    /// Sets the value of [kms_key_version_name][crate::model::OperationMetadata::kms_key_version_name].
    ///
    /// # Example
    /// ```ignore,no_run
    /// # use google_cloud_speech_v2::model::OperationMetadata;
    /// let x = OperationMetadata::new().set_kms_key_version_name("example");
    /// ```
    pub fn set_kms_key_version_name<T: std::convert::Into<std::string::String>>(
        mut self,
        v: T,
    ) -> Self {
        self.kms_key_version_name = v.into();
        self
    }

    /// Sets the value of [progress_percent][crate::model::OperationMetadata::progress_percent].
    ///
    /// # Example
    /// ```ignore,no_run
    /// # use google_cloud_speech_v2::model::OperationMetadata;
    /// let x = OperationMetadata::new().set_progress_percent(42);
    /// ```
    pub fn set_progress_percent<T: std::convert::Into<i32>>(mut self, v: T) -> Self {
        self.progress_percent = v.into();
        self
    }

    /// Sets the value of [request][crate::model::OperationMetadata::request].
    ///
    /// Note that all the setters affecting `request` are mutually
    /// exclusive.
    ///
    /// # Example
    /// ```ignore,no_run
    /// # use google_cloud_speech_v2::model::OperationMetadata;
    /// use google_cloud_speech_v2::model::BatchRecognizeRequest;
    /// let x = OperationMetadata::new().set_request(Some(
    ///     google_cloud_speech_v2::model::operation_metadata::Request::BatchRecognizeRequest(BatchRecognizeRequest::default().into())));
    /// ```
    pub fn set_request<
        T: std::convert::Into<std::option::Option<crate::model::operation_metadata::Request>>,
    >(
        mut self,
        v: T,
    ) -> Self {
        self.request = v.into();
        self
    }

    /// The value of [request][crate::model::OperationMetadata::request]
    /// if it holds a `BatchRecognizeRequest`, `None` if the field is not set or
    /// holds a different branch.
    pub fn batch_recognize_request(
        &self,
    ) -> std::option::Option<&std::boxed::Box<crate::model::BatchRecognizeRequest>> {
        #[allow(unreachable_patterns)]
        self.request.as_ref().and_then(|v| match v {
            crate::model::operation_metadata::Request::BatchRecognizeRequest(v) => {
                std::option::Option::Some(v)
            }
            _ => std::option::Option::None,
        })
    }

    /// Sets the value of [request][crate::model::OperationMetadata::request]
    /// to hold a `BatchRecognizeRequest`.
    ///
    /// Note that all the setters affecting `request` are
    /// mutually exclusive.
    ///
    /// # Example
    /// ```ignore,no_run
    /// # use google_cloud_speech_v2::model::OperationMetadata;
    /// use google_cloud_speech_v2::model::BatchRecognizeRequest;
    /// let x = OperationMetadata::new().set_batch_recognize_request(BatchRecognizeRequest::default()/* use setters */);
    /// assert!(x.batch_recognize_request().is_some());
    /// assert!(x.create_recognizer_request().is_none());
    /// assert!(x.update_recognizer_request().is_none());
    /// assert!(x.delete_recognizer_request().is_none());
    /// assert!(x.undelete_recognizer_request().is_none());
    /// assert!(x.create_custom_class_request().is_none());
    /// assert!(x.update_custom_class_request().is_none());
    /// assert!(x.delete_custom_class_request().is_none());
    /// assert!(x.undelete_custom_class_request().is_none());
    /// assert!(x.create_phrase_set_request().is_none());
    /// assert!(x.update_phrase_set_request().is_none());
    /// assert!(x.delete_phrase_set_request().is_none());
    /// assert!(x.undelete_phrase_set_request().is_none());
    /// assert!(x.update_config_request().is_none());
    /// ```
    pub fn set_batch_recognize_request<
        T: std::convert::Into<std::boxed::Box<crate::model::BatchRecognizeRequest>>,
    >(
        mut self,
        v: T,
    ) -> Self {
        self.request = std::option::Option::Some(
            crate::model::operation_metadata::Request::BatchRecognizeRequest(v.into()),
        );
        self
    }

    /// The value of [request][crate::model::OperationMetadata::request]
    /// if it holds a `CreateRecognizerRequest`, `None` if the field is not set or
    /// holds a different branch.
    pub fn create_recognizer_request(
        &self,
    ) -> std::option::Option<&std::boxed::Box<crate::model::CreateRecognizerRequest>> {
        #[allow(unreachable_patterns)]
        self.request.as_ref().and_then(|v| match v {
            crate::model::operation_metadata::Request::CreateRecognizerRequest(v) => {
                std::option::Option::Some(v)
            }
            _ => std::option::Option::None,
        })
    }

    /// Sets the value of [request][crate::model::OperationMetadata::request]
    /// to hold a `CreateRecognizerRequest`.
    ///
    /// Note that all the setters affecting `request` are
    /// mutually exclusive.
    ///
    /// # Example
    /// ```ignore,no_run
    /// # use google_cloud_speech_v2::model::OperationMetadata;
    /// use google_cloud_speech_v2::model::CreateRecognizerRequest;
    /// let x = OperationMetadata::new().set_create_recognizer_request(CreateRecognizerRequest::default()/* use setters */);
    /// assert!(x.create_recognizer_request().is_some());
    /// assert!(x.batch_recognize_request().is_none());
    /// assert!(x.update_recognizer_request().is_none());
    /// assert!(x.delete_recognizer_request().is_none());
    /// assert!(x.undelete_recognizer_request().is_none());
    /// assert!(x.create_custom_class_request().is_none());
    /// assert!(x.update_custom_class_request().is_none());
    /// assert!(x.delete_custom_class_request().is_none());
    /// assert!(x.undelete_custom_class_request().is_none());
    /// assert!(x.create_phrase_set_request().is_none());
    /// assert!(x.update_phrase_set_request().is_none());
    /// assert!(x.delete_phrase_set_request().is_none());
    /// assert!(x.undelete_phrase_set_request().is_none());
    /// assert!(x.update_config_request().is_none());
    /// ```
    pub fn set_create_recognizer_request<
        T: std::convert::Into<std::boxed::Box<crate::model::CreateRecognizerRequest>>,
    >(
        mut self,
        v: T,
    ) -> Self {
        self.request = std::option::Option::Some(
            crate::model::operation_metadata::Request::CreateRecognizerRequest(v.into()),
        );
        self
    }

    /// The value of [request][crate::model::OperationMetadata::request]
    /// if it holds a `UpdateRecognizerRequest`, `None` if the field is not set or
    /// holds a different branch.
    pub fn update_recognizer_request(
        &self,
    ) -> std::option::Option<&std::boxed::Box<crate::model::UpdateRecognizerRequest>> {
        #[allow(unreachable_patterns)]
        self.request.as_ref().and_then(|v| match v {
            crate::model::operation_metadata::Request::UpdateRecognizerRequest(v) => {
                std::option::Option::Some(v)
            }
            _ => std::option::Option::None,
        })
    }

    /// Sets the value of [request][crate::model::OperationMetadata::request]
    /// to hold a `UpdateRecognizerRequest`.
    ///
    /// Note that all the setters affecting `request` are
    /// mutually exclusive.
    ///
    /// # Example
    /// ```ignore,no_run
    /// # use google_cloud_speech_v2::model::OperationMetadata;
    /// use google_cloud_speech_v2::model::UpdateRecognizerRequest;
    /// let x = OperationMetadata::new().set_update_recognizer_request(UpdateRecognizerRequest::default()/* use setters */);
    /// assert!(x.update_recognizer_request().is_some());
    /// assert!(x.batch_recognize_request().is_none());
    /// assert!(x.create_recognizer_request().is_none());
    /// assert!(x.delete_recognizer_request().is_none());
    /// assert!(x.undelete_recognizer_request().is_none());
    /// assert!(x.create_custom_class_request().is_none());
    /// assert!(x.update_custom_class_request().is_none());
    /// assert!(x.delete_custom_class_request().is_none());
    /// assert!(x.undelete_custom_class_request().is_none());
    /// assert!(x.create_phrase_set_request().is_none());
    /// assert!(x.update_phrase_set_request().is_none());
    /// assert!(x.delete_phrase_set_request().is_none());
    /// assert!(x.undelete_phrase_set_request().is_none());
    /// assert!(x.update_config_request().is_none());
    /// ```
    pub fn set_update_recognizer_request<
        T: std::convert::Into<std::boxed::Box<crate::model::UpdateRecognizerRequest>>,
    >(
        mut self,
        v: T,
    ) -> Self {
        self.request = std::option::Option::Some(
            crate::model::operation_metadata::Request::UpdateRecognizerRequest(v.into()),
        );
        self
    }

    /// The value of [request][crate::model::OperationMetadata::request]
    /// if it holds a `DeleteRecognizerRequest`, `None` if the field is not set or
    /// holds a different branch.
    pub fn delete_recognizer_request(
        &self,
    ) -> std::option::Option<&std::boxed::Box<crate::model::DeleteRecognizerRequest>> {
        #[allow(unreachable_patterns)]
        self.request.as_ref().and_then(|v| match v {
            crate::model::operation_metadata::Request::DeleteRecognizerRequest(v) => {
                std::option::Option::Some(v)
            }
            _ => std::option::Option::None,
        })
    }

    /// Sets the value of [request][crate::model::OperationMetadata::request]
    /// to hold a `DeleteRecognizerRequest`.
    ///
    /// Note that all the setters affecting `request` are
    /// mutually exclusive.
    ///
    /// # Example
    /// ```ignore,no_run
    /// # use google_cloud_speech_v2::model::OperationMetadata;
    /// use google_cloud_speech_v2::model::DeleteRecognizerRequest;
    /// let x = OperationMetadata::new().set_delete_recognizer_request(DeleteRecognizerRequest::default()/* use setters */);
    /// assert!(x.delete_recognizer_request().is_some());
    /// assert!(x.batch_recognize_request().is_none());
    /// assert!(x.create_recognizer_request().is_none());
    /// assert!(x.update_recognizer_request().is_none());
    /// assert!(x.undelete_recognizer_request().is_none());
    /// assert!(x.create_custom_class_request().is_none());
    /// assert!(x.update_custom_class_request().is_none());
    /// assert!(x.delete_custom_class_request().is_none());
    /// assert!(x.undelete_custom_class_request().is_none());
    /// assert!(x.create_phrase_set_request().is_none());
    /// assert!(x.update_phrase_set_request().is_none());
    /// assert!(x.delete_phrase_set_request().is_none());
    /// assert!(x.undelete_phrase_set_request().is_none());
    /// assert!(x.update_config_request().is_none());
    /// ```
    pub fn set_delete_recognizer_request<
        T: std::convert::Into<std::boxed::Box<crate::model::DeleteRecognizerRequest>>,
    >(
        mut self,
        v: T,
    ) -> Self {
        self.request = std::option::Option::Some(
            crate::model::operation_metadata::Request::DeleteRecognizerRequest(v.into()),
        );
        self
    }

    /// The value of [request][crate::model::OperationMetadata::request]
    /// if it holds a `UndeleteRecognizerRequest`, `None` if the field is not set or
    /// holds a different branch.
    pub fn undelete_recognizer_request(
        &self,
    ) -> std::option::Option<&std::boxed::Box<crate::model::UndeleteRecognizerRequest>> {
        #[allow(unreachable_patterns)]
        self.request.as_ref().and_then(|v| match v {
            crate::model::operation_metadata::Request::UndeleteRecognizerRequest(v) => {
                std::option::Option::Some(v)
            }
            _ => std::option::Option::None,
        })
    }

    /// Sets the value of [request][crate::model::OperationMetadata::request]
    /// to hold a `UndeleteRecognizerRequest`.
    ///
    /// Note that all the setters affecting `request` are
    /// mutually exclusive.
    ///
    /// # Example
    /// ```ignore,no_run
    /// # use google_cloud_speech_v2::model::OperationMetadata;
    /// use google_cloud_speech_v2::model::UndeleteRecognizerRequest;
    /// let x = OperationMetadata::new().set_undelete_recognizer_request(UndeleteRecognizerRequest::default()/* use setters */);
    /// assert!(x.undelete_recognizer_request().is_some());
    /// assert!(x.batch_recognize_request().is_none());
    /// assert!(x.create_recognizer_request().is_none());
    /// assert!(x.update_recognizer_request().is_none());
    /// assert!(x.delete_recognizer_request().is_none());
    /// assert!(x.create_custom_class_request().is_none());
    /// assert!(x.update_custom_class_request().is_none());
    /// assert!(x.delete_custom_class_request().is_none());
    /// assert!(x.undelete_custom_class_request().is_none());
    /// assert!(x.create_phrase_set_request().is_none());
    /// assert!(x.update_phrase_set_request().is_none());
    /// assert!(x.delete_phrase_set_request().is_none());
    /// assert!(x.undelete_phrase_set_request().is_none());
    /// assert!(x.update_config_request().is_none());
    /// ```
    pub fn set_undelete_recognizer_request<
        T: std::convert::Into<std::boxed::Box<crate::model::UndeleteRecognizerRequest>>,
    >(
        mut self,
        v: T,
    ) -> Self {
        self.request = std::option::Option::Some(
            crate::model::operation_metadata::Request::UndeleteRecognizerRequest(v.into()),
        );
        self
    }

    /// The value of [request][crate::model::OperationMetadata::request]
    /// if it holds a `CreateCustomClassRequest`, `None` if the field is not set or
    /// holds a different branch.
    pub fn create_custom_class_request(
        &self,
    ) -> std::option::Option<&std::boxed::Box<crate::model::CreateCustomClassRequest>> {
        #[allow(unreachable_patterns)]
        self.request.as_ref().and_then(|v| match v {
            crate::model::operation_metadata::Request::CreateCustomClassRequest(v) => {
                std::option::Option::Some(v)
            }
            _ => std::option::Option::None,
        })
    }

    /// Sets the value of [request][crate::model::OperationMetadata::request]
    /// to hold a `CreateCustomClassRequest`.
    ///
    /// Note that all the setters affecting `request` are
    /// mutually exclusive.
    ///
    /// # Example
    /// ```ignore,no_run
    /// # use google_cloud_speech_v2::model::OperationMetadata;
    /// use google_cloud_speech_v2::model::CreateCustomClassRequest;
    /// let x = OperationMetadata::new().set_create_custom_class_request(CreateCustomClassRequest::default()/* use setters */);
    /// assert!(x.create_custom_class_request().is_some());
    /// assert!(x.batch_recognize_request().is_none());
    /// assert!(x.create_recognizer_request().is_none());
    /// assert!(x.update_recognizer_request().is_none());
    /// assert!(x.delete_recognizer_request().is_none());
    /// assert!(x.undelete_recognizer_request().is_none());
    /// assert!(x.update_custom_class_request().is_none());
    /// assert!(x.delete_custom_class_request().is_none());
    /// assert!(x.undelete_custom_class_request().is_none());
    /// assert!(x.create_phrase_set_request().is_none());
    /// assert!(x.update_phrase_set_request().is_none());
    /// assert!(x.delete_phrase_set_request().is_none());
    /// assert!(x.undelete_phrase_set_request().is_none());
    /// assert!(x.update_config_request().is_none());
    /// ```
    pub fn set_create_custom_class_request<
        T: std::convert::Into<std::boxed::Box<crate::model::CreateCustomClassRequest>>,
    >(
        mut self,
        v: T,
    ) -> Self {
        self.request = std::option::Option::Some(
            crate::model::operation_metadata::Request::CreateCustomClassRequest(v.into()),
        );
        self
    }

    /// The value of [request][crate::model::OperationMetadata::request]
    /// if it holds a `UpdateCustomClassRequest`, `None` if the field is not set or
    /// holds a different branch.
    pub fn update_custom_class_request(
        &self,
    ) -> std::option::Option<&std::boxed::Box<crate::model::UpdateCustomClassRequest>> {
        #[allow(unreachable_patterns)]
        self.request.as_ref().and_then(|v| match v {
            crate::model::operation_metadata::Request::UpdateCustomClassRequest(v) => {
                std::option::Option::Some(v)
            }
            _ => std::option::Option::None,
        })
    }

    /// Sets the value of [request][crate::model::OperationMetadata::request]
    /// to hold a `UpdateCustomClassRequest`.
    ///
    /// Note that all the setters affecting `request` are
    /// mutually exclusive.
    ///
    /// # Example
    /// ```ignore,no_run
    /// # use google_cloud_speech_v2::model::OperationMetadata;
    /// use google_cloud_speech_v2::model::UpdateCustomClassRequest;
    /// let x = OperationMetadata::new().set_update_custom_class_request(UpdateCustomClassRequest::default()/* use setters */);
    /// assert!(x.update_custom_class_request().is_some());
    /// assert!(x.batch_recognize_request().is_none());
    /// assert!(x.create_recognizer_request().is_none());
    /// assert!(x.update_recognizer_request().is_none());
    /// assert!(x.delete_recognizer_request().is_none());
    /// assert!(x.undelete_recognizer_request().is_none());
    /// assert!(x.create_custom_class_request().is_none());
    /// assert!(x.delete_custom_class_request().is_none());
    /// assert!(x.undelete_custom_class_request().is_none());
    /// assert!(x.create_phrase_set_request().is_none());
    /// assert!(x.update_phrase_set_request().is_none());
    /// assert!(x.delete_phrase_set_request().is_none());
    /// assert!(x.undelete_phrase_set_request().is_none());
    /// assert!(x.update_config_request().is_none());
    /// ```
    pub fn set_update_custom_class_request<
        T: std::convert::Into<std::boxed::Box<crate::model::UpdateCustomClassRequest>>,
    >(
        mut self,
        v: T,
    ) -> Self {
        self.request = std::option::Option::Some(
            crate::model::operation_metadata::Request::UpdateCustomClassRequest(v.into()),
        );
        self
    }

    /// The value of [request][crate::model::OperationMetadata::request]
    /// if it holds a `DeleteCustomClassRequest`, `None` if the field is not set or
    /// holds a different branch.
    pub fn delete_custom_class_request(
        &self,
    ) -> std::option::Option<&std::boxed::Box<crate::model::DeleteCustomClassRequest>> {
        #[allow(unreachable_patterns)]
        self.request.as_ref().and_then(|v| match v {
            crate::model::operation_metadata::Request::DeleteCustomClassRequest(v) => {
                std::option::Option::Some(v)
            }
            _ => std::option::Option::None,
        })
    }

    /// Sets the value of [request][crate::model::OperationMetadata::request]
    /// to hold a `DeleteCustomClassRequest`.
    ///
    /// Note that all the setters affecting `request` are
    /// mutually exclusive.
    ///
    /// # Example
    /// ```ignore,no_run
    /// # use google_cloud_speech_v2::model::OperationMetadata;
    /// use google_cloud_speech_v2::model::DeleteCustomClassRequest;
    /// let x = OperationMetadata::new().set_delete_custom_class_request(DeleteCustomClassRequest::default()/* use setters */);
    /// assert!(x.delete_custom_class_request().is_some());
    /// assert!(x.batch_recognize_request().is_none());
    /// assert!(x.create_recognizer_request().is_none());
    /// assert!(x.update_recognizer_request().is_none());
    /// assert!(x.delete_recognizer_request().is_none());
    /// assert!(x.undelete_recognizer_request().is_none());
    /// assert!(x.create_custom_class_request().is_none());
    /// assert!(x.update_custom_class_request().is_none());
    /// assert!(x.undelete_custom_class_request().is_none());
    /// assert!(x.create_phrase_set_request().is_none());
    /// assert!(x.update_phrase_set_request().is_none());
    /// assert!(x.delete_phrase_set_request().is_none());
    /// assert!(x.undelete_phrase_set_request().is_none());
    /// assert!(x.update_config_request().is_none());
    /// ```
    pub fn set_delete_custom_class_request<
        T: std::convert::Into<std::boxed::Box<crate::model::DeleteCustomClassRequest>>,
    >(
        mut self,
        v: T,
    ) -> Self {
        self.request = std::option::Option::Some(
            crate::model::operation_metadata::Request::DeleteCustomClassRequest(v.into()),
        );
        self
    }

    /// The value of [request][crate::model::OperationMetadata::request]
    /// if it holds a `UndeleteCustomClassRequest`, `None` if the field is not set or
    /// holds a different branch.
    pub fn undelete_custom_class_request(
        &self,
    ) -> std::option::Option<&std::boxed::Box<crate::model::UndeleteCustomClassRequest>> {
        #[allow(unreachable_patterns)]
        self.request.as_ref().and_then(|v| match v {
            crate::model::operation_metadata::Request::UndeleteCustomClassRequest(v) => {
                std::option::Option::Some(v)
            }
            _ => std::option::Option::None,
        })
    }

    /// Sets the value of [request][crate::model::OperationMetadata::request]
    /// to hold a `UndeleteCustomClassRequest`.
    ///
    /// Note that all the setters affecting `request` are
    /// mutually exclusive.
    ///
    /// # Example
    /// ```ignore,no_run
    /// # use google_cloud_speech_v2::model::OperationMetadata;
    /// use google_cloud_speech_v2::model::UndeleteCustomClassRequest;
    /// let x = OperationMetadata::new().set_undelete_custom_class_request(UndeleteCustomClassRequest::default()/* use setters */);
    /// assert!(x.undelete_custom_class_request().is_some());
    /// assert!(x.batch_recognize_request().is_none());
    /// assert!(x.create_recognizer_request().is_none());
    /// assert!(x.update_recognizer_request().is_none());
    /// assert!(x.delete_recognizer_request().is_none());
    /// assert!(x.undelete_recognizer_request().is_none());
    /// assert!(x.create_custom_class_request().is_none());
    /// assert!(x.update_custom_class_request().is_none());
    /// assert!(x.delete_custom_class_request().is_none());
    /// assert!(x.create_phrase_set_request().is_none());
    /// assert!(x.update_phrase_set_request().is_none());
    /// assert!(x.delete_phrase_set_request().is_none());
    /// assert!(x.undelete_phrase_set_request().is_none());
    /// assert!(x.update_config_request().is_none());
    /// ```
    pub fn set_undelete_custom_class_request<
        T: std::convert::Into<std::boxed::Box<crate::model::UndeleteCustomClassRequest>>,
    >(
        mut self,
        v: T,
    ) -> Self {
        self.request = std::option::Option::Some(
            crate::model::operation_metadata::Request::UndeleteCustomClassRequest(v.into()),
        );
        self
    }

    /// The value of [request][crate::model::OperationMetadata::request]
    /// if it holds a `CreatePhraseSetRequest`, `None` if the field is not set or
    /// holds a different branch.
    pub fn create_phrase_set_request(
        &self,
    ) -> std::option::Option<&std::boxed::Box<crate::model::CreatePhraseSetRequest>> {
        #[allow(unreachable_patterns)]
        self.request.as_ref().and_then(|v| match v {
            crate::model::operation_metadata::Request::CreatePhraseSetRequest(v) => {
                std::option::Option::Some(v)
            }
            _ => std::option::Option::None,
        })
    }

    /// Sets the value of [request][crate::model::OperationMetadata::request]
    /// to hold a `CreatePhraseSetRequest`.
    ///
    /// Note that all the setters affecting `request` are
    /// mutually exclusive.
    ///
    /// # Example
    /// ```ignore,no_run
    /// # use google_cloud_speech_v2::model::OperationMetadata;
    /// use google_cloud_speech_v2::model::CreatePhraseSetRequest;
    /// let x = OperationMetadata::new().set_create_phrase_set_request(CreatePhraseSetRequest::default()/* use setters */);
    /// assert!(x.create_phrase_set_request().is_some());
    /// assert!(x.batch_recognize_request().is_none());
    /// assert!(x.create_recognizer_request().is_none());
    /// assert!(x.update_recognizer_request().is_none());
    /// assert!(x.delete_recognizer_request().is_none());
    /// assert!(x.undelete_recognizer_request().is_none());
    /// assert!(x.create_custom_class_request().is_none());
    /// assert!(x.update_custom_class_request().is_none());
    /// assert!(x.delete_custom_class_request().is_none());
    /// assert!(x.undelete_custom_class_request().is_none());
    /// assert!(x.update_phrase_set_request().is_none());
    /// assert!(x.delete_phrase_set_request().is_none());
    /// assert!(x.undelete_phrase_set_request().is_none());
    /// assert!(x.update_config_request().is_none());
    /// ```
    pub fn set_create_phrase_set_request<
        T: std::convert::Into<std::boxed::Box<crate::model::CreatePhraseSetRequest>>,
    >(
        mut self,
        v: T,
    ) -> Self {
        self.request = std::option::Option::Some(
            crate::model::operation_metadata::Request::CreatePhraseSetRequest(v.into()),
        );
        self
    }

    /// The value of [request][crate::model::OperationMetadata::request]
    /// if it holds a `UpdatePhraseSetRequest`, `None` if the field is not set or
    /// holds a different branch.
    pub fn update_phrase_set_request(
        &self,
    ) -> std::option::Option<&std::boxed::Box<crate::model::UpdatePhraseSetRequest>> {
        #[allow(unreachable_patterns)]
        self.request.as_ref().and_then(|v| match v {
            crate::model::operation_metadata::Request::UpdatePhraseSetRequest(v) => {
                std::option::Option::Some(v)
            }
            _ => std::option::Option::None,
        })
    }

    /// Sets the value of [request][crate::model::OperationMetadata::request]
    /// to hold a `UpdatePhraseSetRequest`.
    ///
    /// Note that all the setters affecting `request` are
    /// mutually exclusive.
    ///
    /// # Example
    /// ```ignore,no_run
    /// # use google_cloud_speech_v2::model::OperationMetadata;
    /// use google_cloud_speech_v2::model::UpdatePhraseSetRequest;
    /// let x = OperationMetadata::new().set_update_phrase_set_request(UpdatePhraseSetRequest::default()/* use setters */);
    /// assert!(x.update_phrase_set_request().is_some());
    /// assert!(x.batch_recognize_request().is_none());
    /// assert!(x.create_recognizer_request().is_none());
    /// assert!(x.update_recognizer_request().is_none());
    /// assert!(x.delete_recognizer_request().is_none());
    /// assert!(x.undelete_recognizer_request().is_none());
    /// assert!(x.create_custom_class_request().is_none());
    /// assert!(x.update_custom_class_request().is_none());
    /// assert!(x.delete_custom_class_request().is_none());
    /// assert!(x.undelete_custom_class_request().is_none());
    /// assert!(x.create_phrase_set_request().is_none());
    /// assert!(x.delete_phrase_set_request().is_none());
    /// assert!(x.undelete_phrase_set_request().is_none());
    /// assert!(x.update_config_request().is_none());
    /// ```
    pub fn set_update_phrase_set_request<
        T: std::convert::Into<std::boxed::Box<crate::model::UpdatePhraseSetRequest>>,
    >(
        mut self,
        v: T,
    ) -> Self {
        self.request = std::option::Option::Some(
            crate::model::operation_metadata::Request::UpdatePhraseSetRequest(v.into()),
        );
        self
    }

    /// The value of [request][crate::model::OperationMetadata::request]
    /// if it holds a `DeletePhraseSetRequest`, `None` if the field is not set or
    /// holds a different branch.
    pub fn delete_phrase_set_request(
        &self,
    ) -> std::option::Option<&std::boxed::Box<crate::model::DeletePhraseSetRequest>> {
        #[allow(unreachable_patterns)]
        self.request.as_ref().and_then(|v| match v {
            crate::model::operation_metadata::Request::DeletePhraseSetRequest(v) => {
                std::option::Option::Some(v)
            }
            _ => std::option::Option::None,
        })
    }

    /// Sets the value of [request][crate::model::OperationMetadata::request]
    /// to hold a `DeletePhraseSetRequest`.
    ///
    /// Note that all the setters affecting `request` are
    /// mutually exclusive.
    ///
    /// # Example
    /// ```ignore,no_run
    /// # use google_cloud_speech_v2::model::OperationMetadata;
    /// use google_cloud_speech_v2::model::DeletePhraseSetRequest;
    /// let x = OperationMetadata::new().set_delete_phrase_set_request(DeletePhraseSetRequest::default()/* use setters */);
    /// assert!(x.delete_phrase_set_request().is_some());
    /// assert!(x.batch_recognize_request().is_none());
    /// assert!(x.create_recognizer_request().is_none());
    /// assert!(x.update_recognizer_request().is_none());
    /// assert!(x.delete_recognizer_request().is_none());
    /// assert!(x.undelete_recognizer_request().is_none());
    /// assert!(x.create_custom_class_request().is_none());
    /// assert!(x.update_custom_class_request().is_none());
    /// assert!(x.delete_custom_class_request().is_none());
    /// assert!(x.undelete_custom_class_request().is_none());
    /// assert!(x.create_phrase_set_request().is_none());
    /// assert!(x.update_phrase_set_request().is_none());
    /// assert!(x.undelete_phrase_set_request().is_none());
    /// assert!(x.update_config_request().is_none());
    /// ```
    pub fn set_delete_phrase_set_request<
        T: std::convert::Into<std::boxed::Box<crate::model::DeletePhraseSetRequest>>,
    >(
        mut self,
        v: T,
    ) -> Self {
        self.request = std::option::Option::Some(
            crate::model::operation_metadata::Request::DeletePhraseSetRequest(v.into()),
        );
        self
    }

    /// The value of [request][crate::model::OperationMetadata::request]
    /// if it holds a `UndeletePhraseSetRequest`, `None` if the field is not set or
    /// holds a different branch.
    pub fn undelete_phrase_set_request(
        &self,
    ) -> std::option::Option<&std::boxed::Box<crate::model::UndeletePhraseSetRequest>> {
        #[allow(unreachable_patterns)]
        self.request.as_ref().and_then(|v| match v {
            crate::model::operation_metadata::Request::UndeletePhraseSetRequest(v) => {
                std::option::Option::Some(v)
            }
            _ => std::option::Option::None,
        })
    }

    /// Sets the value of [request][crate::model::OperationMetadata::request]
    /// to hold a `UndeletePhraseSetRequest`.
    ///
    /// Note that all the setters affecting `request` are
    /// mutually exclusive.
    ///
    /// # Example
    /// ```ignore,no_run
    /// # use google_cloud_speech_v2::model::OperationMetadata;
    /// use google_cloud_speech_v2::model::UndeletePhraseSetRequest;
    /// let x = OperationMetadata::new().set_undelete_phrase_set_request(UndeletePhraseSetRequest::default()/* use setters */);
    /// assert!(x.undelete_phrase_set_request().is_some());
    /// assert!(x.batch_recognize_request().is_none());
    /// assert!(x.create_recognizer_request().is_none());
    /// assert!(x.update_recognizer_request().is_none());
    /// assert!(x.delete_recognizer_request().is_none());
    /// assert!(x.undelete_recognizer_request().is_none());
    /// assert!(x.create_custom_class_request().is_none());
    /// assert!(x.update_custom_class_request().is_none());
    /// assert!(x.delete_custom_class_request().is_none());
    /// assert!(x.undelete_custom_class_request().is_none());
    /// assert!(x.create_phrase_set_request().is_none());
    /// assert!(x.update_phrase_set_request().is_none());
    /// assert!(x.delete_phrase_set_request().is_none());
    /// assert!(x.update_config_request().is_none());
    /// ```
    pub fn set_undelete_phrase_set_request<
        T: std::convert::Into<std::boxed::Box<crate::model::UndeletePhraseSetRequest>>,
    >(
        mut self,
        v: T,
    ) -> Self {
        self.request = std::option::Option::Some(
            crate::model::operation_metadata::Request::UndeletePhraseSetRequest(v.into()),
        );
        self
    }

    /// The value of [request][crate::model::OperationMetadata::request]
    /// if it holds a `UpdateConfigRequest`, `None` if the field is not set or
    /// holds a different branch.
    #[deprecated]
    pub fn update_config_request(
        &self,
    ) -> std::option::Option<&std::boxed::Box<crate::model::UpdateConfigRequest>> {
        #[allow(unreachable_patterns)]
        self.request.as_ref().and_then(|v| match v {
            crate::model::operation_metadata::Request::UpdateConfigRequest(v) => {
                std::option::Option::Some(v)
            }
            _ => std::option::Option::None,
        })
    }

    /// Sets the value of [request][crate::model::OperationMetadata::request]
    /// to hold a `UpdateConfigRequest`.
    ///
    /// Note that all the setters affecting `request` are
    /// mutually exclusive.
    ///
    /// # Example
    /// ```ignore,no_run
    /// # use google_cloud_speech_v2::model::OperationMetadata;
    /// use google_cloud_speech_v2::model::UpdateConfigRequest;
    /// let x = OperationMetadata::new().set_update_config_request(UpdateConfigRequest::default()/* use setters */);
    /// assert!(x.update_config_request().is_some());
    /// assert!(x.batch_recognize_request().is_none());
    /// assert!(x.create_recognizer_request().is_none());
    /// assert!(x.update_recognizer_request().is_none());
    /// assert!(x.delete_recognizer_request().is_none());
    /// assert!(x.undelete_recognizer_request().is_none());
    /// assert!(x.create_custom_class_request().is_none());
    /// assert!(x.update_custom_class_request().is_none());
    /// assert!(x.delete_custom_class_request().is_none());
    /// assert!(x.undelete_custom_class_request().is_none());
    /// assert!(x.create_phrase_set_request().is_none());
    /// assert!(x.update_phrase_set_request().is_none());
    /// assert!(x.delete_phrase_set_request().is_none());
    /// assert!(x.undelete_phrase_set_request().is_none());
    /// ```
    #[deprecated]
    pub fn set_update_config_request<
        T: std::convert::Into<std::boxed::Box<crate::model::UpdateConfigRequest>>,
    >(
        mut self,
        v: T,
    ) -> Self {
        self.request = std::option::Option::Some(
            crate::model::operation_metadata::Request::UpdateConfigRequest(v.into()),
        );
        self
    }

    /// Sets the value of [metadata][crate::model::OperationMetadata::metadata].
    ///
    /// Note that all the setters affecting `metadata` are mutually
    /// exclusive.
    ///
    /// # Example
    /// ```ignore,no_run
    /// # use google_cloud_speech_v2::model::OperationMetadata;
    /// use google_cloud_speech_v2::model::BatchRecognizeMetadata;
    /// let x = OperationMetadata::new().set_metadata(Some(
    ///     google_cloud_speech_v2::model::operation_metadata::Metadata::BatchRecognizeMetadata(BatchRecognizeMetadata::default().into())));
    /// ```
    pub fn set_metadata<
        T: std::convert::Into<std::option::Option<crate::model::operation_metadata::Metadata>>,
    >(
        mut self,
        v: T,
    ) -> Self {
        self.metadata = v.into();
        self
    }

    /// The value of [metadata][crate::model::OperationMetadata::metadata]
    /// if it holds a `BatchRecognizeMetadata`, `None` if the field is not set or
    /// holds a different branch.
    pub fn batch_recognize_metadata(
        &self,
    ) -> std::option::Option<&std::boxed::Box<crate::model::BatchRecognizeMetadata>> {
        #[allow(unreachable_patterns)]
        self.metadata.as_ref().and_then(|v| match v {
            crate::model::operation_metadata::Metadata::BatchRecognizeMetadata(v) => {
                std::option::Option::Some(v)
            }
            _ => std::option::Option::None,
        })
    }

    /// Sets the value of [metadata][crate::model::OperationMetadata::metadata]
    /// to hold a `BatchRecognizeMetadata`.
    ///
    /// Note that all the setters affecting `metadata` are
    /// mutually exclusive.
    ///
    /// # Example
    /// ```ignore,no_run
    /// # use google_cloud_speech_v2::model::OperationMetadata;
    /// use google_cloud_speech_v2::model::BatchRecognizeMetadata;
    /// let x = OperationMetadata::new().set_batch_recognize_metadata(BatchRecognizeMetadata::default()/* use setters */);
    /// assert!(x.batch_recognize_metadata().is_some());
    /// ```
    pub fn set_batch_recognize_metadata<
        T: std::convert::Into<std::boxed::Box<crate::model::BatchRecognizeMetadata>>,
    >(
        mut self,
        v: T,
    ) -> Self {
        self.metadata = std::option::Option::Some(
            crate::model::operation_metadata::Metadata::BatchRecognizeMetadata(v.into()),
        );
        self
    }
}

impl wkt::message::Message for OperationMetadata {
    fn typename() -> &'static str {
        "type.googleapis.com/google.cloud.speech.v2.OperationMetadata"
    }
}

/// Defines additional types related to [OperationMetadata].
pub mod operation_metadata {
    #[allow(unused_imports)]
    use super::*;

    /// The request that spawned the Operation.
    #[derive(Clone, Debug, PartialEq)]
    #[non_exhaustive]
    pub enum Request {
        /// The BatchRecognizeRequest that spawned the Operation.
        BatchRecognizeRequest(std::boxed::Box<crate::model::BatchRecognizeRequest>),
        /// The CreateRecognizerRequest that spawned the Operation.
        CreateRecognizerRequest(std::boxed::Box<crate::model::CreateRecognizerRequest>),
        /// The UpdateRecognizerRequest that spawned the Operation.
        UpdateRecognizerRequest(std::boxed::Box<crate::model::UpdateRecognizerRequest>),
        /// The DeleteRecognizerRequest that spawned the Operation.
        DeleteRecognizerRequest(std::boxed::Box<crate::model::DeleteRecognizerRequest>),
        /// The UndeleteRecognizerRequest that spawned the Operation.
        UndeleteRecognizerRequest(std::boxed::Box<crate::model::UndeleteRecognizerRequest>),
        /// The CreateCustomClassRequest that spawned the Operation.
        CreateCustomClassRequest(std::boxed::Box<crate::model::CreateCustomClassRequest>),
        /// The UpdateCustomClassRequest that spawned the Operation.
        UpdateCustomClassRequest(std::boxed::Box<crate::model::UpdateCustomClassRequest>),
        /// The DeleteCustomClassRequest that spawned the Operation.
        DeleteCustomClassRequest(std::boxed::Box<crate::model::DeleteCustomClassRequest>),
        /// The UndeleteCustomClassRequest that spawned the Operation.
        UndeleteCustomClassRequest(std::boxed::Box<crate::model::UndeleteCustomClassRequest>),
        /// The CreatePhraseSetRequest that spawned the Operation.
        CreatePhraseSetRequest(std::boxed::Box<crate::model::CreatePhraseSetRequest>),
        /// The UpdatePhraseSetRequest that spawned the Operation.
        UpdatePhraseSetRequest(std::boxed::Box<crate::model::UpdatePhraseSetRequest>),
        /// The DeletePhraseSetRequest that spawned the Operation.
        DeletePhraseSetRequest(std::boxed::Box<crate::model::DeletePhraseSetRequest>),
        /// The UndeletePhraseSetRequest that spawned the Operation.
        UndeletePhraseSetRequest(std::boxed::Box<crate::model::UndeletePhraseSetRequest>),
        /// The UpdateConfigRequest that spawned the Operation.
        #[deprecated]
        UpdateConfigRequest(std::boxed::Box<crate::model::UpdateConfigRequest>),
    }

    /// Specific metadata per RPC.
    #[derive(Clone, Debug, PartialEq)]
    #[non_exhaustive]
    pub enum Metadata {
        /// Metadata specific to the BatchRecognize method.
        BatchRecognizeMetadata(std::boxed::Box<crate::model::BatchRecognizeMetadata>),
    }
}
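
// A minimal sketch of the `request` one-of above: each branch setter stores its request
// type, the matching accessor returns `Some`, and setting another branch replaces the
// previous one, since the setters are mutually exclusive.
#[cfg(test)]
mod operation_metadata_request_sketch {
    #[test]
    fn one_branch_at_a_time() {
        let md = crate::model::OperationMetadata::new()
            .set_create_recognizer_request(crate::model::CreateRecognizerRequest::new());
        std::assert!(md.create_recognizer_request().is_some());
        std::assert!(md.batch_recognize_request().is_none());

        // Switching to a different branch drops the previously stored request.
        let md = md.set_update_recognizer_request(crate::model::UpdateRecognizerRequest::new());
        std::assert!(md.update_recognizer_request().is_some());
        std::assert!(md.create_recognizer_request().is_none());
    }
}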

/// Request message for the
/// [ListRecognizers][google.cloud.speech.v2.Speech.ListRecognizers] method.
///
/// [google.cloud.speech.v2.Speech.ListRecognizers]: crate::client::Speech::list_recognizers
#[derive(Clone, Default, PartialEq)]
#[non_exhaustive]
pub struct ListRecognizersRequest {
    /// Required. The project and location of Recognizers to list. The expected
    /// format is `projects/{project}/locations/{location}`.
    pub parent: std::string::String,

    /// The maximum number of Recognizers to return. The service may return fewer
    /// than this value. If unspecified, at most 5 Recognizers will be returned.
    /// The maximum value is 100; values above 100 will be coerced to 100.
    pub page_size: i32,

    /// A page token, received from a previous
    /// [ListRecognizers][google.cloud.speech.v2.Speech.ListRecognizers] call.
    /// Provide this to retrieve the subsequent page.
    ///
    /// When paginating, all other parameters provided to
    /// [ListRecognizers][google.cloud.speech.v2.Speech.ListRecognizers] must match
    /// the call that provided the page token.
    ///
    /// [google.cloud.speech.v2.Speech.ListRecognizers]: crate::client::Speech::list_recognizers
    pub page_token: std::string::String,

    /// Whether, or not, to show resources that have been deleted.
    pub show_deleted: bool,

    pub(crate) _unknown_fields: serde_json::Map<std::string::String, serde_json::Value>,
}

impl ListRecognizersRequest {
    pub fn new() -> Self {
        std::default::Default::default()
    }

    /// Sets the value of [parent][crate::model::ListRecognizersRequest::parent].
    ///
    /// # Example
    /// ```ignore,no_run
    /// # use google_cloud_speech_v2::model::ListRecognizersRequest;
    /// let x = ListRecognizersRequest::new().set_parent("example");
    /// ```
    pub fn set_parent<T: std::convert::Into<std::string::String>>(mut self, v: T) -> Self {
        self.parent = v.into();
        self
    }

    /// Sets the value of [page_size][crate::model::ListRecognizersRequest::page_size].
    ///
    /// # Example
    /// ```ignore,no_run
    /// # use google_cloud_speech_v2::model::ListRecognizersRequest;
    /// let x = ListRecognizersRequest::new().set_page_size(42);
    /// ```
    pub fn set_page_size<T: std::convert::Into<i32>>(mut self, v: T) -> Self {
        self.page_size = v.into();
        self
    }

    /// Sets the value of [page_token][crate::model::ListRecognizersRequest::page_token].
    ///
    /// # Example
    /// ```ignore,no_run
    /// # use google_cloud_speech_v2::model::ListRecognizersRequest;
    /// let x = ListRecognizersRequest::new().set_page_token("example");
    /// ```
    pub fn set_page_token<T: std::convert::Into<std::string::String>>(mut self, v: T) -> Self {
        self.page_token = v.into();
        self
    }

    /// Sets the value of [show_deleted][crate::model::ListRecognizersRequest::show_deleted].
    ///
    /// # Example
    /// ```ignore,no_run
    /// # use google_cloud_speech_v2::model::ListRecognizersRequest;
    /// let x = ListRecognizersRequest::new().set_show_deleted(true);
    /// ```
    pub fn set_show_deleted<T: std::convert::Into<bool>>(mut self, v: T) -> Self {
        self.show_deleted = v.into();
        self
    }
}

impl wkt::message::Message for ListRecognizersRequest {
    fn typename() -> &'static str {
        "type.googleapis.com/google.cloud.speech.v2.ListRecognizersRequest"
    }
}
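
// A minimal pagination sketch for `ListRecognizersRequest`, using placeholder resource
// names and a made-up page token: the first request leaves `page_token` empty, and a
// follow-up request reuses the same parameters plus the token from the prior response.
#[cfg(test)]
mod list_recognizers_request_sketch {
    #[test]
    fn first_and_follow_up_pages() {
        let first = crate::model::ListRecognizersRequest::new()
            .set_parent("projects/my-project/locations/us-central1")
            .set_page_size(25)
            .set_show_deleted(false);
        std::assert!(first.page_token.is_empty());

        // "token-from-previous-response" stands in for a real `next_page_token` value.
        let next = crate::model::ListRecognizersRequest::new()
            .set_parent("projects/my-project/locations/us-central1")
            .set_page_size(25)
            .set_show_deleted(false)
            .set_page_token("token-from-previous-response");
        std::assert_eq!(next.page_token, "token-from-previous-response");
    }
}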

/// Response message for the
/// [ListRecognizers][google.cloud.speech.v2.Speech.ListRecognizers] method.
///
/// [google.cloud.speech.v2.Speech.ListRecognizers]: crate::client::Speech::list_recognizers
#[derive(Clone, Default, PartialEq)]
#[non_exhaustive]
pub struct ListRecognizersResponse {
    /// The list of requested Recognizers.
    pub recognizers: std::vec::Vec<crate::model::Recognizer>,

    /// A token, which can be sent as
    /// [page_token][google.cloud.speech.v2.ListRecognizersRequest.page_token] to
    /// retrieve the next page. If this field is omitted, there are no subsequent
    /// pages. This token expires after 72 hours.
    ///
    /// [google.cloud.speech.v2.ListRecognizersRequest.page_token]: crate::model::ListRecognizersRequest::page_token
    pub next_page_token: std::string::String,

    pub(crate) _unknown_fields: serde_json::Map<std::string::String, serde_json::Value>,
}

impl ListRecognizersResponse {
    pub fn new() -> Self {
        std::default::Default::default()
    }

    /// Sets the value of [recognizers][crate::model::ListRecognizersResponse::recognizers].
    ///
    /// # Example
    /// ```ignore,no_run
    /// # use google_cloud_speech_v2::model::ListRecognizersResponse;
    /// use google_cloud_speech_v2::model::Recognizer;
    /// let x = ListRecognizersResponse::new()
    ///     .set_recognizers([
    ///         Recognizer::default()/* use setters */,
    ///         Recognizer::default()/* use (different) setters */,
    ///     ]);
    /// ```
    pub fn set_recognizers<T, V>(mut self, v: T) -> Self
    where
        T: std::iter::IntoIterator<Item = V>,
        V: std::convert::Into<crate::model::Recognizer>,
    {
        use std::iter::Iterator;
        self.recognizers = v.into_iter().map(|i| i.into()).collect();
        self
    }

    /// Sets the value of [next_page_token][crate::model::ListRecognizersResponse::next_page_token].
    ///
    /// # Example
    /// ```ignore,no_run
    /// # use google_cloud_speech_v2::model::ListRecognizersResponse;
    /// let x = ListRecognizersResponse::new().set_next_page_token("example");
    /// ```
    pub fn set_next_page_token<T: std::convert::Into<std::string::String>>(mut self, v: T) -> Self {
        self.next_page_token = v.into();
        self
    }
}

impl wkt::message::Message for ListRecognizersResponse {
    fn typename() -> &'static str {
        "type.googleapis.com/google.cloud.speech.v2.ListRecognizersResponse"
    }
}

#[doc(hidden)]
impl gax::paginator::internal::PageableResponse for ListRecognizersResponse {
    type PageItem = crate::model::Recognizer;

    fn items(self) -> std::vec::Vec<Self::PageItem> {
        self.recognizers
    }

    fn next_page_token(&self) -> std::string::String {
        use std::clone::Clone;
        self.next_page_token.clone()
    }
}
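
// A minimal sketch of assembling a `ListRecognizersResponse`. It assumes `Recognizer`
// provides the same generated `new()` constructor as the other messages in this file;
// an empty `next_page_token` indicates that no further pages are available.
#[cfg(test)]
mod list_recognizers_response_sketch {
    #[test]
    fn collects_items_and_token() {
        let page = crate::model::ListRecognizersResponse::new()
            .set_recognizers([
                crate::model::Recognizer::new(),
                crate::model::Recognizer::new(),
            ])
            .set_next_page_token("");
        std::assert_eq!(page.recognizers.len(), 2);
        std::assert!(page.next_page_token.is_empty());
    }
}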
1382
1383/// Request message for the
1384/// [GetRecognizer][google.cloud.speech.v2.Speech.GetRecognizer] method.
1385///
1386/// [google.cloud.speech.v2.Speech.GetRecognizer]: crate::client::Speech::get_recognizer
1387#[derive(Clone, Default, PartialEq)]
1388#[non_exhaustive]
1389pub struct GetRecognizerRequest {
1390 /// Required. The name of the Recognizer to retrieve. The expected format is
1391 /// `projects/{project}/locations/{location}/recognizers/{recognizer}`.
1392 pub name: std::string::String,
1393
1394 pub(crate) _unknown_fields: serde_json::Map<std::string::String, serde_json::Value>,
1395}
1396
1397impl GetRecognizerRequest {
1398 pub fn new() -> Self {
1399 std::default::Default::default()
1400 }
1401
1402 /// Sets the value of [name][crate::model::GetRecognizerRequest::name].
1403 ///
1404 /// # Example
1405 /// ```ignore,no_run
1406 /// # use google_cloud_speech_v2::model::GetRecognizerRequest;
1407 /// let x = GetRecognizerRequest::new().set_name("example");
1408 /// ```
1409 pub fn set_name<T: std::convert::Into<std::string::String>>(mut self, v: T) -> Self {
1410 self.name = v.into();
1411 self
1412 }
1413}
1414
1415impl wkt::message::Message for GetRecognizerRequest {
1416 fn typename() -> &'static str {
1417 "type.googleapis.com/google.cloud.speech.v2.GetRecognizerRequest"
1418 }
1419}
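
// A minimal sketch of building the request; the project, location, and
// recognizer IDs are placeholders and must name an existing resource.
//
// ```ignore,no_run
// use google_cloud_speech_v2::model::GetRecognizerRequest;
// let request = GetRecognizerRequest::new()
//     .set_name("projects/my-project/locations/global/recognizers/my-recognizer");
// assert_eq!(
//     request.name,
//     "projects/my-project/locations/global/recognizers/my-recognizer"
// );
// ```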
1420
1421/// Request message for the
1422/// [UpdateRecognizer][google.cloud.speech.v2.Speech.UpdateRecognizer] method.
1423///
1424/// [google.cloud.speech.v2.Speech.UpdateRecognizer]: crate::client::Speech::update_recognizer
1425#[derive(Clone, Default, PartialEq)]
1426#[non_exhaustive]
1427pub struct UpdateRecognizerRequest {
1428 /// Required. The Recognizer to update.
1429 ///
1430 /// The Recognizer's `name` field is used to identify the Recognizer to update.
1431 /// Format: `projects/{project}/locations/{location}/recognizers/{recognizer}`.
1432 pub recognizer: std::option::Option<crate::model::Recognizer>,
1433
1434 /// The list of fields to update. If empty, all non-default valued fields are
1435 /// considered for update. Use `*` to update the entire Recognizer resource.
1436 pub update_mask: std::option::Option<wkt::FieldMask>,
1437
1438 /// If set, validate the request and preview the updated Recognizer, but do not
1439 /// actually update it.
1440 pub validate_only: bool,
1441
1442 pub(crate) _unknown_fields: serde_json::Map<std::string::String, serde_json::Value>,
1443}
1444
1445impl UpdateRecognizerRequest {
1446 pub fn new() -> Self {
1447 std::default::Default::default()
1448 }
1449
1450 /// Sets the value of [recognizer][crate::model::UpdateRecognizerRequest::recognizer].
1451 ///
1452 /// # Example
1453 /// ```ignore,no_run
1454 /// # use google_cloud_speech_v2::model::UpdateRecognizerRequest;
1455 /// use google_cloud_speech_v2::model::Recognizer;
1456 /// let x = UpdateRecognizerRequest::new().set_recognizer(Recognizer::default()/* use setters */);
1457 /// ```
1458 pub fn set_recognizer<T>(mut self, v: T) -> Self
1459 where
1460 T: std::convert::Into<crate::model::Recognizer>,
1461 {
1462 self.recognizer = std::option::Option::Some(v.into());
1463 self
1464 }
1465
1466 /// Sets or clears the value of [recognizer][crate::model::UpdateRecognizerRequest::recognizer].
1467 ///
1468 /// # Example
1469 /// ```ignore,no_run
1470 /// # use google_cloud_speech_v2::model::UpdateRecognizerRequest;
1471 /// use google_cloud_speech_v2::model::Recognizer;
1472 /// let x = UpdateRecognizerRequest::new().set_or_clear_recognizer(Some(Recognizer::default()/* use setters */));
1473 /// let x = UpdateRecognizerRequest::new().set_or_clear_recognizer(None::<Recognizer>);
1474 /// ```
1475 pub fn set_or_clear_recognizer<T>(mut self, v: std::option::Option<T>) -> Self
1476 where
1477 T: std::convert::Into<crate::model::Recognizer>,
1478 {
1479 self.recognizer = v.map(|x| x.into());
1480 self
1481 }
1482
1483 /// Sets the value of [update_mask][crate::model::UpdateRecognizerRequest::update_mask].
1484 ///
1485 /// # Example
1486 /// ```ignore,no_run
1487 /// # use google_cloud_speech_v2::model::UpdateRecognizerRequest;
1488 /// use wkt::FieldMask;
1489 /// let x = UpdateRecognizerRequest::new().set_update_mask(FieldMask::default()/* use setters */);
1490 /// ```
1491 pub fn set_update_mask<T>(mut self, v: T) -> Self
1492 where
1493 T: std::convert::Into<wkt::FieldMask>,
1494 {
1495 self.update_mask = std::option::Option::Some(v.into());
1496 self
1497 }
1498
1499 /// Sets or clears the value of [update_mask][crate::model::UpdateRecognizerRequest::update_mask].
1500 ///
1501 /// # Example
1502 /// ```ignore,no_run
1503 /// # use google_cloud_speech_v2::model::UpdateRecognizerRequest;
1504 /// use wkt::FieldMask;
1505 /// let x = UpdateRecognizerRequest::new().set_or_clear_update_mask(Some(FieldMask::default()/* use setters */));
1506 /// let x = UpdateRecognizerRequest::new().set_or_clear_update_mask(None::<FieldMask>);
1507 /// ```
1508 pub fn set_or_clear_update_mask<T>(mut self, v: std::option::Option<T>) -> Self
1509 where
1510 T: std::convert::Into<wkt::FieldMask>,
1511 {
1512 self.update_mask = v.map(|x| x.into());
1513 self
1514 }
1515
1516 /// Sets the value of [validate_only][crate::model::UpdateRecognizerRequest::validate_only].
1517 ///
1518 /// # Example
1519 /// ```ignore,no_run
1520 /// # use google_cloud_speech_v2::model::UpdateRecognizerRequest;
1521 /// let x = UpdateRecognizerRequest::new().set_validate_only(true);
1522 /// ```
1523 pub fn set_validate_only<T: std::convert::Into<bool>>(mut self, v: T) -> Self {
1524 self.validate_only = v.into();
1525 self
1526 }
1527}
1528
1529impl wkt::message::Message for UpdateRecognizerRequest {
1530 fn typename() -> &'static str {
1531 "type.googleapis.com/google.cloud.speech.v2.UpdateRecognizerRequest"
1532 }
1533}
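
// A sketch of a partial update: the Recognizer's `name` selects the resource,
// the field mask limits which fields are written, and `validate_only`
// previews the change without applying it. All identifiers are placeholders.
//
// ```ignore,no_run
// use google_cloud_speech_v2::model::{Recognizer, UpdateRecognizerRequest};
// use wkt::FieldMask;
// // Populate the mask's `paths` (e.g. "display_name") via its setters.
// let mask = FieldMask::default();
// let request = UpdateRecognizerRequest::new()
//     .set_recognizer(
//         Recognizer::new()
//             .set_name("projects/my-project/locations/global/recognizers/my-recognizer")
//             .set_display_name("Updated display name"),
//     )
//     .set_update_mask(mask)
//     .set_validate_only(true);
// ```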
1534
1535/// Request message for the
1536/// [DeleteRecognizer][google.cloud.speech.v2.Speech.DeleteRecognizer] method.
1537///
1538/// [google.cloud.speech.v2.Speech.DeleteRecognizer]: crate::client::Speech::delete_recognizer
1539#[derive(Clone, Default, PartialEq)]
1540#[non_exhaustive]
1541pub struct DeleteRecognizerRequest {
1542 /// Required. The name of the Recognizer to delete.
1543 /// Format: `projects/{project}/locations/{location}/recognizers/{recognizer}`
1544 pub name: std::string::String,
1545
1546 /// If set, validate the request and preview the deleted Recognizer, but do not
1547 /// actually delete it.
1548 pub validate_only: bool,
1549
1550 /// If set to true, and the Recognizer is not found, the request will succeed
1551 /// and be a no-op (no Operation is recorded in this case).
1552 pub allow_missing: bool,
1553
1554 /// This checksum is computed by the server based on the value of other
1555 /// fields. This may be sent on update, undelete, and delete requests to ensure
1556 /// the client has an up-to-date value before proceeding.
1557 pub etag: std::string::String,
1558
1559 pub(crate) _unknown_fields: serde_json::Map<std::string::String, serde_json::Value>,
1560}
1561
1562impl DeleteRecognizerRequest {
1563 pub fn new() -> Self {
1564 std::default::Default::default()
1565 }
1566
1567 /// Sets the value of [name][crate::model::DeleteRecognizerRequest::name].
1568 ///
1569 /// # Example
1570 /// ```ignore,no_run
1571 /// # use google_cloud_speech_v2::model::DeleteRecognizerRequest;
1572 /// let x = DeleteRecognizerRequest::new().set_name("example");
1573 /// ```
1574 pub fn set_name<T: std::convert::Into<std::string::String>>(mut self, v: T) -> Self {
1575 self.name = v.into();
1576 self
1577 }
1578
1579 /// Sets the value of [validate_only][crate::model::DeleteRecognizerRequest::validate_only].
1580 ///
1581 /// # Example
1582 /// ```ignore,no_run
1583 /// # use google_cloud_speech_v2::model::DeleteRecognizerRequest;
1584 /// let x = DeleteRecognizerRequest::new().set_validate_only(true);
1585 /// ```
1586 pub fn set_validate_only<T: std::convert::Into<bool>>(mut self, v: T) -> Self {
1587 self.validate_only = v.into();
1588 self
1589 }
1590
1591 /// Sets the value of [allow_missing][crate::model::DeleteRecognizerRequest::allow_missing].
1592 ///
1593 /// # Example
1594 /// ```ignore,no_run
1595 /// # use google_cloud_speech_v2::model::DeleteRecognizerRequest;
1596 /// let x = DeleteRecognizerRequest::new().set_allow_missing(true);
1597 /// ```
1598 pub fn set_allow_missing<T: std::convert::Into<bool>>(mut self, v: T) -> Self {
1599 self.allow_missing = v.into();
1600 self
1601 }
1602
1603 /// Sets the value of [etag][crate::model::DeleteRecognizerRequest::etag].
1604 ///
1605 /// # Example
1606 /// ```ignore,no_run
1607 /// # use google_cloud_speech_v2::model::DeleteRecognizerRequest;
1608 /// let x = DeleteRecognizerRequest::new().set_etag("example");
1609 /// ```
1610 pub fn set_etag<T: std::convert::Into<std::string::String>>(mut self, v: T) -> Self {
1611 self.etag = v.into();
1612 self
1613 }
1614}
1615
1616impl wkt::message::Message for DeleteRecognizerRequest {
1617 fn typename() -> &'static str {
1618 "type.googleapis.com/google.cloud.speech.v2.DeleteRecognizerRequest"
1619 }
1620}
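
// A sketch of a guarded delete: `etag` asks the server to verify the client
// read the current version first, and `allow_missing` turns "not found" into
// a no-op. The name and etag values are placeholders.
//
// ```ignore,no_run
// use google_cloud_speech_v2::model::DeleteRecognizerRequest;
// let request = DeleteRecognizerRequest::new()
//     .set_name("projects/my-project/locations/global/recognizers/my-recognizer")
//     .set_etag("etag-from-a-previous-read")
//     .set_allow_missing(true);
// ```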
1621
1622/// Request message for the
1623/// [UndeleteRecognizer][google.cloud.speech.v2.Speech.UndeleteRecognizer]
1624/// method.
1625///
1626/// [google.cloud.speech.v2.Speech.UndeleteRecognizer]: crate::client::Speech::undelete_recognizer
1627#[derive(Clone, Default, PartialEq)]
1628#[non_exhaustive]
1629pub struct UndeleteRecognizerRequest {
1630 /// Required. The name of the Recognizer to undelete.
1631 /// Format: `projects/{project}/locations/{location}/recognizers/{recognizer}`
1632 pub name: std::string::String,
1633
1634 /// If set, validate the request and preview the undeleted Recognizer, but do
1635 /// not actually undelete it.
1636 pub validate_only: bool,
1637
1638 /// This checksum is computed by the server based on the value of other
1639 /// fields. This may be sent on update, undelete, and delete requests to ensure
1640 /// the client has an up-to-date value before proceeding.
1641 pub etag: std::string::String,
1642
1643 pub(crate) _unknown_fields: serde_json::Map<std::string::String, serde_json::Value>,
1644}
1645
1646impl UndeleteRecognizerRequest {
1647 pub fn new() -> Self {
1648 std::default::Default::default()
1649 }
1650
1651 /// Sets the value of [name][crate::model::UndeleteRecognizerRequest::name].
1652 ///
1653 /// # Example
1654 /// ```ignore,no_run
1655 /// # use google_cloud_speech_v2::model::UndeleteRecognizerRequest;
1656 /// let x = UndeleteRecognizerRequest::new().set_name("example");
1657 /// ```
1658 pub fn set_name<T: std::convert::Into<std::string::String>>(mut self, v: T) -> Self {
1659 self.name = v.into();
1660 self
1661 }
1662
1663 /// Sets the value of [validate_only][crate::model::UndeleteRecognizerRequest::validate_only].
1664 ///
1665 /// # Example
1666 /// ```ignore,no_run
1667 /// # use google_cloud_speech_v2::model::UndeleteRecognizerRequest;
1668 /// let x = UndeleteRecognizerRequest::new().set_validate_only(true);
1669 /// ```
1670 pub fn set_validate_only<T: std::convert::Into<bool>>(mut self, v: T) -> Self {
1671 self.validate_only = v.into();
1672 self
1673 }
1674
1675 /// Sets the value of [etag][crate::model::UndeleteRecognizerRequest::etag].
1676 ///
1677 /// # Example
1678 /// ```ignore,no_run
1679 /// # use google_cloud_speech_v2::model::UndeleteRecognizerRequest;
1680 /// let x = UndeleteRecognizerRequest::new().set_etag("example");
1681 /// ```
1682 pub fn set_etag<T: std::convert::Into<std::string::String>>(mut self, v: T) -> Self {
1683 self.etag = v.into();
1684 self
1685 }
1686}
1687
1688impl wkt::message::Message for UndeleteRecognizerRequest {
1689 fn typename() -> &'static str {
1690 "type.googleapis.com/google.cloud.speech.v2.UndeleteRecognizerRequest"
1691 }
1692}
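
// A short sketch of restoring a deleted Recognizer before it is purged;
// `validate_only` previews the undelete without applying it. The name is a
// placeholder.
//
// ```ignore,no_run
// use google_cloud_speech_v2::model::UndeleteRecognizerRequest;
// let request = UndeleteRecognizerRequest::new()
//     .set_name("projects/my-project/locations/global/recognizers/my-recognizer")
//     .set_validate_only(true);
// ```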
1693
1694/// A Recognizer message. Stores recognition configuration and metadata.
1695#[derive(Clone, Default, PartialEq)]
1696#[non_exhaustive]
1697pub struct Recognizer {
1698 /// Output only. Identifier. The resource name of the Recognizer.
1699 /// Format: `projects/{project}/locations/{location}/recognizers/{recognizer}`.
1700 pub name: std::string::String,
1701
1702 /// Output only. System-assigned unique identifier for the Recognizer.
1703 pub uid: std::string::String,
1704
1705 /// User-settable, human-readable name for the Recognizer. Must be 63
1706 /// characters or less.
1707 pub display_name: std::string::String,
1708
1709 /// Optional. This field is now deprecated. Prefer the
1710 /// [`model`][google.cloud.speech.v2.RecognitionConfig.model] field in the
1711 /// [`RecognitionConfig`][google.cloud.speech.v2.RecognitionConfig] message.
1712 ///
1713 /// Which model to use for recognition requests. Select the model best suited
1714 /// to your domain to get best results.
1715 ///
1716 /// Guidance for choosing which model to use can be found in the [Transcription
1717 /// Models
1718 /// Documentation](https://cloud.google.com/speech-to-text/v2/docs/transcription-model)
1719 /// and the models supported in each region can be found in the [Table Of
1720 /// Supported
1721 /// Models](https://cloud.google.com/speech-to-text/v2/docs/speech-to-text-supported-languages).
1722 ///
1723 /// [google.cloud.speech.v2.RecognitionConfig]: crate::model::RecognitionConfig
1724 /// [google.cloud.speech.v2.RecognitionConfig.model]: crate::model::RecognitionConfig::model
1725 #[deprecated]
1726 pub model: std::string::String,
1727
1728 /// Optional. This field is now deprecated. Prefer the
1729 /// [`language_codes`][google.cloud.speech.v2.RecognitionConfig.language_codes]
1730 /// field in the
1731 /// [`RecognitionConfig`][google.cloud.speech.v2.RecognitionConfig] message.
1732 ///
1733 /// The language of the supplied audio as a
1734 /// [BCP-47](https://www.rfc-editor.org/rfc/bcp/bcp47.txt) language tag.
1735 ///
1736 /// Supported languages for each model are listed in the [Table of Supported
1737 /// Models](https://cloud.google.com/speech-to-text/v2/docs/speech-to-text-supported-languages).
1738 ///
1739    /// If additional languages are provided, the recognition result will contain
1740    /// recognition in the most likely language detected. The recognition result
1741    /// will include the language tag of the language detected in the audio.
1742 /// When you create or update a Recognizer, these values are
1743 /// stored in normalized BCP-47 form. For example, "en-us" is stored as
1744 /// "en-US".
1745 ///
1746 /// [google.cloud.speech.v2.RecognitionConfig]: crate::model::RecognitionConfig
1747 /// [google.cloud.speech.v2.RecognitionConfig.language_codes]: crate::model::RecognitionConfig::language_codes
1748 #[deprecated]
1749 pub language_codes: std::vec::Vec<std::string::String>,
1750
1751 /// Default configuration to use for requests with this Recognizer.
1752 /// This can be overwritten by inline configuration in the
1753 /// [RecognizeRequest.config][google.cloud.speech.v2.RecognizeRequest.config]
1754 /// field.
1755 ///
1756 /// [google.cloud.speech.v2.RecognizeRequest.config]: crate::model::RecognizeRequest::config
1757 pub default_recognition_config: std::option::Option<crate::model::RecognitionConfig>,
1758
1759 /// Allows users to store small amounts of arbitrary data.
1760 /// Both the key and the value must be 63 characters or less each.
1761 /// At most 100 annotations.
1762 pub annotations: std::collections::HashMap<std::string::String, std::string::String>,
1763
1764 /// Output only. The Recognizer lifecycle state.
1765 pub state: crate::model::recognizer::State,
1766
1767 /// Output only. Creation time.
1768 pub create_time: std::option::Option<wkt::Timestamp>,
1769
1770 /// Output only. The most recent time this Recognizer was modified.
1771 pub update_time: std::option::Option<wkt::Timestamp>,
1772
1773 /// Output only. The time at which this Recognizer was requested for deletion.
1774 pub delete_time: std::option::Option<wkt::Timestamp>,
1775
1776 /// Output only. The time at which this Recognizer will be purged.
1777 pub expire_time: std::option::Option<wkt::Timestamp>,
1778
1779 /// Output only. This checksum is computed by the server based on the value of
1780 /// other fields. This may be sent on update, undelete, and delete requests to
1781 /// ensure the client has an up-to-date value before proceeding.
1782 pub etag: std::string::String,
1783
1784 /// Output only. Whether or not this Recognizer is in the process of being
1785 /// updated.
1786 pub reconciling: bool,
1787
1788 /// Output only. The [KMS key
1789 /// name](https://cloud.google.com/kms/docs/resource-hierarchy#keys) with which
1790 /// the Recognizer is encrypted. The expected format is
1791 /// `projects/{project}/locations/{location}/keyRings/{key_ring}/cryptoKeys/{crypto_key}`.
1792 pub kms_key_name: std::string::String,
1793
1794 /// Output only. The [KMS key version
1795 /// name](https://cloud.google.com/kms/docs/resource-hierarchy#key_versions)
1796 /// with which the Recognizer is encrypted. The expected format is
1797 /// `projects/{project}/locations/{location}/keyRings/{key_ring}/cryptoKeys/{crypto_key}/cryptoKeyVersions/{crypto_key_version}`.
1798 pub kms_key_version_name: std::string::String,
1799
1800 pub(crate) _unknown_fields: serde_json::Map<std::string::String, serde_json::Value>,
1801}
1802
1803impl Recognizer {
1804 pub fn new() -> Self {
1805 std::default::Default::default()
1806 }
1807
1808 /// Sets the value of [name][crate::model::Recognizer::name].
1809 ///
1810 /// # Example
1811 /// ```ignore,no_run
1812 /// # use google_cloud_speech_v2::model::Recognizer;
1813 /// let x = Recognizer::new().set_name("example");
1814 /// ```
1815 pub fn set_name<T: std::convert::Into<std::string::String>>(mut self, v: T) -> Self {
1816 self.name = v.into();
1817 self
1818 }
1819
1820 /// Sets the value of [uid][crate::model::Recognizer::uid].
1821 ///
1822 /// # Example
1823 /// ```ignore,no_run
1824 /// # use google_cloud_speech_v2::model::Recognizer;
1825 /// let x = Recognizer::new().set_uid("example");
1826 /// ```
1827 pub fn set_uid<T: std::convert::Into<std::string::String>>(mut self, v: T) -> Self {
1828 self.uid = v.into();
1829 self
1830 }
1831
1832 /// Sets the value of [display_name][crate::model::Recognizer::display_name].
1833 ///
1834 /// # Example
1835 /// ```ignore,no_run
1836 /// # use google_cloud_speech_v2::model::Recognizer;
1837 /// let x = Recognizer::new().set_display_name("example");
1838 /// ```
1839 pub fn set_display_name<T: std::convert::Into<std::string::String>>(mut self, v: T) -> Self {
1840 self.display_name = v.into();
1841 self
1842 }
1843
1844 /// Sets the value of [model][crate::model::Recognizer::model].
1845 ///
1846 /// # Example
1847 /// ```ignore,no_run
1848 /// # use google_cloud_speech_v2::model::Recognizer;
1849 /// let x = Recognizer::new().set_model("example");
1850 /// ```
1851 #[deprecated]
1852 pub fn set_model<T: std::convert::Into<std::string::String>>(mut self, v: T) -> Self {
1853 self.model = v.into();
1854 self
1855 }
1856
1857 /// Sets the value of [language_codes][crate::model::Recognizer::language_codes].
1858 ///
1859 /// # Example
1860 /// ```ignore,no_run
1861 /// # use google_cloud_speech_v2::model::Recognizer;
1862 /// let x = Recognizer::new().set_language_codes(["a", "b", "c"]);
1863 /// ```
1864 #[deprecated]
1865 pub fn set_language_codes<T, V>(mut self, v: T) -> Self
1866 where
1867 T: std::iter::IntoIterator<Item = V>,
1868 V: std::convert::Into<std::string::String>,
1869 {
1870 use std::iter::Iterator;
1871 self.language_codes = v.into_iter().map(|i| i.into()).collect();
1872 self
1873 }
1874
1875 /// Sets the value of [default_recognition_config][crate::model::Recognizer::default_recognition_config].
1876 ///
1877 /// # Example
1878 /// ```ignore,no_run
1879 /// # use google_cloud_speech_v2::model::Recognizer;
1880 /// use google_cloud_speech_v2::model::RecognitionConfig;
1881 /// let x = Recognizer::new().set_default_recognition_config(RecognitionConfig::default()/* use setters */);
1882 /// ```
1883 pub fn set_default_recognition_config<T>(mut self, v: T) -> Self
1884 where
1885 T: std::convert::Into<crate::model::RecognitionConfig>,
1886 {
1887 self.default_recognition_config = std::option::Option::Some(v.into());
1888 self
1889 }
1890
1891 /// Sets or clears the value of [default_recognition_config][crate::model::Recognizer::default_recognition_config].
1892 ///
1893 /// # Example
1894 /// ```ignore,no_run
1895 /// # use google_cloud_speech_v2::model::Recognizer;
1896 /// use google_cloud_speech_v2::model::RecognitionConfig;
1897 /// let x = Recognizer::new().set_or_clear_default_recognition_config(Some(RecognitionConfig::default()/* use setters */));
1898 /// let x = Recognizer::new().set_or_clear_default_recognition_config(None::<RecognitionConfig>);
1899 /// ```
1900 pub fn set_or_clear_default_recognition_config<T>(mut self, v: std::option::Option<T>) -> Self
1901 where
1902 T: std::convert::Into<crate::model::RecognitionConfig>,
1903 {
1904 self.default_recognition_config = v.map(|x| x.into());
1905 self
1906 }
1907
1908 /// Sets the value of [annotations][crate::model::Recognizer::annotations].
1909 ///
1910 /// # Example
1911 /// ```ignore,no_run
1912 /// # use google_cloud_speech_v2::model::Recognizer;
1913 /// let x = Recognizer::new().set_annotations([
1914 /// ("key0", "abc"),
1915 /// ("key1", "xyz"),
1916 /// ]);
1917 /// ```
1918 pub fn set_annotations<T, K, V>(mut self, v: T) -> Self
1919 where
1920 T: std::iter::IntoIterator<Item = (K, V)>,
1921 K: std::convert::Into<std::string::String>,
1922 V: std::convert::Into<std::string::String>,
1923 {
1924 use std::iter::Iterator;
1925 self.annotations = v.into_iter().map(|(k, v)| (k.into(), v.into())).collect();
1926 self
1927 }
1928
1929 /// Sets the value of [state][crate::model::Recognizer::state].
1930 ///
1931 /// # Example
1932 /// ```ignore,no_run
1933 /// # use google_cloud_speech_v2::model::Recognizer;
1934 /// use google_cloud_speech_v2::model::recognizer::State;
1935 /// let x0 = Recognizer::new().set_state(State::Active);
1936 /// let x1 = Recognizer::new().set_state(State::Deleted);
1937 /// ```
1938 pub fn set_state<T: std::convert::Into<crate::model::recognizer::State>>(
1939 mut self,
1940 v: T,
1941 ) -> Self {
1942 self.state = v.into();
1943 self
1944 }
1945
1946 /// Sets the value of [create_time][crate::model::Recognizer::create_time].
1947 ///
1948 /// # Example
1949 /// ```ignore,no_run
1950 /// # use google_cloud_speech_v2::model::Recognizer;
1951 /// use wkt::Timestamp;
1952 /// let x = Recognizer::new().set_create_time(Timestamp::default()/* use setters */);
1953 /// ```
1954 pub fn set_create_time<T>(mut self, v: T) -> Self
1955 where
1956 T: std::convert::Into<wkt::Timestamp>,
1957 {
1958 self.create_time = std::option::Option::Some(v.into());
1959 self
1960 }
1961
1962 /// Sets or clears the value of [create_time][crate::model::Recognizer::create_time].
1963 ///
1964 /// # Example
1965 /// ```ignore,no_run
1966 /// # use google_cloud_speech_v2::model::Recognizer;
1967 /// use wkt::Timestamp;
1968 /// let x = Recognizer::new().set_or_clear_create_time(Some(Timestamp::default()/* use setters */));
1969 /// let x = Recognizer::new().set_or_clear_create_time(None::<Timestamp>);
1970 /// ```
1971 pub fn set_or_clear_create_time<T>(mut self, v: std::option::Option<T>) -> Self
1972 where
1973 T: std::convert::Into<wkt::Timestamp>,
1974 {
1975 self.create_time = v.map(|x| x.into());
1976 self
1977 }
1978
1979 /// Sets the value of [update_time][crate::model::Recognizer::update_time].
1980 ///
1981 /// # Example
1982 /// ```ignore,no_run
1983 /// # use google_cloud_speech_v2::model::Recognizer;
1984 /// use wkt::Timestamp;
1985 /// let x = Recognizer::new().set_update_time(Timestamp::default()/* use setters */);
1986 /// ```
1987 pub fn set_update_time<T>(mut self, v: T) -> Self
1988 where
1989 T: std::convert::Into<wkt::Timestamp>,
1990 {
1991 self.update_time = std::option::Option::Some(v.into());
1992 self
1993 }
1994
1995 /// Sets or clears the value of [update_time][crate::model::Recognizer::update_time].
1996 ///
1997 /// # Example
1998 /// ```ignore,no_run
1999 /// # use google_cloud_speech_v2::model::Recognizer;
2000 /// use wkt::Timestamp;
2001 /// let x = Recognizer::new().set_or_clear_update_time(Some(Timestamp::default()/* use setters */));
2002 /// let x = Recognizer::new().set_or_clear_update_time(None::<Timestamp>);
2003 /// ```
2004 pub fn set_or_clear_update_time<T>(mut self, v: std::option::Option<T>) -> Self
2005 where
2006 T: std::convert::Into<wkt::Timestamp>,
2007 {
2008 self.update_time = v.map(|x| x.into());
2009 self
2010 }
2011
2012 /// Sets the value of [delete_time][crate::model::Recognizer::delete_time].
2013 ///
2014 /// # Example
2015 /// ```ignore,no_run
2016 /// # use google_cloud_speech_v2::model::Recognizer;
2017 /// use wkt::Timestamp;
2018 /// let x = Recognizer::new().set_delete_time(Timestamp::default()/* use setters */);
2019 /// ```
2020 pub fn set_delete_time<T>(mut self, v: T) -> Self
2021 where
2022 T: std::convert::Into<wkt::Timestamp>,
2023 {
2024 self.delete_time = std::option::Option::Some(v.into());
2025 self
2026 }
2027
2028 /// Sets or clears the value of [delete_time][crate::model::Recognizer::delete_time].
2029 ///
2030 /// # Example
2031 /// ```ignore,no_run
2032 /// # use google_cloud_speech_v2::model::Recognizer;
2033 /// use wkt::Timestamp;
2034 /// let x = Recognizer::new().set_or_clear_delete_time(Some(Timestamp::default()/* use setters */));
2035 /// let x = Recognizer::new().set_or_clear_delete_time(None::<Timestamp>);
2036 /// ```
2037 pub fn set_or_clear_delete_time<T>(mut self, v: std::option::Option<T>) -> Self
2038 where
2039 T: std::convert::Into<wkt::Timestamp>,
2040 {
2041 self.delete_time = v.map(|x| x.into());
2042 self
2043 }
2044
2045 /// Sets the value of [expire_time][crate::model::Recognizer::expire_time].
2046 ///
2047 /// # Example
2048 /// ```ignore,no_run
2049 /// # use google_cloud_speech_v2::model::Recognizer;
2050 /// use wkt::Timestamp;
2051 /// let x = Recognizer::new().set_expire_time(Timestamp::default()/* use setters */);
2052 /// ```
2053 pub fn set_expire_time<T>(mut self, v: T) -> Self
2054 where
2055 T: std::convert::Into<wkt::Timestamp>,
2056 {
2057 self.expire_time = std::option::Option::Some(v.into());
2058 self
2059 }
2060
2061 /// Sets or clears the value of [expire_time][crate::model::Recognizer::expire_time].
2062 ///
2063 /// # Example
2064 /// ```ignore,no_run
2065 /// # use google_cloud_speech_v2::model::Recognizer;
2066 /// use wkt::Timestamp;
2067 /// let x = Recognizer::new().set_or_clear_expire_time(Some(Timestamp::default()/* use setters */));
2068 /// let x = Recognizer::new().set_or_clear_expire_time(None::<Timestamp>);
2069 /// ```
2070 pub fn set_or_clear_expire_time<T>(mut self, v: std::option::Option<T>) -> Self
2071 where
2072 T: std::convert::Into<wkt::Timestamp>,
2073 {
2074 self.expire_time = v.map(|x| x.into());
2075 self
2076 }
2077
2078 /// Sets the value of [etag][crate::model::Recognizer::etag].
2079 ///
2080 /// # Example
2081 /// ```ignore,no_run
2082 /// # use google_cloud_speech_v2::model::Recognizer;
2083 /// let x = Recognizer::new().set_etag("example");
2084 /// ```
2085 pub fn set_etag<T: std::convert::Into<std::string::String>>(mut self, v: T) -> Self {
2086 self.etag = v.into();
2087 self
2088 }
2089
2090 /// Sets the value of [reconciling][crate::model::Recognizer::reconciling].
2091 ///
2092 /// # Example
2093 /// ```ignore,no_run
2094 /// # use google_cloud_speech_v2::model::Recognizer;
2095 /// let x = Recognizer::new().set_reconciling(true);
2096 /// ```
2097 pub fn set_reconciling<T: std::convert::Into<bool>>(mut self, v: T) -> Self {
2098 self.reconciling = v.into();
2099 self
2100 }
2101
2102 /// Sets the value of [kms_key_name][crate::model::Recognizer::kms_key_name].
2103 ///
2104 /// # Example
2105 /// ```ignore,no_run
2106 /// # use google_cloud_speech_v2::model::Recognizer;
2107 /// let x = Recognizer::new().set_kms_key_name("example");
2108 /// ```
2109 pub fn set_kms_key_name<T: std::convert::Into<std::string::String>>(mut self, v: T) -> Self {
2110 self.kms_key_name = v.into();
2111 self
2112 }
2113
2114 /// Sets the value of [kms_key_version_name][crate::model::Recognizer::kms_key_version_name].
2115 ///
2116 /// # Example
2117 /// ```ignore,no_run
2118 /// # use google_cloud_speech_v2::model::Recognizer;
2119 /// let x = Recognizer::new().set_kms_key_version_name("example");
2120 /// ```
2121 pub fn set_kms_key_version_name<T: std::convert::Into<std::string::String>>(
2122 mut self,
2123 v: T,
2124 ) -> Self {
2125 self.kms_key_version_name = v.into();
2126 self
2127 }
2128}
2129
2130impl wkt::message::Message for Recognizer {
2131 fn typename() -> &'static str {
2132 "type.googleapis.com/google.cloud.speech.v2.Recognizer"
2133 }
2134}
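
// A sketch of a Recognizer as a client would populate it before creation:
// only user-settable fields are set here, while output-only fields such as
// `name`, `uid`, `state`, and the timestamps are filled in by the service.
//
// ```ignore,no_run
// use google_cloud_speech_v2::model::{RecognitionConfig, Recognizer};
// let recognizer = Recognizer::new()
//     .set_display_name("Meeting transcription")
//     .set_annotations([("team", "support"), ("env", "prod")])
//     // Defaults applied to requests that do not carry an inline config.
//     .set_default_recognition_config(RecognitionConfig::default()/* use setters */);
// ```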
2135
2136/// Defines additional types related to [Recognizer].
2137pub mod recognizer {
2138 #[allow(unused_imports)]
2139 use super::*;
2140
2141 /// Set of states that define the lifecycle of a Recognizer.
2142 ///
2143 /// # Working with unknown values
2144 ///
2145 /// This enum is defined as `#[non_exhaustive]` because Google Cloud may add
2146 /// additional enum variants at any time. Adding new variants is not considered
2147 /// a breaking change. Applications should write their code in anticipation of:
2148 ///
2149 /// - New values appearing in future releases of the client library, **and**
2150 /// - New values received dynamically, without application changes.
2151 ///
2152 /// Please consult the [Working with enums] section in the user guide for some
2153 /// guidelines.
2154 ///
2155 /// [Working with enums]: https://google-cloud-rust.github.io/working_with_enums.html
2156 #[derive(Clone, Debug, PartialEq)]
2157 #[non_exhaustive]
2158 pub enum State {
2159 /// The default value. This value is used if the state is omitted.
2160 Unspecified,
2161 /// The Recognizer is active and ready for use.
2162 Active,
2163 /// This Recognizer has been deleted.
2164 Deleted,
2165 /// If set, the enum was initialized with an unknown value.
2166 ///
2167 /// Applications can examine the value using [State::value] or
2168 /// [State::name].
2169 UnknownValue(state::UnknownValue),
2170 }
2171
2172 #[doc(hidden)]
2173 pub mod state {
2174 #[allow(unused_imports)]
2175 use super::*;
2176 #[derive(Clone, Debug, PartialEq)]
2177 pub struct UnknownValue(pub(crate) wkt::internal::UnknownEnumValue);
2178 }
2179
2180 impl State {
2181 /// Gets the enum value.
2182 ///
2183 /// Returns `None` if the enum contains an unknown value deserialized from
2184 /// the string representation of enums.
2185 pub fn value(&self) -> std::option::Option<i32> {
2186 match self {
2187 Self::Unspecified => std::option::Option::Some(0),
2188 Self::Active => std::option::Option::Some(2),
2189 Self::Deleted => std::option::Option::Some(4),
2190 Self::UnknownValue(u) => u.0.value(),
2191 }
2192 }
2193
2194 /// Gets the enum value as a string.
2195 ///
2196 /// Returns `None` if the enum contains an unknown value deserialized from
2197 /// the integer representation of enums.
2198 pub fn name(&self) -> std::option::Option<&str> {
2199 match self {
2200 Self::Unspecified => std::option::Option::Some("STATE_UNSPECIFIED"),
2201 Self::Active => std::option::Option::Some("ACTIVE"),
2202 Self::Deleted => std::option::Option::Some("DELETED"),
2203 Self::UnknownValue(u) => u.0.name(),
2204 }
2205 }
2206 }
2207
2208 impl std::default::Default for State {
2209 fn default() -> Self {
2210 use std::convert::From;
2211 Self::from(0)
2212 }
2213 }
2214
2215 impl std::fmt::Display for State {
2216 fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::result::Result<(), std::fmt::Error> {
2217 wkt::internal::display_enum(f, self.name(), self.value())
2218 }
2219 }
2220
2221 impl std::convert::From<i32> for State {
2222 fn from(value: i32) -> Self {
2223 match value {
2224 0 => Self::Unspecified,
2225 2 => Self::Active,
2226 4 => Self::Deleted,
2227 _ => Self::UnknownValue(state::UnknownValue(
2228 wkt::internal::UnknownEnumValue::Integer(value),
2229 )),
2230 }
2231 }
2232 }
2233
2234 impl std::convert::From<&str> for State {
2235 fn from(value: &str) -> Self {
2236 use std::string::ToString;
2237 match value {
2238 "STATE_UNSPECIFIED" => Self::Unspecified,
2239 "ACTIVE" => Self::Active,
2240 "DELETED" => Self::Deleted,
2241 _ => Self::UnknownValue(state::UnknownValue(
2242 wkt::internal::UnknownEnumValue::String(value.to_string()),
2243 )),
2244 }
2245 }
2246 }
2247
2248 impl serde::ser::Serialize for State {
2249 fn serialize<S>(&self, serializer: S) -> std::result::Result<S::Ok, S::Error>
2250 where
2251 S: serde::Serializer,
2252 {
2253 match self {
2254 Self::Unspecified => serializer.serialize_i32(0),
2255 Self::Active => serializer.serialize_i32(2),
2256 Self::Deleted => serializer.serialize_i32(4),
2257 Self::UnknownValue(u) => u.0.serialize(serializer),
2258 }
2259 }
2260 }
2261
2262 impl<'de> serde::de::Deserialize<'de> for State {
2263 fn deserialize<D>(deserializer: D) -> std::result::Result<Self, D::Error>
2264 where
2265 D: serde::Deserializer<'de>,
2266 {
2267 deserializer.deserialize_any(wkt::internal::EnumVisitor::<State>::new(
2268 ".google.cloud.speech.v2.Recognizer.State",
2269 ))
2270 }
2271 }
2272}
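
// A sketch of handling the lifecycle state defensively. `State` is
// `#[non_exhaustive]`, so a catch-all arm is required; it also covers
// `UnknownValue`, i.e. values newer than this library version.
//
// ```ignore,no_run
// use google_cloud_speech_v2::model::recognizer::State;
// let state = State::from("ACTIVE");
// match state {
//     State::Active => println!("recognizer is ready for use"),
//     State::Deleted => println!("recognizer was deleted"),
//     State::Unspecified => println!("state not reported"),
//     // Covers UnknownValue and any variants added in future releases.
//     other => println!("unhandled state: {other}"),
// }
// ```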
2273
2274/// Automatically detected decoding parameters.
2275/// Supported for the following encodings:
2276///
2277/// * WAV_LINEAR16: 16-bit signed little-endian PCM samples in a WAV container.
2278///
2279/// * WAV_MULAW: 8-bit companded mulaw samples in a WAV container.
2280///
2281/// * WAV_ALAW: 8-bit companded alaw samples in a WAV container.
2282///
2283/// * RFC4867_5_AMR: AMR frames with an rfc4867.5 header.
2284///
2285/// * RFC4867_5_AMRWB: AMR-WB frames with an rfc4867.5 header.
2286///
2287/// * FLAC: FLAC frames in the "native FLAC" container format.
2288///
2289/// * MP3: MPEG audio frames with optional (ignored) ID3 metadata.
2290///
2291/// * OGG_OPUS: Opus audio frames in an Ogg container.
2292///
2293/// * WEBM_OPUS: Opus audio frames in a WebM container.
2294///
2295/// * MP4_AAC: AAC audio frames in an MP4 container.
2296///
2297/// * M4A_AAC: AAC audio frames in an M4A container.
2298///
2299/// * MOV_AAC: AAC audio frames in an MOV container.
2300///
2301#[derive(Clone, Default, PartialEq)]
2302#[non_exhaustive]
2303pub struct AutoDetectDecodingConfig {
2304 pub(crate) _unknown_fields: serde_json::Map<std::string::String, serde_json::Value>,
2305}
2306
2307impl AutoDetectDecodingConfig {
2308 pub fn new() -> Self {
2309 std::default::Default::default()
2310 }
2311}
2312
2313impl wkt::message::Message for AutoDetectDecodingConfig {
2314 fn typename() -> &'static str {
2315 "type.googleapis.com/google.cloud.speech.v2.AutoDetectDecodingConfig"
2316 }
2317}
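
// This message has no user-settable fields: supplying it at all is what
// requests automatic detection of the container and encoding, for the
// formats listed above.
//
// ```ignore,no_run
// use google_cloud_speech_v2::model::AutoDetectDecodingConfig;
// let decoding = AutoDetectDecodingConfig::new(); // no further configuration needed
// ```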
2318
2319/// Explicitly specified decoding parameters.
2320#[derive(Clone, Default, PartialEq)]
2321#[non_exhaustive]
2322pub struct ExplicitDecodingConfig {
2323 /// Required. Encoding of the audio data sent for recognition.
2324 pub encoding: crate::model::explicit_decoding_config::AudioEncoding,
2325
2326 /// Optional. Sample rate in Hertz of the audio data sent for recognition.
2327 /// Valid values are: 8000-48000, and 16000 is optimal. For best results, set
2328    /// Valid values are 8000-48000; 16000 is optimal. For best results, set
2329 /// use the native sample rate of the audio source (instead of resampling).
2330 /// Note that this field is marked as OPTIONAL for backward compatibility
2331 /// reasons. It is (and has always been) effectively REQUIRED.
2332 pub sample_rate_hertz: i32,
2333
2334 /// Optional. Number of channels present in the audio data sent for
2335 /// recognition. Note that this field is marked as OPTIONAL for backward
2336 /// compatibility reasons. It is (and has always been) effectively REQUIRED.
2337 ///
2338 /// The maximum allowed value is 8.
2339 pub audio_channel_count: i32,
2340
2341 pub(crate) _unknown_fields: serde_json::Map<std::string::String, serde_json::Value>,
2342}
2343
2344impl ExplicitDecodingConfig {
2345 pub fn new() -> Self {
2346 std::default::Default::default()
2347 }
2348
2349 /// Sets the value of [encoding][crate::model::ExplicitDecodingConfig::encoding].
2350 ///
2351 /// # Example
2352 /// ```ignore,no_run
2353 /// # use google_cloud_speech_v2::model::ExplicitDecodingConfig;
2354 /// use google_cloud_speech_v2::model::explicit_decoding_config::AudioEncoding;
2355 /// let x0 = ExplicitDecodingConfig::new().set_encoding(AudioEncoding::Linear16);
2356 /// let x1 = ExplicitDecodingConfig::new().set_encoding(AudioEncoding::Mulaw);
2357 /// let x2 = ExplicitDecodingConfig::new().set_encoding(AudioEncoding::Alaw);
2358 /// ```
2359 pub fn set_encoding<
2360 T: std::convert::Into<crate::model::explicit_decoding_config::AudioEncoding>,
2361 >(
2362 mut self,
2363 v: T,
2364 ) -> Self {
2365 self.encoding = v.into();
2366 self
2367 }
2368
2369 /// Sets the value of [sample_rate_hertz][crate::model::ExplicitDecodingConfig::sample_rate_hertz].
2370 ///
2371 /// # Example
2372 /// ```ignore,no_run
2373 /// # use google_cloud_speech_v2::model::ExplicitDecodingConfig;
2374 /// let x = ExplicitDecodingConfig::new().set_sample_rate_hertz(42);
2375 /// ```
2376 pub fn set_sample_rate_hertz<T: std::convert::Into<i32>>(mut self, v: T) -> Self {
2377 self.sample_rate_hertz = v.into();
2378 self
2379 }
2380
2381 /// Sets the value of [audio_channel_count][crate::model::ExplicitDecodingConfig::audio_channel_count].
2382 ///
2383 /// # Example
2384 /// ```ignore,no_run
2385 /// # use google_cloud_speech_v2::model::ExplicitDecodingConfig;
2386 /// let x = ExplicitDecodingConfig::new().set_audio_channel_count(42);
2387 /// ```
2388 pub fn set_audio_channel_count<T: std::convert::Into<i32>>(mut self, v: T) -> Self {
2389 self.audio_channel_count = v.into();
2390 self
2391 }
2392}
2393
2394impl wkt::message::Message for ExplicitDecodingConfig {
2395 fn typename() -> &'static str {
2396 "type.googleapis.com/google.cloud.speech.v2.ExplicitDecodingConfig"
2397 }
2398}
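
// A sketch for headerless PCM audio, where the service cannot read the
// parameters from the data itself: encoding, sample rate, and channel count
// are all supplied explicitly, using the values recommended above.
//
// ```ignore,no_run
// use google_cloud_speech_v2::model::explicit_decoding_config::AudioEncoding;
// use google_cloud_speech_v2::model::ExplicitDecodingConfig;
// let decoding = ExplicitDecodingConfig::new()
//     .set_encoding(AudioEncoding::Linear16) // raw 16-bit little-endian PCM
//     .set_sample_rate_hertz(16000)          // called out above as optimal
//     .set_audio_channel_count(1);           // mono input
// ```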
2399
2400/// Defines additional types related to [ExplicitDecodingConfig].
2401pub mod explicit_decoding_config {
2402 #[allow(unused_imports)]
2403 use super::*;
2404
2405 /// Supported audio data encodings.
2406 ///
2407 /// # Working with unknown values
2408 ///
2409 /// This enum is defined as `#[non_exhaustive]` because Google Cloud may add
2410 /// additional enum variants at any time. Adding new variants is not considered
2411 /// a breaking change. Applications should write their code in anticipation of:
2412 ///
2413 /// - New values appearing in future releases of the client library, **and**
2414 /// - New values received dynamically, without application changes.
2415 ///
2416 /// Please consult the [Working with enums] section in the user guide for some
2417 /// guidelines.
2418 ///
2419 /// [Working with enums]: https://google-cloud-rust.github.io/working_with_enums.html
2420 #[derive(Clone, Debug, PartialEq)]
2421 #[non_exhaustive]
2422 pub enum AudioEncoding {
2423 /// Default value. This value is unused.
2424 Unspecified,
2425 /// Headerless 16-bit signed little-endian PCM samples.
2426 Linear16,
2427 /// Headerless 8-bit companded mulaw samples.
2428 Mulaw,
2429 /// Headerless 8-bit companded alaw samples.
2430 Alaw,
2431 /// AMR frames with an rfc4867.5 header.
2432 Amr,
2433 /// AMR-WB frames with an rfc4867.5 header.
2434 AmrWb,
2435 /// FLAC frames in the "native FLAC" container format.
2436 Flac,
2437 /// MPEG audio frames with optional (ignored) ID3 metadata.
2438 Mp3,
2439 /// Opus audio frames in an Ogg container.
2440 OggOpus,
2441 /// Opus audio frames in a WebM container.
2442 WebmOpus,
2443 /// AAC audio frames in an MP4 container.
2444 Mp4Aac,
2445 /// AAC audio frames in an M4A container.
2446 M4AAac,
2447 /// AAC audio frames in an MOV container.
2448 MovAac,
2449 /// If set, the enum was initialized with an unknown value.
2450 ///
2451 /// Applications can examine the value using [AudioEncoding::value] or
2452 /// [AudioEncoding::name].
2453 UnknownValue(audio_encoding::UnknownValue),
2454 }
2455
2456 #[doc(hidden)]
2457 pub mod audio_encoding {
2458 #[allow(unused_imports)]
2459 use super::*;
2460 #[derive(Clone, Debug, PartialEq)]
2461 pub struct UnknownValue(pub(crate) wkt::internal::UnknownEnumValue);
2462 }
2463
2464 impl AudioEncoding {
2465 /// Gets the enum value.
2466 ///
2467 /// Returns `None` if the enum contains an unknown value deserialized from
2468 /// the string representation of enums.
2469 pub fn value(&self) -> std::option::Option<i32> {
2470 match self {
2471 Self::Unspecified => std::option::Option::Some(0),
2472 Self::Linear16 => std::option::Option::Some(1),
2473 Self::Mulaw => std::option::Option::Some(2),
2474 Self::Alaw => std::option::Option::Some(3),
2475 Self::Amr => std::option::Option::Some(4),
2476 Self::AmrWb => std::option::Option::Some(5),
2477 Self::Flac => std::option::Option::Some(6),
2478 Self::Mp3 => std::option::Option::Some(7),
2479 Self::OggOpus => std::option::Option::Some(8),
2480 Self::WebmOpus => std::option::Option::Some(9),
2481 Self::Mp4Aac => std::option::Option::Some(10),
2482 Self::M4AAac => std::option::Option::Some(11),
2483 Self::MovAac => std::option::Option::Some(12),
2484 Self::UnknownValue(u) => u.0.value(),
2485 }
2486 }
2487
2488 /// Gets the enum value as a string.
2489 ///
2490 /// Returns `None` if the enum contains an unknown value deserialized from
2491 /// the integer representation of enums.
2492 pub fn name(&self) -> std::option::Option<&str> {
2493 match self {
2494 Self::Unspecified => std::option::Option::Some("AUDIO_ENCODING_UNSPECIFIED"),
2495 Self::Linear16 => std::option::Option::Some("LINEAR16"),
2496 Self::Mulaw => std::option::Option::Some("MULAW"),
2497 Self::Alaw => std::option::Option::Some("ALAW"),
2498 Self::Amr => std::option::Option::Some("AMR"),
2499 Self::AmrWb => std::option::Option::Some("AMR_WB"),
2500 Self::Flac => std::option::Option::Some("FLAC"),
2501 Self::Mp3 => std::option::Option::Some("MP3"),
2502 Self::OggOpus => std::option::Option::Some("OGG_OPUS"),
2503 Self::WebmOpus => std::option::Option::Some("WEBM_OPUS"),
2504 Self::Mp4Aac => std::option::Option::Some("MP4_AAC"),
2505 Self::M4AAac => std::option::Option::Some("M4A_AAC"),
2506 Self::MovAac => std::option::Option::Some("MOV_AAC"),
2507 Self::UnknownValue(u) => u.0.name(),
2508 }
2509 }
2510 }
2511
2512 impl std::default::Default for AudioEncoding {
2513 fn default() -> Self {
2514 use std::convert::From;
2515 Self::from(0)
2516 }
2517 }
2518
2519 impl std::fmt::Display for AudioEncoding {
2520 fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::result::Result<(), std::fmt::Error> {
2521 wkt::internal::display_enum(f, self.name(), self.value())
2522 }
2523 }
2524
2525 impl std::convert::From<i32> for AudioEncoding {
2526 fn from(value: i32) -> Self {
2527 match value {
2528 0 => Self::Unspecified,
2529 1 => Self::Linear16,
2530 2 => Self::Mulaw,
2531 3 => Self::Alaw,
2532 4 => Self::Amr,
2533 5 => Self::AmrWb,
2534 6 => Self::Flac,
2535 7 => Self::Mp3,
2536 8 => Self::OggOpus,
2537 9 => Self::WebmOpus,
2538 10 => Self::Mp4Aac,
2539 11 => Self::M4AAac,
2540 12 => Self::MovAac,
2541 _ => Self::UnknownValue(audio_encoding::UnknownValue(
2542 wkt::internal::UnknownEnumValue::Integer(value),
2543 )),
2544 }
2545 }
2546 }
2547
2548 impl std::convert::From<&str> for AudioEncoding {
2549 fn from(value: &str) -> Self {
2550 use std::string::ToString;
2551 match value {
2552 "AUDIO_ENCODING_UNSPECIFIED" => Self::Unspecified,
2553 "LINEAR16" => Self::Linear16,
2554 "MULAW" => Self::Mulaw,
2555 "ALAW" => Self::Alaw,
2556 "AMR" => Self::Amr,
2557 "AMR_WB" => Self::AmrWb,
2558 "FLAC" => Self::Flac,
2559 "MP3" => Self::Mp3,
2560 "OGG_OPUS" => Self::OggOpus,
2561 "WEBM_OPUS" => Self::WebmOpus,
2562 "MP4_AAC" => Self::Mp4Aac,
2563 "M4A_AAC" => Self::M4AAac,
2564 "MOV_AAC" => Self::MovAac,
2565 _ => Self::UnknownValue(audio_encoding::UnknownValue(
2566 wkt::internal::UnknownEnumValue::String(value.to_string()),
2567 )),
2568 }
2569 }
2570 }
2571
2572 impl serde::ser::Serialize for AudioEncoding {
2573 fn serialize<S>(&self, serializer: S) -> std::result::Result<S::Ok, S::Error>
2574 where
2575 S: serde::Serializer,
2576 {
2577 match self {
2578 Self::Unspecified => serializer.serialize_i32(0),
2579 Self::Linear16 => serializer.serialize_i32(1),
2580 Self::Mulaw => serializer.serialize_i32(2),
2581 Self::Alaw => serializer.serialize_i32(3),
2582 Self::Amr => serializer.serialize_i32(4),
2583 Self::AmrWb => serializer.serialize_i32(5),
2584 Self::Flac => serializer.serialize_i32(6),
2585 Self::Mp3 => serializer.serialize_i32(7),
2586 Self::OggOpus => serializer.serialize_i32(8),
2587 Self::WebmOpus => serializer.serialize_i32(9),
2588 Self::Mp4Aac => serializer.serialize_i32(10),
2589 Self::M4AAac => serializer.serialize_i32(11),
2590 Self::MovAac => serializer.serialize_i32(12),
2591 Self::UnknownValue(u) => u.0.serialize(serializer),
2592 }
2593 }
2594 }
2595
2596 impl<'de> serde::de::Deserialize<'de> for AudioEncoding {
2597 fn deserialize<D>(deserializer: D) -> std::result::Result<Self, D::Error>
2598 where
2599 D: serde::Deserializer<'de>,
2600 {
2601 deserializer.deserialize_any(wkt::internal::EnumVisitor::<AudioEncoding>::new(
2602 ".google.cloud.speech.v2.ExplicitDecodingConfig.AudioEncoding",
2603 ))
2604 }
2605 }
2606}
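
// A sketch of the conversions defined above: values round-trip through their
// protobuf names and numbers, and unrecognized inputs are preserved as
// `UnknownValue` instead of being rejected.
//
// ```ignore,no_run
// use google_cloud_speech_v2::model::explicit_decoding_config::AudioEncoding;
// let known = AudioEncoding::from("OGG_OPUS");
// assert_eq!(known.value(), Some(8));
// assert_eq!(known.name(), Some("OGG_OPUS"));
// let unknown = AudioEncoding::from(99); // not a value this library version knows
// assert_eq!(unknown.name(), None);      // integer-only unknowns carry no name
// ```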
2607
2608/// Configuration to enable speaker diarization.
2609#[derive(Clone, Default, PartialEq)]
2610#[non_exhaustive]
2611pub struct SpeakerDiarizationConfig {
2612 /// Optional. The system automatically determines the number of speakers. This
2613 /// value is not currently used.
2614 pub min_speaker_count: i32,
2615
2616 /// Optional. The system automatically determines the number of speakers. This
2617 /// value is not currently used.
2618 pub max_speaker_count: i32,
2619
2620 pub(crate) _unknown_fields: serde_json::Map<std::string::String, serde_json::Value>,
2621}
2622
2623impl SpeakerDiarizationConfig {
2624 pub fn new() -> Self {
2625 std::default::Default::default()
2626 }
2627
2628 /// Sets the value of [min_speaker_count][crate::model::SpeakerDiarizationConfig::min_speaker_count].
2629 ///
2630 /// # Example
2631 /// ```ignore,no_run
2632 /// # use google_cloud_speech_v2::model::SpeakerDiarizationConfig;
2633 /// let x = SpeakerDiarizationConfig::new().set_min_speaker_count(42);
2634 /// ```
2635 pub fn set_min_speaker_count<T: std::convert::Into<i32>>(mut self, v: T) -> Self {
2636 self.min_speaker_count = v.into();
2637 self
2638 }
2639
2640 /// Sets the value of [max_speaker_count][crate::model::SpeakerDiarizationConfig::max_speaker_count].
2641 ///
2642 /// # Example
2643 /// ```ignore,no_run
2644 /// # use google_cloud_speech_v2::model::SpeakerDiarizationConfig;
2645 /// let x = SpeakerDiarizationConfig::new().set_max_speaker_count(42);
2646 /// ```
2647 pub fn set_max_speaker_count<T: std::convert::Into<i32>>(mut self, v: T) -> Self {
2648 self.max_speaker_count = v.into();
2649 self
2650 }
2651}
2652
2653impl wkt::message::Message for SpeakerDiarizationConfig {
2654 fn typename() -> &'static str {
2655 "type.googleapis.com/google.cloud.speech.v2.SpeakerDiarizationConfig"
2656 }
2657}
2658
2659/// Configuration to enable a custom prompt in chirp3.
2660#[derive(Clone, Default, PartialEq)]
2661#[non_exhaustive]
2662pub struct CustomPromptConfig {
2663 /// Optional. The custom instructions to override the existing instructions for
2664 /// chirp3.
2665 pub custom_prompt: std::string::String,
2666
2667 pub(crate) _unknown_fields: serde_json::Map<std::string::String, serde_json::Value>,
2668}
2669
2670impl CustomPromptConfig {
2671 pub fn new() -> Self {
2672 std::default::Default::default()
2673 }
2674
2675 /// Sets the value of [custom_prompt][crate::model::CustomPromptConfig::custom_prompt].
2676 ///
2677 /// # Example
2678 /// ```ignore,no_run
2679 /// # use google_cloud_speech_v2::model::CustomPromptConfig;
2680 /// let x = CustomPromptConfig::new().set_custom_prompt("example");
2681 /// ```
2682 pub fn set_custom_prompt<T: std::convert::Into<std::string::String>>(mut self, v: T) -> Self {
2683 self.custom_prompt = v.into();
2684 self
2685 }
2686}
2687
2688impl wkt::message::Message for CustomPromptConfig {
2689 fn typename() -> &'static str {
2690 "type.googleapis.com/google.cloud.speech.v2.CustomPromptConfig"
2691 }
2692}
2693
2694/// Available recognition features.
2695#[derive(Clone, Default, PartialEq)]
2696#[non_exhaustive]
2697pub struct RecognitionFeatures {
2698 /// If set to `true`, the server will attempt to filter out profanities,
2699 /// replacing all but the initial character in each filtered word with
2700 /// asterisks, for instance, "f***". If set to `false` or omitted, profanities
2701 /// won't be filtered out.
2702 pub profanity_filter: bool,
2703
2704 /// If `true`, the top result includes a list of words and the start and end
2705 /// time offsets (timestamps) for those words. If `false`, no word-level time
2706 /// offset information is returned. The default is `false`.
2707 pub enable_word_time_offsets: bool,
2708
2709 /// If `true`, the top result includes a list of words and the confidence for
2710 /// those words. If `false`, no word-level confidence information is returned.
2711 /// The default is `false`.
2712 pub enable_word_confidence: bool,
2713
2714 /// If `true`, adds punctuation to recognition result hypotheses. This feature
2715 /// is only available in select languages. The default `false` value does not
2716 /// add punctuation to result hypotheses.
2717 pub enable_automatic_punctuation: bool,
2718
2719 /// The spoken punctuation behavior for the call. If `true`, replaces spoken
2720 /// punctuation with the corresponding symbols in the request. For example,
2721 /// "how are you question mark" becomes "how are you?". See
2722 /// <https://cloud.google.com/speech-to-text/docs/spoken-punctuation> for
2723 /// support. If `false`, spoken punctuation is not replaced.
2724 pub enable_spoken_punctuation: bool,
2725
2726 /// The spoken emoji behavior for the call. If `true`, adds spoken emoji
2727 /// formatting for the request. This will replace spoken emojis with the
2728 /// corresponding Unicode symbols in the final transcript. If `false`, spoken
2729 /// emojis are not replaced.
2730 pub enable_spoken_emojis: bool,
2731
2732 /// Mode for recognizing multi-channel audio.
2733 pub multi_channel_mode: crate::model::recognition_features::MultiChannelMode,
2734
2735 /// Configuration to enable speaker diarization. To enable diarization, set
2736 /// this field to an empty SpeakerDiarizationConfig message.
2737 pub diarization_config: std::option::Option<crate::model::SpeakerDiarizationConfig>,
2738
2739 /// Maximum number of recognition hypotheses to be returned.
2740 /// The server may return fewer than `max_alternatives`.
2741    /// Valid values are `0`-`30`. A value of `0` or `1` returns a maximum of one
2742    /// hypothesis. If omitted, a maximum of one is returned.
2743 pub max_alternatives: i32,
2744
2745    /// Optional. Configuration to enable a custom prompt for chirp3.
2746 pub custom_prompt_config: std::option::Option<crate::model::CustomPromptConfig>,
2747
2748 pub(crate) _unknown_fields: serde_json::Map<std::string::String, serde_json::Value>,
2749}
2750
2751impl RecognitionFeatures {
2752 pub fn new() -> Self {
2753 std::default::Default::default()
2754 }
2755
2756 /// Sets the value of [profanity_filter][crate::model::RecognitionFeatures::profanity_filter].
2757 ///
2758 /// # Example
2759 /// ```ignore,no_run
2760 /// # use google_cloud_speech_v2::model::RecognitionFeatures;
2761 /// let x = RecognitionFeatures::new().set_profanity_filter(true);
2762 /// ```
2763 pub fn set_profanity_filter<T: std::convert::Into<bool>>(mut self, v: T) -> Self {
2764 self.profanity_filter = v.into();
2765 self
2766 }
2767
2768 /// Sets the value of [enable_word_time_offsets][crate::model::RecognitionFeatures::enable_word_time_offsets].
2769 ///
2770 /// # Example
2771 /// ```ignore,no_run
2772 /// # use google_cloud_speech_v2::model::RecognitionFeatures;
2773 /// let x = RecognitionFeatures::new().set_enable_word_time_offsets(true);
2774 /// ```
2775 pub fn set_enable_word_time_offsets<T: std::convert::Into<bool>>(mut self, v: T) -> Self {
2776 self.enable_word_time_offsets = v.into();
2777 self
2778 }
2779
2780 /// Sets the value of [enable_word_confidence][crate::model::RecognitionFeatures::enable_word_confidence].
2781 ///
2782 /// # Example
2783 /// ```ignore,no_run
2784 /// # use google_cloud_speech_v2::model::RecognitionFeatures;
2785 /// let x = RecognitionFeatures::new().set_enable_word_confidence(true);
2786 /// ```
2787 pub fn set_enable_word_confidence<T: std::convert::Into<bool>>(mut self, v: T) -> Self {
2788 self.enable_word_confidence = v.into();
2789 self
2790 }
2791
2792 /// Sets the value of [enable_automatic_punctuation][crate::model::RecognitionFeatures::enable_automatic_punctuation].
2793 ///
2794 /// # Example
2795 /// ```ignore,no_run
2796 /// # use google_cloud_speech_v2::model::RecognitionFeatures;
2797 /// let x = RecognitionFeatures::new().set_enable_automatic_punctuation(true);
2798 /// ```
2799 pub fn set_enable_automatic_punctuation<T: std::convert::Into<bool>>(mut self, v: T) -> Self {
2800 self.enable_automatic_punctuation = v.into();
2801 self
2802 }
2803
2804 /// Sets the value of [enable_spoken_punctuation][crate::model::RecognitionFeatures::enable_spoken_punctuation].
2805 ///
2806 /// # Example
2807 /// ```ignore,no_run
2808 /// # use google_cloud_speech_v2::model::RecognitionFeatures;
2809 /// let x = RecognitionFeatures::new().set_enable_spoken_punctuation(true);
2810 /// ```
2811 pub fn set_enable_spoken_punctuation<T: std::convert::Into<bool>>(mut self, v: T) -> Self {
2812 self.enable_spoken_punctuation = v.into();
2813 self
2814 }
2815
2816 /// Sets the value of [enable_spoken_emojis][crate::model::RecognitionFeatures::enable_spoken_emojis].
2817 ///
2818 /// # Example
2819 /// ```ignore,no_run
2820 /// # use google_cloud_speech_v2::model::RecognitionFeatures;
2821 /// let x = RecognitionFeatures::new().set_enable_spoken_emojis(true);
2822 /// ```
2823 pub fn set_enable_spoken_emojis<T: std::convert::Into<bool>>(mut self, v: T) -> Self {
2824 self.enable_spoken_emojis = v.into();
2825 self
2826 }
2827
2828 /// Sets the value of [multi_channel_mode][crate::model::RecognitionFeatures::multi_channel_mode].
2829 ///
2830 /// # Example
2831 /// ```ignore,no_run
2832 /// # use google_cloud_speech_v2::model::RecognitionFeatures;
2833 /// use google_cloud_speech_v2::model::recognition_features::MultiChannelMode;
2834 /// let x0 = RecognitionFeatures::new().set_multi_channel_mode(MultiChannelMode::SeparateRecognitionPerChannel);
2835 /// ```
2836 pub fn set_multi_channel_mode<
2837 T: std::convert::Into<crate::model::recognition_features::MultiChannelMode>,
2838 >(
2839 mut self,
2840 v: T,
2841 ) -> Self {
2842 self.multi_channel_mode = v.into();
2843 self
2844 }
2845
2846 /// Sets the value of [diarization_config][crate::model::RecognitionFeatures::diarization_config].
2847 ///
2848 /// # Example
2849 /// ```ignore,no_run
2850 /// # use google_cloud_speech_v2::model::RecognitionFeatures;
2851 /// use google_cloud_speech_v2::model::SpeakerDiarizationConfig;
2852 /// let x = RecognitionFeatures::new().set_diarization_config(SpeakerDiarizationConfig::default()/* use setters */);
2853 /// ```
2854 pub fn set_diarization_config<T>(mut self, v: T) -> Self
2855 where
2856 T: std::convert::Into<crate::model::SpeakerDiarizationConfig>,
2857 {
2858 self.diarization_config = std::option::Option::Some(v.into());
2859 self
2860 }
2861
2862 /// Sets or clears the value of [diarization_config][crate::model::RecognitionFeatures::diarization_config].
2863 ///
2864 /// # Example
2865 /// ```ignore,no_run
2866 /// # use google_cloud_speech_v2::model::RecognitionFeatures;
2867 /// use google_cloud_speech_v2::model::SpeakerDiarizationConfig;
2868 /// let x = RecognitionFeatures::new().set_or_clear_diarization_config(Some(SpeakerDiarizationConfig::default()/* use setters */));
2869 /// let x = RecognitionFeatures::new().set_or_clear_diarization_config(None::<SpeakerDiarizationConfig>);
2870 /// ```
2871 pub fn set_or_clear_diarization_config<T>(mut self, v: std::option::Option<T>) -> Self
2872 where
2873 T: std::convert::Into<crate::model::SpeakerDiarizationConfig>,
2874 {
2875 self.diarization_config = v.map(|x| x.into());
2876 self
2877 }
2878
2879 /// Sets the value of [max_alternatives][crate::model::RecognitionFeatures::max_alternatives].
2880 ///
2881 /// # Example
2882 /// ```ignore,no_run
2883 /// # use google_cloud_speech_v2::model::RecognitionFeatures;
2884 /// let x = RecognitionFeatures::new().set_max_alternatives(42);
2885 /// ```
2886 pub fn set_max_alternatives<T: std::convert::Into<i32>>(mut self, v: T) -> Self {
2887 self.max_alternatives = v.into();
2888 self
2889 }
2890
2891 /// Sets the value of [custom_prompt_config][crate::model::RecognitionFeatures::custom_prompt_config].
2892 ///
2893 /// # Example
2894 /// ```ignore,no_run
2895 /// # use google_cloud_speech_v2::model::RecognitionFeatures;
2896 /// use google_cloud_speech_v2::model::CustomPromptConfig;
2897 /// let x = RecognitionFeatures::new().set_custom_prompt_config(CustomPromptConfig::default()/* use setters */);
2898 /// ```
2899 pub fn set_custom_prompt_config<T>(mut self, v: T) -> Self
2900 where
2901 T: std::convert::Into<crate::model::CustomPromptConfig>,
2902 {
2903 self.custom_prompt_config = std::option::Option::Some(v.into());
2904 self
2905 }
2906
2907 /// Sets or clears the value of [custom_prompt_config][crate::model::RecognitionFeatures::custom_prompt_config].
2908 ///
2909 /// # Example
2910 /// ```ignore,no_run
2911 /// # use google_cloud_speech_v2::model::RecognitionFeatures;
2912 /// use google_cloud_speech_v2::model::CustomPromptConfig;
2913 /// let x = RecognitionFeatures::new().set_or_clear_custom_prompt_config(Some(CustomPromptConfig::default()/* use setters */));
2914 /// let x = RecognitionFeatures::new().set_or_clear_custom_prompt_config(None::<CustomPromptConfig>);
2915 /// ```
2916 pub fn set_or_clear_custom_prompt_config<T>(mut self, v: std::option::Option<T>) -> Self
2917 where
2918 T: std::convert::Into<crate::model::CustomPromptConfig>,
2919 {
2920 self.custom_prompt_config = v.map(|x| x.into());
2921 self
2922 }
2923}
2924
2925impl wkt::message::Message for RecognitionFeatures {
2926 fn typename() -> &'static str {
2927 "type.googleapis.com/google.cloud.speech.v2.RecognitionFeatures"
2928 }
2929}
2930
2931/// Defines additional types related to [RecognitionFeatures].
2932pub mod recognition_features {
2933 #[allow(unused_imports)]
2934 use super::*;
2935
2936 /// Options for how to recognize multi-channel audio.
2937 ///
2938 /// # Working with unknown values
2939 ///
2940 /// This enum is defined as `#[non_exhaustive]` because Google Cloud may add
2941 /// additional enum variants at any time. Adding new variants is not considered
2942 /// a breaking change. Applications should write their code in anticipation of:
2943 ///
2944 /// - New values appearing in future releases of the client library, **and**
2945 /// - New values received dynamically, without application changes.
2946 ///
2947 /// Please consult the [Working with enums] section in the user guide for some
2948 /// guidelines.
2949 ///
2950 /// [Working with enums]: https://google-cloud-rust.github.io/working_with_enums.html
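///
/// # Example
/// A minimal sketch of handling this enum defensively, including values this
/// client library does not know about yet; the fallback handling shown is
/// purely illustrative:
/// ```ignore,no_run
/// use google_cloud_speech_v2::model::recognition_features::MultiChannelMode;
/// let mode = MultiChannelMode::from("SEPARATE_RECOGNITION_PER_CHANNEL");
/// match mode {
///     MultiChannelMode::SeparateRecognitionPerChannel => { /* per-channel results */ }
///     MultiChannelMode::Unspecified => { /* only the first channel is transcribed */ }
///     other => {
///         // A value introduced after this library was generated; inspect it
///         // via `name()` and `value()`.
///         let _ = (other.name(), other.value());
///     }
/// }
/// ```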
2951 #[derive(Clone, Debug, PartialEq)]
2952 #[non_exhaustive]
2953 pub enum MultiChannelMode {
2954 /// Default value for the multi-channel mode. If the audio contains
2955 /// multiple channels, only the first channel will be transcribed; other
2956 /// channels will be ignored.
2957 Unspecified,
2958 /// If selected, each channel in the provided audio is transcribed
2959 /// independently. This cannot be selected if the selected
2960 /// [model][google.cloud.speech.v2.Recognizer.model] is `latest_short`.
2961 ///
2962 /// [google.cloud.speech.v2.Recognizer.model]: crate::model::Recognizer::model
2963 SeparateRecognitionPerChannel,
2964 /// If set, the enum was initialized with an unknown value.
2965 ///
2966 /// Applications can examine the value using [MultiChannelMode::value] or
2967 /// [MultiChannelMode::name].
2968 UnknownValue(multi_channel_mode::UnknownValue),
2969 }
2970
2971 #[doc(hidden)]
2972 pub mod multi_channel_mode {
2973 #[allow(unused_imports)]
2974 use super::*;
2975 #[derive(Clone, Debug, PartialEq)]
2976 pub struct UnknownValue(pub(crate) wkt::internal::UnknownEnumValue);
2977 }
2978
2979 impl MultiChannelMode {
2980 /// Gets the enum value.
2981 ///
2982 /// Returns `None` if the enum contains an unknown value deserialized from
2983 /// the string representation of enums.
2984 pub fn value(&self) -> std::option::Option<i32> {
2985 match self {
2986 Self::Unspecified => std::option::Option::Some(0),
2987 Self::SeparateRecognitionPerChannel => std::option::Option::Some(1),
2988 Self::UnknownValue(u) => u.0.value(),
2989 }
2990 }
2991
2992 /// Gets the enum value as a string.
2993 ///
2994 /// Returns `None` if the enum contains an unknown value deserialized from
2995 /// the integer representation of enums.
2996 pub fn name(&self) -> std::option::Option<&str> {
2997 match self {
2998 Self::Unspecified => std::option::Option::Some("MULTI_CHANNEL_MODE_UNSPECIFIED"),
2999 Self::SeparateRecognitionPerChannel => {
3000 std::option::Option::Some("SEPARATE_RECOGNITION_PER_CHANNEL")
3001 }
3002 Self::UnknownValue(u) => u.0.name(),
3003 }
3004 }
3005 }
3006
3007 impl std::default::Default for MultiChannelMode {
3008 fn default() -> Self {
3009 use std::convert::From;
3010 Self::from(0)
3011 }
3012 }
3013
3014 impl std::fmt::Display for MultiChannelMode {
3015 fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::result::Result<(), std::fmt::Error> {
3016 wkt::internal::display_enum(f, self.name(), self.value())
3017 }
3018 }
3019
3020 impl std::convert::From<i32> for MultiChannelMode {
3021 fn from(value: i32) -> Self {
3022 match value {
3023 0 => Self::Unspecified,
3024 1 => Self::SeparateRecognitionPerChannel,
3025 _ => Self::UnknownValue(multi_channel_mode::UnknownValue(
3026 wkt::internal::UnknownEnumValue::Integer(value),
3027 )),
3028 }
3029 }
3030 }
3031
3032 impl std::convert::From<&str> for MultiChannelMode {
3033 fn from(value: &str) -> Self {
3034 use std::string::ToString;
3035 match value {
3036 "MULTI_CHANNEL_MODE_UNSPECIFIED" => Self::Unspecified,
3037 "SEPARATE_RECOGNITION_PER_CHANNEL" => Self::SeparateRecognitionPerChannel,
3038 _ => Self::UnknownValue(multi_channel_mode::UnknownValue(
3039 wkt::internal::UnknownEnumValue::String(value.to_string()),
3040 )),
3041 }
3042 }
3043 }
3044
3045 impl serde::ser::Serialize for MultiChannelMode {
3046 fn serialize<S>(&self, serializer: S) -> std::result::Result<S::Ok, S::Error>
3047 where
3048 S: serde::Serializer,
3049 {
3050 match self {
3051 Self::Unspecified => serializer.serialize_i32(0),
3052 Self::SeparateRecognitionPerChannel => serializer.serialize_i32(1),
3053 Self::UnknownValue(u) => u.0.serialize(serializer),
3054 }
3055 }
3056 }
3057
3058 impl<'de> serde::de::Deserialize<'de> for MultiChannelMode {
3059 fn deserialize<D>(deserializer: D) -> std::result::Result<Self, D::Error>
3060 where
3061 D: serde::Deserializer<'de>,
3062 {
3063 deserializer.deserialize_any(wkt::internal::EnumVisitor::<MultiChannelMode>::new(
3064 ".google.cloud.speech.v2.RecognitionFeatures.MultiChannelMode",
3065 ))
3066 }
3067 }
3068}
3069
3070/// Transcription normalization configuration. Use transcription normalization
3071/// to automatically replace parts of the transcript with phrases of your
3072/// choosing. For StreamingRecognize, this normalization only applies to stable
3073/// partial transcripts (stability > 0.8) and final transcripts.
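///
/// # Example
/// A minimal sketch, reusing the "cat" => "dog" replacements described on
/// [entries][crate::model::TranscriptNormalization::entries]; the values are
/// placeholders:
/// ```ignore,no_run
/// use google_cloud_speech_v2::model::TranscriptNormalization;
/// use google_cloud_speech_v2::model::transcript_normalization::Entry;
/// let normalization = TranscriptNormalization::new().set_entries([
///     Entry::new().set_search("cat").set_replace("dog"),
///     Entry::new().set_search("mountain cat").set_replace("mountain dog"),
/// ]);
/// ```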
3074#[derive(Clone, Default, PartialEq)]
3075#[non_exhaustive]
3076pub struct TranscriptNormalization {
3077 /// A list of replacement entries. We will perform replacement with one entry
3078 /// at a time. For example, the second entry in ["cat" => "dog", "mountain cat"
3079 /// => "mountain dog"] will never be applied because we will always process the
3080 /// first entry before it. At most 100 entries.
3081 pub entries: std::vec::Vec<crate::model::transcript_normalization::Entry>,
3082
3083 pub(crate) _unknown_fields: serde_json::Map<std::string::String, serde_json::Value>,
3084}
3085
3086impl TranscriptNormalization {
3087 pub fn new() -> Self {
3088 std::default::Default::default()
3089 }
3090
3091 /// Sets the value of [entries][crate::model::TranscriptNormalization::entries].
3092 ///
3093 /// # Example
3094 /// ```ignore,no_run
3095 /// # use google_cloud_speech_v2::model::TranscriptNormalization;
3096 /// use google_cloud_speech_v2::model::transcript_normalization::Entry;
3097 /// let x = TranscriptNormalization::new()
3098 /// .set_entries([
3099 /// Entry::default()/* use setters */,
3100 /// Entry::default()/* use (different) setters */,
3101 /// ]);
3102 /// ```
3103 pub fn set_entries<T, V>(mut self, v: T) -> Self
3104 where
3105 T: std::iter::IntoIterator<Item = V>,
3106 V: std::convert::Into<crate::model::transcript_normalization::Entry>,
3107 {
3108 use std::iter::Iterator;
3109 self.entries = v.into_iter().map(|i| i.into()).collect();
3110 self
3111 }
3112}
3113
3114impl wkt::message::Message for TranscriptNormalization {
3115 fn typename() -> &'static str {
3116 "type.googleapis.com/google.cloud.speech.v2.TranscriptNormalization"
3117 }
3118}
3119
3120/// Defines additional types related to [TranscriptNormalization].
3121pub mod transcript_normalization {
3122 #[allow(unused_imports)]
3123 use super::*;
3124
3125 /// A single replacement configuration.
3126 #[derive(Clone, Default, PartialEq)]
3127 #[non_exhaustive]
3128 pub struct Entry {
3129 /// What to replace. Max length is 100 characters.
3130 pub search: std::string::String,
3131
3132 /// What to replace with. Max length is 100 characters.
3133 pub replace: std::string::String,
3134
3135 /// Whether the search is case-sensitive.
3136 pub case_sensitive: bool,
3137
3138 pub(crate) _unknown_fields: serde_json::Map<std::string::String, serde_json::Value>,
3139 }
3140
3141 impl Entry {
3142 pub fn new() -> Self {
3143 std::default::Default::default()
3144 }
3145
3146 /// Sets the value of [search][crate::model::transcript_normalization::Entry::search].
3147 ///
3148 /// # Example
3149 /// ```ignore,no_run
3150 /// # use google_cloud_speech_v2::model::transcript_normalization::Entry;
3151 /// let x = Entry::new().set_search("example");
3152 /// ```
3153 pub fn set_search<T: std::convert::Into<std::string::String>>(mut self, v: T) -> Self {
3154 self.search = v.into();
3155 self
3156 }
3157
3158 /// Sets the value of [replace][crate::model::transcript_normalization::Entry::replace].
3159 ///
3160 /// # Example
3161 /// ```ignore,no_run
3162 /// # use google_cloud_speech_v2::model::transcript_normalization::Entry;
3163 /// let x = Entry::new().set_replace("example");
3164 /// ```
3165 pub fn set_replace<T: std::convert::Into<std::string::String>>(mut self, v: T) -> Self {
3166 self.replace = v.into();
3167 self
3168 }
3169
3170 /// Sets the value of [case_sensitive][crate::model::transcript_normalization::Entry::case_sensitive].
3171 ///
3172 /// # Example
3173 /// ```ignore,no_run
3174 /// # use google_cloud_speech_v2::model::transcript_normalization::Entry;
3175 /// let x = Entry::new().set_case_sensitive(true);
3176 /// ```
3177 pub fn set_case_sensitive<T: std::convert::Into<bool>>(mut self, v: T) -> Self {
3178 self.case_sensitive = v.into();
3179 self
3180 }
3181 }
3182
3183 impl wkt::message::Message for Entry {
3184 fn typename() -> &'static str {
3185 "type.googleapis.com/google.cloud.speech.v2.TranscriptNormalization.Entry"
3186 }
3187 }
3188}
3189
3190/// Translation configuration. Use to translate the given audio into text for the
3191/// desired language.
3192#[derive(Clone, Default, PartialEq)]
3193#[non_exhaustive]
3194pub struct TranslationConfig {
3195 /// Required. The language code to translate to.
3196 pub target_language: std::string::String,
3197
3198 pub(crate) _unknown_fields: serde_json::Map<std::string::String, serde_json::Value>,
3199}
3200
3201impl TranslationConfig {
3202 pub fn new() -> Self {
3203 std::default::Default::default()
3204 }
3205
3206 /// Sets the value of [target_language][crate::model::TranslationConfig::target_language].
3207 ///
3208 /// # Example
3209 /// ```ignore,no_run
3210 /// # use google_cloud_speech_v2::model::TranslationConfig;
3211 /// let x = TranslationConfig::new().set_target_language("example");
3212 /// ```
3213 pub fn set_target_language<T: std::convert::Into<std::string::String>>(mut self, v: T) -> Self {
3214 self.target_language = v.into();
3215 self
3216 }
3217}
3218
3219impl wkt::message::Message for TranslationConfig {
3220 fn typename() -> &'static str {
3221 "type.googleapis.com/google.cloud.speech.v2.TranslationConfig"
3222 }
3223}
3224
3225/// Provides "hints" to the speech recognizer to favor specific words and phrases
3226/// in the results. PhraseSets can be specified as an inline resource, or a
3227/// reference to an existing PhraseSet resource.
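///
/// # Example
/// A minimal sketch combining a referenced PhraseSet with an inline one; the
/// resource name below is a placeholder:
/// ```ignore,no_run
/// use google_cloud_speech_v2::model::PhraseSet;
/// use google_cloud_speech_v2::model::SpeechAdaptation;
/// use google_cloud_speech_v2::model::speech_adaptation::AdaptationPhraseSet;
/// let adaptation = SpeechAdaptation::new().set_phrase_sets([
///     AdaptationPhraseSet::new()
///         .set_phrase_set("projects/{project}/locations/{location}/phraseSets/{phrase_set}"),
///     AdaptationPhraseSet::new().set_inline_phrase_set(PhraseSet::default()/* use setters */),
/// ]);
/// ```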
3228#[derive(Clone, Default, PartialEq)]
3229#[non_exhaustive]
3230pub struct SpeechAdaptation {
3231 /// A list of inline or referenced PhraseSets.
3232 pub phrase_sets: std::vec::Vec<crate::model::speech_adaptation::AdaptationPhraseSet>,
3233
3234 /// A list of inline CustomClasses. Existing CustomClass resources can be
3235 /// referenced directly in a PhraseSet.
3236 pub custom_classes: std::vec::Vec<crate::model::CustomClass>,
3237
3238 pub(crate) _unknown_fields: serde_json::Map<std::string::String, serde_json::Value>,
3239}
3240
3241impl SpeechAdaptation {
3242 pub fn new() -> Self {
3243 std::default::Default::default()
3244 }
3245
3246 /// Sets the value of [phrase_sets][crate::model::SpeechAdaptation::phrase_sets].
3247 ///
3248 /// # Example
3249 /// ```ignore,no_run
3250 /// # use google_cloud_speech_v2::model::SpeechAdaptation;
3251 /// use google_cloud_speech_v2::model::speech_adaptation::AdaptationPhraseSet;
3252 /// let x = SpeechAdaptation::new()
3253 /// .set_phrase_sets([
3254 /// AdaptationPhraseSet::default()/* use setters */,
3255 /// AdaptationPhraseSet::default()/* use (different) setters */,
3256 /// ]);
3257 /// ```
3258 pub fn set_phrase_sets<T, V>(mut self, v: T) -> Self
3259 where
3260 T: std::iter::IntoIterator<Item = V>,
3261 V: std::convert::Into<crate::model::speech_adaptation::AdaptationPhraseSet>,
3262 {
3263 use std::iter::Iterator;
3264 self.phrase_sets = v.into_iter().map(|i| i.into()).collect();
3265 self
3266 }
3267
3268 /// Sets the value of [custom_classes][crate::model::SpeechAdaptation::custom_classes].
3269 ///
3270 /// # Example
3271 /// ```ignore,no_run
3272 /// # use google_cloud_speech_v2::model::SpeechAdaptation;
3273 /// use google_cloud_speech_v2::model::CustomClass;
3274 /// let x = SpeechAdaptation::new()
3275 /// .set_custom_classes([
3276 /// CustomClass::default()/* use setters */,
3277 /// CustomClass::default()/* use (different) setters */,
3278 /// ]);
3279 /// ```
3280 pub fn set_custom_classes<T, V>(mut self, v: T) -> Self
3281 where
3282 T: std::iter::IntoIterator<Item = V>,
3283 V: std::convert::Into<crate::model::CustomClass>,
3284 {
3285 use std::iter::Iterator;
3286 self.custom_classes = v.into_iter().map(|i| i.into()).collect();
3287 self
3288 }
3289}
3290
3291impl wkt::message::Message for SpeechAdaptation {
3292 fn typename() -> &'static str {
3293 "type.googleapis.com/google.cloud.speech.v2.SpeechAdaptation"
3294 }
3295}
3296
3297/// Defines additional types related to [SpeechAdaptation].
3298pub mod speech_adaptation {
3299 #[allow(unused_imports)]
3300 use super::*;
3301
3302 /// A biasing PhraseSet, which can be either a string referencing the name of
3303 /// an existing PhraseSet resource, or an inline definition of a PhraseSet.
3304 #[derive(Clone, Default, PartialEq)]
3305 #[non_exhaustive]
3306 pub struct AdaptationPhraseSet {
3307 pub value:
3308 std::option::Option<crate::model::speech_adaptation::adaptation_phrase_set::Value>,
3309
3310 pub(crate) _unknown_fields: serde_json::Map<std::string::String, serde_json::Value>,
3311 }
3312
3313 impl AdaptationPhraseSet {
3314 pub fn new() -> Self {
3315 std::default::Default::default()
3316 }
3317
3318 /// Sets the value of [value][crate::model::speech_adaptation::AdaptationPhraseSet::value].
3319 ///
3320 /// Note that all the setters affecting `value` are mutually
3321 /// exclusive.
3322 ///
3323 /// # Example
3324 /// ```ignore,no_run
3325 /// # use google_cloud_speech_v2::model::speech_adaptation::AdaptationPhraseSet;
3326 /// use google_cloud_speech_v2::model::speech_adaptation::adaptation_phrase_set::Value;
3327 /// let x = AdaptationPhraseSet::new().set_value(Some(Value::PhraseSet("example".to_string())));
3328 /// ```
3329 pub fn set_value<
3330 T: std::convert::Into<
3331 std::option::Option<
3332 crate::model::speech_adaptation::adaptation_phrase_set::Value,
3333 >,
3334 >,
3335 >(
3336 mut self,
3337 v: T,
3338 ) -> Self {
3339 self.value = v.into();
3340 self
3341 }
3342
3343 /// The value of [value][crate::model::speech_adaptation::AdaptationPhraseSet::value]
3344 /// if it holds a `PhraseSet`, `None` if the field is not set or
3345 /// holds a different branch.
3346 pub fn phrase_set(&self) -> std::option::Option<&std::string::String> {
3347 #[allow(unreachable_patterns)]
3348 self.value.as_ref().and_then(|v| match v {
3349 crate::model::speech_adaptation::adaptation_phrase_set::Value::PhraseSet(v) => {
3350 std::option::Option::Some(v)
3351 }
3352 _ => std::option::Option::None,
3353 })
3354 }
3355
3356 /// Sets the value of [value][crate::model::speech_adaptation::AdaptationPhraseSet::value]
3357 /// to hold a `PhraseSet`.
3358 ///
3359 /// Note that all the setters affecting `value` are
3360 /// mutually exclusive.
3361 ///
3362 /// # Example
3363 /// ```ignore,no_run
3364 /// # use google_cloud_speech_v2::model::speech_adaptation::AdaptationPhraseSet;
3365 /// let x = AdaptationPhraseSet::new().set_phrase_set("example");
3366 /// assert!(x.phrase_set().is_some());
3367 /// assert!(x.inline_phrase_set().is_none());
3368 /// ```
3369 pub fn set_phrase_set<T: std::convert::Into<std::string::String>>(mut self, v: T) -> Self {
3370 self.value = std::option::Option::Some(
3371 crate::model::speech_adaptation::adaptation_phrase_set::Value::PhraseSet(v.into()),
3372 );
3373 self
3374 }
3375
3376 /// The value of [value][crate::model::speech_adaptation::AdaptationPhraseSet::value]
3377 /// if it holds an `InlinePhraseSet`, `None` if the field is not set or
3378 /// holds a different branch.
3379 pub fn inline_phrase_set(
3380 &self,
3381 ) -> std::option::Option<&std::boxed::Box<crate::model::PhraseSet>> {
3382 #[allow(unreachable_patterns)]
3383 self.value.as_ref().and_then(|v| match v {
3384 crate::model::speech_adaptation::adaptation_phrase_set::Value::InlinePhraseSet(
3385 v,
3386 ) => std::option::Option::Some(v),
3387 _ => std::option::Option::None,
3388 })
3389 }
3390
3391 /// Sets the value of [value][crate::model::speech_adaptation::AdaptationPhraseSet::value]
3392 /// to hold an `InlinePhraseSet`.
3393 ///
3394 /// Note that all the setters affecting `value` are
3395 /// mutually exclusive.
3396 ///
3397 /// # Example
3398 /// ```ignore,no_run
3399 /// # use google_cloud_speech_v2::model::speech_adaptation::AdaptationPhraseSet;
3400 /// use google_cloud_speech_v2::model::PhraseSet;
3401 /// let x = AdaptationPhraseSet::new().set_inline_phrase_set(PhraseSet::default()/* use setters */);
3402 /// assert!(x.inline_phrase_set().is_some());
3403 /// assert!(x.phrase_set().is_none());
3404 /// ```
3405 pub fn set_inline_phrase_set<
3406 T: std::convert::Into<std::boxed::Box<crate::model::PhraseSet>>,
3407 >(
3408 mut self,
3409 v: T,
3410 ) -> Self {
3411 self.value = std::option::Option::Some(
3412 crate::model::speech_adaptation::adaptation_phrase_set::Value::InlinePhraseSet(
3413 v.into(),
3414 ),
3415 );
3416 self
3417 }
3418 }
3419
3420 impl wkt::message::Message for AdaptationPhraseSet {
3421 fn typename() -> &'static str {
3422 "type.googleapis.com/google.cloud.speech.v2.SpeechAdaptation.AdaptationPhraseSet"
3423 }
3424 }
3425
3426 /// Defines additional types related to [AdaptationPhraseSet].
3427 pub mod adaptation_phrase_set {
3428 #[allow(unused_imports)]
3429 use super::*;
3430
3431 #[derive(Clone, Debug, PartialEq)]
3432 #[non_exhaustive]
3433 pub enum Value {
3434 /// The name of an existing PhraseSet resource. The user must have read
3435 /// access to the resource and it must not be deleted.
3436 PhraseSet(std::string::String),
3437 /// An inline defined PhraseSet.
3438 InlinePhraseSet(std::boxed::Box<crate::model::PhraseSet>),
3439 }
3440 }
3441}
3442
3443/// Denoiser config. May not be supported for all models and may
3444/// have no effect.
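///
/// # Example
/// A minimal sketch; the threshold value is an arbitrary placeholder, not a
/// recommended setting:
/// ```ignore,no_run
/// use google_cloud_speech_v2::model::DenoiserConfig;
/// let denoiser = DenoiserConfig::new()
///     .set_denoise_audio(true)
///     .set_snr_threshold(10.0);
/// ```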
3445#[derive(Clone, Default, PartialEq)]
3446#[non_exhaustive]
3447pub struct DenoiserConfig {
3448 /// Denoise audio before sending to the transcription model.
3449 pub denoise_audio: bool,
3450
3451 /// Signal-to-Noise Ratio (SNR) threshold for the denoiser. Here, SNR refers to
3452 /// the loudness of the speech signal. Audio with an SNR below this threshold,
3453 /// meaning the speech is too quiet, will be prevented from being sent to the
3454 /// transcription model.
3455 ///
3456 /// If `snr_threshold` is 0, no filtering is applied.
3457 pub snr_threshold: f32,
3458
3459 pub(crate) _unknown_fields: serde_json::Map<std::string::String, serde_json::Value>,
3460}
3461
3462impl DenoiserConfig {
3463 pub fn new() -> Self {
3464 std::default::Default::default()
3465 }
3466
3467 /// Sets the value of [denoise_audio][crate::model::DenoiserConfig::denoise_audio].
3468 ///
3469 /// # Example
3470 /// ```ignore,no_run
3471 /// # use google_cloud_speech_v2::model::DenoiserConfig;
3472 /// let x = DenoiserConfig::new().set_denoise_audio(true);
3473 /// ```
3474 pub fn set_denoise_audio<T: std::convert::Into<bool>>(mut self, v: T) -> Self {
3475 self.denoise_audio = v.into();
3476 self
3477 }
3478
3479 /// Sets the value of [snr_threshold][crate::model::DenoiserConfig::snr_threshold].
3480 ///
3481 /// # Example
3482 /// ```ignore,no_run
3483 /// # use google_cloud_speech_v2::model::DenoiserConfig;
3484 /// let x = DenoiserConfig::new().set_snr_threshold(42.0);
3485 /// ```
3486 pub fn set_snr_threshold<T: std::convert::Into<f32>>(mut self, v: T) -> Self {
3487 self.snr_threshold = v.into();
3488 self
3489 }
3490}
3491
3492impl wkt::message::Message for DenoiserConfig {
3493 fn typename() -> &'static str {
3494 "type.googleapis.com/google.cloud.speech.v2.DenoiserConfig"
3495 }
3496}
3497
3498/// Provides information to the Recognizer that specifies how to process the
3499/// recognition request.
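///
/// # Example
/// A minimal sketch of a typical configuration; the model name and language
/// code are placeholders (see the field documentation for supported values):
/// ```ignore,no_run
/// use google_cloud_speech_v2::model::AutoDetectDecodingConfig;
/// use google_cloud_speech_v2::model::RecognitionConfig;
/// use google_cloud_speech_v2::model::RecognitionFeatures;
/// let config = RecognitionConfig::new()
///     .set_model("long")
///     .set_language_codes(["en-US"])
///     .set_auto_decoding_config(AutoDetectDecodingConfig::default())
///     .set_features(RecognitionFeatures::default()/* use setters */);
/// ```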
3500#[derive(Clone, Default, PartialEq)]
3501#[non_exhaustive]
3502pub struct RecognitionConfig {
3503 /// Optional. Which model to use for recognition requests. Select the model
3504 /// best suited to your domain to get best results.
3505 ///
3506 /// Guidance for choosing which model to use can be found in the [Transcription
3507 /// Models
3508 /// Documentation](https://cloud.google.com/speech-to-text/v2/docs/transcription-model)
3509 /// and the models supported in each region can be found in the [Table Of
3510 /// Supported
3511 /// Models](https://cloud.google.com/speech-to-text/v2/docs/speech-to-text-supported-languages).
3512 pub model: std::string::String,
3513
3514 /// Optional. The language of the supplied audio as a
3515 /// [BCP-47](https://www.rfc-editor.org/rfc/bcp/bcp47.txt) language tag.
3516 /// Language tags are normalized to BCP-47 before they are used, e.g., "en-us"
3517 /// becomes "en-US".
3518 ///
3519 /// Supported languages for each model are listed in the [Table of Supported
3520 /// Models](https://cloud.google.com/speech-to-text/v2/docs/speech-to-text-supported-languages).
3521 ///
3522 /// If additional languages are provided, the recognition result will contain
3523 /// recognition in the most likely language detected. The recognition result
3524 /// will include the language tag of the language detected in the audio.
3525 pub language_codes: std::vec::Vec<std::string::String>,
3526
3527 /// Speech recognition features to enable.
3528 pub features: std::option::Option<crate::model::RecognitionFeatures>,
3529
3530 /// Speech adaptation context that weights recognizer predictions for specific
3531 /// words and phrases.
3532 pub adaptation: std::option::Option<crate::model::SpeechAdaptation>,
3533
3534 /// Optional. Use transcription normalization to automatically replace parts of
3535 /// the transcript with phrases of your choosing. For StreamingRecognize, this
3536 /// normalization only applies to stable partial transcripts (stability > 0.8)
3537 /// and final transcripts.
3538 pub transcript_normalization: std::option::Option<crate::model::TranscriptNormalization>,
3539
3540 /// Optional. Configuration used to automatically translate the given audio
3541 /// into the desired language for supported models.
3542 pub translation_config: std::option::Option<crate::model::TranslationConfig>,
3543
3544 /// Optional. Denoiser config. May not be supported for all models
3545 /// and may have no effect.
3546 pub denoiser_config: std::option::Option<crate::model::DenoiserConfig>,
3547
3548 /// Decoding parameters for audio being sent for recognition.
3549 pub decoding_config: std::option::Option<crate::model::recognition_config::DecodingConfig>,
3550
3551 pub(crate) _unknown_fields: serde_json::Map<std::string::String, serde_json::Value>,
3552}
3553
3554impl RecognitionConfig {
3555 pub fn new() -> Self {
3556 std::default::Default::default()
3557 }
3558
3559 /// Sets the value of [model][crate::model::RecognitionConfig::model].
3560 ///
3561 /// # Example
3562 /// ```ignore,no_run
3563 /// # use google_cloud_speech_v2::model::RecognitionConfig;
3564 /// let x = RecognitionConfig::new().set_model("example");
3565 /// ```
3566 pub fn set_model<T: std::convert::Into<std::string::String>>(mut self, v: T) -> Self {
3567 self.model = v.into();
3568 self
3569 }
3570
3571 /// Sets the value of [language_codes][crate::model::RecognitionConfig::language_codes].
3572 ///
3573 /// # Example
3574 /// ```ignore,no_run
3575 /// # use google_cloud_speech_v2::model::RecognitionConfig;
3576 /// let x = RecognitionConfig::new().set_language_codes(["a", "b", "c"]);
3577 /// ```
3578 pub fn set_language_codes<T, V>(mut self, v: T) -> Self
3579 where
3580 T: std::iter::IntoIterator<Item = V>,
3581 V: std::convert::Into<std::string::String>,
3582 {
3583 use std::iter::Iterator;
3584 self.language_codes = v.into_iter().map(|i| i.into()).collect();
3585 self
3586 }
3587
3588 /// Sets the value of [features][crate::model::RecognitionConfig::features].
3589 ///
3590 /// # Example
3591 /// ```ignore,no_run
3592 /// # use google_cloud_speech_v2::model::RecognitionConfig;
3593 /// use google_cloud_speech_v2::model::RecognitionFeatures;
3594 /// let x = RecognitionConfig::new().set_features(RecognitionFeatures::default()/* use setters */);
3595 /// ```
3596 pub fn set_features<T>(mut self, v: T) -> Self
3597 where
3598 T: std::convert::Into<crate::model::RecognitionFeatures>,
3599 {
3600 self.features = std::option::Option::Some(v.into());
3601 self
3602 }
3603
3604 /// Sets or clears the value of [features][crate::model::RecognitionConfig::features].
3605 ///
3606 /// # Example
3607 /// ```ignore,no_run
3608 /// # use google_cloud_speech_v2::model::RecognitionConfig;
3609 /// use google_cloud_speech_v2::model::RecognitionFeatures;
3610 /// let x = RecognitionConfig::new().set_or_clear_features(Some(RecognitionFeatures::default()/* use setters */));
3611 /// let x = RecognitionConfig::new().set_or_clear_features(None::<RecognitionFeatures>);
3612 /// ```
3613 pub fn set_or_clear_features<T>(mut self, v: std::option::Option<T>) -> Self
3614 where
3615 T: std::convert::Into<crate::model::RecognitionFeatures>,
3616 {
3617 self.features = v.map(|x| x.into());
3618 self
3619 }
3620
3621 /// Sets the value of [adaptation][crate::model::RecognitionConfig::adaptation].
3622 ///
3623 /// # Example
3624 /// ```ignore,no_run
3625 /// # use google_cloud_speech_v2::model::RecognitionConfig;
3626 /// use google_cloud_speech_v2::model::SpeechAdaptation;
3627 /// let x = RecognitionConfig::new().set_adaptation(SpeechAdaptation::default()/* use setters */);
3628 /// ```
3629 pub fn set_adaptation<T>(mut self, v: T) -> Self
3630 where
3631 T: std::convert::Into<crate::model::SpeechAdaptation>,
3632 {
3633 self.adaptation = std::option::Option::Some(v.into());
3634 self
3635 }
3636
3637 /// Sets or clears the value of [adaptation][crate::model::RecognitionConfig::adaptation].
3638 ///
3639 /// # Example
3640 /// ```ignore,no_run
3641 /// # use google_cloud_speech_v2::model::RecognitionConfig;
3642 /// use google_cloud_speech_v2::model::SpeechAdaptation;
3643 /// let x = RecognitionConfig::new().set_or_clear_adaptation(Some(SpeechAdaptation::default()/* use setters */));
3644 /// let x = RecognitionConfig::new().set_or_clear_adaptation(None::<SpeechAdaptation>);
3645 /// ```
3646 pub fn set_or_clear_adaptation<T>(mut self, v: std::option::Option<T>) -> Self
3647 where
3648 T: std::convert::Into<crate::model::SpeechAdaptation>,
3649 {
3650 self.adaptation = v.map(|x| x.into());
3651 self
3652 }
3653
3654 /// Sets the value of [transcript_normalization][crate::model::RecognitionConfig::transcript_normalization].
3655 ///
3656 /// # Example
3657 /// ```ignore,no_run
3658 /// # use google_cloud_speech_v2::model::RecognitionConfig;
3659 /// use google_cloud_speech_v2::model::TranscriptNormalization;
3660 /// let x = RecognitionConfig::new().set_transcript_normalization(TranscriptNormalization::default()/* use setters */);
3661 /// ```
3662 pub fn set_transcript_normalization<T>(mut self, v: T) -> Self
3663 where
3664 T: std::convert::Into<crate::model::TranscriptNormalization>,
3665 {
3666 self.transcript_normalization = std::option::Option::Some(v.into());
3667 self
3668 }
3669
3670 /// Sets or clears the value of [transcript_normalization][crate::model::RecognitionConfig::transcript_normalization].
3671 ///
3672 /// # Example
3673 /// ```ignore,no_run
3674 /// # use google_cloud_speech_v2::model::RecognitionConfig;
3675 /// use google_cloud_speech_v2::model::TranscriptNormalization;
3676 /// let x = RecognitionConfig::new().set_or_clear_transcript_normalization(Some(TranscriptNormalization::default()/* use setters */));
3677 /// let x = RecognitionConfig::new().set_or_clear_transcript_normalization(None::<TranscriptNormalization>);
3678 /// ```
3679 pub fn set_or_clear_transcript_normalization<T>(mut self, v: std::option::Option<T>) -> Self
3680 where
3681 T: std::convert::Into<crate::model::TranscriptNormalization>,
3682 {
3683 self.transcript_normalization = v.map(|x| x.into());
3684 self
3685 }
3686
3687 /// Sets the value of [translation_config][crate::model::RecognitionConfig::translation_config].
3688 ///
3689 /// # Example
3690 /// ```ignore,no_run
3691 /// # use google_cloud_speech_v2::model::RecognitionConfig;
3692 /// use google_cloud_speech_v2::model::TranslationConfig;
3693 /// let x = RecognitionConfig::new().set_translation_config(TranslationConfig::default()/* use setters */);
3694 /// ```
3695 pub fn set_translation_config<T>(mut self, v: T) -> Self
3696 where
3697 T: std::convert::Into<crate::model::TranslationConfig>,
3698 {
3699 self.translation_config = std::option::Option::Some(v.into());
3700 self
3701 }
3702
3703 /// Sets or clears the value of [translation_config][crate::model::RecognitionConfig::translation_config].
3704 ///
3705 /// # Example
3706 /// ```ignore,no_run
3707 /// # use google_cloud_speech_v2::model::RecognitionConfig;
3708 /// use google_cloud_speech_v2::model::TranslationConfig;
3709 /// let x = RecognitionConfig::new().set_or_clear_translation_config(Some(TranslationConfig::default()/* use setters */));
3710 /// let x = RecognitionConfig::new().set_or_clear_translation_config(None::<TranslationConfig>);
3711 /// ```
3712 pub fn set_or_clear_translation_config<T>(mut self, v: std::option::Option<T>) -> Self
3713 where
3714 T: std::convert::Into<crate::model::TranslationConfig>,
3715 {
3716 self.translation_config = v.map(|x| x.into());
3717 self
3718 }
3719
3720 /// Sets the value of [denoiser_config][crate::model::RecognitionConfig::denoiser_config].
3721 ///
3722 /// # Example
3723 /// ```ignore,no_run
3724 /// # use google_cloud_speech_v2::model::RecognitionConfig;
3725 /// use google_cloud_speech_v2::model::DenoiserConfig;
3726 /// let x = RecognitionConfig::new().set_denoiser_config(DenoiserConfig::default()/* use setters */);
3727 /// ```
3728 pub fn set_denoiser_config<T>(mut self, v: T) -> Self
3729 where
3730 T: std::convert::Into<crate::model::DenoiserConfig>,
3731 {
3732 self.denoiser_config = std::option::Option::Some(v.into());
3733 self
3734 }
3735
3736 /// Sets or clears the value of [denoiser_config][crate::model::RecognitionConfig::denoiser_config].
3737 ///
3738 /// # Example
3739 /// ```ignore,no_run
3740 /// # use google_cloud_speech_v2::model::RecognitionConfig;
3741 /// use google_cloud_speech_v2::model::DenoiserConfig;
3742 /// let x = RecognitionConfig::new().set_or_clear_denoiser_config(Some(DenoiserConfig::default()/* use setters */));
3743 /// let x = RecognitionConfig::new().set_or_clear_denoiser_config(None::<DenoiserConfig>);
3744 /// ```
3745 pub fn set_or_clear_denoiser_config<T>(mut self, v: std::option::Option<T>) -> Self
3746 where
3747 T: std::convert::Into<crate::model::DenoiserConfig>,
3748 {
3749 self.denoiser_config = v.map(|x| x.into());
3750 self
3751 }
3752
3753 /// Sets the value of [decoding_config][crate::model::RecognitionConfig::decoding_config].
3754 ///
3755 /// Note that all the setters affecting `decoding_config` are mutually
3756 /// exclusive.
3757 ///
3758 /// # Example
3759 /// ```ignore,no_run
3760 /// # use google_cloud_speech_v2::model::RecognitionConfig;
3761 /// use google_cloud_speech_v2::model::AutoDetectDecodingConfig;
3762 /// let x = RecognitionConfig::new().set_decoding_config(Some(
3763 /// google_cloud_speech_v2::model::recognition_config::DecodingConfig::AutoDecodingConfig(AutoDetectDecodingConfig::default().into())));
3764 /// ```
3765 pub fn set_decoding_config<
3766 T: std::convert::Into<std::option::Option<crate::model::recognition_config::DecodingConfig>>,
3767 >(
3768 mut self,
3769 v: T,
3770 ) -> Self {
3771 self.decoding_config = v.into();
3772 self
3773 }
3774
3775 /// The value of [decoding_config][crate::model::RecognitionConfig::decoding_config]
3776 /// if it holds an `AutoDecodingConfig`, `None` if the field is not set or
3777 /// holds a different branch.
3778 pub fn auto_decoding_config(
3779 &self,
3780 ) -> std::option::Option<&std::boxed::Box<crate::model::AutoDetectDecodingConfig>> {
3781 #[allow(unreachable_patterns)]
3782 self.decoding_config.as_ref().and_then(|v| match v {
3783 crate::model::recognition_config::DecodingConfig::AutoDecodingConfig(v) => {
3784 std::option::Option::Some(v)
3785 }
3786 _ => std::option::Option::None,
3787 })
3788 }
3789
3790 /// Sets the value of [decoding_config][crate::model::RecognitionConfig::decoding_config]
3791 /// to hold an `AutoDecodingConfig`.
3792 ///
3793 /// Note that all the setters affecting `decoding_config` are
3794 /// mutually exclusive.
3795 ///
3796 /// # Example
3797 /// ```ignore,no_run
3798 /// # use google_cloud_speech_v2::model::RecognitionConfig;
3799 /// use google_cloud_speech_v2::model::AutoDetectDecodingConfig;
3800 /// let x = RecognitionConfig::new().set_auto_decoding_config(AutoDetectDecodingConfig::default()/* use setters */);
3801 /// assert!(x.auto_decoding_config().is_some());
3802 /// assert!(x.explicit_decoding_config().is_none());
3803 /// ```
3804 pub fn set_auto_decoding_config<
3805 T: std::convert::Into<std::boxed::Box<crate::model::AutoDetectDecodingConfig>>,
3806 >(
3807 mut self,
3808 v: T,
3809 ) -> Self {
3810 self.decoding_config = std::option::Option::Some(
3811 crate::model::recognition_config::DecodingConfig::AutoDecodingConfig(v.into()),
3812 );
3813 self
3814 }
3815
3816 /// The value of [decoding_config][crate::model::RecognitionConfig::decoding_config]
3817 /// if it holds an `ExplicitDecodingConfig`, `None` if the field is not set or
3818 /// holds a different branch.
3819 pub fn explicit_decoding_config(
3820 &self,
3821 ) -> std::option::Option<&std::boxed::Box<crate::model::ExplicitDecodingConfig>> {
3822 #[allow(unreachable_patterns)]
3823 self.decoding_config.as_ref().and_then(|v| match v {
3824 crate::model::recognition_config::DecodingConfig::ExplicitDecodingConfig(v) => {
3825 std::option::Option::Some(v)
3826 }
3827 _ => std::option::Option::None,
3828 })
3829 }
3830
3831 /// Sets the value of [decoding_config][crate::model::RecognitionConfig::decoding_config]
3832 /// to hold an `ExplicitDecodingConfig`.
3833 ///
3834 /// Note that all the setters affecting `decoding_config` are
3835 /// mutually exclusive.
3836 ///
3837 /// # Example
3838 /// ```ignore,no_run
3839 /// # use google_cloud_speech_v2::model::RecognitionConfig;
3840 /// use google_cloud_speech_v2::model::ExplicitDecodingConfig;
3841 /// let x = RecognitionConfig::new().set_explicit_decoding_config(ExplicitDecodingConfig::default()/* use setters */);
3842 /// assert!(x.explicit_decoding_config().is_some());
3843 /// assert!(x.auto_decoding_config().is_none());
3844 /// ```
3845 pub fn set_explicit_decoding_config<
3846 T: std::convert::Into<std::boxed::Box<crate::model::ExplicitDecodingConfig>>,
3847 >(
3848 mut self,
3849 v: T,
3850 ) -> Self {
3851 self.decoding_config = std::option::Option::Some(
3852 crate::model::recognition_config::DecodingConfig::ExplicitDecodingConfig(v.into()),
3853 );
3854 self
3855 }
3856}
3857
3858impl wkt::message::Message for RecognitionConfig {
3859 fn typename() -> &'static str {
3860 "type.googleapis.com/google.cloud.speech.v2.RecognitionConfig"
3861 }
3862}
3863
3864/// Defines additional types related to [RecognitionConfig].
3865pub mod recognition_config {
3866 #[allow(unused_imports)]
3867 use super::*;
3868
3869 /// Decoding parameters for audio being sent for recognition.
3870 #[derive(Clone, Debug, PartialEq)]
3871 #[non_exhaustive]
3872 pub enum DecodingConfig {
3873 /// Automatically detect decoding parameters.
3874 /// Preferred for supported formats.
3875 AutoDecodingConfig(std::boxed::Box<crate::model::AutoDetectDecodingConfig>),
3876 /// Explicitly specified decoding parameters.
3877 /// Required if using headerless PCM audio (linear16, mulaw, alaw).
3878 ExplicitDecodingConfig(std::boxed::Box<crate::model::ExplicitDecodingConfig>),
3879 }
3880}
3881
3882/// Request message for the
3883/// [Recognize][google.cloud.speech.v2.Speech.Recognize] method. Either
3884/// `content` or `uri` must be supplied. Supplying both or neither returns
3885/// [INVALID_ARGUMENT][google.rpc.Code.INVALID_ARGUMENT]. See [content
3886/// limits](https://cloud.google.com/speech-to-text/quotas#content).
3887///
3888/// [google.cloud.speech.v2.Speech.Recognize]: crate::client::Speech::recognize
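///
/// # Example
/// A minimal sketch; the recognizer resource name and Cloud Storage URI are
/// placeholders, and only one of the audio source setters (`set_content` or
/// `set_uri`) should be used:
/// ```ignore,no_run
/// use google_cloud_speech_v2::model::RecognitionConfig;
/// use google_cloud_speech_v2::model::RecognizeRequest;
/// let request = RecognizeRequest::new()
///     .set_recognizer("projects/{project}/locations/{location}/recognizers/_")
///     .set_config(RecognitionConfig::default()/* use setters */)
///     .set_uri("gs://bucket_name/object_name");
/// ```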
3889#[derive(Clone, Default, PartialEq)]
3890#[non_exhaustive]
3891pub struct RecognizeRequest {
3892 /// Required. The name of the Recognizer to use during recognition. The
3893 /// expected format is
3894 /// `projects/{project}/locations/{location}/recognizers/{recognizer}`. The
3895 /// {recognizer} segment may be set to `_` to use an empty implicit Recognizer.
3896 pub recognizer: std::string::String,
3897
3898 /// Features and audio metadata to use for the Automatic Speech Recognition.
3899 /// This field in combination with the
3900 /// [config_mask][google.cloud.speech.v2.RecognizeRequest.config_mask] field
3901 /// can be used to override parts of the
3902 /// [default_recognition_config][google.cloud.speech.v2.Recognizer.default_recognition_config]
3903 /// of the Recognizer resource.
3904 ///
3905 /// [google.cloud.speech.v2.RecognizeRequest.config_mask]: crate::model::RecognizeRequest::config_mask
3906 /// [google.cloud.speech.v2.Recognizer.default_recognition_config]: crate::model::Recognizer::default_recognition_config
3907 pub config: std::option::Option<crate::model::RecognitionConfig>,
3908
3909 /// The list of fields in
3910 /// [config][google.cloud.speech.v2.RecognizeRequest.config] that override the
3911 /// values in the
3912 /// [default_recognition_config][google.cloud.speech.v2.Recognizer.default_recognition_config]
3913 /// of the recognizer during this recognition request. If no mask is provided,
3914 /// all non-default valued fields in
3915 /// [config][google.cloud.speech.v2.RecognizeRequest.config] override the
3916 /// values in the recognizer for this recognition request. If a mask is
3917 /// provided, only the fields listed in the mask override the config in the
3918 /// recognizer for this recognition request. If a wildcard (`*`) is provided,
3919 /// [config][google.cloud.speech.v2.RecognizeRequest.config] completely
3920 /// overrides and replaces the config in the recognizer for this recognition
3921 /// request.
3922 ///
3923 /// [google.cloud.speech.v2.RecognizeRequest.config]: crate::model::RecognizeRequest::config
3924 /// [google.cloud.speech.v2.Recognizer.default_recognition_config]: crate::model::Recognizer::default_recognition_config
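///
/// # Example
/// A sketch of overriding a single field of the recognizer's default
/// configuration; this assumes the `wkt` crate's `FieldMask` exposes a
/// `set_paths` setter, and the `"features"` path is illustrative:
/// ```ignore,no_run
/// use google_cloud_speech_v2::model::RecognizeRequest;
/// use wkt::FieldMask;
/// let request = RecognizeRequest::new()
///     .set_config_mask(FieldMask::default().set_paths(["features"]));
/// ```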
3925 pub config_mask: std::option::Option<wkt::FieldMask>,
3926
3927 /// The audio source, which is either inline content or a Google Cloud
3928 /// Storage URI.
3929 pub audio_source: std::option::Option<crate::model::recognize_request::AudioSource>,
3930
3931 pub(crate) _unknown_fields: serde_json::Map<std::string::String, serde_json::Value>,
3932}
3933
3934impl RecognizeRequest {
3935 pub fn new() -> Self {
3936 std::default::Default::default()
3937 }
3938
3939 /// Sets the value of [recognizer][crate::model::RecognizeRequest::recognizer].
3940 ///
3941 /// # Example
3942 /// ```ignore,no_run
3943 /// # use google_cloud_speech_v2::model::RecognizeRequest;
3944 /// let x = RecognizeRequest::new().set_recognizer("example");
3945 /// ```
3946 pub fn set_recognizer<T: std::convert::Into<std::string::String>>(mut self, v: T) -> Self {
3947 self.recognizer = v.into();
3948 self
3949 }
3950
3951 /// Sets the value of [config][crate::model::RecognizeRequest::config].
3952 ///
3953 /// # Example
3954 /// ```ignore,no_run
3955 /// # use google_cloud_speech_v2::model::RecognizeRequest;
3956 /// use google_cloud_speech_v2::model::RecognitionConfig;
3957 /// let x = RecognizeRequest::new().set_config(RecognitionConfig::default()/* use setters */);
3958 /// ```
3959 pub fn set_config<T>(mut self, v: T) -> Self
3960 where
3961 T: std::convert::Into<crate::model::RecognitionConfig>,
3962 {
3963 self.config = std::option::Option::Some(v.into());
3964 self
3965 }
3966
3967 /// Sets or clears the value of [config][crate::model::RecognizeRequest::config].
3968 ///
3969 /// # Example
3970 /// ```ignore,no_run
3971 /// # use google_cloud_speech_v2::model::RecognizeRequest;
3972 /// use google_cloud_speech_v2::model::RecognitionConfig;
3973 /// let x = RecognizeRequest::new().set_or_clear_config(Some(RecognitionConfig::default()/* use setters */));
3974 /// let x = RecognizeRequest::new().set_or_clear_config(None::<RecognitionConfig>);
3975 /// ```
3976 pub fn set_or_clear_config<T>(mut self, v: std::option::Option<T>) -> Self
3977 where
3978 T: std::convert::Into<crate::model::RecognitionConfig>,
3979 {
3980 self.config = v.map(|x| x.into());
3981 self
3982 }
3983
3984 /// Sets the value of [config_mask][crate::model::RecognizeRequest::config_mask].
3985 ///
3986 /// # Example
3987 /// ```ignore,no_run
3988 /// # use google_cloud_speech_v2::model::RecognizeRequest;
3989 /// use wkt::FieldMask;
3990 /// let x = RecognizeRequest::new().set_config_mask(FieldMask::default()/* use setters */);
3991 /// ```
3992 pub fn set_config_mask<T>(mut self, v: T) -> Self
3993 where
3994 T: std::convert::Into<wkt::FieldMask>,
3995 {
3996 self.config_mask = std::option::Option::Some(v.into());
3997 self
3998 }
3999
4000 /// Sets or clears the value of [config_mask][crate::model::RecognizeRequest::config_mask].
4001 ///
4002 /// # Example
4003 /// ```ignore,no_run
4004 /// # use google_cloud_speech_v2::model::RecognizeRequest;
4005 /// use wkt::FieldMask;
4006 /// let x = RecognizeRequest::new().set_or_clear_config_mask(Some(FieldMask::default()/* use setters */));
4007 /// let x = RecognizeRequest::new().set_or_clear_config_mask(None::<FieldMask>);
4008 /// ```
4009 pub fn set_or_clear_config_mask<T>(mut self, v: std::option::Option<T>) -> Self
4010 where
4011 T: std::convert::Into<wkt::FieldMask>,
4012 {
4013 self.config_mask = v.map(|x| x.into());
4014 self
4015 }
4016
4017 /// Sets the value of [audio_source][crate::model::RecognizeRequest::audio_source].
4018 ///
4019 /// Note that all the setters affecting `audio_source` are mutually
4020 /// exclusive.
4021 ///
4022 /// # Example
4023 /// ```ignore,no_run
4024 /// # use google_cloud_speech_v2::model::RecognizeRequest;
4025 /// use google_cloud_speech_v2::model::recognize_request::AudioSource;
4026 /// let x = RecognizeRequest::new().set_audio_source(Some(AudioSource::Content(bytes::Bytes::from_static(b"example"))));
4027 /// ```
4028 pub fn set_audio_source<
4029 T: std::convert::Into<std::option::Option<crate::model::recognize_request::AudioSource>>,
4030 >(
4031 mut self,
4032 v: T,
4033 ) -> Self {
4034 self.audio_source = v.into();
4035 self
4036 }
4037
4038 /// The value of [audio_source][crate::model::RecognizeRequest::audio_source]
4039 /// if it holds a `Content`, `None` if the field is not set or
4040 /// holds a different branch.
4041 pub fn content(&self) -> std::option::Option<&::bytes::Bytes> {
4042 #[allow(unreachable_patterns)]
4043 self.audio_source.as_ref().and_then(|v| match v {
4044 crate::model::recognize_request::AudioSource::Content(v) => {
4045 std::option::Option::Some(v)
4046 }
4047 _ => std::option::Option::None,
4048 })
4049 }
4050
4051 /// Sets the value of [audio_source][crate::model::RecognizeRequest::audio_source]
4052 /// to hold a `Content`.
4053 ///
4054 /// Note that all the setters affecting `audio_source` are
4055 /// mutually exclusive.
4056 ///
4057 /// # Example
4058 /// ```ignore,no_run
4059 /// # use google_cloud_speech_v2::model::RecognizeRequest;
4060 /// let x = RecognizeRequest::new().set_content(bytes::Bytes::from_static(b"example"));
4061 /// assert!(x.content().is_some());
4062 /// assert!(x.uri().is_none());
4063 /// ```
4064 pub fn set_content<T: std::convert::Into<::bytes::Bytes>>(mut self, v: T) -> Self {
4065 self.audio_source = std::option::Option::Some(
4066 crate::model::recognize_request::AudioSource::Content(v.into()),
4067 );
4068 self
4069 }
4070
4071 /// The value of [audio_source][crate::model::RecognizeRequest::audio_source]
4072 /// if it holds a `Uri`, `None` if the field is not set or
4073 /// holds a different branch.
4074 pub fn uri(&self) -> std::option::Option<&std::string::String> {
4075 #[allow(unreachable_patterns)]
4076 self.audio_source.as_ref().and_then(|v| match v {
4077 crate::model::recognize_request::AudioSource::Uri(v) => std::option::Option::Some(v),
4078 _ => std::option::Option::None,
4079 })
4080 }
4081
4082 /// Sets the value of [audio_source][crate::model::RecognizeRequest::audio_source]
4083 /// to hold a `Uri`.
4084 ///
4085 /// Note that all the setters affecting `audio_source` are
4086 /// mutually exclusive.
4087 ///
4088 /// # Example
4089 /// ```ignore,no_run
4090 /// # use google_cloud_speech_v2::model::RecognizeRequest;
4091 /// let x = RecognizeRequest::new().set_uri("example");
4092 /// assert!(x.uri().is_some());
4093 /// assert!(x.content().is_none());
4094 /// ```
4095 pub fn set_uri<T: std::convert::Into<std::string::String>>(mut self, v: T) -> Self {
4096 self.audio_source =
4097 std::option::Option::Some(crate::model::recognize_request::AudioSource::Uri(v.into()));
4098 self
4099 }
4100}
4101
4102impl wkt::message::Message for RecognizeRequest {
4103 fn typename() -> &'static str {
4104 "type.googleapis.com/google.cloud.speech.v2.RecognizeRequest"
4105 }
4106}
4107
4108/// Defines additional types related to [RecognizeRequest].
4109pub mod recognize_request {
4110 #[allow(unused_imports)]
4111 use super::*;
4112
4113 /// The audio source, which is either inline content or a Google Cloud
4114 /// Storage URI.
4115 #[derive(Clone, Debug, PartialEq)]
4116 #[non_exhaustive]
4117 pub enum AudioSource {
4118 /// The audio data bytes encoded as specified in
4119 /// [RecognitionConfig][google.cloud.speech.v2.RecognitionConfig]. As
4120 /// with all bytes fields, protocol buffers use a pure binary representation,
4121 /// whereas JSON representations use base64.
4122 ///
4123 /// [google.cloud.speech.v2.RecognitionConfig]: crate::model::RecognitionConfig
4124 Content(::bytes::Bytes),
4125 /// URI that points to a file that contains audio data bytes as specified in
4126 /// [RecognitionConfig][google.cloud.speech.v2.RecognitionConfig]. The file
4127 /// must not be compressed (for example, gzip). Currently, only Google Cloud
4128 /// Storage URIs are supported, which must be specified in the following
4129 /// format: `gs://bucket_name/object_name` (other URI formats return
4130 /// [INVALID_ARGUMENT][google.rpc.Code.INVALID_ARGUMENT]). For more
4131 /// information, see [Request
4132 /// URIs](https://cloud.google.com/storage/docs/reference-uris).
4133 ///
4134 /// [google.cloud.speech.v2.RecognitionConfig]: crate::model::RecognitionConfig
4135 Uri(std::string::String),
4136 }
4137}
4138
4139/// Metadata about the recognition request and response.
4140#[derive(Clone, Default, PartialEq)]
4141#[non_exhaustive]
4142pub struct RecognitionResponseMetadata {
4143 /// Global request identifier auto-generated by the API.
4144 pub request_id: std::string::String,
4145
4146 /// When available, billed audio seconds for the corresponding request.
4147 pub total_billed_duration: std::option::Option<wkt::Duration>,
4148
4149 /// Optional. Output only. Provides the prompt used for the recognition
4150 /// request.
4151 pub prompt: std::option::Option<std::string::String>,
4152
4153 pub(crate) _unknown_fields: serde_json::Map<std::string::String, serde_json::Value>,
4154}
4155
4156impl RecognitionResponseMetadata {
4157 pub fn new() -> Self {
4158 std::default::Default::default()
4159 }
4160
4161 /// Sets the value of [request_id][crate::model::RecognitionResponseMetadata::request_id].
4162 ///
4163 /// # Example
4164 /// ```ignore,no_run
4165 /// # use google_cloud_speech_v2::model::RecognitionResponseMetadata;
4166 /// let x = RecognitionResponseMetadata::new().set_request_id("example");
4167 /// ```
4168 pub fn set_request_id<T: std::convert::Into<std::string::String>>(mut self, v: T) -> Self {
4169 self.request_id = v.into();
4170 self
4171 }
4172
4173 /// Sets the value of [total_billed_duration][crate::model::RecognitionResponseMetadata::total_billed_duration].
4174 ///
4175 /// # Example
4176 /// ```ignore,no_run
4177 /// # use google_cloud_speech_v2::model::RecognitionResponseMetadata;
4178 /// use wkt::Duration;
4179 /// let x = RecognitionResponseMetadata::new().set_total_billed_duration(Duration::default()/* use setters */);
4180 /// ```
4181 pub fn set_total_billed_duration<T>(mut self, v: T) -> Self
4182 where
4183 T: std::convert::Into<wkt::Duration>,
4184 {
4185 self.total_billed_duration = std::option::Option::Some(v.into());
4186 self
4187 }
4188
4189 /// Sets or clears the value of [total_billed_duration][crate::model::RecognitionResponseMetadata::total_billed_duration].
4190 ///
4191 /// # Example
4192 /// ```ignore,no_run
4193 /// # use google_cloud_speech_v2::model::RecognitionResponseMetadata;
4194 /// use wkt::Duration;
4195 /// let x = RecognitionResponseMetadata::new().set_or_clear_total_billed_duration(Some(Duration::default()/* use setters */));
4196 /// let x = RecognitionResponseMetadata::new().set_or_clear_total_billed_duration(None::<Duration>);
4197 /// ```
4198 pub fn set_or_clear_total_billed_duration<T>(mut self, v: std::option::Option<T>) -> Self
4199 where
4200 T: std::convert::Into<wkt::Duration>,
4201 {
4202 self.total_billed_duration = v.map(|x| x.into());
4203 self
4204 }
4205
4206 /// Sets the value of [prompt][crate::model::RecognitionResponseMetadata::prompt].
4207 ///
4208 /// # Example
4209 /// ```ignore,no_run
4210 /// # use google_cloud_speech_v2::model::RecognitionResponseMetadata;
4211 /// let x = RecognitionResponseMetadata::new().set_prompt("example");
4212 /// ```
4213 pub fn set_prompt<T>(mut self, v: T) -> Self
4214 where
4215 T: std::convert::Into<std::string::String>,
4216 {
4217 self.prompt = std::option::Option::Some(v.into());
4218 self
4219 }
4220
4221 /// Sets or clears the value of [prompt][crate::model::RecognitionResponseMetadata::prompt].
4222 ///
4223 /// # Example
4224 /// ```ignore,no_run
4225 /// # use google_cloud_speech_v2::model::RecognitionResponseMetadata;
4226 /// let x = RecognitionResponseMetadata::new().set_or_clear_prompt(Some("example"));
4227 /// let x = RecognitionResponseMetadata::new().set_or_clear_prompt(None::<String>);
4228 /// ```
4229 pub fn set_or_clear_prompt<T>(mut self, v: std::option::Option<T>) -> Self
4230 where
4231 T: std::convert::Into<std::string::String>,
4232 {
4233 self.prompt = v.map(|x| x.into());
4234 self
4235 }
4236}
4237
4238impl wkt::message::Message for RecognitionResponseMetadata {
4239 fn typename() -> &'static str {
4240 "type.googleapis.com/google.cloud.speech.v2.RecognitionResponseMetadata"
4241 }
4242}
4243
4244/// Alternative hypotheses (a.k.a. n-best list).
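///
/// # Example
/// A minimal sketch of inspecting an alternative returned by the service; here
/// a default value stands in for one taken from a real response:
/// ```ignore,no_run
/// use google_cloud_speech_v2::model::SpeechRecognitionAlternative;
/// let alternative = SpeechRecognitionAlternative::default();
/// if alternative.confidence > 0.0 {
///     // 0.0 is a sentinel meaning `confidence` was not populated.
///     println!("{} (confidence {:.2})", alternative.transcript, alternative.confidence);
/// }
/// for word in &alternative.words {
///     println!("{} [speaker {}]", word.word, word.speaker_label);
/// }
/// ```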
4245#[derive(Clone, Default, PartialEq)]
4246#[non_exhaustive]
4247pub struct SpeechRecognitionAlternative {
4248 /// Transcript text representing the words that the user spoke.
4249 pub transcript: std::string::String,
4250
4251 /// The confidence estimate between 0.0 and 1.0. A higher number
4252 /// indicates an estimated greater likelihood that the recognized words are
4253 /// correct. This field is set only for the top alternative of a non-streaming
4254 /// result, or of a streaming result where
4255 /// [is_final][google.cloud.speech.v2.StreamingRecognitionResult.is_final] is
4256 /// set to `true`. This field is not guaranteed to be accurate and users should
4257 /// not rely on it always being provided. The default of 0.0 is a sentinel
4258 /// value indicating `confidence` was not set.
4259 ///
4260 /// [google.cloud.speech.v2.StreamingRecognitionResult.is_final]: crate::model::StreamingRecognitionResult::is_final
4261 pub confidence: f32,
4262
4263 /// A list of word-specific information for each recognized word.
4264 /// When the
4265 /// [SpeakerDiarizationConfig][google.cloud.speech.v2.SpeakerDiarizationConfig]
4266 /// is set, you will see all the words from the beginning of the audio.
4267 ///
4268 /// [google.cloud.speech.v2.SpeakerDiarizationConfig]: crate::model::SpeakerDiarizationConfig
4269 pub words: std::vec::Vec<crate::model::WordInfo>,
4270
4271 pub(crate) _unknown_fields: serde_json::Map<std::string::String, serde_json::Value>,
4272}
4273
4274impl SpeechRecognitionAlternative {
4275 pub fn new() -> Self {
4276 std::default::Default::default()
4277 }
4278
4279 /// Sets the value of [transcript][crate::model::SpeechRecognitionAlternative::transcript].
4280 ///
4281 /// # Example
4282 /// ```ignore,no_run
4283 /// # use google_cloud_speech_v2::model::SpeechRecognitionAlternative;
4284 /// let x = SpeechRecognitionAlternative::new().set_transcript("example");
4285 /// ```
4286 pub fn set_transcript<T: std::convert::Into<std::string::String>>(mut self, v: T) -> Self {
4287 self.transcript = v.into();
4288 self
4289 }
4290
4291 /// Sets the value of [confidence][crate::model::SpeechRecognitionAlternative::confidence].
4292 ///
4293 /// # Example
4294 /// ```ignore,no_run
4295 /// # use google_cloud_speech_v2::model::SpeechRecognitionAlternative;
4296 /// let x = SpeechRecognitionAlternative::new().set_confidence(42.0);
4297 /// ```
4298 pub fn set_confidence<T: std::convert::Into<f32>>(mut self, v: T) -> Self {
4299 self.confidence = v.into();
4300 self
4301 }
4302
4303 /// Sets the value of [words][crate::model::SpeechRecognitionAlternative::words].
4304 ///
4305 /// # Example
4306 /// ```ignore,no_run
4307 /// # use google_cloud_speech_v2::model::SpeechRecognitionAlternative;
4308 /// use google_cloud_speech_v2::model::WordInfo;
4309 /// let x = SpeechRecognitionAlternative::new()
4310 /// .set_words([
4311 /// WordInfo::default()/* use setters */,
4312 /// WordInfo::default()/* use (different) setters */,
4313 /// ]);
4314 /// ```
4315 pub fn set_words<T, V>(mut self, v: T) -> Self
4316 where
4317 T: std::iter::IntoIterator<Item = V>,
4318 V: std::convert::Into<crate::model::WordInfo>,
4319 {
4320 use std::iter::Iterator;
4321 self.words = v.into_iter().map(|i| i.into()).collect();
4322 self
4323 }
4324}
4325
4326impl wkt::message::Message for SpeechRecognitionAlternative {
4327 fn typename() -> &'static str {
4328 "type.googleapis.com/google.cloud.speech.v2.SpeechRecognitionAlternative"
4329 }
4330}
4331
4332/// Word-specific information for recognized words.
4333#[derive(Clone, Default, PartialEq)]
4334#[non_exhaustive]
4335pub struct WordInfo {
4336 /// Time offset relative to the beginning of the audio,
4337 /// and corresponding to the start of the spoken word.
4338 /// This field is only set if
4339 /// [enable_word_time_offsets][google.cloud.speech.v2.RecognitionFeatures.enable_word_time_offsets]
4340 /// is `true` and only in the top hypothesis. This is an experimental feature
4341 /// and the accuracy of the time offset can vary.
4342 ///
4343 /// [google.cloud.speech.v2.RecognitionFeatures.enable_word_time_offsets]: crate::model::RecognitionFeatures::enable_word_time_offsets
4344 pub start_offset: std::option::Option<wkt::Duration>,
4345
4346 /// Time offset relative to the beginning of the audio,
4347 /// and corresponding to the end of the spoken word.
4348 /// This field is only set if
4349 /// [enable_word_time_offsets][google.cloud.speech.v2.RecognitionFeatures.enable_word_time_offsets]
4350 /// is `true` and only in the top hypothesis. This is an experimental feature
4351 /// and the accuracy of the time offset can vary.
4352 ///
4353 /// [google.cloud.speech.v2.RecognitionFeatures.enable_word_time_offsets]: crate::model::RecognitionFeatures::enable_word_time_offsets
4354 pub end_offset: std::option::Option<wkt::Duration>,
4355
4356 /// The word corresponding to this set of information.
4357 pub word: std::string::String,
4358
4359 /// The confidence estimate between 0.0 and 1.0. A higher number
4360 /// indicates an estimated greater likelihood that the recognized words are
4361 /// correct. This field is set only for the top alternative of a non-streaming
4362    /// result or of a streaming result where
4363 /// [is_final][google.cloud.speech.v2.StreamingRecognitionResult.is_final] is
4364 /// set to `true`. This field is not guaranteed to be accurate and users should
4365    /// not rely on it always being provided. The default of 0.0 is a sentinel
4366 /// value indicating `confidence` was not set.
4367 ///
4368 /// [google.cloud.speech.v2.StreamingRecognitionResult.is_final]: crate::model::StreamingRecognitionResult::is_final
4369 pub confidence: f32,
4370
4371 /// A distinct label is assigned for every speaker within the audio. This field
4372 /// specifies which one of those speakers was detected to have spoken this
4373 /// word. `speaker_label` is set if
4374 /// [SpeakerDiarizationConfig][google.cloud.speech.v2.SpeakerDiarizationConfig]
4375 /// is given and only in the top alternative.
4376 ///
4377 /// [google.cloud.speech.v2.SpeakerDiarizationConfig]: crate::model::SpeakerDiarizationConfig
4378 pub speaker_label: std::string::String,
4379
4380 pub(crate) _unknown_fields: serde_json::Map<std::string::String, serde_json::Value>,
4381}
4382
4383impl WordInfo {
4384 pub fn new() -> Self {
4385 std::default::Default::default()
4386 }
4387
4388 /// Sets the value of [start_offset][crate::model::WordInfo::start_offset].
4389 ///
4390 /// # Example
4391 /// ```ignore,no_run
4392 /// # use google_cloud_speech_v2::model::WordInfo;
4393 /// use wkt::Duration;
4394 /// let x = WordInfo::new().set_start_offset(Duration::default()/* use setters */);
4395 /// ```
4396 pub fn set_start_offset<T>(mut self, v: T) -> Self
4397 where
4398 T: std::convert::Into<wkt::Duration>,
4399 {
4400 self.start_offset = std::option::Option::Some(v.into());
4401 self
4402 }
4403
4404 /// Sets or clears the value of [start_offset][crate::model::WordInfo::start_offset].
4405 ///
4406 /// # Example
4407 /// ```ignore,no_run
4408 /// # use google_cloud_speech_v2::model::WordInfo;
4409 /// use wkt::Duration;
4410 /// let x = WordInfo::new().set_or_clear_start_offset(Some(Duration::default()/* use setters */));
4411 /// let x = WordInfo::new().set_or_clear_start_offset(None::<Duration>);
4412 /// ```
4413 pub fn set_or_clear_start_offset<T>(mut self, v: std::option::Option<T>) -> Self
4414 where
4415 T: std::convert::Into<wkt::Duration>,
4416 {
4417 self.start_offset = v.map(|x| x.into());
4418 self
4419 }
4420
4421 /// Sets the value of [end_offset][crate::model::WordInfo::end_offset].
4422 ///
4423 /// # Example
4424 /// ```ignore,no_run
4425 /// # use google_cloud_speech_v2::model::WordInfo;
4426 /// use wkt::Duration;
4427 /// let x = WordInfo::new().set_end_offset(Duration::default()/* use setters */);
4428 /// ```
4429 pub fn set_end_offset<T>(mut self, v: T) -> Self
4430 where
4431 T: std::convert::Into<wkt::Duration>,
4432 {
4433 self.end_offset = std::option::Option::Some(v.into());
4434 self
4435 }
4436
4437 /// Sets or clears the value of [end_offset][crate::model::WordInfo::end_offset].
4438 ///
4439 /// # Example
4440 /// ```ignore,no_run
4441 /// # use google_cloud_speech_v2::model::WordInfo;
4442 /// use wkt::Duration;
4443 /// let x = WordInfo::new().set_or_clear_end_offset(Some(Duration::default()/* use setters */));
4444 /// let x = WordInfo::new().set_or_clear_end_offset(None::<Duration>);
4445 /// ```
4446 pub fn set_or_clear_end_offset<T>(mut self, v: std::option::Option<T>) -> Self
4447 where
4448 T: std::convert::Into<wkt::Duration>,
4449 {
4450 self.end_offset = v.map(|x| x.into());
4451 self
4452 }
4453
4454 /// Sets the value of [word][crate::model::WordInfo::word].
4455 ///
4456 /// # Example
4457 /// ```ignore,no_run
4458 /// # use google_cloud_speech_v2::model::WordInfo;
4459 /// let x = WordInfo::new().set_word("example");
4460 /// ```
4461 pub fn set_word<T: std::convert::Into<std::string::String>>(mut self, v: T) -> Self {
4462 self.word = v.into();
4463 self
4464 }
4465
4466 /// Sets the value of [confidence][crate::model::WordInfo::confidence].
4467 ///
4468 /// # Example
4469 /// ```ignore,no_run
4470 /// # use google_cloud_speech_v2::model::WordInfo;
4471 /// let x = WordInfo::new().set_confidence(42.0);
4472 /// ```
4473 pub fn set_confidence<T: std::convert::Into<f32>>(mut self, v: T) -> Self {
4474 self.confidence = v.into();
4475 self
4476 }
4477
4478 /// Sets the value of [speaker_label][crate::model::WordInfo::speaker_label].
4479 ///
4480 /// # Example
4481 /// ```ignore,no_run
4482 /// # use google_cloud_speech_v2::model::WordInfo;
4483 /// let x = WordInfo::new().set_speaker_label("example");
4484 /// ```
4485 pub fn set_speaker_label<T: std::convert::Into<std::string::String>>(mut self, v: T) -> Self {
4486 self.speaker_label = v.into();
4487 self
4488 }
4489}
4490
4491impl wkt::message::Message for WordInfo {
4492 fn typename() -> &'static str {
4493 "type.googleapis.com/google.cloud.speech.v2.WordInfo"
4494 }
4495}
4496
4497/// A speech recognition result corresponding to a portion of the audio.
4498#[derive(Clone, Default, PartialEq)]
4499#[non_exhaustive]
4500pub struct SpeechRecognitionResult {
4501 /// May contain one or more recognition hypotheses. These alternatives are
4502 /// ordered in terms of accuracy, with the top (first) alternative being the
4503 /// most probable, as ranked by the recognizer.
4504 pub alternatives: std::vec::Vec<crate::model::SpeechRecognitionAlternative>,
4505
4506 /// For multi-channel audio, this is the channel number corresponding to the
4507 /// recognized result for the audio from that channel.
4508 /// For `audio_channel_count` = `N`, its output values can range from `1` to
4509 /// `N`.
4510 pub channel_tag: i32,
4511
4512 /// Time offset of the end of this result relative to the beginning of the
4513 /// audio.
4514 pub result_end_offset: std::option::Option<wkt::Duration>,
4515
4516 /// Output only. The [BCP-47](https://www.rfc-editor.org/rfc/bcp/bcp47.txt)
4517 /// language tag of the language in this result. This language code was
4518 /// detected to have the most likelihood of being spoken in the audio.
4519 pub language_code: std::string::String,
4520
4521 pub(crate) _unknown_fields: serde_json::Map<std::string::String, serde_json::Value>,
4522}
4523
4524impl SpeechRecognitionResult {
4525 pub fn new() -> Self {
4526 std::default::Default::default()
4527 }
4528
4529 /// Sets the value of [alternatives][crate::model::SpeechRecognitionResult::alternatives].
4530 ///
4531 /// # Example
4532 /// ```ignore,no_run
4533 /// # use google_cloud_speech_v2::model::SpeechRecognitionResult;
4534 /// use google_cloud_speech_v2::model::SpeechRecognitionAlternative;
4535 /// let x = SpeechRecognitionResult::new()
4536 /// .set_alternatives([
4537 /// SpeechRecognitionAlternative::default()/* use setters */,
4538 /// SpeechRecognitionAlternative::default()/* use (different) setters */,
4539 /// ]);
4540 /// ```
4541 pub fn set_alternatives<T, V>(mut self, v: T) -> Self
4542 where
4543 T: std::iter::IntoIterator<Item = V>,
4544 V: std::convert::Into<crate::model::SpeechRecognitionAlternative>,
4545 {
4546 use std::iter::Iterator;
4547 self.alternatives = v.into_iter().map(|i| i.into()).collect();
4548 self
4549 }
4550
4551 /// Sets the value of [channel_tag][crate::model::SpeechRecognitionResult::channel_tag].
4552 ///
4553 /// # Example
4554 /// ```ignore,no_run
4555 /// # use google_cloud_speech_v2::model::SpeechRecognitionResult;
4556 /// let x = SpeechRecognitionResult::new().set_channel_tag(42);
4557 /// ```
4558 pub fn set_channel_tag<T: std::convert::Into<i32>>(mut self, v: T) -> Self {
4559 self.channel_tag = v.into();
4560 self
4561 }
4562
4563 /// Sets the value of [result_end_offset][crate::model::SpeechRecognitionResult::result_end_offset].
4564 ///
4565 /// # Example
4566 /// ```ignore,no_run
4567 /// # use google_cloud_speech_v2::model::SpeechRecognitionResult;
4568 /// use wkt::Duration;
4569 /// let x = SpeechRecognitionResult::new().set_result_end_offset(Duration::default()/* use setters */);
4570 /// ```
4571 pub fn set_result_end_offset<T>(mut self, v: T) -> Self
4572 where
4573 T: std::convert::Into<wkt::Duration>,
4574 {
4575 self.result_end_offset = std::option::Option::Some(v.into());
4576 self
4577 }
4578
4579 /// Sets or clears the value of [result_end_offset][crate::model::SpeechRecognitionResult::result_end_offset].
4580 ///
4581 /// # Example
4582 /// ```ignore,no_run
4583 /// # use google_cloud_speech_v2::model::SpeechRecognitionResult;
4584 /// use wkt::Duration;
4585 /// let x = SpeechRecognitionResult::new().set_or_clear_result_end_offset(Some(Duration::default()/* use setters */));
4586 /// let x = SpeechRecognitionResult::new().set_or_clear_result_end_offset(None::<Duration>);
4587 /// ```
4588 pub fn set_or_clear_result_end_offset<T>(mut self, v: std::option::Option<T>) -> Self
4589 where
4590 T: std::convert::Into<wkt::Duration>,
4591 {
4592 self.result_end_offset = v.map(|x| x.into());
4593 self
4594 }
4595
4596 /// Sets the value of [language_code][crate::model::SpeechRecognitionResult::language_code].
4597 ///
4598 /// # Example
4599 /// ```ignore,no_run
4600 /// # use google_cloud_speech_v2::model::SpeechRecognitionResult;
4601 /// let x = SpeechRecognitionResult::new().set_language_code("example");
4602 /// ```
4603 pub fn set_language_code<T: std::convert::Into<std::string::String>>(mut self, v: T) -> Self {
4604 self.language_code = v.into();
4605 self
4606 }
4607}
4608
4609impl wkt::message::Message for SpeechRecognitionResult {
4610 fn typename() -> &'static str {
4611 "type.googleapis.com/google.cloud.speech.v2.SpeechRecognitionResult"
4612 }
4613}
4614
4615/// Response message for the
4616/// [Recognize][google.cloud.speech.v2.Speech.Recognize] method.
4617///
4618/// [google.cloud.speech.v2.Speech.Recognize]: crate::client::Speech::recognize
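///
/// # Example
///
/// A minimal sketch of reading the top-ranked transcript from each result; it
/// assumes a `response: RecognizeResponse` already obtained from a `recognize`
/// call.
///
/// ```ignore,no_run
/// # use google_cloud_speech_v2::model::RecognizeResponse;
/// # let response = RecognizeResponse::new();
/// for result in &response.results {
///     // Alternatives are ordered by accuracy; the first is the most probable.
///     if let Some(best) = result.alternatives.first() {
///         println!("{} (confidence {})", best.transcript, best.confidence);
///     }
/// }
/// ```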
4619#[derive(Clone, Default, PartialEq)]
4620#[non_exhaustive]
4621pub struct RecognizeResponse {
4622 /// Sequential list of transcription results corresponding to sequential
4623 /// portions of audio.
4624 pub results: std::vec::Vec<crate::model::SpeechRecognitionResult>,
4625
4626 /// Metadata about the recognition.
4627 pub metadata: std::option::Option<crate::model::RecognitionResponseMetadata>,
4628
4629 pub(crate) _unknown_fields: serde_json::Map<std::string::String, serde_json::Value>,
4630}
4631
4632impl RecognizeResponse {
4633 pub fn new() -> Self {
4634 std::default::Default::default()
4635 }
4636
4637 /// Sets the value of [results][crate::model::RecognizeResponse::results].
4638 ///
4639 /// # Example
4640 /// ```ignore,no_run
4641 /// # use google_cloud_speech_v2::model::RecognizeResponse;
4642 /// use google_cloud_speech_v2::model::SpeechRecognitionResult;
4643 /// let x = RecognizeResponse::new()
4644 /// .set_results([
4645 /// SpeechRecognitionResult::default()/* use setters */,
4646 /// SpeechRecognitionResult::default()/* use (different) setters */,
4647 /// ]);
4648 /// ```
4649 pub fn set_results<T, V>(mut self, v: T) -> Self
4650 where
4651 T: std::iter::IntoIterator<Item = V>,
4652 V: std::convert::Into<crate::model::SpeechRecognitionResult>,
4653 {
4654 use std::iter::Iterator;
4655 self.results = v.into_iter().map(|i| i.into()).collect();
4656 self
4657 }
4658
4659 /// Sets the value of [metadata][crate::model::RecognizeResponse::metadata].
4660 ///
4661 /// # Example
4662 /// ```ignore,no_run
4663 /// # use google_cloud_speech_v2::model::RecognizeResponse;
4664 /// use google_cloud_speech_v2::model::RecognitionResponseMetadata;
4665 /// let x = RecognizeResponse::new().set_metadata(RecognitionResponseMetadata::default()/* use setters */);
4666 /// ```
4667 pub fn set_metadata<T>(mut self, v: T) -> Self
4668 where
4669 T: std::convert::Into<crate::model::RecognitionResponseMetadata>,
4670 {
4671 self.metadata = std::option::Option::Some(v.into());
4672 self
4673 }
4674
4675 /// Sets or clears the value of [metadata][crate::model::RecognizeResponse::metadata].
4676 ///
4677 /// # Example
4678 /// ```ignore,no_run
4679 /// # use google_cloud_speech_v2::model::RecognizeResponse;
4680 /// use google_cloud_speech_v2::model::RecognitionResponseMetadata;
4681 /// let x = RecognizeResponse::new().set_or_clear_metadata(Some(RecognitionResponseMetadata::default()/* use setters */));
4682 /// let x = RecognizeResponse::new().set_or_clear_metadata(None::<RecognitionResponseMetadata>);
4683 /// ```
4684 pub fn set_or_clear_metadata<T>(mut self, v: std::option::Option<T>) -> Self
4685 where
4686 T: std::convert::Into<crate::model::RecognitionResponseMetadata>,
4687 {
4688 self.metadata = v.map(|x| x.into());
4689 self
4690 }
4691}
4692
4693impl wkt::message::Message for RecognizeResponse {
4694 fn typename() -> &'static str {
4695 "type.googleapis.com/google.cloud.speech.v2.RecognizeResponse"
4696 }
4697}
4698
4699/// Available recognition features specific to streaming recognition requests.
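///
/// # Example
///
/// A minimal sketch of enabling interim results and voice activity events for
/// a streaming request.
///
/// ```ignore,no_run
/// # use google_cloud_speech_v2::model::StreamingRecognitionFeatures;
/// let features = StreamingRecognitionFeatures::new()
///     .set_interim_results(true)
///     .set_enable_voice_activity_events(true);
/// ```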
4700#[derive(Clone, Default, PartialEq)]
4701#[non_exhaustive]
4702pub struct StreamingRecognitionFeatures {
4703 /// If `true`, responses with voice activity speech events will be returned as
4704 /// they are detected.
4705 pub enable_voice_activity_events: bool,
4706
4707    /// Whether to stream interim results to the client. If `true`, interim
4708    /// results are streamed to the client as they become available; otherwise,
4709    /// only the final response is streamed back.
4710 pub interim_results: bool,
4711
4712    /// If set, the server will automatically close the stream once the specified
4713    /// duration has elapsed after the last VOICE_ACTIVITY speech event has been
4714    /// sent. The field `enable_voice_activity_events` must also be set to `true`.
4715 pub voice_activity_timeout:
4716 std::option::Option<crate::model::streaming_recognition_features::VoiceActivityTimeout>,
4717
4718 pub(crate) _unknown_fields: serde_json::Map<std::string::String, serde_json::Value>,
4719}
4720
4721impl StreamingRecognitionFeatures {
4722 pub fn new() -> Self {
4723 std::default::Default::default()
4724 }
4725
4726 /// Sets the value of [enable_voice_activity_events][crate::model::StreamingRecognitionFeatures::enable_voice_activity_events].
4727 ///
4728 /// # Example
4729 /// ```ignore,no_run
4730 /// # use google_cloud_speech_v2::model::StreamingRecognitionFeatures;
4731 /// let x = StreamingRecognitionFeatures::new().set_enable_voice_activity_events(true);
4732 /// ```
4733 pub fn set_enable_voice_activity_events<T: std::convert::Into<bool>>(mut self, v: T) -> Self {
4734 self.enable_voice_activity_events = v.into();
4735 self
4736 }
4737
4738 /// Sets the value of [interim_results][crate::model::StreamingRecognitionFeatures::interim_results].
4739 ///
4740 /// # Example
4741 /// ```ignore,no_run
4742 /// # use google_cloud_speech_v2::model::StreamingRecognitionFeatures;
4743 /// let x = StreamingRecognitionFeatures::new().set_interim_results(true);
4744 /// ```
4745 pub fn set_interim_results<T: std::convert::Into<bool>>(mut self, v: T) -> Self {
4746 self.interim_results = v.into();
4747 self
4748 }
4749
4750 /// Sets the value of [voice_activity_timeout][crate::model::StreamingRecognitionFeatures::voice_activity_timeout].
4751 ///
4752 /// # Example
4753 /// ```ignore,no_run
4754 /// # use google_cloud_speech_v2::model::StreamingRecognitionFeatures;
4755 /// use google_cloud_speech_v2::model::streaming_recognition_features::VoiceActivityTimeout;
4756 /// let x = StreamingRecognitionFeatures::new().set_voice_activity_timeout(VoiceActivityTimeout::default()/* use setters */);
4757 /// ```
4758 pub fn set_voice_activity_timeout<T>(mut self, v: T) -> Self
4759 where
4760 T: std::convert::Into<crate::model::streaming_recognition_features::VoiceActivityTimeout>,
4761 {
4762 self.voice_activity_timeout = std::option::Option::Some(v.into());
4763 self
4764 }
4765
4766 /// Sets or clears the value of [voice_activity_timeout][crate::model::StreamingRecognitionFeatures::voice_activity_timeout].
4767 ///
4768 /// # Example
4769 /// ```ignore,no_run
4770 /// # use google_cloud_speech_v2::model::StreamingRecognitionFeatures;
4771 /// use google_cloud_speech_v2::model::streaming_recognition_features::VoiceActivityTimeout;
4772 /// let x = StreamingRecognitionFeatures::new().set_or_clear_voice_activity_timeout(Some(VoiceActivityTimeout::default()/* use setters */));
4773 /// let x = StreamingRecognitionFeatures::new().set_or_clear_voice_activity_timeout(None::<VoiceActivityTimeout>);
4774 /// ```
4775 pub fn set_or_clear_voice_activity_timeout<T>(mut self, v: std::option::Option<T>) -> Self
4776 where
4777 T: std::convert::Into<crate::model::streaming_recognition_features::VoiceActivityTimeout>,
4778 {
4779 self.voice_activity_timeout = v.map(|x| x.into());
4780 self
4781 }
4782}
4783
4784impl wkt::message::Message for StreamingRecognitionFeatures {
4785 fn typename() -> &'static str {
4786 "type.googleapis.com/google.cloud.speech.v2.StreamingRecognitionFeatures"
4787 }
4788}
4789
4790/// Defines additional types related to [StreamingRecognitionFeatures].
4791pub mod streaming_recognition_features {
4792 #[allow(unused_imports)]
4793 use super::*;
4794
4795    /// Timeout settings for voice activity events.
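    ///
    /// # Example
    ///
    /// A minimal sketch that closes the stream after 5 seconds without initial
    /// speech and after 2 seconds of trailing silence; it assumes `wkt::Duration`
    /// provides the `clamp(seconds, nanos)` constructor.
    ///
    /// ```ignore,no_run
    /// # use google_cloud_speech_v2::model::streaming_recognition_features::VoiceActivityTimeout;
    /// use wkt::Duration;
    /// let timeout = VoiceActivityTimeout::new()
    ///     .set_speech_start_timeout(Duration::clamp(5, 0))
    ///     .set_speech_end_timeout(Duration::clamp(2, 0));
    /// ```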
4796 #[derive(Clone, Default, PartialEq)]
4797 #[non_exhaustive]
4798 pub struct VoiceActivityTimeout {
4799 /// Duration to timeout the stream if no speech begins. If this is set and
4800 /// no speech is detected in this duration at the start of the stream, the
4801 /// server will close the stream.
4802 pub speech_start_timeout: std::option::Option<wkt::Duration>,
4803
4804 /// Duration to timeout the stream after speech ends. If this is set and no
4805 /// speech is detected in this duration after speech was detected, the server
4806 /// will close the stream.
4807 pub speech_end_timeout: std::option::Option<wkt::Duration>,
4808
4809 pub(crate) _unknown_fields: serde_json::Map<std::string::String, serde_json::Value>,
4810 }
4811
4812 impl VoiceActivityTimeout {
4813 pub fn new() -> Self {
4814 std::default::Default::default()
4815 }
4816
4817 /// Sets the value of [speech_start_timeout][crate::model::streaming_recognition_features::VoiceActivityTimeout::speech_start_timeout].
4818 ///
4819 /// # Example
4820 /// ```ignore,no_run
4821 /// # use google_cloud_speech_v2::model::streaming_recognition_features::VoiceActivityTimeout;
4822 /// use wkt::Duration;
4823 /// let x = VoiceActivityTimeout::new().set_speech_start_timeout(Duration::default()/* use setters */);
4824 /// ```
4825 pub fn set_speech_start_timeout<T>(mut self, v: T) -> Self
4826 where
4827 T: std::convert::Into<wkt::Duration>,
4828 {
4829 self.speech_start_timeout = std::option::Option::Some(v.into());
4830 self
4831 }
4832
4833 /// Sets or clears the value of [speech_start_timeout][crate::model::streaming_recognition_features::VoiceActivityTimeout::speech_start_timeout].
4834 ///
4835 /// # Example
4836 /// ```ignore,no_run
4837 /// # use google_cloud_speech_v2::model::streaming_recognition_features::VoiceActivityTimeout;
4838 /// use wkt::Duration;
4839 /// let x = VoiceActivityTimeout::new().set_or_clear_speech_start_timeout(Some(Duration::default()/* use setters */));
4840 /// let x = VoiceActivityTimeout::new().set_or_clear_speech_start_timeout(None::<Duration>);
4841 /// ```
4842 pub fn set_or_clear_speech_start_timeout<T>(mut self, v: std::option::Option<T>) -> Self
4843 where
4844 T: std::convert::Into<wkt::Duration>,
4845 {
4846 self.speech_start_timeout = v.map(|x| x.into());
4847 self
4848 }
4849
4850 /// Sets the value of [speech_end_timeout][crate::model::streaming_recognition_features::VoiceActivityTimeout::speech_end_timeout].
4851 ///
4852 /// # Example
4853 /// ```ignore,no_run
4854 /// # use google_cloud_speech_v2::model::streaming_recognition_features::VoiceActivityTimeout;
4855 /// use wkt::Duration;
4856 /// let x = VoiceActivityTimeout::new().set_speech_end_timeout(Duration::default()/* use setters */);
4857 /// ```
4858 pub fn set_speech_end_timeout<T>(mut self, v: T) -> Self
4859 where
4860 T: std::convert::Into<wkt::Duration>,
4861 {
4862 self.speech_end_timeout = std::option::Option::Some(v.into());
4863 self
4864 }
4865
4866 /// Sets or clears the value of [speech_end_timeout][crate::model::streaming_recognition_features::VoiceActivityTimeout::speech_end_timeout].
4867 ///
4868 /// # Example
4869 /// ```ignore,no_run
4870 /// # use google_cloud_speech_v2::model::streaming_recognition_features::VoiceActivityTimeout;
4871 /// use wkt::Duration;
4872 /// let x = VoiceActivityTimeout::new().set_or_clear_speech_end_timeout(Some(Duration::default()/* use setters */));
4873 /// let x = VoiceActivityTimeout::new().set_or_clear_speech_end_timeout(None::<Duration>);
4874 /// ```
4875 pub fn set_or_clear_speech_end_timeout<T>(mut self, v: std::option::Option<T>) -> Self
4876 where
4877 T: std::convert::Into<wkt::Duration>,
4878 {
4879 self.speech_end_timeout = v.map(|x| x.into());
4880 self
4881 }
4882 }
4883
4884 impl wkt::message::Message for VoiceActivityTimeout {
4885 fn typename() -> &'static str {
4886 "type.googleapis.com/google.cloud.speech.v2.StreamingRecognitionFeatures.VoiceActivityTimeout"
4887 }
4888 }
4889}
4890
4891/// Provides configuration information for the StreamingRecognize request.
4892#[derive(Clone, Default, PartialEq)]
4893#[non_exhaustive]
4894pub struct StreamingRecognitionConfig {
4895 /// Required. Features and audio metadata to use for the Automatic Speech
4896 /// Recognition. This field in combination with the
4897 /// [config_mask][google.cloud.speech.v2.StreamingRecognitionConfig.config_mask]
4898 /// field can be used to override parts of the
4899 /// [default_recognition_config][google.cloud.speech.v2.Recognizer.default_recognition_config]
4900 /// of the Recognizer resource.
4901 ///
4902 /// [google.cloud.speech.v2.Recognizer.default_recognition_config]: crate::model::Recognizer::default_recognition_config
4903 /// [google.cloud.speech.v2.StreamingRecognitionConfig.config_mask]: crate::model::StreamingRecognitionConfig::config_mask
4904 pub config: std::option::Option<crate::model::RecognitionConfig>,
4905
4906 /// The list of fields in
4907 /// [config][google.cloud.speech.v2.StreamingRecognitionConfig.config] that
4908 /// override the values in the
4909 /// [default_recognition_config][google.cloud.speech.v2.Recognizer.default_recognition_config]
4910 /// of the recognizer during this recognition request. If no mask is provided,
4911 /// all non-default valued fields in
4912 /// [config][google.cloud.speech.v2.StreamingRecognitionConfig.config] override
4913 /// the values in the Recognizer for this recognition request. If a mask is
4914 /// provided, only the fields listed in the mask override the config in the
4915 /// Recognizer for this recognition request. If a wildcard (`*`) is provided,
4916 /// [config][google.cloud.speech.v2.StreamingRecognitionConfig.config]
4917 /// completely overrides and replaces the config in the recognizer for this
4918 /// recognition request.
4919 ///
4920 /// [google.cloud.speech.v2.Recognizer.default_recognition_config]: crate::model::Recognizer::default_recognition_config
4921 /// [google.cloud.speech.v2.StreamingRecognitionConfig.config]: crate::model::StreamingRecognitionConfig::config
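    ///
    /// # Example
    ///
    /// A minimal sketch that overrides only the `features` portion of the
    /// default recognition config; it assumes `wkt::FieldMask` exposes a
    /// `set_paths` setter accepting an iterator of field paths.
    ///
    /// ```ignore,no_run
    /// # use google_cloud_speech_v2::model::StreamingRecognitionConfig;
    /// use wkt::FieldMask;
    /// let config = StreamingRecognitionConfig::new()
    ///     .set_config_mask(FieldMask::default().set_paths(["features"]));
    /// ```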
4922 pub config_mask: std::option::Option<wkt::FieldMask>,
4923
4924 /// Speech recognition features to enable specific to streaming audio
4925 /// recognition requests.
4926 pub streaming_features: std::option::Option<crate::model::StreamingRecognitionFeatures>,
4927
4928 pub(crate) _unknown_fields: serde_json::Map<std::string::String, serde_json::Value>,
4929}
4930
4931impl StreamingRecognitionConfig {
4932 pub fn new() -> Self {
4933 std::default::Default::default()
4934 }
4935
4936 /// Sets the value of [config][crate::model::StreamingRecognitionConfig::config].
4937 ///
4938 /// # Example
4939 /// ```ignore,no_run
4940 /// # use google_cloud_speech_v2::model::StreamingRecognitionConfig;
4941 /// use google_cloud_speech_v2::model::RecognitionConfig;
4942 /// let x = StreamingRecognitionConfig::new().set_config(RecognitionConfig::default()/* use setters */);
4943 /// ```
4944 pub fn set_config<T>(mut self, v: T) -> Self
4945 where
4946 T: std::convert::Into<crate::model::RecognitionConfig>,
4947 {
4948 self.config = std::option::Option::Some(v.into());
4949 self
4950 }
4951
4952 /// Sets or clears the value of [config][crate::model::StreamingRecognitionConfig::config].
4953 ///
4954 /// # Example
4955 /// ```ignore,no_run
4956 /// # use google_cloud_speech_v2::model::StreamingRecognitionConfig;
4957 /// use google_cloud_speech_v2::model::RecognitionConfig;
4958 /// let x = StreamingRecognitionConfig::new().set_or_clear_config(Some(RecognitionConfig::default()/* use setters */));
4959 /// let x = StreamingRecognitionConfig::new().set_or_clear_config(None::<RecognitionConfig>);
4960 /// ```
4961 pub fn set_or_clear_config<T>(mut self, v: std::option::Option<T>) -> Self
4962 where
4963 T: std::convert::Into<crate::model::RecognitionConfig>,
4964 {
4965 self.config = v.map(|x| x.into());
4966 self
4967 }
4968
4969 /// Sets the value of [config_mask][crate::model::StreamingRecognitionConfig::config_mask].
4970 ///
4971 /// # Example
4972 /// ```ignore,no_run
4973 /// # use google_cloud_speech_v2::model::StreamingRecognitionConfig;
4974 /// use wkt::FieldMask;
4975 /// let x = StreamingRecognitionConfig::new().set_config_mask(FieldMask::default()/* use setters */);
4976 /// ```
4977 pub fn set_config_mask<T>(mut self, v: T) -> Self
4978 where
4979 T: std::convert::Into<wkt::FieldMask>,
4980 {
4981 self.config_mask = std::option::Option::Some(v.into());
4982 self
4983 }
4984
4985 /// Sets or clears the value of [config_mask][crate::model::StreamingRecognitionConfig::config_mask].
4986 ///
4987 /// # Example
4988 /// ```ignore,no_run
4989 /// # use google_cloud_speech_v2::model::StreamingRecognitionConfig;
4990 /// use wkt::FieldMask;
4991 /// let x = StreamingRecognitionConfig::new().set_or_clear_config_mask(Some(FieldMask::default()/* use setters */));
4992 /// let x = StreamingRecognitionConfig::new().set_or_clear_config_mask(None::<FieldMask>);
4993 /// ```
4994 pub fn set_or_clear_config_mask<T>(mut self, v: std::option::Option<T>) -> Self
4995 where
4996 T: std::convert::Into<wkt::FieldMask>,
4997 {
4998 self.config_mask = v.map(|x| x.into());
4999 self
5000 }
5001
5002 /// Sets the value of [streaming_features][crate::model::StreamingRecognitionConfig::streaming_features].
5003 ///
5004 /// # Example
5005 /// ```ignore,no_run
5006 /// # use google_cloud_speech_v2::model::StreamingRecognitionConfig;
5007 /// use google_cloud_speech_v2::model::StreamingRecognitionFeatures;
5008 /// let x = StreamingRecognitionConfig::new().set_streaming_features(StreamingRecognitionFeatures::default()/* use setters */);
5009 /// ```
5010 pub fn set_streaming_features<T>(mut self, v: T) -> Self
5011 where
5012 T: std::convert::Into<crate::model::StreamingRecognitionFeatures>,
5013 {
5014 self.streaming_features = std::option::Option::Some(v.into());
5015 self
5016 }
5017
5018 /// Sets or clears the value of [streaming_features][crate::model::StreamingRecognitionConfig::streaming_features].
5019 ///
5020 /// # Example
5021 /// ```ignore,no_run
5022 /// # use google_cloud_speech_v2::model::StreamingRecognitionConfig;
5023 /// use google_cloud_speech_v2::model::StreamingRecognitionFeatures;
5024 /// let x = StreamingRecognitionConfig::new().set_or_clear_streaming_features(Some(StreamingRecognitionFeatures::default()/* use setters */));
5025 /// let x = StreamingRecognitionConfig::new().set_or_clear_streaming_features(None::<StreamingRecognitionFeatures>);
5026 /// ```
5027 pub fn set_or_clear_streaming_features<T>(mut self, v: std::option::Option<T>) -> Self
5028 where
5029 T: std::convert::Into<crate::model::StreamingRecognitionFeatures>,
5030 {
5031 self.streaming_features = v.map(|x| x.into());
5032 self
5033 }
5034}
5035
5036impl wkt::message::Message for StreamingRecognitionConfig {
5037 fn typename() -> &'static str {
5038 "type.googleapis.com/google.cloud.speech.v2.StreamingRecognitionConfig"
5039 }
5040}
5041
5042/// Request message for the
5043/// [StreamingRecognize][google.cloud.speech.v2.Speech.StreamingRecognize]
5044/// method. Multiple
5045/// [StreamingRecognizeRequest][google.cloud.speech.v2.StreamingRecognizeRequest]
5046/// messages are sent in one call.
5047///
5048/// If the [Recognizer][google.cloud.speech.v2.Recognizer] referenced by
5049/// [recognizer][google.cloud.speech.v2.StreamingRecognizeRequest.recognizer]
5050/// contains a fully specified request configuration, then the stream may
5051/// contain only messages with
5052/// [audio][google.cloud.speech.v2.StreamingRecognizeRequest.audio] set.
5053///
5054/// Otherwise the first message must contain a
5055/// [recognizer][google.cloud.speech.v2.StreamingRecognizeRequest.recognizer] and
5056/// a
5057/// [streaming_config][google.cloud.speech.v2.StreamingRecognizeRequest.streaming_config]
5058/// message that together fully specify the request configuration and must not
5059/// contain [audio][google.cloud.speech.v2.StreamingRecognizeRequest.audio]. All
5060/// subsequent messages must only have
5061/// [audio][google.cloud.speech.v2.StreamingRecognizeRequest.audio] set.
5062///
5063/// [google.cloud.speech.v2.Recognizer]: crate::model::Recognizer
5064/// [google.cloud.speech.v2.StreamingRecognizeRequest]: crate::model::StreamingRecognizeRequest
5065/// [google.cloud.speech.v2.StreamingRecognizeRequest.audio]: crate::model::StreamingRecognizeRequest::streaming_request
5066/// [google.cloud.speech.v2.StreamingRecognizeRequest.recognizer]: crate::model::StreamingRecognizeRequest::recognizer
5067/// [google.cloud.speech.v2.StreamingRecognizeRequest.streaming_config]: crate::model::StreamingRecognizeRequest::streaming_request
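///
/// # Example
///
/// A minimal sketch of the two kinds of messages sent on the stream: a first
/// message carrying the recognizer and streaming configuration, followed by
/// audio-only messages. The recognizer name is a placeholder.
///
/// ```ignore,no_run
/// # use google_cloud_speech_v2::model::StreamingRecognizeRequest;
/// use google_cloud_speech_v2::model::StreamingRecognitionConfig;
/// let first = StreamingRecognizeRequest::new()
///     .set_recognizer("projects/my-project/locations/global/recognizers/_")
///     .set_streaming_config(StreamingRecognitionConfig::default()/* use setters */);
/// // Subsequent messages carry only audio.
/// let next = StreamingRecognizeRequest::new()
///     .set_audio(bytes::Bytes::from_static(b"raw audio bytes"));
/// ```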
5068#[derive(Clone, Default, PartialEq)]
5069#[non_exhaustive]
5070pub struct StreamingRecognizeRequest {
5071 /// Required. The name of the Recognizer to use during recognition. The
5072 /// expected format is
5073 /// `projects/{project}/locations/{location}/recognizers/{recognizer}`. The
5074 /// {recognizer} segment may be set to `_` to use an empty implicit Recognizer.
5075 pub recognizer: std::string::String,
5076
5077 pub streaming_request:
5078 std::option::Option<crate::model::streaming_recognize_request::StreamingRequest>,
5079
5080 pub(crate) _unknown_fields: serde_json::Map<std::string::String, serde_json::Value>,
5081}
5082
5083impl StreamingRecognizeRequest {
5084 pub fn new() -> Self {
5085 std::default::Default::default()
5086 }
5087
5088 /// Sets the value of [recognizer][crate::model::StreamingRecognizeRequest::recognizer].
5089 ///
5090 /// # Example
5091 /// ```ignore,no_run
5092 /// # use google_cloud_speech_v2::model::StreamingRecognizeRequest;
5093 /// let x = StreamingRecognizeRequest::new().set_recognizer("example");
5094 /// ```
5095 pub fn set_recognizer<T: std::convert::Into<std::string::String>>(mut self, v: T) -> Self {
5096 self.recognizer = v.into();
5097 self
5098 }
5099
5100 /// Sets the value of [streaming_request][crate::model::StreamingRecognizeRequest::streaming_request].
5101 ///
5102 /// Note that all the setters affecting `streaming_request` are mutually
5103 /// exclusive.
5104 ///
5105 /// # Example
5106 /// ```ignore,no_run
5107 /// # use google_cloud_speech_v2::model::StreamingRecognizeRequest;
5108 /// use google_cloud_speech_v2::model::streaming_recognize_request::StreamingRequest;
5109 /// let x = StreamingRecognizeRequest::new().set_streaming_request(Some(StreamingRequest::Audio(bytes::Bytes::from_static(b"example"))));
5110 /// ```
5111 pub fn set_streaming_request<
5112 T: std::convert::Into<
5113 std::option::Option<crate::model::streaming_recognize_request::StreamingRequest>,
5114 >,
5115 >(
5116 mut self,
5117 v: T,
5118 ) -> Self {
5119 self.streaming_request = v.into();
5120 self
5121 }
5122
5123 /// The value of [streaming_request][crate::model::StreamingRecognizeRequest::streaming_request]
5124 /// if it holds a `StreamingConfig`, `None` if the field is not set or
5125 /// holds a different branch.
5126 pub fn streaming_config(
5127 &self,
5128 ) -> std::option::Option<&std::boxed::Box<crate::model::StreamingRecognitionConfig>> {
5129 #[allow(unreachable_patterns)]
5130 self.streaming_request.as_ref().and_then(|v| match v {
5131 crate::model::streaming_recognize_request::StreamingRequest::StreamingConfig(v) => {
5132 std::option::Option::Some(v)
5133 }
5134 _ => std::option::Option::None,
5135 })
5136 }
5137
5138 /// Sets the value of [streaming_request][crate::model::StreamingRecognizeRequest::streaming_request]
5139 /// to hold a `StreamingConfig`.
5140 ///
5141 /// Note that all the setters affecting `streaming_request` are
5142 /// mutually exclusive.
5143 ///
5144 /// # Example
5145 /// ```ignore,no_run
5146 /// # use google_cloud_speech_v2::model::StreamingRecognizeRequest;
5147 /// use google_cloud_speech_v2::model::StreamingRecognitionConfig;
5148 /// let x = StreamingRecognizeRequest::new().set_streaming_config(StreamingRecognitionConfig::default()/* use setters */);
5149 /// assert!(x.streaming_config().is_some());
5150 /// assert!(x.audio().is_none());
5151 /// ```
5152 pub fn set_streaming_config<
5153 T: std::convert::Into<std::boxed::Box<crate::model::StreamingRecognitionConfig>>,
5154 >(
5155 mut self,
5156 v: T,
5157 ) -> Self {
5158 self.streaming_request = std::option::Option::Some(
5159 crate::model::streaming_recognize_request::StreamingRequest::StreamingConfig(v.into()),
5160 );
5161 self
5162 }
5163
5164 /// The value of [streaming_request][crate::model::StreamingRecognizeRequest::streaming_request]
5165    /// if it holds an `Audio`, `None` if the field is not set or
5166 /// holds a different branch.
5167 pub fn audio(&self) -> std::option::Option<&::bytes::Bytes> {
5168 #[allow(unreachable_patterns)]
5169 self.streaming_request.as_ref().and_then(|v| match v {
5170 crate::model::streaming_recognize_request::StreamingRequest::Audio(v) => {
5171 std::option::Option::Some(v)
5172 }
5173 _ => std::option::Option::None,
5174 })
5175 }
5176
5177 /// Sets the value of [streaming_request][crate::model::StreamingRecognizeRequest::streaming_request]
5178    /// to hold an `Audio`.
5179 ///
5180 /// Note that all the setters affecting `streaming_request` are
5181 /// mutually exclusive.
5182 ///
5183 /// # Example
5184 /// ```ignore,no_run
5185 /// # use google_cloud_speech_v2::model::StreamingRecognizeRequest;
5186 /// let x = StreamingRecognizeRequest::new().set_audio(bytes::Bytes::from_static(b"example"));
5187 /// assert!(x.audio().is_some());
5188 /// assert!(x.streaming_config().is_none());
5189 /// ```
5190 pub fn set_audio<T: std::convert::Into<::bytes::Bytes>>(mut self, v: T) -> Self {
5191 self.streaming_request = std::option::Option::Some(
5192 crate::model::streaming_recognize_request::StreamingRequest::Audio(v.into()),
5193 );
5194 self
5195 }
5196}
5197
5198impl wkt::message::Message for StreamingRecognizeRequest {
5199 fn typename() -> &'static str {
5200 "type.googleapis.com/google.cloud.speech.v2.StreamingRecognizeRequest"
5201 }
5202}
5203
5204/// Defines additional types related to [StreamingRecognizeRequest].
5205pub mod streaming_recognize_request {
5206 #[allow(unused_imports)]
5207 use super::*;
5208
5209 #[derive(Clone, Debug, PartialEq)]
5210 #[non_exhaustive]
5211 pub enum StreamingRequest {
5212 /// StreamingRecognitionConfig to be used in this recognition attempt.
5213 /// If provided, it will override the default RecognitionConfig stored in the
5214 /// Recognizer.
5215 StreamingConfig(std::boxed::Box<crate::model::StreamingRecognitionConfig>),
5216        /// Inline audio bytes to be recognized.
5217 /// Maximum size for this field is 15 KB per request.
5218 Audio(::bytes::Bytes),
5219 }
5220}
5221
5222/// Request message for the
5223/// [BatchRecognize][google.cloud.speech.v2.Speech.BatchRecognize]
5224/// method.
5225///
5226/// [google.cloud.speech.v2.Speech.BatchRecognize]: crate::client::Speech::batch_recognize
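///
/// # Example
///
/// A minimal sketch of assembling a batch request; the recognizer name is a
/// placeholder, and the file metadata and output configuration still need
/// their own setters.
///
/// ```ignore,no_run
/// # use google_cloud_speech_v2::model::BatchRecognizeRequest;
/// use google_cloud_speech_v2::model::BatchRecognizeFileMetadata;
/// use google_cloud_speech_v2::model::RecognitionOutputConfig;
/// use google_cloud_speech_v2::model::batch_recognize_request::ProcessingStrategy;
/// let request = BatchRecognizeRequest::new()
///     .set_recognizer("projects/my-project/locations/global/recognizers/_")
///     .set_files([BatchRecognizeFileMetadata::default()/* use setters */])
///     .set_recognition_output_config(RecognitionOutputConfig::default()/* use setters */)
///     .set_processing_strategy(ProcessingStrategy::DynamicBatching);
/// ```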
5227#[derive(Clone, Default, PartialEq)]
5228#[non_exhaustive]
5229pub struct BatchRecognizeRequest {
5230 /// Required. The name of the Recognizer to use during recognition. The
5231 /// expected format is
5232 /// `projects/{project}/locations/{location}/recognizers/{recognizer}`. The
5233 /// {recognizer} segment may be set to `_` to use an empty implicit Recognizer.
5234 pub recognizer: std::string::String,
5235
5236 /// Features and audio metadata to use for the Automatic Speech Recognition.
5237 /// This field in combination with the
5238 /// [config_mask][google.cloud.speech.v2.BatchRecognizeRequest.config_mask]
5239 /// field can be used to override parts of the
5240 /// [default_recognition_config][google.cloud.speech.v2.Recognizer.default_recognition_config]
5241 /// of the Recognizer resource.
5242 ///
5243 /// [google.cloud.speech.v2.BatchRecognizeRequest.config_mask]: crate::model::BatchRecognizeRequest::config_mask
5244 /// [google.cloud.speech.v2.Recognizer.default_recognition_config]: crate::model::Recognizer::default_recognition_config
5245 pub config: std::option::Option<crate::model::RecognitionConfig>,
5246
5247 /// The list of fields in
5248 /// [config][google.cloud.speech.v2.BatchRecognizeRequest.config] that override
5249 /// the values in the
5250 /// [default_recognition_config][google.cloud.speech.v2.Recognizer.default_recognition_config]
5251 /// of the recognizer during this recognition request. If no mask is provided,
5252 /// all given fields in
5253 /// [config][google.cloud.speech.v2.BatchRecognizeRequest.config] override the
5254 /// values in the recognizer for this recognition request. If a mask is
5255 /// provided, only the fields listed in the mask override the config in the
5256 /// recognizer for this recognition request. If a wildcard (`*`) is provided,
5257 /// [config][google.cloud.speech.v2.BatchRecognizeRequest.config] completely
5258 /// overrides and replaces the config in the recognizer for this recognition
5259 /// request.
5260 ///
5261 /// [google.cloud.speech.v2.BatchRecognizeRequest.config]: crate::model::BatchRecognizeRequest::config
5262 /// [google.cloud.speech.v2.Recognizer.default_recognition_config]: crate::model::Recognizer::default_recognition_config
5263 pub config_mask: std::option::Option<wkt::FieldMask>,
5264
5265 /// Audio files with file metadata for ASR.
5266 /// The maximum number of files allowed to be specified is 15.
5267 pub files: std::vec::Vec<crate::model::BatchRecognizeFileMetadata>,
5268
5269 /// Configuration options for where to output the transcripts of each file.
5270 pub recognition_output_config: std::option::Option<crate::model::RecognitionOutputConfig>,
5271
5272 /// Processing strategy to use for this request.
5273 pub processing_strategy: crate::model::batch_recognize_request::ProcessingStrategy,
5274
5275 pub(crate) _unknown_fields: serde_json::Map<std::string::String, serde_json::Value>,
5276}
5277
5278impl BatchRecognizeRequest {
5279 pub fn new() -> Self {
5280 std::default::Default::default()
5281 }
5282
5283 /// Sets the value of [recognizer][crate::model::BatchRecognizeRequest::recognizer].
5284 ///
5285 /// # Example
5286 /// ```ignore,no_run
5287 /// # use google_cloud_speech_v2::model::BatchRecognizeRequest;
5288 /// let x = BatchRecognizeRequest::new().set_recognizer("example");
5289 /// ```
5290 pub fn set_recognizer<T: std::convert::Into<std::string::String>>(mut self, v: T) -> Self {
5291 self.recognizer = v.into();
5292 self
5293 }
5294
5295 /// Sets the value of [config][crate::model::BatchRecognizeRequest::config].
5296 ///
5297 /// # Example
5298 /// ```ignore,no_run
5299 /// # use google_cloud_speech_v2::model::BatchRecognizeRequest;
5300 /// use google_cloud_speech_v2::model::RecognitionConfig;
5301 /// let x = BatchRecognizeRequest::new().set_config(RecognitionConfig::default()/* use setters */);
5302 /// ```
5303 pub fn set_config<T>(mut self, v: T) -> Self
5304 where
5305 T: std::convert::Into<crate::model::RecognitionConfig>,
5306 {
5307 self.config = std::option::Option::Some(v.into());
5308 self
5309 }
5310
5311 /// Sets or clears the value of [config][crate::model::BatchRecognizeRequest::config].
5312 ///
5313 /// # Example
5314 /// ```ignore,no_run
5315 /// # use google_cloud_speech_v2::model::BatchRecognizeRequest;
5316 /// use google_cloud_speech_v2::model::RecognitionConfig;
5317 /// let x = BatchRecognizeRequest::new().set_or_clear_config(Some(RecognitionConfig::default()/* use setters */));
5318 /// let x = BatchRecognizeRequest::new().set_or_clear_config(None::<RecognitionConfig>);
5319 /// ```
5320 pub fn set_or_clear_config<T>(mut self, v: std::option::Option<T>) -> Self
5321 where
5322 T: std::convert::Into<crate::model::RecognitionConfig>,
5323 {
5324 self.config = v.map(|x| x.into());
5325 self
5326 }
5327
5328 /// Sets the value of [config_mask][crate::model::BatchRecognizeRequest::config_mask].
5329 ///
5330 /// # Example
5331 /// ```ignore,no_run
5332 /// # use google_cloud_speech_v2::model::BatchRecognizeRequest;
5333 /// use wkt::FieldMask;
5334 /// let x = BatchRecognizeRequest::new().set_config_mask(FieldMask::default()/* use setters */);
5335 /// ```
5336 pub fn set_config_mask<T>(mut self, v: T) -> Self
5337 where
5338 T: std::convert::Into<wkt::FieldMask>,
5339 {
5340 self.config_mask = std::option::Option::Some(v.into());
5341 self
5342 }
5343
5344 /// Sets or clears the value of [config_mask][crate::model::BatchRecognizeRequest::config_mask].
5345 ///
5346 /// # Example
5347 /// ```ignore,no_run
5348 /// # use google_cloud_speech_v2::model::BatchRecognizeRequest;
5349 /// use wkt::FieldMask;
5350 /// let x = BatchRecognizeRequest::new().set_or_clear_config_mask(Some(FieldMask::default()/* use setters */));
5351 /// let x = BatchRecognizeRequest::new().set_or_clear_config_mask(None::<FieldMask>);
5352 /// ```
5353 pub fn set_or_clear_config_mask<T>(mut self, v: std::option::Option<T>) -> Self
5354 where
5355 T: std::convert::Into<wkt::FieldMask>,
5356 {
5357 self.config_mask = v.map(|x| x.into());
5358 self
5359 }
5360
5361 /// Sets the value of [files][crate::model::BatchRecognizeRequest::files].
5362 ///
5363 /// # Example
5364 /// ```ignore,no_run
5365 /// # use google_cloud_speech_v2::model::BatchRecognizeRequest;
5366 /// use google_cloud_speech_v2::model::BatchRecognizeFileMetadata;
5367 /// let x = BatchRecognizeRequest::new()
5368 /// .set_files([
5369 /// BatchRecognizeFileMetadata::default()/* use setters */,
5370 /// BatchRecognizeFileMetadata::default()/* use (different) setters */,
5371 /// ]);
5372 /// ```
5373 pub fn set_files<T, V>(mut self, v: T) -> Self
5374 where
5375 T: std::iter::IntoIterator<Item = V>,
5376 V: std::convert::Into<crate::model::BatchRecognizeFileMetadata>,
5377 {
5378 use std::iter::Iterator;
5379 self.files = v.into_iter().map(|i| i.into()).collect();
5380 self
5381 }
5382
5383 /// Sets the value of [recognition_output_config][crate::model::BatchRecognizeRequest::recognition_output_config].
5384 ///
5385 /// # Example
5386 /// ```ignore,no_run
5387 /// # use google_cloud_speech_v2::model::BatchRecognizeRequest;
5388 /// use google_cloud_speech_v2::model::RecognitionOutputConfig;
5389 /// let x = BatchRecognizeRequest::new().set_recognition_output_config(RecognitionOutputConfig::default()/* use setters */);
5390 /// ```
5391 pub fn set_recognition_output_config<T>(mut self, v: T) -> Self
5392 where
5393 T: std::convert::Into<crate::model::RecognitionOutputConfig>,
5394 {
5395 self.recognition_output_config = std::option::Option::Some(v.into());
5396 self
5397 }
5398
5399 /// Sets or clears the value of [recognition_output_config][crate::model::BatchRecognizeRequest::recognition_output_config].
5400 ///
5401 /// # Example
5402 /// ```ignore,no_run
5403 /// # use google_cloud_speech_v2::model::BatchRecognizeRequest;
5404 /// use google_cloud_speech_v2::model::RecognitionOutputConfig;
5405 /// let x = BatchRecognizeRequest::new().set_or_clear_recognition_output_config(Some(RecognitionOutputConfig::default()/* use setters */));
5406 /// let x = BatchRecognizeRequest::new().set_or_clear_recognition_output_config(None::<RecognitionOutputConfig>);
5407 /// ```
5408 pub fn set_or_clear_recognition_output_config<T>(mut self, v: std::option::Option<T>) -> Self
5409 where
5410 T: std::convert::Into<crate::model::RecognitionOutputConfig>,
5411 {
5412 self.recognition_output_config = v.map(|x| x.into());
5413 self
5414 }
5415
5416 /// Sets the value of [processing_strategy][crate::model::BatchRecognizeRequest::processing_strategy].
5417 ///
5418 /// # Example
5419 /// ```ignore,no_run
5420 /// # use google_cloud_speech_v2::model::BatchRecognizeRequest;
5421 /// use google_cloud_speech_v2::model::batch_recognize_request::ProcessingStrategy;
5422 /// let x0 = BatchRecognizeRequest::new().set_processing_strategy(ProcessingStrategy::DynamicBatching);
5423 /// ```
5424 pub fn set_processing_strategy<
5425 T: std::convert::Into<crate::model::batch_recognize_request::ProcessingStrategy>,
5426 >(
5427 mut self,
5428 v: T,
5429 ) -> Self {
5430 self.processing_strategy = v.into();
5431 self
5432 }
5433}
5434
5435impl wkt::message::Message for BatchRecognizeRequest {
5436 fn typename() -> &'static str {
5437 "type.googleapis.com/google.cloud.speech.v2.BatchRecognizeRequest"
5438 }
5439}
5440
5441/// Defines additional types related to [BatchRecognizeRequest].
5442pub mod batch_recognize_request {
5443 #[allow(unused_imports)]
5444 use super::*;
5445
5446 /// Possible processing strategies for batch requests.
5447 ///
5448 /// # Working with unknown values
5449 ///
5450 /// This enum is defined as `#[non_exhaustive]` because Google Cloud may add
5451 /// additional enum variants at any time. Adding new variants is not considered
5452 /// a breaking change. Applications should write their code in anticipation of:
5453 ///
5454 /// - New values appearing in future releases of the client library, **and**
5455 /// - New values received dynamically, without application changes.
5456 ///
5457 /// Please consult the [Working with enums] section in the user guide for some
5458 /// guidelines.
5459 ///
5460 /// [Working with enums]: https://google-cloud-rust.github.io/working_with_enums.html
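    ///
    /// # Example
    ///
    /// A minimal sketch of matching on the strategy while tolerating values this
    /// client version does not know about.
    ///
    /// ```ignore,no_run
    /// # use google_cloud_speech_v2::model::batch_recognize_request::ProcessingStrategy;
    /// let strategy = ProcessingStrategy::from("DYNAMIC_BATCHING");
    /// match strategy {
    ///     ProcessingStrategy::DynamicBatching => { /* discounted, fulfilled within 24 hours */ }
    ///     ProcessingStrategy::Unspecified => { /* processed immediately */ }
    ///     // The enum is `#[non_exhaustive]`, so always keep a catch-all arm.
    ///     other => println!("unhandled strategy {other}"),
    /// }
    /// ```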
5461 #[derive(Clone, Debug, PartialEq)]
5462 #[non_exhaustive]
5463 pub enum ProcessingStrategy {
5464 /// Default value for the processing strategy. The request is processed as
5465        /// soon as it's received.
5466 Unspecified,
5467 /// If selected, processes the request during lower utilization periods for a
5468 /// price discount. The request is fulfilled within 24 hours.
5469 DynamicBatching,
5470 /// If set, the enum was initialized with an unknown value.
5471 ///
5472 /// Applications can examine the value using [ProcessingStrategy::value] or
5473 /// [ProcessingStrategy::name].
5474 UnknownValue(processing_strategy::UnknownValue),
5475 }
5476
5477 #[doc(hidden)]
5478 pub mod processing_strategy {
5479 #[allow(unused_imports)]
5480 use super::*;
5481 #[derive(Clone, Debug, PartialEq)]
5482 pub struct UnknownValue(pub(crate) wkt::internal::UnknownEnumValue);
5483 }
5484
5485 impl ProcessingStrategy {
5486 /// Gets the enum value.
5487 ///
5488 /// Returns `None` if the enum contains an unknown value deserialized from
5489 /// the string representation of enums.
5490 pub fn value(&self) -> std::option::Option<i32> {
5491 match self {
5492 Self::Unspecified => std::option::Option::Some(0),
5493 Self::DynamicBatching => std::option::Option::Some(1),
5494 Self::UnknownValue(u) => u.0.value(),
5495 }
5496 }
5497
5498 /// Gets the enum value as a string.
5499 ///
5500 /// Returns `None` if the enum contains an unknown value deserialized from
5501 /// the integer representation of enums.
5502 pub fn name(&self) -> std::option::Option<&str> {
5503 match self {
5504 Self::Unspecified => std::option::Option::Some("PROCESSING_STRATEGY_UNSPECIFIED"),
5505 Self::DynamicBatching => std::option::Option::Some("DYNAMIC_BATCHING"),
5506 Self::UnknownValue(u) => u.0.name(),
5507 }
5508 }
5509 }
5510
5511 impl std::default::Default for ProcessingStrategy {
5512 fn default() -> Self {
5513 use std::convert::From;
5514 Self::from(0)
5515 }
5516 }
5517
5518 impl std::fmt::Display for ProcessingStrategy {
5519 fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::result::Result<(), std::fmt::Error> {
5520 wkt::internal::display_enum(f, self.name(), self.value())
5521 }
5522 }
5523
5524 impl std::convert::From<i32> for ProcessingStrategy {
5525 fn from(value: i32) -> Self {
5526 match value {
5527 0 => Self::Unspecified,
5528 1 => Self::DynamicBatching,
5529 _ => Self::UnknownValue(processing_strategy::UnknownValue(
5530 wkt::internal::UnknownEnumValue::Integer(value),
5531 )),
5532 }
5533 }
5534 }
5535
5536 impl std::convert::From<&str> for ProcessingStrategy {
5537 fn from(value: &str) -> Self {
5538 use std::string::ToString;
5539 match value {
5540 "PROCESSING_STRATEGY_UNSPECIFIED" => Self::Unspecified,
5541 "DYNAMIC_BATCHING" => Self::DynamicBatching,
5542 _ => Self::UnknownValue(processing_strategy::UnknownValue(
5543 wkt::internal::UnknownEnumValue::String(value.to_string()),
5544 )),
5545 }
5546 }
5547 }
5548
5549 impl serde::ser::Serialize for ProcessingStrategy {
5550 fn serialize<S>(&self, serializer: S) -> std::result::Result<S::Ok, S::Error>
5551 where
5552 S: serde::Serializer,
5553 {
5554 match self {
5555 Self::Unspecified => serializer.serialize_i32(0),
5556 Self::DynamicBatching => serializer.serialize_i32(1),
5557 Self::UnknownValue(u) => u.0.serialize(serializer),
5558 }
5559 }
5560 }
5561
5562 impl<'de> serde::de::Deserialize<'de> for ProcessingStrategy {
5563 fn deserialize<D>(deserializer: D) -> std::result::Result<Self, D::Error>
5564 where
5565 D: serde::Deserializer<'de>,
5566 {
5567 deserializer.deserialize_any(wkt::internal::EnumVisitor::<ProcessingStrategy>::new(
5568 ".google.cloud.speech.v2.BatchRecognizeRequest.ProcessingStrategy",
5569 ))
5570 }
5571 }
5572}
5573
5574/// Output configurations for Cloud Storage.
5575#[derive(Clone, Default, PartialEq)]
5576#[non_exhaustive]
5577pub struct GcsOutputConfig {
5578 /// The Cloud Storage URI prefix with which recognition results will be
5579 /// written.
5580 pub uri: std::string::String,
5581
5582 pub(crate) _unknown_fields: serde_json::Map<std::string::String, serde_json::Value>,
5583}
5584
5585impl GcsOutputConfig {
5586 pub fn new() -> Self {
5587 std::default::Default::default()
5588 }
5589
5590 /// Sets the value of [uri][crate::model::GcsOutputConfig::uri].
5591 ///
5592 /// # Example
5593 /// ```ignore,no_run
5594 /// # use google_cloud_speech_v2::model::GcsOutputConfig;
5595 /// let x = GcsOutputConfig::new().set_uri("example");
5596 /// ```
5597 pub fn set_uri<T: std::convert::Into<std::string::String>>(mut self, v: T) -> Self {
5598 self.uri = v.into();
5599 self
5600 }
5601}
5602
5603impl wkt::message::Message for GcsOutputConfig {
5604 fn typename() -> &'static str {
5605 "type.googleapis.com/google.cloud.speech.v2.GcsOutputConfig"
5606 }
5607}
5608
5609/// Output configurations for inline response.
5610#[derive(Clone, Default, PartialEq)]
5611#[non_exhaustive]
5612pub struct InlineOutputConfig {
5613 pub(crate) _unknown_fields: serde_json::Map<std::string::String, serde_json::Value>,
5614}
5615
5616impl InlineOutputConfig {
5617 pub fn new() -> Self {
5618 std::default::Default::default()
5619 }
5620}
5621
5622impl wkt::message::Message for InlineOutputConfig {
5623 fn typename() -> &'static str {
5624 "type.googleapis.com/google.cloud.speech.v2.InlineOutputConfig"
5625 }
5626}
5627
5628/// Output configurations for serialized `BatchRecognizeResults` protos.
5629#[derive(Clone, Default, PartialEq)]
5630#[non_exhaustive]
5631pub struct NativeOutputFileFormatConfig {
5632 pub(crate) _unknown_fields: serde_json::Map<std::string::String, serde_json::Value>,
5633}
5634
5635impl NativeOutputFileFormatConfig {
5636 pub fn new() -> Self {
5637 std::default::Default::default()
5638 }
5639}
5640
5641impl wkt::message::Message for NativeOutputFileFormatConfig {
5642 fn typename() -> &'static str {
5643 "type.googleapis.com/google.cloud.speech.v2.NativeOutputFileFormatConfig"
5644 }
5645}
5646
5647/// Output configurations for [WebVTT](https://www.w3.org/TR/webvtt1/) formatted
5648/// subtitle file.
5649#[derive(Clone, Default, PartialEq)]
5650#[non_exhaustive]
5651pub struct VttOutputFileFormatConfig {
5652 pub(crate) _unknown_fields: serde_json::Map<std::string::String, serde_json::Value>,
5653}
5654
5655impl VttOutputFileFormatConfig {
5656 pub fn new() -> Self {
5657 std::default::Default::default()
5658 }
5659}
5660
5661impl wkt::message::Message for VttOutputFileFormatConfig {
5662 fn typename() -> &'static str {
5663 "type.googleapis.com/google.cloud.speech.v2.VttOutputFileFormatConfig"
5664 }
5665}
5666
5667/// Output configurations for [SubRip
5668/// Text](https://www.matroska.org/technical/subtitles.html#srt-subtitles)
5669/// formatted subtitle file.
5670#[derive(Clone, Default, PartialEq)]
5671#[non_exhaustive]
5672pub struct SrtOutputFileFormatConfig {
5673 pub(crate) _unknown_fields: serde_json::Map<std::string::String, serde_json::Value>,
5674}
5675
5676impl SrtOutputFileFormatConfig {
5677 pub fn new() -> Self {
5678 std::default::Default::default()
5679 }
5680}
5681
5682impl wkt::message::Message for SrtOutputFileFormatConfig {
5683 fn typename() -> &'static str {
5684 "type.googleapis.com/google.cloud.speech.v2.SrtOutputFileFormatConfig"
5685 }
5686}
5687
5688/// Configuration for the format of the results stored to `output`.
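///
/// # Example
///
/// A minimal sketch that requests both the native output and a VTT subtitle
/// file for each transcript.
///
/// ```ignore,no_run
/// # use google_cloud_speech_v2::model::OutputFormatConfig;
/// use google_cloud_speech_v2::model::NativeOutputFileFormatConfig;
/// use google_cloud_speech_v2::model::VttOutputFileFormatConfig;
/// let formats = OutputFormatConfig::new()
///     .set_native(NativeOutputFileFormatConfig::new())
///     .set_vtt(VttOutputFileFormatConfig::new());
/// ```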
5689#[derive(Clone, Default, PartialEq)]
5690#[non_exhaustive]
5691pub struct OutputFormatConfig {
5692 /// Configuration for the native output format. If this field is set or if no
5693 /// other output format field is set, then transcripts will be written to the
5694 /// sink in the native format.
5695 pub native: std::option::Option<crate::model::NativeOutputFileFormatConfig>,
5696
5697 /// Configuration for the VTT output format. If this field is set, then
5698 /// transcripts will be written to the sink in the VTT format.
5699 pub vtt: std::option::Option<crate::model::VttOutputFileFormatConfig>,
5700
5701 /// Configuration for the SRT output format. If this field is set, then
5702 /// transcripts will be written to the sink in the SRT format.
5703 pub srt: std::option::Option<crate::model::SrtOutputFileFormatConfig>,
5704
5705 pub(crate) _unknown_fields: serde_json::Map<std::string::String, serde_json::Value>,
5706}
5707
5708impl OutputFormatConfig {
5709 pub fn new() -> Self {
5710 std::default::Default::default()
5711 }
5712
5713 /// Sets the value of [native][crate::model::OutputFormatConfig::native].
5714 ///
5715 /// # Example
5716 /// ```ignore,no_run
5717 /// # use google_cloud_speech_v2::model::OutputFormatConfig;
5718 /// use google_cloud_speech_v2::model::NativeOutputFileFormatConfig;
5719 /// let x = OutputFormatConfig::new().set_native(NativeOutputFileFormatConfig::default()/* use setters */);
5720 /// ```
5721 pub fn set_native<T>(mut self, v: T) -> Self
5722 where
5723 T: std::convert::Into<crate::model::NativeOutputFileFormatConfig>,
5724 {
5725 self.native = std::option::Option::Some(v.into());
5726 self
5727 }
5728
5729 /// Sets or clears the value of [native][crate::model::OutputFormatConfig::native].
5730 ///
5731 /// # Example
5732 /// ```ignore,no_run
5733 /// # use google_cloud_speech_v2::model::OutputFormatConfig;
5734 /// use google_cloud_speech_v2::model::NativeOutputFileFormatConfig;
5735 /// let x = OutputFormatConfig::new().set_or_clear_native(Some(NativeOutputFileFormatConfig::default()/* use setters */));
5736 /// let x = OutputFormatConfig::new().set_or_clear_native(None::<NativeOutputFileFormatConfig>);
5737 /// ```
5738 pub fn set_or_clear_native<T>(mut self, v: std::option::Option<T>) -> Self
5739 where
5740 T: std::convert::Into<crate::model::NativeOutputFileFormatConfig>,
5741 {
5742 self.native = v.map(|x| x.into());
5743 self
5744 }
5745
5746 /// Sets the value of [vtt][crate::model::OutputFormatConfig::vtt].
5747 ///
5748 /// # Example
5749 /// ```ignore,no_run
5750 /// # use google_cloud_speech_v2::model::OutputFormatConfig;
5751 /// use google_cloud_speech_v2::model::VttOutputFileFormatConfig;
5752 /// let x = OutputFormatConfig::new().set_vtt(VttOutputFileFormatConfig::default()/* use setters */);
5753 /// ```
5754 pub fn set_vtt<T>(mut self, v: T) -> Self
5755 where
5756 T: std::convert::Into<crate::model::VttOutputFileFormatConfig>,
5757 {
5758 self.vtt = std::option::Option::Some(v.into());
5759 self
5760 }
5761
5762 /// Sets or clears the value of [vtt][crate::model::OutputFormatConfig::vtt].
5763 ///
5764 /// # Example
5765 /// ```ignore,no_run
5766 /// # use google_cloud_speech_v2::model::OutputFormatConfig;
5767 /// use google_cloud_speech_v2::model::VttOutputFileFormatConfig;
5768 /// let x = OutputFormatConfig::new().set_or_clear_vtt(Some(VttOutputFileFormatConfig::default()/* use setters */));
5769 /// let x = OutputFormatConfig::new().set_or_clear_vtt(None::<VttOutputFileFormatConfig>);
5770 /// ```
5771 pub fn set_or_clear_vtt<T>(mut self, v: std::option::Option<T>) -> Self
5772 where
5773 T: std::convert::Into<crate::model::VttOutputFileFormatConfig>,
5774 {
5775 self.vtt = v.map(|x| x.into());
5776 self
5777 }
5778
5779 /// Sets the value of [srt][crate::model::OutputFormatConfig::srt].
5780 ///
5781 /// # Example
5782 /// ```ignore,no_run
5783 /// # use google_cloud_speech_v2::model::OutputFormatConfig;
5784 /// use google_cloud_speech_v2::model::SrtOutputFileFormatConfig;
5785 /// let x = OutputFormatConfig::new().set_srt(SrtOutputFileFormatConfig::default()/* use setters */);
5786 /// ```
5787 pub fn set_srt<T>(mut self, v: T) -> Self
5788 where
5789 T: std::convert::Into<crate::model::SrtOutputFileFormatConfig>,
5790 {
5791 self.srt = std::option::Option::Some(v.into());
5792 self
5793 }
5794
5795 /// Sets or clears the value of [srt][crate::model::OutputFormatConfig::srt].
5796 ///
5797 /// # Example
5798 /// ```ignore,no_run
5799 /// # use google_cloud_speech_v2::model::OutputFormatConfig;
5800 /// use google_cloud_speech_v2::model::SrtOutputFileFormatConfig;
5801 /// let x = OutputFormatConfig::new().set_or_clear_srt(Some(SrtOutputFileFormatConfig::default()/* use setters */));
5802 /// let x = OutputFormatConfig::new().set_or_clear_srt(None::<SrtOutputFileFormatConfig>);
5803 /// ```
5804 pub fn set_or_clear_srt<T>(mut self, v: std::option::Option<T>) -> Self
5805 where
5806 T: std::convert::Into<crate::model::SrtOutputFileFormatConfig>,
5807 {
5808 self.srt = v.map(|x| x.into());
5809 self
5810 }
5811}
5812
5813impl wkt::message::Message for OutputFormatConfig {
5814 fn typename() -> &'static str {
5815 "type.googleapis.com/google.cloud.speech.v2.OutputFormatConfig"
5816 }
5817}
5818
5819/// Configuration options for the output(s) of recognition.
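///
/// # Example
///
/// A minimal sketch (not part of the generated documentation): write the
/// results to Cloud Storage (configure the `GcsOutputConfig` with its own
/// setters, elided here) and additionally request `SRT` captions.
/// ```ignore,no_run
/// use google_cloud_speech_v2::model::{
///     GcsOutputConfig, OutputFormatConfig, RecognitionOutputConfig,
///     SrtOutputFileFormatConfig,
/// };
/// let output = RecognitionOutputConfig::new()
///     .set_gcs_output_config(GcsOutputConfig::default()/* use setters */)
///     .set_output_format_config(
///         OutputFormatConfig::default().set_srt(SrtOutputFileFormatConfig::default()),
///     );
/// assert!(output.gcs_output_config().is_some());
/// ```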
5820#[derive(Clone, Default, PartialEq)]
5821#[non_exhaustive]
5822pub struct RecognitionOutputConfig {
5823 /// Optional. Configuration for the format of the results stored to `output`.
    /// If unspecified, transcripts will be written in the `NATIVE` format only.
5825 pub output_format_config: std::option::Option<crate::model::OutputFormatConfig>,
5826
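    /// Where the recognition results are delivered: written to Cloud Storage or
    /// returned inline in the response. See the variants of
    /// [Output][crate::model::recognition_output_config::Output].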
5827 pub output: std::option::Option<crate::model::recognition_output_config::Output>,
5828
5829 pub(crate) _unknown_fields: serde_json::Map<std::string::String, serde_json::Value>,
5830}
5831
5832impl RecognitionOutputConfig {
5833 pub fn new() -> Self {
5834 std::default::Default::default()
5835 }
5836
5837 /// Sets the value of [output_format_config][crate::model::RecognitionOutputConfig::output_format_config].
5838 ///
5839 /// # Example
5840 /// ```ignore,no_run
5841 /// # use google_cloud_speech_v2::model::RecognitionOutputConfig;
5842 /// use google_cloud_speech_v2::model::OutputFormatConfig;
5843 /// let x = RecognitionOutputConfig::new().set_output_format_config(OutputFormatConfig::default()/* use setters */);
5844 /// ```
5845 pub fn set_output_format_config<T>(mut self, v: T) -> Self
5846 where
5847 T: std::convert::Into<crate::model::OutputFormatConfig>,
5848 {
5849 self.output_format_config = std::option::Option::Some(v.into());
5850 self
5851 }
5852
5853 /// Sets or clears the value of [output_format_config][crate::model::RecognitionOutputConfig::output_format_config].
5854 ///
5855 /// # Example
5856 /// ```ignore,no_run
5857 /// # use google_cloud_speech_v2::model::RecognitionOutputConfig;
5858 /// use google_cloud_speech_v2::model::OutputFormatConfig;
5859 /// let x = RecognitionOutputConfig::new().set_or_clear_output_format_config(Some(OutputFormatConfig::default()/* use setters */));
5860 /// let x = RecognitionOutputConfig::new().set_or_clear_output_format_config(None::<OutputFormatConfig>);
5861 /// ```
5862 pub fn set_or_clear_output_format_config<T>(mut self, v: std::option::Option<T>) -> Self
5863 where
5864 T: std::convert::Into<crate::model::OutputFormatConfig>,
5865 {
5866 self.output_format_config = v.map(|x| x.into());
5867 self
5868 }
5869
5870 /// Sets the value of [output][crate::model::RecognitionOutputConfig::output].
5871 ///
5872 /// Note that all the setters affecting `output` are mutually
5873 /// exclusive.
5874 ///
5875 /// # Example
5876 /// ```ignore,no_run
5877 /// # use google_cloud_speech_v2::model::RecognitionOutputConfig;
5878 /// use google_cloud_speech_v2::model::GcsOutputConfig;
5879 /// let x = RecognitionOutputConfig::new().set_output(Some(
5880 /// google_cloud_speech_v2::model::recognition_output_config::Output::GcsOutputConfig(GcsOutputConfig::default().into())));
5881 /// ```
5882 pub fn set_output<
5883 T: std::convert::Into<std::option::Option<crate::model::recognition_output_config::Output>>,
5884 >(
5885 mut self,
5886 v: T,
5887 ) -> Self {
5888 self.output = v.into();
5889 self
5890 }
5891
5892 /// The value of [output][crate::model::RecognitionOutputConfig::output]
5893 /// if it holds a `GcsOutputConfig`, `None` if the field is not set or
5894 /// holds a different branch.
5895 pub fn gcs_output_config(
5896 &self,
5897 ) -> std::option::Option<&std::boxed::Box<crate::model::GcsOutputConfig>> {
5898 #[allow(unreachable_patterns)]
5899 self.output.as_ref().and_then(|v| match v {
5900 crate::model::recognition_output_config::Output::GcsOutputConfig(v) => {
5901 std::option::Option::Some(v)
5902 }
5903 _ => std::option::Option::None,
5904 })
5905 }
5906
5907 /// Sets the value of [output][crate::model::RecognitionOutputConfig::output]
5908 /// to hold a `GcsOutputConfig`.
5909 ///
5910 /// Note that all the setters affecting `output` are
5911 /// mutually exclusive.
5912 ///
5913 /// # Example
5914 /// ```ignore,no_run
5915 /// # use google_cloud_speech_v2::model::RecognitionOutputConfig;
5916 /// use google_cloud_speech_v2::model::GcsOutputConfig;
5917 /// let x = RecognitionOutputConfig::new().set_gcs_output_config(GcsOutputConfig::default()/* use setters */);
5918 /// assert!(x.gcs_output_config().is_some());
5919 /// assert!(x.inline_response_config().is_none());
5920 /// ```
5921 pub fn set_gcs_output_config<
5922 T: std::convert::Into<std::boxed::Box<crate::model::GcsOutputConfig>>,
5923 >(
5924 mut self,
5925 v: T,
5926 ) -> Self {
5927 self.output = std::option::Option::Some(
5928 crate::model::recognition_output_config::Output::GcsOutputConfig(v.into()),
5929 );
5930 self
5931 }
5932
5933 /// The value of [output][crate::model::RecognitionOutputConfig::output]
    /// if it holds an `InlineResponseConfig`, `None` if the field is not set or
5935 /// holds a different branch.
5936 pub fn inline_response_config(
5937 &self,
5938 ) -> std::option::Option<&std::boxed::Box<crate::model::InlineOutputConfig>> {
5939 #[allow(unreachable_patterns)]
5940 self.output.as_ref().and_then(|v| match v {
5941 crate::model::recognition_output_config::Output::InlineResponseConfig(v) => {
5942 std::option::Option::Some(v)
5943 }
5944 _ => std::option::Option::None,
5945 })
5946 }
5947
5948 /// Sets the value of [output][crate::model::RecognitionOutputConfig::output]
    /// to hold an `InlineResponseConfig`.
5950 ///
5951 /// Note that all the setters affecting `output` are
5952 /// mutually exclusive.
5953 ///
5954 /// # Example
5955 /// ```ignore,no_run
5956 /// # use google_cloud_speech_v2::model::RecognitionOutputConfig;
5957 /// use google_cloud_speech_v2::model::InlineOutputConfig;
5958 /// let x = RecognitionOutputConfig::new().set_inline_response_config(InlineOutputConfig::default()/* use setters */);
5959 /// assert!(x.inline_response_config().is_some());
5960 /// assert!(x.gcs_output_config().is_none());
5961 /// ```
5962 pub fn set_inline_response_config<
5963 T: std::convert::Into<std::boxed::Box<crate::model::InlineOutputConfig>>,
5964 >(
5965 mut self,
5966 v: T,
5967 ) -> Self {
5968 self.output = std::option::Option::Some(
5969 crate::model::recognition_output_config::Output::InlineResponseConfig(v.into()),
5970 );
5971 self
5972 }
5973}
5974
5975impl wkt::message::Message for RecognitionOutputConfig {
5976 fn typename() -> &'static str {
5977 "type.googleapis.com/google.cloud.speech.v2.RecognitionOutputConfig"
5978 }
5979}
5980
5981/// Defines additional types related to [RecognitionOutputConfig].
5982pub mod recognition_output_config {
5983 #[allow(unused_imports)]
5984 use super::*;
5985
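    /// The destination for the recognition results.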
5986 #[derive(Clone, Debug, PartialEq)]
5987 #[non_exhaustive]
5988 pub enum Output {
5989 /// If this message is populated, recognition results are written to the
5990 /// provided Google Cloud Storage URI.
5991 GcsOutputConfig(std::boxed::Box<crate::model::GcsOutputConfig>),
5992 /// If this message is populated, recognition results are provided in the
5993 /// [BatchRecognizeResponse][google.cloud.speech.v2.BatchRecognizeResponse]
5994 /// message of the Operation when completed. This is only supported when
5995 /// calling [BatchRecognize][google.cloud.speech.v2.Speech.BatchRecognize]
5996 /// with just one audio file.
5997 ///
5998 /// [google.cloud.speech.v2.BatchRecognizeResponse]: crate::model::BatchRecognizeResponse
5999 /// [google.cloud.speech.v2.Speech.BatchRecognize]: crate::client::Speech::batch_recognize
6000 InlineResponseConfig(std::boxed::Box<crate::model::InlineOutputConfig>),
6001 }
6002}
6003
6004/// Response message for
6005/// [BatchRecognize][google.cloud.speech.v2.Speech.BatchRecognize] that is
6006/// packaged into a longrunning [Operation][google.longrunning.Operation].
6007///
6008/// [google.cloud.speech.v2.Speech.BatchRecognize]: crate::client::Speech::batch_recognize
6009/// [google.longrunning.Operation]: longrunning::model::Operation
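///
/// # Example
///
/// A minimal sketch (not part of the generated documentation) of walking the
/// per-file results once the operation completes:
/// ```ignore,no_run
/// # use google_cloud_speech_v2::model::BatchRecognizeResponse;
/// fn summarize(response: &BatchRecognizeResponse) {
///     for (filename, file_result) in &response.results {
///         // `error` is set when transcription of this particular file failed.
///         if file_result.error.is_some() {
///             println!("{filename}: failed");
///         } else {
///             println!("{filename}: transcribed");
///         }
///     }
/// }
/// ```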
6010#[derive(Clone, Default, PartialEq)]
6011#[non_exhaustive]
6012pub struct BatchRecognizeResponse {
6013 /// Map from filename to the final result for that file.
6014 pub results:
6015 std::collections::HashMap<std::string::String, crate::model::BatchRecognizeFileResult>,
6016
6017 /// When available, billed audio seconds for the corresponding request.
6018 pub total_billed_duration: std::option::Option<wkt::Duration>,
6019
6020 pub(crate) _unknown_fields: serde_json::Map<std::string::String, serde_json::Value>,
6021}
6022
6023impl BatchRecognizeResponse {
6024 pub fn new() -> Self {
6025 std::default::Default::default()
6026 }
6027
6028 /// Sets the value of [results][crate::model::BatchRecognizeResponse::results].
6029 ///
6030 /// # Example
6031 /// ```ignore,no_run
6032 /// # use google_cloud_speech_v2::model::BatchRecognizeResponse;
6033 /// use google_cloud_speech_v2::model::BatchRecognizeFileResult;
6034 /// let x = BatchRecognizeResponse::new().set_results([
6035 /// ("key0", BatchRecognizeFileResult::default()/* use setters */),
6036 /// ("key1", BatchRecognizeFileResult::default()/* use (different) setters */),
6037 /// ]);
6038 /// ```
6039 pub fn set_results<T, K, V>(mut self, v: T) -> Self
6040 where
6041 T: std::iter::IntoIterator<Item = (K, V)>,
6042 K: std::convert::Into<std::string::String>,
6043 V: std::convert::Into<crate::model::BatchRecognizeFileResult>,
6044 {
6045 use std::iter::Iterator;
6046 self.results = v.into_iter().map(|(k, v)| (k.into(), v.into())).collect();
6047 self
6048 }
6049
6050 /// Sets the value of [total_billed_duration][crate::model::BatchRecognizeResponse::total_billed_duration].
6051 ///
6052 /// # Example
6053 /// ```ignore,no_run
6054 /// # use google_cloud_speech_v2::model::BatchRecognizeResponse;
6055 /// use wkt::Duration;
6056 /// let x = BatchRecognizeResponse::new().set_total_billed_duration(Duration::default()/* use setters */);
6057 /// ```
6058 pub fn set_total_billed_duration<T>(mut self, v: T) -> Self
6059 where
6060 T: std::convert::Into<wkt::Duration>,
6061 {
6062 self.total_billed_duration = std::option::Option::Some(v.into());
6063 self
6064 }
6065
6066 /// Sets or clears the value of [total_billed_duration][crate::model::BatchRecognizeResponse::total_billed_duration].
6067 ///
6068 /// # Example
6069 /// ```ignore,no_run
6070 /// # use google_cloud_speech_v2::model::BatchRecognizeResponse;
6071 /// use wkt::Duration;
6072 /// let x = BatchRecognizeResponse::new().set_or_clear_total_billed_duration(Some(Duration::default()/* use setters */));
6073 /// let x = BatchRecognizeResponse::new().set_or_clear_total_billed_duration(None::<Duration>);
6074 /// ```
6075 pub fn set_or_clear_total_billed_duration<T>(mut self, v: std::option::Option<T>) -> Self
6076 where
6077 T: std::convert::Into<wkt::Duration>,
6078 {
6079 self.total_billed_duration = v.map(|x| x.into());
6080 self
6081 }
6082}
6083
6084impl wkt::message::Message for BatchRecognizeResponse {
6085 fn typename() -> &'static str {
6086 "type.googleapis.com/google.cloud.speech.v2.BatchRecognizeResponse"
6087 }
6088}
6089
/// Output type for `BatchRecognize` transcripts written to Cloud Storage. Although
/// this message is never returned directly by the API, the transcripts written to
/// Cloud Storage are serialized instances of this message and should be parsed as
/// such.
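///
/// # Example
///
/// A minimal sketch (not part of the generated documentation) of joining the top
/// hypothesis of each result into a single transcript; it assumes the
/// `alternatives` and `transcript` fields on the `SpeechRecognitionResult` and
/// `SpeechRecognitionAlternative` messages.
/// ```ignore,no_run
/// # use google_cloud_speech_v2::model::BatchRecognizeResults;
/// fn full_transcript(results: &BatchRecognizeResults) -> String {
///     results
///         .results
///         .iter()
///         .filter_map(|r| r.alternatives.first())
///         .map(|alt| alt.transcript.as_str())
///         .collect::<Vec<_>>()
///         .join(" ")
/// }
/// ```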
6093#[derive(Clone, Default, PartialEq)]
6094#[non_exhaustive]
6095pub struct BatchRecognizeResults {
6096 /// Sequential list of transcription results corresponding to sequential
6097 /// portions of audio.
6098 pub results: std::vec::Vec<crate::model::SpeechRecognitionResult>,
6099
6100 /// Metadata about the recognition.
6101 pub metadata: std::option::Option<crate::model::RecognitionResponseMetadata>,
6102
6103 pub(crate) _unknown_fields: serde_json::Map<std::string::String, serde_json::Value>,
6104}
6105
6106impl BatchRecognizeResults {
6107 pub fn new() -> Self {
6108 std::default::Default::default()
6109 }
6110
6111 /// Sets the value of [results][crate::model::BatchRecognizeResults::results].
6112 ///
6113 /// # Example
6114 /// ```ignore,no_run
6115 /// # use google_cloud_speech_v2::model::BatchRecognizeResults;
6116 /// use google_cloud_speech_v2::model::SpeechRecognitionResult;
6117 /// let x = BatchRecognizeResults::new()
6118 /// .set_results([
6119 /// SpeechRecognitionResult::default()/* use setters */,
6120 /// SpeechRecognitionResult::default()/* use (different) setters */,
6121 /// ]);
6122 /// ```
6123 pub fn set_results<T, V>(mut self, v: T) -> Self
6124 where
6125 T: std::iter::IntoIterator<Item = V>,
6126 V: std::convert::Into<crate::model::SpeechRecognitionResult>,
6127 {
6128 use std::iter::Iterator;
6129 self.results = v.into_iter().map(|i| i.into()).collect();
6130 self
6131 }
6132
6133 /// Sets the value of [metadata][crate::model::BatchRecognizeResults::metadata].
6134 ///
6135 /// # Example
6136 /// ```ignore,no_run
6137 /// # use google_cloud_speech_v2::model::BatchRecognizeResults;
6138 /// use google_cloud_speech_v2::model::RecognitionResponseMetadata;
6139 /// let x = BatchRecognizeResults::new().set_metadata(RecognitionResponseMetadata::default()/* use setters */);
6140 /// ```
6141 pub fn set_metadata<T>(mut self, v: T) -> Self
6142 where
6143 T: std::convert::Into<crate::model::RecognitionResponseMetadata>,
6144 {
6145 self.metadata = std::option::Option::Some(v.into());
6146 self
6147 }
6148
6149 /// Sets or clears the value of [metadata][crate::model::BatchRecognizeResults::metadata].
6150 ///
6151 /// # Example
6152 /// ```ignore,no_run
6153 /// # use google_cloud_speech_v2::model::BatchRecognizeResults;
6154 /// use google_cloud_speech_v2::model::RecognitionResponseMetadata;
6155 /// let x = BatchRecognizeResults::new().set_or_clear_metadata(Some(RecognitionResponseMetadata::default()/* use setters */));
6156 /// let x = BatchRecognizeResults::new().set_or_clear_metadata(None::<RecognitionResponseMetadata>);
6157 /// ```
6158 pub fn set_or_clear_metadata<T>(mut self, v: std::option::Option<T>) -> Self
6159 where
6160 T: std::convert::Into<crate::model::RecognitionResponseMetadata>,
6161 {
6162 self.metadata = v.map(|x| x.into());
6163 self
6164 }
6165}
6166
6167impl wkt::message::Message for BatchRecognizeResults {
6168 fn typename() -> &'static str {
6169 "type.googleapis.com/google.cloud.speech.v2.BatchRecognizeResults"
6170 }
6171}
6172
6173/// Final results written to Cloud Storage.
6174#[derive(Clone, Default, PartialEq)]
6175#[non_exhaustive]
6176pub struct CloudStorageResult {
6177 /// The Cloud Storage URI to which recognition results were written.
6178 pub uri: std::string::String,
6179
6180 /// The Cloud Storage URI to which recognition results were written as VTT
6181 /// formatted captions. This is populated only when `VTT` output is requested.
6182 pub vtt_format_uri: std::string::String,
6183
6184 /// The Cloud Storage URI to which recognition results were written as SRT
6185 /// formatted captions. This is populated only when `SRT` output is requested.
6186 pub srt_format_uri: std::string::String,
6187
6188 pub(crate) _unknown_fields: serde_json::Map<std::string::String, serde_json::Value>,
6189}
6190
6191impl CloudStorageResult {
6192 pub fn new() -> Self {
6193 std::default::Default::default()
6194 }
6195
6196 /// Sets the value of [uri][crate::model::CloudStorageResult::uri].
6197 ///
6198 /// # Example
6199 /// ```ignore,no_run
6200 /// # use google_cloud_speech_v2::model::CloudStorageResult;
6201 /// let x = CloudStorageResult::new().set_uri("example");
6202 /// ```
6203 pub fn set_uri<T: std::convert::Into<std::string::String>>(mut self, v: T) -> Self {
6204 self.uri = v.into();
6205 self
6206 }
6207
6208 /// Sets the value of [vtt_format_uri][crate::model::CloudStorageResult::vtt_format_uri].
6209 ///
6210 /// # Example
6211 /// ```ignore,no_run
6212 /// # use google_cloud_speech_v2::model::CloudStorageResult;
6213 /// let x = CloudStorageResult::new().set_vtt_format_uri("example");
6214 /// ```
6215 pub fn set_vtt_format_uri<T: std::convert::Into<std::string::String>>(mut self, v: T) -> Self {
6216 self.vtt_format_uri = v.into();
6217 self
6218 }
6219
6220 /// Sets the value of [srt_format_uri][crate::model::CloudStorageResult::srt_format_uri].
6221 ///
6222 /// # Example
6223 /// ```ignore,no_run
6224 /// # use google_cloud_speech_v2::model::CloudStorageResult;
6225 /// let x = CloudStorageResult::new().set_srt_format_uri("example");
6226 /// ```
6227 pub fn set_srt_format_uri<T: std::convert::Into<std::string::String>>(mut self, v: T) -> Self {
6228 self.srt_format_uri = v.into();
6229 self
6230 }
6231}
6232
6233impl wkt::message::Message for CloudStorageResult {
6234 fn typename() -> &'static str {
6235 "type.googleapis.com/google.cloud.speech.v2.CloudStorageResult"
6236 }
6237}
6238
6239/// Final results returned inline in the recognition response.
6240#[derive(Clone, Default, PartialEq)]
6241#[non_exhaustive]
6242pub struct InlineResult {
6243 /// The transcript for the audio file.
6244 pub transcript: std::option::Option<crate::model::BatchRecognizeResults>,
6245
6246 /// The transcript for the audio file as VTT formatted captions. This is
6247 /// populated only when `VTT` output is requested.
6248 pub vtt_captions: std::string::String,
6249
6250 /// The transcript for the audio file as SRT formatted captions. This is
6251 /// populated only when `SRT` output is requested.
6252 pub srt_captions: std::string::String,
6253
6254 pub(crate) _unknown_fields: serde_json::Map<std::string::String, serde_json::Value>,
6255}
6256
6257impl InlineResult {
6258 pub fn new() -> Self {
6259 std::default::Default::default()
6260 }
6261
6262 /// Sets the value of [transcript][crate::model::InlineResult::transcript].
6263 ///
6264 /// # Example
6265 /// ```ignore,no_run
6266 /// # use google_cloud_speech_v2::model::InlineResult;
6267 /// use google_cloud_speech_v2::model::BatchRecognizeResults;
6268 /// let x = InlineResult::new().set_transcript(BatchRecognizeResults::default()/* use setters */);
6269 /// ```
6270 pub fn set_transcript<T>(mut self, v: T) -> Self
6271 where
6272 T: std::convert::Into<crate::model::BatchRecognizeResults>,
6273 {
6274 self.transcript = std::option::Option::Some(v.into());
6275 self
6276 }
6277
6278 /// Sets or clears the value of [transcript][crate::model::InlineResult::transcript].
6279 ///
6280 /// # Example
6281 /// ```ignore,no_run
6282 /// # use google_cloud_speech_v2::model::InlineResult;
6283 /// use google_cloud_speech_v2::model::BatchRecognizeResults;
6284 /// let x = InlineResult::new().set_or_clear_transcript(Some(BatchRecognizeResults::default()/* use setters */));
6285 /// let x = InlineResult::new().set_or_clear_transcript(None::<BatchRecognizeResults>);
6286 /// ```
6287 pub fn set_or_clear_transcript<T>(mut self, v: std::option::Option<T>) -> Self
6288 where
6289 T: std::convert::Into<crate::model::BatchRecognizeResults>,
6290 {
6291 self.transcript = v.map(|x| x.into());
6292 self
6293 }
6294
6295 /// Sets the value of [vtt_captions][crate::model::InlineResult::vtt_captions].
6296 ///
6297 /// # Example
6298 /// ```ignore,no_run
6299 /// # use google_cloud_speech_v2::model::InlineResult;
6300 /// let x = InlineResult::new().set_vtt_captions("example");
6301 /// ```
6302 pub fn set_vtt_captions<T: std::convert::Into<std::string::String>>(mut self, v: T) -> Self {
6303 self.vtt_captions = v.into();
6304 self
6305 }
6306
6307 /// Sets the value of [srt_captions][crate::model::InlineResult::srt_captions].
6308 ///
6309 /// # Example
6310 /// ```ignore,no_run
6311 /// # use google_cloud_speech_v2::model::InlineResult;
6312 /// let x = InlineResult::new().set_srt_captions("example");
6313 /// ```
6314 pub fn set_srt_captions<T: std::convert::Into<std::string::String>>(mut self, v: T) -> Self {
6315 self.srt_captions = v.into();
6316 self
6317 }
6318}
6319
6320impl wkt::message::Message for InlineResult {
6321 fn typename() -> &'static str {
6322 "type.googleapis.com/google.cloud.speech.v2.InlineResult"
6323 }
6324}
6325
6326/// Final results for a single file.
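///
/// # Example
///
/// A minimal sketch (not part of the generated documentation) of checking which
/// branch of `result` is populated:
/// ```ignore,no_run
/// # use google_cloud_speech_v2::model::BatchRecognizeFileResult;
/// fn describe(file_result: &BatchRecognizeFileResult) {
///     if let Some(gcs) = file_result.cloud_storage_result() {
///         // Transcripts were written to Cloud Storage.
///         println!("results at {}", gcs.uri);
///     } else if let Some(inline) = file_result.inline_result() {
///         // Transcripts were returned inline in the response.
///         println!("inline transcript present: {}", inline.transcript.is_some());
///     }
/// }
/// ```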
6327#[derive(Clone, Default, PartialEq)]
6328#[non_exhaustive]
6329pub struct BatchRecognizeFileResult {
6330 /// Error if one was encountered.
6331 pub error: std::option::Option<rpc::model::Status>,
6332
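    /// Metadata about the recognition.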
6333 pub metadata: std::option::Option<crate::model::RecognitionResponseMetadata>,
6334
    /// Deprecated. Use `cloud_storage_result.uri` instead.
6336 #[deprecated]
6337 pub uri: std::string::String,
6338
6339 /// Deprecated. Use `inline_result.transcript` instead.
6340 #[deprecated]
6341 pub transcript: std::option::Option<crate::model::BatchRecognizeResults>,
6342
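    /// The final result, either written to Cloud Storage or returned inline in
    /// the response. See the variants of
    /// [Result][crate::model::batch_recognize_file_result::Result].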
6343 pub result: std::option::Option<crate::model::batch_recognize_file_result::Result>,
6344
6345 pub(crate) _unknown_fields: serde_json::Map<std::string::String, serde_json::Value>,
6346}
6347
6348impl BatchRecognizeFileResult {
6349 pub fn new() -> Self {
6350 std::default::Default::default()
6351 }
6352
6353 /// Sets the value of [error][crate::model::BatchRecognizeFileResult::error].
6354 ///
6355 /// # Example
6356 /// ```ignore,no_run
6357 /// # use google_cloud_speech_v2::model::BatchRecognizeFileResult;
6358 /// use rpc::model::Status;
6359 /// let x = BatchRecognizeFileResult::new().set_error(Status::default()/* use setters */);
6360 /// ```
6361 pub fn set_error<T>(mut self, v: T) -> Self
6362 where
6363 T: std::convert::Into<rpc::model::Status>,
6364 {
6365 self.error = std::option::Option::Some(v.into());
6366 self
6367 }
6368
6369 /// Sets or clears the value of [error][crate::model::BatchRecognizeFileResult::error].
6370 ///
6371 /// # Example
6372 /// ```ignore,no_run
6373 /// # use google_cloud_speech_v2::model::BatchRecognizeFileResult;
6374 /// use rpc::model::Status;
6375 /// let x = BatchRecognizeFileResult::new().set_or_clear_error(Some(Status::default()/* use setters */));
6376 /// let x = BatchRecognizeFileResult::new().set_or_clear_error(None::<Status>);
6377 /// ```
6378 pub fn set_or_clear_error<T>(mut self, v: std::option::Option<T>) -> Self
6379 where
6380 T: std::convert::Into<rpc::model::Status>,
6381 {
6382 self.error = v.map(|x| x.into());
6383 self
6384 }
6385
6386 /// Sets the value of [metadata][crate::model::BatchRecognizeFileResult::metadata].
6387 ///
6388 /// # Example
6389 /// ```ignore,no_run
6390 /// # use google_cloud_speech_v2::model::BatchRecognizeFileResult;
6391 /// use google_cloud_speech_v2::model::RecognitionResponseMetadata;
6392 /// let x = BatchRecognizeFileResult::new().set_metadata(RecognitionResponseMetadata::default()/* use setters */);
6393 /// ```
6394 pub fn set_metadata<T>(mut self, v: T) -> Self
6395 where
6396 T: std::convert::Into<crate::model::RecognitionResponseMetadata>,
6397 {
6398 self.metadata = std::option::Option::Some(v.into());
6399 self
6400 }
6401
6402 /// Sets or clears the value of [metadata][crate::model::BatchRecognizeFileResult::metadata].
6403 ///
6404 /// # Example
6405 /// ```ignore,no_run
6406 /// # use google_cloud_speech_v2::model::BatchRecognizeFileResult;
6407 /// use google_cloud_speech_v2::model::RecognitionResponseMetadata;
6408 /// let x = BatchRecognizeFileResult::new().set_or_clear_metadata(Some(RecognitionResponseMetadata::default()/* use setters */));
6409 /// let x = BatchRecognizeFileResult::new().set_or_clear_metadata(None::<RecognitionResponseMetadata>);
6410 /// ```
6411 pub fn set_or_clear_metadata<T>(mut self, v: std::option::Option<T>) -> Self
6412 where
6413 T: std::convert::Into<crate::model::RecognitionResponseMetadata>,
6414 {
6415 self.metadata = v.map(|x| x.into());
6416 self
6417 }
6418
6419 /// Sets the value of [uri][crate::model::BatchRecognizeFileResult::uri].
6420 ///
6421 /// # Example
6422 /// ```ignore,no_run
6423 /// # use google_cloud_speech_v2::model::BatchRecognizeFileResult;
6424 /// let x = BatchRecognizeFileResult::new().set_uri("example");
6425 /// ```
6426 #[deprecated]
6427 pub fn set_uri<T: std::convert::Into<std::string::String>>(mut self, v: T) -> Self {
6428 self.uri = v.into();
6429 self
6430 }
6431
6432 /// Sets the value of [transcript][crate::model::BatchRecognizeFileResult::transcript].
6433 ///
6434 /// # Example
6435 /// ```ignore,no_run
6436 /// # use google_cloud_speech_v2::model::BatchRecognizeFileResult;
6437 /// use google_cloud_speech_v2::model::BatchRecognizeResults;
6438 /// let x = BatchRecognizeFileResult::new().set_transcript(BatchRecognizeResults::default()/* use setters */);
6439 /// ```
6440 #[deprecated]
6441 pub fn set_transcript<T>(mut self, v: T) -> Self
6442 where
6443 T: std::convert::Into<crate::model::BatchRecognizeResults>,
6444 {
6445 self.transcript = std::option::Option::Some(v.into());
6446 self
6447 }
6448
6449 /// Sets or clears the value of [transcript][crate::model::BatchRecognizeFileResult::transcript].
6450 ///
6451 /// # Example
6452 /// ```ignore,no_run
6453 /// # use google_cloud_speech_v2::model::BatchRecognizeFileResult;
6454 /// use google_cloud_speech_v2::model::BatchRecognizeResults;
6455 /// let x = BatchRecognizeFileResult::new().set_or_clear_transcript(Some(BatchRecognizeResults::default()/* use setters */));
6456 /// let x = BatchRecognizeFileResult::new().set_or_clear_transcript(None::<BatchRecognizeResults>);
6457 /// ```
6458 #[deprecated]
6459 pub fn set_or_clear_transcript<T>(mut self, v: std::option::Option<T>) -> Self
6460 where
6461 T: std::convert::Into<crate::model::BatchRecognizeResults>,
6462 {
6463 self.transcript = v.map(|x| x.into());
6464 self
6465 }
6466
6467 /// Sets the value of [result][crate::model::BatchRecognizeFileResult::result].
6468 ///
6469 /// Note that all the setters affecting `result` are mutually
6470 /// exclusive.
6471 ///
6472 /// # Example
6473 /// ```ignore,no_run
6474 /// # use google_cloud_speech_v2::model::BatchRecognizeFileResult;
6475 /// use google_cloud_speech_v2::model::CloudStorageResult;
6476 /// let x = BatchRecognizeFileResult::new().set_result(Some(
6477 /// google_cloud_speech_v2::model::batch_recognize_file_result::Result::CloudStorageResult(CloudStorageResult::default().into())));
6478 /// ```
6479 pub fn set_result<
6480 T: std::convert::Into<std::option::Option<crate::model::batch_recognize_file_result::Result>>,
6481 >(
6482 mut self,
6483 v: T,
6484 ) -> Self {
6485 self.result = v.into();
6486 self
6487 }
6488
6489 /// The value of [result][crate::model::BatchRecognizeFileResult::result]
6490 /// if it holds a `CloudStorageResult`, `None` if the field is not set or
6491 /// holds a different branch.
6492 pub fn cloud_storage_result(
6493 &self,
6494 ) -> std::option::Option<&std::boxed::Box<crate::model::CloudStorageResult>> {
6495 #[allow(unreachable_patterns)]
6496 self.result.as_ref().and_then(|v| match v {
6497 crate::model::batch_recognize_file_result::Result::CloudStorageResult(v) => {
6498 std::option::Option::Some(v)
6499 }
6500 _ => std::option::Option::None,
6501 })
6502 }
6503
6504 /// Sets the value of [result][crate::model::BatchRecognizeFileResult::result]
6505 /// to hold a `CloudStorageResult`.
6506 ///
6507 /// Note that all the setters affecting `result` are
6508 /// mutually exclusive.
6509 ///
6510 /// # Example
6511 /// ```ignore,no_run
6512 /// # use google_cloud_speech_v2::model::BatchRecognizeFileResult;
6513 /// use google_cloud_speech_v2::model::CloudStorageResult;
6514 /// let x = BatchRecognizeFileResult::new().set_cloud_storage_result(CloudStorageResult::default()/* use setters */);
6515 /// assert!(x.cloud_storage_result().is_some());
6516 /// assert!(x.inline_result().is_none());
6517 /// ```
6518 pub fn set_cloud_storage_result<
6519 T: std::convert::Into<std::boxed::Box<crate::model::CloudStorageResult>>,
6520 >(
6521 mut self,
6522 v: T,
6523 ) -> Self {
6524 self.result = std::option::Option::Some(
6525 crate::model::batch_recognize_file_result::Result::CloudStorageResult(v.into()),
6526 );
6527 self
6528 }
6529
6530 /// The value of [result][crate::model::BatchRecognizeFileResult::result]
    /// if it holds an `InlineResult`, `None` if the field is not set or
6532 /// holds a different branch.
6533 pub fn inline_result(
6534 &self,
6535 ) -> std::option::Option<&std::boxed::Box<crate::model::InlineResult>> {
6536 #[allow(unreachable_patterns)]
6537 self.result.as_ref().and_then(|v| match v {
6538 crate::model::batch_recognize_file_result::Result::InlineResult(v) => {
6539 std::option::Option::Some(v)
6540 }
6541 _ => std::option::Option::None,
6542 })
6543 }
6544
6545 /// Sets the value of [result][crate::model::BatchRecognizeFileResult::result]
    /// to hold an `InlineResult`.
6547 ///
6548 /// Note that all the setters affecting `result` are
6549 /// mutually exclusive.
6550 ///
6551 /// # Example
6552 /// ```ignore,no_run
6553 /// # use google_cloud_speech_v2::model::BatchRecognizeFileResult;
6554 /// use google_cloud_speech_v2::model::InlineResult;
6555 /// let x = BatchRecognizeFileResult::new().set_inline_result(InlineResult::default()/* use setters */);
6556 /// assert!(x.inline_result().is_some());
6557 /// assert!(x.cloud_storage_result().is_none());
6558 /// ```
6559 pub fn set_inline_result<T: std::convert::Into<std::boxed::Box<crate::model::InlineResult>>>(
6560 mut self,
6561 v: T,
6562 ) -> Self {
6563 self.result = std::option::Option::Some(
6564 crate::model::batch_recognize_file_result::Result::InlineResult(v.into()),
6565 );
6566 self
6567 }
6568}
6569
6570impl wkt::message::Message for BatchRecognizeFileResult {
6571 fn typename() -> &'static str {
6572 "type.googleapis.com/google.cloud.speech.v2.BatchRecognizeFileResult"
6573 }
6574}
6575
6576/// Defines additional types related to [BatchRecognizeFileResult].
6577pub mod batch_recognize_file_result {
6578 #[allow(unused_imports)]
6579 use super::*;
6580
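    /// The final recognition result for this file.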
6581 #[derive(Clone, Debug, PartialEq)]
6582 #[non_exhaustive]
6583 pub enum Result {
6584 /// Recognition results written to Cloud Storage. This is
6585 /// populated only when
6586 /// [GcsOutputConfig][google.cloud.speech.v2.GcsOutputConfig] is set in
6587 /// the
        /// [RecognitionOutputConfig][google.cloud.speech.v2.RecognitionOutputConfig].
        ///
        /// [google.cloud.speech.v2.GcsOutputConfig]: crate::model::GcsOutputConfig
        /// [google.cloud.speech.v2.RecognitionOutputConfig]: crate::model::RecognitionOutputConfig
6591 CloudStorageResult(std::boxed::Box<crate::model::CloudStorageResult>),
6592 /// Recognition results. This is populated only when
6593 /// [InlineOutputConfig][google.cloud.speech.v2.InlineOutputConfig] is set in
6594 /// the
        /// [RecognitionOutputConfig][google.cloud.speech.v2.RecognitionOutputConfig].
        ///
        /// [google.cloud.speech.v2.InlineOutputConfig]: crate::model::InlineOutputConfig
        /// [google.cloud.speech.v2.RecognitionOutputConfig]: crate::model::RecognitionOutputConfig
6598 InlineResult(std::boxed::Box<crate::model::InlineResult>),
6599 }
6600}
6601
6602/// Metadata about transcription for a single file (for example, progress
6603/// percent).
6604#[derive(Clone, Default, PartialEq)]
6605#[non_exhaustive]
6606pub struct BatchRecognizeTranscriptionMetadata {
6607 /// How much of the file has been transcribed so far.
6608 pub progress_percent: i32,
6609
6610 /// Error if one was encountered.
6611 pub error: std::option::Option<rpc::model::Status>,
6612
6613 /// The Cloud Storage URI to which recognition results will be written.
6614 pub uri: std::string::String,
6615
6616 pub(crate) _unknown_fields: serde_json::Map<std::string::String, serde_json::Value>,
6617}
6618
6619impl BatchRecognizeTranscriptionMetadata {
6620 pub fn new() -> Self {
6621 std::default::Default::default()
6622 }
6623
6624 /// Sets the value of [progress_percent][crate::model::BatchRecognizeTranscriptionMetadata::progress_percent].
6625 ///
6626 /// # Example
6627 /// ```ignore,no_run
6628 /// # use google_cloud_speech_v2::model::BatchRecognizeTranscriptionMetadata;
6629 /// let x = BatchRecognizeTranscriptionMetadata::new().set_progress_percent(42);
6630 /// ```
6631 pub fn set_progress_percent<T: std::convert::Into<i32>>(mut self, v: T) -> Self {
6632 self.progress_percent = v.into();
6633 self
6634 }
6635
6636 /// Sets the value of [error][crate::model::BatchRecognizeTranscriptionMetadata::error].
6637 ///
6638 /// # Example
6639 /// ```ignore,no_run
6640 /// # use google_cloud_speech_v2::model::BatchRecognizeTranscriptionMetadata;
6641 /// use rpc::model::Status;
6642 /// let x = BatchRecognizeTranscriptionMetadata::new().set_error(Status::default()/* use setters */);
6643 /// ```
6644 pub fn set_error<T>(mut self, v: T) -> Self
6645 where
6646 T: std::convert::Into<rpc::model::Status>,
6647 {
6648 self.error = std::option::Option::Some(v.into());
6649 self
6650 }
6651
6652 /// Sets or clears the value of [error][crate::model::BatchRecognizeTranscriptionMetadata::error].
6653 ///
6654 /// # Example
6655 /// ```ignore,no_run
6656 /// # use google_cloud_speech_v2::model::BatchRecognizeTranscriptionMetadata;
6657 /// use rpc::model::Status;
6658 /// let x = BatchRecognizeTranscriptionMetadata::new().set_or_clear_error(Some(Status::default()/* use setters */));
6659 /// let x = BatchRecognizeTranscriptionMetadata::new().set_or_clear_error(None::<Status>);
6660 /// ```
6661 pub fn set_or_clear_error<T>(mut self, v: std::option::Option<T>) -> Self
6662 where
6663 T: std::convert::Into<rpc::model::Status>,
6664 {
6665 self.error = v.map(|x| x.into());
6666 self
6667 }
6668
6669 /// Sets the value of [uri][crate::model::BatchRecognizeTranscriptionMetadata::uri].
6670 ///
6671 /// # Example
6672 /// ```ignore,no_run
6673 /// # use google_cloud_speech_v2::model::BatchRecognizeTranscriptionMetadata;
6674 /// let x = BatchRecognizeTranscriptionMetadata::new().set_uri("example");
6675 /// ```
6676 pub fn set_uri<T: std::convert::Into<std::string::String>>(mut self, v: T) -> Self {
6677 self.uri = v.into();
6678 self
6679 }
6680}
6681
6682impl wkt::message::Message for BatchRecognizeTranscriptionMetadata {
6683 fn typename() -> &'static str {
6684 "type.googleapis.com/google.cloud.speech.v2.BatchRecognizeTranscriptionMetadata"
6685 }
6686}
6687
6688/// Operation metadata for
6689/// [BatchRecognize][google.cloud.speech.v2.Speech.BatchRecognize].
6690///
6691/// [google.cloud.speech.v2.Speech.BatchRecognize]: crate::client::Speech::batch_recognize
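///
/// # Example
///
/// A minimal sketch (not part of the generated documentation) of reporting
/// per-file progress while polling the operation:
/// ```ignore,no_run
/// # use google_cloud_speech_v2::model::BatchRecognizeMetadata;
/// fn report(metadata: &BatchRecognizeMetadata) {
///     for (filename, file_metadata) in &metadata.transcription_metadata {
///         println!("{filename}: {}% transcribed", file_metadata.progress_percent);
///     }
/// }
/// ```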
6692#[derive(Clone, Default, PartialEq)]
6693#[non_exhaustive]
6694pub struct BatchRecognizeMetadata {
6695 /// Map from provided filename to the transcription metadata for that file.
6696 pub transcription_metadata: std::collections::HashMap<
6697 std::string::String,
6698 crate::model::BatchRecognizeTranscriptionMetadata,
6699 >,
6700
6701 pub(crate) _unknown_fields: serde_json::Map<std::string::String, serde_json::Value>,
6702}
6703
6704impl BatchRecognizeMetadata {
6705 pub fn new() -> Self {
6706 std::default::Default::default()
6707 }
6708
6709 /// Sets the value of [transcription_metadata][crate::model::BatchRecognizeMetadata::transcription_metadata].
6710 ///
6711 /// # Example
6712 /// ```ignore,no_run
6713 /// # use google_cloud_speech_v2::model::BatchRecognizeMetadata;
6714 /// use google_cloud_speech_v2::model::BatchRecognizeTranscriptionMetadata;
6715 /// let x = BatchRecognizeMetadata::new().set_transcription_metadata([
6716 /// ("key0", BatchRecognizeTranscriptionMetadata::default()/* use setters */),
6717 /// ("key1", BatchRecognizeTranscriptionMetadata::default()/* use (different) setters */),
6718 /// ]);
6719 /// ```
6720 pub fn set_transcription_metadata<T, K, V>(mut self, v: T) -> Self
6721 where
6722 T: std::iter::IntoIterator<Item = (K, V)>,
6723 K: std::convert::Into<std::string::String>,
6724 V: std::convert::Into<crate::model::BatchRecognizeTranscriptionMetadata>,
6725 {
6726 use std::iter::Iterator;
6727 self.transcription_metadata = v.into_iter().map(|(k, v)| (k.into(), v.into())).collect();
6728 self
6729 }
6730}
6731
6732impl wkt::message::Message for BatchRecognizeMetadata {
6733 fn typename() -> &'static str {
6734 "type.googleapis.com/google.cloud.speech.v2.BatchRecognizeMetadata"
6735 }
6736}
6737
6738/// Metadata about a single file in a batch for BatchRecognize.
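///
/// # Example
///
/// A minimal sketch (not part of the generated documentation): point the entry
/// at a Cloud Storage object (the bucket and object names below are
/// placeholders) and override part of the recognizer's default configuration
/// for just this file; the configuration and mask values are elided.
/// ```ignore,no_run
/// use google_cloud_speech_v2::model::{BatchRecognizeFileMetadata, RecognitionConfig};
/// use wkt::FieldMask;
/// let file = BatchRecognizeFileMetadata::new()
///     .set_uri("gs://my-bucket/audio.wav")
///     .set_config(RecognitionConfig::default()/* use setters */)
///     .set_config_mask(FieldMask::default()/* use setters */);
/// ```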
6739#[derive(Clone, Default, PartialEq)]
6740#[non_exhaustive]
6741pub struct BatchRecognizeFileMetadata {
6742 /// Features and audio metadata to use for the Automatic Speech Recognition.
6743 /// This field in combination with the
6744 /// [config_mask][google.cloud.speech.v2.BatchRecognizeFileMetadata.config_mask]
6745 /// field can be used to override parts of the
6746 /// [default_recognition_config][google.cloud.speech.v2.Recognizer.default_recognition_config]
6747 /// of the Recognizer resource as well as the
6748 /// [config][google.cloud.speech.v2.BatchRecognizeRequest.config] at the
6749 /// request level.
6750 ///
6751 /// [google.cloud.speech.v2.BatchRecognizeFileMetadata.config_mask]: crate::model::BatchRecognizeFileMetadata::config_mask
6752 /// [google.cloud.speech.v2.BatchRecognizeRequest.config]: crate::model::BatchRecognizeRequest::config
6753 /// [google.cloud.speech.v2.Recognizer.default_recognition_config]: crate::model::Recognizer::default_recognition_config
6754 pub config: std::option::Option<crate::model::RecognitionConfig>,
6755
6756 /// The list of fields in
6757 /// [config][google.cloud.speech.v2.BatchRecognizeFileMetadata.config] that
6758 /// override the values in the
6759 /// [default_recognition_config][google.cloud.speech.v2.Recognizer.default_recognition_config]
6760 /// of the recognizer during this recognition request. If no mask is provided,
6761 /// all non-default valued fields in
6762 /// [config][google.cloud.speech.v2.BatchRecognizeFileMetadata.config] override
6763 /// the values in the recognizer for this recognition request. If a mask is
6764 /// provided, only the fields listed in the mask override the config in the
6765 /// recognizer for this recognition request. If a wildcard (`*`) is provided,
6766 /// [config][google.cloud.speech.v2.BatchRecognizeFileMetadata.config]
6767 /// completely overrides and replaces the config in the recognizer for this
6768 /// recognition request.
6769 ///
6770 /// [google.cloud.speech.v2.BatchRecognizeFileMetadata.config]: crate::model::BatchRecognizeFileMetadata::config
6771 /// [google.cloud.speech.v2.Recognizer.default_recognition_config]: crate::model::Recognizer::default_recognition_config
6772 pub config_mask: std::option::Option<wkt::FieldMask>,
6773
6774 /// The audio source, which is a Google Cloud Storage URI.
6775 pub audio_source: std::option::Option<crate::model::batch_recognize_file_metadata::AudioSource>,
6776
6777 pub(crate) _unknown_fields: serde_json::Map<std::string::String, serde_json::Value>,
6778}
6779
6780impl BatchRecognizeFileMetadata {
6781 pub fn new() -> Self {
6782 std::default::Default::default()
6783 }
6784
6785 /// Sets the value of [config][crate::model::BatchRecognizeFileMetadata::config].
6786 ///
6787 /// # Example
6788 /// ```ignore,no_run
6789 /// # use google_cloud_speech_v2::model::BatchRecognizeFileMetadata;
6790 /// use google_cloud_speech_v2::model::RecognitionConfig;
6791 /// let x = BatchRecognizeFileMetadata::new().set_config(RecognitionConfig::default()/* use setters */);
6792 /// ```
6793 pub fn set_config<T>(mut self, v: T) -> Self
6794 where
6795 T: std::convert::Into<crate::model::RecognitionConfig>,
6796 {
6797 self.config = std::option::Option::Some(v.into());
6798 self
6799 }
6800
6801 /// Sets or clears the value of [config][crate::model::BatchRecognizeFileMetadata::config].
6802 ///
6803 /// # Example
6804 /// ```ignore,no_run
6805 /// # use google_cloud_speech_v2::model::BatchRecognizeFileMetadata;
6806 /// use google_cloud_speech_v2::model::RecognitionConfig;
6807 /// let x = BatchRecognizeFileMetadata::new().set_or_clear_config(Some(RecognitionConfig::default()/* use setters */));
6808 /// let x = BatchRecognizeFileMetadata::new().set_or_clear_config(None::<RecognitionConfig>);
6809 /// ```
6810 pub fn set_or_clear_config<T>(mut self, v: std::option::Option<T>) -> Self
6811 where
6812 T: std::convert::Into<crate::model::RecognitionConfig>,
6813 {
6814 self.config = v.map(|x| x.into());
6815 self
6816 }
6817
6818 /// Sets the value of [config_mask][crate::model::BatchRecognizeFileMetadata::config_mask].
6819 ///
6820 /// # Example
6821 /// ```ignore,no_run
6822 /// # use google_cloud_speech_v2::model::BatchRecognizeFileMetadata;
6823 /// use wkt::FieldMask;
6824 /// let x = BatchRecognizeFileMetadata::new().set_config_mask(FieldMask::default()/* use setters */);
6825 /// ```
6826 pub fn set_config_mask<T>(mut self, v: T) -> Self
6827 where
6828 T: std::convert::Into<wkt::FieldMask>,
6829 {
6830 self.config_mask = std::option::Option::Some(v.into());
6831 self
6832 }
6833
6834 /// Sets or clears the value of [config_mask][crate::model::BatchRecognizeFileMetadata::config_mask].
6835 ///
6836 /// # Example
6837 /// ```ignore,no_run
6838 /// # use google_cloud_speech_v2::model::BatchRecognizeFileMetadata;
6839 /// use wkt::FieldMask;
6840 /// let x = BatchRecognizeFileMetadata::new().set_or_clear_config_mask(Some(FieldMask::default()/* use setters */));
6841 /// let x = BatchRecognizeFileMetadata::new().set_or_clear_config_mask(None::<FieldMask>);
6842 /// ```
6843 pub fn set_or_clear_config_mask<T>(mut self, v: std::option::Option<T>) -> Self
6844 where
6845 T: std::convert::Into<wkt::FieldMask>,
6846 {
6847 self.config_mask = v.map(|x| x.into());
6848 self
6849 }
6850
6851 /// Sets the value of [audio_source][crate::model::BatchRecognizeFileMetadata::audio_source].
6852 ///
6853 /// Note that all the setters affecting `audio_source` are mutually
6854 /// exclusive.
6855 ///
6856 /// # Example
6857 /// ```ignore,no_run
6858 /// # use google_cloud_speech_v2::model::BatchRecognizeFileMetadata;
6859 /// use google_cloud_speech_v2::model::batch_recognize_file_metadata::AudioSource;
6860 /// let x = BatchRecognizeFileMetadata::new().set_audio_source(Some(AudioSource::Uri("example".to_string())));
6861 /// ```
6862 pub fn set_audio_source<
6863 T: std::convert::Into<
6864 std::option::Option<crate::model::batch_recognize_file_metadata::AudioSource>,
6865 >,
6866 >(
6867 mut self,
6868 v: T,
6869 ) -> Self {
6870 self.audio_source = v.into();
6871 self
6872 }
6873
6874 /// The value of [audio_source][crate::model::BatchRecognizeFileMetadata::audio_source]
6875 /// if it holds a `Uri`, `None` if the field is not set or
6876 /// holds a different branch.
6877 pub fn uri(&self) -> std::option::Option<&std::string::String> {
6878 #[allow(unreachable_patterns)]
6879 self.audio_source.as_ref().and_then(|v| match v {
6880 crate::model::batch_recognize_file_metadata::AudioSource::Uri(v) => {
6881 std::option::Option::Some(v)
6882 }
6883 _ => std::option::Option::None,
6884 })
6885 }
6886
6887 /// Sets the value of [audio_source][crate::model::BatchRecognizeFileMetadata::audio_source]
6888 /// to hold a `Uri`.
6889 ///
6890 /// Note that all the setters affecting `audio_source` are
6891 /// mutually exclusive.
6892 ///
6893 /// # Example
6894 /// ```ignore,no_run
6895 /// # use google_cloud_speech_v2::model::BatchRecognizeFileMetadata;
6896 /// let x = BatchRecognizeFileMetadata::new().set_uri("example");
6897 /// assert!(x.uri().is_some());
6898 /// ```
6899 pub fn set_uri<T: std::convert::Into<std::string::String>>(mut self, v: T) -> Self {
6900 self.audio_source = std::option::Option::Some(
6901 crate::model::batch_recognize_file_metadata::AudioSource::Uri(v.into()),
6902 );
6903 self
6904 }
6905}
6906
6907impl wkt::message::Message for BatchRecognizeFileMetadata {
6908 fn typename() -> &'static str {
6909 "type.googleapis.com/google.cloud.speech.v2.BatchRecognizeFileMetadata"
6910 }
6911}
6912
6913/// Defines additional types related to [BatchRecognizeFileMetadata].
6914pub mod batch_recognize_file_metadata {
6915 #[allow(unused_imports)]
6916 use super::*;
6917
6918 /// The audio source, which is a Google Cloud Storage URI.
6919 #[derive(Clone, Debug, PartialEq)]
6920 #[non_exhaustive]
6921 pub enum AudioSource {
6922 /// Cloud Storage URI for the audio file.
6923 Uri(std::string::String),
6924 }
6925}
6926
6927/// A streaming speech recognition result corresponding to a portion of the audio
6928/// that is currently being processed.
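///
/// # Example
///
/// A minimal sketch (not part of the generated documentation) of choosing what
/// to display for a result; it assumes the `transcript` field on the
/// `SpeechRecognitionAlternative` message.
/// ```ignore,no_run
/// # use google_cloud_speech_v2::model::StreamingRecognitionResult;
/// fn display(result: &StreamingRecognitionResult) {
///     if let Some(best) = result.alternatives.first() {
///         // Always show final results; show interim results only once they are
///         // reasonably stable.
///         if result.is_final || result.stability >= 0.8 {
///             println!("{}", best.transcript);
///         }
///     }
/// }
/// ```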
6929#[derive(Clone, Default, PartialEq)]
6930#[non_exhaustive]
6931pub struct StreamingRecognitionResult {
6932 /// May contain one or more recognition hypotheses. These alternatives are
6933 /// ordered in terms of accuracy, with the top (first) alternative being the
6934 /// most probable, as ranked by the recognizer.
6935 pub alternatives: std::vec::Vec<crate::model::SpeechRecognitionAlternative>,
6936
6937 /// If `false`, this
6938 /// [StreamingRecognitionResult][google.cloud.speech.v2.StreamingRecognitionResult]
6939 /// represents an interim result that may change. If `true`, this is the final
6940 /// time the speech service will return this particular
    /// [StreamingRecognitionResult][google.cloud.speech.v2.StreamingRecognitionResult];
6942 /// the recognizer will not return any further hypotheses for this portion of
6943 /// the transcript and corresponding audio.
6944 ///
6945 /// [google.cloud.speech.v2.StreamingRecognitionResult]: crate::model::StreamingRecognitionResult
6946 pub is_final: bool,
6947
6948 /// An estimate of the likelihood that the recognizer will not change its guess
6949 /// about this interim result. Values range from 0.0 (completely unstable)
6950 /// to 1.0 (completely stable). This field is only provided for interim results
6951 /// ([is_final][google.cloud.speech.v2.StreamingRecognitionResult.is_final]=`false`).
6952 /// The default of 0.0 is a sentinel value indicating `stability` was not set.
6953 ///
6954 /// [google.cloud.speech.v2.StreamingRecognitionResult.is_final]: crate::model::StreamingRecognitionResult::is_final
6955 pub stability: f32,
6956
6957 /// Time offset of the end of this result relative to the beginning of the
6958 /// audio.
6959 pub result_end_offset: std::option::Option<wkt::Duration>,
6960
6961 /// For multi-channel audio, this is the channel number corresponding to the
6962 /// recognized result for the audio from that channel.
6963 /// For
6964 /// `audio_channel_count` = `N`, its output values can range from `1` to `N`.
6965 pub channel_tag: i32,
6966
6967 /// Output only. The [BCP-47](https://www.rfc-editor.org/rfc/bcp/bcp47.txt)
    /// language tag of the language in this result. This is the language that
    /// was detected as most likely to have been spoken in the audio.
6970 pub language_code: std::string::String,
6971
6972 pub(crate) _unknown_fields: serde_json::Map<std::string::String, serde_json::Value>,
6973}
6974
6975impl StreamingRecognitionResult {
6976 pub fn new() -> Self {
6977 std::default::Default::default()
6978 }
6979
6980 /// Sets the value of [alternatives][crate::model::StreamingRecognitionResult::alternatives].
6981 ///
6982 /// # Example
6983 /// ```ignore,no_run
6984 /// # use google_cloud_speech_v2::model::StreamingRecognitionResult;
6985 /// use google_cloud_speech_v2::model::SpeechRecognitionAlternative;
6986 /// let x = StreamingRecognitionResult::new()
6987 /// .set_alternatives([
6988 /// SpeechRecognitionAlternative::default()/* use setters */,
6989 /// SpeechRecognitionAlternative::default()/* use (different) setters */,
6990 /// ]);
6991 /// ```
6992 pub fn set_alternatives<T, V>(mut self, v: T) -> Self
6993 where
6994 T: std::iter::IntoIterator<Item = V>,
6995 V: std::convert::Into<crate::model::SpeechRecognitionAlternative>,
6996 {
6997 use std::iter::Iterator;
6998 self.alternatives = v.into_iter().map(|i| i.into()).collect();
6999 self
7000 }
7001
7002 /// Sets the value of [is_final][crate::model::StreamingRecognitionResult::is_final].
7003 ///
7004 /// # Example
7005 /// ```ignore,no_run
7006 /// # use google_cloud_speech_v2::model::StreamingRecognitionResult;
7007 /// let x = StreamingRecognitionResult::new().set_is_final(true);
7008 /// ```
7009 pub fn set_is_final<T: std::convert::Into<bool>>(mut self, v: T) -> Self {
7010 self.is_final = v.into();
7011 self
7012 }
7013
7014 /// Sets the value of [stability][crate::model::StreamingRecognitionResult::stability].
7015 ///
7016 /// # Example
7017 /// ```ignore,no_run
7018 /// # use google_cloud_speech_v2::model::StreamingRecognitionResult;
    /// let x = StreamingRecognitionResult::new().set_stability(0.9);
7020 /// ```
7021 pub fn set_stability<T: std::convert::Into<f32>>(mut self, v: T) -> Self {
7022 self.stability = v.into();
7023 self
7024 }
7025
7026 /// Sets the value of [result_end_offset][crate::model::StreamingRecognitionResult::result_end_offset].
7027 ///
7028 /// # Example
7029 /// ```ignore,no_run
7030 /// # use google_cloud_speech_v2::model::StreamingRecognitionResult;
7031 /// use wkt::Duration;
7032 /// let x = StreamingRecognitionResult::new().set_result_end_offset(Duration::default()/* use setters */);
7033 /// ```
7034 pub fn set_result_end_offset<T>(mut self, v: T) -> Self
7035 where
7036 T: std::convert::Into<wkt::Duration>,
7037 {
7038 self.result_end_offset = std::option::Option::Some(v.into());
7039 self
7040 }
7041
7042 /// Sets or clears the value of [result_end_offset][crate::model::StreamingRecognitionResult::result_end_offset].
7043 ///
7044 /// # Example
7045 /// ```ignore,no_run
7046 /// # use google_cloud_speech_v2::model::StreamingRecognitionResult;
7047 /// use wkt::Duration;
7048 /// let x = StreamingRecognitionResult::new().set_or_clear_result_end_offset(Some(Duration::default()/* use setters */));
7049 /// let x = StreamingRecognitionResult::new().set_or_clear_result_end_offset(None::<Duration>);
7050 /// ```
7051 pub fn set_or_clear_result_end_offset<T>(mut self, v: std::option::Option<T>) -> Self
7052 where
7053 T: std::convert::Into<wkt::Duration>,
7054 {
7055 self.result_end_offset = v.map(|x| x.into());
7056 self
7057 }
7058
7059 /// Sets the value of [channel_tag][crate::model::StreamingRecognitionResult::channel_tag].
7060 ///
7061 /// # Example
7062 /// ```ignore,no_run
7063 /// # use google_cloud_speech_v2::model::StreamingRecognitionResult;
7064 /// let x = StreamingRecognitionResult::new().set_channel_tag(42);
7065 /// ```
7066 pub fn set_channel_tag<T: std::convert::Into<i32>>(mut self, v: T) -> Self {
7067 self.channel_tag = v.into();
7068 self
7069 }
7070
7071 /// Sets the value of [language_code][crate::model::StreamingRecognitionResult::language_code].
7072 ///
7073 /// # Example
7074 /// ```ignore,no_run
7075 /// # use google_cloud_speech_v2::model::StreamingRecognitionResult;
7076 /// let x = StreamingRecognitionResult::new().set_language_code("example");
7077 /// ```
7078 pub fn set_language_code<T: std::convert::Into<std::string::String>>(mut self, v: T) -> Self {
7079 self.language_code = v.into();
7080 self
7081 }
7082}
7083
7084impl wkt::message::Message for StreamingRecognitionResult {
7085 fn typename() -> &'static str {
7086 "type.googleapis.com/google.cloud.speech.v2.StreamingRecognitionResult"
7087 }
7088}
7089
7090/// `StreamingRecognizeResponse` is the only message returned to the client by
7091/// `StreamingRecognize`. A series of zero or more `StreamingRecognizeResponse`
7092/// messages are streamed back to the client. If there is no recognizable
7093/// audio then no messages are streamed back to the client.
7094///
7095/// Here are some examples of `StreamingRecognizeResponse`s that might
7096/// be returned while processing audio:
7097///
/// 1. results { alternatives { transcript: "tube" } stability: 0.01 }
///
/// 2. results { alternatives { transcript: "to be a" } stability: 0.01 }
///
/// 3. results { alternatives { transcript: "to be" } stability: 0.9 }
///    results { alternatives { transcript: " or not to be" } stability: 0.01 }
///
/// 4. results { alternatives { transcript: "to be or not to be"
///    confidence: 0.92 }
///    alternatives { transcript: "to bee or not to bee" }
///    is_final: true }
///
/// 5. results { alternatives { transcript: " that's" } stability: 0.01 }
///
/// 6. results { alternatives { transcript: " that is" } stability: 0.9 }
///    results { alternatives { transcript: " the question" } stability: 0.01 }
///
/// 7. results { alternatives { transcript: " that is the question"
///    confidence: 0.98 }
///    alternatives { transcript: " that was the question" }
///    is_final: true }
///
7121/// Notes:
7122///
/// - Only two of the above responses, #4 and #7, contain final results; they are
///   indicated by `is_final: true`. Concatenating these together generates the
///   full transcript: "to be or not to be that is the question" (see the example
///   below).
7126///
7127/// - The others contain interim `results`. #3 and #6 contain two interim
7128/// `results`: the first portion has a high stability and is less likely to
7129/// change; the second portion has a low stability and is very likely to
7130/// change. A UI designer might choose to show only high stability `results`.
7131///
7132/// - The specific `stability` and `confidence` values shown above are only for
7133/// illustrative purposes. Actual values may vary.
7134///
7135/// - In each response, only one of these fields will be set:
7136/// `error`,
7137/// `speech_event_type`, or
7138/// one or more (repeated) `results`.
7139///
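/// # Example
///
/// A minimal sketch (not part of the generated documentation) of assembling the
/// full transcript from the finalized results, as described in the notes above;
/// it assumes the `transcript` field on the `SpeechRecognitionAlternative`
/// message.
/// ```ignore,no_run
/// # use google_cloud_speech_v2::model::StreamingRecognizeResponse;
/// fn append_final(transcript: &mut String, response: &StreamingRecognizeResponse) {
///     for result in response.results.iter().filter(|r| r.is_final) {
///         if let Some(best) = result.alternatives.first() {
///             transcript.push_str(&best.transcript);
///         }
///     }
/// }
/// ```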
7140#[derive(Clone, Default, PartialEq)]
7141#[non_exhaustive]
7142pub struct StreamingRecognizeResponse {
7143 /// This repeated list contains zero or more results that
7144 /// correspond to consecutive portions of the audio currently being processed.
7145 /// It contains zero or one
7146 /// [is_final][google.cloud.speech.v2.StreamingRecognitionResult.is_final]=`true`
7147 /// result (the newly settled portion), followed by zero or more
7148 /// [is_final][google.cloud.speech.v2.StreamingRecognitionResult.is_final]=`false`
7149 /// results (the interim results).
7150 ///
7151 /// [google.cloud.speech.v2.StreamingRecognitionResult.is_final]: crate::model::StreamingRecognitionResult::is_final
7152 pub results: std::vec::Vec<crate::model::StreamingRecognitionResult>,
7153
7154 /// Indicates the type of speech event.
7155 pub speech_event_type: crate::model::streaming_recognize_response::SpeechEventType,
7156
7157 /// Time offset between the beginning of the audio and event emission.
7158 pub speech_event_offset: std::option::Option<wkt::Duration>,
7159
7160 /// Metadata about the recognition.
7161 pub metadata: std::option::Option<crate::model::RecognitionResponseMetadata>,
7162
7163 pub(crate) _unknown_fields: serde_json::Map<std::string::String, serde_json::Value>,
7164}
7165
7166impl StreamingRecognizeResponse {
7167 pub fn new() -> Self {
7168 std::default::Default::default()
7169 }
7170
7171 /// Sets the value of [results][crate::model::StreamingRecognizeResponse::results].
7172 ///
7173 /// # Example
7174 /// ```ignore,no_run
7175 /// # use google_cloud_speech_v2::model::StreamingRecognizeResponse;
7176 /// use google_cloud_speech_v2::model::StreamingRecognitionResult;
7177 /// let x = StreamingRecognizeResponse::new()
7178 /// .set_results([
7179 /// StreamingRecognitionResult::default()/* use setters */,
7180 /// StreamingRecognitionResult::default()/* use (different) setters */,
7181 /// ]);
7182 /// ```
7183 pub fn set_results<T, V>(mut self, v: T) -> Self
7184 where
7185 T: std::iter::IntoIterator<Item = V>,
7186 V: std::convert::Into<crate::model::StreamingRecognitionResult>,
7187 {
7188 use std::iter::Iterator;
7189 self.results = v.into_iter().map(|i| i.into()).collect();
7190 self
7191 }
7192
7193 /// Sets the value of [speech_event_type][crate::model::StreamingRecognizeResponse::speech_event_type].
7194 ///
7195 /// # Example
7196 /// ```ignore,no_run
7197 /// # use google_cloud_speech_v2::model::StreamingRecognizeResponse;
7198 /// use google_cloud_speech_v2::model::streaming_recognize_response::SpeechEventType;
7199 /// let x0 = StreamingRecognizeResponse::new().set_speech_event_type(SpeechEventType::EndOfSingleUtterance);
7200 /// let x1 = StreamingRecognizeResponse::new().set_speech_event_type(SpeechEventType::SpeechActivityBegin);
7201 /// let x2 = StreamingRecognizeResponse::new().set_speech_event_type(SpeechEventType::SpeechActivityEnd);
7202 /// ```
7203 pub fn set_speech_event_type<
7204 T: std::convert::Into<crate::model::streaming_recognize_response::SpeechEventType>,
7205 >(
7206 mut self,
7207 v: T,
7208 ) -> Self {
7209 self.speech_event_type = v.into();
7210 self
7211 }
7212
7213 /// Sets the value of [speech_event_offset][crate::model::StreamingRecognizeResponse::speech_event_offset].
7214 ///
7215 /// # Example
7216 /// ```ignore,no_run
7217 /// # use google_cloud_speech_v2::model::StreamingRecognizeResponse;
7218 /// use wkt::Duration;
7219 /// let x = StreamingRecognizeResponse::new().set_speech_event_offset(Duration::default()/* use setters */);
7220 /// ```
7221 pub fn set_speech_event_offset<T>(mut self, v: T) -> Self
7222 where
7223 T: std::convert::Into<wkt::Duration>,
7224 {
7225 self.speech_event_offset = std::option::Option::Some(v.into());
7226 self
7227 }
7228
7229 /// Sets or clears the value of [speech_event_offset][crate::model::StreamingRecognizeResponse::speech_event_offset].
7230 ///
7231 /// # Example
7232 /// ```ignore,no_run
7233 /// # use google_cloud_speech_v2::model::StreamingRecognizeResponse;
7234 /// use wkt::Duration;
7235 /// let x = StreamingRecognizeResponse::new().set_or_clear_speech_event_offset(Some(Duration::default()/* use setters */));
7236 /// let x = StreamingRecognizeResponse::new().set_or_clear_speech_event_offset(None::<Duration>);
7237 /// ```
7238 pub fn set_or_clear_speech_event_offset<T>(mut self, v: std::option::Option<T>) -> Self
7239 where
7240 T: std::convert::Into<wkt::Duration>,
7241 {
7242 self.speech_event_offset = v.map(|x| x.into());
7243 self
7244 }
7245
7246 /// Sets the value of [metadata][crate::model::StreamingRecognizeResponse::metadata].
7247 ///
7248 /// # Example
7249 /// ```ignore,no_run
7250 /// # use google_cloud_speech_v2::model::StreamingRecognizeResponse;
7251 /// use google_cloud_speech_v2::model::RecognitionResponseMetadata;
7252 /// let x = StreamingRecognizeResponse::new().set_metadata(RecognitionResponseMetadata::default()/* use setters */);
7253 /// ```
7254 pub fn set_metadata<T>(mut self, v: T) -> Self
7255 where
7256 T: std::convert::Into<crate::model::RecognitionResponseMetadata>,
7257 {
7258 self.metadata = std::option::Option::Some(v.into());
7259 self
7260 }
7261
7262 /// Sets or clears the value of [metadata][crate::model::StreamingRecognizeResponse::metadata].
7263 ///
7264 /// # Example
7265 /// ```ignore,no_run
7266 /// # use google_cloud_speech_v2::model::StreamingRecognizeResponse;
7267 /// use google_cloud_speech_v2::model::RecognitionResponseMetadata;
7268 /// let x = StreamingRecognizeResponse::new().set_or_clear_metadata(Some(RecognitionResponseMetadata::default()/* use setters */));
7269 /// let x = StreamingRecognizeResponse::new().set_or_clear_metadata(None::<RecognitionResponseMetadata>);
7270 /// ```
7271 pub fn set_or_clear_metadata<T>(mut self, v: std::option::Option<T>) -> Self
7272 where
7273 T: std::convert::Into<crate::model::RecognitionResponseMetadata>,
7274 {
7275 self.metadata = v.map(|x| x.into());
7276 self
7277 }
7278}
7279
7280impl wkt::message::Message for StreamingRecognizeResponse {
7281 fn typename() -> &'static str {
7282 "type.googleapis.com/google.cloud.speech.v2.StreamingRecognizeResponse"
7283 }
7284}
7285
7286/// Defines additional types related to [StreamingRecognizeResponse].
7287pub mod streaming_recognize_response {
7288 #[allow(unused_imports)]
7289 use super::*;
7290
7291 /// Indicates the type of speech event.
7292 ///
7293 /// # Working with unknown values
7294 ///
7295 /// This enum is defined as `#[non_exhaustive]` because Google Cloud may add
7296 /// additional enum variants at any time. Adding new variants is not considered
7297 /// a breaking change. Applications should write their code in anticipation of:
7298 ///
7299 /// - New values appearing in future releases of the client library, **and**
7300 /// - New values received dynamically, without application changes.
7301 ///
7302 /// Please consult the [Working with enums] section in the user guide for some
7303 /// guidelines.
7304 ///
7305 /// [Working with enums]: https://google-cloud-rust.github.io/working_with_enums.html
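    ///
    /// # Example
    ///
    /// A minimal sketch of reacting to the event type carried by a response;
    /// the `describe` helper is purely illustrative. Because the enum is
    /// `#[non_exhaustive]`, the match keeps a wildcard arm for values this
    /// version of the library does not know about.
    ///
    /// ```ignore,no_run
    /// # use google_cloud_speech_v2::model::streaming_recognize_response::SpeechEventType;
    /// fn describe(event: &SpeechEventType) -> &'static str {
    ///     match event {
    ///         SpeechEventType::SpeechActivityBegin => "speaker started talking",
    ///         SpeechEventType::SpeechActivityEnd => "speaker stopped talking",
    ///         SpeechEventType::EndOfSingleUtterance => "utterance complete; stream will close",
    ///         SpeechEventType::Unspecified => "no event",
    ///         // Covers `UnknownValue` and any variant added in the future.
    ///         _ => "unrecognized speech event",
    ///     }
    /// }
    /// ```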
7306 #[derive(Clone, Debug, PartialEq)]
7307 #[non_exhaustive]
7308 pub enum SpeechEventType {
7309 /// No speech event specified.
7310 Unspecified,
7311 /// This event indicates that the server has detected the end of the user's
7312 /// speech utterance and expects no additional speech. Therefore, the server
7313 /// will not process additional audio and will close the gRPC bidirectional
7314 /// stream. This event is only sent if there was a force cutoff due to
7315 /// silence being detected early. This event is only available through the
7316 /// `latest_short` [model][google.cloud.speech.v2.Recognizer.model].
7317 ///
7318 /// [google.cloud.speech.v2.Recognizer.model]: crate::model::Recognizer::model
7319 EndOfSingleUtterance,
7320 /// This event indicates that the server has detected the beginning of human
7321 /// voice activity in the stream. This event can be returned multiple times
7322 /// if speech starts and stops repeatedly throughout the stream. This event
7323 /// is only sent if `voice_activity_events` is set to true.
7324 SpeechActivityBegin,
7325 /// This event indicates that the server has detected the end of human voice
7326 /// activity in the stream. This event can be returned multiple times if
7327 /// speech starts and stops repeatedly throughout the stream. This event is
7328 /// only sent if `voice_activity_events` is set to true.
7329 SpeechActivityEnd,
7330 /// If set, the enum was initialized with an unknown value.
7331 ///
7332 /// Applications can examine the value using [SpeechEventType::value] or
7333 /// [SpeechEventType::name].
7334 UnknownValue(speech_event_type::UnknownValue),
7335 }
7336
7337 #[doc(hidden)]
7338 pub mod speech_event_type {
7339 #[allow(unused_imports)]
7340 use super::*;
7341 #[derive(Clone, Debug, PartialEq)]
7342 pub struct UnknownValue(pub(crate) wkt::internal::UnknownEnumValue);
7343 }
7344
7345 impl SpeechEventType {
7346 /// Gets the enum value.
7347 ///
7348 /// Returns `None` if the enum contains an unknown value deserialized from
7349 /// the string representation of enums.
7350 pub fn value(&self) -> std::option::Option<i32> {
7351 match self {
7352 Self::Unspecified => std::option::Option::Some(0),
7353 Self::EndOfSingleUtterance => std::option::Option::Some(1),
7354 Self::SpeechActivityBegin => std::option::Option::Some(2),
7355 Self::SpeechActivityEnd => std::option::Option::Some(3),
7356 Self::UnknownValue(u) => u.0.value(),
7357 }
7358 }
7359
7360 /// Gets the enum value as a string.
7361 ///
7362 /// Returns `None` if the enum contains an unknown value deserialized from
7363 /// the integer representation of enums.
7364 pub fn name(&self) -> std::option::Option<&str> {
7365 match self {
7366 Self::Unspecified => std::option::Option::Some("SPEECH_EVENT_TYPE_UNSPECIFIED"),
7367 Self::EndOfSingleUtterance => std::option::Option::Some("END_OF_SINGLE_UTTERANCE"),
7368 Self::SpeechActivityBegin => std::option::Option::Some("SPEECH_ACTIVITY_BEGIN"),
7369 Self::SpeechActivityEnd => std::option::Option::Some("SPEECH_ACTIVITY_END"),
7370 Self::UnknownValue(u) => u.0.name(),
7371 }
7372 }
7373 }
7374
7375 impl std::default::Default for SpeechEventType {
7376 fn default() -> Self {
7377 use std::convert::From;
7378 Self::from(0)
7379 }
7380 }
7381
7382 impl std::fmt::Display for SpeechEventType {
7383 fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::result::Result<(), std::fmt::Error> {
7384 wkt::internal::display_enum(f, self.name(), self.value())
7385 }
7386 }
7387
7388 impl std::convert::From<i32> for SpeechEventType {
7389 fn from(value: i32) -> Self {
7390 match value {
7391 0 => Self::Unspecified,
7392 1 => Self::EndOfSingleUtterance,
7393 2 => Self::SpeechActivityBegin,
7394 3 => Self::SpeechActivityEnd,
7395 _ => Self::UnknownValue(speech_event_type::UnknownValue(
7396 wkt::internal::UnknownEnumValue::Integer(value),
7397 )),
7398 }
7399 }
7400 }
7401
7402 impl std::convert::From<&str> for SpeechEventType {
7403 fn from(value: &str) -> Self {
7404 use std::string::ToString;
7405 match value {
7406 "SPEECH_EVENT_TYPE_UNSPECIFIED" => Self::Unspecified,
7407 "END_OF_SINGLE_UTTERANCE" => Self::EndOfSingleUtterance,
7408 "SPEECH_ACTIVITY_BEGIN" => Self::SpeechActivityBegin,
7409 "SPEECH_ACTIVITY_END" => Self::SpeechActivityEnd,
7410 _ => Self::UnknownValue(speech_event_type::UnknownValue(
7411 wkt::internal::UnknownEnumValue::String(value.to_string()),
7412 )),
7413 }
7414 }
7415 }
7416
7417 impl serde::ser::Serialize for SpeechEventType {
7418 fn serialize<S>(&self, serializer: S) -> std::result::Result<S::Ok, S::Error>
7419 where
7420 S: serde::Serializer,
7421 {
7422 match self {
7423 Self::Unspecified => serializer.serialize_i32(0),
7424 Self::EndOfSingleUtterance => serializer.serialize_i32(1),
7425 Self::SpeechActivityBegin => serializer.serialize_i32(2),
7426 Self::SpeechActivityEnd => serializer.serialize_i32(3),
7427 Self::UnknownValue(u) => u.0.serialize(serializer),
7428 }
7429 }
7430 }
7431
7432 impl<'de> serde::de::Deserialize<'de> for SpeechEventType {
7433 fn deserialize<D>(deserializer: D) -> std::result::Result<Self, D::Error>
7434 where
7435 D: serde::Deserializer<'de>,
7436 {
7437 deserializer.deserialize_any(wkt::internal::EnumVisitor::<SpeechEventType>::new(
7438 ".google.cloud.speech.v2.StreamingRecognizeResponse.SpeechEventType",
7439 ))
7440 }
7441 }
7442}
7443
7444/// Message representing the config for the Speech-to-Text API. This includes an
7445/// optional [KMS key](https://cloud.google.com/kms/docs/resource-hierarchy#keys)
7446/// with which incoming data will be encrypted.
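///
/// # Example
///
/// A minimal sketch of a `Config` carrying a customer-managed encryption key;
/// the project, location, key ring, and key names are placeholders.
///
/// ```ignore,no_run
/// # use google_cloud_speech_v2::model::Config;
/// let config = Config::new()
///     .set_name("projects/my-project/locations/us-central1/config")
///     .set_kms_key_name(
///         "projects/my-project/locations/us-central1/keyRings/my-ring/cryptoKeys/my-key",
///     );
/// ```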
7447#[derive(Clone, Default, PartialEq)]
7448#[non_exhaustive]
7449pub struct Config {
7450 /// Output only. Identifier. The name of the config resource. There is exactly
7451 /// one config resource per project per location. The expected format is
7452 /// `projects/{project}/locations/{location}/config`.
7453 pub name: std::string::String,
7454
7455 /// Optional. An optional [KMS key
7456 /// name](https://cloud.google.com/kms/docs/resource-hierarchy#keys) that if
7457 /// present, will be used to encrypt Speech-to-Text resources at-rest. Updating
7458 /// this key will not encrypt existing resources using this key; only new
7459 /// resources will be encrypted using this key. The expected format is
7460 /// `projects/{project}/locations/{location}/keyRings/{key_ring}/cryptoKeys/{crypto_key}`.
7461 pub kms_key_name: std::string::String,
7462
7463 /// Output only. The most recent time this resource was modified.
7464 pub update_time: std::option::Option<wkt::Timestamp>,
7465
7466 pub(crate) _unknown_fields: serde_json::Map<std::string::String, serde_json::Value>,
7467}
7468
7469impl Config {
7470 pub fn new() -> Self {
7471 std::default::Default::default()
7472 }
7473
7474 /// Sets the value of [name][crate::model::Config::name].
7475 ///
7476 /// # Example
7477 /// ```ignore,no_run
7478 /// # use google_cloud_speech_v2::model::Config;
7479 /// let x = Config::new().set_name("example");
7480 /// ```
7481 pub fn set_name<T: std::convert::Into<std::string::String>>(mut self, v: T) -> Self {
7482 self.name = v.into();
7483 self
7484 }
7485
7486 /// Sets the value of [kms_key_name][crate::model::Config::kms_key_name].
7487 ///
7488 /// # Example
7489 /// ```ignore,no_run
7490 /// # use google_cloud_speech_v2::model::Config;
7491 /// let x = Config::new().set_kms_key_name("example");
7492 /// ```
7493 pub fn set_kms_key_name<T: std::convert::Into<std::string::String>>(mut self, v: T) -> Self {
7494 self.kms_key_name = v.into();
7495 self
7496 }
7497
7498 /// Sets the value of [update_time][crate::model::Config::update_time].
7499 ///
7500 /// # Example
7501 /// ```ignore,no_run
7502 /// # use google_cloud_speech_v2::model::Config;
7503 /// use wkt::Timestamp;
7504 /// let x = Config::new().set_update_time(Timestamp::default()/* use setters */);
7505 /// ```
7506 pub fn set_update_time<T>(mut self, v: T) -> Self
7507 where
7508 T: std::convert::Into<wkt::Timestamp>,
7509 {
7510 self.update_time = std::option::Option::Some(v.into());
7511 self
7512 }
7513
7514 /// Sets or clears the value of [update_time][crate::model::Config::update_time].
7515 ///
7516 /// # Example
7517 /// ```ignore,no_run
7518 /// # use google_cloud_speech_v2::model::Config;
7519 /// use wkt::Timestamp;
7520 /// let x = Config::new().set_or_clear_update_time(Some(Timestamp::default()/* use setters */));
7521 /// let x = Config::new().set_or_clear_update_time(None::<Timestamp>);
7522 /// ```
7523 pub fn set_or_clear_update_time<T>(mut self, v: std::option::Option<T>) -> Self
7524 where
7525 T: std::convert::Into<wkt::Timestamp>,
7526 {
7527 self.update_time = v.map(|x| x.into());
7528 self
7529 }
7530}
7531
7532impl wkt::message::Message for Config {
7533 fn typename() -> &'static str {
7534 "type.googleapis.com/google.cloud.speech.v2.Config"
7535 }
7536}
7537
7538/// Request message for the
7539/// [GetConfig][google.cloud.speech.v2.Speech.GetConfig] method.
7540///
7541/// [google.cloud.speech.v2.Speech.GetConfig]: crate::client::Speech::get_config
7542#[derive(Clone, Default, PartialEq)]
7543#[non_exhaustive]
7544pub struct GetConfigRequest {
7545 /// Required. The name of the config to retrieve. There is exactly one config
7546 /// resource per project per location. The expected format is
7547 /// `projects/{project}/locations/{location}/config`.
7548 pub name: std::string::String,
7549
7550 pub(crate) _unknown_fields: serde_json::Map<std::string::String, serde_json::Value>,
7551}
7552
7553impl GetConfigRequest {
7554 pub fn new() -> Self {
7555 std::default::Default::default()
7556 }
7557
7558 /// Sets the value of [name][crate::model::GetConfigRequest::name].
7559 ///
7560 /// # Example
7561 /// ```ignore,no_run
7562 /// # use google_cloud_speech_v2::model::GetConfigRequest;
7563 /// let x = GetConfigRequest::new().set_name("example");
7564 /// ```
7565 pub fn set_name<T: std::convert::Into<std::string::String>>(mut self, v: T) -> Self {
7566 self.name = v.into();
7567 self
7568 }
7569}
7570
7571impl wkt::message::Message for GetConfigRequest {
7572 fn typename() -> &'static str {
7573 "type.googleapis.com/google.cloud.speech.v2.GetConfigRequest"
7574 }
7575}
7576
7577/// Request message for the
7578/// [UpdateConfig][google.cloud.speech.v2.Speech.UpdateConfig] method.
7579///
7580/// [google.cloud.speech.v2.Speech.UpdateConfig]: crate::client::Speech::update_config
7581#[derive(Clone, Default, PartialEq)]
7582#[non_exhaustive]
7583pub struct UpdateConfigRequest {
7584 /// Required. The config to update.
7585 ///
7586 /// The config's `name` field is used to identify the config to be updated.
7587 /// The expected format is `projects/{project}/locations/{location}/config`.
7588 pub config: std::option::Option<crate::model::Config>,
7589
7590 /// The list of fields to be updated.
7591 pub update_mask: std::option::Option<wkt::FieldMask>,
7592
7593 pub(crate) _unknown_fields: serde_json::Map<std::string::String, serde_json::Value>,
7594}
7595
7596impl UpdateConfigRequest {
7597 pub fn new() -> Self {
7598 std::default::Default::default()
7599 }
7600
7601 /// Sets the value of [config][crate::model::UpdateConfigRequest::config].
7602 ///
7603 /// # Example
7604 /// ```ignore,no_run
7605 /// # use google_cloud_speech_v2::model::UpdateConfigRequest;
7606 /// use google_cloud_speech_v2::model::Config;
7607 /// let x = UpdateConfigRequest::new().set_config(Config::default()/* use setters */);
7608 /// ```
7609 pub fn set_config<T>(mut self, v: T) -> Self
7610 where
7611 T: std::convert::Into<crate::model::Config>,
7612 {
7613 self.config = std::option::Option::Some(v.into());
7614 self
7615 }
7616
7617 /// Sets or clears the value of [config][crate::model::UpdateConfigRequest::config].
7618 ///
7619 /// # Example
7620 /// ```ignore,no_run
7621 /// # use google_cloud_speech_v2::model::UpdateConfigRequest;
7622 /// use google_cloud_speech_v2::model::Config;
7623 /// let x = UpdateConfigRequest::new().set_or_clear_config(Some(Config::default()/* use setters */));
7624 /// let x = UpdateConfigRequest::new().set_or_clear_config(None::<Config>);
7625 /// ```
7626 pub fn set_or_clear_config<T>(mut self, v: std::option::Option<T>) -> Self
7627 where
7628 T: std::convert::Into<crate::model::Config>,
7629 {
7630 self.config = v.map(|x| x.into());
7631 self
7632 }
7633
7634 /// Sets the value of [update_mask][crate::model::UpdateConfigRequest::update_mask].
7635 ///
7636 /// # Example
7637 /// ```ignore,no_run
7638 /// # use google_cloud_speech_v2::model::UpdateConfigRequest;
7639 /// use wkt::FieldMask;
7640 /// let x = UpdateConfigRequest::new().set_update_mask(FieldMask::default()/* use setters */);
7641 /// ```
7642 pub fn set_update_mask<T>(mut self, v: T) -> Self
7643 where
7644 T: std::convert::Into<wkt::FieldMask>,
7645 {
7646 self.update_mask = std::option::Option::Some(v.into());
7647 self
7648 }
7649
7650 /// Sets or clears the value of [update_mask][crate::model::UpdateConfigRequest::update_mask].
7651 ///
7652 /// # Example
7653 /// ```ignore,no_run
7654 /// # use google_cloud_speech_v2::model::UpdateConfigRequest;
7655 /// use wkt::FieldMask;
7656 /// let x = UpdateConfigRequest::new().set_or_clear_update_mask(Some(FieldMask::default()/* use setters */));
7657 /// let x = UpdateConfigRequest::new().set_or_clear_update_mask(None::<FieldMask>);
7658 /// ```
7659 pub fn set_or_clear_update_mask<T>(mut self, v: std::option::Option<T>) -> Self
7660 where
7661 T: std::convert::Into<wkt::FieldMask>,
7662 {
7663 self.update_mask = v.map(|x| x.into());
7664 self
7665 }
7666}
7667
7668impl wkt::message::Message for UpdateConfigRequest {
7669 fn typename() -> &'static str {
7670 "type.googleapis.com/google.cloud.speech.v2.UpdateConfigRequest"
7671 }
7672}
7673
7674/// CustomClass for biasing in speech recognition. Used to define a set of words
7675/// or phrases that represents a common concept or theme likely to appear in your
7676/// audio, for example a list of passenger ship names.
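///
/// # Example
///
/// A minimal sketch of building a `CustomClass` for the passenger-ship example
/// above; the display name and ship names are placeholders.
///
/// ```ignore,no_run
/// # use google_cloud_speech_v2::model::CustomClass;
/// use google_cloud_speech_v2::model::custom_class::ClassItem;
/// let ships = CustomClass::new()
///     .set_display_name("passenger-ships")
///     .set_items([
///         ClassItem::new().set_value("Queen Mary"),
///         ClassItem::new().set_value("Queen Elizabeth"),
///     ]);
/// ```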
7677#[derive(Clone, Default, PartialEq)]
7678#[non_exhaustive]
7679pub struct CustomClass {
7680 /// Output only. Identifier. The resource name of the CustomClass.
7681 /// Format:
7682 /// `projects/{project}/locations/{location}/customClasses/{custom_class}`.
7683 pub name: std::string::String,
7684
7685 /// Output only. System-assigned unique identifier for the CustomClass.
7686 pub uid: std::string::String,
7687
7688 /// Optional. User-settable, human-readable name for the CustomClass. Must be
7689 /// 63 characters or less.
7690 pub display_name: std::string::String,
7691
7692 /// A collection of class items.
7693 pub items: std::vec::Vec<crate::model::custom_class::ClassItem>,
7694
7695 /// Output only. The CustomClass lifecycle state.
7696 pub state: crate::model::custom_class::State,
7697
7698 /// Output only. Creation time.
7699 pub create_time: std::option::Option<wkt::Timestamp>,
7700
7701 /// Output only. The most recent time this resource was modified.
7702 pub update_time: std::option::Option<wkt::Timestamp>,
7703
7704 /// Output only. The time at which this resource was requested for deletion.
7705 pub delete_time: std::option::Option<wkt::Timestamp>,
7706
7707 /// Output only. The time at which this resource will be purged.
7708 pub expire_time: std::option::Option<wkt::Timestamp>,
7709
7710 /// Optional. Allows users to store small amounts of arbitrary data.
7711 /// Both the key and the value must be 63 characters or less each.
7712 /// At most 100 annotations.
7713 pub annotations: std::collections::HashMap<std::string::String, std::string::String>,
7714
7715 /// Output only. This checksum is computed by the server based on the value of
7716 /// other fields. This may be sent on update, undelete, and delete requests to
7717 /// ensure the client has an up-to-date value before proceeding.
7718 pub etag: std::string::String,
7719
7720 /// Output only. Whether or not this CustomClass is in the process of being
7721 /// updated.
7722 pub reconciling: bool,
7723
7724 /// Output only. The [KMS key
7725 /// name](https://cloud.google.com/kms/docs/resource-hierarchy#keys) with which
7726 /// the CustomClass is encrypted. The expected format is
7727 /// `projects/{project}/locations/{location}/keyRings/{key_ring}/cryptoKeys/{crypto_key}`.
7728 pub kms_key_name: std::string::String,
7729
7730 /// Output only. The [KMS key version
7731 /// name](https://cloud.google.com/kms/docs/resource-hierarchy#key_versions)
7732 /// with which the CustomClass is encrypted. The expected format is
7733 /// `projects/{project}/locations/{location}/keyRings/{key_ring}/cryptoKeys/{crypto_key}/cryptoKeyVersions/{crypto_key_version}`.
7734 pub kms_key_version_name: std::string::String,
7735
7736 pub(crate) _unknown_fields: serde_json::Map<std::string::String, serde_json::Value>,
7737}
7738
7739impl CustomClass {
7740 pub fn new() -> Self {
7741 std::default::Default::default()
7742 }
7743
7744 /// Sets the value of [name][crate::model::CustomClass::name].
7745 ///
7746 /// # Example
7747 /// ```ignore,no_run
7748 /// # use google_cloud_speech_v2::model::CustomClass;
7749 /// let x = CustomClass::new().set_name("example");
7750 /// ```
7751 pub fn set_name<T: std::convert::Into<std::string::String>>(mut self, v: T) -> Self {
7752 self.name = v.into();
7753 self
7754 }
7755
7756 /// Sets the value of [uid][crate::model::CustomClass::uid].
7757 ///
7758 /// # Example
7759 /// ```ignore,no_run
7760 /// # use google_cloud_speech_v2::model::CustomClass;
7761 /// let x = CustomClass::new().set_uid("example");
7762 /// ```
7763 pub fn set_uid<T: std::convert::Into<std::string::String>>(mut self, v: T) -> Self {
7764 self.uid = v.into();
7765 self
7766 }
7767
7768 /// Sets the value of [display_name][crate::model::CustomClass::display_name].
7769 ///
7770 /// # Example
7771 /// ```ignore,no_run
7772 /// # use google_cloud_speech_v2::model::CustomClass;
7773 /// let x = CustomClass::new().set_display_name("example");
7774 /// ```
7775 pub fn set_display_name<T: std::convert::Into<std::string::String>>(mut self, v: T) -> Self {
7776 self.display_name = v.into();
7777 self
7778 }
7779
7780 /// Sets the value of [items][crate::model::CustomClass::items].
7781 ///
7782 /// # Example
7783 /// ```ignore,no_run
7784 /// # use google_cloud_speech_v2::model::CustomClass;
7785 /// use google_cloud_speech_v2::model::custom_class::ClassItem;
7786 /// let x = CustomClass::new()
7787 /// .set_items([
7788 /// ClassItem::default()/* use setters */,
7789 /// ClassItem::default()/* use (different) setters */,
7790 /// ]);
7791 /// ```
7792 pub fn set_items<T, V>(mut self, v: T) -> Self
7793 where
7794 T: std::iter::IntoIterator<Item = V>,
7795 V: std::convert::Into<crate::model::custom_class::ClassItem>,
7796 {
7797 use std::iter::Iterator;
7798 self.items = v.into_iter().map(|i| i.into()).collect();
7799 self
7800 }
7801
7802 /// Sets the value of [state][crate::model::CustomClass::state].
7803 ///
7804 /// # Example
7805 /// ```ignore,no_run
7806 /// # use google_cloud_speech_v2::model::CustomClass;
7807 /// use google_cloud_speech_v2::model::custom_class::State;
7808 /// let x0 = CustomClass::new().set_state(State::Active);
7809 /// let x1 = CustomClass::new().set_state(State::Deleted);
7810 /// ```
7811 pub fn set_state<T: std::convert::Into<crate::model::custom_class::State>>(
7812 mut self,
7813 v: T,
7814 ) -> Self {
7815 self.state = v.into();
7816 self
7817 }
7818
7819 /// Sets the value of [create_time][crate::model::CustomClass::create_time].
7820 ///
7821 /// # Example
7822 /// ```ignore,no_run
7823 /// # use google_cloud_speech_v2::model::CustomClass;
7824 /// use wkt::Timestamp;
7825 /// let x = CustomClass::new().set_create_time(Timestamp::default()/* use setters */);
7826 /// ```
7827 pub fn set_create_time<T>(mut self, v: T) -> Self
7828 where
7829 T: std::convert::Into<wkt::Timestamp>,
7830 {
7831 self.create_time = std::option::Option::Some(v.into());
7832 self
7833 }
7834
7835 /// Sets or clears the value of [create_time][crate::model::CustomClass::create_time].
7836 ///
7837 /// # Example
7838 /// ```ignore,no_run
7839 /// # use google_cloud_speech_v2::model::CustomClass;
7840 /// use wkt::Timestamp;
7841 /// let x = CustomClass::new().set_or_clear_create_time(Some(Timestamp::default()/* use setters */));
7842 /// let x = CustomClass::new().set_or_clear_create_time(None::<Timestamp>);
7843 /// ```
7844 pub fn set_or_clear_create_time<T>(mut self, v: std::option::Option<T>) -> Self
7845 where
7846 T: std::convert::Into<wkt::Timestamp>,
7847 {
7848 self.create_time = v.map(|x| x.into());
7849 self
7850 }
7851
7852 /// Sets the value of [update_time][crate::model::CustomClass::update_time].
7853 ///
7854 /// # Example
7855 /// ```ignore,no_run
7856 /// # use google_cloud_speech_v2::model::CustomClass;
7857 /// use wkt::Timestamp;
7858 /// let x = CustomClass::new().set_update_time(Timestamp::default()/* use setters */);
7859 /// ```
7860 pub fn set_update_time<T>(mut self, v: T) -> Self
7861 where
7862 T: std::convert::Into<wkt::Timestamp>,
7863 {
7864 self.update_time = std::option::Option::Some(v.into());
7865 self
7866 }
7867
7868 /// Sets or clears the value of [update_time][crate::model::CustomClass::update_time].
7869 ///
7870 /// # Example
7871 /// ```ignore,no_run
7872 /// # use google_cloud_speech_v2::model::CustomClass;
7873 /// use wkt::Timestamp;
7874 /// let x = CustomClass::new().set_or_clear_update_time(Some(Timestamp::default()/* use setters */));
7875 /// let x = CustomClass::new().set_or_clear_update_time(None::<Timestamp>);
7876 /// ```
7877 pub fn set_or_clear_update_time<T>(mut self, v: std::option::Option<T>) -> Self
7878 where
7879 T: std::convert::Into<wkt::Timestamp>,
7880 {
7881 self.update_time = v.map(|x| x.into());
7882 self
7883 }
7884
7885 /// Sets the value of [delete_time][crate::model::CustomClass::delete_time].
7886 ///
7887 /// # Example
7888 /// ```ignore,no_run
7889 /// # use google_cloud_speech_v2::model::CustomClass;
7890 /// use wkt::Timestamp;
7891 /// let x = CustomClass::new().set_delete_time(Timestamp::default()/* use setters */);
7892 /// ```
7893 pub fn set_delete_time<T>(mut self, v: T) -> Self
7894 where
7895 T: std::convert::Into<wkt::Timestamp>,
7896 {
7897 self.delete_time = std::option::Option::Some(v.into());
7898 self
7899 }
7900
7901 /// Sets or clears the value of [delete_time][crate::model::CustomClass::delete_time].
7902 ///
7903 /// # Example
7904 /// ```ignore,no_run
7905 /// # use google_cloud_speech_v2::model::CustomClass;
7906 /// use wkt::Timestamp;
7907 /// let x = CustomClass::new().set_or_clear_delete_time(Some(Timestamp::default()/* use setters */));
7908 /// let x = CustomClass::new().set_or_clear_delete_time(None::<Timestamp>);
7909 /// ```
7910 pub fn set_or_clear_delete_time<T>(mut self, v: std::option::Option<T>) -> Self
7911 where
7912 T: std::convert::Into<wkt::Timestamp>,
7913 {
7914 self.delete_time = v.map(|x| x.into());
7915 self
7916 }
7917
7918 /// Sets the value of [expire_time][crate::model::CustomClass::expire_time].
7919 ///
7920 /// # Example
7921 /// ```ignore,no_run
7922 /// # use google_cloud_speech_v2::model::CustomClass;
7923 /// use wkt::Timestamp;
7924 /// let x = CustomClass::new().set_expire_time(Timestamp::default()/* use setters */);
7925 /// ```
7926 pub fn set_expire_time<T>(mut self, v: T) -> Self
7927 where
7928 T: std::convert::Into<wkt::Timestamp>,
7929 {
7930 self.expire_time = std::option::Option::Some(v.into());
7931 self
7932 }
7933
7934 /// Sets or clears the value of [expire_time][crate::model::CustomClass::expire_time].
7935 ///
7936 /// # Example
7937 /// ```ignore,no_run
7938 /// # use google_cloud_speech_v2::model::CustomClass;
7939 /// use wkt::Timestamp;
7940 /// let x = CustomClass::new().set_or_clear_expire_time(Some(Timestamp::default()/* use setters */));
7941 /// let x = CustomClass::new().set_or_clear_expire_time(None::<Timestamp>);
7942 /// ```
7943 pub fn set_or_clear_expire_time<T>(mut self, v: std::option::Option<T>) -> Self
7944 where
7945 T: std::convert::Into<wkt::Timestamp>,
7946 {
7947 self.expire_time = v.map(|x| x.into());
7948 self
7949 }
7950
7951 /// Sets the value of [annotations][crate::model::CustomClass::annotations].
7952 ///
7953 /// # Example
7954 /// ```ignore,no_run
7955 /// # use google_cloud_speech_v2::model::CustomClass;
7956 /// let x = CustomClass::new().set_annotations([
7957 /// ("key0", "abc"),
7958 /// ("key1", "xyz"),
7959 /// ]);
7960 /// ```
7961 pub fn set_annotations<T, K, V>(mut self, v: T) -> Self
7962 where
7963 T: std::iter::IntoIterator<Item = (K, V)>,
7964 K: std::convert::Into<std::string::String>,
7965 V: std::convert::Into<std::string::String>,
7966 {
7967 use std::iter::Iterator;
7968 self.annotations = v.into_iter().map(|(k, v)| (k.into(), v.into())).collect();
7969 self
7970 }
7971
7972 /// Sets the value of [etag][crate::model::CustomClass::etag].
7973 ///
7974 /// # Example
7975 /// ```ignore,no_run
7976 /// # use google_cloud_speech_v2::model::CustomClass;
7977 /// let x = CustomClass::new().set_etag("example");
7978 /// ```
7979 pub fn set_etag<T: std::convert::Into<std::string::String>>(mut self, v: T) -> Self {
7980 self.etag = v.into();
7981 self
7982 }
7983
7984 /// Sets the value of [reconciling][crate::model::CustomClass::reconciling].
7985 ///
7986 /// # Example
7987 /// ```ignore,no_run
7988 /// # use google_cloud_speech_v2::model::CustomClass;
7989 /// let x = CustomClass::new().set_reconciling(true);
7990 /// ```
7991 pub fn set_reconciling<T: std::convert::Into<bool>>(mut self, v: T) -> Self {
7992 self.reconciling = v.into();
7993 self
7994 }
7995
7996 /// Sets the value of [kms_key_name][crate::model::CustomClass::kms_key_name].
7997 ///
7998 /// # Example
7999 /// ```ignore,no_run
8000 /// # use google_cloud_speech_v2::model::CustomClass;
8001 /// let x = CustomClass::new().set_kms_key_name("example");
8002 /// ```
8003 pub fn set_kms_key_name<T: std::convert::Into<std::string::String>>(mut self, v: T) -> Self {
8004 self.kms_key_name = v.into();
8005 self
8006 }
8007
8008 /// Sets the value of [kms_key_version_name][crate::model::CustomClass::kms_key_version_name].
8009 ///
8010 /// # Example
8011 /// ```ignore,no_run
8012 /// # use google_cloud_speech_v2::model::CustomClass;
8013 /// let x = CustomClass::new().set_kms_key_version_name("example");
8014 /// ```
8015 pub fn set_kms_key_version_name<T: std::convert::Into<std::string::String>>(
8016 mut self,
8017 v: T,
8018 ) -> Self {
8019 self.kms_key_version_name = v.into();
8020 self
8021 }
8022}
8023
8024impl wkt::message::Message for CustomClass {
8025 fn typename() -> &'static str {
8026 "type.googleapis.com/google.cloud.speech.v2.CustomClass"
8027 }
8028}
8029
8030/// Defines additional types related to [CustomClass].
8031pub mod custom_class {
8032 #[allow(unused_imports)]
8033 use super::*;
8034
8035 /// An item of the class.
8036 #[derive(Clone, Default, PartialEq)]
8037 #[non_exhaustive]
8038 pub struct ClassItem {
8039 /// The class item's value.
8040 pub value: std::string::String,
8041
8042 pub(crate) _unknown_fields: serde_json::Map<std::string::String, serde_json::Value>,
8043 }
8044
8045 impl ClassItem {
8046 pub fn new() -> Self {
8047 std::default::Default::default()
8048 }
8049
8050 /// Sets the value of [value][crate::model::custom_class::ClassItem::value].
8051 ///
8052 /// # Example
8053 /// ```ignore,no_run
8054 /// # use google_cloud_speech_v2::model::custom_class::ClassItem;
8055 /// let x = ClassItem::new().set_value("example");
8056 /// ```
8057 pub fn set_value<T: std::convert::Into<std::string::String>>(mut self, v: T) -> Self {
8058 self.value = v.into();
8059 self
8060 }
8061 }
8062
8063 impl wkt::message::Message for ClassItem {
8064 fn typename() -> &'static str {
8065 "type.googleapis.com/google.cloud.speech.v2.CustomClass.ClassItem"
8066 }
8067 }
8068
8069 /// Set of states that define the lifecycle of a CustomClass.
8070 ///
8071 /// # Working with unknown values
8072 ///
8073 /// This enum is defined as `#[non_exhaustive]` because Google Cloud may add
8074 /// additional enum variants at any time. Adding new variants is not considered
8075 /// a breaking change. Applications should write their code in anticipation of:
8076 ///
8077 /// - New values appearing in future releases of the client library, **and**
8078 /// - New values received dynamically, without application changes.
8079 ///
8080 /// Please consult the [Working with enums] section in the user guide for some
8081 /// guidelines.
8082 ///
8083 /// [Working with enums]: https://google-cloud-rust.github.io/working_with_enums.html
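    ///
    /// # Example
    ///
    /// A minimal sketch of checking the lifecycle state of a `CustomClass`;
    /// the `is_usable` helper is purely illustrative. The wildcard arm covers
    /// `Deleted`, `Unspecified`, `UnknownValue`, and any variant added in the
    /// future.
    ///
    /// ```ignore,no_run
    /// # use google_cloud_speech_v2::model::custom_class::State;
    /// fn is_usable(state: &State) -> bool {
    ///     match state {
    ///         State::Active => true,
    ///         // Deleted, unspecified, unknown, or future states are not usable.
    ///         _ => false,
    ///     }
    /// }
    /// ```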
8084 #[derive(Clone, Debug, PartialEq)]
8085 #[non_exhaustive]
8086 pub enum State {
8087 /// Unspecified state. This is only used/useful for distinguishing
8088 /// unset values.
8089 Unspecified,
8090 /// The normal and active state.
8091 Active,
8092 /// This CustomClass has been deleted.
8093 Deleted,
8094 /// If set, the enum was initialized with an unknown value.
8095 ///
8096 /// Applications can examine the value using [State::value] or
8097 /// [State::name].
8098 UnknownValue(state::UnknownValue),
8099 }
8100
8101 #[doc(hidden)]
8102 pub mod state {
8103 #[allow(unused_imports)]
8104 use super::*;
8105 #[derive(Clone, Debug, PartialEq)]
8106 pub struct UnknownValue(pub(crate) wkt::internal::UnknownEnumValue);
8107 }
8108
8109 impl State {
8110 /// Gets the enum value.
8111 ///
8112 /// Returns `None` if the enum contains an unknown value deserialized from
8113 /// the string representation of enums.
8114 pub fn value(&self) -> std::option::Option<i32> {
8115 match self {
8116 Self::Unspecified => std::option::Option::Some(0),
8117 Self::Active => std::option::Option::Some(2),
8118 Self::Deleted => std::option::Option::Some(4),
8119 Self::UnknownValue(u) => u.0.value(),
8120 }
8121 }
8122
8123 /// Gets the enum value as a string.
8124 ///
8125 /// Returns `None` if the enum contains an unknown value deserialized from
8126 /// the integer representation of enums.
8127 pub fn name(&self) -> std::option::Option<&str> {
8128 match self {
8129 Self::Unspecified => std::option::Option::Some("STATE_UNSPECIFIED"),
8130 Self::Active => std::option::Option::Some("ACTIVE"),
8131 Self::Deleted => std::option::Option::Some("DELETED"),
8132 Self::UnknownValue(u) => u.0.name(),
8133 }
8134 }
8135 }
8136
8137 impl std::default::Default for State {
8138 fn default() -> Self {
8139 use std::convert::From;
8140 Self::from(0)
8141 }
8142 }
8143
8144 impl std::fmt::Display for State {
8145 fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::result::Result<(), std::fmt::Error> {
8146 wkt::internal::display_enum(f, self.name(), self.value())
8147 }
8148 }
8149
8150 impl std::convert::From<i32> for State {
8151 fn from(value: i32) -> Self {
8152 match value {
8153 0 => Self::Unspecified,
8154 2 => Self::Active,
8155 4 => Self::Deleted,
8156 _ => Self::UnknownValue(state::UnknownValue(
8157 wkt::internal::UnknownEnumValue::Integer(value),
8158 )),
8159 }
8160 }
8161 }
8162
8163 impl std::convert::From<&str> for State {
8164 fn from(value: &str) -> Self {
8165 use std::string::ToString;
8166 match value {
8167 "STATE_UNSPECIFIED" => Self::Unspecified,
8168 "ACTIVE" => Self::Active,
8169 "DELETED" => Self::Deleted,
8170 _ => Self::UnknownValue(state::UnknownValue(
8171 wkt::internal::UnknownEnumValue::String(value.to_string()),
8172 )),
8173 }
8174 }
8175 }
8176
8177 impl serde::ser::Serialize for State {
8178 fn serialize<S>(&self, serializer: S) -> std::result::Result<S::Ok, S::Error>
8179 where
8180 S: serde::Serializer,
8181 {
8182 match self {
8183 Self::Unspecified => serializer.serialize_i32(0),
8184 Self::Active => serializer.serialize_i32(2),
8185 Self::Deleted => serializer.serialize_i32(4),
8186 Self::UnknownValue(u) => u.0.serialize(serializer),
8187 }
8188 }
8189 }
8190
8191 impl<'de> serde::de::Deserialize<'de> for State {
8192 fn deserialize<D>(deserializer: D) -> std::result::Result<Self, D::Error>
8193 where
8194 D: serde::Deserializer<'de>,
8195 {
8196 deserializer.deserialize_any(wkt::internal::EnumVisitor::<State>::new(
8197 ".google.cloud.speech.v2.CustomClass.State",
8198 ))
8199 }
8200 }
8201}
8202
8203/// PhraseSet for biasing in speech recognition. A PhraseSet is used to provide
8204/// "hints" to the speech recognizer to favor specific words and phrases in the
8205/// results.
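///
/// # Example
///
/// A minimal sketch of building a `PhraseSet`; the display name, phrase values,
/// and boost values are placeholders (valid boosts are between 0, exclusive,
/// and 20).
///
/// ```ignore,no_run
/// # use google_cloud_speech_v2::model::PhraseSet;
/// use google_cloud_speech_v2::model::phrase_set::Phrase;
/// let hints = PhraseSet::new()
///     .set_display_name("voice-command-hints")
///     .set_boost(10.0_f32)
///     .set_phrases([
///         Phrase::new().set_value("play some music"),
///         // A per-phrase boost overrides the set-level boost.
///         Phrase::new().set_value("turn up the volume").set_boost(15.0_f32),
///     ]);
/// ```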
8206#[derive(Clone, Default, PartialEq)]
8207#[non_exhaustive]
8208pub struct PhraseSet {
8209 /// Output only. Identifier. The resource name of the PhraseSet.
8210 /// Format: `projects/{project}/locations/{location}/phraseSets/{phrase_set}`.
8211 pub name: std::string::String,
8212
8213 /// Output only. System-assigned unique identifier for the PhraseSet.
8214 pub uid: std::string::String,
8215
    /// A list of words and phrases.
8217 pub phrases: std::vec::Vec<crate::model::phrase_set::Phrase>,
8218
    /// Hint Boost. A positive value increases the probability that a specific
    /// phrase is recognized over other similar-sounding phrases. The higher the
    /// boost, the higher the chance of false-positive recognition as well.
    /// Valid `boost` values are between 0 (exclusive) and 20. We recommend using
    /// a binary search approach to find the optimal value for your use case, as
    /// well as adding phrases both with and without boost to your requests.
8225 pub boost: f32,
8226
8227 /// User-settable, human-readable name for the PhraseSet. Must be 63
8228 /// characters or less.
8229 pub display_name: std::string::String,
8230
8231 /// Output only. The PhraseSet lifecycle state.
8232 pub state: crate::model::phrase_set::State,
8233
8234 /// Output only. Creation time.
8235 pub create_time: std::option::Option<wkt::Timestamp>,
8236
8237 /// Output only. The most recent time this resource was modified.
8238 pub update_time: std::option::Option<wkt::Timestamp>,
8239
8240 /// Output only. The time at which this resource was requested for deletion.
8241 pub delete_time: std::option::Option<wkt::Timestamp>,
8242
8243 /// Output only. The time at which this resource will be purged.
8244 pub expire_time: std::option::Option<wkt::Timestamp>,
8245
8246 /// Allows users to store small amounts of arbitrary data.
8247 /// Both the key and the value must be 63 characters or less each.
8248 /// At most 100 annotations.
8249 pub annotations: std::collections::HashMap<std::string::String, std::string::String>,
8250
8251 /// Output only. This checksum is computed by the server based on the value of
8252 /// other fields. This may be sent on update, undelete, and delete requests to
8253 /// ensure the client has an up-to-date value before proceeding.
8254 pub etag: std::string::String,
8255
8256 /// Output only. Whether or not this PhraseSet is in the process of being
8257 /// updated.
8258 pub reconciling: bool,
8259
8260 /// Output only. The [KMS key
8261 /// name](https://cloud.google.com/kms/docs/resource-hierarchy#keys) with which
8262 /// the PhraseSet is encrypted. The expected format is
8263 /// `projects/{project}/locations/{location}/keyRings/{key_ring}/cryptoKeys/{crypto_key}`.
8264 pub kms_key_name: std::string::String,
8265
8266 /// Output only. The [KMS key version
8267 /// name](https://cloud.google.com/kms/docs/resource-hierarchy#key_versions)
8268 /// with which the PhraseSet is encrypted. The expected format is
8269 /// `projects/{project}/locations/{location}/keyRings/{key_ring}/cryptoKeys/{crypto_key}/cryptoKeyVersions/{crypto_key_version}`.
8270 pub kms_key_version_name: std::string::String,
8271
8272 pub(crate) _unknown_fields: serde_json::Map<std::string::String, serde_json::Value>,
8273}
8274
8275impl PhraseSet {
8276 pub fn new() -> Self {
8277 std::default::Default::default()
8278 }
8279
8280 /// Sets the value of [name][crate::model::PhraseSet::name].
8281 ///
8282 /// # Example
8283 /// ```ignore,no_run
8284 /// # use google_cloud_speech_v2::model::PhraseSet;
8285 /// let x = PhraseSet::new().set_name("example");
8286 /// ```
8287 pub fn set_name<T: std::convert::Into<std::string::String>>(mut self, v: T) -> Self {
8288 self.name = v.into();
8289 self
8290 }
8291
8292 /// Sets the value of [uid][crate::model::PhraseSet::uid].
8293 ///
8294 /// # Example
8295 /// ```ignore,no_run
8296 /// # use google_cloud_speech_v2::model::PhraseSet;
8297 /// let x = PhraseSet::new().set_uid("example");
8298 /// ```
8299 pub fn set_uid<T: std::convert::Into<std::string::String>>(mut self, v: T) -> Self {
8300 self.uid = v.into();
8301 self
8302 }
8303
8304 /// Sets the value of [phrases][crate::model::PhraseSet::phrases].
8305 ///
8306 /// # Example
8307 /// ```ignore,no_run
8308 /// # use google_cloud_speech_v2::model::PhraseSet;
8309 /// use google_cloud_speech_v2::model::phrase_set::Phrase;
8310 /// let x = PhraseSet::new()
8311 /// .set_phrases([
8312 /// Phrase::default()/* use setters */,
8313 /// Phrase::default()/* use (different) setters */,
8314 /// ]);
8315 /// ```
8316 pub fn set_phrases<T, V>(mut self, v: T) -> Self
8317 where
8318 T: std::iter::IntoIterator<Item = V>,
8319 V: std::convert::Into<crate::model::phrase_set::Phrase>,
8320 {
8321 use std::iter::Iterator;
8322 self.phrases = v.into_iter().map(|i| i.into()).collect();
8323 self
8324 }
8325
8326 /// Sets the value of [boost][crate::model::PhraseSet::boost].
8327 ///
8328 /// # Example
8329 /// ```ignore,no_run
8330 /// # use google_cloud_speech_v2::model::PhraseSet;
8331 /// let x = PhraseSet::new().set_boost(42.0);
8332 /// ```
8333 pub fn set_boost<T: std::convert::Into<f32>>(mut self, v: T) -> Self {
8334 self.boost = v.into();
8335 self
8336 }
8337
8338 /// Sets the value of [display_name][crate::model::PhraseSet::display_name].
8339 ///
8340 /// # Example
8341 /// ```ignore,no_run
8342 /// # use google_cloud_speech_v2::model::PhraseSet;
8343 /// let x = PhraseSet::new().set_display_name("example");
8344 /// ```
8345 pub fn set_display_name<T: std::convert::Into<std::string::String>>(mut self, v: T) -> Self {
8346 self.display_name = v.into();
8347 self
8348 }
8349
8350 /// Sets the value of [state][crate::model::PhraseSet::state].
8351 ///
8352 /// # Example
8353 /// ```ignore,no_run
8354 /// # use google_cloud_speech_v2::model::PhraseSet;
8355 /// use google_cloud_speech_v2::model::phrase_set::State;
8356 /// let x0 = PhraseSet::new().set_state(State::Active);
8357 /// let x1 = PhraseSet::new().set_state(State::Deleted);
8358 /// ```
8359 pub fn set_state<T: std::convert::Into<crate::model::phrase_set::State>>(
8360 mut self,
8361 v: T,
8362 ) -> Self {
8363 self.state = v.into();
8364 self
8365 }
8366
8367 /// Sets the value of [create_time][crate::model::PhraseSet::create_time].
8368 ///
8369 /// # Example
8370 /// ```ignore,no_run
8371 /// # use google_cloud_speech_v2::model::PhraseSet;
8372 /// use wkt::Timestamp;
8373 /// let x = PhraseSet::new().set_create_time(Timestamp::default()/* use setters */);
8374 /// ```
8375 pub fn set_create_time<T>(mut self, v: T) -> Self
8376 where
8377 T: std::convert::Into<wkt::Timestamp>,
8378 {
8379 self.create_time = std::option::Option::Some(v.into());
8380 self
8381 }
8382
8383 /// Sets or clears the value of [create_time][crate::model::PhraseSet::create_time].
8384 ///
8385 /// # Example
8386 /// ```ignore,no_run
8387 /// # use google_cloud_speech_v2::model::PhraseSet;
8388 /// use wkt::Timestamp;
8389 /// let x = PhraseSet::new().set_or_clear_create_time(Some(Timestamp::default()/* use setters */));
8390 /// let x = PhraseSet::new().set_or_clear_create_time(None::<Timestamp>);
8391 /// ```
8392 pub fn set_or_clear_create_time<T>(mut self, v: std::option::Option<T>) -> Self
8393 where
8394 T: std::convert::Into<wkt::Timestamp>,
8395 {
8396 self.create_time = v.map(|x| x.into());
8397 self
8398 }
8399
8400 /// Sets the value of [update_time][crate::model::PhraseSet::update_time].
8401 ///
8402 /// # Example
8403 /// ```ignore,no_run
8404 /// # use google_cloud_speech_v2::model::PhraseSet;
8405 /// use wkt::Timestamp;
8406 /// let x = PhraseSet::new().set_update_time(Timestamp::default()/* use setters */);
8407 /// ```
8408 pub fn set_update_time<T>(mut self, v: T) -> Self
8409 where
8410 T: std::convert::Into<wkt::Timestamp>,
8411 {
8412 self.update_time = std::option::Option::Some(v.into());
8413 self
8414 }
8415
8416 /// Sets or clears the value of [update_time][crate::model::PhraseSet::update_time].
8417 ///
8418 /// # Example
8419 /// ```ignore,no_run
8420 /// # use google_cloud_speech_v2::model::PhraseSet;
8421 /// use wkt::Timestamp;
8422 /// let x = PhraseSet::new().set_or_clear_update_time(Some(Timestamp::default()/* use setters */));
8423 /// let x = PhraseSet::new().set_or_clear_update_time(None::<Timestamp>);
8424 /// ```
8425 pub fn set_or_clear_update_time<T>(mut self, v: std::option::Option<T>) -> Self
8426 where
8427 T: std::convert::Into<wkt::Timestamp>,
8428 {
8429 self.update_time = v.map(|x| x.into());
8430 self
8431 }
8432
8433 /// Sets the value of [delete_time][crate::model::PhraseSet::delete_time].
8434 ///
8435 /// # Example
8436 /// ```ignore,no_run
8437 /// # use google_cloud_speech_v2::model::PhraseSet;
8438 /// use wkt::Timestamp;
8439 /// let x = PhraseSet::new().set_delete_time(Timestamp::default()/* use setters */);
8440 /// ```
8441 pub fn set_delete_time<T>(mut self, v: T) -> Self
8442 where
8443 T: std::convert::Into<wkt::Timestamp>,
8444 {
8445 self.delete_time = std::option::Option::Some(v.into());
8446 self
8447 }
8448
8449 /// Sets or clears the value of [delete_time][crate::model::PhraseSet::delete_time].
8450 ///
8451 /// # Example
8452 /// ```ignore,no_run
8453 /// # use google_cloud_speech_v2::model::PhraseSet;
8454 /// use wkt::Timestamp;
8455 /// let x = PhraseSet::new().set_or_clear_delete_time(Some(Timestamp::default()/* use setters */));
8456 /// let x = PhraseSet::new().set_or_clear_delete_time(None::<Timestamp>);
8457 /// ```
8458 pub fn set_or_clear_delete_time<T>(mut self, v: std::option::Option<T>) -> Self
8459 where
8460 T: std::convert::Into<wkt::Timestamp>,
8461 {
8462 self.delete_time = v.map(|x| x.into());
8463 self
8464 }
8465
8466 /// Sets the value of [expire_time][crate::model::PhraseSet::expire_time].
8467 ///
8468 /// # Example
8469 /// ```ignore,no_run
8470 /// # use google_cloud_speech_v2::model::PhraseSet;
8471 /// use wkt::Timestamp;
8472 /// let x = PhraseSet::new().set_expire_time(Timestamp::default()/* use setters */);
8473 /// ```
8474 pub fn set_expire_time<T>(mut self, v: T) -> Self
8475 where
8476 T: std::convert::Into<wkt::Timestamp>,
8477 {
8478 self.expire_time = std::option::Option::Some(v.into());
8479 self
8480 }
8481
8482 /// Sets or clears the value of [expire_time][crate::model::PhraseSet::expire_time].
8483 ///
8484 /// # Example
8485 /// ```ignore,no_run
8486 /// # use google_cloud_speech_v2::model::PhraseSet;
8487 /// use wkt::Timestamp;
8488 /// let x = PhraseSet::new().set_or_clear_expire_time(Some(Timestamp::default()/* use setters */));
8489 /// let x = PhraseSet::new().set_or_clear_expire_time(None::<Timestamp>);
8490 /// ```
8491 pub fn set_or_clear_expire_time<T>(mut self, v: std::option::Option<T>) -> Self
8492 where
8493 T: std::convert::Into<wkt::Timestamp>,
8494 {
8495 self.expire_time = v.map(|x| x.into());
8496 self
8497 }
8498
8499 /// Sets the value of [annotations][crate::model::PhraseSet::annotations].
8500 ///
8501 /// # Example
8502 /// ```ignore,no_run
8503 /// # use google_cloud_speech_v2::model::PhraseSet;
8504 /// let x = PhraseSet::new().set_annotations([
8505 /// ("key0", "abc"),
8506 /// ("key1", "xyz"),
8507 /// ]);
8508 /// ```
8509 pub fn set_annotations<T, K, V>(mut self, v: T) -> Self
8510 where
8511 T: std::iter::IntoIterator<Item = (K, V)>,
8512 K: std::convert::Into<std::string::String>,
8513 V: std::convert::Into<std::string::String>,
8514 {
8515 use std::iter::Iterator;
8516 self.annotations = v.into_iter().map(|(k, v)| (k.into(), v.into())).collect();
8517 self
8518 }
8519
8520 /// Sets the value of [etag][crate::model::PhraseSet::etag].
8521 ///
8522 /// # Example
8523 /// ```ignore,no_run
8524 /// # use google_cloud_speech_v2::model::PhraseSet;
8525 /// let x = PhraseSet::new().set_etag("example");
8526 /// ```
8527 pub fn set_etag<T: std::convert::Into<std::string::String>>(mut self, v: T) -> Self {
8528 self.etag = v.into();
8529 self
8530 }
8531
8532 /// Sets the value of [reconciling][crate::model::PhraseSet::reconciling].
8533 ///
8534 /// # Example
8535 /// ```ignore,no_run
8536 /// # use google_cloud_speech_v2::model::PhraseSet;
8537 /// let x = PhraseSet::new().set_reconciling(true);
8538 /// ```
8539 pub fn set_reconciling<T: std::convert::Into<bool>>(mut self, v: T) -> Self {
8540 self.reconciling = v.into();
8541 self
8542 }
8543
8544 /// Sets the value of [kms_key_name][crate::model::PhraseSet::kms_key_name].
8545 ///
8546 /// # Example
8547 /// ```ignore,no_run
8548 /// # use google_cloud_speech_v2::model::PhraseSet;
8549 /// let x = PhraseSet::new().set_kms_key_name("example");
8550 /// ```
8551 pub fn set_kms_key_name<T: std::convert::Into<std::string::String>>(mut self, v: T) -> Self {
8552 self.kms_key_name = v.into();
8553 self
8554 }
8555
8556 /// Sets the value of [kms_key_version_name][crate::model::PhraseSet::kms_key_version_name].
8557 ///
8558 /// # Example
8559 /// ```ignore,no_run
8560 /// # use google_cloud_speech_v2::model::PhraseSet;
8561 /// let x = PhraseSet::new().set_kms_key_version_name("example");
8562 /// ```
8563 pub fn set_kms_key_version_name<T: std::convert::Into<std::string::String>>(
8564 mut self,
8565 v: T,
8566 ) -> Self {
8567 self.kms_key_version_name = v.into();
8568 self
8569 }
8570}
8571
8572impl wkt::message::Message for PhraseSet {
8573 fn typename() -> &'static str {
8574 "type.googleapis.com/google.cloud.speech.v2.PhraseSet"
8575 }
8576}
8577
8578/// Defines additional types related to [PhraseSet].
8579pub mod phrase_set {
8580 #[allow(unused_imports)]
8581 use super::*;
8582
8583 /// A Phrase contains words and phrase "hints" so that the speech recognition
8584 /// is more likely to recognize them. This can be used to improve the accuracy
8585 /// for specific words and phrases, for example, if specific commands are
8586 /// typically spoken by the user. This can also be used to add additional words
8587 /// to the vocabulary of the recognizer.
8588 ///
8589 /// List items can also include CustomClass references containing groups of
8590 /// words that represent common concepts that occur in natural language.
8591 #[derive(Clone, Default, PartialEq)]
8592 #[non_exhaustive]
8593 pub struct Phrase {
8594 /// The phrase itself.
8595 pub value: std::string::String,
8596
        /// Hint Boost. Overrides the boost set at the phrase set level.
        /// A positive value increases the probability that a specific phrase is
        /// recognized over other similar-sounding phrases. The higher the boost,
        /// the higher the chance of false-positive recognition as well. Negative
        /// boost values would correspond to anti-biasing, but anti-biasing is not
        /// enabled, so negative boost values return an error. Boost values must
        /// be between 0 and 20; any values outside that range return an error.
        /// We recommend using a binary search approach to find the optimal value
        /// for your use case, as well as adding phrases both with and without
        /// boost to your requests.
8607 pub boost: f32,
8608
8609 pub(crate) _unknown_fields: serde_json::Map<std::string::String, serde_json::Value>,
8610 }
8611
8612 impl Phrase {
8613 pub fn new() -> Self {
8614 std::default::Default::default()
8615 }
8616
8617 /// Sets the value of [value][crate::model::phrase_set::Phrase::value].
8618 ///
8619 /// # Example
8620 /// ```ignore,no_run
8621 /// # use google_cloud_speech_v2::model::phrase_set::Phrase;
8622 /// let x = Phrase::new().set_value("example");
8623 /// ```
8624 pub fn set_value<T: std::convert::Into<std::string::String>>(mut self, v: T) -> Self {
8625 self.value = v.into();
8626 self
8627 }
8628
8629 /// Sets the value of [boost][crate::model::phrase_set::Phrase::boost].
8630 ///
8631 /// # Example
8632 /// ```ignore,no_run
8633 /// # use google_cloud_speech_v2::model::phrase_set::Phrase;
8634 /// let x = Phrase::new().set_boost(42.0);
8635 /// ```
8636 pub fn set_boost<T: std::convert::Into<f32>>(mut self, v: T) -> Self {
8637 self.boost = v.into();
8638 self
8639 }
8640 }
8641
8642 impl wkt::message::Message for Phrase {
8643 fn typename() -> &'static str {
8644 "type.googleapis.com/google.cloud.speech.v2.PhraseSet.Phrase"
8645 }
8646 }
8647
8648 /// Set of states that define the lifecycle of a PhraseSet.
8649 ///
8650 /// # Working with unknown values
8651 ///
8652 /// This enum is defined as `#[non_exhaustive]` because Google Cloud may add
8653 /// additional enum variants at any time. Adding new variants is not considered
8654 /// a breaking change. Applications should write their code in anticipation of:
8655 ///
8656 /// - New values appearing in future releases of the client library, **and**
8657 /// - New values received dynamically, without application changes.
8658 ///
8659 /// Please consult the [Working with enums] section in the user guide for some
8660 /// guidelines.
8661 ///
8662 /// [Working with enums]: https://google-cloud-rust.github.io/working_with_enums.html
8663 #[derive(Clone, Debug, PartialEq)]
8664 #[non_exhaustive]
8665 pub enum State {
8666 /// Unspecified state. This is only used/useful for distinguishing
8667 /// unset values.
8668 Unspecified,
8669 /// The normal and active state.
8670 Active,
8671 /// This PhraseSet has been deleted.
8672 Deleted,
8673 /// If set, the enum was initialized with an unknown value.
8674 ///
8675 /// Applications can examine the value using [State::value] or
8676 /// [State::name].
8677 UnknownValue(state::UnknownValue),
8678 }
8679
8680 #[doc(hidden)]
8681 pub mod state {
8682 #[allow(unused_imports)]
8683 use super::*;
8684 #[derive(Clone, Debug, PartialEq)]
8685 pub struct UnknownValue(pub(crate) wkt::internal::UnknownEnumValue);
8686 }
8687
8688 impl State {
8689 /// Gets the enum value.
8690 ///
8691 /// Returns `None` if the enum contains an unknown value deserialized from
8692 /// the string representation of enums.
8693 pub fn value(&self) -> std::option::Option<i32> {
8694 match self {
8695 Self::Unspecified => std::option::Option::Some(0),
8696 Self::Active => std::option::Option::Some(2),
8697 Self::Deleted => std::option::Option::Some(4),
8698 Self::UnknownValue(u) => u.0.value(),
8699 }
8700 }
8701
8702 /// Gets the enum value as a string.
8703 ///
8704 /// Returns `None` if the enum contains an unknown value deserialized from
8705 /// the integer representation of enums.
8706 pub fn name(&self) -> std::option::Option<&str> {
8707 match self {
8708 Self::Unspecified => std::option::Option::Some("STATE_UNSPECIFIED"),
8709 Self::Active => std::option::Option::Some("ACTIVE"),
8710 Self::Deleted => std::option::Option::Some("DELETED"),
8711 Self::UnknownValue(u) => u.0.name(),
8712 }
8713 }
8714 }
8715
8716 impl std::default::Default for State {
8717 fn default() -> Self {
8718 use std::convert::From;
8719 Self::from(0)
8720 }
8721 }
8722
8723 impl std::fmt::Display for State {
8724 fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::result::Result<(), std::fmt::Error> {
8725 wkt::internal::display_enum(f, self.name(), self.value())
8726 }
8727 }
8728
8729 impl std::convert::From<i32> for State {
8730 fn from(value: i32) -> Self {
8731 match value {
8732 0 => Self::Unspecified,
8733 2 => Self::Active,
8734 4 => Self::Deleted,
8735 _ => Self::UnknownValue(state::UnknownValue(
8736 wkt::internal::UnknownEnumValue::Integer(value),
8737 )),
8738 }
8739 }
8740 }
8741
8742 impl std::convert::From<&str> for State {
8743 fn from(value: &str) -> Self {
8744 use std::string::ToString;
8745 match value {
8746 "STATE_UNSPECIFIED" => Self::Unspecified,
8747 "ACTIVE" => Self::Active,
8748 "DELETED" => Self::Deleted,
8749 _ => Self::UnknownValue(state::UnknownValue(
8750 wkt::internal::UnknownEnumValue::String(value.to_string()),
8751 )),
8752 }
8753 }
8754 }
8755
8756 impl serde::ser::Serialize for State {
8757 fn serialize<S>(&self, serializer: S) -> std::result::Result<S::Ok, S::Error>
8758 where
8759 S: serde::Serializer,
8760 {
8761 match self {
8762 Self::Unspecified => serializer.serialize_i32(0),
8763 Self::Active => serializer.serialize_i32(2),
8764 Self::Deleted => serializer.serialize_i32(4),
8765 Self::UnknownValue(u) => u.0.serialize(serializer),
8766 }
8767 }
8768 }
8769
8770 impl<'de> serde::de::Deserialize<'de> for State {
8771 fn deserialize<D>(deserializer: D) -> std::result::Result<Self, D::Error>
8772 where
8773 D: serde::Deserializer<'de>,
8774 {
8775 deserializer.deserialize_any(wkt::internal::EnumVisitor::<State>::new(
8776 ".google.cloud.speech.v2.PhraseSet.State",
8777 ))
8778 }
8779 }
8780}
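
// Editor's sketch: handling `phrase_set::State`, including values this client
// version does not know about. Only items defined in this module are used.
//
// ```ignore,no_run
// use google_cloud_speech_v2::model::phrase_set::State;
//
// fn describe(state: &State) -> String {
//     match state {
//         State::Active => "active".to_string(),
//         State::Deleted => "deleted".to_string(),
//         State::Unspecified => "unspecified".to_string(),
//         // `State` is #[non_exhaustive] and may carry values unknown to this
//         // client version, so a catch-all arm is required; `Display` prints
//         // either the name or the numeric value.
//         other => format!("unhandled state: {}", other),
//     }
// }
//
// // Wire values convert via `From<i32>` / `From<&str>`.
// assert_eq!(State::from(2), State::Active);
// assert_eq!(State::from("DELETED"), State::Deleted);
// ```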
8781
8782/// Request message for the
8783/// [CreateCustomClass][google.cloud.speech.v2.Speech.CreateCustomClass] method.
8784///
8785/// [google.cloud.speech.v2.Speech.CreateCustomClass]: crate::client::Speech::create_custom_class
8786#[derive(Clone, Default, PartialEq)]
8787#[non_exhaustive]
8788pub struct CreateCustomClassRequest {
8789 /// Required. The CustomClass to create.
8790 pub custom_class: std::option::Option<crate::model::CustomClass>,
8791
8792 /// If set, validate the request and preview the CustomClass, but do not
8793 /// actually create it.
8794 pub validate_only: bool,
8795
8796 /// The ID to use for the CustomClass, which will become the final component of
8797 /// the CustomClass's resource name.
8798 ///
8799 /// This value should be 4-63 characters, and valid characters
8800 /// are /[a-z][0-9]-/.
8801 pub custom_class_id: std::string::String,
8802
8803 /// Required. The project and location where this CustomClass will be created.
8804 /// The expected format is `projects/{project}/locations/{location}`.
8805 pub parent: std::string::String,
8806
8807 pub(crate) _unknown_fields: serde_json::Map<std::string::String, serde_json::Value>,
8808}
8809
8810impl CreateCustomClassRequest {
8811 pub fn new() -> Self {
8812 std::default::Default::default()
8813 }
8814
8815 /// Sets the value of [custom_class][crate::model::CreateCustomClassRequest::custom_class].
8816 ///
8817 /// # Example
8818 /// ```ignore,no_run
8819 /// # use google_cloud_speech_v2::model::CreateCustomClassRequest;
8820 /// use google_cloud_speech_v2::model::CustomClass;
8821 /// let x = CreateCustomClassRequest::new().set_custom_class(CustomClass::default()/* use setters */);
8822 /// ```
8823 pub fn set_custom_class<T>(mut self, v: T) -> Self
8824 where
8825 T: std::convert::Into<crate::model::CustomClass>,
8826 {
8827 self.custom_class = std::option::Option::Some(v.into());
8828 self
8829 }
8830
8831 /// Sets or clears the value of [custom_class][crate::model::CreateCustomClassRequest::custom_class].
8832 ///
8833 /// # Example
8834 /// ```ignore,no_run
8835 /// # use google_cloud_speech_v2::model::CreateCustomClassRequest;
8836 /// use google_cloud_speech_v2::model::CustomClass;
8837 /// let x = CreateCustomClassRequest::new().set_or_clear_custom_class(Some(CustomClass::default()/* use setters */));
8838 /// let x = CreateCustomClassRequest::new().set_or_clear_custom_class(None::<CustomClass>);
8839 /// ```
8840 pub fn set_or_clear_custom_class<T>(mut self, v: std::option::Option<T>) -> Self
8841 where
8842 T: std::convert::Into<crate::model::CustomClass>,
8843 {
8844 self.custom_class = v.map(|x| x.into());
8845 self
8846 }
8847
8848 /// Sets the value of [validate_only][crate::model::CreateCustomClassRequest::validate_only].
8849 ///
8850 /// # Example
8851 /// ```ignore,no_run
8852 /// # use google_cloud_speech_v2::model::CreateCustomClassRequest;
8853 /// let x = CreateCustomClassRequest::new().set_validate_only(true);
8854 /// ```
8855 pub fn set_validate_only<T: std::convert::Into<bool>>(mut self, v: T) -> Self {
8856 self.validate_only = v.into();
8857 self
8858 }
8859
8860 /// Sets the value of [custom_class_id][crate::model::CreateCustomClassRequest::custom_class_id].
8861 ///
8862 /// # Example
8863 /// ```ignore,no_run
8864 /// # use google_cloud_speech_v2::model::CreateCustomClassRequest;
8865 /// let x = CreateCustomClassRequest::new().set_custom_class_id("example");
8866 /// ```
8867 pub fn set_custom_class_id<T: std::convert::Into<std::string::String>>(mut self, v: T) -> Self {
8868 self.custom_class_id = v.into();
8869 self
8870 }
8871
8872 /// Sets the value of [parent][crate::model::CreateCustomClassRequest::parent].
8873 ///
8874 /// # Example
8875 /// ```ignore,no_run
8876 /// # use google_cloud_speech_v2::model::CreateCustomClassRequest;
8877 /// let x = CreateCustomClassRequest::new().set_parent("example");
8878 /// ```
8879 pub fn set_parent<T: std::convert::Into<std::string::String>>(mut self, v: T) -> Self {
8880 self.parent = v.into();
8881 self
8882 }
8883}
8884
8885impl wkt::message::Message for CreateCustomClassRequest {
8886 fn typename() -> &'static str {
8887 "type.googleapis.com/google.cloud.speech.v2.CreateCustomClassRequest"
8888 }
8889}
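
// Editor's sketch: the setters above are chainable, so a request is typically
// built in one expression. The project, location, and ID below are placeholders.
//
// ```ignore,no_run
// use google_cloud_speech_v2::model::{CreateCustomClassRequest, CustomClass};
//
// let request = CreateCustomClassRequest::new()
//     .set_parent("projects/my-project/locations/us-central1")
//     .set_custom_class_id("my-custom-class")
//     .set_custom_class(CustomClass::default()/* use setters */)
//     // Dry run: ask the service to validate and preview without creating.
//     .set_validate_only(true);
// ```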
8890
8891/// Request message for the
8892/// [ListCustomClasses][google.cloud.speech.v2.Speech.ListCustomClasses] method.
8893///
8894/// [google.cloud.speech.v2.Speech.ListCustomClasses]: crate::client::Speech::list_custom_classes
8895#[derive(Clone, Default, PartialEq)]
8896#[non_exhaustive]
8897pub struct ListCustomClassesRequest {
8898 /// Required. The project and location of CustomClass resources to list. The
8899 /// expected format is `projects/{project}/locations/{location}`.
8900 pub parent: std::string::String,
8901
8902    /// Number of results per request. A valid page_size ranges from 0 to 100
8903 /// inclusive. If the page_size is zero or unspecified, a page size of 5 will
8904 /// be chosen. If the page size exceeds 100, it will be coerced down to 100.
8905 /// Note that a call might return fewer results than the requested page size.
8906 pub page_size: i32,
8907
8908 /// A page token, received from a previous
8909 /// [ListCustomClasses][google.cloud.speech.v2.Speech.ListCustomClasses] call.
8910 /// Provide this to retrieve the subsequent page.
8911 ///
8912 /// When paginating, all other parameters provided to
8913 /// [ListCustomClasses][google.cloud.speech.v2.Speech.ListCustomClasses] must
8914 /// match the call that provided the page token.
8915 ///
8916 /// [google.cloud.speech.v2.Speech.ListCustomClasses]: crate::client::Speech::list_custom_classes
8917 pub page_token: std::string::String,
8918
8919    /// Whether or not to show resources that have been deleted.
8920 pub show_deleted: bool,
8921
8922 pub(crate) _unknown_fields: serde_json::Map<std::string::String, serde_json::Value>,
8923}
8924
8925impl ListCustomClassesRequest {
8926 pub fn new() -> Self {
8927 std::default::Default::default()
8928 }
8929
8930 /// Sets the value of [parent][crate::model::ListCustomClassesRequest::parent].
8931 ///
8932 /// # Example
8933 /// ```ignore,no_run
8934 /// # use google_cloud_speech_v2::model::ListCustomClassesRequest;
8935 /// let x = ListCustomClassesRequest::new().set_parent("example");
8936 /// ```
8937 pub fn set_parent<T: std::convert::Into<std::string::String>>(mut self, v: T) -> Self {
8938 self.parent = v.into();
8939 self
8940 }
8941
8942 /// Sets the value of [page_size][crate::model::ListCustomClassesRequest::page_size].
8943 ///
8944 /// # Example
8945 /// ```ignore,no_run
8946 /// # use google_cloud_speech_v2::model::ListCustomClassesRequest;
8947 /// let x = ListCustomClassesRequest::new().set_page_size(42);
8948 /// ```
8949 pub fn set_page_size<T: std::convert::Into<i32>>(mut self, v: T) -> Self {
8950 self.page_size = v.into();
8951 self
8952 }
8953
8954 /// Sets the value of [page_token][crate::model::ListCustomClassesRequest::page_token].
8955 ///
8956 /// # Example
8957 /// ```ignore,no_run
8958 /// # use google_cloud_speech_v2::model::ListCustomClassesRequest;
8959 /// let x = ListCustomClassesRequest::new().set_page_token("example");
8960 /// ```
8961 pub fn set_page_token<T: std::convert::Into<std::string::String>>(mut self, v: T) -> Self {
8962 self.page_token = v.into();
8963 self
8964 }
8965
8966 /// Sets the value of [show_deleted][crate::model::ListCustomClassesRequest::show_deleted].
8967 ///
8968 /// # Example
8969 /// ```ignore,no_run
8970 /// # use google_cloud_speech_v2::model::ListCustomClassesRequest;
8971 /// let x = ListCustomClassesRequest::new().set_show_deleted(true);
8972 /// ```
8973 pub fn set_show_deleted<T: std::convert::Into<bool>>(mut self, v: T) -> Self {
8974 self.show_deleted = v.into();
8975 self
8976 }
8977}
8978
8979impl wkt::message::Message for ListCustomClassesRequest {
8980 fn typename() -> &'static str {
8981 "type.googleapis.com/google.cloud.speech.v2.ListCustomClassesRequest"
8982 }
8983}
8984
8985/// Response message for the
8986/// [ListCustomClasses][google.cloud.speech.v2.Speech.ListCustomClasses] method.
8987///
8988/// [google.cloud.speech.v2.Speech.ListCustomClasses]: crate::client::Speech::list_custom_classes
8989#[derive(Clone, Default, PartialEq)]
8990#[non_exhaustive]
8991pub struct ListCustomClassesResponse {
8992 /// The list of requested CustomClasses.
8993 pub custom_classes: std::vec::Vec<crate::model::CustomClass>,
8994
8995 /// A token, which can be sent as
8996 /// [page_token][google.cloud.speech.v2.ListCustomClassesRequest.page_token] to
8997 /// retrieve the next page. If this field is omitted, there are no subsequent
8998 /// pages. This token expires after 72 hours.
8999 ///
9000 /// [google.cloud.speech.v2.ListCustomClassesRequest.page_token]: crate::model::ListCustomClassesRequest::page_token
9001 pub next_page_token: std::string::String,
9002
9003 pub(crate) _unknown_fields: serde_json::Map<std::string::String, serde_json::Value>,
9004}
9005
9006impl ListCustomClassesResponse {
9007 pub fn new() -> Self {
9008 std::default::Default::default()
9009 }
9010
9011 /// Sets the value of [custom_classes][crate::model::ListCustomClassesResponse::custom_classes].
9012 ///
9013 /// # Example
9014 /// ```ignore,no_run
9015 /// # use google_cloud_speech_v2::model::ListCustomClassesResponse;
9016 /// use google_cloud_speech_v2::model::CustomClass;
9017 /// let x = ListCustomClassesResponse::new()
9018 /// .set_custom_classes([
9019 /// CustomClass::default()/* use setters */,
9020 /// CustomClass::default()/* use (different) setters */,
9021 /// ]);
9022 /// ```
9023 pub fn set_custom_classes<T, V>(mut self, v: T) -> Self
9024 where
9025 T: std::iter::IntoIterator<Item = V>,
9026 V: std::convert::Into<crate::model::CustomClass>,
9027 {
9028 use std::iter::Iterator;
9029 self.custom_classes = v.into_iter().map(|i| i.into()).collect();
9030 self
9031 }
9032
9033 /// Sets the value of [next_page_token][crate::model::ListCustomClassesResponse::next_page_token].
9034 ///
9035 /// # Example
9036 /// ```ignore,no_run
9037 /// # use google_cloud_speech_v2::model::ListCustomClassesResponse;
9038 /// let x = ListCustomClassesResponse::new().set_next_page_token("example");
9039 /// ```
9040 pub fn set_next_page_token<T: std::convert::Into<std::string::String>>(mut self, v: T) -> Self {
9041 self.next_page_token = v.into();
9042 self
9043 }
9044}
9045
9046impl wkt::message::Message for ListCustomClassesResponse {
9047 fn typename() -> &'static str {
9048 "type.googleapis.com/google.cloud.speech.v2.ListCustomClassesResponse"
9049 }
9050}
9051
9052#[doc(hidden)]
9053impl gax::paginator::internal::PageableResponse for ListCustomClassesResponse {
9054 type PageItem = crate::model::CustomClass;
9055
9056 fn items(self) -> std::vec::Vec<Self::PageItem> {
9057 self.custom_classes
9058 }
9059
9060 fn next_page_token(&self) -> std::string::String {
9061 use std::clone::Clone;
9062 self.next_page_token.clone()
9063 }
9064}
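
// Editor's sketch: paging through results by hand with the request/response
// pair above. `fetch` is a hypothetical stand-in for whatever issues the
// ListCustomClasses RPC (for example via `crate::client::Speech`); it is not
// part of this module.
//
// ```ignore,no_run
// use google_cloud_speech_v2::model::{ListCustomClassesRequest, ListCustomClassesResponse};
//
// let mut page_token = String::new();
// loop {
//     let request = ListCustomClassesRequest::new()
//         .set_parent("projects/my-project/locations/us-central1")
//         .set_page_size(100)
//         .set_page_token(page_token.clone());
//     // Hypothetical helper that sends the request and returns the response.
//     let response: ListCustomClassesResponse = fetch(request);
//     for custom_class in &response.custom_classes {
//         // ... use `custom_class` ...
//     }
//     if response.next_page_token.is_empty() {
//         break; // an empty token means there are no further pages
//     }
//     page_token = response.next_page_token;
// }
// ```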
9065
9066/// Request message for the
9067/// [GetCustomClass][google.cloud.speech.v2.Speech.GetCustomClass] method.
9068///
9069/// [google.cloud.speech.v2.Speech.GetCustomClass]: crate::client::Speech::get_custom_class
9070#[derive(Clone, Default, PartialEq)]
9071#[non_exhaustive]
9072pub struct GetCustomClassRequest {
9073 /// Required. The name of the CustomClass to retrieve. The expected format is
9074 /// `projects/{project}/locations/{location}/customClasses/{custom_class}`.
9075 pub name: std::string::String,
9076
9077 pub(crate) _unknown_fields: serde_json::Map<std::string::String, serde_json::Value>,
9078}
9079
9080impl GetCustomClassRequest {
9081 pub fn new() -> Self {
9082 std::default::Default::default()
9083 }
9084
9085 /// Sets the value of [name][crate::model::GetCustomClassRequest::name].
9086 ///
9087 /// # Example
9088 /// ```ignore,no_run
9089 /// # use google_cloud_speech_v2::model::GetCustomClassRequest;
9090 /// let x = GetCustomClassRequest::new().set_name("example");
9091 /// ```
9092 pub fn set_name<T: std::convert::Into<std::string::String>>(mut self, v: T) -> Self {
9093 self.name = v.into();
9094 self
9095 }
9096}
9097
9098impl wkt::message::Message for GetCustomClassRequest {
9099 fn typename() -> &'static str {
9100 "type.googleapis.com/google.cloud.speech.v2.GetCustomClassRequest"
9101 }
9102}
9103
9104/// Request message for the
9105/// [UpdateCustomClass][google.cloud.speech.v2.Speech.UpdateCustomClass] method.
9106///
9107/// [google.cloud.speech.v2.Speech.UpdateCustomClass]: crate::client::Speech::update_custom_class
9108#[derive(Clone, Default, PartialEq)]
9109#[non_exhaustive]
9110pub struct UpdateCustomClassRequest {
9111 /// Required. The CustomClass to update.
9112 ///
9113 /// The CustomClass's `name` field is used to identify the CustomClass to
9114 /// update. Format:
9115 /// `projects/{project}/locations/{location}/customClasses/{custom_class}`.
9116 pub custom_class: std::option::Option<crate::model::CustomClass>,
9117
9118 /// The list of fields to be updated. If empty, all fields are considered for
9119 /// update.
9120 pub update_mask: std::option::Option<wkt::FieldMask>,
9121
9122 /// If set, validate the request and preview the updated CustomClass, but do
9123 /// not actually update it.
9124 pub validate_only: bool,
9125
9126 pub(crate) _unknown_fields: serde_json::Map<std::string::String, serde_json::Value>,
9127}
9128
9129impl UpdateCustomClassRequest {
9130 pub fn new() -> Self {
9131 std::default::Default::default()
9132 }
9133
9134 /// Sets the value of [custom_class][crate::model::UpdateCustomClassRequest::custom_class].
9135 ///
9136 /// # Example
9137 /// ```ignore,no_run
9138 /// # use google_cloud_speech_v2::model::UpdateCustomClassRequest;
9139 /// use google_cloud_speech_v2::model::CustomClass;
9140 /// let x = UpdateCustomClassRequest::new().set_custom_class(CustomClass::default()/* use setters */);
9141 /// ```
9142 pub fn set_custom_class<T>(mut self, v: T) -> Self
9143 where
9144 T: std::convert::Into<crate::model::CustomClass>,
9145 {
9146 self.custom_class = std::option::Option::Some(v.into());
9147 self
9148 }
9149
9150 /// Sets or clears the value of [custom_class][crate::model::UpdateCustomClassRequest::custom_class].
9151 ///
9152 /// # Example
9153 /// ```ignore,no_run
9154 /// # use google_cloud_speech_v2::model::UpdateCustomClassRequest;
9155 /// use google_cloud_speech_v2::model::CustomClass;
9156 /// let x = UpdateCustomClassRequest::new().set_or_clear_custom_class(Some(CustomClass::default()/* use setters */));
9157 /// let x = UpdateCustomClassRequest::new().set_or_clear_custom_class(None::<CustomClass>);
9158 /// ```
9159 pub fn set_or_clear_custom_class<T>(mut self, v: std::option::Option<T>) -> Self
9160 where
9161 T: std::convert::Into<crate::model::CustomClass>,
9162 {
9163 self.custom_class = v.map(|x| x.into());
9164 self
9165 }
9166
9167 /// Sets the value of [update_mask][crate::model::UpdateCustomClassRequest::update_mask].
9168 ///
9169 /// # Example
9170 /// ```ignore,no_run
9171 /// # use google_cloud_speech_v2::model::UpdateCustomClassRequest;
9172 /// use wkt::FieldMask;
9173 /// let x = UpdateCustomClassRequest::new().set_update_mask(FieldMask::default()/* use setters */);
9174 /// ```
9175 pub fn set_update_mask<T>(mut self, v: T) -> Self
9176 where
9177 T: std::convert::Into<wkt::FieldMask>,
9178 {
9179 self.update_mask = std::option::Option::Some(v.into());
9180 self
9181 }
9182
9183 /// Sets or clears the value of [update_mask][crate::model::UpdateCustomClassRequest::update_mask].
9184 ///
9185 /// # Example
9186 /// ```ignore,no_run
9187 /// # use google_cloud_speech_v2::model::UpdateCustomClassRequest;
9188 /// use wkt::FieldMask;
9189 /// let x = UpdateCustomClassRequest::new().set_or_clear_update_mask(Some(FieldMask::default()/* use setters */));
9190 /// let x = UpdateCustomClassRequest::new().set_or_clear_update_mask(None::<FieldMask>);
9191 /// ```
9192 pub fn set_or_clear_update_mask<T>(mut self, v: std::option::Option<T>) -> Self
9193 where
9194 T: std::convert::Into<wkt::FieldMask>,
9195 {
9196 self.update_mask = v.map(|x| x.into());
9197 self
9198 }
9199
9200 /// Sets the value of [validate_only][crate::model::UpdateCustomClassRequest::validate_only].
9201 ///
9202 /// # Example
9203 /// ```ignore,no_run
9204 /// # use google_cloud_speech_v2::model::UpdateCustomClassRequest;
9205 /// let x = UpdateCustomClassRequest::new().set_validate_only(true);
9206 /// ```
9207 pub fn set_validate_only<T: std::convert::Into<bool>>(mut self, v: T) -> Self {
9208 self.validate_only = v.into();
9209 self
9210 }
9211}
9212
9213impl wkt::message::Message for UpdateCustomClassRequest {
9214 fn typename() -> &'static str {
9215 "type.googleapis.com/google.cloud.speech.v2.UpdateCustomClassRequest"
9216 }
9217}
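
// Editor's sketch: a partial update combined with a dry run. How the
// `wkt::FieldMask` paths are populated depends on the `wkt` crate's API and is
// elided here; the CustomClass values are placeholders.
//
// ```ignore,no_run
// use google_cloud_speech_v2::model::{CustomClass, UpdateCustomClassRequest};
// use wkt::FieldMask;
//
// // The CustomClass's `name` identifies which resource to update.
// let updated = CustomClass::default()/* use setters */;
//
// let request = UpdateCustomClassRequest::new()
//     .set_custom_class(updated)
//     // Limit the update to specific fields; populate the mask's paths via
//     // the `wkt::FieldMask` API (not shown here).
//     .set_update_mask(FieldMask::default()/* e.g. paths: ["items"] */)
//     .set_validate_only(true);
// ```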
9218
9219/// Request message for the
9220/// [DeleteCustomClass][google.cloud.speech.v2.Speech.DeleteCustomClass] method.
9221///
9222/// [google.cloud.speech.v2.Speech.DeleteCustomClass]: crate::client::Speech::delete_custom_class
9223#[derive(Clone, Default, PartialEq)]
9224#[non_exhaustive]
9225pub struct DeleteCustomClassRequest {
9226 /// Required. The name of the CustomClass to delete.
9227 /// Format:
9228 /// `projects/{project}/locations/{location}/customClasses/{custom_class}`
9229 pub name: std::string::String,
9230
9231 /// If set, validate the request and preview the deleted CustomClass, but do
9232 /// not actually delete it.
9233 pub validate_only: bool,
9234
9235 /// If set to true, and the CustomClass is not found, the request will succeed
9236 /// and be a no-op (no Operation is recorded in this case).
9237 pub allow_missing: bool,
9238
9239 /// This checksum is computed by the server based on the value of other
9240 /// fields. This may be sent on update, undelete, and delete requests to ensure
9241 /// the client has an up-to-date value before proceeding.
9242 pub etag: std::string::String,
9243
9244 pub(crate) _unknown_fields: serde_json::Map<std::string::String, serde_json::Value>,
9245}
9246
9247impl DeleteCustomClassRequest {
9248 pub fn new() -> Self {
9249 std::default::Default::default()
9250 }
9251
9252 /// Sets the value of [name][crate::model::DeleteCustomClassRequest::name].
9253 ///
9254 /// # Example
9255 /// ```ignore,no_run
9256 /// # use google_cloud_speech_v2::model::DeleteCustomClassRequest;
9257 /// let x = DeleteCustomClassRequest::new().set_name("example");
9258 /// ```
9259 pub fn set_name<T: std::convert::Into<std::string::String>>(mut self, v: T) -> Self {
9260 self.name = v.into();
9261 self
9262 }
9263
9264 /// Sets the value of [validate_only][crate::model::DeleteCustomClassRequest::validate_only].
9265 ///
9266 /// # Example
9267 /// ```ignore,no_run
9268 /// # use google_cloud_speech_v2::model::DeleteCustomClassRequest;
9269 /// let x = DeleteCustomClassRequest::new().set_validate_only(true);
9270 /// ```
9271 pub fn set_validate_only<T: std::convert::Into<bool>>(mut self, v: T) -> Self {
9272 self.validate_only = v.into();
9273 self
9274 }
9275
9276 /// Sets the value of [allow_missing][crate::model::DeleteCustomClassRequest::allow_missing].
9277 ///
9278 /// # Example
9279 /// ```ignore,no_run
9280 /// # use google_cloud_speech_v2::model::DeleteCustomClassRequest;
9281 /// let x = DeleteCustomClassRequest::new().set_allow_missing(true);
9282 /// ```
9283 pub fn set_allow_missing<T: std::convert::Into<bool>>(mut self, v: T) -> Self {
9284 self.allow_missing = v.into();
9285 self
9286 }
9287
9288 /// Sets the value of [etag][crate::model::DeleteCustomClassRequest::etag].
9289 ///
9290 /// # Example
9291 /// ```ignore,no_run
9292 /// # use google_cloud_speech_v2::model::DeleteCustomClassRequest;
9293 /// let x = DeleteCustomClassRequest::new().set_etag("example");
9294 /// ```
9295 pub fn set_etag<T: std::convert::Into<std::string::String>>(mut self, v: T) -> Self {
9296 self.etag = v.into();
9297 self
9298 }
9299}
9300
9301impl wkt::message::Message for DeleteCustomClassRequest {
9302 fn typename() -> &'static str {
9303 "type.googleapis.com/google.cloud.speech.v2.DeleteCustomClassRequest"
9304 }
9305}
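
// Editor's sketch: a guarded delete. The `etag` would normally come from a
// previously fetched CustomClass so the delete only proceeds if the resource
// has not changed in the meantime; the values below are placeholders.
//
// ```ignore,no_run
// use google_cloud_speech_v2::model::DeleteCustomClassRequest;
//
// let request = DeleteCustomClassRequest::new()
//     .set_name("projects/my-project/locations/us-central1/customClasses/my-custom-class")
//     // Treat "already deleted" as success rather than an error.
//     .set_allow_missing(true)
//     // Optimistic concurrency check against the last-seen checksum.
//     .set_etag("example-etag");
// ```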
9306
9307/// Request message for the
9308/// [UndeleteCustomClass][google.cloud.speech.v2.Speech.UndeleteCustomClass]
9309/// method.
9310///
9311/// [google.cloud.speech.v2.Speech.UndeleteCustomClass]: crate::client::Speech::undelete_custom_class
9312#[derive(Clone, Default, PartialEq)]
9313#[non_exhaustive]
9314pub struct UndeleteCustomClassRequest {
9315 /// Required. The name of the CustomClass to undelete.
9316 /// Format:
9317 /// `projects/{project}/locations/{location}/customClasses/{custom_class}`
9318 pub name: std::string::String,
9319
9320 /// If set, validate the request and preview the undeleted CustomClass, but do
9321 /// not actually undelete it.
9322 pub validate_only: bool,
9323
9324 /// This checksum is computed by the server based on the value of other
9325 /// fields. This may be sent on update, undelete, and delete requests to ensure
9326 /// the client has an up-to-date value before proceeding.
9327 pub etag: std::string::String,
9328
9329 pub(crate) _unknown_fields: serde_json::Map<std::string::String, serde_json::Value>,
9330}
9331
9332impl UndeleteCustomClassRequest {
9333 pub fn new() -> Self {
9334 std::default::Default::default()
9335 }
9336
9337 /// Sets the value of [name][crate::model::UndeleteCustomClassRequest::name].
9338 ///
9339 /// # Example
9340 /// ```ignore,no_run
9341 /// # use google_cloud_speech_v2::model::UndeleteCustomClassRequest;
9342 /// let x = UndeleteCustomClassRequest::new().set_name("example");
9343 /// ```
9344 pub fn set_name<T: std::convert::Into<std::string::String>>(mut self, v: T) -> Self {
9345 self.name = v.into();
9346 self
9347 }
9348
9349 /// Sets the value of [validate_only][crate::model::UndeleteCustomClassRequest::validate_only].
9350 ///
9351 /// # Example
9352 /// ```ignore,no_run
9353 /// # use google_cloud_speech_v2::model::UndeleteCustomClassRequest;
9354 /// let x = UndeleteCustomClassRequest::new().set_validate_only(true);
9355 /// ```
9356 pub fn set_validate_only<T: std::convert::Into<bool>>(mut self, v: T) -> Self {
9357 self.validate_only = v.into();
9358 self
9359 }
9360
9361 /// Sets the value of [etag][crate::model::UndeleteCustomClassRequest::etag].
9362 ///
9363 /// # Example
9364 /// ```ignore,no_run
9365 /// # use google_cloud_speech_v2::model::UndeleteCustomClassRequest;
9366 /// let x = UndeleteCustomClassRequest::new().set_etag("example");
9367 /// ```
9368 pub fn set_etag<T: std::convert::Into<std::string::String>>(mut self, v: T) -> Self {
9369 self.etag = v.into();
9370 self
9371 }
9372}
9373
9374impl wkt::message::Message for UndeleteCustomClassRequest {
9375 fn typename() -> &'static str {
9376 "type.googleapis.com/google.cloud.speech.v2.UndeleteCustomClassRequest"
9377 }
9378}
9379
9380/// Request message for the
9381/// [CreatePhraseSet][google.cloud.speech.v2.Speech.CreatePhraseSet] method.
9382///
9383/// [google.cloud.speech.v2.Speech.CreatePhraseSet]: crate::client::Speech::create_phrase_set
9384#[derive(Clone, Default, PartialEq)]
9385#[non_exhaustive]
9386pub struct CreatePhraseSetRequest {
9387 /// Required. The PhraseSet to create.
9388 pub phrase_set: std::option::Option<crate::model::PhraseSet>,
9389
9390 /// If set, validate the request and preview the PhraseSet, but do not
9391 /// actually create it.
9392 pub validate_only: bool,
9393
9394 /// The ID to use for the PhraseSet, which will become the final component of
9395 /// the PhraseSet's resource name.
9396 ///
9397 /// This value should be 4-63 characters, and valid characters
9398 /// are /[a-z][0-9]-/.
9399 pub phrase_set_id: std::string::String,
9400
9401 /// Required. The project and location where this PhraseSet will be created.
9402 /// The expected format is `projects/{project}/locations/{location}`.
9403 pub parent: std::string::String,
9404
9405 pub(crate) _unknown_fields: serde_json::Map<std::string::String, serde_json::Value>,
9406}
9407
9408impl CreatePhraseSetRequest {
9409 pub fn new() -> Self {
9410 std::default::Default::default()
9411 }
9412
9413 /// Sets the value of [phrase_set][crate::model::CreatePhraseSetRequest::phrase_set].
9414 ///
9415 /// # Example
9416 /// ```ignore,no_run
9417 /// # use google_cloud_speech_v2::model::CreatePhraseSetRequest;
9418 /// use google_cloud_speech_v2::model::PhraseSet;
9419 /// let x = CreatePhraseSetRequest::new().set_phrase_set(PhraseSet::default()/* use setters */);
9420 /// ```
9421 pub fn set_phrase_set<T>(mut self, v: T) -> Self
9422 where
9423 T: std::convert::Into<crate::model::PhraseSet>,
9424 {
9425 self.phrase_set = std::option::Option::Some(v.into());
9426 self
9427 }
9428
9429 /// Sets or clears the value of [phrase_set][crate::model::CreatePhraseSetRequest::phrase_set].
9430 ///
9431 /// # Example
9432 /// ```ignore,no_run
9433 /// # use google_cloud_speech_v2::model::CreatePhraseSetRequest;
9434 /// use google_cloud_speech_v2::model::PhraseSet;
9435 /// let x = CreatePhraseSetRequest::new().set_or_clear_phrase_set(Some(PhraseSet::default()/* use setters */));
9436 /// let x = CreatePhraseSetRequest::new().set_or_clear_phrase_set(None::<PhraseSet>);
9437 /// ```
9438 pub fn set_or_clear_phrase_set<T>(mut self, v: std::option::Option<T>) -> Self
9439 where
9440 T: std::convert::Into<crate::model::PhraseSet>,
9441 {
9442 self.phrase_set = v.map(|x| x.into());
9443 self
9444 }
9445
9446 /// Sets the value of [validate_only][crate::model::CreatePhraseSetRequest::validate_only].
9447 ///
9448 /// # Example
9449 /// ```ignore,no_run
9450 /// # use google_cloud_speech_v2::model::CreatePhraseSetRequest;
9451 /// let x = CreatePhraseSetRequest::new().set_validate_only(true);
9452 /// ```
9453 pub fn set_validate_only<T: std::convert::Into<bool>>(mut self, v: T) -> Self {
9454 self.validate_only = v.into();
9455 self
9456 }
9457
9458 /// Sets the value of [phrase_set_id][crate::model::CreatePhraseSetRequest::phrase_set_id].
9459 ///
9460 /// # Example
9461 /// ```ignore,no_run
9462 /// # use google_cloud_speech_v2::model::CreatePhraseSetRequest;
9463 /// let x = CreatePhraseSetRequest::new().set_phrase_set_id("example");
9464 /// ```
9465 pub fn set_phrase_set_id<T: std::convert::Into<std::string::String>>(mut self, v: T) -> Self {
9466 self.phrase_set_id = v.into();
9467 self
9468 }
9469
9470 /// Sets the value of [parent][crate::model::CreatePhraseSetRequest::parent].
9471 ///
9472 /// # Example
9473 /// ```ignore,no_run
9474 /// # use google_cloud_speech_v2::model::CreatePhraseSetRequest;
9475 /// let x = CreatePhraseSetRequest::new().set_parent("example");
9476 /// ```
9477 pub fn set_parent<T: std::convert::Into<std::string::String>>(mut self, v: T) -> Self {
9478 self.parent = v.into();
9479 self
9480 }
9481}
9482
9483impl wkt::message::Message for CreatePhraseSetRequest {
9484 fn typename() -> &'static str {
9485 "type.googleapis.com/google.cloud.speech.v2.CreatePhraseSetRequest"
9486 }
9487}
9488
9489/// Request message for the
9490/// [ListPhraseSets][google.cloud.speech.v2.Speech.ListPhraseSets] method.
9491///
9492/// [google.cloud.speech.v2.Speech.ListPhraseSets]: crate::client::Speech::list_phrase_sets
9493#[derive(Clone, Default, PartialEq)]
9494#[non_exhaustive]
9495pub struct ListPhraseSetsRequest {
9496 /// Required. The project and location of PhraseSet resources to list. The
9497 /// expected format is `projects/{project}/locations/{location}`.
9498 pub parent: std::string::String,
9499
9500 /// The maximum number of PhraseSets to return. The service may return fewer
9501 /// than this value. If unspecified, at most 5 PhraseSets will be returned.
9502 /// The maximum value is 100; values above 100 will be coerced to 100.
9503 pub page_size: i32,
9504
9505 /// A page token, received from a previous
9506 /// [ListPhraseSets][google.cloud.speech.v2.Speech.ListPhraseSets] call.
9507 /// Provide this to retrieve the subsequent page.
9508 ///
9509 /// When paginating, all other parameters provided to
9510 /// [ListPhraseSets][google.cloud.speech.v2.Speech.ListPhraseSets] must match
9511 /// the call that provided the page token.
9512 ///
9513 /// [google.cloud.speech.v2.Speech.ListPhraseSets]: crate::client::Speech::list_phrase_sets
9514 pub page_token: std::string::String,
9515
9516    /// Whether or not to show resources that have been deleted.
9517 pub show_deleted: bool,
9518
9519 pub(crate) _unknown_fields: serde_json::Map<std::string::String, serde_json::Value>,
9520}
9521
9522impl ListPhraseSetsRequest {
9523 pub fn new() -> Self {
9524 std::default::Default::default()
9525 }
9526
9527 /// Sets the value of [parent][crate::model::ListPhraseSetsRequest::parent].
9528 ///
9529 /// # Example
9530 /// ```ignore,no_run
9531 /// # use google_cloud_speech_v2::model::ListPhraseSetsRequest;
9532 /// let x = ListPhraseSetsRequest::new().set_parent("example");
9533 /// ```
9534 pub fn set_parent<T: std::convert::Into<std::string::String>>(mut self, v: T) -> Self {
9535 self.parent = v.into();
9536 self
9537 }
9538
9539 /// Sets the value of [page_size][crate::model::ListPhraseSetsRequest::page_size].
9540 ///
9541 /// # Example
9542 /// ```ignore,no_run
9543 /// # use google_cloud_speech_v2::model::ListPhraseSetsRequest;
9544 /// let x = ListPhraseSetsRequest::new().set_page_size(42);
9545 /// ```
9546 pub fn set_page_size<T: std::convert::Into<i32>>(mut self, v: T) -> Self {
9547 self.page_size = v.into();
9548 self
9549 }
9550
9551 /// Sets the value of [page_token][crate::model::ListPhraseSetsRequest::page_token].
9552 ///
9553 /// # Example
9554 /// ```ignore,no_run
9555 /// # use google_cloud_speech_v2::model::ListPhraseSetsRequest;
9556 /// let x = ListPhraseSetsRequest::new().set_page_token("example");
9557 /// ```
9558 pub fn set_page_token<T: std::convert::Into<std::string::String>>(mut self, v: T) -> Self {
9559 self.page_token = v.into();
9560 self
9561 }
9562
9563 /// Sets the value of [show_deleted][crate::model::ListPhraseSetsRequest::show_deleted].
9564 ///
9565 /// # Example
9566 /// ```ignore,no_run
9567 /// # use google_cloud_speech_v2::model::ListPhraseSetsRequest;
9568 /// let x = ListPhraseSetsRequest::new().set_show_deleted(true);
9569 /// ```
9570 pub fn set_show_deleted<T: std::convert::Into<bool>>(mut self, v: T) -> Self {
9571 self.show_deleted = v.into();
9572 self
9573 }
9574}
9575
9576impl wkt::message::Message for ListPhraseSetsRequest {
9577 fn typename() -> &'static str {
9578 "type.googleapis.com/google.cloud.speech.v2.ListPhraseSetsRequest"
9579 }
9580}
9581
9582/// Response message for the
9583/// [ListPhraseSets][google.cloud.speech.v2.Speech.ListPhraseSets] method.
9584///
9585/// [google.cloud.speech.v2.Speech.ListPhraseSets]: crate::client::Speech::list_phrase_sets
9586#[derive(Clone, Default, PartialEq)]
9587#[non_exhaustive]
9588pub struct ListPhraseSetsResponse {
9589 /// The list of requested PhraseSets.
9590 pub phrase_sets: std::vec::Vec<crate::model::PhraseSet>,
9591
9592 /// A token, which can be sent as
9593 /// [page_token][google.cloud.speech.v2.ListPhraseSetsRequest.page_token] to
9594 /// retrieve the next page. If this field is omitted, there are no subsequent
9595 /// pages. This token expires after 72 hours.
9596 ///
9597 /// [google.cloud.speech.v2.ListPhraseSetsRequest.page_token]: crate::model::ListPhraseSetsRequest::page_token
9598 pub next_page_token: std::string::String,
9599
9600 pub(crate) _unknown_fields: serde_json::Map<std::string::String, serde_json::Value>,
9601}
9602
9603impl ListPhraseSetsResponse {
9604 pub fn new() -> Self {
9605 std::default::Default::default()
9606 }
9607
9608 /// Sets the value of [phrase_sets][crate::model::ListPhraseSetsResponse::phrase_sets].
9609 ///
9610 /// # Example
9611 /// ```ignore,no_run
9612 /// # use google_cloud_speech_v2::model::ListPhraseSetsResponse;
9613 /// use google_cloud_speech_v2::model::PhraseSet;
9614 /// let x = ListPhraseSetsResponse::new()
9615 /// .set_phrase_sets([
9616 /// PhraseSet::default()/* use setters */,
9617 /// PhraseSet::default()/* use (different) setters */,
9618 /// ]);
9619 /// ```
9620 pub fn set_phrase_sets<T, V>(mut self, v: T) -> Self
9621 where
9622 T: std::iter::IntoIterator<Item = V>,
9623 V: std::convert::Into<crate::model::PhraseSet>,
9624 {
9625 use std::iter::Iterator;
9626 self.phrase_sets = v.into_iter().map(|i| i.into()).collect();
9627 self
9628 }
9629
9630 /// Sets the value of [next_page_token][crate::model::ListPhraseSetsResponse::next_page_token].
9631 ///
9632 /// # Example
9633 /// ```ignore,no_run
9634 /// # use google_cloud_speech_v2::model::ListPhraseSetsResponse;
9635 /// let x = ListPhraseSetsResponse::new().set_next_page_token("example");
9636 /// ```
9637 pub fn set_next_page_token<T: std::convert::Into<std::string::String>>(mut self, v: T) -> Self {
9638 self.next_page_token = v.into();
9639 self
9640 }
9641}
9642
9643impl wkt::message::Message for ListPhraseSetsResponse {
9644 fn typename() -> &'static str {
9645 "type.googleapis.com/google.cloud.speech.v2.ListPhraseSetsResponse"
9646 }
9647}
9648
9649#[doc(hidden)]
9650impl gax::paginator::internal::PageableResponse for ListPhraseSetsResponse {
9651 type PageItem = crate::model::PhraseSet;
9652
9653 fn items(self) -> std::vec::Vec<Self::PageItem> {
9654 self.phrase_sets
9655 }
9656
9657 fn next_page_token(&self) -> std::string::String {
9658 use std::clone::Clone;
9659 self.next_page_token.clone()
9660 }
9661}
9662
9663/// Request message for the
9664/// [GetPhraseSet][google.cloud.speech.v2.Speech.GetPhraseSet] method.
9665///
9666/// [google.cloud.speech.v2.Speech.GetPhraseSet]: crate::client::Speech::get_phrase_set
9667#[derive(Clone, Default, PartialEq)]
9668#[non_exhaustive]
9669pub struct GetPhraseSetRequest {
9670 /// Required. The name of the PhraseSet to retrieve. The expected format is
9671 /// `projects/{project}/locations/{location}/phraseSets/{phrase_set}`.
9672 pub name: std::string::String,
9673
9674 pub(crate) _unknown_fields: serde_json::Map<std::string::String, serde_json::Value>,
9675}
9676
9677impl GetPhraseSetRequest {
9678 pub fn new() -> Self {
9679 std::default::Default::default()
9680 }
9681
9682 /// Sets the value of [name][crate::model::GetPhraseSetRequest::name].
9683 ///
9684 /// # Example
9685 /// ```ignore,no_run
9686 /// # use google_cloud_speech_v2::model::GetPhraseSetRequest;
9687 /// let x = GetPhraseSetRequest::new().set_name("example");
9688 /// ```
9689 pub fn set_name<T: std::convert::Into<std::string::String>>(mut self, v: T) -> Self {
9690 self.name = v.into();
9691 self
9692 }
9693}
9694
9695impl wkt::message::Message for GetPhraseSetRequest {
9696 fn typename() -> &'static str {
9697 "type.googleapis.com/google.cloud.speech.v2.GetPhraseSetRequest"
9698 }
9699}
9700
9701/// Request message for the
9702/// [UpdatePhraseSet][google.cloud.speech.v2.Speech.UpdatePhraseSet] method.
9703///
9704/// [google.cloud.speech.v2.Speech.UpdatePhraseSet]: crate::client::Speech::update_phrase_set
9705#[derive(Clone, Default, PartialEq)]
9706#[non_exhaustive]
9707pub struct UpdatePhraseSetRequest {
9708 /// Required. The PhraseSet to update.
9709 ///
9710 /// The PhraseSet's `name` field is used to identify the PhraseSet to update.
9711 /// Format: `projects/{project}/locations/{location}/phraseSets/{phrase_set}`.
9712 pub phrase_set: std::option::Option<crate::model::PhraseSet>,
9713
9714 /// The list of fields to update. If empty, all non-default valued fields are
9715 /// considered for update. Use `*` to update the entire PhraseSet resource.
9716 pub update_mask: std::option::Option<wkt::FieldMask>,
9717
9718 /// If set, validate the request and preview the updated PhraseSet, but do not
9719 /// actually update it.
9720 pub validate_only: bool,
9721
9722 pub(crate) _unknown_fields: serde_json::Map<std::string::String, serde_json::Value>,
9723}
9724
9725impl UpdatePhraseSetRequest {
9726 pub fn new() -> Self {
9727 std::default::Default::default()
9728 }
9729
9730 /// Sets the value of [phrase_set][crate::model::UpdatePhraseSetRequest::phrase_set].
9731 ///
9732 /// # Example
9733 /// ```ignore,no_run
9734 /// # use google_cloud_speech_v2::model::UpdatePhraseSetRequest;
9735 /// use google_cloud_speech_v2::model::PhraseSet;
9736 /// let x = UpdatePhraseSetRequest::new().set_phrase_set(PhraseSet::default()/* use setters */);
9737 /// ```
9738 pub fn set_phrase_set<T>(mut self, v: T) -> Self
9739 where
9740 T: std::convert::Into<crate::model::PhraseSet>,
9741 {
9742 self.phrase_set = std::option::Option::Some(v.into());
9743 self
9744 }
9745
9746 /// Sets or clears the value of [phrase_set][crate::model::UpdatePhraseSetRequest::phrase_set].
9747 ///
9748 /// # Example
9749 /// ```ignore,no_run
9750 /// # use google_cloud_speech_v2::model::UpdatePhraseSetRequest;
9751 /// use google_cloud_speech_v2::model::PhraseSet;
9752 /// let x = UpdatePhraseSetRequest::new().set_or_clear_phrase_set(Some(PhraseSet::default()/* use setters */));
9753 /// let x = UpdatePhraseSetRequest::new().set_or_clear_phrase_set(None::<PhraseSet>);
9754 /// ```
9755 pub fn set_or_clear_phrase_set<T>(mut self, v: std::option::Option<T>) -> Self
9756 where
9757 T: std::convert::Into<crate::model::PhraseSet>,
9758 {
9759 self.phrase_set = v.map(|x| x.into());
9760 self
9761 }
9762
9763 /// Sets the value of [update_mask][crate::model::UpdatePhraseSetRequest::update_mask].
9764 ///
9765 /// # Example
9766 /// ```ignore,no_run
9767 /// # use google_cloud_speech_v2::model::UpdatePhraseSetRequest;
9768 /// use wkt::FieldMask;
9769 /// let x = UpdatePhraseSetRequest::new().set_update_mask(FieldMask::default()/* use setters */);
9770 /// ```
9771 pub fn set_update_mask<T>(mut self, v: T) -> Self
9772 where
9773 T: std::convert::Into<wkt::FieldMask>,
9774 {
9775 self.update_mask = std::option::Option::Some(v.into());
9776 self
9777 }
9778
9779 /// Sets or clears the value of [update_mask][crate::model::UpdatePhraseSetRequest::update_mask].
9780 ///
9781 /// # Example
9782 /// ```ignore,no_run
9783 /// # use google_cloud_speech_v2::model::UpdatePhraseSetRequest;
9784 /// use wkt::FieldMask;
9785 /// let x = UpdatePhraseSetRequest::new().set_or_clear_update_mask(Some(FieldMask::default()/* use setters */));
9786 /// let x = UpdatePhraseSetRequest::new().set_or_clear_update_mask(None::<FieldMask>);
9787 /// ```
9788 pub fn set_or_clear_update_mask<T>(mut self, v: std::option::Option<T>) -> Self
9789 where
9790 T: std::convert::Into<wkt::FieldMask>,
9791 {
9792 self.update_mask = v.map(|x| x.into());
9793 self
9794 }
9795
9796 /// Sets the value of [validate_only][crate::model::UpdatePhraseSetRequest::validate_only].
9797 ///
9798 /// # Example
9799 /// ```ignore,no_run
9800 /// # use google_cloud_speech_v2::model::UpdatePhraseSetRequest;
9801 /// let x = UpdatePhraseSetRequest::new().set_validate_only(true);
9802 /// ```
9803 pub fn set_validate_only<T: std::convert::Into<bool>>(mut self, v: T) -> Self {
9804 self.validate_only = v.into();
9805 self
9806 }
9807}
9808
9809impl wkt::message::Message for UpdatePhraseSetRequest {
9810 fn typename() -> &'static str {
9811 "type.googleapis.com/google.cloud.speech.v2.UpdatePhraseSetRequest"
9812 }
9813}
9814
9815/// Request message for the
9816/// [DeletePhraseSet][google.cloud.speech.v2.Speech.DeletePhraseSet] method.
9817///
9818/// [google.cloud.speech.v2.Speech.DeletePhraseSet]: crate::client::Speech::delete_phrase_set
9819#[derive(Clone, Default, PartialEq)]
9820#[non_exhaustive]
9821pub struct DeletePhraseSetRequest {
9822 /// Required. The name of the PhraseSet to delete.
9823 /// Format: `projects/{project}/locations/{location}/phraseSets/{phrase_set}`
9824 pub name: std::string::String,
9825
9826 /// If set, validate the request and preview the deleted PhraseSet, but do not
9827 /// actually delete it.
9828 pub validate_only: bool,
9829
9830 /// If set to true, and the PhraseSet is not found, the request will succeed
9831 /// and be a no-op (no Operation is recorded in this case).
9832 pub allow_missing: bool,
9833
9834 /// This checksum is computed by the server based on the value of other
9835 /// fields. This may be sent on update, undelete, and delete requests to ensure
9836 /// the client has an up-to-date value before proceeding.
9837 pub etag: std::string::String,
9838
9839 pub(crate) _unknown_fields: serde_json::Map<std::string::String, serde_json::Value>,
9840}
9841
9842impl DeletePhraseSetRequest {
9843 pub fn new() -> Self {
9844 std::default::Default::default()
9845 }
9846
9847 /// Sets the value of [name][crate::model::DeletePhraseSetRequest::name].
9848 ///
9849 /// # Example
9850 /// ```ignore,no_run
9851 /// # use google_cloud_speech_v2::model::DeletePhraseSetRequest;
9852 /// let x = DeletePhraseSetRequest::new().set_name("example");
9853 /// ```
9854 pub fn set_name<T: std::convert::Into<std::string::String>>(mut self, v: T) -> Self {
9855 self.name = v.into();
9856 self
9857 }
9858
9859 /// Sets the value of [validate_only][crate::model::DeletePhraseSetRequest::validate_only].
9860 ///
9861 /// # Example
9862 /// ```ignore,no_run
9863 /// # use google_cloud_speech_v2::model::DeletePhraseSetRequest;
9864 /// let x = DeletePhraseSetRequest::new().set_validate_only(true);
9865 /// ```
9866 pub fn set_validate_only<T: std::convert::Into<bool>>(mut self, v: T) -> Self {
9867 self.validate_only = v.into();
9868 self
9869 }
9870
9871 /// Sets the value of [allow_missing][crate::model::DeletePhraseSetRequest::allow_missing].
9872 ///
9873 /// # Example
9874 /// ```ignore,no_run
9875 /// # use google_cloud_speech_v2::model::DeletePhraseSetRequest;
9876 /// let x = DeletePhraseSetRequest::new().set_allow_missing(true);
9877 /// ```
9878 pub fn set_allow_missing<T: std::convert::Into<bool>>(mut self, v: T) -> Self {
9879 self.allow_missing = v.into();
9880 self
9881 }
9882
9883 /// Sets the value of [etag][crate::model::DeletePhraseSetRequest::etag].
9884 ///
9885 /// # Example
9886 /// ```ignore,no_run
9887 /// # use google_cloud_speech_v2::model::DeletePhraseSetRequest;
9888 /// let x = DeletePhraseSetRequest::new().set_etag("example");
9889 /// ```
9890 pub fn set_etag<T: std::convert::Into<std::string::String>>(mut self, v: T) -> Self {
9891 self.etag = v.into();
9892 self
9893 }
9894}
9895
9896impl wkt::message::Message for DeletePhraseSetRequest {
9897 fn typename() -> &'static str {
9898 "type.googleapis.com/google.cloud.speech.v2.DeletePhraseSetRequest"
9899 }
9900}
9901
9902/// Request message for the
9903/// [UndeletePhraseSet][google.cloud.speech.v2.Speech.UndeletePhraseSet]
9904/// method.
9905///
9906/// [google.cloud.speech.v2.Speech.UndeletePhraseSet]: crate::client::Speech::undelete_phrase_set
9907#[derive(Clone, Default, PartialEq)]
9908#[non_exhaustive]
9909pub struct UndeletePhraseSetRequest {
9910 /// Required. The name of the PhraseSet to undelete.
9911 /// Format: `projects/{project}/locations/{location}/phraseSets/{phrase_set}`
9912 pub name: std::string::String,
9913
9914 /// If set, validate the request and preview the undeleted PhraseSet, but do
9915 /// not actually undelete it.
9916 pub validate_only: bool,
9917
9918 /// This checksum is computed by the server based on the value of other
9919 /// fields. This may be sent on update, undelete, and delete requests to ensure
9920 /// the client has an up-to-date value before proceeding.
9921 pub etag: std::string::String,
9922
9923 pub(crate) _unknown_fields: serde_json::Map<std::string::String, serde_json::Value>,
9924}
9925
9926impl UndeletePhraseSetRequest {
9927 pub fn new() -> Self {
9928 std::default::Default::default()
9929 }
9930
9931 /// Sets the value of [name][crate::model::UndeletePhraseSetRequest::name].
9932 ///
9933 /// # Example
9934 /// ```ignore,no_run
9935 /// # use google_cloud_speech_v2::model::UndeletePhraseSetRequest;
9936 /// let x = UndeletePhraseSetRequest::new().set_name("example");
9937 /// ```
9938 pub fn set_name<T: std::convert::Into<std::string::String>>(mut self, v: T) -> Self {
9939 self.name = v.into();
9940 self
9941 }
9942
9943 /// Sets the value of [validate_only][crate::model::UndeletePhraseSetRequest::validate_only].
9944 ///
9945 /// # Example
9946 /// ```ignore,no_run
9947 /// # use google_cloud_speech_v2::model::UndeletePhraseSetRequest;
9948 /// let x = UndeletePhraseSetRequest::new().set_validate_only(true);
9949 /// ```
9950 pub fn set_validate_only<T: std::convert::Into<bool>>(mut self, v: T) -> Self {
9951 self.validate_only = v.into();
9952 self
9953 }
9954
9955 /// Sets the value of [etag][crate::model::UndeletePhraseSetRequest::etag].
9956 ///
9957 /// # Example
9958 /// ```ignore,no_run
9959 /// # use google_cloud_speech_v2::model::UndeletePhraseSetRequest;
9960 /// let x = UndeletePhraseSetRequest::new().set_etag("example");
9961 /// ```
9962 pub fn set_etag<T: std::convert::Into<std::string::String>>(mut self, v: T) -> Self {
9963 self.etag = v.into();
9964 self
9965 }
9966}
9967
9968impl wkt::message::Message for UndeletePhraseSetRequest {
9969 fn typename() -> &'static str {
9970 "type.googleapis.com/google.cloud.speech.v2.UndeletePhraseSetRequest"
9971 }
9972}
9973
9974/// Represents a singular feature of a model. If the feature is `recognizer`,
9975/// the release_state of the feature represents the release_state of the model.
9976#[derive(Clone, Default, PartialEq)]
9977#[non_exhaustive]
9978pub struct ModelFeature {
9979 /// The name of the feature (Note: the feature can be `recognizer`)
9980 pub feature: std::string::String,
9981
9982 /// The release state of the feature
9983 pub release_state: std::string::String,
9984
9985 pub(crate) _unknown_fields: serde_json::Map<std::string::String, serde_json::Value>,
9986}
9987
9988impl ModelFeature {
9989 pub fn new() -> Self {
9990 std::default::Default::default()
9991 }
9992
9993 /// Sets the value of [feature][crate::model::ModelFeature::feature].
9994 ///
9995 /// # Example
9996 /// ```ignore,no_run
9997 /// # use google_cloud_speech_v2::model::ModelFeature;
9998 /// let x = ModelFeature::new().set_feature("example");
9999 /// ```
10000 pub fn set_feature<T: std::convert::Into<std::string::String>>(mut self, v: T) -> Self {
10001 self.feature = v.into();
10002 self
10003 }
10004
10005 /// Sets the value of [release_state][crate::model::ModelFeature::release_state].
10006 ///
10007 /// # Example
10008 /// ```ignore,no_run
10009 /// # use google_cloud_speech_v2::model::ModelFeature;
10010 /// let x = ModelFeature::new().set_release_state("example");
10011 /// ```
10012 pub fn set_release_state<T: std::convert::Into<std::string::String>>(mut self, v: T) -> Self {
10013 self.release_state = v.into();
10014 self
10015 }
10016}
10017
10018impl wkt::message::Message for ModelFeature {
10019 fn typename() -> &'static str {
10020 "type.googleapis.com/google.cloud.speech.v2.ModelFeature"
10021 }
10022}
10023
10024/// Represents the collection of features belonging to a model
10025#[derive(Clone, Default, PartialEq)]
10026#[non_exhaustive]
10027pub struct ModelFeatures {
10028 /// Repeated field that contains all features of the model
10029 pub model_feature: std::vec::Vec<crate::model::ModelFeature>,
10030
10031 pub(crate) _unknown_fields: serde_json::Map<std::string::String, serde_json::Value>,
10032}
10033
10034impl ModelFeatures {
10035 pub fn new() -> Self {
10036 std::default::Default::default()
10037 }
10038
10039 /// Sets the value of [model_feature][crate::model::ModelFeatures::model_feature].
10040 ///
10041 /// # Example
10042 /// ```ignore,no_run
10043 /// # use google_cloud_speech_v2::model::ModelFeatures;
10044 /// use google_cloud_speech_v2::model::ModelFeature;
10045 /// let x = ModelFeatures::new()
10046 /// .set_model_feature([
10047 /// ModelFeature::default()/* use setters */,
10048 /// ModelFeature::default()/* use (different) setters */,
10049 /// ]);
10050 /// ```
10051 pub fn set_model_feature<T, V>(mut self, v: T) -> Self
10052 where
10053 T: std::iter::IntoIterator<Item = V>,
10054 V: std::convert::Into<crate::model::ModelFeature>,
10055 {
10056 use std::iter::Iterator;
10057 self.model_feature = v.into_iter().map(|i| i.into()).collect();
10058 self
10059 }
10060}
10061
10062impl wkt::message::Message for ModelFeatures {
10063 fn typename() -> &'static str {
10064 "type.googleapis.com/google.cloud.speech.v2.ModelFeatures"
10065 }
10066}
10067
10068/// The metadata about the models in a given region for a specific locale.
10069/// Currently this is just the features of the model.
10070#[derive(Clone, Default, PartialEq)]
10071#[non_exhaustive]
10072pub struct ModelMetadata {
10073 /// Map of the model name -> features of that model
10074 pub model_features: std::collections::HashMap<std::string::String, crate::model::ModelFeatures>,
10075
10076 pub(crate) _unknown_fields: serde_json::Map<std::string::String, serde_json::Value>,
10077}
10078
10079impl ModelMetadata {
10080 pub fn new() -> Self {
10081 std::default::Default::default()
10082 }
10083
10084 /// Sets the value of [model_features][crate::model::ModelMetadata::model_features].
10085 ///
10086 /// # Example
10087 /// ```ignore,no_run
10088 /// # use google_cloud_speech_v2::model::ModelMetadata;
10089 /// use google_cloud_speech_v2::model::ModelFeatures;
10090 /// let x = ModelMetadata::new().set_model_features([
10091 /// ("key0", ModelFeatures::default()/* use setters */),
10092 /// ("key1", ModelFeatures::default()/* use (different) setters */),
10093 /// ]);
10094 /// ```
10095 pub fn set_model_features<T, K, V>(mut self, v: T) -> Self
10096 where
10097 T: std::iter::IntoIterator<Item = (K, V)>,
10098 K: std::convert::Into<std::string::String>,
10099 V: std::convert::Into<crate::model::ModelFeatures>,
10100 {
10101 use std::iter::Iterator;
10102 self.model_features = v.into_iter().map(|(k, v)| (k.into(), v.into())).collect();
10103 self
10104 }
10105}
10106
10107impl wkt::message::Message for ModelMetadata {
10108 fn typename() -> &'static str {
10109 "type.googleapis.com/google.cloud.speech.v2.ModelMetadata"
10110 }
10111}
10112
10113/// The metadata about locales available in a given region. Currently this is
10114/// just the models that are available for each locale.
10115#[derive(Clone, Default, PartialEq)]
10116#[non_exhaustive]
10117pub struct LanguageMetadata {
10118 /// Map of locale (language code) -> models
10119 pub models: std::collections::HashMap<std::string::String, crate::model::ModelMetadata>,
10120
10121 pub(crate) _unknown_fields: serde_json::Map<std::string::String, serde_json::Value>,
10122}
10123
10124impl LanguageMetadata {
10125 pub fn new() -> Self {
10126 std::default::Default::default()
10127 }
10128
10129 /// Sets the value of [models][crate::model::LanguageMetadata::models].
10130 ///
10131 /// # Example
10132 /// ```ignore,no_run
10133 /// # use google_cloud_speech_v2::model::LanguageMetadata;
10134 /// use google_cloud_speech_v2::model::ModelMetadata;
10135 /// let x = LanguageMetadata::new().set_models([
10136 /// ("key0", ModelMetadata::default()/* use setters */),
10137 /// ("key1", ModelMetadata::default()/* use (different) setters */),
10138 /// ]);
10139 /// ```
10140 pub fn set_models<T, K, V>(mut self, v: T) -> Self
10141 where
10142 T: std::iter::IntoIterator<Item = (K, V)>,
10143 K: std::convert::Into<std::string::String>,
10144 V: std::convert::Into<crate::model::ModelMetadata>,
10145 {
10146 use std::iter::Iterator;
10147 self.models = v.into_iter().map(|(k, v)| (k.into(), v.into())).collect();
10148 self
10149 }
10150}
10151
10152impl wkt::message::Message for LanguageMetadata {
10153 fn typename() -> &'static str {
10154 "type.googleapis.com/google.cloud.speech.v2.LanguageMetadata"
10155 }
10156}
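
// Editor's sketch: walking the locale -> model -> feature hierarchy of a
// `LanguageMetadata` (for instance one taken from `LocationsMetadata`). Only
// public fields defined in this module are used.
//
// ```ignore,no_run
// use google_cloud_speech_v2::model::LanguageMetadata;
//
// fn print_features(languages: &LanguageMetadata) {
//     for (locale, models) in &languages.models {
//         for (model_name, features) in &models.model_features {
//             for feature in &features.model_feature {
//                 println!(
//                     "{locale} / {model_name}: {} ({})",
//                     feature.feature, feature.release_state
//                 );
//             }
//         }
//     }
// }
// ```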
10157
10158/// The access metadata for a particular region. This can be applied if the org
10159/// policy for the given project disallows a particular region.
10160#[derive(Clone, Default, PartialEq)]
10161#[non_exhaustive]
10162pub struct AccessMetadata {
10163 /// Describes the different types of constraints that are applied.
10164 pub constraint_type: crate::model::access_metadata::ConstraintType,
10165
10166 pub(crate) _unknown_fields: serde_json::Map<std::string::String, serde_json::Value>,
10167}
10168
10169impl AccessMetadata {
10170 pub fn new() -> Self {
10171 std::default::Default::default()
10172 }
10173
10174 /// Sets the value of [constraint_type][crate::model::AccessMetadata::constraint_type].
10175 ///
10176 /// # Example
10177 /// ```ignore,no_run
10178 /// # use google_cloud_speech_v2::model::AccessMetadata;
10179 /// use google_cloud_speech_v2::model::access_metadata::ConstraintType;
10180 /// let x0 = AccessMetadata::new().set_constraint_type(ConstraintType::ResourceLocationsOrgPolicyCreateConstraint);
10181 /// ```
10182 pub fn set_constraint_type<
10183 T: std::convert::Into<crate::model::access_metadata::ConstraintType>,
10184 >(
10185 mut self,
10186 v: T,
10187 ) -> Self {
10188 self.constraint_type = v.into();
10189 self
10190 }
10191}
10192
10193impl wkt::message::Message for AccessMetadata {
10194 fn typename() -> &'static str {
10195 "type.googleapis.com/google.cloud.speech.v2.AccessMetadata"
10196 }
10197}
10198
10199/// Defines additional types related to [AccessMetadata].
10200pub mod access_metadata {
10201 #[allow(unused_imports)]
10202 use super::*;
10203
10204 /// Describes the different types of constraints that can be applied on a
10205 /// region.
10206 ///
10207 /// # Working with unknown values
10208 ///
10209 /// This enum is defined as `#[non_exhaustive]` because Google Cloud may add
10210 /// additional enum variants at any time. Adding new variants is not considered
10211 /// a breaking change. Applications should write their code in anticipation of:
10212 ///
10213 /// - New values appearing in future releases of the client library, **and**
10214 /// - New values received dynamically, without application changes.
10215 ///
10216 /// Please consult the [Working with enums] section in the user guide for some
10217 /// guidelines.
10218 ///
10219 /// [Working with enums]: https://google-cloud-rust.github.io/working_with_enums.html
10220 #[derive(Clone, Debug, PartialEq)]
10221 #[non_exhaustive]
10222 pub enum ConstraintType {
10223 /// Unspecified constraint applied.
10224 Unspecified,
10225 /// The project's org policy disallows the given region.
10226 ResourceLocationsOrgPolicyCreateConstraint,
10227 /// If set, the enum was initialized with an unknown value.
10228 ///
10229 /// Applications can examine the value using [ConstraintType::value] or
10230 /// [ConstraintType::name].
10231 UnknownValue(constraint_type::UnknownValue),
10232 }
10233
10234 #[doc(hidden)]
10235 pub mod constraint_type {
10236 #[allow(unused_imports)]
10237 use super::*;
10238 #[derive(Clone, Debug, PartialEq)]
10239 pub struct UnknownValue(pub(crate) wkt::internal::UnknownEnumValue);
10240 }
10241
10242 impl ConstraintType {
10243 /// Gets the enum value.
10244 ///
10245 /// Returns `None` if the enum contains an unknown value deserialized from
10246 /// the string representation of enums.
10247 pub fn value(&self) -> std::option::Option<i32> {
10248 match self {
10249 Self::Unspecified => std::option::Option::Some(0),
10250 Self::ResourceLocationsOrgPolicyCreateConstraint => std::option::Option::Some(1),
10251 Self::UnknownValue(u) => u.0.value(),
10252 }
10253 }
10254
10255 /// Gets the enum value as a string.
10256 ///
10257 /// Returns `None` if the enum contains an unknown value deserialized from
10258 /// the integer representation of enums.
        pub fn name(&self) -> std::option::Option<&str> {
            match self {
                Self::Unspecified => std::option::Option::Some("CONSTRAINT_TYPE_UNSPECIFIED"),
                Self::ResourceLocationsOrgPolicyCreateConstraint => {
                    std::option::Option::Some("RESOURCE_LOCATIONS_ORG_POLICY_CREATE_CONSTRAINT")
                }
                Self::UnknownValue(u) => u.0.name(),
            }
        }
    }

    impl std::default::Default for ConstraintType {
        fn default() -> Self {
            use std::convert::From;
            Self::from(0)
        }
    }

    impl std::fmt::Display for ConstraintType {
        fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::result::Result<(), std::fmt::Error> {
            wkt::internal::display_enum(f, self.name(), self.value())
        }
    }

    impl std::convert::From<i32> for ConstraintType {
        fn from(value: i32) -> Self {
            match value {
                0 => Self::Unspecified,
                1 => Self::ResourceLocationsOrgPolicyCreateConstraint,
                _ => Self::UnknownValue(constraint_type::UnknownValue(
                    wkt::internal::UnknownEnumValue::Integer(value),
                )),
            }
        }
    }

    impl std::convert::From<&str> for ConstraintType {
        fn from(value: &str) -> Self {
            use std::string::ToString;
            match value {
                "CONSTRAINT_TYPE_UNSPECIFIED" => Self::Unspecified,
                "RESOURCE_LOCATIONS_ORG_POLICY_CREATE_CONSTRAINT" => {
                    Self::ResourceLocationsOrgPolicyCreateConstraint
                }
                _ => Self::UnknownValue(constraint_type::UnknownValue(
                    wkt::internal::UnknownEnumValue::String(value.to_string()),
                )),
            }
        }
    }

    impl serde::ser::Serialize for ConstraintType {
        fn serialize<S>(&self, serializer: S) -> std::result::Result<S::Ok, S::Error>
        where
            S: serde::Serializer,
        {
            match self {
                Self::Unspecified => serializer.serialize_i32(0),
                Self::ResourceLocationsOrgPolicyCreateConstraint => serializer.serialize_i32(1),
                Self::UnknownValue(u) => u.0.serialize(serializer),
            }
        }
    }

    impl<'de> serde::de::Deserialize<'de> for ConstraintType {
        fn deserialize<D>(deserializer: D) -> std::result::Result<Self, D::Error>
        where
            D: serde::Deserializer<'de>,
        {
            deserializer.deserialize_any(wkt::internal::EnumVisitor::<ConstraintType>::new(
                ".google.cloud.speech.v2.AccessMetadata.ConstraintType",
            ))
        }
    }
}

/// Main metadata for the Locations API for STT V2. Currently this is just the
/// metadata about locales, models, and features.
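///
/// # Example
///
/// A minimal sketch of assembling the message with its setters; in practice this
/// metadata is typically returned by the service rather than built by hand.
/// ```ignore,no_run
/// # use google_cloud_speech_v2::model::LocationsMetadata;
/// use google_cloud_speech_v2::model::AccessMetadata;
/// use google_cloud_speech_v2::model::LanguageMetadata;
/// let metadata = LocationsMetadata::new()
///     .set_languages(LanguageMetadata::default()/* use setters */)
///     .set_access_metadata(AccessMetadata::default()/* use setters */);
/// ```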
#[derive(Clone, Default, PartialEq)]
#[non_exhaustive]
pub struct LocationsMetadata {
    /// Information about available locales, models, and features represented in
    /// the hierarchical structure of locales -> models -> features.
    pub languages: std::option::Option<crate::model::LanguageMetadata>,

    /// Information about access metadata for the region and given project.
    pub access_metadata: std::option::Option<crate::model::AccessMetadata>,

    pub(crate) _unknown_fields: serde_json::Map<std::string::String, serde_json::Value>,
}

impl LocationsMetadata {
    pub fn new() -> Self {
        std::default::Default::default()
    }

    /// Sets the value of [languages][crate::model::LocationsMetadata::languages].
    ///
    /// # Example
    /// ```ignore,no_run
    /// # use google_cloud_speech_v2::model::LocationsMetadata;
    /// use google_cloud_speech_v2::model::LanguageMetadata;
    /// let x = LocationsMetadata::new().set_languages(LanguageMetadata::default()/* use setters */);
    /// ```
    pub fn set_languages<T>(mut self, v: T) -> Self
    where
        T: std::convert::Into<crate::model::LanguageMetadata>,
    {
        self.languages = std::option::Option::Some(v.into());
        self
    }

    /// Sets or clears the value of [languages][crate::model::LocationsMetadata::languages].
    ///
    /// # Example
    /// ```ignore,no_run
    /// # use google_cloud_speech_v2::model::LocationsMetadata;
    /// use google_cloud_speech_v2::model::LanguageMetadata;
    /// let x = LocationsMetadata::new().set_or_clear_languages(Some(LanguageMetadata::default()/* use setters */));
    /// let x = LocationsMetadata::new().set_or_clear_languages(None::<LanguageMetadata>);
    /// ```
    pub fn set_or_clear_languages<T>(mut self, v: std::option::Option<T>) -> Self
    where
        T: std::convert::Into<crate::model::LanguageMetadata>,
    {
        self.languages = v.map(|x| x.into());
        self
    }

    /// Sets the value of [access_metadata][crate::model::LocationsMetadata::access_metadata].
    ///
    /// # Example
    /// ```ignore,no_run
    /// # use google_cloud_speech_v2::model::LocationsMetadata;
    /// use google_cloud_speech_v2::model::AccessMetadata;
    /// let x = LocationsMetadata::new().set_access_metadata(AccessMetadata::default()/* use setters */);
    /// ```
    pub fn set_access_metadata<T>(mut self, v: T) -> Self
    where
        T: std::convert::Into<crate::model::AccessMetadata>,
    {
        self.access_metadata = std::option::Option::Some(v.into());
        self
    }

    /// Sets or clears the value of [access_metadata][crate::model::LocationsMetadata::access_metadata].
    ///
    /// # Example
    /// ```ignore,no_run
    /// # use google_cloud_speech_v2::model::LocationsMetadata;
    /// use google_cloud_speech_v2::model::AccessMetadata;
    /// let x = LocationsMetadata::new().set_or_clear_access_metadata(Some(AccessMetadata::default()/* use setters */));
    /// let x = LocationsMetadata::new().set_or_clear_access_metadata(None::<AccessMetadata>);
    /// ```
    pub fn set_or_clear_access_metadata<T>(mut self, v: std::option::Option<T>) -> Self
    where
        T: std::convert::Into<crate::model::AccessMetadata>,
    {
        self.access_metadata = v.map(|x| x.into());
        self
    }
}

impl wkt::message::Message for LocationsMetadata {
    fn typename() -> &'static str {
        "type.googleapis.com/google.cloud.speech.v2.LocationsMetadata"
    }
}