google_cloud_speech_v2/model.rs
1// Copyright 2025 Google LLC
2//
3// Licensed under the Apache License, Version 2.0 (the "License");
4// you may not use this file except in compliance with the License.
5// You may obtain a copy of the License at
6//
7// https://www.apache.org/licenses/LICENSE-2.0
8//
9// Unless required by applicable law or agreed to in writing, software
10// distributed under the License is distributed on an "AS IS" BASIS,
11// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12// See the License for the specific language governing permissions and
13// limitations under the License.
14//
15// Code generated by sidekick. DO NOT EDIT.
16
17#![allow(rustdoc::redundant_explicit_links)]
18#![allow(rustdoc::broken_intra_doc_links)]
19#![no_implicit_prelude]
20extern crate async_trait;
21extern crate bytes;
22extern crate gaxi;
23extern crate google_cloud_gax;
24extern crate google_cloud_location;
25extern crate google_cloud_longrunning;
26extern crate google_cloud_lro;
27extern crate google_cloud_rpc;
28extern crate lazy_static;
29extern crate serde;
30extern crate serde_json;
31extern crate serde_with;
32extern crate std;
33extern crate tracing;
34extern crate wkt;
35
36mod debug;
37mod deserialize;
38mod serialize;
39
/// Request message for the
/// [CreateRecognizer][google.cloud.speech.v2.Speech.CreateRecognizer] method.
///
/// [google.cloud.speech.v2.Speech.CreateRecognizer]: crate::client::Speech::create_recognizer
#[derive(Clone, Default, PartialEq)]
#[non_exhaustive]
pub struct CreateRecognizerRequest {
    /// Required. The Recognizer to create.
    pub recognizer: std::option::Option<crate::model::Recognizer>,

    /// If set, validate the request and preview the Recognizer, but do not
    /// actually create it.
    pub validate_only: bool,

    /// The ID to use for the Recognizer, which will become the final component of
    /// the Recognizer's resource name.
    ///
    /// This value should be 4-63 characters, and valid characters
    /// are /[a-z][0-9]-/.
    pub recognizer_id: std::string::String,

    /// Required. The project and location where this Recognizer will be created.
    /// The expected format is `projects/{project}/locations/{location}`.
    pub parent: std::string::String,

    // Holds JSON members that were not recognized while decoding this message.
    // NOTE(review): presumably round-tripped by the sibling `serialize` /
    // `deserialize` modules so unknown fields survive re-encoding — confirm there.
    pub(crate) _unknown_fields: serde_json::Map<std::string::String, serde_json::Value>,
}
67
68impl CreateRecognizerRequest {
69 pub fn new() -> Self {
70 std::default::Default::default()
71 }
72
73 /// Sets the value of [recognizer][crate::model::CreateRecognizerRequest::recognizer].
74 ///
75 /// # Example
76 /// ```ignore,no_run
77 /// # use google_cloud_speech_v2::model::CreateRecognizerRequest;
78 /// use google_cloud_speech_v2::model::Recognizer;
79 /// let x = CreateRecognizerRequest::new().set_recognizer(Recognizer::default()/* use setters */);
80 /// ```
81 pub fn set_recognizer<T>(mut self, v: T) -> Self
82 where
83 T: std::convert::Into<crate::model::Recognizer>,
84 {
85 self.recognizer = std::option::Option::Some(v.into());
86 self
87 }
88
89 /// Sets or clears the value of [recognizer][crate::model::CreateRecognizerRequest::recognizer].
90 ///
91 /// # Example
92 /// ```ignore,no_run
93 /// # use google_cloud_speech_v2::model::CreateRecognizerRequest;
94 /// use google_cloud_speech_v2::model::Recognizer;
95 /// let x = CreateRecognizerRequest::new().set_or_clear_recognizer(Some(Recognizer::default()/* use setters */));
96 /// let x = CreateRecognizerRequest::new().set_or_clear_recognizer(None::<Recognizer>);
97 /// ```
98 pub fn set_or_clear_recognizer<T>(mut self, v: std::option::Option<T>) -> Self
99 where
100 T: std::convert::Into<crate::model::Recognizer>,
101 {
102 self.recognizer = v.map(|x| x.into());
103 self
104 }
105
106 /// Sets the value of [validate_only][crate::model::CreateRecognizerRequest::validate_only].
107 ///
108 /// # Example
109 /// ```ignore,no_run
110 /// # use google_cloud_speech_v2::model::CreateRecognizerRequest;
111 /// let x = CreateRecognizerRequest::new().set_validate_only(true);
112 /// ```
113 pub fn set_validate_only<T: std::convert::Into<bool>>(mut self, v: T) -> Self {
114 self.validate_only = v.into();
115 self
116 }
117
118 /// Sets the value of [recognizer_id][crate::model::CreateRecognizerRequest::recognizer_id].
119 ///
120 /// # Example
121 /// ```ignore,no_run
122 /// # use google_cloud_speech_v2::model::CreateRecognizerRequest;
123 /// let x = CreateRecognizerRequest::new().set_recognizer_id("example");
124 /// ```
125 pub fn set_recognizer_id<T: std::convert::Into<std::string::String>>(mut self, v: T) -> Self {
126 self.recognizer_id = v.into();
127 self
128 }
129
130 /// Sets the value of [parent][crate::model::CreateRecognizerRequest::parent].
131 ///
132 /// # Example
133 /// ```ignore,no_run
134 /// # use google_cloud_speech_v2::model::CreateRecognizerRequest;
135 /// let x = CreateRecognizerRequest::new().set_parent("example");
136 /// ```
137 pub fn set_parent<T: std::convert::Into<std::string::String>>(mut self, v: T) -> Self {
138 self.parent = v.into();
139 self
140 }
141}
142
143impl wkt::message::Message for CreateRecognizerRequest {
144 fn typename() -> &'static str {
145 "type.googleapis.com/google.cloud.speech.v2.CreateRecognizerRequest"
146 }
147}
148
/// Represents the metadata of a long-running operation.
#[derive(Clone, Default, PartialEq)]
#[non_exhaustive]
pub struct OperationMetadata {
    /// The time the operation was created.
    pub create_time: std::option::Option<wkt::Timestamp>,

    /// The time the operation was last updated.
    pub update_time: std::option::Option<wkt::Timestamp>,

    /// The resource path for the target of the operation.
    pub resource: std::string::String,

    /// The method that triggered the operation.
    pub method: std::string::String,

    /// The [KMS key
    /// name](https://cloud.google.com/kms/docs/resource-hierarchy#keys) with which
    /// the content of the Operation is encrypted. The expected format is
    /// `projects/{project}/locations/{location}/keyRings/{key_ring}/cryptoKeys/{crypto_key}`.
    pub kms_key_name: std::string::String,

    /// The [KMS key version
    /// name](https://cloud.google.com/kms/docs/resource-hierarchy#key_versions)
    /// with which content of the Operation is encrypted. The expected format is
    /// `projects/{project}/locations/{location}/keyRings/{key_ring}/cryptoKeys/{crypto_key}/cryptoKeyVersions/{crypto_key_version}`.
    pub kms_key_version_name: std::string::String,

    /// The percent progress of the Operation. Values can range from 0-100. If the
    /// value is 100, then the operation is finished.
    pub progress_percent: i32,

    /// The request that spawned the Operation.
    //
    // Oneof-style field: the `set_*_request` setters on this type are
    // documented as mutually exclusive, so at most one branch of
    // `operation_metadata::Request` is held at a time.
    pub request: std::option::Option<crate::model::operation_metadata::Request>,

    /// Specific metadata per RPC.
    //
    // Oneof-style field mirroring `request`; holds one branch of
    // `operation_metadata::Metadata` at most.
    pub metadata: std::option::Option<crate::model::operation_metadata::Metadata>,

    // Holds JSON members that were not recognized while decoding this message.
    // NOTE(review): presumably round-tripped by the sibling `serialize` /
    // `deserialize` modules so unknown fields survive re-encoding — confirm there.
    pub(crate) _unknown_fields: serde_json::Map<std::string::String, serde_json::Value>,
}
189
190impl OperationMetadata {
191 pub fn new() -> Self {
192 std::default::Default::default()
193 }
194
195 /// Sets the value of [create_time][crate::model::OperationMetadata::create_time].
196 ///
197 /// # Example
198 /// ```ignore,no_run
199 /// # use google_cloud_speech_v2::model::OperationMetadata;
200 /// use wkt::Timestamp;
201 /// let x = OperationMetadata::new().set_create_time(Timestamp::default()/* use setters */);
202 /// ```
203 pub fn set_create_time<T>(mut self, v: T) -> Self
204 where
205 T: std::convert::Into<wkt::Timestamp>,
206 {
207 self.create_time = std::option::Option::Some(v.into());
208 self
209 }
210
211 /// Sets or clears the value of [create_time][crate::model::OperationMetadata::create_time].
212 ///
213 /// # Example
214 /// ```ignore,no_run
215 /// # use google_cloud_speech_v2::model::OperationMetadata;
216 /// use wkt::Timestamp;
217 /// let x = OperationMetadata::new().set_or_clear_create_time(Some(Timestamp::default()/* use setters */));
218 /// let x = OperationMetadata::new().set_or_clear_create_time(None::<Timestamp>);
219 /// ```
220 pub fn set_or_clear_create_time<T>(mut self, v: std::option::Option<T>) -> Self
221 where
222 T: std::convert::Into<wkt::Timestamp>,
223 {
224 self.create_time = v.map(|x| x.into());
225 self
226 }
227
228 /// Sets the value of [update_time][crate::model::OperationMetadata::update_time].
229 ///
230 /// # Example
231 /// ```ignore,no_run
232 /// # use google_cloud_speech_v2::model::OperationMetadata;
233 /// use wkt::Timestamp;
234 /// let x = OperationMetadata::new().set_update_time(Timestamp::default()/* use setters */);
235 /// ```
236 pub fn set_update_time<T>(mut self, v: T) -> Self
237 where
238 T: std::convert::Into<wkt::Timestamp>,
239 {
240 self.update_time = std::option::Option::Some(v.into());
241 self
242 }
243
244 /// Sets or clears the value of [update_time][crate::model::OperationMetadata::update_time].
245 ///
246 /// # Example
247 /// ```ignore,no_run
248 /// # use google_cloud_speech_v2::model::OperationMetadata;
249 /// use wkt::Timestamp;
250 /// let x = OperationMetadata::new().set_or_clear_update_time(Some(Timestamp::default()/* use setters */));
251 /// let x = OperationMetadata::new().set_or_clear_update_time(None::<Timestamp>);
252 /// ```
253 pub fn set_or_clear_update_time<T>(mut self, v: std::option::Option<T>) -> Self
254 where
255 T: std::convert::Into<wkt::Timestamp>,
256 {
257 self.update_time = v.map(|x| x.into());
258 self
259 }
260
261 /// Sets the value of [resource][crate::model::OperationMetadata::resource].
262 ///
263 /// # Example
264 /// ```ignore,no_run
265 /// # use google_cloud_speech_v2::model::OperationMetadata;
266 /// let x = OperationMetadata::new().set_resource("example");
267 /// ```
268 pub fn set_resource<T: std::convert::Into<std::string::String>>(mut self, v: T) -> Self {
269 self.resource = v.into();
270 self
271 }
272
273 /// Sets the value of [method][crate::model::OperationMetadata::method].
274 ///
275 /// # Example
276 /// ```ignore,no_run
277 /// # use google_cloud_speech_v2::model::OperationMetadata;
278 /// let x = OperationMetadata::new().set_method("example");
279 /// ```
280 pub fn set_method<T: std::convert::Into<std::string::String>>(mut self, v: T) -> Self {
281 self.method = v.into();
282 self
283 }
284
285 /// Sets the value of [kms_key_name][crate::model::OperationMetadata::kms_key_name].
286 ///
287 /// # Example
288 /// ```ignore,no_run
289 /// # use google_cloud_speech_v2::model::OperationMetadata;
290 /// let x = OperationMetadata::new().set_kms_key_name("example");
291 /// ```
292 pub fn set_kms_key_name<T: std::convert::Into<std::string::String>>(mut self, v: T) -> Self {
293 self.kms_key_name = v.into();
294 self
295 }
296
297 /// Sets the value of [kms_key_version_name][crate::model::OperationMetadata::kms_key_version_name].
298 ///
299 /// # Example
300 /// ```ignore,no_run
301 /// # use google_cloud_speech_v2::model::OperationMetadata;
302 /// let x = OperationMetadata::new().set_kms_key_version_name("example");
303 /// ```
304 pub fn set_kms_key_version_name<T: std::convert::Into<std::string::String>>(
305 mut self,
306 v: T,
307 ) -> Self {
308 self.kms_key_version_name = v.into();
309 self
310 }
311
312 /// Sets the value of [progress_percent][crate::model::OperationMetadata::progress_percent].
313 ///
314 /// # Example
315 /// ```ignore,no_run
316 /// # use google_cloud_speech_v2::model::OperationMetadata;
317 /// let x = OperationMetadata::new().set_progress_percent(42);
318 /// ```
319 pub fn set_progress_percent<T: std::convert::Into<i32>>(mut self, v: T) -> Self {
320 self.progress_percent = v.into();
321 self
322 }
323
324 /// Sets the value of [request][crate::model::OperationMetadata::request].
325 ///
326 /// Note that all the setters affecting `request` are mutually
327 /// exclusive.
328 ///
329 /// # Example
330 /// ```ignore,no_run
331 /// # use google_cloud_speech_v2::model::OperationMetadata;
332 /// use google_cloud_speech_v2::model::BatchRecognizeRequest;
333 /// let x = OperationMetadata::new().set_request(Some(
334 /// google_cloud_speech_v2::model::operation_metadata::Request::BatchRecognizeRequest(BatchRecognizeRequest::default().into())));
335 /// ```
336 pub fn set_request<
337 T: std::convert::Into<std::option::Option<crate::model::operation_metadata::Request>>,
338 >(
339 mut self,
340 v: T,
341 ) -> Self {
342 self.request = v.into();
343 self
344 }
345
346 /// The value of [request][crate::model::OperationMetadata::request]
347 /// if it holds a `BatchRecognizeRequest`, `None` if the field is not set or
348 /// holds a different branch.
349 pub fn batch_recognize_request(
350 &self,
351 ) -> std::option::Option<&std::boxed::Box<crate::model::BatchRecognizeRequest>> {
352 #[allow(unreachable_patterns)]
353 self.request.as_ref().and_then(|v| match v {
354 crate::model::operation_metadata::Request::BatchRecognizeRequest(v) => {
355 std::option::Option::Some(v)
356 }
357 _ => std::option::Option::None,
358 })
359 }
360
361 /// Sets the value of [request][crate::model::OperationMetadata::request]
362 /// to hold a `BatchRecognizeRequest`.
363 ///
364 /// Note that all the setters affecting `request` are
365 /// mutually exclusive.
366 ///
367 /// # Example
368 /// ```ignore,no_run
369 /// # use google_cloud_speech_v2::model::OperationMetadata;
370 /// use google_cloud_speech_v2::model::BatchRecognizeRequest;
371 /// let x = OperationMetadata::new().set_batch_recognize_request(BatchRecognizeRequest::default()/* use setters */);
372 /// assert!(x.batch_recognize_request().is_some());
373 /// assert!(x.create_recognizer_request().is_none());
374 /// assert!(x.update_recognizer_request().is_none());
375 /// assert!(x.delete_recognizer_request().is_none());
376 /// assert!(x.undelete_recognizer_request().is_none());
377 /// assert!(x.create_custom_class_request().is_none());
378 /// assert!(x.update_custom_class_request().is_none());
379 /// assert!(x.delete_custom_class_request().is_none());
380 /// assert!(x.undelete_custom_class_request().is_none());
381 /// assert!(x.create_phrase_set_request().is_none());
382 /// assert!(x.update_phrase_set_request().is_none());
383 /// assert!(x.delete_phrase_set_request().is_none());
384 /// assert!(x.undelete_phrase_set_request().is_none());
385 /// assert!(x.update_config_request().is_none());
386 /// ```
387 pub fn set_batch_recognize_request<
388 T: std::convert::Into<std::boxed::Box<crate::model::BatchRecognizeRequest>>,
389 >(
390 mut self,
391 v: T,
392 ) -> Self {
393 self.request = std::option::Option::Some(
394 crate::model::operation_metadata::Request::BatchRecognizeRequest(v.into()),
395 );
396 self
397 }
398
399 /// The value of [request][crate::model::OperationMetadata::request]
400 /// if it holds a `CreateRecognizerRequest`, `None` if the field is not set or
401 /// holds a different branch.
402 pub fn create_recognizer_request(
403 &self,
404 ) -> std::option::Option<&std::boxed::Box<crate::model::CreateRecognizerRequest>> {
405 #[allow(unreachable_patterns)]
406 self.request.as_ref().and_then(|v| match v {
407 crate::model::operation_metadata::Request::CreateRecognizerRequest(v) => {
408 std::option::Option::Some(v)
409 }
410 _ => std::option::Option::None,
411 })
412 }
413
414 /// Sets the value of [request][crate::model::OperationMetadata::request]
415 /// to hold a `CreateRecognizerRequest`.
416 ///
417 /// Note that all the setters affecting `request` are
418 /// mutually exclusive.
419 ///
420 /// # Example
421 /// ```ignore,no_run
422 /// # use google_cloud_speech_v2::model::OperationMetadata;
423 /// use google_cloud_speech_v2::model::CreateRecognizerRequest;
424 /// let x = OperationMetadata::new().set_create_recognizer_request(CreateRecognizerRequest::default()/* use setters */);
425 /// assert!(x.create_recognizer_request().is_some());
426 /// assert!(x.batch_recognize_request().is_none());
427 /// assert!(x.update_recognizer_request().is_none());
428 /// assert!(x.delete_recognizer_request().is_none());
429 /// assert!(x.undelete_recognizer_request().is_none());
430 /// assert!(x.create_custom_class_request().is_none());
431 /// assert!(x.update_custom_class_request().is_none());
432 /// assert!(x.delete_custom_class_request().is_none());
433 /// assert!(x.undelete_custom_class_request().is_none());
434 /// assert!(x.create_phrase_set_request().is_none());
435 /// assert!(x.update_phrase_set_request().is_none());
436 /// assert!(x.delete_phrase_set_request().is_none());
437 /// assert!(x.undelete_phrase_set_request().is_none());
438 /// assert!(x.update_config_request().is_none());
439 /// ```
440 pub fn set_create_recognizer_request<
441 T: std::convert::Into<std::boxed::Box<crate::model::CreateRecognizerRequest>>,
442 >(
443 mut self,
444 v: T,
445 ) -> Self {
446 self.request = std::option::Option::Some(
447 crate::model::operation_metadata::Request::CreateRecognizerRequest(v.into()),
448 );
449 self
450 }
451
452 /// The value of [request][crate::model::OperationMetadata::request]
453 /// if it holds a `UpdateRecognizerRequest`, `None` if the field is not set or
454 /// holds a different branch.
455 pub fn update_recognizer_request(
456 &self,
457 ) -> std::option::Option<&std::boxed::Box<crate::model::UpdateRecognizerRequest>> {
458 #[allow(unreachable_patterns)]
459 self.request.as_ref().and_then(|v| match v {
460 crate::model::operation_metadata::Request::UpdateRecognizerRequest(v) => {
461 std::option::Option::Some(v)
462 }
463 _ => std::option::Option::None,
464 })
465 }
466
467 /// Sets the value of [request][crate::model::OperationMetadata::request]
468 /// to hold a `UpdateRecognizerRequest`.
469 ///
470 /// Note that all the setters affecting `request` are
471 /// mutually exclusive.
472 ///
473 /// # Example
474 /// ```ignore,no_run
475 /// # use google_cloud_speech_v2::model::OperationMetadata;
476 /// use google_cloud_speech_v2::model::UpdateRecognizerRequest;
477 /// let x = OperationMetadata::new().set_update_recognizer_request(UpdateRecognizerRequest::default()/* use setters */);
478 /// assert!(x.update_recognizer_request().is_some());
479 /// assert!(x.batch_recognize_request().is_none());
480 /// assert!(x.create_recognizer_request().is_none());
481 /// assert!(x.delete_recognizer_request().is_none());
482 /// assert!(x.undelete_recognizer_request().is_none());
483 /// assert!(x.create_custom_class_request().is_none());
484 /// assert!(x.update_custom_class_request().is_none());
485 /// assert!(x.delete_custom_class_request().is_none());
486 /// assert!(x.undelete_custom_class_request().is_none());
487 /// assert!(x.create_phrase_set_request().is_none());
488 /// assert!(x.update_phrase_set_request().is_none());
489 /// assert!(x.delete_phrase_set_request().is_none());
490 /// assert!(x.undelete_phrase_set_request().is_none());
491 /// assert!(x.update_config_request().is_none());
492 /// ```
493 pub fn set_update_recognizer_request<
494 T: std::convert::Into<std::boxed::Box<crate::model::UpdateRecognizerRequest>>,
495 >(
496 mut self,
497 v: T,
498 ) -> Self {
499 self.request = std::option::Option::Some(
500 crate::model::operation_metadata::Request::UpdateRecognizerRequest(v.into()),
501 );
502 self
503 }
504
505 /// The value of [request][crate::model::OperationMetadata::request]
506 /// if it holds a `DeleteRecognizerRequest`, `None` if the field is not set or
507 /// holds a different branch.
508 pub fn delete_recognizer_request(
509 &self,
510 ) -> std::option::Option<&std::boxed::Box<crate::model::DeleteRecognizerRequest>> {
511 #[allow(unreachable_patterns)]
512 self.request.as_ref().and_then(|v| match v {
513 crate::model::operation_metadata::Request::DeleteRecognizerRequest(v) => {
514 std::option::Option::Some(v)
515 }
516 _ => std::option::Option::None,
517 })
518 }
519
520 /// Sets the value of [request][crate::model::OperationMetadata::request]
521 /// to hold a `DeleteRecognizerRequest`.
522 ///
523 /// Note that all the setters affecting `request` are
524 /// mutually exclusive.
525 ///
526 /// # Example
527 /// ```ignore,no_run
528 /// # use google_cloud_speech_v2::model::OperationMetadata;
529 /// use google_cloud_speech_v2::model::DeleteRecognizerRequest;
530 /// let x = OperationMetadata::new().set_delete_recognizer_request(DeleteRecognizerRequest::default()/* use setters */);
531 /// assert!(x.delete_recognizer_request().is_some());
532 /// assert!(x.batch_recognize_request().is_none());
533 /// assert!(x.create_recognizer_request().is_none());
534 /// assert!(x.update_recognizer_request().is_none());
535 /// assert!(x.undelete_recognizer_request().is_none());
536 /// assert!(x.create_custom_class_request().is_none());
537 /// assert!(x.update_custom_class_request().is_none());
538 /// assert!(x.delete_custom_class_request().is_none());
539 /// assert!(x.undelete_custom_class_request().is_none());
540 /// assert!(x.create_phrase_set_request().is_none());
541 /// assert!(x.update_phrase_set_request().is_none());
542 /// assert!(x.delete_phrase_set_request().is_none());
543 /// assert!(x.undelete_phrase_set_request().is_none());
544 /// assert!(x.update_config_request().is_none());
545 /// ```
546 pub fn set_delete_recognizer_request<
547 T: std::convert::Into<std::boxed::Box<crate::model::DeleteRecognizerRequest>>,
548 >(
549 mut self,
550 v: T,
551 ) -> Self {
552 self.request = std::option::Option::Some(
553 crate::model::operation_metadata::Request::DeleteRecognizerRequest(v.into()),
554 );
555 self
556 }
557
558 /// The value of [request][crate::model::OperationMetadata::request]
559 /// if it holds a `UndeleteRecognizerRequest`, `None` if the field is not set or
560 /// holds a different branch.
561 pub fn undelete_recognizer_request(
562 &self,
563 ) -> std::option::Option<&std::boxed::Box<crate::model::UndeleteRecognizerRequest>> {
564 #[allow(unreachable_patterns)]
565 self.request.as_ref().and_then(|v| match v {
566 crate::model::operation_metadata::Request::UndeleteRecognizerRequest(v) => {
567 std::option::Option::Some(v)
568 }
569 _ => std::option::Option::None,
570 })
571 }
572
573 /// Sets the value of [request][crate::model::OperationMetadata::request]
574 /// to hold a `UndeleteRecognizerRequest`.
575 ///
576 /// Note that all the setters affecting `request` are
577 /// mutually exclusive.
578 ///
579 /// # Example
580 /// ```ignore,no_run
581 /// # use google_cloud_speech_v2::model::OperationMetadata;
582 /// use google_cloud_speech_v2::model::UndeleteRecognizerRequest;
583 /// let x = OperationMetadata::new().set_undelete_recognizer_request(UndeleteRecognizerRequest::default()/* use setters */);
584 /// assert!(x.undelete_recognizer_request().is_some());
585 /// assert!(x.batch_recognize_request().is_none());
586 /// assert!(x.create_recognizer_request().is_none());
587 /// assert!(x.update_recognizer_request().is_none());
588 /// assert!(x.delete_recognizer_request().is_none());
589 /// assert!(x.create_custom_class_request().is_none());
590 /// assert!(x.update_custom_class_request().is_none());
591 /// assert!(x.delete_custom_class_request().is_none());
592 /// assert!(x.undelete_custom_class_request().is_none());
593 /// assert!(x.create_phrase_set_request().is_none());
594 /// assert!(x.update_phrase_set_request().is_none());
595 /// assert!(x.delete_phrase_set_request().is_none());
596 /// assert!(x.undelete_phrase_set_request().is_none());
597 /// assert!(x.update_config_request().is_none());
598 /// ```
599 pub fn set_undelete_recognizer_request<
600 T: std::convert::Into<std::boxed::Box<crate::model::UndeleteRecognizerRequest>>,
601 >(
602 mut self,
603 v: T,
604 ) -> Self {
605 self.request = std::option::Option::Some(
606 crate::model::operation_metadata::Request::UndeleteRecognizerRequest(v.into()),
607 );
608 self
609 }
610
611 /// The value of [request][crate::model::OperationMetadata::request]
612 /// if it holds a `CreateCustomClassRequest`, `None` if the field is not set or
613 /// holds a different branch.
614 pub fn create_custom_class_request(
615 &self,
616 ) -> std::option::Option<&std::boxed::Box<crate::model::CreateCustomClassRequest>> {
617 #[allow(unreachable_patterns)]
618 self.request.as_ref().and_then(|v| match v {
619 crate::model::operation_metadata::Request::CreateCustomClassRequest(v) => {
620 std::option::Option::Some(v)
621 }
622 _ => std::option::Option::None,
623 })
624 }
625
626 /// Sets the value of [request][crate::model::OperationMetadata::request]
627 /// to hold a `CreateCustomClassRequest`.
628 ///
629 /// Note that all the setters affecting `request` are
630 /// mutually exclusive.
631 ///
632 /// # Example
633 /// ```ignore,no_run
634 /// # use google_cloud_speech_v2::model::OperationMetadata;
635 /// use google_cloud_speech_v2::model::CreateCustomClassRequest;
636 /// let x = OperationMetadata::new().set_create_custom_class_request(CreateCustomClassRequest::default()/* use setters */);
637 /// assert!(x.create_custom_class_request().is_some());
638 /// assert!(x.batch_recognize_request().is_none());
639 /// assert!(x.create_recognizer_request().is_none());
640 /// assert!(x.update_recognizer_request().is_none());
641 /// assert!(x.delete_recognizer_request().is_none());
642 /// assert!(x.undelete_recognizer_request().is_none());
643 /// assert!(x.update_custom_class_request().is_none());
644 /// assert!(x.delete_custom_class_request().is_none());
645 /// assert!(x.undelete_custom_class_request().is_none());
646 /// assert!(x.create_phrase_set_request().is_none());
647 /// assert!(x.update_phrase_set_request().is_none());
648 /// assert!(x.delete_phrase_set_request().is_none());
649 /// assert!(x.undelete_phrase_set_request().is_none());
650 /// assert!(x.update_config_request().is_none());
651 /// ```
652 pub fn set_create_custom_class_request<
653 T: std::convert::Into<std::boxed::Box<crate::model::CreateCustomClassRequest>>,
654 >(
655 mut self,
656 v: T,
657 ) -> Self {
658 self.request = std::option::Option::Some(
659 crate::model::operation_metadata::Request::CreateCustomClassRequest(v.into()),
660 );
661 self
662 }
663
664 /// The value of [request][crate::model::OperationMetadata::request]
665 /// if it holds a `UpdateCustomClassRequest`, `None` if the field is not set or
666 /// holds a different branch.
667 pub fn update_custom_class_request(
668 &self,
669 ) -> std::option::Option<&std::boxed::Box<crate::model::UpdateCustomClassRequest>> {
670 #[allow(unreachable_patterns)]
671 self.request.as_ref().and_then(|v| match v {
672 crate::model::operation_metadata::Request::UpdateCustomClassRequest(v) => {
673 std::option::Option::Some(v)
674 }
675 _ => std::option::Option::None,
676 })
677 }
678
679 /// Sets the value of [request][crate::model::OperationMetadata::request]
680 /// to hold a `UpdateCustomClassRequest`.
681 ///
682 /// Note that all the setters affecting `request` are
683 /// mutually exclusive.
684 ///
685 /// # Example
686 /// ```ignore,no_run
687 /// # use google_cloud_speech_v2::model::OperationMetadata;
688 /// use google_cloud_speech_v2::model::UpdateCustomClassRequest;
689 /// let x = OperationMetadata::new().set_update_custom_class_request(UpdateCustomClassRequest::default()/* use setters */);
690 /// assert!(x.update_custom_class_request().is_some());
691 /// assert!(x.batch_recognize_request().is_none());
692 /// assert!(x.create_recognizer_request().is_none());
693 /// assert!(x.update_recognizer_request().is_none());
694 /// assert!(x.delete_recognizer_request().is_none());
695 /// assert!(x.undelete_recognizer_request().is_none());
696 /// assert!(x.create_custom_class_request().is_none());
697 /// assert!(x.delete_custom_class_request().is_none());
698 /// assert!(x.undelete_custom_class_request().is_none());
699 /// assert!(x.create_phrase_set_request().is_none());
700 /// assert!(x.update_phrase_set_request().is_none());
701 /// assert!(x.delete_phrase_set_request().is_none());
702 /// assert!(x.undelete_phrase_set_request().is_none());
703 /// assert!(x.update_config_request().is_none());
704 /// ```
705 pub fn set_update_custom_class_request<
706 T: std::convert::Into<std::boxed::Box<crate::model::UpdateCustomClassRequest>>,
707 >(
708 mut self,
709 v: T,
710 ) -> Self {
711 self.request = std::option::Option::Some(
712 crate::model::operation_metadata::Request::UpdateCustomClassRequest(v.into()),
713 );
714 self
715 }
716
717 /// The value of [request][crate::model::OperationMetadata::request]
718 /// if it holds a `DeleteCustomClassRequest`, `None` if the field is not set or
719 /// holds a different branch.
720 pub fn delete_custom_class_request(
721 &self,
722 ) -> std::option::Option<&std::boxed::Box<crate::model::DeleteCustomClassRequest>> {
723 #[allow(unreachable_patterns)]
724 self.request.as_ref().and_then(|v| match v {
725 crate::model::operation_metadata::Request::DeleteCustomClassRequest(v) => {
726 std::option::Option::Some(v)
727 }
728 _ => std::option::Option::None,
729 })
730 }
731
732 /// Sets the value of [request][crate::model::OperationMetadata::request]
733 /// to hold a `DeleteCustomClassRequest`.
734 ///
735 /// Note that all the setters affecting `request` are
736 /// mutually exclusive.
737 ///
738 /// # Example
739 /// ```ignore,no_run
740 /// # use google_cloud_speech_v2::model::OperationMetadata;
741 /// use google_cloud_speech_v2::model::DeleteCustomClassRequest;
742 /// let x = OperationMetadata::new().set_delete_custom_class_request(DeleteCustomClassRequest::default()/* use setters */);
743 /// assert!(x.delete_custom_class_request().is_some());
744 /// assert!(x.batch_recognize_request().is_none());
745 /// assert!(x.create_recognizer_request().is_none());
746 /// assert!(x.update_recognizer_request().is_none());
747 /// assert!(x.delete_recognizer_request().is_none());
748 /// assert!(x.undelete_recognizer_request().is_none());
749 /// assert!(x.create_custom_class_request().is_none());
750 /// assert!(x.update_custom_class_request().is_none());
751 /// assert!(x.undelete_custom_class_request().is_none());
752 /// assert!(x.create_phrase_set_request().is_none());
753 /// assert!(x.update_phrase_set_request().is_none());
754 /// assert!(x.delete_phrase_set_request().is_none());
755 /// assert!(x.undelete_phrase_set_request().is_none());
756 /// assert!(x.update_config_request().is_none());
757 /// ```
758 pub fn set_delete_custom_class_request<
759 T: std::convert::Into<std::boxed::Box<crate::model::DeleteCustomClassRequest>>,
760 >(
761 mut self,
762 v: T,
763 ) -> Self {
764 self.request = std::option::Option::Some(
765 crate::model::operation_metadata::Request::DeleteCustomClassRequest(v.into()),
766 );
767 self
768 }
769
770 /// The value of [request][crate::model::OperationMetadata::request]
771 /// if it holds a `UndeleteCustomClassRequest`, `None` if the field is not set or
772 /// holds a different branch.
773 pub fn undelete_custom_class_request(
774 &self,
775 ) -> std::option::Option<&std::boxed::Box<crate::model::UndeleteCustomClassRequest>> {
776 #[allow(unreachable_patterns)]
777 self.request.as_ref().and_then(|v| match v {
778 crate::model::operation_metadata::Request::UndeleteCustomClassRequest(v) => {
779 std::option::Option::Some(v)
780 }
781 _ => std::option::Option::None,
782 })
783 }
784
785 /// Sets the value of [request][crate::model::OperationMetadata::request]
786 /// to hold a `UndeleteCustomClassRequest`.
787 ///
788 /// Note that all the setters affecting `request` are
789 /// mutually exclusive.
790 ///
791 /// # Example
792 /// ```ignore,no_run
793 /// # use google_cloud_speech_v2::model::OperationMetadata;
794 /// use google_cloud_speech_v2::model::UndeleteCustomClassRequest;
795 /// let x = OperationMetadata::new().set_undelete_custom_class_request(UndeleteCustomClassRequest::default()/* use setters */);
796 /// assert!(x.undelete_custom_class_request().is_some());
797 /// assert!(x.batch_recognize_request().is_none());
798 /// assert!(x.create_recognizer_request().is_none());
799 /// assert!(x.update_recognizer_request().is_none());
800 /// assert!(x.delete_recognizer_request().is_none());
801 /// assert!(x.undelete_recognizer_request().is_none());
802 /// assert!(x.create_custom_class_request().is_none());
803 /// assert!(x.update_custom_class_request().is_none());
804 /// assert!(x.delete_custom_class_request().is_none());
805 /// assert!(x.create_phrase_set_request().is_none());
806 /// assert!(x.update_phrase_set_request().is_none());
807 /// assert!(x.delete_phrase_set_request().is_none());
808 /// assert!(x.undelete_phrase_set_request().is_none());
809 /// assert!(x.update_config_request().is_none());
810 /// ```
811 pub fn set_undelete_custom_class_request<
812 T: std::convert::Into<std::boxed::Box<crate::model::UndeleteCustomClassRequest>>,
813 >(
814 mut self,
815 v: T,
816 ) -> Self {
817 self.request = std::option::Option::Some(
818 crate::model::operation_metadata::Request::UndeleteCustomClassRequest(v.into()),
819 );
820 self
821 }
822
823 /// The value of [request][crate::model::OperationMetadata::request]
824 /// if it holds a `CreatePhraseSetRequest`, `None` if the field is not set or
825 /// holds a different branch.
826 pub fn create_phrase_set_request(
827 &self,
828 ) -> std::option::Option<&std::boxed::Box<crate::model::CreatePhraseSetRequest>> {
829 #[allow(unreachable_patterns)]
830 self.request.as_ref().and_then(|v| match v {
831 crate::model::operation_metadata::Request::CreatePhraseSetRequest(v) => {
832 std::option::Option::Some(v)
833 }
834 _ => std::option::Option::None,
835 })
836 }
837
838 /// Sets the value of [request][crate::model::OperationMetadata::request]
839 /// to hold a `CreatePhraseSetRequest`.
840 ///
841 /// Note that all the setters affecting `request` are
842 /// mutually exclusive.
843 ///
844 /// # Example
845 /// ```ignore,no_run
846 /// # use google_cloud_speech_v2::model::OperationMetadata;
847 /// use google_cloud_speech_v2::model::CreatePhraseSetRequest;
848 /// let x = OperationMetadata::new().set_create_phrase_set_request(CreatePhraseSetRequest::default()/* use setters */);
849 /// assert!(x.create_phrase_set_request().is_some());
850 /// assert!(x.batch_recognize_request().is_none());
851 /// assert!(x.create_recognizer_request().is_none());
852 /// assert!(x.update_recognizer_request().is_none());
853 /// assert!(x.delete_recognizer_request().is_none());
854 /// assert!(x.undelete_recognizer_request().is_none());
855 /// assert!(x.create_custom_class_request().is_none());
856 /// assert!(x.update_custom_class_request().is_none());
857 /// assert!(x.delete_custom_class_request().is_none());
858 /// assert!(x.undelete_custom_class_request().is_none());
859 /// assert!(x.update_phrase_set_request().is_none());
860 /// assert!(x.delete_phrase_set_request().is_none());
861 /// assert!(x.undelete_phrase_set_request().is_none());
862 /// assert!(x.update_config_request().is_none());
863 /// ```
864 pub fn set_create_phrase_set_request<
865 T: std::convert::Into<std::boxed::Box<crate::model::CreatePhraseSetRequest>>,
866 >(
867 mut self,
868 v: T,
869 ) -> Self {
870 self.request = std::option::Option::Some(
871 crate::model::operation_metadata::Request::CreatePhraseSetRequest(v.into()),
872 );
873 self
874 }
875
876 /// The value of [request][crate::model::OperationMetadata::request]
877 /// if it holds a `UpdatePhraseSetRequest`, `None` if the field is not set or
878 /// holds a different branch.
879 pub fn update_phrase_set_request(
880 &self,
881 ) -> std::option::Option<&std::boxed::Box<crate::model::UpdatePhraseSetRequest>> {
882 #[allow(unreachable_patterns)]
883 self.request.as_ref().and_then(|v| match v {
884 crate::model::operation_metadata::Request::UpdatePhraseSetRequest(v) => {
885 std::option::Option::Some(v)
886 }
887 _ => std::option::Option::None,
888 })
889 }
890
891 /// Sets the value of [request][crate::model::OperationMetadata::request]
892 /// to hold a `UpdatePhraseSetRequest`.
893 ///
894 /// Note that all the setters affecting `request` are
895 /// mutually exclusive.
896 ///
897 /// # Example
898 /// ```ignore,no_run
899 /// # use google_cloud_speech_v2::model::OperationMetadata;
900 /// use google_cloud_speech_v2::model::UpdatePhraseSetRequest;
901 /// let x = OperationMetadata::new().set_update_phrase_set_request(UpdatePhraseSetRequest::default()/* use setters */);
902 /// assert!(x.update_phrase_set_request().is_some());
903 /// assert!(x.batch_recognize_request().is_none());
904 /// assert!(x.create_recognizer_request().is_none());
905 /// assert!(x.update_recognizer_request().is_none());
906 /// assert!(x.delete_recognizer_request().is_none());
907 /// assert!(x.undelete_recognizer_request().is_none());
908 /// assert!(x.create_custom_class_request().is_none());
909 /// assert!(x.update_custom_class_request().is_none());
910 /// assert!(x.delete_custom_class_request().is_none());
911 /// assert!(x.undelete_custom_class_request().is_none());
912 /// assert!(x.create_phrase_set_request().is_none());
913 /// assert!(x.delete_phrase_set_request().is_none());
914 /// assert!(x.undelete_phrase_set_request().is_none());
915 /// assert!(x.update_config_request().is_none());
916 /// ```
917 pub fn set_update_phrase_set_request<
918 T: std::convert::Into<std::boxed::Box<crate::model::UpdatePhraseSetRequest>>,
919 >(
920 mut self,
921 v: T,
922 ) -> Self {
923 self.request = std::option::Option::Some(
924 crate::model::operation_metadata::Request::UpdatePhraseSetRequest(v.into()),
925 );
926 self
927 }
928
929 /// The value of [request][crate::model::OperationMetadata::request]
930 /// if it holds a `DeletePhraseSetRequest`, `None` if the field is not set or
931 /// holds a different branch.
932 pub fn delete_phrase_set_request(
933 &self,
934 ) -> std::option::Option<&std::boxed::Box<crate::model::DeletePhraseSetRequest>> {
935 #[allow(unreachable_patterns)]
936 self.request.as_ref().and_then(|v| match v {
937 crate::model::operation_metadata::Request::DeletePhraseSetRequest(v) => {
938 std::option::Option::Some(v)
939 }
940 _ => std::option::Option::None,
941 })
942 }
943
944 /// Sets the value of [request][crate::model::OperationMetadata::request]
945 /// to hold a `DeletePhraseSetRequest`.
946 ///
947 /// Note that all the setters affecting `request` are
948 /// mutually exclusive.
949 ///
950 /// # Example
951 /// ```ignore,no_run
952 /// # use google_cloud_speech_v2::model::OperationMetadata;
953 /// use google_cloud_speech_v2::model::DeletePhraseSetRequest;
954 /// let x = OperationMetadata::new().set_delete_phrase_set_request(DeletePhraseSetRequest::default()/* use setters */);
955 /// assert!(x.delete_phrase_set_request().is_some());
956 /// assert!(x.batch_recognize_request().is_none());
957 /// assert!(x.create_recognizer_request().is_none());
958 /// assert!(x.update_recognizer_request().is_none());
959 /// assert!(x.delete_recognizer_request().is_none());
960 /// assert!(x.undelete_recognizer_request().is_none());
961 /// assert!(x.create_custom_class_request().is_none());
962 /// assert!(x.update_custom_class_request().is_none());
963 /// assert!(x.delete_custom_class_request().is_none());
964 /// assert!(x.undelete_custom_class_request().is_none());
965 /// assert!(x.create_phrase_set_request().is_none());
966 /// assert!(x.update_phrase_set_request().is_none());
967 /// assert!(x.undelete_phrase_set_request().is_none());
968 /// assert!(x.update_config_request().is_none());
969 /// ```
970 pub fn set_delete_phrase_set_request<
971 T: std::convert::Into<std::boxed::Box<crate::model::DeletePhraseSetRequest>>,
972 >(
973 mut self,
974 v: T,
975 ) -> Self {
976 self.request = std::option::Option::Some(
977 crate::model::operation_metadata::Request::DeletePhraseSetRequest(v.into()),
978 );
979 self
980 }
981
982 /// The value of [request][crate::model::OperationMetadata::request]
983 /// if it holds a `UndeletePhraseSetRequest`, `None` if the field is not set or
984 /// holds a different branch.
985 pub fn undelete_phrase_set_request(
986 &self,
987 ) -> std::option::Option<&std::boxed::Box<crate::model::UndeletePhraseSetRequest>> {
988 #[allow(unreachable_patterns)]
989 self.request.as_ref().and_then(|v| match v {
990 crate::model::operation_metadata::Request::UndeletePhraseSetRequest(v) => {
991 std::option::Option::Some(v)
992 }
993 _ => std::option::Option::None,
994 })
995 }
996
997 /// Sets the value of [request][crate::model::OperationMetadata::request]
998 /// to hold a `UndeletePhraseSetRequest`.
999 ///
1000 /// Note that all the setters affecting `request` are
1001 /// mutually exclusive.
1002 ///
1003 /// # Example
1004 /// ```ignore,no_run
1005 /// # use google_cloud_speech_v2::model::OperationMetadata;
1006 /// use google_cloud_speech_v2::model::UndeletePhraseSetRequest;
1007 /// let x = OperationMetadata::new().set_undelete_phrase_set_request(UndeletePhraseSetRequest::default()/* use setters */);
1008 /// assert!(x.undelete_phrase_set_request().is_some());
1009 /// assert!(x.batch_recognize_request().is_none());
1010 /// assert!(x.create_recognizer_request().is_none());
1011 /// assert!(x.update_recognizer_request().is_none());
1012 /// assert!(x.delete_recognizer_request().is_none());
1013 /// assert!(x.undelete_recognizer_request().is_none());
1014 /// assert!(x.create_custom_class_request().is_none());
1015 /// assert!(x.update_custom_class_request().is_none());
1016 /// assert!(x.delete_custom_class_request().is_none());
1017 /// assert!(x.undelete_custom_class_request().is_none());
1018 /// assert!(x.create_phrase_set_request().is_none());
1019 /// assert!(x.update_phrase_set_request().is_none());
1020 /// assert!(x.delete_phrase_set_request().is_none());
1021 /// assert!(x.update_config_request().is_none());
1022 /// ```
1023 pub fn set_undelete_phrase_set_request<
1024 T: std::convert::Into<std::boxed::Box<crate::model::UndeletePhraseSetRequest>>,
1025 >(
1026 mut self,
1027 v: T,
1028 ) -> Self {
1029 self.request = std::option::Option::Some(
1030 crate::model::operation_metadata::Request::UndeletePhraseSetRequest(v.into()),
1031 );
1032 self
1033 }
1034
1035 /// The value of [request][crate::model::OperationMetadata::request]
1036 /// if it holds a `UpdateConfigRequest`, `None` if the field is not set or
1037 /// holds a different branch.
1038 #[deprecated]
1039 pub fn update_config_request(
1040 &self,
1041 ) -> std::option::Option<&std::boxed::Box<crate::model::UpdateConfigRequest>> {
1042 #[allow(unreachable_patterns)]
1043 self.request.as_ref().and_then(|v| match v {
1044 crate::model::operation_metadata::Request::UpdateConfigRequest(v) => {
1045 std::option::Option::Some(v)
1046 }
1047 _ => std::option::Option::None,
1048 })
1049 }
1050
1051 /// Sets the value of [request][crate::model::OperationMetadata::request]
1052 /// to hold a `UpdateConfigRequest`.
1053 ///
1054 /// Note that all the setters affecting `request` are
1055 /// mutually exclusive.
1056 ///
1057 /// # Example
1058 /// ```ignore,no_run
1059 /// # use google_cloud_speech_v2::model::OperationMetadata;
1060 /// use google_cloud_speech_v2::model::UpdateConfigRequest;
1061 /// let x = OperationMetadata::new().set_update_config_request(UpdateConfigRequest::default()/* use setters */);
1062 /// assert!(x.update_config_request().is_some());
1063 /// assert!(x.batch_recognize_request().is_none());
1064 /// assert!(x.create_recognizer_request().is_none());
1065 /// assert!(x.update_recognizer_request().is_none());
1066 /// assert!(x.delete_recognizer_request().is_none());
1067 /// assert!(x.undelete_recognizer_request().is_none());
1068 /// assert!(x.create_custom_class_request().is_none());
1069 /// assert!(x.update_custom_class_request().is_none());
1070 /// assert!(x.delete_custom_class_request().is_none());
1071 /// assert!(x.undelete_custom_class_request().is_none());
1072 /// assert!(x.create_phrase_set_request().is_none());
1073 /// assert!(x.update_phrase_set_request().is_none());
1074 /// assert!(x.delete_phrase_set_request().is_none());
1075 /// assert!(x.undelete_phrase_set_request().is_none());
1076 /// ```
1077 #[deprecated]
1078 pub fn set_update_config_request<
1079 T: std::convert::Into<std::boxed::Box<crate::model::UpdateConfigRequest>>,
1080 >(
1081 mut self,
1082 v: T,
1083 ) -> Self {
1084 self.request = std::option::Option::Some(
1085 crate::model::operation_metadata::Request::UpdateConfigRequest(v.into()),
1086 );
1087 self
1088 }
1089
1090 /// Sets the value of [metadata][crate::model::OperationMetadata::metadata].
1091 ///
1092 /// Note that all the setters affecting `metadata` are mutually
1093 /// exclusive.
1094 ///
1095 /// # Example
1096 /// ```ignore,no_run
1097 /// # use google_cloud_speech_v2::model::OperationMetadata;
1098 /// use google_cloud_speech_v2::model::BatchRecognizeMetadata;
1099 /// let x = OperationMetadata::new().set_metadata(Some(
1100 /// google_cloud_speech_v2::model::operation_metadata::Metadata::BatchRecognizeMetadata(BatchRecognizeMetadata::default().into())));
1101 /// ```
1102 pub fn set_metadata<
1103 T: std::convert::Into<std::option::Option<crate::model::operation_metadata::Metadata>>,
1104 >(
1105 mut self,
1106 v: T,
1107 ) -> Self {
1108 self.metadata = v.into();
1109 self
1110 }
1111
1112 /// The value of [metadata][crate::model::OperationMetadata::metadata]
1113 /// if it holds a `BatchRecognizeMetadata`, `None` if the field is not set or
1114 /// holds a different branch.
1115 pub fn batch_recognize_metadata(
1116 &self,
1117 ) -> std::option::Option<&std::boxed::Box<crate::model::BatchRecognizeMetadata>> {
1118 #[allow(unreachable_patterns)]
1119 self.metadata.as_ref().and_then(|v| match v {
1120 crate::model::operation_metadata::Metadata::BatchRecognizeMetadata(v) => {
1121 std::option::Option::Some(v)
1122 }
1123 _ => std::option::Option::None,
1124 })
1125 }
1126
1127 /// Sets the value of [metadata][crate::model::OperationMetadata::metadata]
1128 /// to hold a `BatchRecognizeMetadata`.
1129 ///
1130 /// Note that all the setters affecting `metadata` are
1131 /// mutually exclusive.
1132 ///
1133 /// # Example
1134 /// ```ignore,no_run
1135 /// # use google_cloud_speech_v2::model::OperationMetadata;
1136 /// use google_cloud_speech_v2::model::BatchRecognizeMetadata;
1137 /// let x = OperationMetadata::new().set_batch_recognize_metadata(BatchRecognizeMetadata::default()/* use setters */);
1138 /// assert!(x.batch_recognize_metadata().is_some());
1139 /// ```
1140 pub fn set_batch_recognize_metadata<
1141 T: std::convert::Into<std::boxed::Box<crate::model::BatchRecognizeMetadata>>,
1142 >(
1143 mut self,
1144 v: T,
1145 ) -> Self {
1146 self.metadata = std::option::Option::Some(
1147 crate::model::operation_metadata::Metadata::BatchRecognizeMetadata(v.into()),
1148 );
1149 self
1150 }
1151}
1152
1153impl wkt::message::Message for OperationMetadata {
1154 fn typename() -> &'static str {
1155 "type.googleapis.com/google.cloud.speech.v2.OperationMetadata"
1156 }
1157}
1158
/// Defines additional types related to [OperationMetadata].
pub mod operation_metadata {
    #[allow(unused_imports)]
    use super::*;

    /// The request that spawned the Operation.
    ///
    /// This is a protobuf `oneof`: at most one branch is set at a time, and the
    /// corresponding setters on [OperationMetadata] are mutually exclusive.
    // NOTE(review): every variant boxes its payload, presumably to keep the
    // enum (and thus OperationMetadata) small — confirm against the code
    // generator's conventions.
    #[derive(Clone, Debug, PartialEq)]
    #[non_exhaustive]
    pub enum Request {
        /// The BatchRecognizeRequest that spawned the Operation.
        BatchRecognizeRequest(std::boxed::Box<crate::model::BatchRecognizeRequest>),
        /// The CreateRecognizerRequest that spawned the Operation.
        CreateRecognizerRequest(std::boxed::Box<crate::model::CreateRecognizerRequest>),
        /// The UpdateRecognizerRequest that spawned the Operation.
        UpdateRecognizerRequest(std::boxed::Box<crate::model::UpdateRecognizerRequest>),
        /// The DeleteRecognizerRequest that spawned the Operation.
        DeleteRecognizerRequest(std::boxed::Box<crate::model::DeleteRecognizerRequest>),
        /// The UndeleteRecognizerRequest that spawned the Operation.
        UndeleteRecognizerRequest(std::boxed::Box<crate::model::UndeleteRecognizerRequest>),
        /// The CreateCustomClassRequest that spawned the Operation.
        CreateCustomClassRequest(std::boxed::Box<crate::model::CreateCustomClassRequest>),
        /// The UpdateCustomClassRequest that spawned the Operation.
        UpdateCustomClassRequest(std::boxed::Box<crate::model::UpdateCustomClassRequest>),
        /// The DeleteCustomClassRequest that spawned the Operation.
        DeleteCustomClassRequest(std::boxed::Box<crate::model::DeleteCustomClassRequest>),
        /// The UndeleteCustomClassRequest that spawned the Operation.
        UndeleteCustomClassRequest(std::boxed::Box<crate::model::UndeleteCustomClassRequest>),
        /// The CreatePhraseSetRequest that spawned the Operation.
        CreatePhraseSetRequest(std::boxed::Box<crate::model::CreatePhraseSetRequest>),
        /// The UpdatePhraseSetRequest that spawned the Operation.
        UpdatePhraseSetRequest(std::boxed::Box<crate::model::UpdatePhraseSetRequest>),
        /// The DeletePhraseSetRequest that spawned the Operation.
        DeletePhraseSetRequest(std::boxed::Box<crate::model::DeletePhraseSetRequest>),
        /// The UndeletePhraseSetRequest that spawned the Operation.
        UndeletePhraseSetRequest(std::boxed::Box<crate::model::UndeletePhraseSetRequest>),
        /// The UpdateConfigRequest that spawned the Operation.
        #[deprecated]
        UpdateConfigRequest(std::boxed::Box<crate::model::UpdateConfigRequest>),
    }

    /// Specific metadata per RPC.
    ///
    /// Also a protobuf `oneof`; currently only `BatchRecognize` reports
    /// RPC-specific metadata, but the enum is `#[non_exhaustive]` so new
    /// variants may be added without a breaking change.
    #[derive(Clone, Debug, PartialEq)]
    #[non_exhaustive]
    pub enum Metadata {
        /// Metadata specific to the BatchRecognize method.
        BatchRecognizeMetadata(std::boxed::Box<crate::model::BatchRecognizeMetadata>),
    }
}
1207
/// Request message for the
/// [ListRecognizers][google.cloud.speech.v2.Speech.ListRecognizers] method.
///
/// [google.cloud.speech.v2.Speech.ListRecognizers]: crate::client::Speech::list_recognizers
#[derive(Clone, Default, PartialEq)]
#[non_exhaustive]
pub struct ListRecognizersRequest {
    /// Required. The project and location of Recognizers to list. The expected
    /// format is `projects/{project}/locations/{location}`.
    pub parent: std::string::String,

    /// The maximum number of Recognizers to return. The service may return fewer
    /// than this value. If unspecified, at most 5 Recognizers will be returned.
    /// The maximum value is 100; values above 100 will be coerced to 100.
    pub page_size: i32,

    /// A page token, received from a previous
    /// [ListRecognizers][google.cloud.speech.v2.Speech.ListRecognizers] call.
    /// Provide this to retrieve the subsequent page.
    ///
    /// When paginating, all other parameters provided to
    /// [ListRecognizers][google.cloud.speech.v2.Speech.ListRecognizers] must match
    /// the call that provided the page token.
    ///
    /// [google.cloud.speech.v2.Speech.ListRecognizers]: crate::model::ListRecognizersRequest::page_token
    pub page_token: std::string::String,

    /// Whether, or not, to show resources that have been deleted.
    pub show_deleted: bool,

    // NOTE(review): presumably retains wire fields not modeled by this client
    // version so messages round-trip losslessly — confirm against the
    // `serialize`/`deserialize` modules.
    pub(crate) _unknown_fields: serde_json::Map<std::string::String, serde_json::Value>,
}
1240
1241impl ListRecognizersRequest {
1242 pub fn new() -> Self {
1243 std::default::Default::default()
1244 }
1245
1246 /// Sets the value of [parent][crate::model::ListRecognizersRequest::parent].
1247 ///
1248 /// # Example
1249 /// ```ignore,no_run
1250 /// # use google_cloud_speech_v2::model::ListRecognizersRequest;
1251 /// let x = ListRecognizersRequest::new().set_parent("example");
1252 /// ```
1253 pub fn set_parent<T: std::convert::Into<std::string::String>>(mut self, v: T) -> Self {
1254 self.parent = v.into();
1255 self
1256 }
1257
1258 /// Sets the value of [page_size][crate::model::ListRecognizersRequest::page_size].
1259 ///
1260 /// # Example
1261 /// ```ignore,no_run
1262 /// # use google_cloud_speech_v2::model::ListRecognizersRequest;
1263 /// let x = ListRecognizersRequest::new().set_page_size(42);
1264 /// ```
1265 pub fn set_page_size<T: std::convert::Into<i32>>(mut self, v: T) -> Self {
1266 self.page_size = v.into();
1267 self
1268 }
1269
1270 /// Sets the value of [page_token][crate::model::ListRecognizersRequest::page_token].
1271 ///
1272 /// # Example
1273 /// ```ignore,no_run
1274 /// # use google_cloud_speech_v2::model::ListRecognizersRequest;
1275 /// let x = ListRecognizersRequest::new().set_page_token("example");
1276 /// ```
1277 pub fn set_page_token<T: std::convert::Into<std::string::String>>(mut self, v: T) -> Self {
1278 self.page_token = v.into();
1279 self
1280 }
1281
1282 /// Sets the value of [show_deleted][crate::model::ListRecognizersRequest::show_deleted].
1283 ///
1284 /// # Example
1285 /// ```ignore,no_run
1286 /// # use google_cloud_speech_v2::model::ListRecognizersRequest;
1287 /// let x = ListRecognizersRequest::new().set_show_deleted(true);
1288 /// ```
1289 pub fn set_show_deleted<T: std::convert::Into<bool>>(mut self, v: T) -> Self {
1290 self.show_deleted = v.into();
1291 self
1292 }
1293}
1294
1295impl wkt::message::Message for ListRecognizersRequest {
1296 fn typename() -> &'static str {
1297 "type.googleapis.com/google.cloud.speech.v2.ListRecognizersRequest"
1298 }
1299}
1300
/// Response message for the
/// [ListRecognizers][google.cloud.speech.v2.Speech.ListRecognizers] method.
///
/// [google.cloud.speech.v2.Speech.ListRecognizers]: crate::client::Speech::list_recognizers
#[derive(Clone, Default, PartialEq)]
#[non_exhaustive]
pub struct ListRecognizersResponse {
    /// The list of requested Recognizers.
    pub recognizers: std::vec::Vec<crate::model::Recognizer>,

    /// A token, which can be sent as
    /// [page_token][google.cloud.speech.v2.ListRecognizersRequest.page_token] to
    /// retrieve the next page. If this field is omitted, there are no subsequent
    /// pages. This token expires after 72 hours.
    ///
    /// [google.cloud.speech.v2.ListRecognizersRequest.page_token]: crate::model::ListRecognizersRequest::page_token
    pub next_page_token: std::string::String,

    // NOTE(review): presumably retains wire fields not modeled by this client
    // version so messages round-trip losslessly — confirm against the
    // `serialize`/`deserialize` modules.
    pub(crate) _unknown_fields: serde_json::Map<std::string::String, serde_json::Value>,
}
1321
1322impl ListRecognizersResponse {
1323 pub fn new() -> Self {
1324 std::default::Default::default()
1325 }
1326
1327 /// Sets the value of [recognizers][crate::model::ListRecognizersResponse::recognizers].
1328 ///
1329 /// # Example
1330 /// ```ignore,no_run
1331 /// # use google_cloud_speech_v2::model::ListRecognizersResponse;
1332 /// use google_cloud_speech_v2::model::Recognizer;
1333 /// let x = ListRecognizersResponse::new()
1334 /// .set_recognizers([
1335 /// Recognizer::default()/* use setters */,
1336 /// Recognizer::default()/* use (different) setters */,
1337 /// ]);
1338 /// ```
1339 pub fn set_recognizers<T, V>(mut self, v: T) -> Self
1340 where
1341 T: std::iter::IntoIterator<Item = V>,
1342 V: std::convert::Into<crate::model::Recognizer>,
1343 {
1344 use std::iter::Iterator;
1345 self.recognizers = v.into_iter().map(|i| i.into()).collect();
1346 self
1347 }
1348
1349 /// Sets the value of [next_page_token][crate::model::ListRecognizersResponse::next_page_token].
1350 ///
1351 /// # Example
1352 /// ```ignore,no_run
1353 /// # use google_cloud_speech_v2::model::ListRecognizersResponse;
1354 /// let x = ListRecognizersResponse::new().set_next_page_token("example");
1355 /// ```
1356 pub fn set_next_page_token<T: std::convert::Into<std::string::String>>(mut self, v: T) -> Self {
1357 self.next_page_token = v.into();
1358 self
1359 }
1360}
1361
1362impl wkt::message::Message for ListRecognizersResponse {
1363 fn typename() -> &'static str {
1364 "type.googleapis.com/google.cloud.speech.v2.ListRecognizersResponse"
1365 }
1366}
1367
1368#[doc(hidden)]
1369impl google_cloud_gax::paginator::internal::PageableResponse for ListRecognizersResponse {
1370 type PageItem = crate::model::Recognizer;
1371
1372 fn items(self) -> std::vec::Vec<Self::PageItem> {
1373 self.recognizers
1374 }
1375
1376 fn next_page_token(&self) -> std::string::String {
1377 use std::clone::Clone;
1378 self.next_page_token.clone()
1379 }
1380}
1381
/// Request message for the
/// [GetRecognizer][google.cloud.speech.v2.Speech.GetRecognizer] method.
///
/// [google.cloud.speech.v2.Speech.GetRecognizer]: crate::client::Speech::get_recognizer
#[derive(Clone, Default, PartialEq)]
#[non_exhaustive]
pub struct GetRecognizerRequest {
    /// Required. The name of the Recognizer to retrieve. The expected format is
    /// `projects/{project}/locations/{location}/recognizers/{recognizer}`.
    pub name: std::string::String,

    // NOTE(review): presumably retains wire fields not modeled by this client
    // version so messages round-trip losslessly — confirm against the
    // `serialize`/`deserialize` modules.
    pub(crate) _unknown_fields: serde_json::Map<std::string::String, serde_json::Value>,
}
1395
1396impl GetRecognizerRequest {
1397 pub fn new() -> Self {
1398 std::default::Default::default()
1399 }
1400
1401 /// Sets the value of [name][crate::model::GetRecognizerRequest::name].
1402 ///
1403 /// # Example
1404 /// ```ignore,no_run
1405 /// # use google_cloud_speech_v2::model::GetRecognizerRequest;
1406 /// let x = GetRecognizerRequest::new().set_name("example");
1407 /// ```
1408 pub fn set_name<T: std::convert::Into<std::string::String>>(mut self, v: T) -> Self {
1409 self.name = v.into();
1410 self
1411 }
1412}
1413
1414impl wkt::message::Message for GetRecognizerRequest {
1415 fn typename() -> &'static str {
1416 "type.googleapis.com/google.cloud.speech.v2.GetRecognizerRequest"
1417 }
1418}
1419
/// Request message for the
/// [UpdateRecognizer][google.cloud.speech.v2.Speech.UpdateRecognizer] method.
///
/// [google.cloud.speech.v2.Speech.UpdateRecognizer]: crate::client::Speech::update_recognizer
#[derive(Clone, Default, PartialEq)]
#[non_exhaustive]
pub struct UpdateRecognizerRequest {
    /// Required. The Recognizer to update.
    ///
    /// The Recognizer's `name` field is used to identify the Recognizer to update.
    /// Format: `projects/{project}/locations/{location}/recognizers/{recognizer}`.
    pub recognizer: std::option::Option<crate::model::Recognizer>,

    /// The list of fields to update. If empty, all non-default valued fields are
    /// considered for update. Use `*` to update the entire Recognizer resource.
    pub update_mask: std::option::Option<wkt::FieldMask>,

    /// If set, validate the request and preview the updated Recognizer, but do not
    /// actually update it.
    pub validate_only: bool,

    // NOTE(review): presumably retains wire fields not modeled by this client
    // version so messages round-trip losslessly — confirm against the
    // `serialize`/`deserialize` modules.
    pub(crate) _unknown_fields: serde_json::Map<std::string::String, serde_json::Value>,
}
1443
1444impl UpdateRecognizerRequest {
1445 pub fn new() -> Self {
1446 std::default::Default::default()
1447 }
1448
1449 /// Sets the value of [recognizer][crate::model::UpdateRecognizerRequest::recognizer].
1450 ///
1451 /// # Example
1452 /// ```ignore,no_run
1453 /// # use google_cloud_speech_v2::model::UpdateRecognizerRequest;
1454 /// use google_cloud_speech_v2::model::Recognizer;
1455 /// let x = UpdateRecognizerRequest::new().set_recognizer(Recognizer::default()/* use setters */);
1456 /// ```
1457 pub fn set_recognizer<T>(mut self, v: T) -> Self
1458 where
1459 T: std::convert::Into<crate::model::Recognizer>,
1460 {
1461 self.recognizer = std::option::Option::Some(v.into());
1462 self
1463 }
1464
1465 /// Sets or clears the value of [recognizer][crate::model::UpdateRecognizerRequest::recognizer].
1466 ///
1467 /// # Example
1468 /// ```ignore,no_run
1469 /// # use google_cloud_speech_v2::model::UpdateRecognizerRequest;
1470 /// use google_cloud_speech_v2::model::Recognizer;
1471 /// let x = UpdateRecognizerRequest::new().set_or_clear_recognizer(Some(Recognizer::default()/* use setters */));
1472 /// let x = UpdateRecognizerRequest::new().set_or_clear_recognizer(None::<Recognizer>);
1473 /// ```
1474 pub fn set_or_clear_recognizer<T>(mut self, v: std::option::Option<T>) -> Self
1475 where
1476 T: std::convert::Into<crate::model::Recognizer>,
1477 {
1478 self.recognizer = v.map(|x| x.into());
1479 self
1480 }
1481
1482 /// Sets the value of [update_mask][crate::model::UpdateRecognizerRequest::update_mask].
1483 ///
1484 /// # Example
1485 /// ```ignore,no_run
1486 /// # use google_cloud_speech_v2::model::UpdateRecognizerRequest;
1487 /// use wkt::FieldMask;
1488 /// let x = UpdateRecognizerRequest::new().set_update_mask(FieldMask::default()/* use setters */);
1489 /// ```
1490 pub fn set_update_mask<T>(mut self, v: T) -> Self
1491 where
1492 T: std::convert::Into<wkt::FieldMask>,
1493 {
1494 self.update_mask = std::option::Option::Some(v.into());
1495 self
1496 }
1497
1498 /// Sets or clears the value of [update_mask][crate::model::UpdateRecognizerRequest::update_mask].
1499 ///
1500 /// # Example
1501 /// ```ignore,no_run
1502 /// # use google_cloud_speech_v2::model::UpdateRecognizerRequest;
1503 /// use wkt::FieldMask;
1504 /// let x = UpdateRecognizerRequest::new().set_or_clear_update_mask(Some(FieldMask::default()/* use setters */));
1505 /// let x = UpdateRecognizerRequest::new().set_or_clear_update_mask(None::<FieldMask>);
1506 /// ```
1507 pub fn set_or_clear_update_mask<T>(mut self, v: std::option::Option<T>) -> Self
1508 where
1509 T: std::convert::Into<wkt::FieldMask>,
1510 {
1511 self.update_mask = v.map(|x| x.into());
1512 self
1513 }
1514
1515 /// Sets the value of [validate_only][crate::model::UpdateRecognizerRequest::validate_only].
1516 ///
1517 /// # Example
1518 /// ```ignore,no_run
1519 /// # use google_cloud_speech_v2::model::UpdateRecognizerRequest;
1520 /// let x = UpdateRecognizerRequest::new().set_validate_only(true);
1521 /// ```
1522 pub fn set_validate_only<T: std::convert::Into<bool>>(mut self, v: T) -> Self {
1523 self.validate_only = v.into();
1524 self
1525 }
1526}
1527
1528impl wkt::message::Message for UpdateRecognizerRequest {
1529 fn typename() -> &'static str {
1530 "type.googleapis.com/google.cloud.speech.v2.UpdateRecognizerRequest"
1531 }
1532}
1533
/// Request message for the
/// [DeleteRecognizer][google.cloud.speech.v2.Speech.DeleteRecognizer] method.
///
/// [google.cloud.speech.v2.Speech.DeleteRecognizer]: crate::client::Speech::delete_recognizer
#[derive(Clone, Default, PartialEq)]
#[non_exhaustive]
pub struct DeleteRecognizerRequest {
    /// Required. The name of the Recognizer to delete.
    /// Format: `projects/{project}/locations/{location}/recognizers/{recognizer}`
    pub name: std::string::String,

    /// If set, validate the request and preview the deleted Recognizer, but do not
    /// actually delete it.
    pub validate_only: bool,

    /// If set to true, and the Recognizer is not found, the request will succeed
    /// and be a no-op (no Operation is recorded in this case).
    pub allow_missing: bool,

    /// This checksum is computed by the server based on the value of other
    /// fields. This may be sent on update, undelete, and delete requests to ensure
    /// the client has an up-to-date value before proceeding.
    pub etag: std::string::String,

    // NOTE(review): presumably retains wire fields not modeled by this client
    // version so messages round-trip losslessly — confirm against the
    // `serialize`/`deserialize` modules.
    pub(crate) _unknown_fields: serde_json::Map<std::string::String, serde_json::Value>,
}
1560
1561impl DeleteRecognizerRequest {
1562 pub fn new() -> Self {
1563 std::default::Default::default()
1564 }
1565
1566 /// Sets the value of [name][crate::model::DeleteRecognizerRequest::name].
1567 ///
1568 /// # Example
1569 /// ```ignore,no_run
1570 /// # use google_cloud_speech_v2::model::DeleteRecognizerRequest;
1571 /// let x = DeleteRecognizerRequest::new().set_name("example");
1572 /// ```
1573 pub fn set_name<T: std::convert::Into<std::string::String>>(mut self, v: T) -> Self {
1574 self.name = v.into();
1575 self
1576 }
1577
1578 /// Sets the value of [validate_only][crate::model::DeleteRecognizerRequest::validate_only].
1579 ///
1580 /// # Example
1581 /// ```ignore,no_run
1582 /// # use google_cloud_speech_v2::model::DeleteRecognizerRequest;
1583 /// let x = DeleteRecognizerRequest::new().set_validate_only(true);
1584 /// ```
1585 pub fn set_validate_only<T: std::convert::Into<bool>>(mut self, v: T) -> Self {
1586 self.validate_only = v.into();
1587 self
1588 }
1589
1590 /// Sets the value of [allow_missing][crate::model::DeleteRecognizerRequest::allow_missing].
1591 ///
1592 /// # Example
1593 /// ```ignore,no_run
1594 /// # use google_cloud_speech_v2::model::DeleteRecognizerRequest;
1595 /// let x = DeleteRecognizerRequest::new().set_allow_missing(true);
1596 /// ```
1597 pub fn set_allow_missing<T: std::convert::Into<bool>>(mut self, v: T) -> Self {
1598 self.allow_missing = v.into();
1599 self
1600 }
1601
1602 /// Sets the value of [etag][crate::model::DeleteRecognizerRequest::etag].
1603 ///
1604 /// # Example
1605 /// ```ignore,no_run
1606 /// # use google_cloud_speech_v2::model::DeleteRecognizerRequest;
1607 /// let x = DeleteRecognizerRequest::new().set_etag("example");
1608 /// ```
1609 pub fn set_etag<T: std::convert::Into<std::string::String>>(mut self, v: T) -> Self {
1610 self.etag = v.into();
1611 self
1612 }
1613}
1614
1615impl wkt::message::Message for DeleteRecognizerRequest {
1616 fn typename() -> &'static str {
1617 "type.googleapis.com/google.cloud.speech.v2.DeleteRecognizerRequest"
1618 }
1619}
1620
/// Request message for the
/// [UndeleteRecognizer][google.cloud.speech.v2.Speech.UndeleteRecognizer]
/// method.
///
/// [google.cloud.speech.v2.Speech.UndeleteRecognizer]: crate::client::Speech::undelete_recognizer
#[derive(Clone, Default, PartialEq)]
#[non_exhaustive]
pub struct UndeleteRecognizerRequest {
    /// Required. The name of the Recognizer to undelete.
    /// Format: `projects/{project}/locations/{location}/recognizers/{recognizer}`
    pub name: std::string::String,

    /// If set, validate the request and preview the undeleted Recognizer, but do
    /// not actually undelete it.
    pub validate_only: bool,

    /// This checksum is computed by the server based on the value of other
    /// fields. This may be sent on update, undelete, and delete requests to ensure
    /// the client has an up-to-date value before proceeding.
    pub etag: std::string::String,

    // Wire-format fields not modeled above; presumably retained so they survive a
    // deserialize/serialize round trip (see the serialize/deserialize modules).
    pub(crate) _unknown_fields: serde_json::Map<std::string::String, serde_json::Value>,
}
1644
1645impl UndeleteRecognizerRequest {
1646 pub fn new() -> Self {
1647 std::default::Default::default()
1648 }
1649
1650 /// Sets the value of [name][crate::model::UndeleteRecognizerRequest::name].
1651 ///
1652 /// # Example
1653 /// ```ignore,no_run
1654 /// # use google_cloud_speech_v2::model::UndeleteRecognizerRequest;
1655 /// let x = UndeleteRecognizerRequest::new().set_name("example");
1656 /// ```
1657 pub fn set_name<T: std::convert::Into<std::string::String>>(mut self, v: T) -> Self {
1658 self.name = v.into();
1659 self
1660 }
1661
1662 /// Sets the value of [validate_only][crate::model::UndeleteRecognizerRequest::validate_only].
1663 ///
1664 /// # Example
1665 /// ```ignore,no_run
1666 /// # use google_cloud_speech_v2::model::UndeleteRecognizerRequest;
1667 /// let x = UndeleteRecognizerRequest::new().set_validate_only(true);
1668 /// ```
1669 pub fn set_validate_only<T: std::convert::Into<bool>>(mut self, v: T) -> Self {
1670 self.validate_only = v.into();
1671 self
1672 }
1673
1674 /// Sets the value of [etag][crate::model::UndeleteRecognizerRequest::etag].
1675 ///
1676 /// # Example
1677 /// ```ignore,no_run
1678 /// # use google_cloud_speech_v2::model::UndeleteRecognizerRequest;
1679 /// let x = UndeleteRecognizerRequest::new().set_etag("example");
1680 /// ```
1681 pub fn set_etag<T: std::convert::Into<std::string::String>>(mut self, v: T) -> Self {
1682 self.etag = v.into();
1683 self
1684 }
1685}
1686
1687impl wkt::message::Message for UndeleteRecognizerRequest {
1688 fn typename() -> &'static str {
1689 "type.googleapis.com/google.cloud.speech.v2.UndeleteRecognizerRequest"
1690 }
1691}
1692
/// A Recognizer message. Stores recognition configuration and metadata.
#[derive(Clone, Default, PartialEq)]
#[non_exhaustive]
pub struct Recognizer {
    /// Output only. Identifier. The resource name of the Recognizer.
    /// Format: `projects/{project}/locations/{location}/recognizers/{recognizer}`.
    pub name: std::string::String,

    /// Output only. System-assigned unique identifier for the Recognizer.
    pub uid: std::string::String,

    /// User-settable, human-readable name for the Recognizer. Must be 63
    /// characters or less.
    pub display_name: std::string::String,

    /// Optional. This field is now deprecated. Prefer the
    /// [`model`][google.cloud.speech.v2.RecognitionConfig.model] field in the
    /// [`RecognitionConfig`][google.cloud.speech.v2.RecognitionConfig] message.
    ///
    /// Which model to use for recognition requests. Select the model best suited
    /// to your domain to get best results.
    ///
    /// Guidance for choosing which model to use can be found in the [Transcription
    /// Models
    /// Documentation](https://cloud.google.com/speech-to-text/v2/docs/transcription-model)
    /// and the models supported in each region can be found in the [Table Of
    /// Supported
    /// Models](https://cloud.google.com/speech-to-text/v2/docs/speech-to-text-supported-languages).
    ///
    /// [google.cloud.speech.v2.RecognitionConfig]: crate::model::RecognitionConfig
    /// [google.cloud.speech.v2.RecognitionConfig.model]: crate::model::RecognitionConfig::model
    #[deprecated]
    pub model: std::string::String,

    /// Optional. This field is now deprecated. Prefer the
    /// [`language_codes`][google.cloud.speech.v2.RecognitionConfig.language_codes]
    /// field in the
    /// [`RecognitionConfig`][google.cloud.speech.v2.RecognitionConfig] message.
    ///
    /// The language of the supplied audio as a
    /// [BCP-47](https://www.rfc-editor.org/rfc/bcp/bcp47.txt) language tag.
    ///
    /// Supported languages for each model are listed in the [Table of Supported
    /// Models](https://cloud.google.com/speech-to-text/v2/docs/speech-to-text-supported-languages).
    ///
    /// If additional languages are provided, recognition result will contain
    /// recognition in the most likely language detected. The recognition result
    /// will include the language tag of the language detected in the audio.
    /// When you create or update a Recognizer, these values are
    /// stored in normalized BCP-47 form. For example, "en-us" is stored as
    /// "en-US".
    ///
    /// [google.cloud.speech.v2.RecognitionConfig]: crate::model::RecognitionConfig
    /// [google.cloud.speech.v2.RecognitionConfig.language_codes]: crate::model::RecognitionConfig::language_codes
    #[deprecated]
    pub language_codes: std::vec::Vec<std::string::String>,

    /// Default configuration to use for requests with this Recognizer.
    /// This can be overwritten by inline configuration in the
    /// [RecognizeRequest.config][google.cloud.speech.v2.RecognizeRequest.config]
    /// field.
    ///
    /// [google.cloud.speech.v2.RecognizeRequest.config]: crate::model::RecognizeRequest::config
    pub default_recognition_config: std::option::Option<crate::model::RecognitionConfig>,

    /// Allows users to store small amounts of arbitrary data.
    /// Both the key and the value must be 63 characters or less each.
    /// At most 100 annotations.
    pub annotations: std::collections::HashMap<std::string::String, std::string::String>,

    /// Output only. The Recognizer lifecycle state.
    pub state: crate::model::recognizer::State,

    /// Output only. Creation time.
    pub create_time: std::option::Option<wkt::Timestamp>,

    /// Output only. The most recent time this Recognizer was modified.
    pub update_time: std::option::Option<wkt::Timestamp>,

    /// Output only. The time at which this Recognizer was requested for deletion.
    pub delete_time: std::option::Option<wkt::Timestamp>,

    /// Output only. The time at which this Recognizer will be purged.
    pub expire_time: std::option::Option<wkt::Timestamp>,

    /// Output only. This checksum is computed by the server based on the value of
    /// other fields. This may be sent on update, undelete, and delete requests to
    /// ensure the client has an up-to-date value before proceeding.
    pub etag: std::string::String,

    /// Output only. Whether or not this Recognizer is in the process of being
    /// updated.
    pub reconciling: bool,

    /// Output only. The [KMS key
    /// name](https://cloud.google.com/kms/docs/resource-hierarchy#keys) with which
    /// the Recognizer is encrypted. The expected format is
    /// `projects/{project}/locations/{location}/keyRings/{key_ring}/cryptoKeys/{crypto_key}`.
    pub kms_key_name: std::string::String,

    /// Output only. The [KMS key version
    /// name](https://cloud.google.com/kms/docs/resource-hierarchy#key_versions)
    /// with which the Recognizer is encrypted. The expected format is
    /// `projects/{project}/locations/{location}/keyRings/{key_ring}/cryptoKeys/{crypto_key}/cryptoKeyVersions/{crypto_key_version}`.
    pub kms_key_version_name: std::string::String,

    // Wire-format fields not modeled above; presumably retained so they survive a
    // deserialize/serialize round trip (see the serialize/deserialize modules).
    pub(crate) _unknown_fields: serde_json::Map<std::string::String, serde_json::Value>,
}
1801
1802impl Recognizer {
1803 pub fn new() -> Self {
1804 std::default::Default::default()
1805 }
1806
1807 /// Sets the value of [name][crate::model::Recognizer::name].
1808 ///
1809 /// # Example
1810 /// ```ignore,no_run
1811 /// # use google_cloud_speech_v2::model::Recognizer;
1812 /// let x = Recognizer::new().set_name("example");
1813 /// ```
1814 pub fn set_name<T: std::convert::Into<std::string::String>>(mut self, v: T) -> Self {
1815 self.name = v.into();
1816 self
1817 }
1818
1819 /// Sets the value of [uid][crate::model::Recognizer::uid].
1820 ///
1821 /// # Example
1822 /// ```ignore,no_run
1823 /// # use google_cloud_speech_v2::model::Recognizer;
1824 /// let x = Recognizer::new().set_uid("example");
1825 /// ```
1826 pub fn set_uid<T: std::convert::Into<std::string::String>>(mut self, v: T) -> Self {
1827 self.uid = v.into();
1828 self
1829 }
1830
1831 /// Sets the value of [display_name][crate::model::Recognizer::display_name].
1832 ///
1833 /// # Example
1834 /// ```ignore,no_run
1835 /// # use google_cloud_speech_v2::model::Recognizer;
1836 /// let x = Recognizer::new().set_display_name("example");
1837 /// ```
1838 pub fn set_display_name<T: std::convert::Into<std::string::String>>(mut self, v: T) -> Self {
1839 self.display_name = v.into();
1840 self
1841 }
1842
1843 /// Sets the value of [model][crate::model::Recognizer::model].
1844 ///
1845 /// # Example
1846 /// ```ignore,no_run
1847 /// # use google_cloud_speech_v2::model::Recognizer;
1848 /// let x = Recognizer::new().set_model("example");
1849 /// ```
1850 #[deprecated]
1851 pub fn set_model<T: std::convert::Into<std::string::String>>(mut self, v: T) -> Self {
1852 self.model = v.into();
1853 self
1854 }
1855
1856 /// Sets the value of [language_codes][crate::model::Recognizer::language_codes].
1857 ///
1858 /// # Example
1859 /// ```ignore,no_run
1860 /// # use google_cloud_speech_v2::model::Recognizer;
1861 /// let x = Recognizer::new().set_language_codes(["a", "b", "c"]);
1862 /// ```
1863 #[deprecated]
1864 pub fn set_language_codes<T, V>(mut self, v: T) -> Self
1865 where
1866 T: std::iter::IntoIterator<Item = V>,
1867 V: std::convert::Into<std::string::String>,
1868 {
1869 use std::iter::Iterator;
1870 self.language_codes = v.into_iter().map(|i| i.into()).collect();
1871 self
1872 }
1873
1874 /// Sets the value of [default_recognition_config][crate::model::Recognizer::default_recognition_config].
1875 ///
1876 /// # Example
1877 /// ```ignore,no_run
1878 /// # use google_cloud_speech_v2::model::Recognizer;
1879 /// use google_cloud_speech_v2::model::RecognitionConfig;
1880 /// let x = Recognizer::new().set_default_recognition_config(RecognitionConfig::default()/* use setters */);
1881 /// ```
1882 pub fn set_default_recognition_config<T>(mut self, v: T) -> Self
1883 where
1884 T: std::convert::Into<crate::model::RecognitionConfig>,
1885 {
1886 self.default_recognition_config = std::option::Option::Some(v.into());
1887 self
1888 }
1889
1890 /// Sets or clears the value of [default_recognition_config][crate::model::Recognizer::default_recognition_config].
1891 ///
1892 /// # Example
1893 /// ```ignore,no_run
1894 /// # use google_cloud_speech_v2::model::Recognizer;
1895 /// use google_cloud_speech_v2::model::RecognitionConfig;
1896 /// let x = Recognizer::new().set_or_clear_default_recognition_config(Some(RecognitionConfig::default()/* use setters */));
1897 /// let x = Recognizer::new().set_or_clear_default_recognition_config(None::<RecognitionConfig>);
1898 /// ```
1899 pub fn set_or_clear_default_recognition_config<T>(mut self, v: std::option::Option<T>) -> Self
1900 where
1901 T: std::convert::Into<crate::model::RecognitionConfig>,
1902 {
1903 self.default_recognition_config = v.map(|x| x.into());
1904 self
1905 }
1906
1907 /// Sets the value of [annotations][crate::model::Recognizer::annotations].
1908 ///
1909 /// # Example
1910 /// ```ignore,no_run
1911 /// # use google_cloud_speech_v2::model::Recognizer;
1912 /// let x = Recognizer::new().set_annotations([
1913 /// ("key0", "abc"),
1914 /// ("key1", "xyz"),
1915 /// ]);
1916 /// ```
1917 pub fn set_annotations<T, K, V>(mut self, v: T) -> Self
1918 where
1919 T: std::iter::IntoIterator<Item = (K, V)>,
1920 K: std::convert::Into<std::string::String>,
1921 V: std::convert::Into<std::string::String>,
1922 {
1923 use std::iter::Iterator;
1924 self.annotations = v.into_iter().map(|(k, v)| (k.into(), v.into())).collect();
1925 self
1926 }
1927
1928 /// Sets the value of [state][crate::model::Recognizer::state].
1929 ///
1930 /// # Example
1931 /// ```ignore,no_run
1932 /// # use google_cloud_speech_v2::model::Recognizer;
1933 /// use google_cloud_speech_v2::model::recognizer::State;
1934 /// let x0 = Recognizer::new().set_state(State::Active);
1935 /// let x1 = Recognizer::new().set_state(State::Deleted);
1936 /// ```
1937 pub fn set_state<T: std::convert::Into<crate::model::recognizer::State>>(
1938 mut self,
1939 v: T,
1940 ) -> Self {
1941 self.state = v.into();
1942 self
1943 }
1944
1945 /// Sets the value of [create_time][crate::model::Recognizer::create_time].
1946 ///
1947 /// # Example
1948 /// ```ignore,no_run
1949 /// # use google_cloud_speech_v2::model::Recognizer;
1950 /// use wkt::Timestamp;
1951 /// let x = Recognizer::new().set_create_time(Timestamp::default()/* use setters */);
1952 /// ```
1953 pub fn set_create_time<T>(mut self, v: T) -> Self
1954 where
1955 T: std::convert::Into<wkt::Timestamp>,
1956 {
1957 self.create_time = std::option::Option::Some(v.into());
1958 self
1959 }
1960
1961 /// Sets or clears the value of [create_time][crate::model::Recognizer::create_time].
1962 ///
1963 /// # Example
1964 /// ```ignore,no_run
1965 /// # use google_cloud_speech_v2::model::Recognizer;
1966 /// use wkt::Timestamp;
1967 /// let x = Recognizer::new().set_or_clear_create_time(Some(Timestamp::default()/* use setters */));
1968 /// let x = Recognizer::new().set_or_clear_create_time(None::<Timestamp>);
1969 /// ```
1970 pub fn set_or_clear_create_time<T>(mut self, v: std::option::Option<T>) -> Self
1971 where
1972 T: std::convert::Into<wkt::Timestamp>,
1973 {
1974 self.create_time = v.map(|x| x.into());
1975 self
1976 }
1977
1978 /// Sets the value of [update_time][crate::model::Recognizer::update_time].
1979 ///
1980 /// # Example
1981 /// ```ignore,no_run
1982 /// # use google_cloud_speech_v2::model::Recognizer;
1983 /// use wkt::Timestamp;
1984 /// let x = Recognizer::new().set_update_time(Timestamp::default()/* use setters */);
1985 /// ```
1986 pub fn set_update_time<T>(mut self, v: T) -> Self
1987 where
1988 T: std::convert::Into<wkt::Timestamp>,
1989 {
1990 self.update_time = std::option::Option::Some(v.into());
1991 self
1992 }
1993
1994 /// Sets or clears the value of [update_time][crate::model::Recognizer::update_time].
1995 ///
1996 /// # Example
1997 /// ```ignore,no_run
1998 /// # use google_cloud_speech_v2::model::Recognizer;
1999 /// use wkt::Timestamp;
2000 /// let x = Recognizer::new().set_or_clear_update_time(Some(Timestamp::default()/* use setters */));
2001 /// let x = Recognizer::new().set_or_clear_update_time(None::<Timestamp>);
2002 /// ```
2003 pub fn set_or_clear_update_time<T>(mut self, v: std::option::Option<T>) -> Self
2004 where
2005 T: std::convert::Into<wkt::Timestamp>,
2006 {
2007 self.update_time = v.map(|x| x.into());
2008 self
2009 }
2010
2011 /// Sets the value of [delete_time][crate::model::Recognizer::delete_time].
2012 ///
2013 /// # Example
2014 /// ```ignore,no_run
2015 /// # use google_cloud_speech_v2::model::Recognizer;
2016 /// use wkt::Timestamp;
2017 /// let x = Recognizer::new().set_delete_time(Timestamp::default()/* use setters */);
2018 /// ```
2019 pub fn set_delete_time<T>(mut self, v: T) -> Self
2020 where
2021 T: std::convert::Into<wkt::Timestamp>,
2022 {
2023 self.delete_time = std::option::Option::Some(v.into());
2024 self
2025 }
2026
2027 /// Sets or clears the value of [delete_time][crate::model::Recognizer::delete_time].
2028 ///
2029 /// # Example
2030 /// ```ignore,no_run
2031 /// # use google_cloud_speech_v2::model::Recognizer;
2032 /// use wkt::Timestamp;
2033 /// let x = Recognizer::new().set_or_clear_delete_time(Some(Timestamp::default()/* use setters */));
2034 /// let x = Recognizer::new().set_or_clear_delete_time(None::<Timestamp>);
2035 /// ```
2036 pub fn set_or_clear_delete_time<T>(mut self, v: std::option::Option<T>) -> Self
2037 where
2038 T: std::convert::Into<wkt::Timestamp>,
2039 {
2040 self.delete_time = v.map(|x| x.into());
2041 self
2042 }
2043
2044 /// Sets the value of [expire_time][crate::model::Recognizer::expire_time].
2045 ///
2046 /// # Example
2047 /// ```ignore,no_run
2048 /// # use google_cloud_speech_v2::model::Recognizer;
2049 /// use wkt::Timestamp;
2050 /// let x = Recognizer::new().set_expire_time(Timestamp::default()/* use setters */);
2051 /// ```
2052 pub fn set_expire_time<T>(mut self, v: T) -> Self
2053 where
2054 T: std::convert::Into<wkt::Timestamp>,
2055 {
2056 self.expire_time = std::option::Option::Some(v.into());
2057 self
2058 }
2059
2060 /// Sets or clears the value of [expire_time][crate::model::Recognizer::expire_time].
2061 ///
2062 /// # Example
2063 /// ```ignore,no_run
2064 /// # use google_cloud_speech_v2::model::Recognizer;
2065 /// use wkt::Timestamp;
2066 /// let x = Recognizer::new().set_or_clear_expire_time(Some(Timestamp::default()/* use setters */));
2067 /// let x = Recognizer::new().set_or_clear_expire_time(None::<Timestamp>);
2068 /// ```
2069 pub fn set_or_clear_expire_time<T>(mut self, v: std::option::Option<T>) -> Self
2070 where
2071 T: std::convert::Into<wkt::Timestamp>,
2072 {
2073 self.expire_time = v.map(|x| x.into());
2074 self
2075 }
2076
2077 /// Sets the value of [etag][crate::model::Recognizer::etag].
2078 ///
2079 /// # Example
2080 /// ```ignore,no_run
2081 /// # use google_cloud_speech_v2::model::Recognizer;
2082 /// let x = Recognizer::new().set_etag("example");
2083 /// ```
2084 pub fn set_etag<T: std::convert::Into<std::string::String>>(mut self, v: T) -> Self {
2085 self.etag = v.into();
2086 self
2087 }
2088
2089 /// Sets the value of [reconciling][crate::model::Recognizer::reconciling].
2090 ///
2091 /// # Example
2092 /// ```ignore,no_run
2093 /// # use google_cloud_speech_v2::model::Recognizer;
2094 /// let x = Recognizer::new().set_reconciling(true);
2095 /// ```
2096 pub fn set_reconciling<T: std::convert::Into<bool>>(mut self, v: T) -> Self {
2097 self.reconciling = v.into();
2098 self
2099 }
2100
2101 /// Sets the value of [kms_key_name][crate::model::Recognizer::kms_key_name].
2102 ///
2103 /// # Example
2104 /// ```ignore,no_run
2105 /// # use google_cloud_speech_v2::model::Recognizer;
2106 /// let x = Recognizer::new().set_kms_key_name("example");
2107 /// ```
2108 pub fn set_kms_key_name<T: std::convert::Into<std::string::String>>(mut self, v: T) -> Self {
2109 self.kms_key_name = v.into();
2110 self
2111 }
2112
2113 /// Sets the value of [kms_key_version_name][crate::model::Recognizer::kms_key_version_name].
2114 ///
2115 /// # Example
2116 /// ```ignore,no_run
2117 /// # use google_cloud_speech_v2::model::Recognizer;
2118 /// let x = Recognizer::new().set_kms_key_version_name("example");
2119 /// ```
2120 pub fn set_kms_key_version_name<T: std::convert::Into<std::string::String>>(
2121 mut self,
2122 v: T,
2123 ) -> Self {
2124 self.kms_key_version_name = v.into();
2125 self
2126 }
2127}
2128
2129impl wkt::message::Message for Recognizer {
2130 fn typename() -> &'static str {
2131 "type.googleapis.com/google.cloud.speech.v2.Recognizer"
2132 }
2133}
2134
/// Defines additional types related to [Recognizer].
pub mod recognizer {
    #[allow(unused_imports)]
    use super::*;

    /// Set of states that define the lifecycle of a Recognizer.
    ///
    /// # Working with unknown values
    ///
    /// This enum is defined as `#[non_exhaustive]` because Google Cloud may add
    /// additional enum variants at any time. Adding new variants is not considered
    /// a breaking change. Applications should write their code in anticipation of:
    ///
    /// - New values appearing in future releases of the client library, **and**
    /// - New values received dynamically, without application changes.
    ///
    /// Please consult the [Working with enums] section in the user guide for some
    /// guidelines.
    ///
    /// [Working with enums]: https://google-cloud-rust.github.io/working_with_enums.html
    #[derive(Clone, Debug, PartialEq)]
    #[non_exhaustive]
    pub enum State {
        /// The default value. This value is used if the state is omitted.
        Unspecified,
        /// The Recognizer is active and ready for use.
        Active,
        /// This Recognizer has been deleted.
        Deleted,
        /// If set, the enum was initialized with an unknown value.
        ///
        /// Applications can examine the value using [State::value] or
        /// [State::name].
        UnknownValue(state::UnknownValue),
    }

    #[doc(hidden)]
    pub mod state {
        #[allow(unused_imports)]
        use super::*;
        // Opaque wrapper for enum values this client version does not recognize;
        // keeps the raw integer or string form for round-tripping.
        #[derive(Clone, Debug, PartialEq)]
        pub struct UnknownValue(pub(crate) wkt::internal::UnknownEnumValue);
    }

    impl State {
        /// Gets the enum value.
        ///
        /// Returns `None` if the enum contains an unknown value deserialized from
        /// the string representation of enums.
        pub fn value(&self) -> std::option::Option<i32> {
            // Note the protobuf numbering: ACTIVE = 2, DELETED = 4.
            match self {
                Self::Unspecified => std::option::Option::Some(0),
                Self::Active => std::option::Option::Some(2),
                Self::Deleted => std::option::Option::Some(4),
                Self::UnknownValue(u) => u.0.value(),
            }
        }

        /// Gets the enum value as a string.
        ///
        /// Returns `None` if the enum contains an unknown value deserialized from
        /// the integer representation of enums.
        pub fn name(&self) -> std::option::Option<&str> {
            match self {
                Self::Unspecified => std::option::Option::Some("STATE_UNSPECIFIED"),
                Self::Active => std::option::Option::Some("ACTIVE"),
                Self::Deleted => std::option::Option::Some("DELETED"),
                Self::UnknownValue(u) => u.0.name(),
            }
        }
    }

    impl std::default::Default for State {
        fn default() -> Self {
            use std::convert::From;
            // Delegate to the `From<i32>` conversion: 0 maps to `Unspecified`.
            Self::from(0)
        }
    }

    impl std::fmt::Display for State {
        fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::result::Result<(), std::fmt::Error> {
            wkt::internal::display_enum(f, self.name(), self.value())
        }
    }

    impl std::convert::From<i32> for State {
        fn from(value: i32) -> Self {
            match value {
                0 => Self::Unspecified,
                2 => Self::Active,
                4 => Self::Deleted,
                // Any other integer is preserved verbatim as an unknown value.
                _ => Self::UnknownValue(state::UnknownValue(
                    wkt::internal::UnknownEnumValue::Integer(value),
                )),
            }
        }
    }

    impl std::convert::From<&str> for State {
        fn from(value: &str) -> Self {
            use std::string::ToString;
            match value {
                "STATE_UNSPECIFIED" => Self::Unspecified,
                "ACTIVE" => Self::Active,
                "DELETED" => Self::Deleted,
                // Any other string is preserved verbatim as an unknown value.
                _ => Self::UnknownValue(state::UnknownValue(
                    wkt::internal::UnknownEnumValue::String(value.to_string()),
                )),
            }
        }
    }

    impl serde::ser::Serialize for State {
        fn serialize<S>(&self, serializer: S) -> std::result::Result<S::Ok, S::Error>
        where
            S: serde::Serializer,
        {
            // Known variants serialize as their protobuf integer value; an unknown
            // value re-serializes in whichever form (integer or string) it was read.
            match self {
                Self::Unspecified => serializer.serialize_i32(0),
                Self::Active => serializer.serialize_i32(2),
                Self::Deleted => serializer.serialize_i32(4),
                Self::UnknownValue(u) => u.0.serialize(serializer),
            }
        }
    }

    impl<'de> serde::de::Deserialize<'de> for State {
        fn deserialize<D>(deserializer: D) -> std::result::Result<Self, D::Error>
        where
            D: serde::Deserializer<'de>,
        {
            // `deserialize_any` lets the visitor accept either the integer or the
            // string representation of the enum.
            deserializer.deserialize_any(wkt::internal::EnumVisitor::<State>::new(
                ".google.cloud.speech.v2.Recognizer.State",
            ))
        }
    }
}
2272
/// Automatically detected decoding parameters.
/// Supported for the following encodings:
///
/// * WAV_LINEAR16: 16-bit signed little-endian PCM samples in a WAV container.
///
/// * WAV_MULAW: 8-bit companded mulaw samples in a WAV container.
///
/// * WAV_ALAW: 8-bit companded alaw samples in a WAV container.
///
/// * RFC4867_5_AMR: AMR frames with an rfc4867.5 header.
///
/// * RFC4867_5_AMRWB: AMR-WB frames with an rfc4867.5 header.
///
/// * FLAC: FLAC frames in the "native FLAC" container format.
///
/// * MP3: MPEG audio frames with optional (ignored) ID3 metadata.
///
/// * OGG_OPUS: Opus audio frames in an Ogg container.
///
/// * WEBM_OPUS: Opus audio frames in a WebM container.
///
/// * MP4_AAC: AAC audio frames in an MP4 container.
///
/// * M4A_AAC: AAC audio frames in an M4A container.
///
/// * MOV_AAC: AAC audio frames in an MOV container.
///
#[derive(Clone, Default, PartialEq)]
#[non_exhaustive]
pub struct AutoDetectDecodingConfig {
    // This message has no settable fields; only unknown wire-format fields are
    // carried, presumably to survive a deserialize/serialize round trip.
    pub(crate) _unknown_fields: serde_json::Map<std::string::String, serde_json::Value>,
}
2305
2306impl AutoDetectDecodingConfig {
2307 pub fn new() -> Self {
2308 std::default::Default::default()
2309 }
2310}
2311
2312impl wkt::message::Message for AutoDetectDecodingConfig {
2313 fn typename() -> &'static str {
2314 "type.googleapis.com/google.cloud.speech.v2.AutoDetectDecodingConfig"
2315 }
2316}
2317
/// Explicitly specified decoding parameters.
#[derive(Clone, Default, PartialEq)]
#[non_exhaustive]
pub struct ExplicitDecodingConfig {
    /// Required. Encoding of the audio data sent for recognition.
    pub encoding: crate::model::explicit_decoding_config::AudioEncoding,

    /// Optional. Sample rate in Hertz of the audio data sent for recognition.
    /// Valid values are: 8000-48000, and 16000 is optimal. For best results, set
    /// the sampling rate of the audio source to 16000 Hz. If that's not possible,
    /// use the native sample rate of the audio source (instead of resampling).
    /// Note that this field is marked as OPTIONAL for backward compatibility
    /// reasons. It is (and has always been) effectively REQUIRED.
    pub sample_rate_hertz: i32,

    /// Optional. Number of channels present in the audio data sent for
    /// recognition. Note that this field is marked as OPTIONAL for backward
    /// compatibility reasons. It is (and has always been) effectively REQUIRED.
    ///
    /// The maximum allowed value is 8.
    pub audio_channel_count: i32,

    // Wire-format fields not modeled above; presumably retained so they survive a
    // deserialize/serialize round trip (see the serialize/deserialize modules).
    pub(crate) _unknown_fields: serde_json::Map<std::string::String, serde_json::Value>,
}
2342
2343impl ExplicitDecodingConfig {
2344 pub fn new() -> Self {
2345 std::default::Default::default()
2346 }
2347
2348 /// Sets the value of [encoding][crate::model::ExplicitDecodingConfig::encoding].
2349 ///
2350 /// # Example
2351 /// ```ignore,no_run
2352 /// # use google_cloud_speech_v2::model::ExplicitDecodingConfig;
2353 /// use google_cloud_speech_v2::model::explicit_decoding_config::AudioEncoding;
2354 /// let x0 = ExplicitDecodingConfig::new().set_encoding(AudioEncoding::Linear16);
2355 /// let x1 = ExplicitDecodingConfig::new().set_encoding(AudioEncoding::Mulaw);
2356 /// let x2 = ExplicitDecodingConfig::new().set_encoding(AudioEncoding::Alaw);
2357 /// ```
2358 pub fn set_encoding<
2359 T: std::convert::Into<crate::model::explicit_decoding_config::AudioEncoding>,
2360 >(
2361 mut self,
2362 v: T,
2363 ) -> Self {
2364 self.encoding = v.into();
2365 self
2366 }
2367
2368 /// Sets the value of [sample_rate_hertz][crate::model::ExplicitDecodingConfig::sample_rate_hertz].
2369 ///
2370 /// # Example
2371 /// ```ignore,no_run
2372 /// # use google_cloud_speech_v2::model::ExplicitDecodingConfig;
2373 /// let x = ExplicitDecodingConfig::new().set_sample_rate_hertz(42);
2374 /// ```
2375 pub fn set_sample_rate_hertz<T: std::convert::Into<i32>>(mut self, v: T) -> Self {
2376 self.sample_rate_hertz = v.into();
2377 self
2378 }
2379
2380 /// Sets the value of [audio_channel_count][crate::model::ExplicitDecodingConfig::audio_channel_count].
2381 ///
2382 /// # Example
2383 /// ```ignore,no_run
2384 /// # use google_cloud_speech_v2::model::ExplicitDecodingConfig;
2385 /// let x = ExplicitDecodingConfig::new().set_audio_channel_count(42);
2386 /// ```
2387 pub fn set_audio_channel_count<T: std::convert::Into<i32>>(mut self, v: T) -> Self {
2388 self.audio_channel_count = v.into();
2389 self
2390 }
2391}
2392
2393impl wkt::message::Message for ExplicitDecodingConfig {
2394 fn typename() -> &'static str {
2395 "type.googleapis.com/google.cloud.speech.v2.ExplicitDecodingConfig"
2396 }
2397}
2398
/// Defines additional types related to [ExplicitDecodingConfig].
pub mod explicit_decoding_config {
    #[allow(unused_imports)]
    use super::*;

    /// Supported audio data encodings.
    ///
    /// # Working with unknown values
    ///
    /// This enum is defined as `#[non_exhaustive]` because Google Cloud may add
    /// additional enum variants at any time. Adding new variants is not considered
    /// a breaking change. Applications should write their code in anticipation of:
    ///
    /// - New values appearing in future releases of the client library, **and**
    /// - New values received dynamically, without application changes.
    ///
    /// Please consult the [Working with enums] section in the user guide for some
    /// guidelines.
    ///
    /// [Working with enums]: https://google-cloud-rust.github.io/working_with_enums.html
    #[derive(Clone, Debug, PartialEq)]
    #[non_exhaustive]
    pub enum AudioEncoding {
        /// Default value. This value is unused.
        Unspecified,
        /// Headerless 16-bit signed little-endian PCM samples.
        Linear16,
        /// Headerless 8-bit companded mulaw samples.
        Mulaw,
        /// Headerless 8-bit companded alaw samples.
        Alaw,
        /// AMR frames with an rfc4867.5 header.
        Amr,
        /// AMR-WB frames with an rfc4867.5 header.
        AmrWb,
        /// FLAC frames in the "native FLAC" container format.
        Flac,
        /// MPEG audio frames with optional (ignored) ID3 metadata.
        Mp3,
        /// Opus audio frames in an Ogg container.
        OggOpus,
        /// Opus audio frames in a WebM container.
        WebmOpus,
        /// AAC audio frames in an MP4 container.
        Mp4Aac,
        /// AAC audio frames in an M4A container.
        M4AAac,
        /// AAC audio frames in an MOV container.
        MovAac,
        /// If set, the enum was initialized with an unknown value.
        ///
        /// Applications can examine the value using [AudioEncoding::value] or
        /// [AudioEncoding::name].
        UnknownValue(audio_encoding::UnknownValue),
    }

    #[doc(hidden)]
    pub mod audio_encoding {
        #[allow(unused_imports)]
        use super::*;
        // Wrapper around an enum value — integer or string form — that this
        // version of the client library does not recognize.
        #[derive(Clone, Debug, PartialEq)]
        pub struct UnknownValue(pub(crate) wkt::internal::UnknownEnumValue);
    }

    impl AudioEncoding {
        /// Gets the enum value.
        ///
        /// Returns `None` if the enum contains an unknown value deserialized from
        /// the string representation of enums.
        pub fn value(&self) -> std::option::Option<i32> {
            match self {
                Self::Unspecified => std::option::Option::Some(0),
                Self::Linear16 => std::option::Option::Some(1),
                Self::Mulaw => std::option::Option::Some(2),
                Self::Alaw => std::option::Option::Some(3),
                Self::Amr => std::option::Option::Some(4),
                Self::AmrWb => std::option::Option::Some(5),
                Self::Flac => std::option::Option::Some(6),
                Self::Mp3 => std::option::Option::Some(7),
                Self::OggOpus => std::option::Option::Some(8),
                Self::WebmOpus => std::option::Option::Some(9),
                Self::Mp4Aac => std::option::Option::Some(10),
                Self::M4AAac => std::option::Option::Some(11),
                Self::MovAac => std::option::Option::Some(12),
                Self::UnknownValue(u) => u.0.value(),
            }
        }

        /// Gets the enum value as a string.
        ///
        /// Returns `None` if the enum contains an unknown value deserialized from
        /// the integer representation of enums.
        pub fn name(&self) -> std::option::Option<&str> {
            match self {
                Self::Unspecified => std::option::Option::Some("AUDIO_ENCODING_UNSPECIFIED"),
                Self::Linear16 => std::option::Option::Some("LINEAR16"),
                Self::Mulaw => std::option::Option::Some("MULAW"),
                Self::Alaw => std::option::Option::Some("ALAW"),
                Self::Amr => std::option::Option::Some("AMR"),
                Self::AmrWb => std::option::Option::Some("AMR_WB"),
                Self::Flac => std::option::Option::Some("FLAC"),
                Self::Mp3 => std::option::Option::Some("MP3"),
                Self::OggOpus => std::option::Option::Some("OGG_OPUS"),
                Self::WebmOpus => std::option::Option::Some("WEBM_OPUS"),
                Self::Mp4Aac => std::option::Option::Some("MP4_AAC"),
                Self::M4AAac => std::option::Option::Some("M4A_AAC"),
                Self::MovAac => std::option::Option::Some("MOV_AAC"),
                Self::UnknownValue(u) => u.0.name(),
            }
        }
    }

    // Defaults to the proto zero value: `from(0)` yields `Unspecified`.
    impl std::default::Default for AudioEncoding {
        fn default() -> Self {
            use std::convert::From;
            Self::from(0)
        }
    }

    // Human-readable form; delegates to `display_enum` with the enum's name
    // and numeric value (either may be `None` for unknown values).
    impl std::fmt::Display for AudioEncoding {
        fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::result::Result<(), std::fmt::Error> {
            wkt::internal::display_enum(f, self.name(), self.value())
        }
    }

    // Converts a protobuf enum number into a variant. Out-of-range integers
    // are preserved in `UnknownValue` so they round-trip unchanged.
    impl std::convert::From<i32> for AudioEncoding {
        fn from(value: i32) -> Self {
            match value {
                0 => Self::Unspecified,
                1 => Self::Linear16,
                2 => Self::Mulaw,
                3 => Self::Alaw,
                4 => Self::Amr,
                5 => Self::AmrWb,
                6 => Self::Flac,
                7 => Self::Mp3,
                8 => Self::OggOpus,
                9 => Self::WebmOpus,
                10 => Self::Mp4Aac,
                11 => Self::M4AAac,
                12 => Self::MovAac,
                _ => Self::UnknownValue(audio_encoding::UnknownValue(
                    wkt::internal::UnknownEnumValue::Integer(value),
                )),
            }
        }
    }

    // Converts a protobuf enum name into a variant. Unrecognized names are
    // preserved in `UnknownValue` so they round-trip unchanged.
    impl std::convert::From<&str> for AudioEncoding {
        fn from(value: &str) -> Self {
            use std::string::ToString;
            match value {
                "AUDIO_ENCODING_UNSPECIFIED" => Self::Unspecified,
                "LINEAR16" => Self::Linear16,
                "MULAW" => Self::Mulaw,
                "ALAW" => Self::Alaw,
                "AMR" => Self::Amr,
                "AMR_WB" => Self::AmrWb,
                "FLAC" => Self::Flac,
                "MP3" => Self::Mp3,
                "OGG_OPUS" => Self::OggOpus,
                "WEBM_OPUS" => Self::WebmOpus,
                "MP4_AAC" => Self::Mp4Aac,
                "M4A_AAC" => Self::M4AAac,
                "MOV_AAC" => Self::MovAac,
                _ => Self::UnknownValue(audio_encoding::UnknownValue(
                    wkt::internal::UnknownEnumValue::String(value.to_string()),
                )),
            }
        }
    }

    // Known variants serialize as their protobuf enum number; unknown values
    // serialize in whichever form (integer or string) they were received.
    impl serde::ser::Serialize for AudioEncoding {
        fn serialize<S>(&self, serializer: S) -> std::result::Result<S::Ok, S::Error>
        where
            S: serde::Serializer,
        {
            match self {
                Self::Unspecified => serializer.serialize_i32(0),
                Self::Linear16 => serializer.serialize_i32(1),
                Self::Mulaw => serializer.serialize_i32(2),
                Self::Alaw => serializer.serialize_i32(3),
                Self::Amr => serializer.serialize_i32(4),
                Self::AmrWb => serializer.serialize_i32(5),
                Self::Flac => serializer.serialize_i32(6),
                Self::Mp3 => serializer.serialize_i32(7),
                Self::OggOpus => serializer.serialize_i32(8),
                Self::WebmOpus => serializer.serialize_i32(9),
                Self::Mp4Aac => serializer.serialize_i32(10),
                Self::M4AAac => serializer.serialize_i32(11),
                Self::MovAac => serializer.serialize_i32(12),
                Self::UnknownValue(u) => u.0.serialize(serializer),
            }
        }
    }

    // Deserializes via `deserialize_any`, delegating to `EnumVisitor`; the
    // string passed is the fully qualified proto enum name.
    impl<'de> serde::de::Deserialize<'de> for AudioEncoding {
        fn deserialize<D>(deserializer: D) -> std::result::Result<Self, D::Error>
        where
            D: serde::Deserializer<'de>,
        {
            deserializer.deserialize_any(wkt::internal::EnumVisitor::<AudioEncoding>::new(
                ".google.cloud.speech.v2.ExplicitDecodingConfig.AudioEncoding",
            ))
        }
    }
}
2606
2607/// Configuration to enable speaker diarization.
2608#[derive(Clone, Default, PartialEq)]
2609#[non_exhaustive]
2610pub struct SpeakerDiarizationConfig {
2611 /// Optional. The system automatically determines the number of speakers. This
2612 /// value is not currently used.
2613 pub min_speaker_count: i32,
2614
2615 /// Optional. The system automatically determines the number of speakers. This
2616 /// value is not currently used.
2617 pub max_speaker_count: i32,
2618
2619 pub(crate) _unknown_fields: serde_json::Map<std::string::String, serde_json::Value>,
2620}
2621
2622impl SpeakerDiarizationConfig {
2623 pub fn new() -> Self {
2624 std::default::Default::default()
2625 }
2626
2627 /// Sets the value of [min_speaker_count][crate::model::SpeakerDiarizationConfig::min_speaker_count].
2628 ///
2629 /// # Example
2630 /// ```ignore,no_run
2631 /// # use google_cloud_speech_v2::model::SpeakerDiarizationConfig;
2632 /// let x = SpeakerDiarizationConfig::new().set_min_speaker_count(42);
2633 /// ```
2634 pub fn set_min_speaker_count<T: std::convert::Into<i32>>(mut self, v: T) -> Self {
2635 self.min_speaker_count = v.into();
2636 self
2637 }
2638
2639 /// Sets the value of [max_speaker_count][crate::model::SpeakerDiarizationConfig::max_speaker_count].
2640 ///
2641 /// # Example
2642 /// ```ignore,no_run
2643 /// # use google_cloud_speech_v2::model::SpeakerDiarizationConfig;
2644 /// let x = SpeakerDiarizationConfig::new().set_max_speaker_count(42);
2645 /// ```
2646 pub fn set_max_speaker_count<T: std::convert::Into<i32>>(mut self, v: T) -> Self {
2647 self.max_speaker_count = v.into();
2648 self
2649 }
2650}
2651
2652impl wkt::message::Message for SpeakerDiarizationConfig {
2653 fn typename() -> &'static str {
2654 "type.googleapis.com/google.cloud.speech.v2.SpeakerDiarizationConfig"
2655 }
2656}
2657
2658/// Configuration to enable custom prompt in chirp3.
2659#[derive(Clone, Default, PartialEq)]
2660#[non_exhaustive]
2661pub struct CustomPromptConfig {
2662 /// Optional. The custom instructions to override the existing instructions for
2663 /// chirp3.
2664 pub custom_prompt: std::string::String,
2665
2666 pub(crate) _unknown_fields: serde_json::Map<std::string::String, serde_json::Value>,
2667}
2668
2669impl CustomPromptConfig {
2670 pub fn new() -> Self {
2671 std::default::Default::default()
2672 }
2673
2674 /// Sets the value of [custom_prompt][crate::model::CustomPromptConfig::custom_prompt].
2675 ///
2676 /// # Example
2677 /// ```ignore,no_run
2678 /// # use google_cloud_speech_v2::model::CustomPromptConfig;
2679 /// let x = CustomPromptConfig::new().set_custom_prompt("example");
2680 /// ```
2681 pub fn set_custom_prompt<T: std::convert::Into<std::string::String>>(mut self, v: T) -> Self {
2682 self.custom_prompt = v.into();
2683 self
2684 }
2685}
2686
2687impl wkt::message::Message for CustomPromptConfig {
2688 fn typename() -> &'static str {
2689 "type.googleapis.com/google.cloud.speech.v2.CustomPromptConfig"
2690 }
2691}
2692
2693/// Available recognition features.
2694#[derive(Clone, Default, PartialEq)]
2695#[non_exhaustive]
2696pub struct RecognitionFeatures {
2697 /// If set to `true`, the server will attempt to filter out profanities,
2698 /// replacing all but the initial character in each filtered word with
2699 /// asterisks, for instance, "f***". If set to `false` or omitted, profanities
2700 /// won't be filtered out.
2701 pub profanity_filter: bool,
2702
2703 /// If `true`, the top result includes a list of words and the start and end
2704 /// time offsets (timestamps) for those words. If `false`, no word-level time
2705 /// offset information is returned. The default is `false`.
2706 pub enable_word_time_offsets: bool,
2707
2708 /// If `true`, the top result includes a list of words and the confidence for
2709 /// those words. If `false`, no word-level confidence information is returned.
2710 /// The default is `false`.
2711 pub enable_word_confidence: bool,
2712
2713 /// If `true`, adds punctuation to recognition result hypotheses. This feature
2714 /// is only available in select languages. The default `false` value does not
2715 /// add punctuation to result hypotheses.
2716 pub enable_automatic_punctuation: bool,
2717
2718 /// The spoken punctuation behavior for the call. If `true`, replaces spoken
2719 /// punctuation with the corresponding symbols in the request. For example,
2720 /// "how are you question mark" becomes "how are you?". See
2721 /// <https://cloud.google.com/speech-to-text/docs/spoken-punctuation> for
2722 /// support. If `false`, spoken punctuation is not replaced.
2723 pub enable_spoken_punctuation: bool,
2724
2725 /// The spoken emoji behavior for the call. If `true`, adds spoken emoji
2726 /// formatting for the request. This will replace spoken emojis with the
2727 /// corresponding Unicode symbols in the final transcript. If `false`, spoken
2728 /// emojis are not replaced.
2729 pub enable_spoken_emojis: bool,
2730
2731 /// Mode for recognizing multi-channel audio.
2732 pub multi_channel_mode: crate::model::recognition_features::MultiChannelMode,
2733
2734 /// Configuration to enable speaker diarization. To enable diarization, set
2735 /// this field to an empty SpeakerDiarizationConfig message.
2736 pub diarization_config: std::option::Option<crate::model::SpeakerDiarizationConfig>,
2737
2738 /// Maximum number of recognition hypotheses to be returned.
2739 /// The server may return fewer than `max_alternatives`.
2740 /// Valid values are `0`-`30`. A value of `0` or `1` will return a maximum of
2741 /// one. If omitted, will return a maximum of one.
2742 pub max_alternatives: i32,
2743
2744 /// Optional. Configuration to enable custom prompt for chirp3.
2745 pub custom_prompt_config: std::option::Option<crate::model::CustomPromptConfig>,
2746
2747 pub(crate) _unknown_fields: serde_json::Map<std::string::String, serde_json::Value>,
2748}
2749
2750impl RecognitionFeatures {
2751 pub fn new() -> Self {
2752 std::default::Default::default()
2753 }
2754
2755 /// Sets the value of [profanity_filter][crate::model::RecognitionFeatures::profanity_filter].
2756 ///
2757 /// # Example
2758 /// ```ignore,no_run
2759 /// # use google_cloud_speech_v2::model::RecognitionFeatures;
2760 /// let x = RecognitionFeatures::new().set_profanity_filter(true);
2761 /// ```
2762 pub fn set_profanity_filter<T: std::convert::Into<bool>>(mut self, v: T) -> Self {
2763 self.profanity_filter = v.into();
2764 self
2765 }
2766
2767 /// Sets the value of [enable_word_time_offsets][crate::model::RecognitionFeatures::enable_word_time_offsets].
2768 ///
2769 /// # Example
2770 /// ```ignore,no_run
2771 /// # use google_cloud_speech_v2::model::RecognitionFeatures;
2772 /// let x = RecognitionFeatures::new().set_enable_word_time_offsets(true);
2773 /// ```
2774 pub fn set_enable_word_time_offsets<T: std::convert::Into<bool>>(mut self, v: T) -> Self {
2775 self.enable_word_time_offsets = v.into();
2776 self
2777 }
2778
2779 /// Sets the value of [enable_word_confidence][crate::model::RecognitionFeatures::enable_word_confidence].
2780 ///
2781 /// # Example
2782 /// ```ignore,no_run
2783 /// # use google_cloud_speech_v2::model::RecognitionFeatures;
2784 /// let x = RecognitionFeatures::new().set_enable_word_confidence(true);
2785 /// ```
2786 pub fn set_enable_word_confidence<T: std::convert::Into<bool>>(mut self, v: T) -> Self {
2787 self.enable_word_confidence = v.into();
2788 self
2789 }
2790
2791 /// Sets the value of [enable_automatic_punctuation][crate::model::RecognitionFeatures::enable_automatic_punctuation].
2792 ///
2793 /// # Example
2794 /// ```ignore,no_run
2795 /// # use google_cloud_speech_v2::model::RecognitionFeatures;
2796 /// let x = RecognitionFeatures::new().set_enable_automatic_punctuation(true);
2797 /// ```
2798 pub fn set_enable_automatic_punctuation<T: std::convert::Into<bool>>(mut self, v: T) -> Self {
2799 self.enable_automatic_punctuation = v.into();
2800 self
2801 }
2802
2803 /// Sets the value of [enable_spoken_punctuation][crate::model::RecognitionFeatures::enable_spoken_punctuation].
2804 ///
2805 /// # Example
2806 /// ```ignore,no_run
2807 /// # use google_cloud_speech_v2::model::RecognitionFeatures;
2808 /// let x = RecognitionFeatures::new().set_enable_spoken_punctuation(true);
2809 /// ```
2810 pub fn set_enable_spoken_punctuation<T: std::convert::Into<bool>>(mut self, v: T) -> Self {
2811 self.enable_spoken_punctuation = v.into();
2812 self
2813 }
2814
2815 /// Sets the value of [enable_spoken_emojis][crate::model::RecognitionFeatures::enable_spoken_emojis].
2816 ///
2817 /// # Example
2818 /// ```ignore,no_run
2819 /// # use google_cloud_speech_v2::model::RecognitionFeatures;
2820 /// let x = RecognitionFeatures::new().set_enable_spoken_emojis(true);
2821 /// ```
2822 pub fn set_enable_spoken_emojis<T: std::convert::Into<bool>>(mut self, v: T) -> Self {
2823 self.enable_spoken_emojis = v.into();
2824 self
2825 }
2826
2827 /// Sets the value of [multi_channel_mode][crate::model::RecognitionFeatures::multi_channel_mode].
2828 ///
2829 /// # Example
2830 /// ```ignore,no_run
2831 /// # use google_cloud_speech_v2::model::RecognitionFeatures;
2832 /// use google_cloud_speech_v2::model::recognition_features::MultiChannelMode;
2833 /// let x0 = RecognitionFeatures::new().set_multi_channel_mode(MultiChannelMode::SeparateRecognitionPerChannel);
2834 /// ```
2835 pub fn set_multi_channel_mode<
2836 T: std::convert::Into<crate::model::recognition_features::MultiChannelMode>,
2837 >(
2838 mut self,
2839 v: T,
2840 ) -> Self {
2841 self.multi_channel_mode = v.into();
2842 self
2843 }
2844
2845 /// Sets the value of [diarization_config][crate::model::RecognitionFeatures::diarization_config].
2846 ///
2847 /// # Example
2848 /// ```ignore,no_run
2849 /// # use google_cloud_speech_v2::model::RecognitionFeatures;
2850 /// use google_cloud_speech_v2::model::SpeakerDiarizationConfig;
2851 /// let x = RecognitionFeatures::new().set_diarization_config(SpeakerDiarizationConfig::default()/* use setters */);
2852 /// ```
2853 pub fn set_diarization_config<T>(mut self, v: T) -> Self
2854 where
2855 T: std::convert::Into<crate::model::SpeakerDiarizationConfig>,
2856 {
2857 self.diarization_config = std::option::Option::Some(v.into());
2858 self
2859 }
2860
2861 /// Sets or clears the value of [diarization_config][crate::model::RecognitionFeatures::diarization_config].
2862 ///
2863 /// # Example
2864 /// ```ignore,no_run
2865 /// # use google_cloud_speech_v2::model::RecognitionFeatures;
2866 /// use google_cloud_speech_v2::model::SpeakerDiarizationConfig;
2867 /// let x = RecognitionFeatures::new().set_or_clear_diarization_config(Some(SpeakerDiarizationConfig::default()/* use setters */));
2868 /// let x = RecognitionFeatures::new().set_or_clear_diarization_config(None::<SpeakerDiarizationConfig>);
2869 /// ```
2870 pub fn set_or_clear_diarization_config<T>(mut self, v: std::option::Option<T>) -> Self
2871 where
2872 T: std::convert::Into<crate::model::SpeakerDiarizationConfig>,
2873 {
2874 self.diarization_config = v.map(|x| x.into());
2875 self
2876 }
2877
2878 /// Sets the value of [max_alternatives][crate::model::RecognitionFeatures::max_alternatives].
2879 ///
2880 /// # Example
2881 /// ```ignore,no_run
2882 /// # use google_cloud_speech_v2::model::RecognitionFeatures;
2883 /// let x = RecognitionFeatures::new().set_max_alternatives(42);
2884 /// ```
2885 pub fn set_max_alternatives<T: std::convert::Into<i32>>(mut self, v: T) -> Self {
2886 self.max_alternatives = v.into();
2887 self
2888 }
2889
2890 /// Sets the value of [custom_prompt_config][crate::model::RecognitionFeatures::custom_prompt_config].
2891 ///
2892 /// # Example
2893 /// ```ignore,no_run
2894 /// # use google_cloud_speech_v2::model::RecognitionFeatures;
2895 /// use google_cloud_speech_v2::model::CustomPromptConfig;
2896 /// let x = RecognitionFeatures::new().set_custom_prompt_config(CustomPromptConfig::default()/* use setters */);
2897 /// ```
2898 pub fn set_custom_prompt_config<T>(mut self, v: T) -> Self
2899 where
2900 T: std::convert::Into<crate::model::CustomPromptConfig>,
2901 {
2902 self.custom_prompt_config = std::option::Option::Some(v.into());
2903 self
2904 }
2905
2906 /// Sets or clears the value of [custom_prompt_config][crate::model::RecognitionFeatures::custom_prompt_config].
2907 ///
2908 /// # Example
2909 /// ```ignore,no_run
2910 /// # use google_cloud_speech_v2::model::RecognitionFeatures;
2911 /// use google_cloud_speech_v2::model::CustomPromptConfig;
2912 /// let x = RecognitionFeatures::new().set_or_clear_custom_prompt_config(Some(CustomPromptConfig::default()/* use setters */));
2913 /// let x = RecognitionFeatures::new().set_or_clear_custom_prompt_config(None::<CustomPromptConfig>);
2914 /// ```
2915 pub fn set_or_clear_custom_prompt_config<T>(mut self, v: std::option::Option<T>) -> Self
2916 where
2917 T: std::convert::Into<crate::model::CustomPromptConfig>,
2918 {
2919 self.custom_prompt_config = v.map(|x| x.into());
2920 self
2921 }
2922}
2923
2924impl wkt::message::Message for RecognitionFeatures {
2925 fn typename() -> &'static str {
2926 "type.googleapis.com/google.cloud.speech.v2.RecognitionFeatures"
2927 }
2928}
2929
/// Defines additional types related to [RecognitionFeatures].
pub mod recognition_features {
    #[allow(unused_imports)]
    use super::*;

    /// Options for how to recognize multi-channel audio.
    ///
    /// # Working with unknown values
    ///
    /// This enum is defined as `#[non_exhaustive]` because Google Cloud may add
    /// additional enum variants at any time. Adding new variants is not considered
    /// a breaking change. Applications should write their code in anticipation of:
    ///
    /// - New values appearing in future releases of the client library, **and**
    /// - New values received dynamically, without application changes.
    ///
    /// Please consult the [Working with enums] section in the user guide for some
    /// guidelines.
    ///
    /// [Working with enums]: https://google-cloud-rust.github.io/working_with_enums.html
    #[derive(Clone, Debug, PartialEq)]
    #[non_exhaustive]
    pub enum MultiChannelMode {
        /// Default value for the multi-channel mode. If the audio contains
        /// multiple channels, only the first channel will be transcribed; other
        /// channels will be ignored.
        Unspecified,
        /// If selected, each channel in the provided audio is transcribed
        /// independently. This cannot be selected if the selected
        /// [model][google.cloud.speech.v2.Recognizer.model] is `latest_short`.
        ///
        /// [google.cloud.speech.v2.Recognizer.model]: crate::model::Recognizer::model
        SeparateRecognitionPerChannel,
        /// If set, the enum was initialized with an unknown value.
        ///
        /// Applications can examine the value using [MultiChannelMode::value] or
        /// [MultiChannelMode::name].
        UnknownValue(multi_channel_mode::UnknownValue),
    }

    #[doc(hidden)]
    pub mod multi_channel_mode {
        #[allow(unused_imports)]
        use super::*;
        // Wrapper around an enum value — integer or string form — that this
        // version of the client library does not recognize.
        #[derive(Clone, Debug, PartialEq)]
        pub struct UnknownValue(pub(crate) wkt::internal::UnknownEnumValue);
    }

    impl MultiChannelMode {
        /// Gets the enum value.
        ///
        /// Returns `None` if the enum contains an unknown value deserialized from
        /// the string representation of enums.
        pub fn value(&self) -> std::option::Option<i32> {
            match self {
                Self::Unspecified => std::option::Option::Some(0),
                Self::SeparateRecognitionPerChannel => std::option::Option::Some(1),
                Self::UnknownValue(u) => u.0.value(),
            }
        }

        /// Gets the enum value as a string.
        ///
        /// Returns `None` if the enum contains an unknown value deserialized from
        /// the integer representation of enums.
        pub fn name(&self) -> std::option::Option<&str> {
            match self {
                Self::Unspecified => std::option::Option::Some("MULTI_CHANNEL_MODE_UNSPECIFIED"),
                Self::SeparateRecognitionPerChannel => {
                    std::option::Option::Some("SEPARATE_RECOGNITION_PER_CHANNEL")
                }
                Self::UnknownValue(u) => u.0.name(),
            }
        }
    }

    // Defaults to the proto zero value: `from(0)` yields `Unspecified`.
    impl std::default::Default for MultiChannelMode {
        fn default() -> Self {
            use std::convert::From;
            Self::from(0)
        }
    }

    // Human-readable form; delegates to `display_enum` with the enum's name
    // and numeric value (either may be `None` for unknown values).
    impl std::fmt::Display for MultiChannelMode {
        fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::result::Result<(), std::fmt::Error> {
            wkt::internal::display_enum(f, self.name(), self.value())
        }
    }

    // Converts a protobuf enum number into a variant. Out-of-range integers
    // are preserved in `UnknownValue` so they round-trip unchanged.
    impl std::convert::From<i32> for MultiChannelMode {
        fn from(value: i32) -> Self {
            match value {
                0 => Self::Unspecified,
                1 => Self::SeparateRecognitionPerChannel,
                _ => Self::UnknownValue(multi_channel_mode::UnknownValue(
                    wkt::internal::UnknownEnumValue::Integer(value),
                )),
            }
        }
    }

    // Converts a protobuf enum name into a variant. Unrecognized names are
    // preserved in `UnknownValue` so they round-trip unchanged.
    impl std::convert::From<&str> for MultiChannelMode {
        fn from(value: &str) -> Self {
            use std::string::ToString;
            match value {
                "MULTI_CHANNEL_MODE_UNSPECIFIED" => Self::Unspecified,
                "SEPARATE_RECOGNITION_PER_CHANNEL" => Self::SeparateRecognitionPerChannel,
                _ => Self::UnknownValue(multi_channel_mode::UnknownValue(
                    wkt::internal::UnknownEnumValue::String(value.to_string()),
                )),
            }
        }
    }

    // Known variants serialize as their protobuf enum number; unknown values
    // serialize in whichever form (integer or string) they were received.
    impl serde::ser::Serialize for MultiChannelMode {
        fn serialize<S>(&self, serializer: S) -> std::result::Result<S::Ok, S::Error>
        where
            S: serde::Serializer,
        {
            match self {
                Self::Unspecified => serializer.serialize_i32(0),
                Self::SeparateRecognitionPerChannel => serializer.serialize_i32(1),
                Self::UnknownValue(u) => u.0.serialize(serializer),
            }
        }
    }

    // Deserializes via `deserialize_any`, delegating to `EnumVisitor`; the
    // string passed is the fully qualified proto enum name.
    impl<'de> serde::de::Deserialize<'de> for MultiChannelMode {
        fn deserialize<D>(deserializer: D) -> std::result::Result<Self, D::Error>
        where
            D: serde::Deserializer<'de>,
        {
            deserializer.deserialize_any(wkt::internal::EnumVisitor::<MultiChannelMode>::new(
                ".google.cloud.speech.v2.RecognitionFeatures.MultiChannelMode",
            ))
        }
    }
}
3068
3069/// Transcription normalization configuration. Use transcription normalization
3070/// to automatically replace parts of the transcript with phrases of your
3071/// choosing. For StreamingRecognize, this normalization only applies to stable
3072/// partial transcripts (stability > 0.8) and final transcripts.
3073#[derive(Clone, Default, PartialEq)]
3074#[non_exhaustive]
3075pub struct TranscriptNormalization {
3076 /// A list of replacement entries. We will perform replacement with one entry
3077 /// at a time. For example, the second entry in ["cat" => "dog", "mountain cat"
3078 /// => "mountain dog"] will never be applied because we will always process the
3079 /// first entry before it. At most 100 entries.
3080 pub entries: std::vec::Vec<crate::model::transcript_normalization::Entry>,
3081
3082 pub(crate) _unknown_fields: serde_json::Map<std::string::String, serde_json::Value>,
3083}
3084
3085impl TranscriptNormalization {
3086 pub fn new() -> Self {
3087 std::default::Default::default()
3088 }
3089
3090 /// Sets the value of [entries][crate::model::TranscriptNormalization::entries].
3091 ///
3092 /// # Example
3093 /// ```ignore,no_run
3094 /// # use google_cloud_speech_v2::model::TranscriptNormalization;
3095 /// use google_cloud_speech_v2::model::transcript_normalization::Entry;
3096 /// let x = TranscriptNormalization::new()
3097 /// .set_entries([
3098 /// Entry::default()/* use setters */,
3099 /// Entry::default()/* use (different) setters */,
3100 /// ]);
3101 /// ```
3102 pub fn set_entries<T, V>(mut self, v: T) -> Self
3103 where
3104 T: std::iter::IntoIterator<Item = V>,
3105 V: std::convert::Into<crate::model::transcript_normalization::Entry>,
3106 {
3107 use std::iter::Iterator;
3108 self.entries = v.into_iter().map(|i| i.into()).collect();
3109 self
3110 }
3111}
3112
3113impl wkt::message::Message for TranscriptNormalization {
3114 fn typename() -> &'static str {
3115 "type.googleapis.com/google.cloud.speech.v2.TranscriptNormalization"
3116 }
3117}
3118
3119/// Defines additional types related to [TranscriptNormalization].
3120pub mod transcript_normalization {
3121 #[allow(unused_imports)]
3122 use super::*;
3123
3124 /// A single replacement configuration.
3125 #[derive(Clone, Default, PartialEq)]
3126 #[non_exhaustive]
3127 pub struct Entry {
3128 /// What to replace. Max length is 100 characters.
3129 pub search: std::string::String,
3130
3131 /// What to replace with. Max length is 100 characters.
3132 pub replace: std::string::String,
3133
3134 /// Whether the search is case sensitive.
3135 pub case_sensitive: bool,
3136
3137 pub(crate) _unknown_fields: serde_json::Map<std::string::String, serde_json::Value>,
3138 }
3139
3140 impl Entry {
3141 pub fn new() -> Self {
3142 std::default::Default::default()
3143 }
3144
3145 /// Sets the value of [search][crate::model::transcript_normalization::Entry::search].
3146 ///
3147 /// # Example
3148 /// ```ignore,no_run
3149 /// # use google_cloud_speech_v2::model::transcript_normalization::Entry;
3150 /// let x = Entry::new().set_search("example");
3151 /// ```
3152 pub fn set_search<T: std::convert::Into<std::string::String>>(mut self, v: T) -> Self {
3153 self.search = v.into();
3154 self
3155 }
3156
3157 /// Sets the value of [replace][crate::model::transcript_normalization::Entry::replace].
3158 ///
3159 /// # Example
3160 /// ```ignore,no_run
3161 /// # use google_cloud_speech_v2::model::transcript_normalization::Entry;
3162 /// let x = Entry::new().set_replace("example");
3163 /// ```
3164 pub fn set_replace<T: std::convert::Into<std::string::String>>(mut self, v: T) -> Self {
3165 self.replace = v.into();
3166 self
3167 }
3168
3169 /// Sets the value of [case_sensitive][crate::model::transcript_normalization::Entry::case_sensitive].
3170 ///
3171 /// # Example
3172 /// ```ignore,no_run
3173 /// # use google_cloud_speech_v2::model::transcript_normalization::Entry;
3174 /// let x = Entry::new().set_case_sensitive(true);
3175 /// ```
3176 pub fn set_case_sensitive<T: std::convert::Into<bool>>(mut self, v: T) -> Self {
3177 self.case_sensitive = v.into();
3178 self
3179 }
3180 }
3181
3182 impl wkt::message::Message for Entry {
3183 fn typename() -> &'static str {
3184 "type.googleapis.com/google.cloud.speech.v2.TranscriptNormalization.Entry"
3185 }
3186 }
3187}
3188
3189/// Translation configuration. Use to translate the given audio into text for the
3190/// desired language.
3191#[derive(Clone, Default, PartialEq)]
3192#[non_exhaustive]
3193pub struct TranslationConfig {
3194 /// Required. The language code to translate to.
3195 pub target_language: std::string::String,
3196
3197 pub(crate) _unknown_fields: serde_json::Map<std::string::String, serde_json::Value>,
3198}
3199
3200impl TranslationConfig {
3201 pub fn new() -> Self {
3202 std::default::Default::default()
3203 }
3204
3205 /// Sets the value of [target_language][crate::model::TranslationConfig::target_language].
3206 ///
3207 /// # Example
3208 /// ```ignore,no_run
3209 /// # use google_cloud_speech_v2::model::TranslationConfig;
3210 /// let x = TranslationConfig::new().set_target_language("example");
3211 /// ```
3212 pub fn set_target_language<T: std::convert::Into<std::string::String>>(mut self, v: T) -> Self {
3213 self.target_language = v.into();
3214 self
3215 }
3216}
3217
3218impl wkt::message::Message for TranslationConfig {
3219 fn typename() -> &'static str {
3220 "type.googleapis.com/google.cloud.speech.v2.TranslationConfig"
3221 }
3222}
3223
3224/// Provides "hints" to the speech recognizer to favor specific words and phrases
3225/// in the results. PhraseSets can be specified as an inline resource, or a
3226/// reference to an existing PhraseSet resource.
3227#[derive(Clone, Default, PartialEq)]
3228#[non_exhaustive]
3229pub struct SpeechAdaptation {
3230 /// A list of inline or referenced PhraseSets.
3231 pub phrase_sets: std::vec::Vec<crate::model::speech_adaptation::AdaptationPhraseSet>,
3232
3233 /// A list of inline CustomClasses. Existing CustomClass resources can be
3234 /// referenced directly in a PhraseSet.
3235 pub custom_classes: std::vec::Vec<crate::model::CustomClass>,
3236
3237 pub(crate) _unknown_fields: serde_json::Map<std::string::String, serde_json::Value>,
3238}
3239
3240impl SpeechAdaptation {
3241 pub fn new() -> Self {
3242 std::default::Default::default()
3243 }
3244
3245 /// Sets the value of [phrase_sets][crate::model::SpeechAdaptation::phrase_sets].
3246 ///
3247 /// # Example
3248 /// ```ignore,no_run
3249 /// # use google_cloud_speech_v2::model::SpeechAdaptation;
3250 /// use google_cloud_speech_v2::model::speech_adaptation::AdaptationPhraseSet;
3251 /// let x = SpeechAdaptation::new()
3252 /// .set_phrase_sets([
3253 /// AdaptationPhraseSet::default()/* use setters */,
3254 /// AdaptationPhraseSet::default()/* use (different) setters */,
3255 /// ]);
3256 /// ```
3257 pub fn set_phrase_sets<T, V>(mut self, v: T) -> Self
3258 where
3259 T: std::iter::IntoIterator<Item = V>,
3260 V: std::convert::Into<crate::model::speech_adaptation::AdaptationPhraseSet>,
3261 {
3262 use std::iter::Iterator;
3263 self.phrase_sets = v.into_iter().map(|i| i.into()).collect();
3264 self
3265 }
3266
3267 /// Sets the value of [custom_classes][crate::model::SpeechAdaptation::custom_classes].
3268 ///
3269 /// # Example
3270 /// ```ignore,no_run
3271 /// # use google_cloud_speech_v2::model::SpeechAdaptation;
3272 /// use google_cloud_speech_v2::model::CustomClass;
3273 /// let x = SpeechAdaptation::new()
3274 /// .set_custom_classes([
3275 /// CustomClass::default()/* use setters */,
3276 /// CustomClass::default()/* use (different) setters */,
3277 /// ]);
3278 /// ```
3279 pub fn set_custom_classes<T, V>(mut self, v: T) -> Self
3280 where
3281 T: std::iter::IntoIterator<Item = V>,
3282 V: std::convert::Into<crate::model::CustomClass>,
3283 {
3284 use std::iter::Iterator;
3285 self.custom_classes = v.into_iter().map(|i| i.into()).collect();
3286 self
3287 }
3288}
3289
3290impl wkt::message::Message for SpeechAdaptation {
3291 fn typename() -> &'static str {
3292 "type.googleapis.com/google.cloud.speech.v2.SpeechAdaptation"
3293 }
3294}
3295
/// Defines additional types related to [SpeechAdaptation].
pub mod speech_adaptation {
    #[allow(unused_imports)]
    use super::*;

    /// A biasing PhraseSet, which can be either a string referencing the name of
    /// an existing PhraseSets resource, or an inline definition of a PhraseSet.
    #[derive(Clone, Default, PartialEq)]
    #[non_exhaustive]
    pub struct AdaptationPhraseSet {
        /// The phrase set, held as either a resource-name reference or an
        /// inline definition (see [adaptation_phrase_set::Value]). `None` when
        /// neither branch has been set.
        pub value:
            std::option::Option<crate::model::speech_adaptation::adaptation_phrase_set::Value>,

        // NOTE(review): presumably retains JSON properties that do not map to a
        // known field so they survive a round trip — confirm against the
        // `serialize`/`deserialize` modules.
        pub(crate) _unknown_fields: serde_json::Map<std::string::String, serde_json::Value>,
    }

    impl AdaptationPhraseSet {
        /// Creates a new, default-initialized [AdaptationPhraseSet].
        pub fn new() -> Self {
            std::default::Default::default()
        }

        /// Sets the value of [value][crate::model::speech_adaptation::AdaptationPhraseSet::value].
        ///
        /// Note that all the setters affecting `value` are mutually
        /// exclusive.
        ///
        /// # Example
        /// ```ignore,no_run
        /// # use google_cloud_speech_v2::model::speech_adaptation::AdaptationPhraseSet;
        /// use google_cloud_speech_v2::model::speech_adaptation::adaptation_phrase_set::Value;
        /// let x = AdaptationPhraseSet::new().set_value(Some(Value::PhraseSet("example".to_string())));
        /// ```
        pub fn set_value<
            T: std::convert::Into<
                std::option::Option<
                    crate::model::speech_adaptation::adaptation_phrase_set::Value,
                >,
            >,
        >(
            mut self,
            v: T,
        ) -> Self {
            self.value = v.into();
            self
        }

        /// The value of [value][crate::model::speech_adaptation::AdaptationPhraseSet::value]
        /// if it holds a `PhraseSet`, `None` if the field is not set or
        /// holds a different branch.
        pub fn phrase_set(&self) -> std::option::Option<&std::string::String> {
            // The wildcard arm catches any other branch of the oneof.
            #[allow(unreachable_patterns)]
            self.value.as_ref().and_then(|v| match v {
                crate::model::speech_adaptation::adaptation_phrase_set::Value::PhraseSet(v) => {
                    std::option::Option::Some(v)
                }
                _ => std::option::Option::None,
            })
        }

        /// Sets the value of [value][crate::model::speech_adaptation::AdaptationPhraseSet::value]
        /// to hold a `PhraseSet`.
        ///
        /// Note that all the setters affecting `value` are
        /// mutually exclusive.
        ///
        /// # Example
        /// ```ignore,no_run
        /// # use google_cloud_speech_v2::model::speech_adaptation::AdaptationPhraseSet;
        /// let x = AdaptationPhraseSet::new().set_phrase_set("example");
        /// assert!(x.phrase_set().is_some());
        /// assert!(x.inline_phrase_set().is_none());
        /// ```
        pub fn set_phrase_set<T: std::convert::Into<std::string::String>>(mut self, v: T) -> Self {
            self.value = std::option::Option::Some(
                crate::model::speech_adaptation::adaptation_phrase_set::Value::PhraseSet(v.into()),
            );
            self
        }

        /// The value of [value][crate::model::speech_adaptation::AdaptationPhraseSet::value]
        /// if it holds a `InlinePhraseSet`, `None` if the field is not set or
        /// holds a different branch.
        pub fn inline_phrase_set(
            &self,
        ) -> std::option::Option<&std::boxed::Box<crate::model::PhraseSet>> {
            #[allow(unreachable_patterns)]
            self.value.as_ref().and_then(|v| match v {
                crate::model::speech_adaptation::adaptation_phrase_set::Value::InlinePhraseSet(
                    v,
                ) => std::option::Option::Some(v),
                _ => std::option::Option::None,
            })
        }

        /// Sets the value of [value][crate::model::speech_adaptation::AdaptationPhraseSet::value]
        /// to hold a `InlinePhraseSet`.
        ///
        /// Note that all the setters affecting `value` are
        /// mutually exclusive.
        ///
        /// # Example
        /// ```ignore,no_run
        /// # use google_cloud_speech_v2::model::speech_adaptation::AdaptationPhraseSet;
        /// use google_cloud_speech_v2::model::PhraseSet;
        /// let x = AdaptationPhraseSet::new().set_inline_phrase_set(PhraseSet::default()/* use setters */);
        /// assert!(x.inline_phrase_set().is_some());
        /// assert!(x.phrase_set().is_none());
        /// ```
        pub fn set_inline_phrase_set<
            T: std::convert::Into<std::boxed::Box<crate::model::PhraseSet>>,
        >(
            mut self,
            v: T,
        ) -> Self {
            self.value = std::option::Option::Some(
                crate::model::speech_adaptation::adaptation_phrase_set::Value::InlinePhraseSet(
                    v.into(),
                ),
            );
            self
        }
    }

    // The returned string is this message's protobuf type URL.
    impl wkt::message::Message for AdaptationPhraseSet {
        fn typename() -> &'static str {
            "type.googleapis.com/google.cloud.speech.v2.SpeechAdaptation.AdaptationPhraseSet"
        }
    }

    /// Defines additional types related to [AdaptationPhraseSet].
    pub mod adaptation_phrase_set {
        #[allow(unused_imports)]
        use super::*;

        /// The choice between referencing an existing PhraseSet resource by
        /// name and defining one inline. Exactly one branch is held at a time.
        #[derive(Clone, Debug, PartialEq)]
        #[non_exhaustive]
        pub enum Value {
            /// The name of an existing PhraseSet resource. The user must have read
            /// access to the resource and it must not be deleted.
            PhraseSet(std::string::String),
            /// An inline defined PhraseSet.
            InlinePhraseSet(std::boxed::Box<crate::model::PhraseSet>),
        }
    }
}
3441
/// Denoiser config. May not be supported for all models and may
/// have no effect.
#[derive(Clone, Default, PartialEq)]
#[non_exhaustive]
pub struct DenoiserConfig {
    /// Denoise audio before sending to the transcription model.
    pub denoise_audio: bool,

    /// Signal-to-Noise Ratio (SNR) threshold for the denoiser. Here SNR means the
    /// loudness of the speech signal. Audio with an SNR below this threshold,
    /// meaning the speech is too quiet, will be prevented from being sent to the
    /// transcription model.
    ///
    /// If snr_threshold=0, no filtering will be applied.
    pub snr_threshold: f32,

    // NOTE(review): presumably retains JSON properties that do not map to a
    // known field — confirm against the `serialize`/`deserialize` modules.
    pub(crate) _unknown_fields: serde_json::Map<std::string::String, serde_json::Value>,
}
3460
3461impl DenoiserConfig {
3462 pub fn new() -> Self {
3463 std::default::Default::default()
3464 }
3465
3466 /// Sets the value of [denoise_audio][crate::model::DenoiserConfig::denoise_audio].
3467 ///
3468 /// # Example
3469 /// ```ignore,no_run
3470 /// # use google_cloud_speech_v2::model::DenoiserConfig;
3471 /// let x = DenoiserConfig::new().set_denoise_audio(true);
3472 /// ```
3473 pub fn set_denoise_audio<T: std::convert::Into<bool>>(mut self, v: T) -> Self {
3474 self.denoise_audio = v.into();
3475 self
3476 }
3477
3478 /// Sets the value of [snr_threshold][crate::model::DenoiserConfig::snr_threshold].
3479 ///
3480 /// # Example
3481 /// ```ignore,no_run
3482 /// # use google_cloud_speech_v2::model::DenoiserConfig;
3483 /// let x = DenoiserConfig::new().set_snr_threshold(42.0);
3484 /// ```
3485 pub fn set_snr_threshold<T: std::convert::Into<f32>>(mut self, v: T) -> Self {
3486 self.snr_threshold = v.into();
3487 self
3488 }
3489}
3490
// The returned string is this message's protobuf type URL
// (`type.googleapis.com/...`), as used by `Any`-style dynamic typing.
impl wkt::message::Message for DenoiserConfig {
    fn typename() -> &'static str {
        "type.googleapis.com/google.cloud.speech.v2.DenoiserConfig"
    }
}
3496
/// Provides information to the Recognizer that specifies how to process the
/// recognition request.
#[derive(Clone, Default, PartialEq)]
#[non_exhaustive]
pub struct RecognitionConfig {
    /// Optional. Which model to use for recognition requests. Select the model
    /// best suited to your domain to get best results.
    ///
    /// Guidance for choosing which model to use can be found in the [Transcription
    /// Models
    /// Documentation](https://cloud.google.com/speech-to-text/v2/docs/transcription-model)
    /// and the models supported in each region can be found in the [Table Of
    /// Supported
    /// Models](https://cloud.google.com/speech-to-text/v2/docs/speech-to-text-supported-languages).
    pub model: std::string::String,

    /// Optional. The language of the supplied audio as a
    /// [BCP-47](https://www.rfc-editor.org/rfc/bcp/bcp47.txt) language tag.
    /// Language tags are normalized to BCP-47 before they are used eg "en-us"
    /// becomes "en-US".
    ///
    /// Supported languages for each model are listed in the [Table of Supported
    /// Models](https://cloud.google.com/speech-to-text/v2/docs/speech-to-text-supported-languages).
    ///
    /// If additional languages are provided, recognition result will contain
    /// recognition in the most likely language detected. The recognition result
    /// will include the language tag of the language detected in the audio.
    pub language_codes: std::vec::Vec<std::string::String>,

    /// Speech recognition features to enable.
    pub features: std::option::Option<crate::model::RecognitionFeatures>,

    /// Speech adaptation context that weights recognizer predictions for specific
    /// words and phrases.
    pub adaptation: std::option::Option<crate::model::SpeechAdaptation>,

    /// Optional. Use transcription normalization to automatically replace parts of
    /// the transcript with phrases of your choosing. For StreamingRecognize, this
    /// normalization only applies to stable partial transcripts (stability > 0.8)
    /// and final transcripts.
    pub transcript_normalization: std::option::Option<crate::model::TranscriptNormalization>,

    /// Optional. Optional configuration used to automatically run translation on
    /// the given audio to the desired language for supported models.
    pub translation_config: std::option::Option<crate::model::TranslationConfig>,

    /// Optional. Optional denoiser config. May not be supported for all models
    /// and may have no effect.
    pub denoiser_config: std::option::Option<crate::model::DenoiserConfig>,

    /// Decoding parameters for audio being sent for recognition.
    pub decoding_config: std::option::Option<crate::model::recognition_config::DecodingConfig>,

    // NOTE(review): presumably retains JSON properties that do not map to a
    // known field — confirm against the `serialize`/`deserialize` modules.
    pub(crate) _unknown_fields: serde_json::Map<std::string::String, serde_json::Value>,
}
3552
// Builder-style accessors: every `set_*` / `set_or_clear_*` method consumes
// `self` and returns it, so calls can be chained when assembling a config.
impl RecognitionConfig {
    /// Creates a new, default-initialized [RecognitionConfig].
    pub fn new() -> Self {
        std::default::Default::default()
    }

    /// Sets the value of [model][crate::model::RecognitionConfig::model].
    ///
    /// # Example
    /// ```ignore,no_run
    /// # use google_cloud_speech_v2::model::RecognitionConfig;
    /// let x = RecognitionConfig::new().set_model("example");
    /// ```
    pub fn set_model<T: std::convert::Into<std::string::String>>(mut self, v: T) -> Self {
        self.model = v.into();
        self
    }

    /// Sets the value of [language_codes][crate::model::RecognitionConfig::language_codes].
    ///
    /// # Example
    /// ```ignore,no_run
    /// # use google_cloud_speech_v2::model::RecognitionConfig;
    /// let x = RecognitionConfig::new().set_language_codes(["a", "b", "c"]);
    /// ```
    pub fn set_language_codes<T, V>(mut self, v: T) -> Self
    where
        T: std::iter::IntoIterator<Item = V>,
        V: std::convert::Into<std::string::String>,
    {
        use std::iter::Iterator;
        self.language_codes = v.into_iter().map(|i| i.into()).collect();
        self
    }

    /// Sets the value of [features][crate::model::RecognitionConfig::features].
    ///
    /// # Example
    /// ```ignore,no_run
    /// # use google_cloud_speech_v2::model::RecognitionConfig;
    /// use google_cloud_speech_v2::model::RecognitionFeatures;
    /// let x = RecognitionConfig::new().set_features(RecognitionFeatures::default()/* use setters */);
    /// ```
    pub fn set_features<T>(mut self, v: T) -> Self
    where
        T: std::convert::Into<crate::model::RecognitionFeatures>,
    {
        self.features = std::option::Option::Some(v.into());
        self
    }

    /// Sets or clears the value of [features][crate::model::RecognitionConfig::features].
    ///
    /// # Example
    /// ```ignore,no_run
    /// # use google_cloud_speech_v2::model::RecognitionConfig;
    /// use google_cloud_speech_v2::model::RecognitionFeatures;
    /// let x = RecognitionConfig::new().set_or_clear_features(Some(RecognitionFeatures::default()/* use setters */));
    /// let x = RecognitionConfig::new().set_or_clear_features(None::<RecognitionFeatures>);
    /// ```
    pub fn set_or_clear_features<T>(mut self, v: std::option::Option<T>) -> Self
    where
        T: std::convert::Into<crate::model::RecognitionFeatures>,
    {
        self.features = v.map(|x| x.into());
        self
    }

    /// Sets the value of [adaptation][crate::model::RecognitionConfig::adaptation].
    ///
    /// # Example
    /// ```ignore,no_run
    /// # use google_cloud_speech_v2::model::RecognitionConfig;
    /// use google_cloud_speech_v2::model::SpeechAdaptation;
    /// let x = RecognitionConfig::new().set_adaptation(SpeechAdaptation::default()/* use setters */);
    /// ```
    pub fn set_adaptation<T>(mut self, v: T) -> Self
    where
        T: std::convert::Into<crate::model::SpeechAdaptation>,
    {
        self.adaptation = std::option::Option::Some(v.into());
        self
    }

    /// Sets or clears the value of [adaptation][crate::model::RecognitionConfig::adaptation].
    ///
    /// # Example
    /// ```ignore,no_run
    /// # use google_cloud_speech_v2::model::RecognitionConfig;
    /// use google_cloud_speech_v2::model::SpeechAdaptation;
    /// let x = RecognitionConfig::new().set_or_clear_adaptation(Some(SpeechAdaptation::default()/* use setters */));
    /// let x = RecognitionConfig::new().set_or_clear_adaptation(None::<SpeechAdaptation>);
    /// ```
    pub fn set_or_clear_adaptation<T>(mut self, v: std::option::Option<T>) -> Self
    where
        T: std::convert::Into<crate::model::SpeechAdaptation>,
    {
        self.adaptation = v.map(|x| x.into());
        self
    }

    /// Sets the value of [transcript_normalization][crate::model::RecognitionConfig::transcript_normalization].
    ///
    /// # Example
    /// ```ignore,no_run
    /// # use google_cloud_speech_v2::model::RecognitionConfig;
    /// use google_cloud_speech_v2::model::TranscriptNormalization;
    /// let x = RecognitionConfig::new().set_transcript_normalization(TranscriptNormalization::default()/* use setters */);
    /// ```
    pub fn set_transcript_normalization<T>(mut self, v: T) -> Self
    where
        T: std::convert::Into<crate::model::TranscriptNormalization>,
    {
        self.transcript_normalization = std::option::Option::Some(v.into());
        self
    }

    /// Sets or clears the value of [transcript_normalization][crate::model::RecognitionConfig::transcript_normalization].
    ///
    /// # Example
    /// ```ignore,no_run
    /// # use google_cloud_speech_v2::model::RecognitionConfig;
    /// use google_cloud_speech_v2::model::TranscriptNormalization;
    /// let x = RecognitionConfig::new().set_or_clear_transcript_normalization(Some(TranscriptNormalization::default()/* use setters */));
    /// let x = RecognitionConfig::new().set_or_clear_transcript_normalization(None::<TranscriptNormalization>);
    /// ```
    pub fn set_or_clear_transcript_normalization<T>(mut self, v: std::option::Option<T>) -> Self
    where
        T: std::convert::Into<crate::model::TranscriptNormalization>,
    {
        self.transcript_normalization = v.map(|x| x.into());
        self
    }

    /// Sets the value of [translation_config][crate::model::RecognitionConfig::translation_config].
    ///
    /// # Example
    /// ```ignore,no_run
    /// # use google_cloud_speech_v2::model::RecognitionConfig;
    /// use google_cloud_speech_v2::model::TranslationConfig;
    /// let x = RecognitionConfig::new().set_translation_config(TranslationConfig::default()/* use setters */);
    /// ```
    pub fn set_translation_config<T>(mut self, v: T) -> Self
    where
        T: std::convert::Into<crate::model::TranslationConfig>,
    {
        self.translation_config = std::option::Option::Some(v.into());
        self
    }

    /// Sets or clears the value of [translation_config][crate::model::RecognitionConfig::translation_config].
    ///
    /// # Example
    /// ```ignore,no_run
    /// # use google_cloud_speech_v2::model::RecognitionConfig;
    /// use google_cloud_speech_v2::model::TranslationConfig;
    /// let x = RecognitionConfig::new().set_or_clear_translation_config(Some(TranslationConfig::default()/* use setters */));
    /// let x = RecognitionConfig::new().set_or_clear_translation_config(None::<TranslationConfig>);
    /// ```
    pub fn set_or_clear_translation_config<T>(mut self, v: std::option::Option<T>) -> Self
    where
        T: std::convert::Into<crate::model::TranslationConfig>,
    {
        self.translation_config = v.map(|x| x.into());
        self
    }

    /// Sets the value of [denoiser_config][crate::model::RecognitionConfig::denoiser_config].
    ///
    /// # Example
    /// ```ignore,no_run
    /// # use google_cloud_speech_v2::model::RecognitionConfig;
    /// use google_cloud_speech_v2::model::DenoiserConfig;
    /// let x = RecognitionConfig::new().set_denoiser_config(DenoiserConfig::default()/* use setters */);
    /// ```
    pub fn set_denoiser_config<T>(mut self, v: T) -> Self
    where
        T: std::convert::Into<crate::model::DenoiserConfig>,
    {
        self.denoiser_config = std::option::Option::Some(v.into());
        self
    }

    /// Sets or clears the value of [denoiser_config][crate::model::RecognitionConfig::denoiser_config].
    ///
    /// # Example
    /// ```ignore,no_run
    /// # use google_cloud_speech_v2::model::RecognitionConfig;
    /// use google_cloud_speech_v2::model::DenoiserConfig;
    /// let x = RecognitionConfig::new().set_or_clear_denoiser_config(Some(DenoiserConfig::default()/* use setters */));
    /// let x = RecognitionConfig::new().set_or_clear_denoiser_config(None::<DenoiserConfig>);
    /// ```
    pub fn set_or_clear_denoiser_config<T>(mut self, v: std::option::Option<T>) -> Self
    where
        T: std::convert::Into<crate::model::DenoiserConfig>,
    {
        self.denoiser_config = v.map(|x| x.into());
        self
    }

    /// Sets the value of [decoding_config][crate::model::RecognitionConfig::decoding_config].
    ///
    /// Note that all the setters affecting `decoding_config` are mutually
    /// exclusive.
    ///
    /// # Example
    /// ```ignore,no_run
    /// # use google_cloud_speech_v2::model::RecognitionConfig;
    /// use google_cloud_speech_v2::model::AutoDetectDecodingConfig;
    /// let x = RecognitionConfig::new().set_decoding_config(Some(
    ///     google_cloud_speech_v2::model::recognition_config::DecodingConfig::AutoDecodingConfig(AutoDetectDecodingConfig::default().into())));
    /// ```
    pub fn set_decoding_config<
        T: std::convert::Into<std::option::Option<crate::model::recognition_config::DecodingConfig>>,
    >(
        mut self,
        v: T,
    ) -> Self {
        self.decoding_config = v.into();
        self
    }

    /// The value of [decoding_config][crate::model::RecognitionConfig::decoding_config]
    /// if it holds a `AutoDecodingConfig`, `None` if the field is not set or
    /// holds a different branch.
    pub fn auto_decoding_config(
        &self,
    ) -> std::option::Option<&std::boxed::Box<crate::model::AutoDetectDecodingConfig>> {
        // The wildcard arm catches any other branch of the oneof.
        #[allow(unreachable_patterns)]
        self.decoding_config.as_ref().and_then(|v| match v {
            crate::model::recognition_config::DecodingConfig::AutoDecodingConfig(v) => {
                std::option::Option::Some(v)
            }
            _ => std::option::Option::None,
        })
    }

    /// Sets the value of [decoding_config][crate::model::RecognitionConfig::decoding_config]
    /// to hold a `AutoDecodingConfig`.
    ///
    /// Note that all the setters affecting `decoding_config` are
    /// mutually exclusive.
    ///
    /// # Example
    /// ```ignore,no_run
    /// # use google_cloud_speech_v2::model::RecognitionConfig;
    /// use google_cloud_speech_v2::model::AutoDetectDecodingConfig;
    /// let x = RecognitionConfig::new().set_auto_decoding_config(AutoDetectDecodingConfig::default()/* use setters */);
    /// assert!(x.auto_decoding_config().is_some());
    /// assert!(x.explicit_decoding_config().is_none());
    /// ```
    pub fn set_auto_decoding_config<
        T: std::convert::Into<std::boxed::Box<crate::model::AutoDetectDecodingConfig>>,
    >(
        mut self,
        v: T,
    ) -> Self {
        self.decoding_config = std::option::Option::Some(
            crate::model::recognition_config::DecodingConfig::AutoDecodingConfig(v.into()),
        );
        self
    }

    /// The value of [decoding_config][crate::model::RecognitionConfig::decoding_config]
    /// if it holds a `ExplicitDecodingConfig`, `None` if the field is not set or
    /// holds a different branch.
    pub fn explicit_decoding_config(
        &self,
    ) -> std::option::Option<&std::boxed::Box<crate::model::ExplicitDecodingConfig>> {
        #[allow(unreachable_patterns)]
        self.decoding_config.as_ref().and_then(|v| match v {
            crate::model::recognition_config::DecodingConfig::ExplicitDecodingConfig(v) => {
                std::option::Option::Some(v)
            }
            _ => std::option::Option::None,
        })
    }

    /// Sets the value of [decoding_config][crate::model::RecognitionConfig::decoding_config]
    /// to hold a `ExplicitDecodingConfig`.
    ///
    /// Note that all the setters affecting `decoding_config` are
    /// mutually exclusive.
    ///
    /// # Example
    /// ```ignore,no_run
    /// # use google_cloud_speech_v2::model::RecognitionConfig;
    /// use google_cloud_speech_v2::model::ExplicitDecodingConfig;
    /// let x = RecognitionConfig::new().set_explicit_decoding_config(ExplicitDecodingConfig::default()/* use setters */);
    /// assert!(x.explicit_decoding_config().is_some());
    /// assert!(x.auto_decoding_config().is_none());
    /// ```
    pub fn set_explicit_decoding_config<
        T: std::convert::Into<std::boxed::Box<crate::model::ExplicitDecodingConfig>>,
    >(
        mut self,
        v: T,
    ) -> Self {
        self.decoding_config = std::option::Option::Some(
            crate::model::recognition_config::DecodingConfig::ExplicitDecodingConfig(v.into()),
        );
        self
    }
}
3856
// The returned string is this message's protobuf type URL
// (`type.googleapis.com/...`), as used by `Any`-style dynamic typing.
impl wkt::message::Message for RecognitionConfig {
    fn typename() -> &'static str {
        "type.googleapis.com/google.cloud.speech.v2.RecognitionConfig"
    }
}
3862
/// Defines additional types related to [RecognitionConfig].
pub mod recognition_config {
    #[allow(unused_imports)]
    use super::*;

    /// Decoding parameters for audio being sent for recognition.
    // Oneof: exactly one variant is held at a time. Payloads are boxed to keep
    // the enum itself small.
    #[derive(Clone, Debug, PartialEq)]
    #[non_exhaustive]
    pub enum DecodingConfig {
        /// Automatically detect decoding parameters.
        /// Preferred for supported formats.
        AutoDecodingConfig(std::boxed::Box<crate::model::AutoDetectDecodingConfig>),
        /// Explicitly specified decoding parameters.
        /// Required if using headerless PCM audio (linear16, mulaw, alaw).
        ExplicitDecodingConfig(std::boxed::Box<crate::model::ExplicitDecodingConfig>),
    }
}
3880
/// Request message for the
/// [Recognize][google.cloud.speech.v2.Speech.Recognize] method. Either
/// `content` or `uri` must be supplied. Supplying both or neither returns
/// [INVALID_ARGUMENT][google.rpc.Code.INVALID_ARGUMENT]. See [content
/// limits](https://cloud.google.com/speech-to-text/quotas#content).
///
/// [google.cloud.speech.v2.Speech.Recognize]: crate::client::Speech::recognize
#[derive(Clone, Default, PartialEq)]
#[non_exhaustive]
pub struct RecognizeRequest {
    /// Required. The name of the Recognizer to use during recognition. The
    /// expected format is
    /// `projects/{project}/locations/{location}/recognizers/{recognizer}`. The
    /// {recognizer} segment may be set to `_` to use an empty implicit Recognizer.
    pub recognizer: std::string::String,

    /// Features and audio metadata to use for the Automatic Speech Recognition.
    /// This field in combination with the
    /// [config_mask][google.cloud.speech.v2.RecognizeRequest.config_mask] field
    /// can be used to override parts of the
    /// [default_recognition_config][google.cloud.speech.v2.Recognizer.default_recognition_config]
    /// of the Recognizer resource.
    ///
    /// [google.cloud.speech.v2.RecognizeRequest.config_mask]: crate::model::RecognizeRequest::config_mask
    /// [google.cloud.speech.v2.Recognizer.default_recognition_config]: crate::model::Recognizer::default_recognition_config
    pub config: std::option::Option<crate::model::RecognitionConfig>,

    /// The list of fields in
    /// [config][google.cloud.speech.v2.RecognizeRequest.config] that override the
    /// values in the
    /// [default_recognition_config][google.cloud.speech.v2.Recognizer.default_recognition_config]
    /// of the recognizer during this recognition request. If no mask is provided,
    /// all non-default valued fields in
    /// [config][google.cloud.speech.v2.RecognizeRequest.config] override the
    /// values in the recognizer for this recognition request. If a mask is
    /// provided, only the fields listed in the mask override the config in the
    /// recognizer for this recognition request. If a wildcard (`*`) is provided,
    /// [config][google.cloud.speech.v2.RecognizeRequest.config] completely
    /// overrides and replaces the config in the recognizer for this recognition
    /// request.
    ///
    /// [google.cloud.speech.v2.RecognizeRequest.config]: crate::model::RecognizeRequest::config
    /// [google.cloud.speech.v2.Recognizer.default_recognition_config]: crate::model::Recognizer::default_recognition_config
    pub config_mask: std::option::Option<wkt::FieldMask>,

    /// The audio source, which is either inline content or a Google Cloud
    /// Storage URI.
    pub audio_source: std::option::Option<crate::model::recognize_request::AudioSource>,

    // NOTE(review): presumably retains JSON properties that do not map to a
    // known field — confirm against the `serialize`/`deserialize` modules.
    pub(crate) _unknown_fields: serde_json::Map<std::string::String, serde_json::Value>,
}
3932
// Builder-style accessors: every `set_*` / `set_or_clear_*` method consumes
// `self` and returns it, so calls can be chained when assembling a request.
impl RecognizeRequest {
    /// Creates a new, default-initialized [RecognizeRequest].
    pub fn new() -> Self {
        std::default::Default::default()
    }

    /// Sets the value of [recognizer][crate::model::RecognizeRequest::recognizer].
    ///
    /// # Example
    /// ```ignore,no_run
    /// # use google_cloud_speech_v2::model::RecognizeRequest;
    /// let x = RecognizeRequest::new().set_recognizer("example");
    /// ```
    pub fn set_recognizer<T: std::convert::Into<std::string::String>>(mut self, v: T) -> Self {
        self.recognizer = v.into();
        self
    }

    /// Sets the value of [config][crate::model::RecognizeRequest::config].
    ///
    /// # Example
    /// ```ignore,no_run
    /// # use google_cloud_speech_v2::model::RecognizeRequest;
    /// use google_cloud_speech_v2::model::RecognitionConfig;
    /// let x = RecognizeRequest::new().set_config(RecognitionConfig::default()/* use setters */);
    /// ```
    pub fn set_config<T>(mut self, v: T) -> Self
    where
        T: std::convert::Into<crate::model::RecognitionConfig>,
    {
        self.config = std::option::Option::Some(v.into());
        self
    }

    /// Sets or clears the value of [config][crate::model::RecognizeRequest::config].
    ///
    /// # Example
    /// ```ignore,no_run
    /// # use google_cloud_speech_v2::model::RecognizeRequest;
    /// use google_cloud_speech_v2::model::RecognitionConfig;
    /// let x = RecognizeRequest::new().set_or_clear_config(Some(RecognitionConfig::default()/* use setters */));
    /// let x = RecognizeRequest::new().set_or_clear_config(None::<RecognitionConfig>);
    /// ```
    pub fn set_or_clear_config<T>(mut self, v: std::option::Option<T>) -> Self
    where
        T: std::convert::Into<crate::model::RecognitionConfig>,
    {
        self.config = v.map(|x| x.into());
        self
    }

    /// Sets the value of [config_mask][crate::model::RecognizeRequest::config_mask].
    ///
    /// # Example
    /// ```ignore,no_run
    /// # use google_cloud_speech_v2::model::RecognizeRequest;
    /// use wkt::FieldMask;
    /// let x = RecognizeRequest::new().set_config_mask(FieldMask::default()/* use setters */);
    /// ```
    pub fn set_config_mask<T>(mut self, v: T) -> Self
    where
        T: std::convert::Into<wkt::FieldMask>,
    {
        self.config_mask = std::option::Option::Some(v.into());
        self
    }

    /// Sets or clears the value of [config_mask][crate::model::RecognizeRequest::config_mask].
    ///
    /// # Example
    /// ```ignore,no_run
    /// # use google_cloud_speech_v2::model::RecognizeRequest;
    /// use wkt::FieldMask;
    /// let x = RecognizeRequest::new().set_or_clear_config_mask(Some(FieldMask::default()/* use setters */));
    /// let x = RecognizeRequest::new().set_or_clear_config_mask(None::<FieldMask>);
    /// ```
    pub fn set_or_clear_config_mask<T>(mut self, v: std::option::Option<T>) -> Self
    where
        T: std::convert::Into<wkt::FieldMask>,
    {
        self.config_mask = v.map(|x| x.into());
        self
    }

    /// Sets the value of [audio_source][crate::model::RecognizeRequest::audio_source].
    ///
    /// Note that all the setters affecting `audio_source` are mutually
    /// exclusive.
    ///
    /// # Example
    /// ```ignore,no_run
    /// # use google_cloud_speech_v2::model::RecognizeRequest;
    /// use google_cloud_speech_v2::model::recognize_request::AudioSource;
    /// let x = RecognizeRequest::new().set_audio_source(Some(AudioSource::Content(bytes::Bytes::from_static(b"example"))));
    /// ```
    pub fn set_audio_source<
        T: std::convert::Into<std::option::Option<crate::model::recognize_request::AudioSource>>,
    >(
        mut self,
        v: T,
    ) -> Self {
        self.audio_source = v.into();
        self
    }

    /// The value of [audio_source][crate::model::RecognizeRequest::audio_source]
    /// if it holds a `Content`, `None` if the field is not set or
    /// holds a different branch.
    pub fn content(&self) -> std::option::Option<&::bytes::Bytes> {
        // The wildcard arm catches any other branch of the oneof.
        #[allow(unreachable_patterns)]
        self.audio_source.as_ref().and_then(|v| match v {
            crate::model::recognize_request::AudioSource::Content(v) => {
                std::option::Option::Some(v)
            }
            _ => std::option::Option::None,
        })
    }

    /// Sets the value of [audio_source][crate::model::RecognizeRequest::audio_source]
    /// to hold a `Content`.
    ///
    /// Note that all the setters affecting `audio_source` are
    /// mutually exclusive.
    ///
    /// # Example
    /// ```ignore,no_run
    /// # use google_cloud_speech_v2::model::RecognizeRequest;
    /// let x = RecognizeRequest::new().set_content(bytes::Bytes::from_static(b"example"));
    /// assert!(x.content().is_some());
    /// assert!(x.uri().is_none());
    /// ```
    pub fn set_content<T: std::convert::Into<::bytes::Bytes>>(mut self, v: T) -> Self {
        self.audio_source = std::option::Option::Some(
            crate::model::recognize_request::AudioSource::Content(v.into()),
        );
        self
    }

    /// The value of [audio_source][crate::model::RecognizeRequest::audio_source]
    /// if it holds a `Uri`, `None` if the field is not set or
    /// holds a different branch.
    pub fn uri(&self) -> std::option::Option<&std::string::String> {
        #[allow(unreachable_patterns)]
        self.audio_source.as_ref().and_then(|v| match v {
            crate::model::recognize_request::AudioSource::Uri(v) => std::option::Option::Some(v),
            _ => std::option::Option::None,
        })
    }

    /// Sets the value of [audio_source][crate::model::RecognizeRequest::audio_source]
    /// to hold a `Uri`.
    ///
    /// Note that all the setters affecting `audio_source` are
    /// mutually exclusive.
    ///
    /// # Example
    /// ```ignore,no_run
    /// # use google_cloud_speech_v2::model::RecognizeRequest;
    /// let x = RecognizeRequest::new().set_uri("example");
    /// assert!(x.uri().is_some());
    /// assert!(x.content().is_none());
    /// ```
    pub fn set_uri<T: std::convert::Into<std::string::String>>(mut self, v: T) -> Self {
        self.audio_source =
            std::option::Option::Some(crate::model::recognize_request::AudioSource::Uri(v.into()));
        self
    }
}
4100
impl wkt::message::Message for RecognizeRequest {
    // Identifies this message to the well-known-types machinery.
    fn typename() -> &'static str {
        // Fully qualified protobuf type name, in type-URL form.
        "type.googleapis.com/google.cloud.speech.v2.RecognizeRequest"
    }
}
4106
/// Defines additional types related to [RecognizeRequest].
pub mod recognize_request {
    #[allow(unused_imports)]
    use super::*;

    /// The audio source, which is either inline content or a Google Cloud
    /// Storage URI.
    ///
    /// This enum is `#[non_exhaustive]`: `match` statements over it should
    /// include a wildcard arm so that new variants added in future releases
    /// do not break downstream code.
    #[derive(Clone, Debug, PartialEq)]
    #[non_exhaustive]
    pub enum AudioSource {
        /// The audio data bytes encoded as specified in
        /// [RecognitionConfig][google.cloud.speech.v2.RecognitionConfig]. As
        /// with all bytes fields, proto buffers use a pure binary representation,
        /// whereas JSON representations use base64.
        ///
        /// [google.cloud.speech.v2.RecognitionConfig]: crate::model::RecognitionConfig
        Content(::bytes::Bytes),
        /// URI that points to a file that contains audio data bytes as specified in
        /// [RecognitionConfig][google.cloud.speech.v2.RecognitionConfig]. The file
        /// must not be compressed (for example, gzip). Currently, only Google Cloud
        /// Storage URIs are supported, which must be specified in the following
        /// format: `gs://bucket_name/object_name` (other URI formats return
        /// [INVALID_ARGUMENT][google.rpc.Code.INVALID_ARGUMENT]). For more
        /// information, see [Request
        /// URIs](https://cloud.google.com/storage/docs/reference-uris).
        ///
        /// [google.cloud.speech.v2.RecognitionConfig]: crate::model::RecognitionConfig
        Uri(std::string::String),
    }
}
4137
/// Metadata about the recognition request and response.
#[derive(Clone, Default, PartialEq)]
#[non_exhaustive]
pub struct RecognitionResponseMetadata {
    /// Global request identifier auto-generated by the API.
    pub request_id: std::string::String,

    /// When available, billed audio seconds for the corresponding request.
    pub total_billed_duration: std::option::Option<wkt::Duration>,

    /// Optional. Output only. Provides the prompt used for the recognition
    /// request.
    pub prompt: std::option::Option<std::string::String>,

    // Catch-all for JSON properties this struct does not model; presumably
    // kept so unrecognized fields survive a deserialize/serialize round trip
    // (see the `deserialize`/`serialize` modules) — TODO(review): confirm.
    pub(crate) _unknown_fields: serde_json::Map<std::string::String, serde_json::Value>,
}
4154
4155impl RecognitionResponseMetadata {
4156 pub fn new() -> Self {
4157 std::default::Default::default()
4158 }
4159
4160 /// Sets the value of [request_id][crate::model::RecognitionResponseMetadata::request_id].
4161 ///
4162 /// # Example
4163 /// ```ignore,no_run
4164 /// # use google_cloud_speech_v2::model::RecognitionResponseMetadata;
4165 /// let x = RecognitionResponseMetadata::new().set_request_id("example");
4166 /// ```
4167 pub fn set_request_id<T: std::convert::Into<std::string::String>>(mut self, v: T) -> Self {
4168 self.request_id = v.into();
4169 self
4170 }
4171
4172 /// Sets the value of [total_billed_duration][crate::model::RecognitionResponseMetadata::total_billed_duration].
4173 ///
4174 /// # Example
4175 /// ```ignore,no_run
4176 /// # use google_cloud_speech_v2::model::RecognitionResponseMetadata;
4177 /// use wkt::Duration;
4178 /// let x = RecognitionResponseMetadata::new().set_total_billed_duration(Duration::default()/* use setters */);
4179 /// ```
4180 pub fn set_total_billed_duration<T>(mut self, v: T) -> Self
4181 where
4182 T: std::convert::Into<wkt::Duration>,
4183 {
4184 self.total_billed_duration = std::option::Option::Some(v.into());
4185 self
4186 }
4187
4188 /// Sets or clears the value of [total_billed_duration][crate::model::RecognitionResponseMetadata::total_billed_duration].
4189 ///
4190 /// # Example
4191 /// ```ignore,no_run
4192 /// # use google_cloud_speech_v2::model::RecognitionResponseMetadata;
4193 /// use wkt::Duration;
4194 /// let x = RecognitionResponseMetadata::new().set_or_clear_total_billed_duration(Some(Duration::default()/* use setters */));
4195 /// let x = RecognitionResponseMetadata::new().set_or_clear_total_billed_duration(None::<Duration>);
4196 /// ```
4197 pub fn set_or_clear_total_billed_duration<T>(mut self, v: std::option::Option<T>) -> Self
4198 where
4199 T: std::convert::Into<wkt::Duration>,
4200 {
4201 self.total_billed_duration = v.map(|x| x.into());
4202 self
4203 }
4204
4205 /// Sets the value of [prompt][crate::model::RecognitionResponseMetadata::prompt].
4206 ///
4207 /// # Example
4208 /// ```ignore,no_run
4209 /// # use google_cloud_speech_v2::model::RecognitionResponseMetadata;
4210 /// let x = RecognitionResponseMetadata::new().set_prompt("example");
4211 /// ```
4212 pub fn set_prompt<T>(mut self, v: T) -> Self
4213 where
4214 T: std::convert::Into<std::string::String>,
4215 {
4216 self.prompt = std::option::Option::Some(v.into());
4217 self
4218 }
4219
4220 /// Sets or clears the value of [prompt][crate::model::RecognitionResponseMetadata::prompt].
4221 ///
4222 /// # Example
4223 /// ```ignore,no_run
4224 /// # use google_cloud_speech_v2::model::RecognitionResponseMetadata;
4225 /// let x = RecognitionResponseMetadata::new().set_or_clear_prompt(Some("example"));
4226 /// let x = RecognitionResponseMetadata::new().set_or_clear_prompt(None::<String>);
4227 /// ```
4228 pub fn set_or_clear_prompt<T>(mut self, v: std::option::Option<T>) -> Self
4229 where
4230 T: std::convert::Into<std::string::String>,
4231 {
4232 self.prompt = v.map(|x| x.into());
4233 self
4234 }
4235}
4236
impl wkt::message::Message for RecognitionResponseMetadata {
    // Identifies this message to the well-known-types machinery.
    fn typename() -> &'static str {
        // Fully qualified protobuf type name, in type-URL form.
        "type.googleapis.com/google.cloud.speech.v2.RecognitionResponseMetadata"
    }
}
4242
/// Alternative hypotheses (a.k.a. n-best list).
#[derive(Clone, Default, PartialEq)]
#[non_exhaustive]
pub struct SpeechRecognitionAlternative {
    /// Transcript text representing the words that the user spoke.
    pub transcript: std::string::String,

    /// The confidence estimate between 0.0 and 1.0. A higher number
    /// indicates an estimated greater likelihood that the recognized words are
    /// correct. This field is set only for the top alternative of a non-streaming
    /// result or, of a streaming result where
    /// [is_final][google.cloud.speech.v2.StreamingRecognitionResult.is_final] is
    /// set to `true`. This field is not guaranteed to be accurate and users should
    /// not rely on it to be always provided. The default of 0.0 is a sentinel
    /// value indicating `confidence` was not set.
    ///
    /// [google.cloud.speech.v2.StreamingRecognitionResult.is_final]: crate::model::StreamingRecognitionResult::is_final
    pub confidence: f32,

    /// A list of word-specific information for each recognized word.
    /// When the
    /// [SpeakerDiarizationConfig][google.cloud.speech.v2.SpeakerDiarizationConfig]
    /// is set, you will see all the words from the beginning of the audio.
    ///
    /// [google.cloud.speech.v2.SpeakerDiarizationConfig]: crate::model::SpeakerDiarizationConfig
    pub words: std::vec::Vec<crate::model::WordInfo>,

    // Catch-all for JSON properties this struct does not model; presumably
    // kept so unrecognized fields survive a deserialize/serialize round trip
    // (see the `deserialize`/`serialize` modules) — TODO(review): confirm.
    pub(crate) _unknown_fields: serde_json::Map<std::string::String, serde_json::Value>,
}
4272
4273impl SpeechRecognitionAlternative {
4274 pub fn new() -> Self {
4275 std::default::Default::default()
4276 }
4277
4278 /// Sets the value of [transcript][crate::model::SpeechRecognitionAlternative::transcript].
4279 ///
4280 /// # Example
4281 /// ```ignore,no_run
4282 /// # use google_cloud_speech_v2::model::SpeechRecognitionAlternative;
4283 /// let x = SpeechRecognitionAlternative::new().set_transcript("example");
4284 /// ```
4285 pub fn set_transcript<T: std::convert::Into<std::string::String>>(mut self, v: T) -> Self {
4286 self.transcript = v.into();
4287 self
4288 }
4289
4290 /// Sets the value of [confidence][crate::model::SpeechRecognitionAlternative::confidence].
4291 ///
4292 /// # Example
4293 /// ```ignore,no_run
4294 /// # use google_cloud_speech_v2::model::SpeechRecognitionAlternative;
4295 /// let x = SpeechRecognitionAlternative::new().set_confidence(42.0);
4296 /// ```
4297 pub fn set_confidence<T: std::convert::Into<f32>>(mut self, v: T) -> Self {
4298 self.confidence = v.into();
4299 self
4300 }
4301
4302 /// Sets the value of [words][crate::model::SpeechRecognitionAlternative::words].
4303 ///
4304 /// # Example
4305 /// ```ignore,no_run
4306 /// # use google_cloud_speech_v2::model::SpeechRecognitionAlternative;
4307 /// use google_cloud_speech_v2::model::WordInfo;
4308 /// let x = SpeechRecognitionAlternative::new()
4309 /// .set_words([
4310 /// WordInfo::default()/* use setters */,
4311 /// WordInfo::default()/* use (different) setters */,
4312 /// ]);
4313 /// ```
4314 pub fn set_words<T, V>(mut self, v: T) -> Self
4315 where
4316 T: std::iter::IntoIterator<Item = V>,
4317 V: std::convert::Into<crate::model::WordInfo>,
4318 {
4319 use std::iter::Iterator;
4320 self.words = v.into_iter().map(|i| i.into()).collect();
4321 self
4322 }
4323}
4324
impl wkt::message::Message for SpeechRecognitionAlternative {
    // Identifies this message to the well-known-types machinery.
    fn typename() -> &'static str {
        // Fully qualified protobuf type name, in type-URL form.
        "type.googleapis.com/google.cloud.speech.v2.SpeechRecognitionAlternative"
    }
}
4330
/// Word-specific information for recognized words.
#[derive(Clone, Default, PartialEq)]
#[non_exhaustive]
pub struct WordInfo {
    /// Time offset relative to the beginning of the audio,
    /// and corresponding to the start of the spoken word.
    /// This field is only set if
    /// [enable_word_time_offsets][google.cloud.speech.v2.RecognitionFeatures.enable_word_time_offsets]
    /// is `true` and only in the top hypothesis. This is an experimental feature
    /// and the accuracy of the time offset can vary.
    ///
    /// [google.cloud.speech.v2.RecognitionFeatures.enable_word_time_offsets]: crate::model::RecognitionFeatures::enable_word_time_offsets
    pub start_offset: std::option::Option<wkt::Duration>,

    /// Time offset relative to the beginning of the audio,
    /// and corresponding to the end of the spoken word.
    /// This field is only set if
    /// [enable_word_time_offsets][google.cloud.speech.v2.RecognitionFeatures.enable_word_time_offsets]
    /// is `true` and only in the top hypothesis. This is an experimental feature
    /// and the accuracy of the time offset can vary.
    ///
    /// [google.cloud.speech.v2.RecognitionFeatures.enable_word_time_offsets]: crate::model::RecognitionFeatures::enable_word_time_offsets
    pub end_offset: std::option::Option<wkt::Duration>,

    /// The word corresponding to this set of information.
    pub word: std::string::String,

    /// The confidence estimate between 0.0 and 1.0. A higher number
    /// indicates an estimated greater likelihood that the recognized words are
    /// correct. This field is set only for the top alternative of a non-streaming
    /// result or, of a streaming result where
    /// [is_final][google.cloud.speech.v2.StreamingRecognitionResult.is_final] is
    /// set to `true`. This field is not guaranteed to be accurate and users should
    /// not rely on it to be always provided. The default of 0.0 is a sentinel
    /// value indicating `confidence` was not set.
    ///
    /// [google.cloud.speech.v2.StreamingRecognitionResult.is_final]: crate::model::StreamingRecognitionResult::is_final
    pub confidence: f32,

    /// A distinct label is assigned for every speaker within the audio. This field
    /// specifies which one of those speakers was detected to have spoken this
    /// word. `speaker_label` is set if
    /// [SpeakerDiarizationConfig][google.cloud.speech.v2.SpeakerDiarizationConfig]
    /// is given and only in the top alternative.
    ///
    /// [google.cloud.speech.v2.SpeakerDiarizationConfig]: crate::model::SpeakerDiarizationConfig
    pub speaker_label: std::string::String,

    // Catch-all for JSON properties this struct does not model; presumably
    // kept so unrecognized fields survive a deserialize/serialize round trip
    // (see the `deserialize`/`serialize` modules) — TODO(review): confirm.
    pub(crate) _unknown_fields: serde_json::Map<std::string::String, serde_json::Value>,
}
4381
4382impl WordInfo {
4383 pub fn new() -> Self {
4384 std::default::Default::default()
4385 }
4386
4387 /// Sets the value of [start_offset][crate::model::WordInfo::start_offset].
4388 ///
4389 /// # Example
4390 /// ```ignore,no_run
4391 /// # use google_cloud_speech_v2::model::WordInfo;
4392 /// use wkt::Duration;
4393 /// let x = WordInfo::new().set_start_offset(Duration::default()/* use setters */);
4394 /// ```
4395 pub fn set_start_offset<T>(mut self, v: T) -> Self
4396 where
4397 T: std::convert::Into<wkt::Duration>,
4398 {
4399 self.start_offset = std::option::Option::Some(v.into());
4400 self
4401 }
4402
4403 /// Sets or clears the value of [start_offset][crate::model::WordInfo::start_offset].
4404 ///
4405 /// # Example
4406 /// ```ignore,no_run
4407 /// # use google_cloud_speech_v2::model::WordInfo;
4408 /// use wkt::Duration;
4409 /// let x = WordInfo::new().set_or_clear_start_offset(Some(Duration::default()/* use setters */));
4410 /// let x = WordInfo::new().set_or_clear_start_offset(None::<Duration>);
4411 /// ```
4412 pub fn set_or_clear_start_offset<T>(mut self, v: std::option::Option<T>) -> Self
4413 where
4414 T: std::convert::Into<wkt::Duration>,
4415 {
4416 self.start_offset = v.map(|x| x.into());
4417 self
4418 }
4419
4420 /// Sets the value of [end_offset][crate::model::WordInfo::end_offset].
4421 ///
4422 /// # Example
4423 /// ```ignore,no_run
4424 /// # use google_cloud_speech_v2::model::WordInfo;
4425 /// use wkt::Duration;
4426 /// let x = WordInfo::new().set_end_offset(Duration::default()/* use setters */);
4427 /// ```
4428 pub fn set_end_offset<T>(mut self, v: T) -> Self
4429 where
4430 T: std::convert::Into<wkt::Duration>,
4431 {
4432 self.end_offset = std::option::Option::Some(v.into());
4433 self
4434 }
4435
4436 /// Sets or clears the value of [end_offset][crate::model::WordInfo::end_offset].
4437 ///
4438 /// # Example
4439 /// ```ignore,no_run
4440 /// # use google_cloud_speech_v2::model::WordInfo;
4441 /// use wkt::Duration;
4442 /// let x = WordInfo::new().set_or_clear_end_offset(Some(Duration::default()/* use setters */));
4443 /// let x = WordInfo::new().set_or_clear_end_offset(None::<Duration>);
4444 /// ```
4445 pub fn set_or_clear_end_offset<T>(mut self, v: std::option::Option<T>) -> Self
4446 where
4447 T: std::convert::Into<wkt::Duration>,
4448 {
4449 self.end_offset = v.map(|x| x.into());
4450 self
4451 }
4452
4453 /// Sets the value of [word][crate::model::WordInfo::word].
4454 ///
4455 /// # Example
4456 /// ```ignore,no_run
4457 /// # use google_cloud_speech_v2::model::WordInfo;
4458 /// let x = WordInfo::new().set_word("example");
4459 /// ```
4460 pub fn set_word<T: std::convert::Into<std::string::String>>(mut self, v: T) -> Self {
4461 self.word = v.into();
4462 self
4463 }
4464
4465 /// Sets the value of [confidence][crate::model::WordInfo::confidence].
4466 ///
4467 /// # Example
4468 /// ```ignore,no_run
4469 /// # use google_cloud_speech_v2::model::WordInfo;
4470 /// let x = WordInfo::new().set_confidence(42.0);
4471 /// ```
4472 pub fn set_confidence<T: std::convert::Into<f32>>(mut self, v: T) -> Self {
4473 self.confidence = v.into();
4474 self
4475 }
4476
4477 /// Sets the value of [speaker_label][crate::model::WordInfo::speaker_label].
4478 ///
4479 /// # Example
4480 /// ```ignore,no_run
4481 /// # use google_cloud_speech_v2::model::WordInfo;
4482 /// let x = WordInfo::new().set_speaker_label("example");
4483 /// ```
4484 pub fn set_speaker_label<T: std::convert::Into<std::string::String>>(mut self, v: T) -> Self {
4485 self.speaker_label = v.into();
4486 self
4487 }
4488}
4489
impl wkt::message::Message for WordInfo {
    // Identifies this message to the well-known-types machinery.
    fn typename() -> &'static str {
        // Fully qualified protobuf type name, in type-URL form.
        "type.googleapis.com/google.cloud.speech.v2.WordInfo"
    }
}
4495
/// A speech recognition result corresponding to a portion of the audio.
#[derive(Clone, Default, PartialEq)]
#[non_exhaustive]
pub struct SpeechRecognitionResult {
    /// May contain one or more recognition hypotheses. These alternatives are
    /// ordered in terms of accuracy, with the top (first) alternative being the
    /// most probable, as ranked by the recognizer.
    pub alternatives: std::vec::Vec<crate::model::SpeechRecognitionAlternative>,

    /// For multi-channel audio, this is the channel number corresponding to the
    /// recognized result for the audio from that channel.
    /// For `audio_channel_count` = `N`, its output values can range from `1` to
    /// `N`.
    pub channel_tag: i32,

    /// Time offset of the end of this result relative to the beginning of the
    /// audio.
    pub result_end_offset: std::option::Option<wkt::Duration>,

    /// Output only. The [BCP-47](https://www.rfc-editor.org/rfc/bcp/bcp47.txt)
    /// language tag of the language in this result. This language code was
    /// detected to have the most likelihood of being spoken in the audio.
    pub language_code: std::string::String,

    // Catch-all for JSON properties this struct does not model; presumably
    // kept so unrecognized fields survive a deserialize/serialize round trip
    // (see the `deserialize`/`serialize` modules) — TODO(review): confirm.
    pub(crate) _unknown_fields: serde_json::Map<std::string::String, serde_json::Value>,
}
4522
4523impl SpeechRecognitionResult {
4524 pub fn new() -> Self {
4525 std::default::Default::default()
4526 }
4527
4528 /// Sets the value of [alternatives][crate::model::SpeechRecognitionResult::alternatives].
4529 ///
4530 /// # Example
4531 /// ```ignore,no_run
4532 /// # use google_cloud_speech_v2::model::SpeechRecognitionResult;
4533 /// use google_cloud_speech_v2::model::SpeechRecognitionAlternative;
4534 /// let x = SpeechRecognitionResult::new()
4535 /// .set_alternatives([
4536 /// SpeechRecognitionAlternative::default()/* use setters */,
4537 /// SpeechRecognitionAlternative::default()/* use (different) setters */,
4538 /// ]);
4539 /// ```
4540 pub fn set_alternatives<T, V>(mut self, v: T) -> Self
4541 where
4542 T: std::iter::IntoIterator<Item = V>,
4543 V: std::convert::Into<crate::model::SpeechRecognitionAlternative>,
4544 {
4545 use std::iter::Iterator;
4546 self.alternatives = v.into_iter().map(|i| i.into()).collect();
4547 self
4548 }
4549
4550 /// Sets the value of [channel_tag][crate::model::SpeechRecognitionResult::channel_tag].
4551 ///
4552 /// # Example
4553 /// ```ignore,no_run
4554 /// # use google_cloud_speech_v2::model::SpeechRecognitionResult;
4555 /// let x = SpeechRecognitionResult::new().set_channel_tag(42);
4556 /// ```
4557 pub fn set_channel_tag<T: std::convert::Into<i32>>(mut self, v: T) -> Self {
4558 self.channel_tag = v.into();
4559 self
4560 }
4561
4562 /// Sets the value of [result_end_offset][crate::model::SpeechRecognitionResult::result_end_offset].
4563 ///
4564 /// # Example
4565 /// ```ignore,no_run
4566 /// # use google_cloud_speech_v2::model::SpeechRecognitionResult;
4567 /// use wkt::Duration;
4568 /// let x = SpeechRecognitionResult::new().set_result_end_offset(Duration::default()/* use setters */);
4569 /// ```
4570 pub fn set_result_end_offset<T>(mut self, v: T) -> Self
4571 where
4572 T: std::convert::Into<wkt::Duration>,
4573 {
4574 self.result_end_offset = std::option::Option::Some(v.into());
4575 self
4576 }
4577
4578 /// Sets or clears the value of [result_end_offset][crate::model::SpeechRecognitionResult::result_end_offset].
4579 ///
4580 /// # Example
4581 /// ```ignore,no_run
4582 /// # use google_cloud_speech_v2::model::SpeechRecognitionResult;
4583 /// use wkt::Duration;
4584 /// let x = SpeechRecognitionResult::new().set_or_clear_result_end_offset(Some(Duration::default()/* use setters */));
4585 /// let x = SpeechRecognitionResult::new().set_or_clear_result_end_offset(None::<Duration>);
4586 /// ```
4587 pub fn set_or_clear_result_end_offset<T>(mut self, v: std::option::Option<T>) -> Self
4588 where
4589 T: std::convert::Into<wkt::Duration>,
4590 {
4591 self.result_end_offset = v.map(|x| x.into());
4592 self
4593 }
4594
4595 /// Sets the value of [language_code][crate::model::SpeechRecognitionResult::language_code].
4596 ///
4597 /// # Example
4598 /// ```ignore,no_run
4599 /// # use google_cloud_speech_v2::model::SpeechRecognitionResult;
4600 /// let x = SpeechRecognitionResult::new().set_language_code("example");
4601 /// ```
4602 pub fn set_language_code<T: std::convert::Into<std::string::String>>(mut self, v: T) -> Self {
4603 self.language_code = v.into();
4604 self
4605 }
4606}
4607
impl wkt::message::Message for SpeechRecognitionResult {
    // Identifies this message to the well-known-types machinery.
    fn typename() -> &'static str {
        // Fully qualified protobuf type name, in type-URL form.
        "type.googleapis.com/google.cloud.speech.v2.SpeechRecognitionResult"
    }
}
4613
/// Response message for the
/// [Recognize][google.cloud.speech.v2.Speech.Recognize] method.
///
/// [google.cloud.speech.v2.Speech.Recognize]: crate::client::Speech::recognize
#[derive(Clone, Default, PartialEq)]
#[non_exhaustive]
pub struct RecognizeResponse {
    /// Sequential list of transcription results corresponding to sequential
    /// portions of audio.
    pub results: std::vec::Vec<crate::model::SpeechRecognitionResult>,

    /// Metadata about the recognition.
    pub metadata: std::option::Option<crate::model::RecognitionResponseMetadata>,

    // Catch-all for JSON properties this struct does not model; presumably
    // kept so unrecognized fields survive a deserialize/serialize round trip
    // (see the `deserialize`/`serialize` modules) — TODO(review): confirm.
    pub(crate) _unknown_fields: serde_json::Map<std::string::String, serde_json::Value>,
}
4630
4631impl RecognizeResponse {
4632 pub fn new() -> Self {
4633 std::default::Default::default()
4634 }
4635
4636 /// Sets the value of [results][crate::model::RecognizeResponse::results].
4637 ///
4638 /// # Example
4639 /// ```ignore,no_run
4640 /// # use google_cloud_speech_v2::model::RecognizeResponse;
4641 /// use google_cloud_speech_v2::model::SpeechRecognitionResult;
4642 /// let x = RecognizeResponse::new()
4643 /// .set_results([
4644 /// SpeechRecognitionResult::default()/* use setters */,
4645 /// SpeechRecognitionResult::default()/* use (different) setters */,
4646 /// ]);
4647 /// ```
4648 pub fn set_results<T, V>(mut self, v: T) -> Self
4649 where
4650 T: std::iter::IntoIterator<Item = V>,
4651 V: std::convert::Into<crate::model::SpeechRecognitionResult>,
4652 {
4653 use std::iter::Iterator;
4654 self.results = v.into_iter().map(|i| i.into()).collect();
4655 self
4656 }
4657
4658 /// Sets the value of [metadata][crate::model::RecognizeResponse::metadata].
4659 ///
4660 /// # Example
4661 /// ```ignore,no_run
4662 /// # use google_cloud_speech_v2::model::RecognizeResponse;
4663 /// use google_cloud_speech_v2::model::RecognitionResponseMetadata;
4664 /// let x = RecognizeResponse::new().set_metadata(RecognitionResponseMetadata::default()/* use setters */);
4665 /// ```
4666 pub fn set_metadata<T>(mut self, v: T) -> Self
4667 where
4668 T: std::convert::Into<crate::model::RecognitionResponseMetadata>,
4669 {
4670 self.metadata = std::option::Option::Some(v.into());
4671 self
4672 }
4673
4674 /// Sets or clears the value of [metadata][crate::model::RecognizeResponse::metadata].
4675 ///
4676 /// # Example
4677 /// ```ignore,no_run
4678 /// # use google_cloud_speech_v2::model::RecognizeResponse;
4679 /// use google_cloud_speech_v2::model::RecognitionResponseMetadata;
4680 /// let x = RecognizeResponse::new().set_or_clear_metadata(Some(RecognitionResponseMetadata::default()/* use setters */));
4681 /// let x = RecognizeResponse::new().set_or_clear_metadata(None::<RecognitionResponseMetadata>);
4682 /// ```
4683 pub fn set_or_clear_metadata<T>(mut self, v: std::option::Option<T>) -> Self
4684 where
4685 T: std::convert::Into<crate::model::RecognitionResponseMetadata>,
4686 {
4687 self.metadata = v.map(|x| x.into());
4688 self
4689 }
4690}
4691
impl wkt::message::Message for RecognizeResponse {
    // Identifies this message to the well-known-types machinery.
    fn typename() -> &'static str {
        // Fully qualified protobuf type name, in type-URL form.
        "type.googleapis.com/google.cloud.speech.v2.RecognizeResponse"
    }
}
4697
/// Available recognition features specific to streaming recognition requests.
#[derive(Clone, Default, PartialEq)]
#[non_exhaustive]
pub struct StreamingRecognitionFeatures {
    /// If `true`, responses with voice activity speech events will be returned as
    /// they are detected.
    pub enable_voice_activity_events: bool,

    /// Whether or not to stream interim results to the client. If set to true,
    /// interim results will be streamed to the client. Otherwise, only the final
    /// response will be streamed back.
    pub interim_results: bool,

    /// If set, the server will automatically close the stream after the specified
    /// duration has elapsed after the last VOICE_ACTIVITY speech event has been
    /// sent. The field `voice_activity_events` must also be set to true.
    pub voice_activity_timeout:
        std::option::Option<crate::model::streaming_recognition_features::VoiceActivityTimeout>,

    /// Optional. Endpointing sensitivity for this stream.
    pub endpointing_sensitivity:
        crate::model::streaming_recognition_features::EndpointingSensitivity,

    // Catch-all for JSON properties this struct does not model; presumably
    // kept so unrecognized fields survive a deserialize/serialize round trip
    // (see the `deserialize`/`serialize` modules) — TODO(review): confirm.
    pub(crate) _unknown_fields: serde_json::Map<std::string::String, serde_json::Value>,
}
4723
4724impl StreamingRecognitionFeatures {
4725 pub fn new() -> Self {
4726 std::default::Default::default()
4727 }
4728
4729 /// Sets the value of [enable_voice_activity_events][crate::model::StreamingRecognitionFeatures::enable_voice_activity_events].
4730 ///
4731 /// # Example
4732 /// ```ignore,no_run
4733 /// # use google_cloud_speech_v2::model::StreamingRecognitionFeatures;
4734 /// let x = StreamingRecognitionFeatures::new().set_enable_voice_activity_events(true);
4735 /// ```
4736 pub fn set_enable_voice_activity_events<T: std::convert::Into<bool>>(mut self, v: T) -> Self {
4737 self.enable_voice_activity_events = v.into();
4738 self
4739 }
4740
4741 /// Sets the value of [interim_results][crate::model::StreamingRecognitionFeatures::interim_results].
4742 ///
4743 /// # Example
4744 /// ```ignore,no_run
4745 /// # use google_cloud_speech_v2::model::StreamingRecognitionFeatures;
4746 /// let x = StreamingRecognitionFeatures::new().set_interim_results(true);
4747 /// ```
4748 pub fn set_interim_results<T: std::convert::Into<bool>>(mut self, v: T) -> Self {
4749 self.interim_results = v.into();
4750 self
4751 }
4752
4753 /// Sets the value of [voice_activity_timeout][crate::model::StreamingRecognitionFeatures::voice_activity_timeout].
4754 ///
4755 /// # Example
4756 /// ```ignore,no_run
4757 /// # use google_cloud_speech_v2::model::StreamingRecognitionFeatures;
4758 /// use google_cloud_speech_v2::model::streaming_recognition_features::VoiceActivityTimeout;
4759 /// let x = StreamingRecognitionFeatures::new().set_voice_activity_timeout(VoiceActivityTimeout::default()/* use setters */);
4760 /// ```
4761 pub fn set_voice_activity_timeout<T>(mut self, v: T) -> Self
4762 where
4763 T: std::convert::Into<crate::model::streaming_recognition_features::VoiceActivityTimeout>,
4764 {
4765 self.voice_activity_timeout = std::option::Option::Some(v.into());
4766 self
4767 }
4768
4769 /// Sets or clears the value of [voice_activity_timeout][crate::model::StreamingRecognitionFeatures::voice_activity_timeout].
4770 ///
4771 /// # Example
4772 /// ```ignore,no_run
4773 /// # use google_cloud_speech_v2::model::StreamingRecognitionFeatures;
4774 /// use google_cloud_speech_v2::model::streaming_recognition_features::VoiceActivityTimeout;
4775 /// let x = StreamingRecognitionFeatures::new().set_or_clear_voice_activity_timeout(Some(VoiceActivityTimeout::default()/* use setters */));
4776 /// let x = StreamingRecognitionFeatures::new().set_or_clear_voice_activity_timeout(None::<VoiceActivityTimeout>);
4777 /// ```
4778 pub fn set_or_clear_voice_activity_timeout<T>(mut self, v: std::option::Option<T>) -> Self
4779 where
4780 T: std::convert::Into<crate::model::streaming_recognition_features::VoiceActivityTimeout>,
4781 {
4782 self.voice_activity_timeout = v.map(|x| x.into());
4783 self
4784 }
4785
4786 /// Sets the value of [endpointing_sensitivity][crate::model::StreamingRecognitionFeatures::endpointing_sensitivity].
4787 ///
4788 /// # Example
4789 /// ```ignore,no_run
4790 /// # use google_cloud_speech_v2::model::StreamingRecognitionFeatures;
4791 /// use google_cloud_speech_v2::model::streaming_recognition_features::EndpointingSensitivity;
4792 /// let x0 = StreamingRecognitionFeatures::new().set_endpointing_sensitivity(EndpointingSensitivity::Standard);
4793 /// let x1 = StreamingRecognitionFeatures::new().set_endpointing_sensitivity(EndpointingSensitivity::Supershort);
4794 /// let x2 = StreamingRecognitionFeatures::new().set_endpointing_sensitivity(EndpointingSensitivity::Short);
4795 /// ```
4796 pub fn set_endpointing_sensitivity<
4797 T: std::convert::Into<crate::model::streaming_recognition_features::EndpointingSensitivity>,
4798 >(
4799 mut self,
4800 v: T,
4801 ) -> Self {
4802 self.endpointing_sensitivity = v.into();
4803 self
4804 }
4805}
4806
impl wkt::message::Message for StreamingRecognitionFeatures {
    // Identifies this message to the well-known-types machinery.
    fn typename() -> &'static str {
        // Fully qualified protobuf type name, in type-URL form.
        "type.googleapis.com/google.cloud.speech.v2.StreamingRecognitionFeatures"
    }
}
4812
4813/// Defines additional types related to [StreamingRecognitionFeatures].
4814pub mod streaming_recognition_features {
4815 #[allow(unused_imports)]
4816 use super::*;
4817
    /// Events that a timeout can be set on for voice activity.
    #[derive(Clone, Default, PartialEq)]
    #[non_exhaustive]
    pub struct VoiceActivityTimeout {
        /// Duration to timeout the stream if no speech begins. If this is set and
        /// no speech is detected in this duration at the start of the stream, the
        /// server will close the stream.
        pub speech_start_timeout: std::option::Option<wkt::Duration>,

        /// Duration to timeout the stream after speech ends. If this is set and no
        /// speech is detected in this duration after speech was detected, the server
        /// will close the stream.
        pub speech_end_timeout: std::option::Option<wkt::Duration>,

        // Catch-all for JSON properties this struct does not model; presumably
        // kept so unrecognized fields survive a deserialize/serialize round trip
        // (see the `deserialize`/`serialize` modules) — TODO(review): confirm.
        pub(crate) _unknown_fields: serde_json::Map<std::string::String, serde_json::Value>,
    }
4834
4835 impl VoiceActivityTimeout {
4836 pub fn new() -> Self {
4837 std::default::Default::default()
4838 }
4839
4840 /// Sets the value of [speech_start_timeout][crate::model::streaming_recognition_features::VoiceActivityTimeout::speech_start_timeout].
4841 ///
4842 /// # Example
4843 /// ```ignore,no_run
4844 /// # use google_cloud_speech_v2::model::streaming_recognition_features::VoiceActivityTimeout;
4845 /// use wkt::Duration;
4846 /// let x = VoiceActivityTimeout::new().set_speech_start_timeout(Duration::default()/* use setters */);
4847 /// ```
4848 pub fn set_speech_start_timeout<T>(mut self, v: T) -> Self
4849 where
4850 T: std::convert::Into<wkt::Duration>,
4851 {
4852 self.speech_start_timeout = std::option::Option::Some(v.into());
4853 self
4854 }
4855
4856 /// Sets or clears the value of [speech_start_timeout][crate::model::streaming_recognition_features::VoiceActivityTimeout::speech_start_timeout].
4857 ///
4858 /// # Example
4859 /// ```ignore,no_run
4860 /// # use google_cloud_speech_v2::model::streaming_recognition_features::VoiceActivityTimeout;
4861 /// use wkt::Duration;
4862 /// let x = VoiceActivityTimeout::new().set_or_clear_speech_start_timeout(Some(Duration::default()/* use setters */));
4863 /// let x = VoiceActivityTimeout::new().set_or_clear_speech_start_timeout(None::<Duration>);
4864 /// ```
4865 pub fn set_or_clear_speech_start_timeout<T>(mut self, v: std::option::Option<T>) -> Self
4866 where
4867 T: std::convert::Into<wkt::Duration>,
4868 {
4869 self.speech_start_timeout = v.map(|x| x.into());
4870 self
4871 }
4872
4873 /// Sets the value of [speech_end_timeout][crate::model::streaming_recognition_features::VoiceActivityTimeout::speech_end_timeout].
4874 ///
4875 /// # Example
4876 /// ```ignore,no_run
4877 /// # use google_cloud_speech_v2::model::streaming_recognition_features::VoiceActivityTimeout;
4878 /// use wkt::Duration;
4879 /// let x = VoiceActivityTimeout::new().set_speech_end_timeout(Duration::default()/* use setters */);
4880 /// ```
4881 pub fn set_speech_end_timeout<T>(mut self, v: T) -> Self
4882 where
4883 T: std::convert::Into<wkt::Duration>,
4884 {
4885 self.speech_end_timeout = std::option::Option::Some(v.into());
4886 self
4887 }
4888
4889 /// Sets or clears the value of [speech_end_timeout][crate::model::streaming_recognition_features::VoiceActivityTimeout::speech_end_timeout].
4890 ///
4891 /// # Example
4892 /// ```ignore,no_run
4893 /// # use google_cloud_speech_v2::model::streaming_recognition_features::VoiceActivityTimeout;
4894 /// use wkt::Duration;
4895 /// let x = VoiceActivityTimeout::new().set_or_clear_speech_end_timeout(Some(Duration::default()/* use setters */));
4896 /// let x = VoiceActivityTimeout::new().set_or_clear_speech_end_timeout(None::<Duration>);
4897 /// ```
4898 pub fn set_or_clear_speech_end_timeout<T>(mut self, v: std::option::Option<T>) -> Self
4899 where
4900 T: std::convert::Into<wkt::Duration>,
4901 {
4902 self.speech_end_timeout = v.map(|x| x.into());
4903 self
4904 }
4905 }
4906
4907 impl wkt::message::Message for VoiceActivityTimeout {
4908 fn typename() -> &'static str {
4909 "type.googleapis.com/google.cloud.speech.v2.StreamingRecognitionFeatures.VoiceActivityTimeout"
4910 }
4911 }
4912
    /// Endpointing sensitivity is intended for applications that want to minimize
    /// result latency, possibly at the expense of quality. Some utterances may be
    /// broken up into multiple fragments.
    ///
    /// # Working with unknown values
    ///
    /// This enum is defined as `#[non_exhaustive]` because Google Cloud may add
    /// additional enum variants at any time. Adding new variants is not considered
    /// a breaking change. Applications should write their code in anticipation of:
    ///
    /// - New values appearing in future releases of the client library, **and**
    /// - New values received dynamically, without application changes.
    ///
    /// Please consult the [Working with enums] section in the user guide for some
    /// guidelines.
    ///
    /// [Working with enums]: https://google-cloud-rust.github.io/working_with_enums.html
    #[derive(Clone, Debug, PartialEq)]
    #[non_exhaustive]
    pub enum EndpointingSensitivity {
        /// If no value is specified, the values for
        /// ENDPOINTING_SENSITIVITY_STANDARD will be used.
        Unspecified,
        /// Standard sensitivity, no optimization for latency.
        Standard,
        /// Super short sensitivity, optimized for super short utterances like single
        /// words ("Yes", "No", "Hello", etc.) or a single phrase, command or short
        /// query (e.g. "check balance", "five dollars", "call Mom").
        Supershort,
        /// Short sensitivity, optimized for short utterances like single sentences.
        /// (e.g. "Remind me to call the dentist tomorrow morning at nine",
        /// "Navigate to the nearest coffee shop that is currently open")
        Short,
        /// If set, the enum was initialized with an unknown value.
        ///
        /// This variant preserves values (integer or string) received over the
        /// wire that this version of the client library does not recognize.
        /// Applications can examine the value using [EndpointingSensitivity::value] or
        /// [EndpointingSensitivity::name].
        UnknownValue(endpointing_sensitivity::UnknownValue),
    }
4952
    // Supporting types for [EndpointingSensitivity]; hidden because applications
    // are not expected to construct these values directly.
    #[doc(hidden)]
    pub mod endpointing_sensitivity {
        #[allow(unused_imports)]
        use super::*;
        // Opaque wrapper around an enum value (integer or string form) that this
        // version of the client library does not recognize.
        #[derive(Clone, Debug, PartialEq)]
        pub struct UnknownValue(pub(crate) wkt::internal::UnknownEnumValue);
    }
4960
4961 impl EndpointingSensitivity {
4962 /// Gets the enum value.
4963 ///
4964 /// Returns `None` if the enum contains an unknown value deserialized from
4965 /// the string representation of enums.
4966 pub fn value(&self) -> std::option::Option<i32> {
4967 match self {
4968 Self::Unspecified => std::option::Option::Some(0),
4969 Self::Standard => std::option::Option::Some(1),
4970 Self::Supershort => std::option::Option::Some(2),
4971 Self::Short => std::option::Option::Some(3),
4972 Self::UnknownValue(u) => u.0.value(),
4973 }
4974 }
4975
4976 /// Gets the enum value as a string.
4977 ///
4978 /// Returns `None` if the enum contains an unknown value deserialized from
4979 /// the integer representation of enums.
4980 pub fn name(&self) -> std::option::Option<&str> {
4981 match self {
4982 Self::Unspecified => {
4983 std::option::Option::Some("ENDPOINTING_SENSITIVITY_UNSPECIFIED")
4984 }
4985 Self::Standard => std::option::Option::Some("ENDPOINTING_SENSITIVITY_STANDARD"),
4986 Self::Supershort => std::option::Option::Some("ENDPOINTING_SENSITIVITY_SUPERSHORT"),
4987 Self::Short => std::option::Option::Some("ENDPOINTING_SENSITIVITY_SHORT"),
4988 Self::UnknownValue(u) => u.0.name(),
4989 }
4990 }
4991 }
4992
4993 impl std::default::Default for EndpointingSensitivity {
4994 fn default() -> Self {
4995 use std::convert::From;
4996 Self::from(0)
4997 }
4998 }
4999
5000 impl std::fmt::Display for EndpointingSensitivity {
5001 fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::result::Result<(), std::fmt::Error> {
5002 wkt::internal::display_enum(f, self.name(), self.value())
5003 }
5004 }
5005
5006 impl std::convert::From<i32> for EndpointingSensitivity {
5007 fn from(value: i32) -> Self {
5008 match value {
5009 0 => Self::Unspecified,
5010 1 => Self::Standard,
5011 2 => Self::Supershort,
5012 3 => Self::Short,
5013 _ => Self::UnknownValue(endpointing_sensitivity::UnknownValue(
5014 wkt::internal::UnknownEnumValue::Integer(value),
5015 )),
5016 }
5017 }
5018 }
5019
5020 impl std::convert::From<&str> for EndpointingSensitivity {
5021 fn from(value: &str) -> Self {
5022 use std::string::ToString;
5023 match value {
5024 "ENDPOINTING_SENSITIVITY_UNSPECIFIED" => Self::Unspecified,
5025 "ENDPOINTING_SENSITIVITY_STANDARD" => Self::Standard,
5026 "ENDPOINTING_SENSITIVITY_SUPERSHORT" => Self::Supershort,
5027 "ENDPOINTING_SENSITIVITY_SHORT" => Self::Short,
5028 _ => Self::UnknownValue(endpointing_sensitivity::UnknownValue(
5029 wkt::internal::UnknownEnumValue::String(value.to_string()),
5030 )),
5031 }
5032 }
5033 }
5034
5035 impl serde::ser::Serialize for EndpointingSensitivity {
5036 fn serialize<S>(&self, serializer: S) -> std::result::Result<S::Ok, S::Error>
5037 where
5038 S: serde::Serializer,
5039 {
5040 match self {
5041 Self::Unspecified => serializer.serialize_i32(0),
5042 Self::Standard => serializer.serialize_i32(1),
5043 Self::Supershort => serializer.serialize_i32(2),
5044 Self::Short => serializer.serialize_i32(3),
5045 Self::UnknownValue(u) => u.0.serialize(serializer),
5046 }
5047 }
5048 }
5049
5050 impl<'de> serde::de::Deserialize<'de> for EndpointingSensitivity {
5051 fn deserialize<D>(deserializer: D) -> std::result::Result<Self, D::Error>
5052 where
5053 D: serde::Deserializer<'de>,
5054 {
5055 deserializer.deserialize_any(wkt::internal::EnumVisitor::<EndpointingSensitivity>::new(
5056 ".google.cloud.speech.v2.StreamingRecognitionFeatures.EndpointingSensitivity",
5057 ))
5058 }
5059 }
5060}
5061
/// Provides configuration information for the StreamingRecognize request.
#[derive(Clone, Default, PartialEq)]
#[non_exhaustive]
pub struct StreamingRecognitionConfig {
    /// Required. Features and audio metadata to use for the Automatic Speech
    /// Recognition. This field in combination with the
    /// [config_mask][google.cloud.speech.v2.StreamingRecognitionConfig.config_mask]
    /// field can be used to override parts of the
    /// [default_recognition_config][google.cloud.speech.v2.Recognizer.default_recognition_config]
    /// of the Recognizer resource.
    ///
    /// [google.cloud.speech.v2.Recognizer.default_recognition_config]: crate::model::Recognizer::default_recognition_config
    /// [google.cloud.speech.v2.StreamingRecognitionConfig.config_mask]: crate::model::StreamingRecognitionConfig::config_mask
    pub config: std::option::Option<crate::model::RecognitionConfig>,

    /// The list of fields in
    /// [config][google.cloud.speech.v2.StreamingRecognitionConfig.config] that
    /// override the values in the
    /// [default_recognition_config][google.cloud.speech.v2.Recognizer.default_recognition_config]
    /// of the recognizer during this recognition request. If no mask is provided,
    /// all non-default valued fields in
    /// [config][google.cloud.speech.v2.StreamingRecognitionConfig.config] override
    /// the values in the Recognizer for this recognition request. If a mask is
    /// provided, only the fields listed in the mask override the config in the
    /// Recognizer for this recognition request. If a wildcard (`*`) is provided,
    /// [config][google.cloud.speech.v2.StreamingRecognitionConfig.config]
    /// completely overrides and replaces the config in the recognizer for this
    /// recognition request.
    ///
    /// [google.cloud.speech.v2.Recognizer.default_recognition_config]: crate::model::Recognizer::default_recognition_config
    /// [google.cloud.speech.v2.StreamingRecognitionConfig.config]: crate::model::StreamingRecognitionConfig::config
    pub config_mask: std::option::Option<wkt::FieldMask>,

    /// Speech recognition features to enable specific to streaming audio
    /// recognition requests.
    pub streaming_features: std::option::Option<crate::model::StreamingRecognitionFeatures>,

    // Fields received over the wire that this version of the message does not
    // model, retained so they survive a deserialize/serialize round trip.
    pub(crate) _unknown_fields: serde_json::Map<std::string::String, serde_json::Value>,
}
5101
5102impl StreamingRecognitionConfig {
5103 pub fn new() -> Self {
5104 std::default::Default::default()
5105 }
5106
5107 /// Sets the value of [config][crate::model::StreamingRecognitionConfig::config].
5108 ///
5109 /// # Example
5110 /// ```ignore,no_run
5111 /// # use google_cloud_speech_v2::model::StreamingRecognitionConfig;
5112 /// use google_cloud_speech_v2::model::RecognitionConfig;
5113 /// let x = StreamingRecognitionConfig::new().set_config(RecognitionConfig::default()/* use setters */);
5114 /// ```
5115 pub fn set_config<T>(mut self, v: T) -> Self
5116 where
5117 T: std::convert::Into<crate::model::RecognitionConfig>,
5118 {
5119 self.config = std::option::Option::Some(v.into());
5120 self
5121 }
5122
5123 /// Sets or clears the value of [config][crate::model::StreamingRecognitionConfig::config].
5124 ///
5125 /// # Example
5126 /// ```ignore,no_run
5127 /// # use google_cloud_speech_v2::model::StreamingRecognitionConfig;
5128 /// use google_cloud_speech_v2::model::RecognitionConfig;
5129 /// let x = StreamingRecognitionConfig::new().set_or_clear_config(Some(RecognitionConfig::default()/* use setters */));
5130 /// let x = StreamingRecognitionConfig::new().set_or_clear_config(None::<RecognitionConfig>);
5131 /// ```
5132 pub fn set_or_clear_config<T>(mut self, v: std::option::Option<T>) -> Self
5133 where
5134 T: std::convert::Into<crate::model::RecognitionConfig>,
5135 {
5136 self.config = v.map(|x| x.into());
5137 self
5138 }
5139
5140 /// Sets the value of [config_mask][crate::model::StreamingRecognitionConfig::config_mask].
5141 ///
5142 /// # Example
5143 /// ```ignore,no_run
5144 /// # use google_cloud_speech_v2::model::StreamingRecognitionConfig;
5145 /// use wkt::FieldMask;
5146 /// let x = StreamingRecognitionConfig::new().set_config_mask(FieldMask::default()/* use setters */);
5147 /// ```
5148 pub fn set_config_mask<T>(mut self, v: T) -> Self
5149 where
5150 T: std::convert::Into<wkt::FieldMask>,
5151 {
5152 self.config_mask = std::option::Option::Some(v.into());
5153 self
5154 }
5155
5156 /// Sets or clears the value of [config_mask][crate::model::StreamingRecognitionConfig::config_mask].
5157 ///
5158 /// # Example
5159 /// ```ignore,no_run
5160 /// # use google_cloud_speech_v2::model::StreamingRecognitionConfig;
5161 /// use wkt::FieldMask;
5162 /// let x = StreamingRecognitionConfig::new().set_or_clear_config_mask(Some(FieldMask::default()/* use setters */));
5163 /// let x = StreamingRecognitionConfig::new().set_or_clear_config_mask(None::<FieldMask>);
5164 /// ```
5165 pub fn set_or_clear_config_mask<T>(mut self, v: std::option::Option<T>) -> Self
5166 where
5167 T: std::convert::Into<wkt::FieldMask>,
5168 {
5169 self.config_mask = v.map(|x| x.into());
5170 self
5171 }
5172
5173 /// Sets the value of [streaming_features][crate::model::StreamingRecognitionConfig::streaming_features].
5174 ///
5175 /// # Example
5176 /// ```ignore,no_run
5177 /// # use google_cloud_speech_v2::model::StreamingRecognitionConfig;
5178 /// use google_cloud_speech_v2::model::StreamingRecognitionFeatures;
5179 /// let x = StreamingRecognitionConfig::new().set_streaming_features(StreamingRecognitionFeatures::default()/* use setters */);
5180 /// ```
5181 pub fn set_streaming_features<T>(mut self, v: T) -> Self
5182 where
5183 T: std::convert::Into<crate::model::StreamingRecognitionFeatures>,
5184 {
5185 self.streaming_features = std::option::Option::Some(v.into());
5186 self
5187 }
5188
5189 /// Sets or clears the value of [streaming_features][crate::model::StreamingRecognitionConfig::streaming_features].
5190 ///
5191 /// # Example
5192 /// ```ignore,no_run
5193 /// # use google_cloud_speech_v2::model::StreamingRecognitionConfig;
5194 /// use google_cloud_speech_v2::model::StreamingRecognitionFeatures;
5195 /// let x = StreamingRecognitionConfig::new().set_or_clear_streaming_features(Some(StreamingRecognitionFeatures::default()/* use setters */));
5196 /// let x = StreamingRecognitionConfig::new().set_or_clear_streaming_features(None::<StreamingRecognitionFeatures>);
5197 /// ```
5198 pub fn set_or_clear_streaming_features<T>(mut self, v: std::option::Option<T>) -> Self
5199 where
5200 T: std::convert::Into<crate::model::StreamingRecognitionFeatures>,
5201 {
5202 self.streaming_features = v.map(|x| x.into());
5203 self
5204 }
5205}
5206
5207impl wkt::message::Message for StreamingRecognitionConfig {
5208 fn typename() -> &'static str {
5209 "type.googleapis.com/google.cloud.speech.v2.StreamingRecognitionConfig"
5210 }
5211}
5212
/// Request message for the
/// [StreamingRecognize][google.cloud.speech.v2.Speech.StreamingRecognize]
/// method. Multiple
/// [StreamingRecognizeRequest][google.cloud.speech.v2.StreamingRecognizeRequest]
/// messages are sent in one call.
///
/// If the [Recognizer][google.cloud.speech.v2.Recognizer] referenced by
/// [recognizer][google.cloud.speech.v2.StreamingRecognizeRequest.recognizer]
/// contains a fully specified request configuration then the stream may only
/// contain messages with only
/// [audio][google.cloud.speech.v2.StreamingRecognizeRequest.audio] set.
///
/// Otherwise the first message must contain a
/// [recognizer][google.cloud.speech.v2.StreamingRecognizeRequest.recognizer] and
/// a
/// [streaming_config][google.cloud.speech.v2.StreamingRecognizeRequest.streaming_config]
/// message that together fully specify the request configuration and must not
/// contain [audio][google.cloud.speech.v2.StreamingRecognizeRequest.audio]. All
/// subsequent messages must only have
/// [audio][google.cloud.speech.v2.StreamingRecognizeRequest.audio] set.
///
/// [google.cloud.speech.v2.Recognizer]: crate::model::Recognizer
/// [google.cloud.speech.v2.StreamingRecognizeRequest]: crate::model::StreamingRecognizeRequest
/// [google.cloud.speech.v2.StreamingRecognizeRequest.audio]: crate::model::StreamingRecognizeRequest::streaming_request
/// [google.cloud.speech.v2.StreamingRecognizeRequest.recognizer]: crate::model::StreamingRecognizeRequest::recognizer
/// [google.cloud.speech.v2.StreamingRecognizeRequest.streaming_config]: crate::model::StreamingRecognizeRequest::streaming_request
#[derive(Clone, Default, PartialEq)]
#[non_exhaustive]
pub struct StreamingRecognizeRequest {
    /// Required. The name of the Recognizer to use during recognition. The
    /// expected format is
    /// `projects/{project}/locations/{location}/recognizers/{recognizer}`. The
    /// {recognizer} segment may be set to `_` to use an empty implicit Recognizer.
    pub recognizer: std::string::String,

    /// The request payload: either a `StreamingConfig` or inline `Audio` bytes
    /// (a protobuf `oneof`). See
    /// [StreamingRequest][crate::model::streaming_recognize_request::StreamingRequest]
    /// for the possible branches.
    pub streaming_request:
        std::option::Option<crate::model::streaming_recognize_request::StreamingRequest>,

    // Fields received over the wire that this version of the message does not
    // model, retained so they survive a deserialize/serialize round trip.
    pub(crate) _unknown_fields: serde_json::Map<std::string::String, serde_json::Value>,
}
5253
5254impl StreamingRecognizeRequest {
5255 pub fn new() -> Self {
5256 std::default::Default::default()
5257 }
5258
5259 /// Sets the value of [recognizer][crate::model::StreamingRecognizeRequest::recognizer].
5260 ///
5261 /// # Example
5262 /// ```ignore,no_run
5263 /// # use google_cloud_speech_v2::model::StreamingRecognizeRequest;
5264 /// let x = StreamingRecognizeRequest::new().set_recognizer("example");
5265 /// ```
5266 pub fn set_recognizer<T: std::convert::Into<std::string::String>>(mut self, v: T) -> Self {
5267 self.recognizer = v.into();
5268 self
5269 }
5270
5271 /// Sets the value of [streaming_request][crate::model::StreamingRecognizeRequest::streaming_request].
5272 ///
5273 /// Note that all the setters affecting `streaming_request` are mutually
5274 /// exclusive.
5275 ///
5276 /// # Example
5277 /// ```ignore,no_run
5278 /// # use google_cloud_speech_v2::model::StreamingRecognizeRequest;
5279 /// use google_cloud_speech_v2::model::streaming_recognize_request::StreamingRequest;
5280 /// let x = StreamingRecognizeRequest::new().set_streaming_request(Some(StreamingRequest::Audio(bytes::Bytes::from_static(b"example"))));
5281 /// ```
5282 pub fn set_streaming_request<
5283 T: std::convert::Into<
5284 std::option::Option<crate::model::streaming_recognize_request::StreamingRequest>,
5285 >,
5286 >(
5287 mut self,
5288 v: T,
5289 ) -> Self {
5290 self.streaming_request = v.into();
5291 self
5292 }
5293
5294 /// The value of [streaming_request][crate::model::StreamingRecognizeRequest::streaming_request]
5295 /// if it holds a `StreamingConfig`, `None` if the field is not set or
5296 /// holds a different branch.
5297 pub fn streaming_config(
5298 &self,
5299 ) -> std::option::Option<&std::boxed::Box<crate::model::StreamingRecognitionConfig>> {
5300 #[allow(unreachable_patterns)]
5301 self.streaming_request.as_ref().and_then(|v| match v {
5302 crate::model::streaming_recognize_request::StreamingRequest::StreamingConfig(v) => {
5303 std::option::Option::Some(v)
5304 }
5305 _ => std::option::Option::None,
5306 })
5307 }
5308
5309 /// Sets the value of [streaming_request][crate::model::StreamingRecognizeRequest::streaming_request]
5310 /// to hold a `StreamingConfig`.
5311 ///
5312 /// Note that all the setters affecting `streaming_request` are
5313 /// mutually exclusive.
5314 ///
5315 /// # Example
5316 /// ```ignore,no_run
5317 /// # use google_cloud_speech_v2::model::StreamingRecognizeRequest;
5318 /// use google_cloud_speech_v2::model::StreamingRecognitionConfig;
5319 /// let x = StreamingRecognizeRequest::new().set_streaming_config(StreamingRecognitionConfig::default()/* use setters */);
5320 /// assert!(x.streaming_config().is_some());
5321 /// assert!(x.audio().is_none());
5322 /// ```
5323 pub fn set_streaming_config<
5324 T: std::convert::Into<std::boxed::Box<crate::model::StreamingRecognitionConfig>>,
5325 >(
5326 mut self,
5327 v: T,
5328 ) -> Self {
5329 self.streaming_request = std::option::Option::Some(
5330 crate::model::streaming_recognize_request::StreamingRequest::StreamingConfig(v.into()),
5331 );
5332 self
5333 }
5334
5335 /// The value of [streaming_request][crate::model::StreamingRecognizeRequest::streaming_request]
5336 /// if it holds a `Audio`, `None` if the field is not set or
5337 /// holds a different branch.
5338 pub fn audio(&self) -> std::option::Option<&::bytes::Bytes> {
5339 #[allow(unreachable_patterns)]
5340 self.streaming_request.as_ref().and_then(|v| match v {
5341 crate::model::streaming_recognize_request::StreamingRequest::Audio(v) => {
5342 std::option::Option::Some(v)
5343 }
5344 _ => std::option::Option::None,
5345 })
5346 }
5347
5348 /// Sets the value of [streaming_request][crate::model::StreamingRecognizeRequest::streaming_request]
5349 /// to hold a `Audio`.
5350 ///
5351 /// Note that all the setters affecting `streaming_request` are
5352 /// mutually exclusive.
5353 ///
5354 /// # Example
5355 /// ```ignore,no_run
5356 /// # use google_cloud_speech_v2::model::StreamingRecognizeRequest;
5357 /// let x = StreamingRecognizeRequest::new().set_audio(bytes::Bytes::from_static(b"example"));
5358 /// assert!(x.audio().is_some());
5359 /// assert!(x.streaming_config().is_none());
5360 /// ```
5361 pub fn set_audio<T: std::convert::Into<::bytes::Bytes>>(mut self, v: T) -> Self {
5362 self.streaming_request = std::option::Option::Some(
5363 crate::model::streaming_recognize_request::StreamingRequest::Audio(v.into()),
5364 );
5365 self
5366 }
5367}
5368
5369impl wkt::message::Message for StreamingRecognizeRequest {
5370 fn typename() -> &'static str {
5371 "type.googleapis.com/google.cloud.speech.v2.StreamingRecognizeRequest"
5372 }
5373}
5374
/// Defines additional types related to [StreamingRecognizeRequest].
pub mod streaming_recognize_request {
    #[allow(unused_imports)]
    use super::*;

    /// The `streaming_request` oneof of [StreamingRecognizeRequest]: exactly
    /// one branch may be set on a given request message.
    #[derive(Clone, Debug, PartialEq)]
    #[non_exhaustive]
    pub enum StreamingRequest {
        /// StreamingRecognitionConfig to be used in this recognition attempt.
        /// If provided, it will override the default RecognitionConfig stored in the
        /// Recognizer.
        StreamingConfig(std::boxed::Box<crate::model::StreamingRecognitionConfig>),
        /// Inline audio bytes to be Recognized.
        /// Maximum size for this field is 15 KB per request.
        Audio(::bytes::Bytes),
    }
}
5392
/// Request message for the
/// [BatchRecognize][google.cloud.speech.v2.Speech.BatchRecognize]
/// method.
///
/// [google.cloud.speech.v2.Speech.BatchRecognize]: crate::client::Speech::batch_recognize
#[derive(Clone, Default, PartialEq)]
#[non_exhaustive]
pub struct BatchRecognizeRequest {
    /// Required. The name of the Recognizer to use during recognition. The
    /// expected format is
    /// `projects/{project}/locations/{location}/recognizers/{recognizer}`. The
    /// {recognizer} segment may be set to `_` to use an empty implicit Recognizer.
    pub recognizer: std::string::String,

    /// Features and audio metadata to use for the Automatic Speech Recognition.
    /// This field in combination with the
    /// [config_mask][google.cloud.speech.v2.BatchRecognizeRequest.config_mask]
    /// field can be used to override parts of the
    /// [default_recognition_config][google.cloud.speech.v2.Recognizer.default_recognition_config]
    /// of the Recognizer resource.
    ///
    /// [google.cloud.speech.v2.BatchRecognizeRequest.config_mask]: crate::model::BatchRecognizeRequest::config_mask
    /// [google.cloud.speech.v2.Recognizer.default_recognition_config]: crate::model::Recognizer::default_recognition_config
    pub config: std::option::Option<crate::model::RecognitionConfig>,

    /// The list of fields in
    /// [config][google.cloud.speech.v2.BatchRecognizeRequest.config] that override
    /// the values in the
    /// [default_recognition_config][google.cloud.speech.v2.Recognizer.default_recognition_config]
    /// of the recognizer during this recognition request. If no mask is provided,
    /// all given fields in
    /// [config][google.cloud.speech.v2.BatchRecognizeRequest.config] override the
    /// values in the recognizer for this recognition request. If a mask is
    /// provided, only the fields listed in the mask override the config in the
    /// recognizer for this recognition request. If a wildcard (`*`) is provided,
    /// [config][google.cloud.speech.v2.BatchRecognizeRequest.config] completely
    /// overrides and replaces the config in the recognizer for this recognition
    /// request.
    ///
    /// [google.cloud.speech.v2.BatchRecognizeRequest.config]: crate::model::BatchRecognizeRequest::config
    /// [google.cloud.speech.v2.Recognizer.default_recognition_config]: crate::model::Recognizer::default_recognition_config
    pub config_mask: std::option::Option<wkt::FieldMask>,

    /// Audio files with file metadata for ASR.
    /// The maximum number of files allowed to be specified is 15.
    pub files: std::vec::Vec<crate::model::BatchRecognizeFileMetadata>,

    /// Configuration options for where to output the transcripts of each file.
    pub recognition_output_config: std::option::Option<crate::model::RecognitionOutputConfig>,

    /// Processing strategy to use for this request.
    pub processing_strategy: crate::model::batch_recognize_request::ProcessingStrategy,

    // Fields received over the wire that this version of the message does not
    // model, retained so they survive a deserialize/serialize round trip.
    pub(crate) _unknown_fields: serde_json::Map<std::string::String, serde_json::Value>,
}
5448
5449impl BatchRecognizeRequest {
5450 pub fn new() -> Self {
5451 std::default::Default::default()
5452 }
5453
5454 /// Sets the value of [recognizer][crate::model::BatchRecognizeRequest::recognizer].
5455 ///
5456 /// # Example
5457 /// ```ignore,no_run
5458 /// # use google_cloud_speech_v2::model::BatchRecognizeRequest;
5459 /// let x = BatchRecognizeRequest::new().set_recognizer("example");
5460 /// ```
5461 pub fn set_recognizer<T: std::convert::Into<std::string::String>>(mut self, v: T) -> Self {
5462 self.recognizer = v.into();
5463 self
5464 }
5465
5466 /// Sets the value of [config][crate::model::BatchRecognizeRequest::config].
5467 ///
5468 /// # Example
5469 /// ```ignore,no_run
5470 /// # use google_cloud_speech_v2::model::BatchRecognizeRequest;
5471 /// use google_cloud_speech_v2::model::RecognitionConfig;
5472 /// let x = BatchRecognizeRequest::new().set_config(RecognitionConfig::default()/* use setters */);
5473 /// ```
5474 pub fn set_config<T>(mut self, v: T) -> Self
5475 where
5476 T: std::convert::Into<crate::model::RecognitionConfig>,
5477 {
5478 self.config = std::option::Option::Some(v.into());
5479 self
5480 }
5481
5482 /// Sets or clears the value of [config][crate::model::BatchRecognizeRequest::config].
5483 ///
5484 /// # Example
5485 /// ```ignore,no_run
5486 /// # use google_cloud_speech_v2::model::BatchRecognizeRequest;
5487 /// use google_cloud_speech_v2::model::RecognitionConfig;
5488 /// let x = BatchRecognizeRequest::new().set_or_clear_config(Some(RecognitionConfig::default()/* use setters */));
5489 /// let x = BatchRecognizeRequest::new().set_or_clear_config(None::<RecognitionConfig>);
5490 /// ```
5491 pub fn set_or_clear_config<T>(mut self, v: std::option::Option<T>) -> Self
5492 where
5493 T: std::convert::Into<crate::model::RecognitionConfig>,
5494 {
5495 self.config = v.map(|x| x.into());
5496 self
5497 }
5498
5499 /// Sets the value of [config_mask][crate::model::BatchRecognizeRequest::config_mask].
5500 ///
5501 /// # Example
5502 /// ```ignore,no_run
5503 /// # use google_cloud_speech_v2::model::BatchRecognizeRequest;
5504 /// use wkt::FieldMask;
5505 /// let x = BatchRecognizeRequest::new().set_config_mask(FieldMask::default()/* use setters */);
5506 /// ```
5507 pub fn set_config_mask<T>(mut self, v: T) -> Self
5508 where
5509 T: std::convert::Into<wkt::FieldMask>,
5510 {
5511 self.config_mask = std::option::Option::Some(v.into());
5512 self
5513 }
5514
5515 /// Sets or clears the value of [config_mask][crate::model::BatchRecognizeRequest::config_mask].
5516 ///
5517 /// # Example
5518 /// ```ignore,no_run
5519 /// # use google_cloud_speech_v2::model::BatchRecognizeRequest;
5520 /// use wkt::FieldMask;
5521 /// let x = BatchRecognizeRequest::new().set_or_clear_config_mask(Some(FieldMask::default()/* use setters */));
5522 /// let x = BatchRecognizeRequest::new().set_or_clear_config_mask(None::<FieldMask>);
5523 /// ```
5524 pub fn set_or_clear_config_mask<T>(mut self, v: std::option::Option<T>) -> Self
5525 where
5526 T: std::convert::Into<wkt::FieldMask>,
5527 {
5528 self.config_mask = v.map(|x| x.into());
5529 self
5530 }
5531
5532 /// Sets the value of [files][crate::model::BatchRecognizeRequest::files].
5533 ///
5534 /// # Example
5535 /// ```ignore,no_run
5536 /// # use google_cloud_speech_v2::model::BatchRecognizeRequest;
5537 /// use google_cloud_speech_v2::model::BatchRecognizeFileMetadata;
5538 /// let x = BatchRecognizeRequest::new()
5539 /// .set_files([
5540 /// BatchRecognizeFileMetadata::default()/* use setters */,
5541 /// BatchRecognizeFileMetadata::default()/* use (different) setters */,
5542 /// ]);
5543 /// ```
5544 pub fn set_files<T, V>(mut self, v: T) -> Self
5545 where
5546 T: std::iter::IntoIterator<Item = V>,
5547 V: std::convert::Into<crate::model::BatchRecognizeFileMetadata>,
5548 {
5549 use std::iter::Iterator;
5550 self.files = v.into_iter().map(|i| i.into()).collect();
5551 self
5552 }
5553
5554 /// Sets the value of [recognition_output_config][crate::model::BatchRecognizeRequest::recognition_output_config].
5555 ///
5556 /// # Example
5557 /// ```ignore,no_run
5558 /// # use google_cloud_speech_v2::model::BatchRecognizeRequest;
5559 /// use google_cloud_speech_v2::model::RecognitionOutputConfig;
5560 /// let x = BatchRecognizeRequest::new().set_recognition_output_config(RecognitionOutputConfig::default()/* use setters */);
5561 /// ```
5562 pub fn set_recognition_output_config<T>(mut self, v: T) -> Self
5563 where
5564 T: std::convert::Into<crate::model::RecognitionOutputConfig>,
5565 {
5566 self.recognition_output_config = std::option::Option::Some(v.into());
5567 self
5568 }
5569
5570 /// Sets or clears the value of [recognition_output_config][crate::model::BatchRecognizeRequest::recognition_output_config].
5571 ///
5572 /// # Example
5573 /// ```ignore,no_run
5574 /// # use google_cloud_speech_v2::model::BatchRecognizeRequest;
5575 /// use google_cloud_speech_v2::model::RecognitionOutputConfig;
5576 /// let x = BatchRecognizeRequest::new().set_or_clear_recognition_output_config(Some(RecognitionOutputConfig::default()/* use setters */));
5577 /// let x = BatchRecognizeRequest::new().set_or_clear_recognition_output_config(None::<RecognitionOutputConfig>);
5578 /// ```
5579 pub fn set_or_clear_recognition_output_config<T>(mut self, v: std::option::Option<T>) -> Self
5580 where
5581 T: std::convert::Into<crate::model::RecognitionOutputConfig>,
5582 {
5583 self.recognition_output_config = v.map(|x| x.into());
5584 self
5585 }
5586
5587 /// Sets the value of [processing_strategy][crate::model::BatchRecognizeRequest::processing_strategy].
5588 ///
5589 /// # Example
5590 /// ```ignore,no_run
5591 /// # use google_cloud_speech_v2::model::BatchRecognizeRequest;
5592 /// use google_cloud_speech_v2::model::batch_recognize_request::ProcessingStrategy;
5593 /// let x0 = BatchRecognizeRequest::new().set_processing_strategy(ProcessingStrategy::DynamicBatching);
5594 /// ```
5595 pub fn set_processing_strategy<
5596 T: std::convert::Into<crate::model::batch_recognize_request::ProcessingStrategy>,
5597 >(
5598 mut self,
5599 v: T,
5600 ) -> Self {
5601 self.processing_strategy = v.into();
5602 self
5603 }
5604}
5605
5606impl wkt::message::Message for BatchRecognizeRequest {
5607 fn typename() -> &'static str {
5608 "type.googleapis.com/google.cloud.speech.v2.BatchRecognizeRequest"
5609 }
5610}
5611
5612/// Defines additional types related to [BatchRecognizeRequest].
5613pub mod batch_recognize_request {
5614 #[allow(unused_imports)]
5615 use super::*;
5616
    /// Possible processing strategies for batch requests.
    ///
    /// # Working with unknown values
    ///
    /// This enum is defined as `#[non_exhaustive]` because Google Cloud may add
    /// additional enum variants at any time. Adding new variants is not considered
    /// a breaking change. Applications should write their code in anticipation of:
    ///
    /// - New values appearing in future releases of the client library, **and**
    /// - New values received dynamically, without application changes.
    ///
    /// Please consult the [Working with enums] section in the user guide for some
    /// guidelines.
    ///
    /// [Working with enums]: https://google-cloud-rust.github.io/working_with_enums.html
    #[derive(Clone, Debug, PartialEq)]
    #[non_exhaustive]
    pub enum ProcessingStrategy {
        /// Default value for the processing strategy. The request is processed as
        /// soon as it's received.
        Unspecified,
        /// If selected, processes the request during lower utilization periods for a
        /// price discount. The request is fulfilled within 24 hours.
        DynamicBatching,
        /// If set, the enum was initialized with an unknown value.
        ///
        /// This variant preserves values (integer or string) received over the
        /// wire that this version of the client library does not recognize.
        /// Applications can examine the value using [ProcessingStrategy::value] or
        /// [ProcessingStrategy::name].
        UnknownValue(processing_strategy::UnknownValue),
    }
5647
5648 #[doc(hidden)]
5649 pub mod processing_strategy {
5650 #[allow(unused_imports)]
5651 use super::*;
5652 #[derive(Clone, Debug, PartialEq)]
5653 pub struct UnknownValue(pub(crate) wkt::internal::UnknownEnumValue);
5654 }
5655
5656 impl ProcessingStrategy {
5657 /// Gets the enum value.
5658 ///
5659 /// Returns `None` if the enum contains an unknown value deserialized from
5660 /// the string representation of enums.
5661 pub fn value(&self) -> std::option::Option<i32> {
5662 match self {
5663 Self::Unspecified => std::option::Option::Some(0),
5664 Self::DynamicBatching => std::option::Option::Some(1),
5665 Self::UnknownValue(u) => u.0.value(),
5666 }
5667 }
5668
5669 /// Gets the enum value as a string.
5670 ///
5671 /// Returns `None` if the enum contains an unknown value deserialized from
5672 /// the integer representation of enums.
5673 pub fn name(&self) -> std::option::Option<&str> {
5674 match self {
5675 Self::Unspecified => std::option::Option::Some("PROCESSING_STRATEGY_UNSPECIFIED"),
5676 Self::DynamicBatching => std::option::Option::Some("DYNAMIC_BATCHING"),
5677 Self::UnknownValue(u) => u.0.name(),
5678 }
5679 }
5680 }
5681
5682 impl std::default::Default for ProcessingStrategy {
5683 fn default() -> Self {
5684 use std::convert::From;
5685 Self::from(0)
5686 }
5687 }
5688
5689 impl std::fmt::Display for ProcessingStrategy {
5690 fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::result::Result<(), std::fmt::Error> {
5691 wkt::internal::display_enum(f, self.name(), self.value())
5692 }
5693 }
5694
5695 impl std::convert::From<i32> for ProcessingStrategy {
5696 fn from(value: i32) -> Self {
5697 match value {
5698 0 => Self::Unspecified,
5699 1 => Self::DynamicBatching,
5700 _ => Self::UnknownValue(processing_strategy::UnknownValue(
5701 wkt::internal::UnknownEnumValue::Integer(value),
5702 )),
5703 }
5704 }
5705 }
5706
5707 impl std::convert::From<&str> for ProcessingStrategy {
5708 fn from(value: &str) -> Self {
5709 use std::string::ToString;
5710 match value {
5711 "PROCESSING_STRATEGY_UNSPECIFIED" => Self::Unspecified,
5712 "DYNAMIC_BATCHING" => Self::DynamicBatching,
5713 _ => Self::UnknownValue(processing_strategy::UnknownValue(
5714 wkt::internal::UnknownEnumValue::String(value.to_string()),
5715 )),
5716 }
5717 }
5718 }
5719
5720 impl serde::ser::Serialize for ProcessingStrategy {
5721 fn serialize<S>(&self, serializer: S) -> std::result::Result<S::Ok, S::Error>
5722 where
5723 S: serde::Serializer,
5724 {
5725 match self {
5726 Self::Unspecified => serializer.serialize_i32(0),
5727 Self::DynamicBatching => serializer.serialize_i32(1),
5728 Self::UnknownValue(u) => u.0.serialize(serializer),
5729 }
5730 }
5731 }
5732
5733 impl<'de> serde::de::Deserialize<'de> for ProcessingStrategy {
5734 fn deserialize<D>(deserializer: D) -> std::result::Result<Self, D::Error>
5735 where
5736 D: serde::Deserializer<'de>,
5737 {
5738 deserializer.deserialize_any(wkt::internal::EnumVisitor::<ProcessingStrategy>::new(
5739 ".google.cloud.speech.v2.BatchRecognizeRequest.ProcessingStrategy",
5740 ))
5741 }
5742 }
5743}
5744
/// Output configurations for Cloud Storage.
#[derive(Clone, Default, PartialEq)]
#[non_exhaustive]
pub struct GcsOutputConfig {
    /// The Cloud Storage URI prefix with which recognition results will be
    /// written.
    pub uri: std::string::String,

    // Captures JSON fields not modeled by this client version; presumably
    // round-tripped by the sibling `serialize`/`deserialize` modules.
    pub(crate) _unknown_fields: serde_json::Map<std::string::String, serde_json::Value>,
}
5755
5756impl GcsOutputConfig {
5757 pub fn new() -> Self {
5758 std::default::Default::default()
5759 }
5760
5761 /// Sets the value of [uri][crate::model::GcsOutputConfig::uri].
5762 ///
5763 /// # Example
5764 /// ```ignore,no_run
5765 /// # use google_cloud_speech_v2::model::GcsOutputConfig;
5766 /// let x = GcsOutputConfig::new().set_uri("example");
5767 /// ```
5768 pub fn set_uri<T: std::convert::Into<std::string::String>>(mut self, v: T) -> Self {
5769 self.uri = v.into();
5770 self
5771 }
5772}
5773
// Associates this message with its fully-qualified protobuf type URL.
impl wkt::message::Message for GcsOutputConfig {
    fn typename() -> &'static str {
        "type.googleapis.com/google.cloud.speech.v2.GcsOutputConfig"
    }
}
5779
/// Output configurations for inline response.
#[derive(Clone, Default, PartialEq)]
#[non_exhaustive]
pub struct InlineOutputConfig {
    // Captures JSON fields not modeled by this client version; presumably
    // round-tripped by the sibling `serialize`/`deserialize` modules.
    pub(crate) _unknown_fields: serde_json::Map<std::string::String, serde_json::Value>,
}
5786
impl InlineOutputConfig {
    /// Creates a new, empty [InlineOutputConfig].
    pub fn new() -> Self {
        std::default::Default::default()
    }
}
5792
// Associates this message with its fully-qualified protobuf type URL.
impl wkt::message::Message for InlineOutputConfig {
    fn typename() -> &'static str {
        "type.googleapis.com/google.cloud.speech.v2.InlineOutputConfig"
    }
}
5798
/// Output configurations for serialized `BatchRecognizeResults` protos.
#[derive(Clone, Default, PartialEq)]
#[non_exhaustive]
pub struct NativeOutputFileFormatConfig {
    // Captures JSON fields not modeled by this client version; presumably
    // round-tripped by the sibling `serialize`/`deserialize` modules.
    pub(crate) _unknown_fields: serde_json::Map<std::string::String, serde_json::Value>,
}
5805
impl NativeOutputFileFormatConfig {
    /// Creates a new, empty [NativeOutputFileFormatConfig].
    pub fn new() -> Self {
        std::default::Default::default()
    }
}
5811
// Associates this message with its fully-qualified protobuf type URL.
impl wkt::message::Message for NativeOutputFileFormatConfig {
    fn typename() -> &'static str {
        "type.googleapis.com/google.cloud.speech.v2.NativeOutputFileFormatConfig"
    }
}
5817
/// Output configurations for [WebVTT](https://www.w3.org/TR/webvtt1/) formatted
/// subtitle file.
#[derive(Clone, Default, PartialEq)]
#[non_exhaustive]
pub struct VttOutputFileFormatConfig {
    // Captures JSON fields not modeled by this client version; presumably
    // round-tripped by the sibling `serialize`/`deserialize` modules.
    pub(crate) _unknown_fields: serde_json::Map<std::string::String, serde_json::Value>,
}
5825
impl VttOutputFileFormatConfig {
    /// Creates a new, empty [VttOutputFileFormatConfig].
    pub fn new() -> Self {
        std::default::Default::default()
    }
}
5831
// Associates this message with its fully-qualified protobuf type URL.
impl wkt::message::Message for VttOutputFileFormatConfig {
    fn typename() -> &'static str {
        "type.googleapis.com/google.cloud.speech.v2.VttOutputFileFormatConfig"
    }
}
5837
/// Output configurations for [SubRip
/// Text](https://www.matroska.org/technical/subtitles.html#srt-subtitles)
/// formatted subtitle file.
#[derive(Clone, Default, PartialEq)]
#[non_exhaustive]
pub struct SrtOutputFileFormatConfig {
    // Captures JSON fields not modeled by this client version; presumably
    // round-tripped by the sibling `serialize`/`deserialize` modules.
    pub(crate) _unknown_fields: serde_json::Map<std::string::String, serde_json::Value>,
}
5846
impl SrtOutputFileFormatConfig {
    /// Creates a new, empty [SrtOutputFileFormatConfig].
    pub fn new() -> Self {
        std::default::Default::default()
    }
}
5852
// Associates this message with its fully-qualified protobuf type URL.
impl wkt::message::Message for SrtOutputFileFormatConfig {
    fn typename() -> &'static str {
        "type.googleapis.com/google.cloud.speech.v2.SrtOutputFileFormatConfig"
    }
}
5858
/// Configuration for the format of the results stored to `output`.
#[derive(Clone, Default, PartialEq)]
#[non_exhaustive]
pub struct OutputFormatConfig {
    /// Configuration for the native output format. If this field is set or if no
    /// other output format field is set, then transcripts will be written to the
    /// sink in the native format.
    pub native: std::option::Option<crate::model::NativeOutputFileFormatConfig>,

    /// Configuration for the VTT output format. If this field is set, then
    /// transcripts will be written to the sink in the VTT format.
    pub vtt: std::option::Option<crate::model::VttOutputFileFormatConfig>,

    /// Configuration for the SRT output format. If this field is set, then
    /// transcripts will be written to the sink in the SRT format.
    pub srt: std::option::Option<crate::model::SrtOutputFileFormatConfig>,

    // Captures JSON fields not modeled by this client version; presumably
    // round-tripped by the sibling `serialize`/`deserialize` modules.
    pub(crate) _unknown_fields: serde_json::Map<std::string::String, serde_json::Value>,
}
5878
5879impl OutputFormatConfig {
5880 pub fn new() -> Self {
5881 std::default::Default::default()
5882 }
5883
5884 /// Sets the value of [native][crate::model::OutputFormatConfig::native].
5885 ///
5886 /// # Example
5887 /// ```ignore,no_run
5888 /// # use google_cloud_speech_v2::model::OutputFormatConfig;
5889 /// use google_cloud_speech_v2::model::NativeOutputFileFormatConfig;
5890 /// let x = OutputFormatConfig::new().set_native(NativeOutputFileFormatConfig::default()/* use setters */);
5891 /// ```
5892 pub fn set_native<T>(mut self, v: T) -> Self
5893 where
5894 T: std::convert::Into<crate::model::NativeOutputFileFormatConfig>,
5895 {
5896 self.native = std::option::Option::Some(v.into());
5897 self
5898 }
5899
5900 /// Sets or clears the value of [native][crate::model::OutputFormatConfig::native].
5901 ///
5902 /// # Example
5903 /// ```ignore,no_run
5904 /// # use google_cloud_speech_v2::model::OutputFormatConfig;
5905 /// use google_cloud_speech_v2::model::NativeOutputFileFormatConfig;
5906 /// let x = OutputFormatConfig::new().set_or_clear_native(Some(NativeOutputFileFormatConfig::default()/* use setters */));
5907 /// let x = OutputFormatConfig::new().set_or_clear_native(None::<NativeOutputFileFormatConfig>);
5908 /// ```
5909 pub fn set_or_clear_native<T>(mut self, v: std::option::Option<T>) -> Self
5910 where
5911 T: std::convert::Into<crate::model::NativeOutputFileFormatConfig>,
5912 {
5913 self.native = v.map(|x| x.into());
5914 self
5915 }
5916
5917 /// Sets the value of [vtt][crate::model::OutputFormatConfig::vtt].
5918 ///
5919 /// # Example
5920 /// ```ignore,no_run
5921 /// # use google_cloud_speech_v2::model::OutputFormatConfig;
5922 /// use google_cloud_speech_v2::model::VttOutputFileFormatConfig;
5923 /// let x = OutputFormatConfig::new().set_vtt(VttOutputFileFormatConfig::default()/* use setters */);
5924 /// ```
5925 pub fn set_vtt<T>(mut self, v: T) -> Self
5926 where
5927 T: std::convert::Into<crate::model::VttOutputFileFormatConfig>,
5928 {
5929 self.vtt = std::option::Option::Some(v.into());
5930 self
5931 }
5932
5933 /// Sets or clears the value of [vtt][crate::model::OutputFormatConfig::vtt].
5934 ///
5935 /// # Example
5936 /// ```ignore,no_run
5937 /// # use google_cloud_speech_v2::model::OutputFormatConfig;
5938 /// use google_cloud_speech_v2::model::VttOutputFileFormatConfig;
5939 /// let x = OutputFormatConfig::new().set_or_clear_vtt(Some(VttOutputFileFormatConfig::default()/* use setters */));
5940 /// let x = OutputFormatConfig::new().set_or_clear_vtt(None::<VttOutputFileFormatConfig>);
5941 /// ```
5942 pub fn set_or_clear_vtt<T>(mut self, v: std::option::Option<T>) -> Self
5943 where
5944 T: std::convert::Into<crate::model::VttOutputFileFormatConfig>,
5945 {
5946 self.vtt = v.map(|x| x.into());
5947 self
5948 }
5949
5950 /// Sets the value of [srt][crate::model::OutputFormatConfig::srt].
5951 ///
5952 /// # Example
5953 /// ```ignore,no_run
5954 /// # use google_cloud_speech_v2::model::OutputFormatConfig;
5955 /// use google_cloud_speech_v2::model::SrtOutputFileFormatConfig;
5956 /// let x = OutputFormatConfig::new().set_srt(SrtOutputFileFormatConfig::default()/* use setters */);
5957 /// ```
5958 pub fn set_srt<T>(mut self, v: T) -> Self
5959 where
5960 T: std::convert::Into<crate::model::SrtOutputFileFormatConfig>,
5961 {
5962 self.srt = std::option::Option::Some(v.into());
5963 self
5964 }
5965
5966 /// Sets or clears the value of [srt][crate::model::OutputFormatConfig::srt].
5967 ///
5968 /// # Example
5969 /// ```ignore,no_run
5970 /// # use google_cloud_speech_v2::model::OutputFormatConfig;
5971 /// use google_cloud_speech_v2::model::SrtOutputFileFormatConfig;
5972 /// let x = OutputFormatConfig::new().set_or_clear_srt(Some(SrtOutputFileFormatConfig::default()/* use setters */));
5973 /// let x = OutputFormatConfig::new().set_or_clear_srt(None::<SrtOutputFileFormatConfig>);
5974 /// ```
5975 pub fn set_or_clear_srt<T>(mut self, v: std::option::Option<T>) -> Self
5976 where
5977 T: std::convert::Into<crate::model::SrtOutputFileFormatConfig>,
5978 {
5979 self.srt = v.map(|x| x.into());
5980 self
5981 }
5982}
5983
// Associates this message with its fully-qualified protobuf type URL.
impl wkt::message::Message for OutputFormatConfig {
    fn typename() -> &'static str {
        "type.googleapis.com/google.cloud.speech.v2.OutputFormatConfig"
    }
}
5989
/// Configuration options for the output(s) of recognition.
#[derive(Clone, Default, PartialEq)]
#[non_exhaustive]
pub struct RecognitionOutputConfig {
    /// Optional. Configuration for the format of the results stored to `output`.
    /// If unspecified transcripts will be written in the `NATIVE` format only.
    pub output_format_config: std::option::Option<crate::model::OutputFormatConfig>,

    /// The destination of the results (a oneof); see
    /// `recognition_output_config::Output` for the available branches.
    pub output: std::option::Option<crate::model::recognition_output_config::Output>,

    // Captures JSON fields not modeled by this client version; presumably
    // round-tripped by the sibling `serialize`/`deserialize` modules.
    pub(crate) _unknown_fields: serde_json::Map<std::string::String, serde_json::Value>,
}
6002
6003impl RecognitionOutputConfig {
6004 pub fn new() -> Self {
6005 std::default::Default::default()
6006 }
6007
6008 /// Sets the value of [output_format_config][crate::model::RecognitionOutputConfig::output_format_config].
6009 ///
6010 /// # Example
6011 /// ```ignore,no_run
6012 /// # use google_cloud_speech_v2::model::RecognitionOutputConfig;
6013 /// use google_cloud_speech_v2::model::OutputFormatConfig;
6014 /// let x = RecognitionOutputConfig::new().set_output_format_config(OutputFormatConfig::default()/* use setters */);
6015 /// ```
6016 pub fn set_output_format_config<T>(mut self, v: T) -> Self
6017 where
6018 T: std::convert::Into<crate::model::OutputFormatConfig>,
6019 {
6020 self.output_format_config = std::option::Option::Some(v.into());
6021 self
6022 }
6023
6024 /// Sets or clears the value of [output_format_config][crate::model::RecognitionOutputConfig::output_format_config].
6025 ///
6026 /// # Example
6027 /// ```ignore,no_run
6028 /// # use google_cloud_speech_v2::model::RecognitionOutputConfig;
6029 /// use google_cloud_speech_v2::model::OutputFormatConfig;
6030 /// let x = RecognitionOutputConfig::new().set_or_clear_output_format_config(Some(OutputFormatConfig::default()/* use setters */));
6031 /// let x = RecognitionOutputConfig::new().set_or_clear_output_format_config(None::<OutputFormatConfig>);
6032 /// ```
6033 pub fn set_or_clear_output_format_config<T>(mut self, v: std::option::Option<T>) -> Self
6034 where
6035 T: std::convert::Into<crate::model::OutputFormatConfig>,
6036 {
6037 self.output_format_config = v.map(|x| x.into());
6038 self
6039 }
6040
6041 /// Sets the value of [output][crate::model::RecognitionOutputConfig::output].
6042 ///
6043 /// Note that all the setters affecting `output` are mutually
6044 /// exclusive.
6045 ///
6046 /// # Example
6047 /// ```ignore,no_run
6048 /// # use google_cloud_speech_v2::model::RecognitionOutputConfig;
6049 /// use google_cloud_speech_v2::model::GcsOutputConfig;
6050 /// let x = RecognitionOutputConfig::new().set_output(Some(
6051 /// google_cloud_speech_v2::model::recognition_output_config::Output::GcsOutputConfig(GcsOutputConfig::default().into())));
6052 /// ```
6053 pub fn set_output<
6054 T: std::convert::Into<std::option::Option<crate::model::recognition_output_config::Output>>,
6055 >(
6056 mut self,
6057 v: T,
6058 ) -> Self {
6059 self.output = v.into();
6060 self
6061 }
6062
6063 /// The value of [output][crate::model::RecognitionOutputConfig::output]
6064 /// if it holds a `GcsOutputConfig`, `None` if the field is not set or
6065 /// holds a different branch.
6066 pub fn gcs_output_config(
6067 &self,
6068 ) -> std::option::Option<&std::boxed::Box<crate::model::GcsOutputConfig>> {
6069 #[allow(unreachable_patterns)]
6070 self.output.as_ref().and_then(|v| match v {
6071 crate::model::recognition_output_config::Output::GcsOutputConfig(v) => {
6072 std::option::Option::Some(v)
6073 }
6074 _ => std::option::Option::None,
6075 })
6076 }
6077
6078 /// Sets the value of [output][crate::model::RecognitionOutputConfig::output]
6079 /// to hold a `GcsOutputConfig`.
6080 ///
6081 /// Note that all the setters affecting `output` are
6082 /// mutually exclusive.
6083 ///
6084 /// # Example
6085 /// ```ignore,no_run
6086 /// # use google_cloud_speech_v2::model::RecognitionOutputConfig;
6087 /// use google_cloud_speech_v2::model::GcsOutputConfig;
6088 /// let x = RecognitionOutputConfig::new().set_gcs_output_config(GcsOutputConfig::default()/* use setters */);
6089 /// assert!(x.gcs_output_config().is_some());
6090 /// assert!(x.inline_response_config().is_none());
6091 /// ```
6092 pub fn set_gcs_output_config<
6093 T: std::convert::Into<std::boxed::Box<crate::model::GcsOutputConfig>>,
6094 >(
6095 mut self,
6096 v: T,
6097 ) -> Self {
6098 self.output = std::option::Option::Some(
6099 crate::model::recognition_output_config::Output::GcsOutputConfig(v.into()),
6100 );
6101 self
6102 }
6103
6104 /// The value of [output][crate::model::RecognitionOutputConfig::output]
6105 /// if it holds a `InlineResponseConfig`, `None` if the field is not set or
6106 /// holds a different branch.
6107 pub fn inline_response_config(
6108 &self,
6109 ) -> std::option::Option<&std::boxed::Box<crate::model::InlineOutputConfig>> {
6110 #[allow(unreachable_patterns)]
6111 self.output.as_ref().and_then(|v| match v {
6112 crate::model::recognition_output_config::Output::InlineResponseConfig(v) => {
6113 std::option::Option::Some(v)
6114 }
6115 _ => std::option::Option::None,
6116 })
6117 }
6118
6119 /// Sets the value of [output][crate::model::RecognitionOutputConfig::output]
6120 /// to hold a `InlineResponseConfig`.
6121 ///
6122 /// Note that all the setters affecting `output` are
6123 /// mutually exclusive.
6124 ///
6125 /// # Example
6126 /// ```ignore,no_run
6127 /// # use google_cloud_speech_v2::model::RecognitionOutputConfig;
6128 /// use google_cloud_speech_v2::model::InlineOutputConfig;
6129 /// let x = RecognitionOutputConfig::new().set_inline_response_config(InlineOutputConfig::default()/* use setters */);
6130 /// assert!(x.inline_response_config().is_some());
6131 /// assert!(x.gcs_output_config().is_none());
6132 /// ```
6133 pub fn set_inline_response_config<
6134 T: std::convert::Into<std::boxed::Box<crate::model::InlineOutputConfig>>,
6135 >(
6136 mut self,
6137 v: T,
6138 ) -> Self {
6139 self.output = std::option::Option::Some(
6140 crate::model::recognition_output_config::Output::InlineResponseConfig(v.into()),
6141 );
6142 self
6143 }
6144}
6145
// Associates this message with its fully-qualified protobuf type URL.
impl wkt::message::Message for RecognitionOutputConfig {
    fn typename() -> &'static str {
        "type.googleapis.com/google.cloud.speech.v2.RecognitionOutputConfig"
    }
}
6151
/// Defines additional types related to [RecognitionOutputConfig].
pub mod recognition_output_config {
    #[allow(unused_imports)]
    use super::*;

    /// The destination of the recognition results: the `output` oneof of
    /// `RecognitionOutputConfig`.
    #[derive(Clone, Debug, PartialEq)]
    #[non_exhaustive]
    pub enum Output {
        /// If this message is populated, recognition results are written to the
        /// provided Google Cloud Storage URI.
        GcsOutputConfig(std::boxed::Box<crate::model::GcsOutputConfig>),
        /// If this message is populated, recognition results are provided in the
        /// [BatchRecognizeResponse][google.cloud.speech.v2.BatchRecognizeResponse]
        /// message of the Operation when completed. This is only supported when
        /// calling [BatchRecognize][google.cloud.speech.v2.Speech.BatchRecognize]
        /// with just one audio file.
        ///
        /// [google.cloud.speech.v2.BatchRecognizeResponse]: crate::model::BatchRecognizeResponse
        /// [google.cloud.speech.v2.Speech.BatchRecognize]: crate::client::Speech::batch_recognize
        InlineResponseConfig(std::boxed::Box<crate::model::InlineOutputConfig>),
    }
}
6174
/// Response message for
/// [BatchRecognize][google.cloud.speech.v2.Speech.BatchRecognize] that is
/// packaged into a longrunning [Operation][google.longrunning.Operation].
///
/// [google.cloud.speech.v2.Speech.BatchRecognize]: crate::client::Speech::batch_recognize
/// [google.longrunning.Operation]: google_cloud_longrunning::model::Operation
#[derive(Clone, Default, PartialEq)]
#[non_exhaustive]
pub struct BatchRecognizeResponse {
    /// Map from filename to the final result for that file.
    pub results:
        std::collections::HashMap<std::string::String, crate::model::BatchRecognizeFileResult>,

    /// When available, billed audio seconds for the corresponding request.
    pub total_billed_duration: std::option::Option<wkt::Duration>,

    // Captures JSON fields not modeled by this client version; presumably
    // round-tripped by the sibling `serialize`/`deserialize` modules.
    pub(crate) _unknown_fields: serde_json::Map<std::string::String, serde_json::Value>,
}
6193
6194impl BatchRecognizeResponse {
6195 pub fn new() -> Self {
6196 std::default::Default::default()
6197 }
6198
6199 /// Sets the value of [results][crate::model::BatchRecognizeResponse::results].
6200 ///
6201 /// # Example
6202 /// ```ignore,no_run
6203 /// # use google_cloud_speech_v2::model::BatchRecognizeResponse;
6204 /// use google_cloud_speech_v2::model::BatchRecognizeFileResult;
6205 /// let x = BatchRecognizeResponse::new().set_results([
6206 /// ("key0", BatchRecognizeFileResult::default()/* use setters */),
6207 /// ("key1", BatchRecognizeFileResult::default()/* use (different) setters */),
6208 /// ]);
6209 /// ```
6210 pub fn set_results<T, K, V>(mut self, v: T) -> Self
6211 where
6212 T: std::iter::IntoIterator<Item = (K, V)>,
6213 K: std::convert::Into<std::string::String>,
6214 V: std::convert::Into<crate::model::BatchRecognizeFileResult>,
6215 {
6216 use std::iter::Iterator;
6217 self.results = v.into_iter().map(|(k, v)| (k.into(), v.into())).collect();
6218 self
6219 }
6220
6221 /// Sets the value of [total_billed_duration][crate::model::BatchRecognizeResponse::total_billed_duration].
6222 ///
6223 /// # Example
6224 /// ```ignore,no_run
6225 /// # use google_cloud_speech_v2::model::BatchRecognizeResponse;
6226 /// use wkt::Duration;
6227 /// let x = BatchRecognizeResponse::new().set_total_billed_duration(Duration::default()/* use setters */);
6228 /// ```
6229 pub fn set_total_billed_duration<T>(mut self, v: T) -> Self
6230 where
6231 T: std::convert::Into<wkt::Duration>,
6232 {
6233 self.total_billed_duration = std::option::Option::Some(v.into());
6234 self
6235 }
6236
6237 /// Sets or clears the value of [total_billed_duration][crate::model::BatchRecognizeResponse::total_billed_duration].
6238 ///
6239 /// # Example
6240 /// ```ignore,no_run
6241 /// # use google_cloud_speech_v2::model::BatchRecognizeResponse;
6242 /// use wkt::Duration;
6243 /// let x = BatchRecognizeResponse::new().set_or_clear_total_billed_duration(Some(Duration::default()/* use setters */));
6244 /// let x = BatchRecognizeResponse::new().set_or_clear_total_billed_duration(None::<Duration>);
6245 /// ```
6246 pub fn set_or_clear_total_billed_duration<T>(mut self, v: std::option::Option<T>) -> Self
6247 where
6248 T: std::convert::Into<wkt::Duration>,
6249 {
6250 self.total_billed_duration = v.map(|x| x.into());
6251 self
6252 }
6253}
6254
// Associates this message with its fully-qualified protobuf type URL.
impl wkt::message::Message for BatchRecognizeResponse {
    fn typename() -> &'static str {
        "type.googleapis.com/google.cloud.speech.v2.BatchRecognizeResponse"
    }
}
6260
/// Output type for Cloud Storage of BatchRecognize transcripts. Though this
/// proto isn't returned in this API anywhere, the Cloud Storage transcripts will
/// be this proto serialized and should be parsed as such.
#[derive(Clone, Default, PartialEq)]
#[non_exhaustive]
pub struct BatchRecognizeResults {
    /// Sequential list of transcription results corresponding to sequential
    /// portions of audio.
    pub results: std::vec::Vec<crate::model::SpeechRecognitionResult>,

    /// Metadata about the recognition.
    pub metadata: std::option::Option<crate::model::RecognitionResponseMetadata>,

    // Captures JSON fields not modeled by this client version; presumably
    // round-tripped by the sibling `serialize`/`deserialize` modules.
    pub(crate) _unknown_fields: serde_json::Map<std::string::String, serde_json::Value>,
}
6276
6277impl BatchRecognizeResults {
6278 pub fn new() -> Self {
6279 std::default::Default::default()
6280 }
6281
6282 /// Sets the value of [results][crate::model::BatchRecognizeResults::results].
6283 ///
6284 /// # Example
6285 /// ```ignore,no_run
6286 /// # use google_cloud_speech_v2::model::BatchRecognizeResults;
6287 /// use google_cloud_speech_v2::model::SpeechRecognitionResult;
6288 /// let x = BatchRecognizeResults::new()
6289 /// .set_results([
6290 /// SpeechRecognitionResult::default()/* use setters */,
6291 /// SpeechRecognitionResult::default()/* use (different) setters */,
6292 /// ]);
6293 /// ```
6294 pub fn set_results<T, V>(mut self, v: T) -> Self
6295 where
6296 T: std::iter::IntoIterator<Item = V>,
6297 V: std::convert::Into<crate::model::SpeechRecognitionResult>,
6298 {
6299 use std::iter::Iterator;
6300 self.results = v.into_iter().map(|i| i.into()).collect();
6301 self
6302 }
6303
6304 /// Sets the value of [metadata][crate::model::BatchRecognizeResults::metadata].
6305 ///
6306 /// # Example
6307 /// ```ignore,no_run
6308 /// # use google_cloud_speech_v2::model::BatchRecognizeResults;
6309 /// use google_cloud_speech_v2::model::RecognitionResponseMetadata;
6310 /// let x = BatchRecognizeResults::new().set_metadata(RecognitionResponseMetadata::default()/* use setters */);
6311 /// ```
6312 pub fn set_metadata<T>(mut self, v: T) -> Self
6313 where
6314 T: std::convert::Into<crate::model::RecognitionResponseMetadata>,
6315 {
6316 self.metadata = std::option::Option::Some(v.into());
6317 self
6318 }
6319
6320 /// Sets or clears the value of [metadata][crate::model::BatchRecognizeResults::metadata].
6321 ///
6322 /// # Example
6323 /// ```ignore,no_run
6324 /// # use google_cloud_speech_v2::model::BatchRecognizeResults;
6325 /// use google_cloud_speech_v2::model::RecognitionResponseMetadata;
6326 /// let x = BatchRecognizeResults::new().set_or_clear_metadata(Some(RecognitionResponseMetadata::default()/* use setters */));
6327 /// let x = BatchRecognizeResults::new().set_or_clear_metadata(None::<RecognitionResponseMetadata>);
6328 /// ```
6329 pub fn set_or_clear_metadata<T>(mut self, v: std::option::Option<T>) -> Self
6330 where
6331 T: std::convert::Into<crate::model::RecognitionResponseMetadata>,
6332 {
6333 self.metadata = v.map(|x| x.into());
6334 self
6335 }
6336}
6337
// Associates this message with its fully-qualified protobuf type URL.
impl wkt::message::Message for BatchRecognizeResults {
    fn typename() -> &'static str {
        "type.googleapis.com/google.cloud.speech.v2.BatchRecognizeResults"
    }
}
6343
/// Final results written to Cloud Storage.
#[derive(Clone, Default, PartialEq)]
#[non_exhaustive]
pub struct CloudStorageResult {
    /// The Cloud Storage URI to which recognition results were written.
    pub uri: std::string::String,

    /// The Cloud Storage URI to which recognition results were written as VTT
    /// formatted captions. This is populated only when `VTT` output is requested.
    pub vtt_format_uri: std::string::String,

    /// The Cloud Storage URI to which recognition results were written as SRT
    /// formatted captions. This is populated only when `SRT` output is requested.
    pub srt_format_uri: std::string::String,

    // Captures JSON fields not modeled by this client version; presumably
    // round-tripped by the sibling `serialize`/`deserialize` modules.
    pub(crate) _unknown_fields: serde_json::Map<std::string::String, serde_json::Value>,
}
6361
6362impl CloudStorageResult {
6363 pub fn new() -> Self {
6364 std::default::Default::default()
6365 }
6366
6367 /// Sets the value of [uri][crate::model::CloudStorageResult::uri].
6368 ///
6369 /// # Example
6370 /// ```ignore,no_run
6371 /// # use google_cloud_speech_v2::model::CloudStorageResult;
6372 /// let x = CloudStorageResult::new().set_uri("example");
6373 /// ```
6374 pub fn set_uri<T: std::convert::Into<std::string::String>>(mut self, v: T) -> Self {
6375 self.uri = v.into();
6376 self
6377 }
6378
6379 /// Sets the value of [vtt_format_uri][crate::model::CloudStorageResult::vtt_format_uri].
6380 ///
6381 /// # Example
6382 /// ```ignore,no_run
6383 /// # use google_cloud_speech_v2::model::CloudStorageResult;
6384 /// let x = CloudStorageResult::new().set_vtt_format_uri("example");
6385 /// ```
6386 pub fn set_vtt_format_uri<T: std::convert::Into<std::string::String>>(mut self, v: T) -> Self {
6387 self.vtt_format_uri = v.into();
6388 self
6389 }
6390
6391 /// Sets the value of [srt_format_uri][crate::model::CloudStorageResult::srt_format_uri].
6392 ///
6393 /// # Example
6394 /// ```ignore,no_run
6395 /// # use google_cloud_speech_v2::model::CloudStorageResult;
6396 /// let x = CloudStorageResult::new().set_srt_format_uri("example");
6397 /// ```
6398 pub fn set_srt_format_uri<T: std::convert::Into<std::string::String>>(mut self, v: T) -> Self {
6399 self.srt_format_uri = v.into();
6400 self
6401 }
6402}
6403
// Associates this message with its fully-qualified protobuf type URL.
impl wkt::message::Message for CloudStorageResult {
    fn typename() -> &'static str {
        "type.googleapis.com/google.cloud.speech.v2.CloudStorageResult"
    }
}
6409
/// Final results returned inline in the recognition response.
#[derive(Clone, Default, PartialEq)]
#[non_exhaustive]
pub struct InlineResult {
    /// The transcript for the audio file.
    pub transcript: std::option::Option<crate::model::BatchRecognizeResults>,

    /// The transcript for the audio file as VTT formatted captions. This is
    /// populated only when `VTT` output is requested.
    pub vtt_captions: std::string::String,

    /// The transcript for the audio file as SRT formatted captions. This is
    /// populated only when `SRT` output is requested.
    pub srt_captions: std::string::String,

    // Captures JSON fields not modeled by this client version; presumably
    // round-tripped by the sibling `serialize`/`deserialize` modules.
    pub(crate) _unknown_fields: serde_json::Map<std::string::String, serde_json::Value>,
}
6427
6428impl InlineResult {
6429 pub fn new() -> Self {
6430 std::default::Default::default()
6431 }
6432
6433 /// Sets the value of [transcript][crate::model::InlineResult::transcript].
6434 ///
6435 /// # Example
6436 /// ```ignore,no_run
6437 /// # use google_cloud_speech_v2::model::InlineResult;
6438 /// use google_cloud_speech_v2::model::BatchRecognizeResults;
6439 /// let x = InlineResult::new().set_transcript(BatchRecognizeResults::default()/* use setters */);
6440 /// ```
6441 pub fn set_transcript<T>(mut self, v: T) -> Self
6442 where
6443 T: std::convert::Into<crate::model::BatchRecognizeResults>,
6444 {
6445 self.transcript = std::option::Option::Some(v.into());
6446 self
6447 }
6448
6449 /// Sets or clears the value of [transcript][crate::model::InlineResult::transcript].
6450 ///
6451 /// # Example
6452 /// ```ignore,no_run
6453 /// # use google_cloud_speech_v2::model::InlineResult;
6454 /// use google_cloud_speech_v2::model::BatchRecognizeResults;
6455 /// let x = InlineResult::new().set_or_clear_transcript(Some(BatchRecognizeResults::default()/* use setters */));
6456 /// let x = InlineResult::new().set_or_clear_transcript(None::<BatchRecognizeResults>);
6457 /// ```
6458 pub fn set_or_clear_transcript<T>(mut self, v: std::option::Option<T>) -> Self
6459 where
6460 T: std::convert::Into<crate::model::BatchRecognizeResults>,
6461 {
6462 self.transcript = v.map(|x| x.into());
6463 self
6464 }
6465
6466 /// Sets the value of [vtt_captions][crate::model::InlineResult::vtt_captions].
6467 ///
6468 /// # Example
6469 /// ```ignore,no_run
6470 /// # use google_cloud_speech_v2::model::InlineResult;
6471 /// let x = InlineResult::new().set_vtt_captions("example");
6472 /// ```
6473 pub fn set_vtt_captions<T: std::convert::Into<std::string::String>>(mut self, v: T) -> Self {
6474 self.vtt_captions = v.into();
6475 self
6476 }
6477
6478 /// Sets the value of [srt_captions][crate::model::InlineResult::srt_captions].
6479 ///
6480 /// # Example
6481 /// ```ignore,no_run
6482 /// # use google_cloud_speech_v2::model::InlineResult;
6483 /// let x = InlineResult::new().set_srt_captions("example");
6484 /// ```
6485 pub fn set_srt_captions<T: std::convert::Into<std::string::String>>(mut self, v: T) -> Self {
6486 self.srt_captions = v.into();
6487 self
6488 }
6489}
6490
6491impl wkt::message::Message for InlineResult {
6492 fn typename() -> &'static str {
6493 "type.googleapis.com/google.cloud.speech.v2.InlineResult"
6494 }
6495}
6496
/// Final results for a single file.
#[derive(Clone, Default, PartialEq)]
#[non_exhaustive]
pub struct BatchRecognizeFileResult {
    /// Error if one was encountered.
    pub error: std::option::Option<google_cloud_rpc::model::Status>,

    /// Metadata about the recognition of this file.
    /// NOTE(review): undocumented upstream; description inferred from the
    /// field type [RecognitionResponseMetadata][crate::model::RecognitionResponseMetadata]
    /// — confirm against the service documentation.
    pub metadata: std::option::Option<crate::model::RecognitionResponseMetadata>,

    /// Deprecated. Use `cloud_storage_result.native_format_uri` instead.
    #[deprecated]
    pub uri: std::string::String,

    /// Deprecated. Use `inline_result.transcript` instead.
    #[deprecated]
    pub transcript: std::option::Option<crate::model::BatchRecognizeResults>,

    /// Where this file's results live: either inline in the response or
    /// written to Cloud Storage. See
    /// [Result][crate::model::batch_recognize_file_result::Result] for the
    /// two branches.
    pub result: std::option::Option<crate::model::batch_recognize_file_result::Result>,

    // Wire-format fields not known to this generated struct; presumably
    // retained so the `serialize`/`deserialize` modules can round-trip them
    // — not verified from this view of the file.
    pub(crate) _unknown_fields: serde_json::Map<std::string::String, serde_json::Value>,
}
6518
6519impl BatchRecognizeFileResult {
6520 pub fn new() -> Self {
6521 std::default::Default::default()
6522 }
6523
6524 /// Sets the value of [error][crate::model::BatchRecognizeFileResult::error].
6525 ///
6526 /// # Example
6527 /// ```ignore,no_run
6528 /// # use google_cloud_speech_v2::model::BatchRecognizeFileResult;
6529 /// use google_cloud_rpc::model::Status;
6530 /// let x = BatchRecognizeFileResult::new().set_error(Status::default()/* use setters */);
6531 /// ```
6532 pub fn set_error<T>(mut self, v: T) -> Self
6533 where
6534 T: std::convert::Into<google_cloud_rpc::model::Status>,
6535 {
6536 self.error = std::option::Option::Some(v.into());
6537 self
6538 }
6539
6540 /// Sets or clears the value of [error][crate::model::BatchRecognizeFileResult::error].
6541 ///
6542 /// # Example
6543 /// ```ignore,no_run
6544 /// # use google_cloud_speech_v2::model::BatchRecognizeFileResult;
6545 /// use google_cloud_rpc::model::Status;
6546 /// let x = BatchRecognizeFileResult::new().set_or_clear_error(Some(Status::default()/* use setters */));
6547 /// let x = BatchRecognizeFileResult::new().set_or_clear_error(None::<Status>);
6548 /// ```
6549 pub fn set_or_clear_error<T>(mut self, v: std::option::Option<T>) -> Self
6550 where
6551 T: std::convert::Into<google_cloud_rpc::model::Status>,
6552 {
6553 self.error = v.map(|x| x.into());
6554 self
6555 }
6556
6557 /// Sets the value of [metadata][crate::model::BatchRecognizeFileResult::metadata].
6558 ///
6559 /// # Example
6560 /// ```ignore,no_run
6561 /// # use google_cloud_speech_v2::model::BatchRecognizeFileResult;
6562 /// use google_cloud_speech_v2::model::RecognitionResponseMetadata;
6563 /// let x = BatchRecognizeFileResult::new().set_metadata(RecognitionResponseMetadata::default()/* use setters */);
6564 /// ```
6565 pub fn set_metadata<T>(mut self, v: T) -> Self
6566 where
6567 T: std::convert::Into<crate::model::RecognitionResponseMetadata>,
6568 {
6569 self.metadata = std::option::Option::Some(v.into());
6570 self
6571 }
6572
6573 /// Sets or clears the value of [metadata][crate::model::BatchRecognizeFileResult::metadata].
6574 ///
6575 /// # Example
6576 /// ```ignore,no_run
6577 /// # use google_cloud_speech_v2::model::BatchRecognizeFileResult;
6578 /// use google_cloud_speech_v2::model::RecognitionResponseMetadata;
6579 /// let x = BatchRecognizeFileResult::new().set_or_clear_metadata(Some(RecognitionResponseMetadata::default()/* use setters */));
6580 /// let x = BatchRecognizeFileResult::new().set_or_clear_metadata(None::<RecognitionResponseMetadata>);
6581 /// ```
6582 pub fn set_or_clear_metadata<T>(mut self, v: std::option::Option<T>) -> Self
6583 where
6584 T: std::convert::Into<crate::model::RecognitionResponseMetadata>,
6585 {
6586 self.metadata = v.map(|x| x.into());
6587 self
6588 }
6589
6590 /// Sets the value of [uri][crate::model::BatchRecognizeFileResult::uri].
6591 ///
6592 /// # Example
6593 /// ```ignore,no_run
6594 /// # use google_cloud_speech_v2::model::BatchRecognizeFileResult;
6595 /// let x = BatchRecognizeFileResult::new().set_uri("example");
6596 /// ```
6597 #[deprecated]
6598 pub fn set_uri<T: std::convert::Into<std::string::String>>(mut self, v: T) -> Self {
6599 self.uri = v.into();
6600 self
6601 }
6602
6603 /// Sets the value of [transcript][crate::model::BatchRecognizeFileResult::transcript].
6604 ///
6605 /// # Example
6606 /// ```ignore,no_run
6607 /// # use google_cloud_speech_v2::model::BatchRecognizeFileResult;
6608 /// use google_cloud_speech_v2::model::BatchRecognizeResults;
6609 /// let x = BatchRecognizeFileResult::new().set_transcript(BatchRecognizeResults::default()/* use setters */);
6610 /// ```
6611 #[deprecated]
6612 pub fn set_transcript<T>(mut self, v: T) -> Self
6613 where
6614 T: std::convert::Into<crate::model::BatchRecognizeResults>,
6615 {
6616 self.transcript = std::option::Option::Some(v.into());
6617 self
6618 }
6619
6620 /// Sets or clears the value of [transcript][crate::model::BatchRecognizeFileResult::transcript].
6621 ///
6622 /// # Example
6623 /// ```ignore,no_run
6624 /// # use google_cloud_speech_v2::model::BatchRecognizeFileResult;
6625 /// use google_cloud_speech_v2::model::BatchRecognizeResults;
6626 /// let x = BatchRecognizeFileResult::new().set_or_clear_transcript(Some(BatchRecognizeResults::default()/* use setters */));
6627 /// let x = BatchRecognizeFileResult::new().set_or_clear_transcript(None::<BatchRecognizeResults>);
6628 /// ```
6629 #[deprecated]
6630 pub fn set_or_clear_transcript<T>(mut self, v: std::option::Option<T>) -> Self
6631 where
6632 T: std::convert::Into<crate::model::BatchRecognizeResults>,
6633 {
6634 self.transcript = v.map(|x| x.into());
6635 self
6636 }
6637
6638 /// Sets the value of [result][crate::model::BatchRecognizeFileResult::result].
6639 ///
6640 /// Note that all the setters affecting `result` are mutually
6641 /// exclusive.
6642 ///
6643 /// # Example
6644 /// ```ignore,no_run
6645 /// # use google_cloud_speech_v2::model::BatchRecognizeFileResult;
6646 /// use google_cloud_speech_v2::model::CloudStorageResult;
6647 /// let x = BatchRecognizeFileResult::new().set_result(Some(
6648 /// google_cloud_speech_v2::model::batch_recognize_file_result::Result::CloudStorageResult(CloudStorageResult::default().into())));
6649 /// ```
6650 pub fn set_result<
6651 T: std::convert::Into<std::option::Option<crate::model::batch_recognize_file_result::Result>>,
6652 >(
6653 mut self,
6654 v: T,
6655 ) -> Self {
6656 self.result = v.into();
6657 self
6658 }
6659
6660 /// The value of [result][crate::model::BatchRecognizeFileResult::result]
6661 /// if it holds a `CloudStorageResult`, `None` if the field is not set or
6662 /// holds a different branch.
6663 pub fn cloud_storage_result(
6664 &self,
6665 ) -> std::option::Option<&std::boxed::Box<crate::model::CloudStorageResult>> {
6666 #[allow(unreachable_patterns)]
6667 self.result.as_ref().and_then(|v| match v {
6668 crate::model::batch_recognize_file_result::Result::CloudStorageResult(v) => {
6669 std::option::Option::Some(v)
6670 }
6671 _ => std::option::Option::None,
6672 })
6673 }
6674
6675 /// Sets the value of [result][crate::model::BatchRecognizeFileResult::result]
6676 /// to hold a `CloudStorageResult`.
6677 ///
6678 /// Note that all the setters affecting `result` are
6679 /// mutually exclusive.
6680 ///
6681 /// # Example
6682 /// ```ignore,no_run
6683 /// # use google_cloud_speech_v2::model::BatchRecognizeFileResult;
6684 /// use google_cloud_speech_v2::model::CloudStorageResult;
6685 /// let x = BatchRecognizeFileResult::new().set_cloud_storage_result(CloudStorageResult::default()/* use setters */);
6686 /// assert!(x.cloud_storage_result().is_some());
6687 /// assert!(x.inline_result().is_none());
6688 /// ```
6689 pub fn set_cloud_storage_result<
6690 T: std::convert::Into<std::boxed::Box<crate::model::CloudStorageResult>>,
6691 >(
6692 mut self,
6693 v: T,
6694 ) -> Self {
6695 self.result = std::option::Option::Some(
6696 crate::model::batch_recognize_file_result::Result::CloudStorageResult(v.into()),
6697 );
6698 self
6699 }
6700
6701 /// The value of [result][crate::model::BatchRecognizeFileResult::result]
6702 /// if it holds a `InlineResult`, `None` if the field is not set or
6703 /// holds a different branch.
6704 pub fn inline_result(
6705 &self,
6706 ) -> std::option::Option<&std::boxed::Box<crate::model::InlineResult>> {
6707 #[allow(unreachable_patterns)]
6708 self.result.as_ref().and_then(|v| match v {
6709 crate::model::batch_recognize_file_result::Result::InlineResult(v) => {
6710 std::option::Option::Some(v)
6711 }
6712 _ => std::option::Option::None,
6713 })
6714 }
6715
6716 /// Sets the value of [result][crate::model::BatchRecognizeFileResult::result]
6717 /// to hold a `InlineResult`.
6718 ///
6719 /// Note that all the setters affecting `result` are
6720 /// mutually exclusive.
6721 ///
6722 /// # Example
6723 /// ```ignore,no_run
6724 /// # use google_cloud_speech_v2::model::BatchRecognizeFileResult;
6725 /// use google_cloud_speech_v2::model::InlineResult;
6726 /// let x = BatchRecognizeFileResult::new().set_inline_result(InlineResult::default()/* use setters */);
6727 /// assert!(x.inline_result().is_some());
6728 /// assert!(x.cloud_storage_result().is_none());
6729 /// ```
6730 pub fn set_inline_result<T: std::convert::Into<std::boxed::Box<crate::model::InlineResult>>>(
6731 mut self,
6732 v: T,
6733 ) -> Self {
6734 self.result = std::option::Option::Some(
6735 crate::model::batch_recognize_file_result::Result::InlineResult(v.into()),
6736 );
6737 self
6738 }
6739}
6740
6741impl wkt::message::Message for BatchRecognizeFileResult {
6742 fn typename() -> &'static str {
6743 "type.googleapis.com/google.cloud.speech.v2.BatchRecognizeFileResult"
6744 }
6745}
6746
6747/// Defines additional types related to [BatchRecognizeFileResult].
6748pub mod batch_recognize_file_result {
6749 #[allow(unused_imports)]
6750 use super::*;
6751
6752 #[derive(Clone, Debug, PartialEq)]
6753 #[non_exhaustive]
6754 pub enum Result {
6755 /// Recognition results written to Cloud Storage. This is
6756 /// populated only when
6757 /// [GcsOutputConfig][google.cloud.speech.v2.GcsOutputConfig] is set in
6758 /// the
6759 /// [RecognitionOutputConfig][[google.cloud.speech.v2.RecognitionOutputConfig].
6760 ///
6761 /// [google.cloud.speech.v2.GcsOutputConfig]: crate::model::GcsOutputConfig
6762 CloudStorageResult(std::boxed::Box<crate::model::CloudStorageResult>),
6763 /// Recognition results. This is populated only when
6764 /// [InlineOutputConfig][google.cloud.speech.v2.InlineOutputConfig] is set in
6765 /// the
6766 /// [RecognitionOutputConfig][[google.cloud.speech.v2.RecognitionOutputConfig].
6767 ///
6768 /// [google.cloud.speech.v2.InlineOutputConfig]: crate::model::InlineOutputConfig
6769 InlineResult(std::boxed::Box<crate::model::InlineResult>),
6770 }
6771}
6772
/// Metadata about transcription for a single file (for example, progress
/// percent).
#[derive(Clone, Default, PartialEq)]
#[non_exhaustive]
pub struct BatchRecognizeTranscriptionMetadata {
    /// How much of the file has been transcribed so far.
    /// NOTE(review): presumably a percentage in `[0, 100]` — not verifiable
    /// from this file; confirm against the service documentation.
    pub progress_percent: i32,

    /// Error if one was encountered.
    pub error: std::option::Option<google_cloud_rpc::model::Status>,

    /// The Cloud Storage URI to which recognition results will be written.
    pub uri: std::string::String,

    // Wire-format fields not known to this generated struct; presumably
    // retained so the `serialize`/`deserialize` modules can round-trip them
    // — not verified from this view of the file.
    pub(crate) _unknown_fields: serde_json::Map<std::string::String, serde_json::Value>,
}
6789
6790impl BatchRecognizeTranscriptionMetadata {
6791 pub fn new() -> Self {
6792 std::default::Default::default()
6793 }
6794
6795 /// Sets the value of [progress_percent][crate::model::BatchRecognizeTranscriptionMetadata::progress_percent].
6796 ///
6797 /// # Example
6798 /// ```ignore,no_run
6799 /// # use google_cloud_speech_v2::model::BatchRecognizeTranscriptionMetadata;
6800 /// let x = BatchRecognizeTranscriptionMetadata::new().set_progress_percent(42);
6801 /// ```
6802 pub fn set_progress_percent<T: std::convert::Into<i32>>(mut self, v: T) -> Self {
6803 self.progress_percent = v.into();
6804 self
6805 }
6806
6807 /// Sets the value of [error][crate::model::BatchRecognizeTranscriptionMetadata::error].
6808 ///
6809 /// # Example
6810 /// ```ignore,no_run
6811 /// # use google_cloud_speech_v2::model::BatchRecognizeTranscriptionMetadata;
6812 /// use google_cloud_rpc::model::Status;
6813 /// let x = BatchRecognizeTranscriptionMetadata::new().set_error(Status::default()/* use setters */);
6814 /// ```
6815 pub fn set_error<T>(mut self, v: T) -> Self
6816 where
6817 T: std::convert::Into<google_cloud_rpc::model::Status>,
6818 {
6819 self.error = std::option::Option::Some(v.into());
6820 self
6821 }
6822
6823 /// Sets or clears the value of [error][crate::model::BatchRecognizeTranscriptionMetadata::error].
6824 ///
6825 /// # Example
6826 /// ```ignore,no_run
6827 /// # use google_cloud_speech_v2::model::BatchRecognizeTranscriptionMetadata;
6828 /// use google_cloud_rpc::model::Status;
6829 /// let x = BatchRecognizeTranscriptionMetadata::new().set_or_clear_error(Some(Status::default()/* use setters */));
6830 /// let x = BatchRecognizeTranscriptionMetadata::new().set_or_clear_error(None::<Status>);
6831 /// ```
6832 pub fn set_or_clear_error<T>(mut self, v: std::option::Option<T>) -> Self
6833 where
6834 T: std::convert::Into<google_cloud_rpc::model::Status>,
6835 {
6836 self.error = v.map(|x| x.into());
6837 self
6838 }
6839
6840 /// Sets the value of [uri][crate::model::BatchRecognizeTranscriptionMetadata::uri].
6841 ///
6842 /// # Example
6843 /// ```ignore,no_run
6844 /// # use google_cloud_speech_v2::model::BatchRecognizeTranscriptionMetadata;
6845 /// let x = BatchRecognizeTranscriptionMetadata::new().set_uri("example");
6846 /// ```
6847 pub fn set_uri<T: std::convert::Into<std::string::String>>(mut self, v: T) -> Self {
6848 self.uri = v.into();
6849 self
6850 }
6851}
6852
6853impl wkt::message::Message for BatchRecognizeTranscriptionMetadata {
6854 fn typename() -> &'static str {
6855 "type.googleapis.com/google.cloud.speech.v2.BatchRecognizeTranscriptionMetadata"
6856 }
6857}
6858
/// Operation metadata for
/// [BatchRecognize][google.cloud.speech.v2.Speech.BatchRecognize].
///
/// [google.cloud.speech.v2.Speech.BatchRecognize]: crate::client::Speech::batch_recognize
#[derive(Clone, Default, PartialEq)]
#[non_exhaustive]
pub struct BatchRecognizeMetadata {
    /// Map from provided filename to the transcription metadata for that file.
    pub transcription_metadata: std::collections::HashMap<
        std::string::String,
        crate::model::BatchRecognizeTranscriptionMetadata,
    >,

    // Wire-format fields not known to this generated struct; presumably
    // retained so the `serialize`/`deserialize` modules can round-trip them
    // — not verified from this view of the file.
    pub(crate) _unknown_fields: serde_json::Map<std::string::String, serde_json::Value>,
}
6874
6875impl BatchRecognizeMetadata {
6876 pub fn new() -> Self {
6877 std::default::Default::default()
6878 }
6879
6880 /// Sets the value of [transcription_metadata][crate::model::BatchRecognizeMetadata::transcription_metadata].
6881 ///
6882 /// # Example
6883 /// ```ignore,no_run
6884 /// # use google_cloud_speech_v2::model::BatchRecognizeMetadata;
6885 /// use google_cloud_speech_v2::model::BatchRecognizeTranscriptionMetadata;
6886 /// let x = BatchRecognizeMetadata::new().set_transcription_metadata([
6887 /// ("key0", BatchRecognizeTranscriptionMetadata::default()/* use setters */),
6888 /// ("key1", BatchRecognizeTranscriptionMetadata::default()/* use (different) setters */),
6889 /// ]);
6890 /// ```
6891 pub fn set_transcription_metadata<T, K, V>(mut self, v: T) -> Self
6892 where
6893 T: std::iter::IntoIterator<Item = (K, V)>,
6894 K: std::convert::Into<std::string::String>,
6895 V: std::convert::Into<crate::model::BatchRecognizeTranscriptionMetadata>,
6896 {
6897 use std::iter::Iterator;
6898 self.transcription_metadata = v.into_iter().map(|(k, v)| (k.into(), v.into())).collect();
6899 self
6900 }
6901}
6902
6903impl wkt::message::Message for BatchRecognizeMetadata {
6904 fn typename() -> &'static str {
6905 "type.googleapis.com/google.cloud.speech.v2.BatchRecognizeMetadata"
6906 }
6907}
6908
/// Metadata about a single file in a batch for BatchRecognize.
#[derive(Clone, Default, PartialEq)]
#[non_exhaustive]
pub struct BatchRecognizeFileMetadata {
    /// Features and audio metadata to use for the Automatic Speech Recognition.
    /// This field in combination with the
    /// [config_mask][google.cloud.speech.v2.BatchRecognizeFileMetadata.config_mask]
    /// field can be used to override parts of the
    /// [default_recognition_config][google.cloud.speech.v2.Recognizer.default_recognition_config]
    /// of the Recognizer resource as well as the
    /// [config][google.cloud.speech.v2.BatchRecognizeRequest.config] at the
    /// request level.
    ///
    /// [google.cloud.speech.v2.BatchRecognizeFileMetadata.config_mask]: crate::model::BatchRecognizeFileMetadata::config_mask
    /// [google.cloud.speech.v2.BatchRecognizeRequest.config]: crate::model::BatchRecognizeRequest::config
    /// [google.cloud.speech.v2.Recognizer.default_recognition_config]: crate::model::Recognizer::default_recognition_config
    pub config: std::option::Option<crate::model::RecognitionConfig>,

    /// The list of fields in
    /// [config][google.cloud.speech.v2.BatchRecognizeFileMetadata.config] that
    /// override the values in the
    /// [default_recognition_config][google.cloud.speech.v2.Recognizer.default_recognition_config]
    /// of the recognizer during this recognition request. If no mask is provided,
    /// all non-default valued fields in
    /// [config][google.cloud.speech.v2.BatchRecognizeFileMetadata.config] override
    /// the values in the recognizer for this recognition request. If a mask is
    /// provided, only the fields listed in the mask override the config in the
    /// recognizer for this recognition request. If a wildcard (`*`) is provided,
    /// [config][google.cloud.speech.v2.BatchRecognizeFileMetadata.config]
    /// completely overrides and replaces the config in the recognizer for this
    /// recognition request.
    ///
    /// [google.cloud.speech.v2.BatchRecognizeFileMetadata.config]: crate::model::BatchRecognizeFileMetadata::config
    /// [google.cloud.speech.v2.Recognizer.default_recognition_config]: crate::model::Recognizer::default_recognition_config
    pub config_mask: std::option::Option<wkt::FieldMask>,

    /// The audio source, which is a Google Cloud Storage URI.
    pub audio_source: std::option::Option<crate::model::batch_recognize_file_metadata::AudioSource>,

    // Wire-format fields not known to this generated struct; presumably
    // retained so the `serialize`/`deserialize` modules can round-trip them
    // — not verified from this view of the file.
    pub(crate) _unknown_fields: serde_json::Map<std::string::String, serde_json::Value>,
}
6950
6951impl BatchRecognizeFileMetadata {
6952 pub fn new() -> Self {
6953 std::default::Default::default()
6954 }
6955
6956 /// Sets the value of [config][crate::model::BatchRecognizeFileMetadata::config].
6957 ///
6958 /// # Example
6959 /// ```ignore,no_run
6960 /// # use google_cloud_speech_v2::model::BatchRecognizeFileMetadata;
6961 /// use google_cloud_speech_v2::model::RecognitionConfig;
6962 /// let x = BatchRecognizeFileMetadata::new().set_config(RecognitionConfig::default()/* use setters */);
6963 /// ```
6964 pub fn set_config<T>(mut self, v: T) -> Self
6965 where
6966 T: std::convert::Into<crate::model::RecognitionConfig>,
6967 {
6968 self.config = std::option::Option::Some(v.into());
6969 self
6970 }
6971
6972 /// Sets or clears the value of [config][crate::model::BatchRecognizeFileMetadata::config].
6973 ///
6974 /// # Example
6975 /// ```ignore,no_run
6976 /// # use google_cloud_speech_v2::model::BatchRecognizeFileMetadata;
6977 /// use google_cloud_speech_v2::model::RecognitionConfig;
6978 /// let x = BatchRecognizeFileMetadata::new().set_or_clear_config(Some(RecognitionConfig::default()/* use setters */));
6979 /// let x = BatchRecognizeFileMetadata::new().set_or_clear_config(None::<RecognitionConfig>);
6980 /// ```
6981 pub fn set_or_clear_config<T>(mut self, v: std::option::Option<T>) -> Self
6982 where
6983 T: std::convert::Into<crate::model::RecognitionConfig>,
6984 {
6985 self.config = v.map(|x| x.into());
6986 self
6987 }
6988
6989 /// Sets the value of [config_mask][crate::model::BatchRecognizeFileMetadata::config_mask].
6990 ///
6991 /// # Example
6992 /// ```ignore,no_run
6993 /// # use google_cloud_speech_v2::model::BatchRecognizeFileMetadata;
6994 /// use wkt::FieldMask;
6995 /// let x = BatchRecognizeFileMetadata::new().set_config_mask(FieldMask::default()/* use setters */);
6996 /// ```
6997 pub fn set_config_mask<T>(mut self, v: T) -> Self
6998 where
6999 T: std::convert::Into<wkt::FieldMask>,
7000 {
7001 self.config_mask = std::option::Option::Some(v.into());
7002 self
7003 }
7004
7005 /// Sets or clears the value of [config_mask][crate::model::BatchRecognizeFileMetadata::config_mask].
7006 ///
7007 /// # Example
7008 /// ```ignore,no_run
7009 /// # use google_cloud_speech_v2::model::BatchRecognizeFileMetadata;
7010 /// use wkt::FieldMask;
7011 /// let x = BatchRecognizeFileMetadata::new().set_or_clear_config_mask(Some(FieldMask::default()/* use setters */));
7012 /// let x = BatchRecognizeFileMetadata::new().set_or_clear_config_mask(None::<FieldMask>);
7013 /// ```
7014 pub fn set_or_clear_config_mask<T>(mut self, v: std::option::Option<T>) -> Self
7015 where
7016 T: std::convert::Into<wkt::FieldMask>,
7017 {
7018 self.config_mask = v.map(|x| x.into());
7019 self
7020 }
7021
7022 /// Sets the value of [audio_source][crate::model::BatchRecognizeFileMetadata::audio_source].
7023 ///
7024 /// Note that all the setters affecting `audio_source` are mutually
7025 /// exclusive.
7026 ///
7027 /// # Example
7028 /// ```ignore,no_run
7029 /// # use google_cloud_speech_v2::model::BatchRecognizeFileMetadata;
7030 /// use google_cloud_speech_v2::model::batch_recognize_file_metadata::AudioSource;
7031 /// let x = BatchRecognizeFileMetadata::new().set_audio_source(Some(AudioSource::Uri("example".to_string())));
7032 /// ```
7033 pub fn set_audio_source<
7034 T: std::convert::Into<
7035 std::option::Option<crate::model::batch_recognize_file_metadata::AudioSource>,
7036 >,
7037 >(
7038 mut self,
7039 v: T,
7040 ) -> Self {
7041 self.audio_source = v.into();
7042 self
7043 }
7044
7045 /// The value of [audio_source][crate::model::BatchRecognizeFileMetadata::audio_source]
7046 /// if it holds a `Uri`, `None` if the field is not set or
7047 /// holds a different branch.
7048 pub fn uri(&self) -> std::option::Option<&std::string::String> {
7049 #[allow(unreachable_patterns)]
7050 self.audio_source.as_ref().and_then(|v| match v {
7051 crate::model::batch_recognize_file_metadata::AudioSource::Uri(v) => {
7052 std::option::Option::Some(v)
7053 }
7054 _ => std::option::Option::None,
7055 })
7056 }
7057
7058 /// Sets the value of [audio_source][crate::model::BatchRecognizeFileMetadata::audio_source]
7059 /// to hold a `Uri`.
7060 ///
7061 /// Note that all the setters affecting `audio_source` are
7062 /// mutually exclusive.
7063 ///
7064 /// # Example
7065 /// ```ignore,no_run
7066 /// # use google_cloud_speech_v2::model::BatchRecognizeFileMetadata;
7067 /// let x = BatchRecognizeFileMetadata::new().set_uri("example");
7068 /// assert!(x.uri().is_some());
7069 /// ```
7070 pub fn set_uri<T: std::convert::Into<std::string::String>>(mut self, v: T) -> Self {
7071 self.audio_source = std::option::Option::Some(
7072 crate::model::batch_recognize_file_metadata::AudioSource::Uri(v.into()),
7073 );
7074 self
7075 }
7076}
7077
7078impl wkt::message::Message for BatchRecognizeFileMetadata {
7079 fn typename() -> &'static str {
7080 "type.googleapis.com/google.cloud.speech.v2.BatchRecognizeFileMetadata"
7081 }
7082}
7083
/// Defines additional types related to [BatchRecognizeFileMetadata].
pub mod batch_recognize_file_metadata {
    #[allow(unused_imports)]
    use super::*;

    /// The audio source, which is a Google Cloud Storage URI.
    ///
    /// This one-of is `#[non_exhaustive]`: additional audio-source kinds may
    /// be added in future versions, so match on it non-exhaustively.
    #[derive(Clone, Debug, PartialEq)]
    #[non_exhaustive]
    pub enum AudioSource {
        /// Cloud Storage URI for the audio file.
        Uri(std::string::String),
    }
}
7097
/// A streaming speech recognition result corresponding to a portion of the audio
/// that is currently being processed.
#[derive(Clone, Default, PartialEq)]
#[non_exhaustive]
pub struct StreamingRecognitionResult {
    /// May contain one or more recognition hypotheses. These alternatives are
    /// ordered in terms of accuracy, with the top (first) alternative being the
    /// most probable, as ranked by the recognizer.
    pub alternatives: std::vec::Vec<crate::model::SpeechRecognitionAlternative>,

    /// If `false`, this
    /// [StreamingRecognitionResult][google.cloud.speech.v2.StreamingRecognitionResult]
    /// represents an interim result that may change. If `true`, this is the final
    /// time the speech service will return this particular
    /// [StreamingRecognitionResult][google.cloud.speech.v2.StreamingRecognitionResult],
    /// the recognizer will not return any further hypotheses for this portion of
    /// the transcript and corresponding audio.
    ///
    /// [google.cloud.speech.v2.StreamingRecognitionResult]: crate::model::StreamingRecognitionResult
    pub is_final: bool,

    /// An estimate of the likelihood that the recognizer will not change its guess
    /// about this interim result. Values range from 0.0 (completely unstable)
    /// to 1.0 (completely stable). This field is only provided for interim results
    /// ([is_final][google.cloud.speech.v2.StreamingRecognitionResult.is_final]=`false`).
    /// The default of 0.0 is a sentinel value indicating `stability` was not set.
    ///
    /// [google.cloud.speech.v2.StreamingRecognitionResult.is_final]: crate::model::StreamingRecognitionResult::is_final
    pub stability: f32,

    /// Time offset of the end of this result relative to the beginning of the
    /// audio.
    pub result_end_offset: std::option::Option<wkt::Duration>,

    /// For multi-channel audio, this is the channel number corresponding to the
    /// recognized result for the audio from that channel.
    /// For
    /// `audio_channel_count` = `N`, its output values can range from `1` to `N`.
    pub channel_tag: i32,

    /// Output only. The [BCP-47](https://www.rfc-editor.org/rfc/bcp/bcp47.txt)
    /// language tag of the language in this result. This language code was
    /// detected to have the most likelihood of being spoken in the audio.
    pub language_code: std::string::String,

    // Wire-format fields not known to this generated struct; presumably
    // retained so the `serialize`/`deserialize` modules can round-trip them
    // — not verified from this view of the file.
    pub(crate) _unknown_fields: serde_json::Map<std::string::String, serde_json::Value>,
}
7145
7146impl StreamingRecognitionResult {
7147 pub fn new() -> Self {
7148 std::default::Default::default()
7149 }
7150
7151 /// Sets the value of [alternatives][crate::model::StreamingRecognitionResult::alternatives].
7152 ///
7153 /// # Example
7154 /// ```ignore,no_run
7155 /// # use google_cloud_speech_v2::model::StreamingRecognitionResult;
7156 /// use google_cloud_speech_v2::model::SpeechRecognitionAlternative;
7157 /// let x = StreamingRecognitionResult::new()
7158 /// .set_alternatives([
7159 /// SpeechRecognitionAlternative::default()/* use setters */,
7160 /// SpeechRecognitionAlternative::default()/* use (different) setters */,
7161 /// ]);
7162 /// ```
7163 pub fn set_alternatives<T, V>(mut self, v: T) -> Self
7164 where
7165 T: std::iter::IntoIterator<Item = V>,
7166 V: std::convert::Into<crate::model::SpeechRecognitionAlternative>,
7167 {
7168 use std::iter::Iterator;
7169 self.alternatives = v.into_iter().map(|i| i.into()).collect();
7170 self
7171 }
7172
7173 /// Sets the value of [is_final][crate::model::StreamingRecognitionResult::is_final].
7174 ///
7175 /// # Example
7176 /// ```ignore,no_run
7177 /// # use google_cloud_speech_v2::model::StreamingRecognitionResult;
7178 /// let x = StreamingRecognitionResult::new().set_is_final(true);
7179 /// ```
7180 pub fn set_is_final<T: std::convert::Into<bool>>(mut self, v: T) -> Self {
7181 self.is_final = v.into();
7182 self
7183 }
7184
7185 /// Sets the value of [stability][crate::model::StreamingRecognitionResult::stability].
7186 ///
7187 /// # Example
7188 /// ```ignore,no_run
7189 /// # use google_cloud_speech_v2::model::StreamingRecognitionResult;
7190 /// let x = StreamingRecognitionResult::new().set_stability(42.0);
7191 /// ```
7192 pub fn set_stability<T: std::convert::Into<f32>>(mut self, v: T) -> Self {
7193 self.stability = v.into();
7194 self
7195 }
7196
7197 /// Sets the value of [result_end_offset][crate::model::StreamingRecognitionResult::result_end_offset].
7198 ///
7199 /// # Example
7200 /// ```ignore,no_run
7201 /// # use google_cloud_speech_v2::model::StreamingRecognitionResult;
7202 /// use wkt::Duration;
7203 /// let x = StreamingRecognitionResult::new().set_result_end_offset(Duration::default()/* use setters */);
7204 /// ```
7205 pub fn set_result_end_offset<T>(mut self, v: T) -> Self
7206 where
7207 T: std::convert::Into<wkt::Duration>,
7208 {
7209 self.result_end_offset = std::option::Option::Some(v.into());
7210 self
7211 }
7212
7213 /// Sets or clears the value of [result_end_offset][crate::model::StreamingRecognitionResult::result_end_offset].
7214 ///
7215 /// # Example
7216 /// ```ignore,no_run
7217 /// # use google_cloud_speech_v2::model::StreamingRecognitionResult;
7218 /// use wkt::Duration;
7219 /// let x = StreamingRecognitionResult::new().set_or_clear_result_end_offset(Some(Duration::default()/* use setters */));
7220 /// let x = StreamingRecognitionResult::new().set_or_clear_result_end_offset(None::<Duration>);
7221 /// ```
7222 pub fn set_or_clear_result_end_offset<T>(mut self, v: std::option::Option<T>) -> Self
7223 where
7224 T: std::convert::Into<wkt::Duration>,
7225 {
7226 self.result_end_offset = v.map(|x| x.into());
7227 self
7228 }
7229
7230 /// Sets the value of [channel_tag][crate::model::StreamingRecognitionResult::channel_tag].
7231 ///
7232 /// # Example
7233 /// ```ignore,no_run
7234 /// # use google_cloud_speech_v2::model::StreamingRecognitionResult;
7235 /// let x = StreamingRecognitionResult::new().set_channel_tag(42);
7236 /// ```
7237 pub fn set_channel_tag<T: std::convert::Into<i32>>(mut self, v: T) -> Self {
7238 self.channel_tag = v.into();
7239 self
7240 }
7241
7242 /// Sets the value of [language_code][crate::model::StreamingRecognitionResult::language_code].
7243 ///
7244 /// # Example
7245 /// ```ignore,no_run
7246 /// # use google_cloud_speech_v2::model::StreamingRecognitionResult;
7247 /// let x = StreamingRecognitionResult::new().set_language_code("example");
7248 /// ```
7249 pub fn set_language_code<T: std::convert::Into<std::string::String>>(mut self, v: T) -> Self {
7250 self.language_code = v.into();
7251 self
7252 }
7253}
7254
7255impl wkt::message::Message for StreamingRecognitionResult {
7256 fn typename() -> &'static str {
7257 "type.googleapis.com/google.cloud.speech.v2.StreamingRecognitionResult"
7258 }
7259}
7260
/// `StreamingRecognizeResponse` is the only message returned to the client by
/// `StreamingRecognize`. A series of zero or more `StreamingRecognizeResponse`
/// messages are streamed back to the client. If there is no recognizable
/// audio then no messages are streamed back to the client.
///
/// Here are some examples of `StreamingRecognizeResponse`s that might
/// be returned while processing audio:
///
/// 1. results { alternatives { transcript: "tube" } stability: 0.01 }
///
/// 1. results { alternatives { transcript: "to be a" } stability: 0.01 }
///
/// 1. results { alternatives { transcript: "to be" } stability: 0.9 }
///    results { alternatives { transcript: " or not to be" } stability: 0.01 }
///
/// 1. results { alternatives { transcript: "to be or not to be"
///    confidence: 0.92 }
///    alternatives { transcript: "to bee or not to bee" }
///    is_final: true }
///
/// 1. results { alternatives { transcript: " that's" } stability: 0.01 }
///
/// 1. results { alternatives { transcript: " that is" } stability: 0.9 }
///    results { alternatives { transcript: " the question" } stability: 0.01 }
///
/// 1. results { alternatives { transcript: " that is the question"
///    confidence: 0.98 }
///    alternatives { transcript: " that was the question" }
///    is_final: true }
///
///
/// Notes:
///
/// - Only two of the above responses #4 and #7 contain final results; they are
///   indicated by `is_final: true`. Concatenating these together generates the
///   full transcript: "to be or not to be that is the question".
///
/// - The others contain interim `results`. #3 and #6 contain two interim
///   `results`: the first portion has a high stability and is less likely to
///   change; the second portion has a low stability and is very likely to
///   change. A UI designer might choose to show only high stability `results`.
///
/// - The specific `stability` and `confidence` values shown above are only for
///   illustrative purposes. Actual values may vary.
///
/// - In each response, only one of these fields will be set:
///   `error`,
///   `speech_event_type`, or
///   one or more (repeated) `results`.
///
#[derive(Clone, Default, PartialEq)]
#[non_exhaustive]
pub struct StreamingRecognizeResponse {
    /// This repeated list contains zero or more results that
    /// correspond to consecutive portions of the audio currently being processed.
    /// It contains zero or one
    /// [is_final][google.cloud.speech.v2.StreamingRecognitionResult.is_final]=`true`
    /// result (the newly settled portion), followed by zero or more
    /// [is_final][google.cloud.speech.v2.StreamingRecognitionResult.is_final]=`false`
    /// results (the interim results).
    ///
    /// [google.cloud.speech.v2.StreamingRecognitionResult.is_final]: crate::model::StreamingRecognitionResult::is_final
    pub results: std::vec::Vec<crate::model::StreamingRecognitionResult>,

    /// Indicates the type of speech event.
    pub speech_event_type: crate::model::streaming_recognize_response::SpeechEventType,

    /// Time offset between the beginning of the audio and event emission.
    pub speech_event_offset: std::option::Option<wkt::Duration>,

    /// Metadata about the recognition.
    pub metadata: std::option::Option<crate::model::RecognitionResponseMetadata>,

    // Catch-all for JSON fields not modeled above — NOTE(review): presumably
    // maintained by the generated `serialize`/`deserialize` modules; confirm there.
    pub(crate) _unknown_fields: serde_json::Map<std::string::String, serde_json::Value>,
}
7336
7337impl StreamingRecognizeResponse {
7338 pub fn new() -> Self {
7339 std::default::Default::default()
7340 }
7341
7342 /// Sets the value of [results][crate::model::StreamingRecognizeResponse::results].
7343 ///
7344 /// # Example
7345 /// ```ignore,no_run
7346 /// # use google_cloud_speech_v2::model::StreamingRecognizeResponse;
7347 /// use google_cloud_speech_v2::model::StreamingRecognitionResult;
7348 /// let x = StreamingRecognizeResponse::new()
7349 /// .set_results([
7350 /// StreamingRecognitionResult::default()/* use setters */,
7351 /// StreamingRecognitionResult::default()/* use (different) setters */,
7352 /// ]);
7353 /// ```
7354 pub fn set_results<T, V>(mut self, v: T) -> Self
7355 where
7356 T: std::iter::IntoIterator<Item = V>,
7357 V: std::convert::Into<crate::model::StreamingRecognitionResult>,
7358 {
7359 use std::iter::Iterator;
7360 self.results = v.into_iter().map(|i| i.into()).collect();
7361 self
7362 }
7363
7364 /// Sets the value of [speech_event_type][crate::model::StreamingRecognizeResponse::speech_event_type].
7365 ///
7366 /// # Example
7367 /// ```ignore,no_run
7368 /// # use google_cloud_speech_v2::model::StreamingRecognizeResponse;
7369 /// use google_cloud_speech_v2::model::streaming_recognize_response::SpeechEventType;
7370 /// let x0 = StreamingRecognizeResponse::new().set_speech_event_type(SpeechEventType::EndOfSingleUtterance);
7371 /// let x1 = StreamingRecognizeResponse::new().set_speech_event_type(SpeechEventType::SpeechActivityBegin);
7372 /// let x2 = StreamingRecognizeResponse::new().set_speech_event_type(SpeechEventType::SpeechActivityEnd);
7373 /// ```
7374 pub fn set_speech_event_type<
7375 T: std::convert::Into<crate::model::streaming_recognize_response::SpeechEventType>,
7376 >(
7377 mut self,
7378 v: T,
7379 ) -> Self {
7380 self.speech_event_type = v.into();
7381 self
7382 }
7383
7384 /// Sets the value of [speech_event_offset][crate::model::StreamingRecognizeResponse::speech_event_offset].
7385 ///
7386 /// # Example
7387 /// ```ignore,no_run
7388 /// # use google_cloud_speech_v2::model::StreamingRecognizeResponse;
7389 /// use wkt::Duration;
7390 /// let x = StreamingRecognizeResponse::new().set_speech_event_offset(Duration::default()/* use setters */);
7391 /// ```
7392 pub fn set_speech_event_offset<T>(mut self, v: T) -> Self
7393 where
7394 T: std::convert::Into<wkt::Duration>,
7395 {
7396 self.speech_event_offset = std::option::Option::Some(v.into());
7397 self
7398 }
7399
7400 /// Sets or clears the value of [speech_event_offset][crate::model::StreamingRecognizeResponse::speech_event_offset].
7401 ///
7402 /// # Example
7403 /// ```ignore,no_run
7404 /// # use google_cloud_speech_v2::model::StreamingRecognizeResponse;
7405 /// use wkt::Duration;
7406 /// let x = StreamingRecognizeResponse::new().set_or_clear_speech_event_offset(Some(Duration::default()/* use setters */));
7407 /// let x = StreamingRecognizeResponse::new().set_or_clear_speech_event_offset(None::<Duration>);
7408 /// ```
7409 pub fn set_or_clear_speech_event_offset<T>(mut self, v: std::option::Option<T>) -> Self
7410 where
7411 T: std::convert::Into<wkt::Duration>,
7412 {
7413 self.speech_event_offset = v.map(|x| x.into());
7414 self
7415 }
7416
7417 /// Sets the value of [metadata][crate::model::StreamingRecognizeResponse::metadata].
7418 ///
7419 /// # Example
7420 /// ```ignore,no_run
7421 /// # use google_cloud_speech_v2::model::StreamingRecognizeResponse;
7422 /// use google_cloud_speech_v2::model::RecognitionResponseMetadata;
7423 /// let x = StreamingRecognizeResponse::new().set_metadata(RecognitionResponseMetadata::default()/* use setters */);
7424 /// ```
7425 pub fn set_metadata<T>(mut self, v: T) -> Self
7426 where
7427 T: std::convert::Into<crate::model::RecognitionResponseMetadata>,
7428 {
7429 self.metadata = std::option::Option::Some(v.into());
7430 self
7431 }
7432
7433 /// Sets or clears the value of [metadata][crate::model::StreamingRecognizeResponse::metadata].
7434 ///
7435 /// # Example
7436 /// ```ignore,no_run
7437 /// # use google_cloud_speech_v2::model::StreamingRecognizeResponse;
7438 /// use google_cloud_speech_v2::model::RecognitionResponseMetadata;
7439 /// let x = StreamingRecognizeResponse::new().set_or_clear_metadata(Some(RecognitionResponseMetadata::default()/* use setters */));
7440 /// let x = StreamingRecognizeResponse::new().set_or_clear_metadata(None::<RecognitionResponseMetadata>);
7441 /// ```
7442 pub fn set_or_clear_metadata<T>(mut self, v: std::option::Option<T>) -> Self
7443 where
7444 T: std::convert::Into<crate::model::RecognitionResponseMetadata>,
7445 {
7446 self.metadata = v.map(|x| x.into());
7447 self
7448 }
7449}
7450
7451impl wkt::message::Message for StreamingRecognizeResponse {
7452 fn typename() -> &'static str {
7453 "type.googleapis.com/google.cloud.speech.v2.StreamingRecognizeResponse"
7454 }
7455}
7456
/// Defines additional types related to [StreamingRecognizeResponse].
pub mod streaming_recognize_response {
    #[allow(unused_imports)]
    use super::*;

    /// Indicates the type of speech event.
    ///
    /// # Working with unknown values
    ///
    /// This enum is defined as `#[non_exhaustive]` because Google Cloud may add
    /// additional enum variants at any time. Adding new variants is not considered
    /// a breaking change. Applications should write their code in anticipation of:
    ///
    /// - New values appearing in future releases of the client library, **and**
    /// - New values received dynamically, without application changes.
    ///
    /// Please consult the [Working with enums] section in the user guide for some
    /// guidelines.
    ///
    /// [Working with enums]: https://google-cloud-rust.github.io/working_with_enums.html
    #[derive(Clone, Debug, PartialEq)]
    #[non_exhaustive]
    pub enum SpeechEventType {
        /// No speech event specified.
        Unspecified,
        /// This event indicates that the server has detected the end of the user's
        /// speech utterance and expects no additional speech. Therefore, the server
        /// will not process additional audio and will close the gRPC bidirectional
        /// stream. This event is only sent if there was a force cutoff due to
        /// silence being detected early. This event is only available through the
        /// `latest_short` [model][google.cloud.speech.v2.Recognizer.model].
        ///
        /// [google.cloud.speech.v2.Recognizer.model]: crate::model::Recognizer::model
        EndOfSingleUtterance,
        /// This event indicates that the server has detected the beginning of human
        /// voice activity in the stream. This event can be returned multiple times
        /// if speech starts and stops repeatedly throughout the stream. This event
        /// is only sent if `voice_activity_events` is set to true.
        SpeechActivityBegin,
        /// This event indicates that the server has detected the end of human voice
        /// activity in the stream. This event can be returned multiple times if
        /// speech starts and stops repeatedly throughout the stream. This event is
        /// only sent if `voice_activity_events` is set to true.
        SpeechActivityEnd,
        /// If set, the enum was initialized with an unknown value.
        ///
        /// Applications can examine the value using [SpeechEventType::value] or
        /// [SpeechEventType::name].
        UnknownValue(speech_event_type::UnknownValue),
    }

    // Holds the raw (integer or string) representation of enum values this
    // client version does not recognize, so they survive round-trips.
    #[doc(hidden)]
    pub mod speech_event_type {
        #[allow(unused_imports)]
        use super::*;
        #[derive(Clone, Debug, PartialEq)]
        pub struct UnknownValue(pub(crate) wkt::internal::UnknownEnumValue);
    }

    impl SpeechEventType {
        /// Gets the enum value.
        ///
        /// Returns `None` if the enum contains an unknown value deserialized from
        /// the string representation of enums.
        pub fn value(&self) -> std::option::Option<i32> {
            // Integer codes mirror the protobuf enum definition (0..=3).
            match self {
                Self::Unspecified => std::option::Option::Some(0),
                Self::EndOfSingleUtterance => std::option::Option::Some(1),
                Self::SpeechActivityBegin => std::option::Option::Some(2),
                Self::SpeechActivityEnd => std::option::Option::Some(3),
                Self::UnknownValue(u) => u.0.value(),
            }
        }

        /// Gets the enum value as a string.
        ///
        /// Returns `None` if the enum contains an unknown value deserialized from
        /// the integer representation of enums.
        pub fn name(&self) -> std::option::Option<&str> {
            // String names mirror the protobuf enum definition.
            match self {
                Self::Unspecified => std::option::Option::Some("SPEECH_EVENT_TYPE_UNSPECIFIED"),
                Self::EndOfSingleUtterance => std::option::Option::Some("END_OF_SINGLE_UTTERANCE"),
                Self::SpeechActivityBegin => std::option::Option::Some("SPEECH_ACTIVITY_BEGIN"),
                Self::SpeechActivityEnd => std::option::Option::Some("SPEECH_ACTIVITY_END"),
                Self::UnknownValue(u) => u.0.name(),
            }
        }
    }

    impl std::default::Default for SpeechEventType {
        fn default() -> Self {
            // `from(0)` yields `Unspecified`, the enum's zero value.
            use std::convert::From;
            Self::from(0)
        }
    }

    impl std::fmt::Display for SpeechEventType {
        fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::result::Result<(), std::fmt::Error> {
            wkt::internal::display_enum(f, self.name(), self.value())
        }
    }

    impl std::convert::From<i32> for SpeechEventType {
        fn from(value: i32) -> Self {
            match value {
                0 => Self::Unspecified,
                1 => Self::EndOfSingleUtterance,
                2 => Self::SpeechActivityBegin,
                3 => Self::SpeechActivityEnd,
                // Unrecognized integers are preserved rather than dropped.
                _ => Self::UnknownValue(speech_event_type::UnknownValue(
                    wkt::internal::UnknownEnumValue::Integer(value),
                )),
            }
        }
    }

    impl std::convert::From<&str> for SpeechEventType {
        fn from(value: &str) -> Self {
            use std::string::ToString;
            match value {
                "SPEECH_EVENT_TYPE_UNSPECIFIED" => Self::Unspecified,
                "END_OF_SINGLE_UTTERANCE" => Self::EndOfSingleUtterance,
                "SPEECH_ACTIVITY_BEGIN" => Self::SpeechActivityBegin,
                "SPEECH_ACTIVITY_END" => Self::SpeechActivityEnd,
                // Unrecognized names are preserved rather than dropped.
                _ => Self::UnknownValue(speech_event_type::UnknownValue(
                    wkt::internal::UnknownEnumValue::String(value.to_string()),
                )),
            }
        }
    }

    impl serde::ser::Serialize for SpeechEventType {
        fn serialize<S>(&self, serializer: S) -> std::result::Result<S::Ok, S::Error>
        where
            S: serde::Serializer,
        {
            // Known variants serialize as their integer code; unknown values
            // delegate to the representation captured at deserialization time.
            match self {
                Self::Unspecified => serializer.serialize_i32(0),
                Self::EndOfSingleUtterance => serializer.serialize_i32(1),
                Self::SpeechActivityBegin => serializer.serialize_i32(2),
                Self::SpeechActivityEnd => serializer.serialize_i32(3),
                Self::UnknownValue(u) => u.0.serialize(serializer),
            }
        }
    }

    impl<'de> serde::de::Deserialize<'de> for SpeechEventType {
        fn deserialize<D>(deserializer: D) -> std::result::Result<Self, D::Error>
        where
            D: serde::Deserializer<'de>,
        {
            // NOTE(review): `deserialize_any` + EnumVisitor suggests both integer
            // and string forms are accepted; exact behavior lives in wkt::internal.
            deserializer.deserialize_any(wkt::internal::EnumVisitor::<SpeechEventType>::new(
                ".google.cloud.speech.v2.StreamingRecognizeResponse.SpeechEventType",
            ))
        }
    }
}
7614
/// Message representing the config for the Speech-to-Text API. This includes an
/// optional [KMS key](https://cloud.google.com/kms/docs/resource-hierarchy#keys)
/// with which incoming data will be encrypted.
#[derive(Clone, Default, PartialEq)]
#[non_exhaustive]
pub struct Config {
    /// Output only. Identifier. The name of the config resource. There is exactly
    /// one config resource per project per location. The expected format is
    /// `projects/{project}/locations/{location}/config`.
    pub name: std::string::String,

    /// Optional. An optional [KMS key
    /// name](https://cloud.google.com/kms/docs/resource-hierarchy#keys) that if
    /// present, will be used to encrypt Speech-to-Text resources at-rest. Updating
    /// this key will not encrypt existing resources using this key; only new
    /// resources will be encrypted using this key. The expected format is
    /// `projects/{project}/locations/{location}/keyRings/{key_ring}/cryptoKeys/{crypto_key}`.
    pub kms_key_name: std::string::String,

    /// Output only. The most recent time this resource was modified.
    pub update_time: std::option::Option<wkt::Timestamp>,

    // Catch-all for JSON fields not modeled above — NOTE(review): presumably
    // maintained by the generated `serialize`/`deserialize` modules; confirm there.
    pub(crate) _unknown_fields: serde_json::Map<std::string::String, serde_json::Value>,
}
7639
7640impl Config {
7641 pub fn new() -> Self {
7642 std::default::Default::default()
7643 }
7644
7645 /// Sets the value of [name][crate::model::Config::name].
7646 ///
7647 /// # Example
7648 /// ```ignore,no_run
7649 /// # use google_cloud_speech_v2::model::Config;
7650 /// let x = Config::new().set_name("example");
7651 /// ```
7652 pub fn set_name<T: std::convert::Into<std::string::String>>(mut self, v: T) -> Self {
7653 self.name = v.into();
7654 self
7655 }
7656
7657 /// Sets the value of [kms_key_name][crate::model::Config::kms_key_name].
7658 ///
7659 /// # Example
7660 /// ```ignore,no_run
7661 /// # use google_cloud_speech_v2::model::Config;
7662 /// let x = Config::new().set_kms_key_name("example");
7663 /// ```
7664 pub fn set_kms_key_name<T: std::convert::Into<std::string::String>>(mut self, v: T) -> Self {
7665 self.kms_key_name = v.into();
7666 self
7667 }
7668
7669 /// Sets the value of [update_time][crate::model::Config::update_time].
7670 ///
7671 /// # Example
7672 /// ```ignore,no_run
7673 /// # use google_cloud_speech_v2::model::Config;
7674 /// use wkt::Timestamp;
7675 /// let x = Config::new().set_update_time(Timestamp::default()/* use setters */);
7676 /// ```
7677 pub fn set_update_time<T>(mut self, v: T) -> Self
7678 where
7679 T: std::convert::Into<wkt::Timestamp>,
7680 {
7681 self.update_time = std::option::Option::Some(v.into());
7682 self
7683 }
7684
7685 /// Sets or clears the value of [update_time][crate::model::Config::update_time].
7686 ///
7687 /// # Example
7688 /// ```ignore,no_run
7689 /// # use google_cloud_speech_v2::model::Config;
7690 /// use wkt::Timestamp;
7691 /// let x = Config::new().set_or_clear_update_time(Some(Timestamp::default()/* use setters */));
7692 /// let x = Config::new().set_or_clear_update_time(None::<Timestamp>);
7693 /// ```
7694 pub fn set_or_clear_update_time<T>(mut self, v: std::option::Option<T>) -> Self
7695 where
7696 T: std::convert::Into<wkt::Timestamp>,
7697 {
7698 self.update_time = v.map(|x| x.into());
7699 self
7700 }
7701}
7702
7703impl wkt::message::Message for Config {
7704 fn typename() -> &'static str {
7705 "type.googleapis.com/google.cloud.speech.v2.Config"
7706 }
7707}
7708
/// Request message for the
/// [GetConfig][google.cloud.speech.v2.Speech.GetConfig] method.
///
/// [google.cloud.speech.v2.Speech.GetConfig]: crate::client::Speech::get_config
#[derive(Clone, Default, PartialEq)]
#[non_exhaustive]
pub struct GetConfigRequest {
    /// Required. The name of the config to retrieve. There is exactly one config
    /// resource per project per location. The expected format is
    /// `projects/{project}/locations/{location}/config`.
    pub name: std::string::String,

    // Catch-all for JSON fields not modeled above — NOTE(review): presumably
    // maintained by the generated `serialize`/`deserialize` modules; confirm there.
    pub(crate) _unknown_fields: serde_json::Map<std::string::String, serde_json::Value>,
}
7723
7724impl GetConfigRequest {
7725 pub fn new() -> Self {
7726 std::default::Default::default()
7727 }
7728
7729 /// Sets the value of [name][crate::model::GetConfigRequest::name].
7730 ///
7731 /// # Example
7732 /// ```ignore,no_run
7733 /// # use google_cloud_speech_v2::model::GetConfigRequest;
7734 /// let x = GetConfigRequest::new().set_name("example");
7735 /// ```
7736 pub fn set_name<T: std::convert::Into<std::string::String>>(mut self, v: T) -> Self {
7737 self.name = v.into();
7738 self
7739 }
7740}
7741
7742impl wkt::message::Message for GetConfigRequest {
7743 fn typename() -> &'static str {
7744 "type.googleapis.com/google.cloud.speech.v2.GetConfigRequest"
7745 }
7746}
7747
/// Request message for the
/// [UpdateConfig][google.cloud.speech.v2.Speech.UpdateConfig] method.
///
/// [google.cloud.speech.v2.Speech.UpdateConfig]: crate::client::Speech::update_config
#[derive(Clone, Default, PartialEq)]
#[non_exhaustive]
pub struct UpdateConfigRequest {
    /// Required. The config to update.
    ///
    /// The config's `name` field is used to identify the config to be updated.
    /// The expected format is `projects/{project}/locations/{location}/config`.
    pub config: std::option::Option<crate::model::Config>,

    /// The list of fields to be updated.
    pub update_mask: std::option::Option<wkt::FieldMask>,

    // Catch-all for JSON fields not modeled above — NOTE(review): presumably
    // maintained by the generated `serialize`/`deserialize` modules; confirm there.
    pub(crate) _unknown_fields: serde_json::Map<std::string::String, serde_json::Value>,
}
7766
7767impl UpdateConfigRequest {
7768 pub fn new() -> Self {
7769 std::default::Default::default()
7770 }
7771
7772 /// Sets the value of [config][crate::model::UpdateConfigRequest::config].
7773 ///
7774 /// # Example
7775 /// ```ignore,no_run
7776 /// # use google_cloud_speech_v2::model::UpdateConfigRequest;
7777 /// use google_cloud_speech_v2::model::Config;
7778 /// let x = UpdateConfigRequest::new().set_config(Config::default()/* use setters */);
7779 /// ```
7780 pub fn set_config<T>(mut self, v: T) -> Self
7781 where
7782 T: std::convert::Into<crate::model::Config>,
7783 {
7784 self.config = std::option::Option::Some(v.into());
7785 self
7786 }
7787
7788 /// Sets or clears the value of [config][crate::model::UpdateConfigRequest::config].
7789 ///
7790 /// # Example
7791 /// ```ignore,no_run
7792 /// # use google_cloud_speech_v2::model::UpdateConfigRequest;
7793 /// use google_cloud_speech_v2::model::Config;
7794 /// let x = UpdateConfigRequest::new().set_or_clear_config(Some(Config::default()/* use setters */));
7795 /// let x = UpdateConfigRequest::new().set_or_clear_config(None::<Config>);
7796 /// ```
7797 pub fn set_or_clear_config<T>(mut self, v: std::option::Option<T>) -> Self
7798 where
7799 T: std::convert::Into<crate::model::Config>,
7800 {
7801 self.config = v.map(|x| x.into());
7802 self
7803 }
7804
7805 /// Sets the value of [update_mask][crate::model::UpdateConfigRequest::update_mask].
7806 ///
7807 /// # Example
7808 /// ```ignore,no_run
7809 /// # use google_cloud_speech_v2::model::UpdateConfigRequest;
7810 /// use wkt::FieldMask;
7811 /// let x = UpdateConfigRequest::new().set_update_mask(FieldMask::default()/* use setters */);
7812 /// ```
7813 pub fn set_update_mask<T>(mut self, v: T) -> Self
7814 where
7815 T: std::convert::Into<wkt::FieldMask>,
7816 {
7817 self.update_mask = std::option::Option::Some(v.into());
7818 self
7819 }
7820
7821 /// Sets or clears the value of [update_mask][crate::model::UpdateConfigRequest::update_mask].
7822 ///
7823 /// # Example
7824 /// ```ignore,no_run
7825 /// # use google_cloud_speech_v2::model::UpdateConfigRequest;
7826 /// use wkt::FieldMask;
7827 /// let x = UpdateConfigRequest::new().set_or_clear_update_mask(Some(FieldMask::default()/* use setters */));
7828 /// let x = UpdateConfigRequest::new().set_or_clear_update_mask(None::<FieldMask>);
7829 /// ```
7830 pub fn set_or_clear_update_mask<T>(mut self, v: std::option::Option<T>) -> Self
7831 where
7832 T: std::convert::Into<wkt::FieldMask>,
7833 {
7834 self.update_mask = v.map(|x| x.into());
7835 self
7836 }
7837}
7838
7839impl wkt::message::Message for UpdateConfigRequest {
7840 fn typename() -> &'static str {
7841 "type.googleapis.com/google.cloud.speech.v2.UpdateConfigRequest"
7842 }
7843}
7844
/// CustomClass for biasing in speech recognition. Used to define a set of words
/// or phrases that represents a common concept or theme likely to appear in your
/// audio, for example a list of passenger ship names.
#[derive(Clone, Default, PartialEq)]
#[non_exhaustive]
pub struct CustomClass {
    /// Output only. Identifier. The resource name of the CustomClass.
    /// Format:
    /// `projects/{project}/locations/{location}/customClasses/{custom_class}`.
    pub name: std::string::String,

    /// Output only. System-assigned unique identifier for the CustomClass.
    pub uid: std::string::String,

    /// Optional. User-settable, human-readable name for the CustomClass. Must be
    /// 63 characters or less.
    pub display_name: std::string::String,

    /// A collection of class items.
    pub items: std::vec::Vec<crate::model::custom_class::ClassItem>,

    /// Output only. The CustomClass lifecycle state.
    pub state: crate::model::custom_class::State,

    /// Output only. Creation time.
    pub create_time: std::option::Option<wkt::Timestamp>,

    /// Output only. The most recent time this resource was modified.
    pub update_time: std::option::Option<wkt::Timestamp>,

    /// Output only. The time at which this resource was requested for deletion.
    pub delete_time: std::option::Option<wkt::Timestamp>,

    /// Output only. The time at which this resource will be purged.
    pub expire_time: std::option::Option<wkt::Timestamp>,

    /// Optional. Allows users to store small amounts of arbitrary data.
    /// Both the key and the value must be 63 characters or less each.
    /// At most 100 annotations.
    pub annotations: std::collections::HashMap<std::string::String, std::string::String>,

    /// Output only. This checksum is computed by the server based on the value of
    /// other fields. This may be sent on update, undelete, and delete requests to
    /// ensure the client has an up-to-date value before proceeding.
    pub etag: std::string::String,

    /// Output only. Whether or not this CustomClass is in the process of being
    /// updated.
    pub reconciling: bool,

    /// Output only. The [KMS key
    /// name](https://cloud.google.com/kms/docs/resource-hierarchy#keys) with which
    /// the CustomClass is encrypted. The expected format is
    /// `projects/{project}/locations/{location}/keyRings/{key_ring}/cryptoKeys/{crypto_key}`.
    pub kms_key_name: std::string::String,

    /// Output only. The [KMS key version
    /// name](https://cloud.google.com/kms/docs/resource-hierarchy#key_versions)
    /// with which the CustomClass is encrypted. The expected format is
    /// `projects/{project}/locations/{location}/keyRings/{key_ring}/cryptoKeys/{crypto_key}/cryptoKeyVersions/{crypto_key_version}`.
    pub kms_key_version_name: std::string::String,

    // Catch-all for JSON fields not modeled above — NOTE(review): presumably
    // maintained by the generated `serialize`/`deserialize` modules; confirm there.
    pub(crate) _unknown_fields: serde_json::Map<std::string::String, serde_json::Value>,
}
7909
7910impl CustomClass {
7911 pub fn new() -> Self {
7912 std::default::Default::default()
7913 }
7914
7915 /// Sets the value of [name][crate::model::CustomClass::name].
7916 ///
7917 /// # Example
7918 /// ```ignore,no_run
7919 /// # use google_cloud_speech_v2::model::CustomClass;
7920 /// let x = CustomClass::new().set_name("example");
7921 /// ```
7922 pub fn set_name<T: std::convert::Into<std::string::String>>(mut self, v: T) -> Self {
7923 self.name = v.into();
7924 self
7925 }
7926
7927 /// Sets the value of [uid][crate::model::CustomClass::uid].
7928 ///
7929 /// # Example
7930 /// ```ignore,no_run
7931 /// # use google_cloud_speech_v2::model::CustomClass;
7932 /// let x = CustomClass::new().set_uid("example");
7933 /// ```
7934 pub fn set_uid<T: std::convert::Into<std::string::String>>(mut self, v: T) -> Self {
7935 self.uid = v.into();
7936 self
7937 }
7938
7939 /// Sets the value of [display_name][crate::model::CustomClass::display_name].
7940 ///
7941 /// # Example
7942 /// ```ignore,no_run
7943 /// # use google_cloud_speech_v2::model::CustomClass;
7944 /// let x = CustomClass::new().set_display_name("example");
7945 /// ```
7946 pub fn set_display_name<T: std::convert::Into<std::string::String>>(mut self, v: T) -> Self {
7947 self.display_name = v.into();
7948 self
7949 }
7950
7951 /// Sets the value of [items][crate::model::CustomClass::items].
7952 ///
7953 /// # Example
7954 /// ```ignore,no_run
7955 /// # use google_cloud_speech_v2::model::CustomClass;
7956 /// use google_cloud_speech_v2::model::custom_class::ClassItem;
7957 /// let x = CustomClass::new()
7958 /// .set_items([
7959 /// ClassItem::default()/* use setters */,
7960 /// ClassItem::default()/* use (different) setters */,
7961 /// ]);
7962 /// ```
7963 pub fn set_items<T, V>(mut self, v: T) -> Self
7964 where
7965 T: std::iter::IntoIterator<Item = V>,
7966 V: std::convert::Into<crate::model::custom_class::ClassItem>,
7967 {
7968 use std::iter::Iterator;
7969 self.items = v.into_iter().map(|i| i.into()).collect();
7970 self
7971 }
7972
7973 /// Sets the value of [state][crate::model::CustomClass::state].
7974 ///
7975 /// # Example
7976 /// ```ignore,no_run
7977 /// # use google_cloud_speech_v2::model::CustomClass;
7978 /// use google_cloud_speech_v2::model::custom_class::State;
7979 /// let x0 = CustomClass::new().set_state(State::Active);
7980 /// let x1 = CustomClass::new().set_state(State::Deleted);
7981 /// ```
7982 pub fn set_state<T: std::convert::Into<crate::model::custom_class::State>>(
7983 mut self,
7984 v: T,
7985 ) -> Self {
7986 self.state = v.into();
7987 self
7988 }
7989
7990 /// Sets the value of [create_time][crate::model::CustomClass::create_time].
7991 ///
7992 /// # Example
7993 /// ```ignore,no_run
7994 /// # use google_cloud_speech_v2::model::CustomClass;
7995 /// use wkt::Timestamp;
7996 /// let x = CustomClass::new().set_create_time(Timestamp::default()/* use setters */);
7997 /// ```
7998 pub fn set_create_time<T>(mut self, v: T) -> Self
7999 where
8000 T: std::convert::Into<wkt::Timestamp>,
8001 {
8002 self.create_time = std::option::Option::Some(v.into());
8003 self
8004 }
8005
8006 /// Sets or clears the value of [create_time][crate::model::CustomClass::create_time].
8007 ///
8008 /// # Example
8009 /// ```ignore,no_run
8010 /// # use google_cloud_speech_v2::model::CustomClass;
8011 /// use wkt::Timestamp;
8012 /// let x = CustomClass::new().set_or_clear_create_time(Some(Timestamp::default()/* use setters */));
8013 /// let x = CustomClass::new().set_or_clear_create_time(None::<Timestamp>);
8014 /// ```
8015 pub fn set_or_clear_create_time<T>(mut self, v: std::option::Option<T>) -> Self
8016 where
8017 T: std::convert::Into<wkt::Timestamp>,
8018 {
8019 self.create_time = v.map(|x| x.into());
8020 self
8021 }
8022
8023 /// Sets the value of [update_time][crate::model::CustomClass::update_time].
8024 ///
8025 /// # Example
8026 /// ```ignore,no_run
8027 /// # use google_cloud_speech_v2::model::CustomClass;
8028 /// use wkt::Timestamp;
8029 /// let x = CustomClass::new().set_update_time(Timestamp::default()/* use setters */);
8030 /// ```
8031 pub fn set_update_time<T>(mut self, v: T) -> Self
8032 where
8033 T: std::convert::Into<wkt::Timestamp>,
8034 {
8035 self.update_time = std::option::Option::Some(v.into());
8036 self
8037 }
8038
8039 /// Sets or clears the value of [update_time][crate::model::CustomClass::update_time].
8040 ///
8041 /// # Example
8042 /// ```ignore,no_run
8043 /// # use google_cloud_speech_v2::model::CustomClass;
8044 /// use wkt::Timestamp;
8045 /// let x = CustomClass::new().set_or_clear_update_time(Some(Timestamp::default()/* use setters */));
8046 /// let x = CustomClass::new().set_or_clear_update_time(None::<Timestamp>);
8047 /// ```
8048 pub fn set_or_clear_update_time<T>(mut self, v: std::option::Option<T>) -> Self
8049 where
8050 T: std::convert::Into<wkt::Timestamp>,
8051 {
8052 self.update_time = v.map(|x| x.into());
8053 self
8054 }
8055
8056 /// Sets the value of [delete_time][crate::model::CustomClass::delete_time].
8057 ///
8058 /// # Example
8059 /// ```ignore,no_run
8060 /// # use google_cloud_speech_v2::model::CustomClass;
8061 /// use wkt::Timestamp;
8062 /// let x = CustomClass::new().set_delete_time(Timestamp::default()/* use setters */);
8063 /// ```
8064 pub fn set_delete_time<T>(mut self, v: T) -> Self
8065 where
8066 T: std::convert::Into<wkt::Timestamp>,
8067 {
8068 self.delete_time = std::option::Option::Some(v.into());
8069 self
8070 }
8071
8072 /// Sets or clears the value of [delete_time][crate::model::CustomClass::delete_time].
8073 ///
8074 /// # Example
8075 /// ```ignore,no_run
8076 /// # use google_cloud_speech_v2::model::CustomClass;
8077 /// use wkt::Timestamp;
8078 /// let x = CustomClass::new().set_or_clear_delete_time(Some(Timestamp::default()/* use setters */));
8079 /// let x = CustomClass::new().set_or_clear_delete_time(None::<Timestamp>);
8080 /// ```
8081 pub fn set_or_clear_delete_time<T>(mut self, v: std::option::Option<T>) -> Self
8082 where
8083 T: std::convert::Into<wkt::Timestamp>,
8084 {
8085 self.delete_time = v.map(|x| x.into());
8086 self
8087 }
8088
8089 /// Sets the value of [expire_time][crate::model::CustomClass::expire_time].
8090 ///
8091 /// # Example
8092 /// ```ignore,no_run
8093 /// # use google_cloud_speech_v2::model::CustomClass;
8094 /// use wkt::Timestamp;
8095 /// let x = CustomClass::new().set_expire_time(Timestamp::default()/* use setters */);
8096 /// ```
8097 pub fn set_expire_time<T>(mut self, v: T) -> Self
8098 where
8099 T: std::convert::Into<wkt::Timestamp>,
8100 {
8101 self.expire_time = std::option::Option::Some(v.into());
8102 self
8103 }
8104
8105 /// Sets or clears the value of [expire_time][crate::model::CustomClass::expire_time].
8106 ///
8107 /// # Example
8108 /// ```ignore,no_run
8109 /// # use google_cloud_speech_v2::model::CustomClass;
8110 /// use wkt::Timestamp;
8111 /// let x = CustomClass::new().set_or_clear_expire_time(Some(Timestamp::default()/* use setters */));
8112 /// let x = CustomClass::new().set_or_clear_expire_time(None::<Timestamp>);
8113 /// ```
8114 pub fn set_or_clear_expire_time<T>(mut self, v: std::option::Option<T>) -> Self
8115 where
8116 T: std::convert::Into<wkt::Timestamp>,
8117 {
8118 self.expire_time = v.map(|x| x.into());
8119 self
8120 }
8121
8122 /// Sets the value of [annotations][crate::model::CustomClass::annotations].
8123 ///
8124 /// # Example
8125 /// ```ignore,no_run
8126 /// # use google_cloud_speech_v2::model::CustomClass;
8127 /// let x = CustomClass::new().set_annotations([
8128 /// ("key0", "abc"),
8129 /// ("key1", "xyz"),
8130 /// ]);
8131 /// ```
8132 pub fn set_annotations<T, K, V>(mut self, v: T) -> Self
8133 where
8134 T: std::iter::IntoIterator<Item = (K, V)>,
8135 K: std::convert::Into<std::string::String>,
8136 V: std::convert::Into<std::string::String>,
8137 {
8138 use std::iter::Iterator;
8139 self.annotations = v.into_iter().map(|(k, v)| (k.into(), v.into())).collect();
8140 self
8141 }
8142
8143 /// Sets the value of [etag][crate::model::CustomClass::etag].
8144 ///
8145 /// # Example
8146 /// ```ignore,no_run
8147 /// # use google_cloud_speech_v2::model::CustomClass;
8148 /// let x = CustomClass::new().set_etag("example");
8149 /// ```
8150 pub fn set_etag<T: std::convert::Into<std::string::String>>(mut self, v: T) -> Self {
8151 self.etag = v.into();
8152 self
8153 }
8154
8155 /// Sets the value of [reconciling][crate::model::CustomClass::reconciling].
8156 ///
8157 /// # Example
8158 /// ```ignore,no_run
8159 /// # use google_cloud_speech_v2::model::CustomClass;
8160 /// let x = CustomClass::new().set_reconciling(true);
8161 /// ```
8162 pub fn set_reconciling<T: std::convert::Into<bool>>(mut self, v: T) -> Self {
8163 self.reconciling = v.into();
8164 self
8165 }
8166
8167 /// Sets the value of [kms_key_name][crate::model::CustomClass::kms_key_name].
8168 ///
8169 /// # Example
8170 /// ```ignore,no_run
8171 /// # use google_cloud_speech_v2::model::CustomClass;
8172 /// let x = CustomClass::new().set_kms_key_name("example");
8173 /// ```
8174 pub fn set_kms_key_name<T: std::convert::Into<std::string::String>>(mut self, v: T) -> Self {
8175 self.kms_key_name = v.into();
8176 self
8177 }
8178
8179 /// Sets the value of [kms_key_version_name][crate::model::CustomClass::kms_key_version_name].
8180 ///
8181 /// # Example
8182 /// ```ignore,no_run
8183 /// # use google_cloud_speech_v2::model::CustomClass;
8184 /// let x = CustomClass::new().set_kms_key_version_name("example");
8185 /// ```
8186 pub fn set_kms_key_version_name<T: std::convert::Into<std::string::String>>(
8187 mut self,
8188 v: T,
8189 ) -> Self {
8190 self.kms_key_version_name = v.into();
8191 self
8192 }
8193}
8194
8195impl wkt::message::Message for CustomClass {
8196 fn typename() -> &'static str {
8197 "type.googleapis.com/google.cloud.speech.v2.CustomClass"
8198 }
8199}
8200
/// Defines additional types related to [CustomClass].
pub mod custom_class {
    #[allow(unused_imports)]
    use super::*;

    /// An item of the class.
    #[derive(Clone, Default, PartialEq)]
    #[non_exhaustive]
    pub struct ClassItem {
        /// The class item's value.
        pub value: std::string::String,

        // NOTE(review): presumably retains JSON properties not modeled by the named
        // fields above — confirm against the sibling `serialize`/`deserialize` modules.
        pub(crate) _unknown_fields: serde_json::Map<std::string::String, serde_json::Value>,
    }

    impl ClassItem {
        /// Creates a new, empty [ClassItem].
        pub fn new() -> Self {
            std::default::Default::default()
        }

        /// Sets the value of [value][crate::model::custom_class::ClassItem::value].
        ///
        /// # Example
        /// ```ignore,no_run
        /// # use google_cloud_speech_v2::model::custom_class::ClassItem;
        /// let x = ClassItem::new().set_value("example");
        /// ```
        pub fn set_value<T: std::convert::Into<std::string::String>>(mut self, v: T) -> Self {
            self.value = v.into();
            self
        }
    }

    impl wkt::message::Message for ClassItem {
        // Fully-qualified protobuf type URL for this message.
        fn typename() -> &'static str {
            "type.googleapis.com/google.cloud.speech.v2.CustomClass.ClassItem"
        }
    }

    /// Set of states that define the lifecycle of a CustomClass.
    ///
    /// # Working with unknown values
    ///
    /// This enum is defined as `#[non_exhaustive]` because Google Cloud may add
    /// additional enum variants at any time. Adding new variants is not considered
    /// a breaking change. Applications should write their code in anticipation of:
    ///
    /// - New values appearing in future releases of the client library, **and**
    /// - New values received dynamically, without application changes.
    ///
    /// Please consult the [Working with enums] section in the user guide for some
    /// guidelines.
    ///
    /// [Working with enums]: https://google-cloud-rust.github.io/working_with_enums.html
    #[derive(Clone, Debug, PartialEq)]
    #[non_exhaustive]
    pub enum State {
        /// Unspecified state. This is only used/useful for distinguishing
        /// unset values.
        Unspecified,
        /// The normal and active state.
        Active,
        /// This CustomClass has been deleted.
        Deleted,
        /// If set, the enum was initialized with an unknown value.
        ///
        /// Applications can examine the value using [State::value] or
        /// [State::name].
        UnknownValue(state::UnknownValue),
    }

    /// Support module wrapping the raw representation of an unrecognized `State`.
    #[doc(hidden)]
    pub mod state {
        #[allow(unused_imports)]
        use super::*;
        #[derive(Clone, Debug, PartialEq)]
        pub struct UnknownValue(pub(crate) wkt::internal::UnknownEnumValue);
    }

    impl State {
        /// Gets the enum value.
        ///
        /// Returns `None` if the enum contains an unknown value deserialized from
        /// the string representation of enums.
        pub fn value(&self) -> std::option::Option<i32> {
            // Known values are non-contiguous (0, 2, 4); they match the wire values
            // used by `From<i32>` and `Serialize` below.
            match self {
                Self::Unspecified => std::option::Option::Some(0),
                Self::Active => std::option::Option::Some(2),
                Self::Deleted => std::option::Option::Some(4),
                Self::UnknownValue(u) => u.0.value(),
            }
        }

        /// Gets the enum value as a string.
        ///
        /// Returns `None` if the enum contains an unknown value deserialized from
        /// the integer representation of enums.
        pub fn name(&self) -> std::option::Option<&str> {
            match self {
                Self::Unspecified => std::option::Option::Some("STATE_UNSPECIFIED"),
                Self::Active => std::option::Option::Some("ACTIVE"),
                Self::Deleted => std::option::Option::Some("DELETED"),
                Self::UnknownValue(u) => u.0.name(),
            }
        }
    }

    impl std::default::Default for State {
        // Defaults to `Unspecified` (wire value 0).
        fn default() -> Self {
            use std::convert::From;
            Self::from(0)
        }
    }

    impl std::fmt::Display for State {
        fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::result::Result<(), std::fmt::Error> {
            wkt::internal::display_enum(f, self.name(), self.value())
        }
    }

    // Converts from the protobuf integer representation. Unrecognized integers are
    // preserved as `UnknownValue` rather than dropped, so they can round-trip.
    impl std::convert::From<i32> for State {
        fn from(value: i32) -> Self {
            match value {
                0 => Self::Unspecified,
                2 => Self::Active,
                4 => Self::Deleted,
                _ => Self::UnknownValue(state::UnknownValue(
                    wkt::internal::UnknownEnumValue::Integer(value),
                )),
            }
        }
    }

    // Converts from the protobuf string representation. Unrecognized strings are
    // preserved as `UnknownValue` rather than dropped.
    impl std::convert::From<&str> for State {
        fn from(value: &str) -> Self {
            use std::string::ToString;
            match value {
                "STATE_UNSPECIFIED" => Self::Unspecified,
                "ACTIVE" => Self::Active,
                "DELETED" => Self::Deleted,
                _ => Self::UnknownValue(state::UnknownValue(
                    wkt::internal::UnknownEnumValue::String(value.to_string()),
                )),
            }
        }
    }

    impl serde::ser::Serialize for State {
        fn serialize<S>(&self, serializer: S) -> std::result::Result<S::Ok, S::Error>
        where
            S: serde::Serializer,
        {
            // Known variants serialize as their integer wire value; unknown values
            // delegate to the wrapped representation (integer or string).
            match self {
                Self::Unspecified => serializer.serialize_i32(0),
                Self::Active => serializer.serialize_i32(2),
                Self::Deleted => serializer.serialize_i32(4),
                Self::UnknownValue(u) => u.0.serialize(serializer),
            }
        }
    }

    impl<'de> serde::de::Deserialize<'de> for State {
        fn deserialize<D>(deserializer: D) -> std::result::Result<Self, D::Error>
        where
            D: serde::Deserializer<'de>,
        {
            // NOTE(review): `deserialize_any` suggests the visitor accepts both the
            // integer and string forms — confirm in `wkt::internal::EnumVisitor`.
            deserializer.deserialize_any(wkt::internal::EnumVisitor::<State>::new(
                ".google.cloud.speech.v2.CustomClass.State",
            ))
        }
    }
}
8373
/// PhraseSet for biasing in speech recognition. A PhraseSet is used to provide
/// "hints" to the speech recognizer to favor specific words and phrases in the
/// results.
#[derive(Clone, Default, PartialEq)]
#[non_exhaustive]
pub struct PhraseSet {
    /// Output only. Identifier. The resource name of the PhraseSet.
    /// Format: `projects/{project}/locations/{location}/phraseSets/{phrase_set}`.
    pub name: std::string::String,

    /// Output only. System-assigned unique identifier for the PhraseSet.
    pub uid: std::string::String,

    /// A list of word and phrases.
    pub phrases: std::vec::Vec<crate::model::phrase_set::Phrase>,

    /// Hint Boost. Positive value will increase the probability that a specific
    /// phrase will be recognized over other similar sounding phrases. The higher
    /// the boost, the higher the chance of false positive recognition as well.
    /// Valid `boost` values are between 0 (exclusive) and 20. We recommend using a
    /// binary search approach to finding the optimal value for your use case as
    /// well as adding phrases both with and without boost to your requests.
    pub boost: f32,

    /// User-settable, human-readable name for the PhraseSet. Must be 63
    /// characters or less.
    pub display_name: std::string::String,

    /// Output only. The PhraseSet lifecycle state.
    pub state: crate::model::phrase_set::State,

    /// Output only. Creation time.
    pub create_time: std::option::Option<wkt::Timestamp>,

    /// Output only. The most recent time this resource was modified.
    pub update_time: std::option::Option<wkt::Timestamp>,

    /// Output only. The time at which this resource was requested for deletion.
    pub delete_time: std::option::Option<wkt::Timestamp>,

    /// Output only. The time at which this resource will be purged.
    pub expire_time: std::option::Option<wkt::Timestamp>,

    /// Allows users to store small amounts of arbitrary data.
    /// Both the key and the value must be 63 characters or less each.
    /// At most 100 annotations.
    pub annotations: std::collections::HashMap<std::string::String, std::string::String>,

    /// Output only. This checksum is computed by the server based on the value of
    /// other fields. This may be sent on update, undelete, and delete requests to
    /// ensure the client has an up-to-date value before proceeding.
    pub etag: std::string::String,

    /// Output only. Whether or not this PhraseSet is in the process of being
    /// updated.
    pub reconciling: bool,

    /// Output only. The [KMS key
    /// name](https://cloud.google.com/kms/docs/resource-hierarchy#keys) with which
    /// the PhraseSet is encrypted. The expected format is
    /// `projects/{project}/locations/{location}/keyRings/{key_ring}/cryptoKeys/{crypto_key}`.
    pub kms_key_name: std::string::String,

    /// Output only. The [KMS key version
    /// name](https://cloud.google.com/kms/docs/resource-hierarchy#key_versions)
    /// with which the PhraseSet is encrypted. The expected format is
    /// `projects/{project}/locations/{location}/keyRings/{key_ring}/cryptoKeys/{crypto_key}/cryptoKeyVersions/{crypto_key_version}`.
    pub kms_key_version_name: std::string::String,

    // NOTE(review): presumably retains JSON properties not modeled by the named
    // fields above — confirm against the sibling `serialize`/`deserialize` modules.
    pub(crate) _unknown_fields: serde_json::Map<std::string::String, serde_json::Value>,
}
8445
8446impl PhraseSet {
8447 pub fn new() -> Self {
8448 std::default::Default::default()
8449 }
8450
8451 /// Sets the value of [name][crate::model::PhraseSet::name].
8452 ///
8453 /// # Example
8454 /// ```ignore,no_run
8455 /// # use google_cloud_speech_v2::model::PhraseSet;
8456 /// let x = PhraseSet::new().set_name("example");
8457 /// ```
8458 pub fn set_name<T: std::convert::Into<std::string::String>>(mut self, v: T) -> Self {
8459 self.name = v.into();
8460 self
8461 }
8462
8463 /// Sets the value of [uid][crate::model::PhraseSet::uid].
8464 ///
8465 /// # Example
8466 /// ```ignore,no_run
8467 /// # use google_cloud_speech_v2::model::PhraseSet;
8468 /// let x = PhraseSet::new().set_uid("example");
8469 /// ```
8470 pub fn set_uid<T: std::convert::Into<std::string::String>>(mut self, v: T) -> Self {
8471 self.uid = v.into();
8472 self
8473 }
8474
8475 /// Sets the value of [phrases][crate::model::PhraseSet::phrases].
8476 ///
8477 /// # Example
8478 /// ```ignore,no_run
8479 /// # use google_cloud_speech_v2::model::PhraseSet;
8480 /// use google_cloud_speech_v2::model::phrase_set::Phrase;
8481 /// let x = PhraseSet::new()
8482 /// .set_phrases([
8483 /// Phrase::default()/* use setters */,
8484 /// Phrase::default()/* use (different) setters */,
8485 /// ]);
8486 /// ```
8487 pub fn set_phrases<T, V>(mut self, v: T) -> Self
8488 where
8489 T: std::iter::IntoIterator<Item = V>,
8490 V: std::convert::Into<crate::model::phrase_set::Phrase>,
8491 {
8492 use std::iter::Iterator;
8493 self.phrases = v.into_iter().map(|i| i.into()).collect();
8494 self
8495 }
8496
8497 /// Sets the value of [boost][crate::model::PhraseSet::boost].
8498 ///
8499 /// # Example
8500 /// ```ignore,no_run
8501 /// # use google_cloud_speech_v2::model::PhraseSet;
8502 /// let x = PhraseSet::new().set_boost(42.0);
8503 /// ```
8504 pub fn set_boost<T: std::convert::Into<f32>>(mut self, v: T) -> Self {
8505 self.boost = v.into();
8506 self
8507 }
8508
8509 /// Sets the value of [display_name][crate::model::PhraseSet::display_name].
8510 ///
8511 /// # Example
8512 /// ```ignore,no_run
8513 /// # use google_cloud_speech_v2::model::PhraseSet;
8514 /// let x = PhraseSet::new().set_display_name("example");
8515 /// ```
8516 pub fn set_display_name<T: std::convert::Into<std::string::String>>(mut self, v: T) -> Self {
8517 self.display_name = v.into();
8518 self
8519 }
8520
8521 /// Sets the value of [state][crate::model::PhraseSet::state].
8522 ///
8523 /// # Example
8524 /// ```ignore,no_run
8525 /// # use google_cloud_speech_v2::model::PhraseSet;
8526 /// use google_cloud_speech_v2::model::phrase_set::State;
8527 /// let x0 = PhraseSet::new().set_state(State::Active);
8528 /// let x1 = PhraseSet::new().set_state(State::Deleted);
8529 /// ```
8530 pub fn set_state<T: std::convert::Into<crate::model::phrase_set::State>>(
8531 mut self,
8532 v: T,
8533 ) -> Self {
8534 self.state = v.into();
8535 self
8536 }
8537
8538 /// Sets the value of [create_time][crate::model::PhraseSet::create_time].
8539 ///
8540 /// # Example
8541 /// ```ignore,no_run
8542 /// # use google_cloud_speech_v2::model::PhraseSet;
8543 /// use wkt::Timestamp;
8544 /// let x = PhraseSet::new().set_create_time(Timestamp::default()/* use setters */);
8545 /// ```
8546 pub fn set_create_time<T>(mut self, v: T) -> Self
8547 where
8548 T: std::convert::Into<wkt::Timestamp>,
8549 {
8550 self.create_time = std::option::Option::Some(v.into());
8551 self
8552 }
8553
8554 /// Sets or clears the value of [create_time][crate::model::PhraseSet::create_time].
8555 ///
8556 /// # Example
8557 /// ```ignore,no_run
8558 /// # use google_cloud_speech_v2::model::PhraseSet;
8559 /// use wkt::Timestamp;
8560 /// let x = PhraseSet::new().set_or_clear_create_time(Some(Timestamp::default()/* use setters */));
8561 /// let x = PhraseSet::new().set_or_clear_create_time(None::<Timestamp>);
8562 /// ```
8563 pub fn set_or_clear_create_time<T>(mut self, v: std::option::Option<T>) -> Self
8564 where
8565 T: std::convert::Into<wkt::Timestamp>,
8566 {
8567 self.create_time = v.map(|x| x.into());
8568 self
8569 }
8570
8571 /// Sets the value of [update_time][crate::model::PhraseSet::update_time].
8572 ///
8573 /// # Example
8574 /// ```ignore,no_run
8575 /// # use google_cloud_speech_v2::model::PhraseSet;
8576 /// use wkt::Timestamp;
8577 /// let x = PhraseSet::new().set_update_time(Timestamp::default()/* use setters */);
8578 /// ```
8579 pub fn set_update_time<T>(mut self, v: T) -> Self
8580 where
8581 T: std::convert::Into<wkt::Timestamp>,
8582 {
8583 self.update_time = std::option::Option::Some(v.into());
8584 self
8585 }
8586
8587 /// Sets or clears the value of [update_time][crate::model::PhraseSet::update_time].
8588 ///
8589 /// # Example
8590 /// ```ignore,no_run
8591 /// # use google_cloud_speech_v2::model::PhraseSet;
8592 /// use wkt::Timestamp;
8593 /// let x = PhraseSet::new().set_or_clear_update_time(Some(Timestamp::default()/* use setters */));
8594 /// let x = PhraseSet::new().set_or_clear_update_time(None::<Timestamp>);
8595 /// ```
8596 pub fn set_or_clear_update_time<T>(mut self, v: std::option::Option<T>) -> Self
8597 where
8598 T: std::convert::Into<wkt::Timestamp>,
8599 {
8600 self.update_time = v.map(|x| x.into());
8601 self
8602 }
8603
8604 /// Sets the value of [delete_time][crate::model::PhraseSet::delete_time].
8605 ///
8606 /// # Example
8607 /// ```ignore,no_run
8608 /// # use google_cloud_speech_v2::model::PhraseSet;
8609 /// use wkt::Timestamp;
8610 /// let x = PhraseSet::new().set_delete_time(Timestamp::default()/* use setters */);
8611 /// ```
8612 pub fn set_delete_time<T>(mut self, v: T) -> Self
8613 where
8614 T: std::convert::Into<wkt::Timestamp>,
8615 {
8616 self.delete_time = std::option::Option::Some(v.into());
8617 self
8618 }
8619
8620 /// Sets or clears the value of [delete_time][crate::model::PhraseSet::delete_time].
8621 ///
8622 /// # Example
8623 /// ```ignore,no_run
8624 /// # use google_cloud_speech_v2::model::PhraseSet;
8625 /// use wkt::Timestamp;
8626 /// let x = PhraseSet::new().set_or_clear_delete_time(Some(Timestamp::default()/* use setters */));
8627 /// let x = PhraseSet::new().set_or_clear_delete_time(None::<Timestamp>);
8628 /// ```
8629 pub fn set_or_clear_delete_time<T>(mut self, v: std::option::Option<T>) -> Self
8630 where
8631 T: std::convert::Into<wkt::Timestamp>,
8632 {
8633 self.delete_time = v.map(|x| x.into());
8634 self
8635 }
8636
8637 /// Sets the value of [expire_time][crate::model::PhraseSet::expire_time].
8638 ///
8639 /// # Example
8640 /// ```ignore,no_run
8641 /// # use google_cloud_speech_v2::model::PhraseSet;
8642 /// use wkt::Timestamp;
8643 /// let x = PhraseSet::new().set_expire_time(Timestamp::default()/* use setters */);
8644 /// ```
8645 pub fn set_expire_time<T>(mut self, v: T) -> Self
8646 where
8647 T: std::convert::Into<wkt::Timestamp>,
8648 {
8649 self.expire_time = std::option::Option::Some(v.into());
8650 self
8651 }
8652
8653 /// Sets or clears the value of [expire_time][crate::model::PhraseSet::expire_time].
8654 ///
8655 /// # Example
8656 /// ```ignore,no_run
8657 /// # use google_cloud_speech_v2::model::PhraseSet;
8658 /// use wkt::Timestamp;
8659 /// let x = PhraseSet::new().set_or_clear_expire_time(Some(Timestamp::default()/* use setters */));
8660 /// let x = PhraseSet::new().set_or_clear_expire_time(None::<Timestamp>);
8661 /// ```
8662 pub fn set_or_clear_expire_time<T>(mut self, v: std::option::Option<T>) -> Self
8663 where
8664 T: std::convert::Into<wkt::Timestamp>,
8665 {
8666 self.expire_time = v.map(|x| x.into());
8667 self
8668 }
8669
8670 /// Sets the value of [annotations][crate::model::PhraseSet::annotations].
8671 ///
8672 /// # Example
8673 /// ```ignore,no_run
8674 /// # use google_cloud_speech_v2::model::PhraseSet;
8675 /// let x = PhraseSet::new().set_annotations([
8676 /// ("key0", "abc"),
8677 /// ("key1", "xyz"),
8678 /// ]);
8679 /// ```
8680 pub fn set_annotations<T, K, V>(mut self, v: T) -> Self
8681 where
8682 T: std::iter::IntoIterator<Item = (K, V)>,
8683 K: std::convert::Into<std::string::String>,
8684 V: std::convert::Into<std::string::String>,
8685 {
8686 use std::iter::Iterator;
8687 self.annotations = v.into_iter().map(|(k, v)| (k.into(), v.into())).collect();
8688 self
8689 }
8690
8691 /// Sets the value of [etag][crate::model::PhraseSet::etag].
8692 ///
8693 /// # Example
8694 /// ```ignore,no_run
8695 /// # use google_cloud_speech_v2::model::PhraseSet;
8696 /// let x = PhraseSet::new().set_etag("example");
8697 /// ```
8698 pub fn set_etag<T: std::convert::Into<std::string::String>>(mut self, v: T) -> Self {
8699 self.etag = v.into();
8700 self
8701 }
8702
8703 /// Sets the value of [reconciling][crate::model::PhraseSet::reconciling].
8704 ///
8705 /// # Example
8706 /// ```ignore,no_run
8707 /// # use google_cloud_speech_v2::model::PhraseSet;
8708 /// let x = PhraseSet::new().set_reconciling(true);
8709 /// ```
8710 pub fn set_reconciling<T: std::convert::Into<bool>>(mut self, v: T) -> Self {
8711 self.reconciling = v.into();
8712 self
8713 }
8714
8715 /// Sets the value of [kms_key_name][crate::model::PhraseSet::kms_key_name].
8716 ///
8717 /// # Example
8718 /// ```ignore,no_run
8719 /// # use google_cloud_speech_v2::model::PhraseSet;
8720 /// let x = PhraseSet::new().set_kms_key_name("example");
8721 /// ```
8722 pub fn set_kms_key_name<T: std::convert::Into<std::string::String>>(mut self, v: T) -> Self {
8723 self.kms_key_name = v.into();
8724 self
8725 }
8726
8727 /// Sets the value of [kms_key_version_name][crate::model::PhraseSet::kms_key_version_name].
8728 ///
8729 /// # Example
8730 /// ```ignore,no_run
8731 /// # use google_cloud_speech_v2::model::PhraseSet;
8732 /// let x = PhraseSet::new().set_kms_key_version_name("example");
8733 /// ```
8734 pub fn set_kms_key_version_name<T: std::convert::Into<std::string::String>>(
8735 mut self,
8736 v: T,
8737 ) -> Self {
8738 self.kms_key_version_name = v.into();
8739 self
8740 }
8741}
8742
8743impl wkt::message::Message for PhraseSet {
8744 fn typename() -> &'static str {
8745 "type.googleapis.com/google.cloud.speech.v2.PhraseSet"
8746 }
8747}
8748
/// Defines additional types related to [PhraseSet].
pub mod phrase_set {
    #[allow(unused_imports)]
    use super::*;

    /// A Phrase contains words and phrase "hints" so that the speech recognition
    /// is more likely to recognize them. This can be used to improve the accuracy
    /// for specific words and phrases, for example, if specific commands are
    /// typically spoken by the user. This can also be used to add additional words
    /// to the vocabulary of the recognizer.
    ///
    /// List items can also include CustomClass references containing groups of
    /// words that represent common concepts that occur in natural language.
    #[derive(Clone, Default, PartialEq)]
    #[non_exhaustive]
    pub struct Phrase {
        /// The phrase itself.
        pub value: std::string::String,

        /// Hint Boost. Overrides the boost set at the phrase set level.
        /// Positive value will increase the probability that a specific phrase will
        /// be recognized over other similar sounding phrases. The higher the boost,
        /// the higher the chance of false positive recognition as well. Negative
        /// boost values would correspond to anti-biasing. Anti-biasing is not
        /// enabled, so negative boost values will return an error. Boost values must
        /// be between 0 and 20. Any values outside that range will return an error.
        /// We recommend using a binary search approach to finding the optimal value
        /// for your use case as well as adding phrases both with and without boost
        /// to your requests.
        pub boost: f32,

        // NOTE(review): presumably retains JSON properties not modeled by the named
        // fields above — confirm against the sibling `serialize`/`deserialize` modules.
        pub(crate) _unknown_fields: serde_json::Map<std::string::String, serde_json::Value>,
    }

    impl Phrase {
        /// Creates a new, empty [Phrase].
        pub fn new() -> Self {
            std::default::Default::default()
        }

        /// Sets the value of [value][crate::model::phrase_set::Phrase::value].
        ///
        /// # Example
        /// ```ignore,no_run
        /// # use google_cloud_speech_v2::model::phrase_set::Phrase;
        /// let x = Phrase::new().set_value("example");
        /// ```
        pub fn set_value<T: std::convert::Into<std::string::String>>(mut self, v: T) -> Self {
            self.value = v.into();
            self
        }

        /// Sets the value of [boost][crate::model::phrase_set::Phrase::boost].
        ///
        /// # Example
        /// ```ignore,no_run
        /// # use google_cloud_speech_v2::model::phrase_set::Phrase;
        /// let x = Phrase::new().set_boost(42.0);
        /// ```
        pub fn set_boost<T: std::convert::Into<f32>>(mut self, v: T) -> Self {
            self.boost = v.into();
            self
        }
    }

    impl wkt::message::Message for Phrase {
        // Fully-qualified protobuf type URL for this message.
        fn typename() -> &'static str {
            "type.googleapis.com/google.cloud.speech.v2.PhraseSet.Phrase"
        }
    }

    /// Set of states that define the lifecycle of a PhraseSet.
    ///
    /// # Working with unknown values
    ///
    /// This enum is defined as `#[non_exhaustive]` because Google Cloud may add
    /// additional enum variants at any time. Adding new variants is not considered
    /// a breaking change. Applications should write their code in anticipation of:
    ///
    /// - New values appearing in future releases of the client library, **and**
    /// - New values received dynamically, without application changes.
    ///
    /// Please consult the [Working with enums] section in the user guide for some
    /// guidelines.
    ///
    /// [Working with enums]: https://google-cloud-rust.github.io/working_with_enums.html
    #[derive(Clone, Debug, PartialEq)]
    #[non_exhaustive]
    pub enum State {
        /// Unspecified state. This is only used/useful for distinguishing
        /// unset values.
        Unspecified,
        /// The normal and active state.
        Active,
        /// This PhraseSet has been deleted.
        Deleted,
        /// If set, the enum was initialized with an unknown value.
        ///
        /// Applications can examine the value using [State::value] or
        /// [State::name].
        UnknownValue(state::UnknownValue),
    }

    /// Support module wrapping the raw representation of an unrecognized `State`.
    #[doc(hidden)]
    pub mod state {
        #[allow(unused_imports)]
        use super::*;
        #[derive(Clone, Debug, PartialEq)]
        pub struct UnknownValue(pub(crate) wkt::internal::UnknownEnumValue);
    }

    impl State {
        /// Gets the enum value.
        ///
        /// Returns `None` if the enum contains an unknown value deserialized from
        /// the string representation of enums.
        pub fn value(&self) -> std::option::Option<i32> {
            // Known values are non-contiguous (0, 2, 4); they match the wire values
            // used by `From<i32>` and `Serialize` below.
            match self {
                Self::Unspecified => std::option::Option::Some(0),
                Self::Active => std::option::Option::Some(2),
                Self::Deleted => std::option::Option::Some(4),
                Self::UnknownValue(u) => u.0.value(),
            }
        }

        /// Gets the enum value as a string.
        ///
        /// Returns `None` if the enum contains an unknown value deserialized from
        /// the integer representation of enums.
        pub fn name(&self) -> std::option::Option<&str> {
            match self {
                Self::Unspecified => std::option::Option::Some("STATE_UNSPECIFIED"),
                Self::Active => std::option::Option::Some("ACTIVE"),
                Self::Deleted => std::option::Option::Some("DELETED"),
                Self::UnknownValue(u) => u.0.name(),
            }
        }
    }

    impl std::default::Default for State {
        // Defaults to `Unspecified` (wire value 0).
        fn default() -> Self {
            use std::convert::From;
            Self::from(0)
        }
    }

    impl std::fmt::Display for State {
        fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::result::Result<(), std::fmt::Error> {
            wkt::internal::display_enum(f, self.name(), self.value())
        }
    }

    // Converts from the protobuf integer representation. Unrecognized integers are
    // preserved as `UnknownValue` rather than dropped, so they can round-trip.
    impl std::convert::From<i32> for State {
        fn from(value: i32) -> Self {
            match value {
                0 => Self::Unspecified,
                2 => Self::Active,
                4 => Self::Deleted,
                _ => Self::UnknownValue(state::UnknownValue(
                    wkt::internal::UnknownEnumValue::Integer(value),
                )),
            }
        }
    }

    // Converts from the protobuf string representation. Unrecognized strings are
    // preserved as `UnknownValue` rather than dropped.
    impl std::convert::From<&str> for State {
        fn from(value: &str) -> Self {
            use std::string::ToString;
            match value {
                "STATE_UNSPECIFIED" => Self::Unspecified,
                "ACTIVE" => Self::Active,
                "DELETED" => Self::Deleted,
                _ => Self::UnknownValue(state::UnknownValue(
                    wkt::internal::UnknownEnumValue::String(value.to_string()),
                )),
            }
        }
    }

    impl serde::ser::Serialize for State {
        fn serialize<S>(&self, serializer: S) -> std::result::Result<S::Ok, S::Error>
        where
            S: serde::Serializer,
        {
            // Known variants serialize as their integer wire value; unknown values
            // delegate to the wrapped representation (integer or string).
            match self {
                Self::Unspecified => serializer.serialize_i32(0),
                Self::Active => serializer.serialize_i32(2),
                Self::Deleted => serializer.serialize_i32(4),
                Self::UnknownValue(u) => u.0.serialize(serializer),
            }
        }
    }

    impl<'de> serde::de::Deserialize<'de> for State {
        fn deserialize<D>(deserializer: D) -> std::result::Result<Self, D::Error>
        where
            D: serde::Deserializer<'de>,
        {
            // NOTE(review): `deserialize_any` suggests the visitor accepts both the
            // integer and string forms — confirm in `wkt::internal::EnumVisitor`.
            deserializer.deserialize_any(wkt::internal::EnumVisitor::<State>::new(
                ".google.cloud.speech.v2.PhraseSet.State",
            ))
        }
    }
}
8952
/// Request message for the
/// [CreateCustomClass][google.cloud.speech.v2.Speech.CreateCustomClass] method.
///
/// [google.cloud.speech.v2.Speech.CreateCustomClass]: crate::client::Speech::create_custom_class
#[derive(Clone, Default, PartialEq)]
#[non_exhaustive]
pub struct CreateCustomClassRequest {
    /// Required. The CustomClass to create.
    pub custom_class: std::option::Option<crate::model::CustomClass>,

    /// If set, validate the request and preview the CustomClass, but do not
    /// actually create it.
    pub validate_only: bool,

    /// The ID to use for the CustomClass, which will become the final component of
    /// the CustomClass's resource name.
    ///
    /// This value should be 4-63 characters, and valid characters
    /// are /[a-z][0-9]-/.
    pub custom_class_id: std::string::String,

    /// Required. The project and location where this CustomClass will be created.
    /// The expected format is `projects/{project}/locations/{location}`.
    pub parent: std::string::String,

    // NOTE(review): presumably retains JSON properties not modeled by the named
    // fields above — confirm against the sibling `serialize`/`deserialize` modules.
    pub(crate) _unknown_fields: serde_json::Map<std::string::String, serde_json::Value>,
}
8980
8981impl CreateCustomClassRequest {
8982 pub fn new() -> Self {
8983 std::default::Default::default()
8984 }
8985
8986 /// Sets the value of [custom_class][crate::model::CreateCustomClassRequest::custom_class].
8987 ///
8988 /// # Example
8989 /// ```ignore,no_run
8990 /// # use google_cloud_speech_v2::model::CreateCustomClassRequest;
8991 /// use google_cloud_speech_v2::model::CustomClass;
8992 /// let x = CreateCustomClassRequest::new().set_custom_class(CustomClass::default()/* use setters */);
8993 /// ```
8994 pub fn set_custom_class<T>(mut self, v: T) -> Self
8995 where
8996 T: std::convert::Into<crate::model::CustomClass>,
8997 {
8998 self.custom_class = std::option::Option::Some(v.into());
8999 self
9000 }
9001
9002 /// Sets or clears the value of [custom_class][crate::model::CreateCustomClassRequest::custom_class].
9003 ///
9004 /// # Example
9005 /// ```ignore,no_run
9006 /// # use google_cloud_speech_v2::model::CreateCustomClassRequest;
9007 /// use google_cloud_speech_v2::model::CustomClass;
9008 /// let x = CreateCustomClassRequest::new().set_or_clear_custom_class(Some(CustomClass::default()/* use setters */));
9009 /// let x = CreateCustomClassRequest::new().set_or_clear_custom_class(None::<CustomClass>);
9010 /// ```
9011 pub fn set_or_clear_custom_class<T>(mut self, v: std::option::Option<T>) -> Self
9012 where
9013 T: std::convert::Into<crate::model::CustomClass>,
9014 {
9015 self.custom_class = v.map(|x| x.into());
9016 self
9017 }
9018
9019 /// Sets the value of [validate_only][crate::model::CreateCustomClassRequest::validate_only].
9020 ///
9021 /// # Example
9022 /// ```ignore,no_run
9023 /// # use google_cloud_speech_v2::model::CreateCustomClassRequest;
9024 /// let x = CreateCustomClassRequest::new().set_validate_only(true);
9025 /// ```
9026 pub fn set_validate_only<T: std::convert::Into<bool>>(mut self, v: T) -> Self {
9027 self.validate_only = v.into();
9028 self
9029 }
9030
9031 /// Sets the value of [custom_class_id][crate::model::CreateCustomClassRequest::custom_class_id].
9032 ///
9033 /// # Example
9034 /// ```ignore,no_run
9035 /// # use google_cloud_speech_v2::model::CreateCustomClassRequest;
9036 /// let x = CreateCustomClassRequest::new().set_custom_class_id("example");
9037 /// ```
9038 pub fn set_custom_class_id<T: std::convert::Into<std::string::String>>(mut self, v: T) -> Self {
9039 self.custom_class_id = v.into();
9040 self
9041 }
9042
9043 /// Sets the value of [parent][crate::model::CreateCustomClassRequest::parent].
9044 ///
9045 /// # Example
9046 /// ```ignore,no_run
9047 /// # use google_cloud_speech_v2::model::CreateCustomClassRequest;
9048 /// let x = CreateCustomClassRequest::new().set_parent("example");
9049 /// ```
9050 pub fn set_parent<T: std::convert::Into<std::string::String>>(mut self, v: T) -> Self {
9051 self.parent = v.into();
9052 self
9053 }
9054}
9055
9056impl wkt::message::Message for CreateCustomClassRequest {
9057 fn typename() -> &'static str {
9058 "type.googleapis.com/google.cloud.speech.v2.CreateCustomClassRequest"
9059 }
9060}
9061
9062/// Request message for the
9063/// [ListCustomClasses][google.cloud.speech.v2.Speech.ListCustomClasses] method.
9064///
9065/// [google.cloud.speech.v2.Speech.ListCustomClasses]: crate::client::Speech::list_custom_classes
9066#[derive(Clone, Default, PartialEq)]
9067#[non_exhaustive]
9068pub struct ListCustomClassesRequest {
9069 /// Required. The project and location of CustomClass resources to list. The
9070 /// expected format is `projects/{project}/locations/{location}`.
9071 pub parent: std::string::String,
9072
9073 /// Number of results per requests. A valid page_size ranges from 0 to 100
9074 /// inclusive. If the page_size is zero or unspecified, a page size of 5 will
9075 /// be chosen. If the page size exceeds 100, it will be coerced down to 100.
9076 /// Note that a call might return fewer results than the requested page size.
9077 pub page_size: i32,
9078
9079 /// A page token, received from a previous
9080 /// [ListCustomClasses][google.cloud.speech.v2.Speech.ListCustomClasses] call.
9081 /// Provide this to retrieve the subsequent page.
9082 ///
9083 /// When paginating, all other parameters provided to
9084 /// [ListCustomClasses][google.cloud.speech.v2.Speech.ListCustomClasses] must
9085 /// match the call that provided the page token.
9086 ///
9087 /// [google.cloud.speech.v2.Speech.ListCustomClasses]: crate::client::Speech::list_custom_classes
9088 pub page_token: std::string::String,
9089
9090 /// Whether, or not, to show resources that have been deleted.
9091 pub show_deleted: bool,
9092
9093 pub(crate) _unknown_fields: serde_json::Map<std::string::String, serde_json::Value>,
9094}
9095
9096impl ListCustomClassesRequest {
9097 pub fn new() -> Self {
9098 std::default::Default::default()
9099 }
9100
9101 /// Sets the value of [parent][crate::model::ListCustomClassesRequest::parent].
9102 ///
9103 /// # Example
9104 /// ```ignore,no_run
9105 /// # use google_cloud_speech_v2::model::ListCustomClassesRequest;
9106 /// let x = ListCustomClassesRequest::new().set_parent("example");
9107 /// ```
9108 pub fn set_parent<T: std::convert::Into<std::string::String>>(mut self, v: T) -> Self {
9109 self.parent = v.into();
9110 self
9111 }
9112
9113 /// Sets the value of [page_size][crate::model::ListCustomClassesRequest::page_size].
9114 ///
9115 /// # Example
9116 /// ```ignore,no_run
9117 /// # use google_cloud_speech_v2::model::ListCustomClassesRequest;
9118 /// let x = ListCustomClassesRequest::new().set_page_size(42);
9119 /// ```
9120 pub fn set_page_size<T: std::convert::Into<i32>>(mut self, v: T) -> Self {
9121 self.page_size = v.into();
9122 self
9123 }
9124
9125 /// Sets the value of [page_token][crate::model::ListCustomClassesRequest::page_token].
9126 ///
9127 /// # Example
9128 /// ```ignore,no_run
9129 /// # use google_cloud_speech_v2::model::ListCustomClassesRequest;
9130 /// let x = ListCustomClassesRequest::new().set_page_token("example");
9131 /// ```
9132 pub fn set_page_token<T: std::convert::Into<std::string::String>>(mut self, v: T) -> Self {
9133 self.page_token = v.into();
9134 self
9135 }
9136
9137 /// Sets the value of [show_deleted][crate::model::ListCustomClassesRequest::show_deleted].
9138 ///
9139 /// # Example
9140 /// ```ignore,no_run
9141 /// # use google_cloud_speech_v2::model::ListCustomClassesRequest;
9142 /// let x = ListCustomClassesRequest::new().set_show_deleted(true);
9143 /// ```
9144 pub fn set_show_deleted<T: std::convert::Into<bool>>(mut self, v: T) -> Self {
9145 self.show_deleted = v.into();
9146 self
9147 }
9148}
9149
9150impl wkt::message::Message for ListCustomClassesRequest {
9151 fn typename() -> &'static str {
9152 "type.googleapis.com/google.cloud.speech.v2.ListCustomClassesRequest"
9153 }
9154}
9155
9156/// Response message for the
9157/// [ListCustomClasses][google.cloud.speech.v2.Speech.ListCustomClasses] method.
9158///
9159/// [google.cloud.speech.v2.Speech.ListCustomClasses]: crate::client::Speech::list_custom_classes
9160#[derive(Clone, Default, PartialEq)]
9161#[non_exhaustive]
9162pub struct ListCustomClassesResponse {
9163 /// The list of requested CustomClasses.
9164 pub custom_classes: std::vec::Vec<crate::model::CustomClass>,
9165
9166 /// A token, which can be sent as
9167 /// [page_token][google.cloud.speech.v2.ListCustomClassesRequest.page_token] to
9168 /// retrieve the next page. If this field is omitted, there are no subsequent
9169 /// pages. This token expires after 72 hours.
9170 ///
9171 /// [google.cloud.speech.v2.ListCustomClassesRequest.page_token]: crate::model::ListCustomClassesRequest::page_token
9172 pub next_page_token: std::string::String,
9173
9174 pub(crate) _unknown_fields: serde_json::Map<std::string::String, serde_json::Value>,
9175}
9176
9177impl ListCustomClassesResponse {
9178 pub fn new() -> Self {
9179 std::default::Default::default()
9180 }
9181
9182 /// Sets the value of [custom_classes][crate::model::ListCustomClassesResponse::custom_classes].
9183 ///
9184 /// # Example
9185 /// ```ignore,no_run
9186 /// # use google_cloud_speech_v2::model::ListCustomClassesResponse;
9187 /// use google_cloud_speech_v2::model::CustomClass;
9188 /// let x = ListCustomClassesResponse::new()
9189 /// .set_custom_classes([
9190 /// CustomClass::default()/* use setters */,
9191 /// CustomClass::default()/* use (different) setters */,
9192 /// ]);
9193 /// ```
9194 pub fn set_custom_classes<T, V>(mut self, v: T) -> Self
9195 where
9196 T: std::iter::IntoIterator<Item = V>,
9197 V: std::convert::Into<crate::model::CustomClass>,
9198 {
9199 use std::iter::Iterator;
9200 self.custom_classes = v.into_iter().map(|i| i.into()).collect();
9201 self
9202 }
9203
9204 /// Sets the value of [next_page_token][crate::model::ListCustomClassesResponse::next_page_token].
9205 ///
9206 /// # Example
9207 /// ```ignore,no_run
9208 /// # use google_cloud_speech_v2::model::ListCustomClassesResponse;
9209 /// let x = ListCustomClassesResponse::new().set_next_page_token("example");
9210 /// ```
9211 pub fn set_next_page_token<T: std::convert::Into<std::string::String>>(mut self, v: T) -> Self {
9212 self.next_page_token = v.into();
9213 self
9214 }
9215}
9216
9217impl wkt::message::Message for ListCustomClassesResponse {
9218 fn typename() -> &'static str {
9219 "type.googleapis.com/google.cloud.speech.v2.ListCustomClassesResponse"
9220 }
9221}
9222
9223#[doc(hidden)]
9224impl google_cloud_gax::paginator::internal::PageableResponse for ListCustomClassesResponse {
9225 type PageItem = crate::model::CustomClass;
9226
9227 fn items(self) -> std::vec::Vec<Self::PageItem> {
9228 self.custom_classes
9229 }
9230
9231 fn next_page_token(&self) -> std::string::String {
9232 use std::clone::Clone;
9233 self.next_page_token.clone()
9234 }
9235}
9236
9237/// Request message for the
9238/// [GetCustomClass][google.cloud.speech.v2.Speech.GetCustomClass] method.
9239///
9240/// [google.cloud.speech.v2.Speech.GetCustomClass]: crate::client::Speech::get_custom_class
9241#[derive(Clone, Default, PartialEq)]
9242#[non_exhaustive]
9243pub struct GetCustomClassRequest {
9244 /// Required. The name of the CustomClass to retrieve. The expected format is
9245 /// `projects/{project}/locations/{location}/customClasses/{custom_class}`.
9246 pub name: std::string::String,
9247
9248 pub(crate) _unknown_fields: serde_json::Map<std::string::String, serde_json::Value>,
9249}
9250
9251impl GetCustomClassRequest {
9252 pub fn new() -> Self {
9253 std::default::Default::default()
9254 }
9255
9256 /// Sets the value of [name][crate::model::GetCustomClassRequest::name].
9257 ///
9258 /// # Example
9259 /// ```ignore,no_run
9260 /// # use google_cloud_speech_v2::model::GetCustomClassRequest;
9261 /// let x = GetCustomClassRequest::new().set_name("example");
9262 /// ```
9263 pub fn set_name<T: std::convert::Into<std::string::String>>(mut self, v: T) -> Self {
9264 self.name = v.into();
9265 self
9266 }
9267}
9268
9269impl wkt::message::Message for GetCustomClassRequest {
9270 fn typename() -> &'static str {
9271 "type.googleapis.com/google.cloud.speech.v2.GetCustomClassRequest"
9272 }
9273}
9274
9275/// Request message for the
9276/// [UpdateCustomClass][google.cloud.speech.v2.Speech.UpdateCustomClass] method.
9277///
9278/// [google.cloud.speech.v2.Speech.UpdateCustomClass]: crate::client::Speech::update_custom_class
9279#[derive(Clone, Default, PartialEq)]
9280#[non_exhaustive]
9281pub struct UpdateCustomClassRequest {
9282 /// Required. The CustomClass to update.
9283 ///
9284 /// The CustomClass's `name` field is used to identify the CustomClass to
9285 /// update. Format:
9286 /// `projects/{project}/locations/{location}/customClasses/{custom_class}`.
9287 pub custom_class: std::option::Option<crate::model::CustomClass>,
9288
9289 /// The list of fields to be updated. If empty, all fields are considered for
9290 /// update.
9291 pub update_mask: std::option::Option<wkt::FieldMask>,
9292
9293 /// If set, validate the request and preview the updated CustomClass, but do
9294 /// not actually update it.
9295 pub validate_only: bool,
9296
9297 pub(crate) _unknown_fields: serde_json::Map<std::string::String, serde_json::Value>,
9298}
9299
9300impl UpdateCustomClassRequest {
9301 pub fn new() -> Self {
9302 std::default::Default::default()
9303 }
9304
9305 /// Sets the value of [custom_class][crate::model::UpdateCustomClassRequest::custom_class].
9306 ///
9307 /// # Example
9308 /// ```ignore,no_run
9309 /// # use google_cloud_speech_v2::model::UpdateCustomClassRequest;
9310 /// use google_cloud_speech_v2::model::CustomClass;
9311 /// let x = UpdateCustomClassRequest::new().set_custom_class(CustomClass::default()/* use setters */);
9312 /// ```
9313 pub fn set_custom_class<T>(mut self, v: T) -> Self
9314 where
9315 T: std::convert::Into<crate::model::CustomClass>,
9316 {
9317 self.custom_class = std::option::Option::Some(v.into());
9318 self
9319 }
9320
9321 /// Sets or clears the value of [custom_class][crate::model::UpdateCustomClassRequest::custom_class].
9322 ///
9323 /// # Example
9324 /// ```ignore,no_run
9325 /// # use google_cloud_speech_v2::model::UpdateCustomClassRequest;
9326 /// use google_cloud_speech_v2::model::CustomClass;
9327 /// let x = UpdateCustomClassRequest::new().set_or_clear_custom_class(Some(CustomClass::default()/* use setters */));
9328 /// let x = UpdateCustomClassRequest::new().set_or_clear_custom_class(None::<CustomClass>);
9329 /// ```
9330 pub fn set_or_clear_custom_class<T>(mut self, v: std::option::Option<T>) -> Self
9331 where
9332 T: std::convert::Into<crate::model::CustomClass>,
9333 {
9334 self.custom_class = v.map(|x| x.into());
9335 self
9336 }
9337
9338 /// Sets the value of [update_mask][crate::model::UpdateCustomClassRequest::update_mask].
9339 ///
9340 /// # Example
9341 /// ```ignore,no_run
9342 /// # use google_cloud_speech_v2::model::UpdateCustomClassRequest;
9343 /// use wkt::FieldMask;
9344 /// let x = UpdateCustomClassRequest::new().set_update_mask(FieldMask::default()/* use setters */);
9345 /// ```
9346 pub fn set_update_mask<T>(mut self, v: T) -> Self
9347 where
9348 T: std::convert::Into<wkt::FieldMask>,
9349 {
9350 self.update_mask = std::option::Option::Some(v.into());
9351 self
9352 }
9353
9354 /// Sets or clears the value of [update_mask][crate::model::UpdateCustomClassRequest::update_mask].
9355 ///
9356 /// # Example
9357 /// ```ignore,no_run
9358 /// # use google_cloud_speech_v2::model::UpdateCustomClassRequest;
9359 /// use wkt::FieldMask;
9360 /// let x = UpdateCustomClassRequest::new().set_or_clear_update_mask(Some(FieldMask::default()/* use setters */));
9361 /// let x = UpdateCustomClassRequest::new().set_or_clear_update_mask(None::<FieldMask>);
9362 /// ```
9363 pub fn set_or_clear_update_mask<T>(mut self, v: std::option::Option<T>) -> Self
9364 where
9365 T: std::convert::Into<wkt::FieldMask>,
9366 {
9367 self.update_mask = v.map(|x| x.into());
9368 self
9369 }
9370
9371 /// Sets the value of [validate_only][crate::model::UpdateCustomClassRequest::validate_only].
9372 ///
9373 /// # Example
9374 /// ```ignore,no_run
9375 /// # use google_cloud_speech_v2::model::UpdateCustomClassRequest;
9376 /// let x = UpdateCustomClassRequest::new().set_validate_only(true);
9377 /// ```
9378 pub fn set_validate_only<T: std::convert::Into<bool>>(mut self, v: T) -> Self {
9379 self.validate_only = v.into();
9380 self
9381 }
9382}
9383
9384impl wkt::message::Message for UpdateCustomClassRequest {
9385 fn typename() -> &'static str {
9386 "type.googleapis.com/google.cloud.speech.v2.UpdateCustomClassRequest"
9387 }
9388}
9389
9390/// Request message for the
9391/// [DeleteCustomClass][google.cloud.speech.v2.Speech.DeleteCustomClass] method.
9392///
9393/// [google.cloud.speech.v2.Speech.DeleteCustomClass]: crate::client::Speech::delete_custom_class
9394#[derive(Clone, Default, PartialEq)]
9395#[non_exhaustive]
9396pub struct DeleteCustomClassRequest {
9397 /// Required. The name of the CustomClass to delete.
9398 /// Format:
9399 /// `projects/{project}/locations/{location}/customClasses/{custom_class}`
9400 pub name: std::string::String,
9401
9402 /// If set, validate the request and preview the deleted CustomClass, but do
9403 /// not actually delete it.
9404 pub validate_only: bool,
9405
9406 /// If set to true, and the CustomClass is not found, the request will succeed
9407 /// and be a no-op (no Operation is recorded in this case).
9408 pub allow_missing: bool,
9409
9410 /// This checksum is computed by the server based on the value of other
9411 /// fields. This may be sent on update, undelete, and delete requests to ensure
9412 /// the client has an up-to-date value before proceeding.
9413 pub etag: std::string::String,
9414
9415 pub(crate) _unknown_fields: serde_json::Map<std::string::String, serde_json::Value>,
9416}
9417
9418impl DeleteCustomClassRequest {
9419 pub fn new() -> Self {
9420 std::default::Default::default()
9421 }
9422
9423 /// Sets the value of [name][crate::model::DeleteCustomClassRequest::name].
9424 ///
9425 /// # Example
9426 /// ```ignore,no_run
9427 /// # use google_cloud_speech_v2::model::DeleteCustomClassRequest;
9428 /// let x = DeleteCustomClassRequest::new().set_name("example");
9429 /// ```
9430 pub fn set_name<T: std::convert::Into<std::string::String>>(mut self, v: T) -> Self {
9431 self.name = v.into();
9432 self
9433 }
9434
9435 /// Sets the value of [validate_only][crate::model::DeleteCustomClassRequest::validate_only].
9436 ///
9437 /// # Example
9438 /// ```ignore,no_run
9439 /// # use google_cloud_speech_v2::model::DeleteCustomClassRequest;
9440 /// let x = DeleteCustomClassRequest::new().set_validate_only(true);
9441 /// ```
9442 pub fn set_validate_only<T: std::convert::Into<bool>>(mut self, v: T) -> Self {
9443 self.validate_only = v.into();
9444 self
9445 }
9446
9447 /// Sets the value of [allow_missing][crate::model::DeleteCustomClassRequest::allow_missing].
9448 ///
9449 /// # Example
9450 /// ```ignore,no_run
9451 /// # use google_cloud_speech_v2::model::DeleteCustomClassRequest;
9452 /// let x = DeleteCustomClassRequest::new().set_allow_missing(true);
9453 /// ```
9454 pub fn set_allow_missing<T: std::convert::Into<bool>>(mut self, v: T) -> Self {
9455 self.allow_missing = v.into();
9456 self
9457 }
9458
9459 /// Sets the value of [etag][crate::model::DeleteCustomClassRequest::etag].
9460 ///
9461 /// # Example
9462 /// ```ignore,no_run
9463 /// # use google_cloud_speech_v2::model::DeleteCustomClassRequest;
9464 /// let x = DeleteCustomClassRequest::new().set_etag("example");
9465 /// ```
9466 pub fn set_etag<T: std::convert::Into<std::string::String>>(mut self, v: T) -> Self {
9467 self.etag = v.into();
9468 self
9469 }
9470}
9471
9472impl wkt::message::Message for DeleteCustomClassRequest {
9473 fn typename() -> &'static str {
9474 "type.googleapis.com/google.cloud.speech.v2.DeleteCustomClassRequest"
9475 }
9476}
9477
9478/// Request message for the
9479/// [UndeleteCustomClass][google.cloud.speech.v2.Speech.UndeleteCustomClass]
9480/// method.
9481///
9482/// [google.cloud.speech.v2.Speech.UndeleteCustomClass]: crate::client::Speech::undelete_custom_class
9483#[derive(Clone, Default, PartialEq)]
9484#[non_exhaustive]
9485pub struct UndeleteCustomClassRequest {
9486 /// Required. The name of the CustomClass to undelete.
9487 /// Format:
9488 /// `projects/{project}/locations/{location}/customClasses/{custom_class}`
9489 pub name: std::string::String,
9490
9491 /// If set, validate the request and preview the undeleted CustomClass, but do
9492 /// not actually undelete it.
9493 pub validate_only: bool,
9494
9495 /// This checksum is computed by the server based on the value of other
9496 /// fields. This may be sent on update, undelete, and delete requests to ensure
9497 /// the client has an up-to-date value before proceeding.
9498 pub etag: std::string::String,
9499
9500 pub(crate) _unknown_fields: serde_json::Map<std::string::String, serde_json::Value>,
9501}
9502
9503impl UndeleteCustomClassRequest {
9504 pub fn new() -> Self {
9505 std::default::Default::default()
9506 }
9507
9508 /// Sets the value of [name][crate::model::UndeleteCustomClassRequest::name].
9509 ///
9510 /// # Example
9511 /// ```ignore,no_run
9512 /// # use google_cloud_speech_v2::model::UndeleteCustomClassRequest;
9513 /// let x = UndeleteCustomClassRequest::new().set_name("example");
9514 /// ```
9515 pub fn set_name<T: std::convert::Into<std::string::String>>(mut self, v: T) -> Self {
9516 self.name = v.into();
9517 self
9518 }
9519
9520 /// Sets the value of [validate_only][crate::model::UndeleteCustomClassRequest::validate_only].
9521 ///
9522 /// # Example
9523 /// ```ignore,no_run
9524 /// # use google_cloud_speech_v2::model::UndeleteCustomClassRequest;
9525 /// let x = UndeleteCustomClassRequest::new().set_validate_only(true);
9526 /// ```
9527 pub fn set_validate_only<T: std::convert::Into<bool>>(mut self, v: T) -> Self {
9528 self.validate_only = v.into();
9529 self
9530 }
9531
9532 /// Sets the value of [etag][crate::model::UndeleteCustomClassRequest::etag].
9533 ///
9534 /// # Example
9535 /// ```ignore,no_run
9536 /// # use google_cloud_speech_v2::model::UndeleteCustomClassRequest;
9537 /// let x = UndeleteCustomClassRequest::new().set_etag("example");
9538 /// ```
9539 pub fn set_etag<T: std::convert::Into<std::string::String>>(mut self, v: T) -> Self {
9540 self.etag = v.into();
9541 self
9542 }
9543}
9544
9545impl wkt::message::Message for UndeleteCustomClassRequest {
9546 fn typename() -> &'static str {
9547 "type.googleapis.com/google.cloud.speech.v2.UndeleteCustomClassRequest"
9548 }
9549}
9550
9551/// Request message for the
9552/// [CreatePhraseSet][google.cloud.speech.v2.Speech.CreatePhraseSet] method.
9553///
9554/// [google.cloud.speech.v2.Speech.CreatePhraseSet]: crate::client::Speech::create_phrase_set
9555#[derive(Clone, Default, PartialEq)]
9556#[non_exhaustive]
9557pub struct CreatePhraseSetRequest {
9558 /// Required. The PhraseSet to create.
9559 pub phrase_set: std::option::Option<crate::model::PhraseSet>,
9560
9561 /// If set, validate the request and preview the PhraseSet, but do not
9562 /// actually create it.
9563 pub validate_only: bool,
9564
9565 /// The ID to use for the PhraseSet, which will become the final component of
9566 /// the PhraseSet's resource name.
9567 ///
9568 /// This value should be 4-63 characters, and valid characters
9569 /// are /[a-z][0-9]-/.
9570 pub phrase_set_id: std::string::String,
9571
9572 /// Required. The project and location where this PhraseSet will be created.
9573 /// The expected format is `projects/{project}/locations/{location}`.
9574 pub parent: std::string::String,
9575
9576 pub(crate) _unknown_fields: serde_json::Map<std::string::String, serde_json::Value>,
9577}
9578
9579impl CreatePhraseSetRequest {
9580 pub fn new() -> Self {
9581 std::default::Default::default()
9582 }
9583
9584 /// Sets the value of [phrase_set][crate::model::CreatePhraseSetRequest::phrase_set].
9585 ///
9586 /// # Example
9587 /// ```ignore,no_run
9588 /// # use google_cloud_speech_v2::model::CreatePhraseSetRequest;
9589 /// use google_cloud_speech_v2::model::PhraseSet;
9590 /// let x = CreatePhraseSetRequest::new().set_phrase_set(PhraseSet::default()/* use setters */);
9591 /// ```
9592 pub fn set_phrase_set<T>(mut self, v: T) -> Self
9593 where
9594 T: std::convert::Into<crate::model::PhraseSet>,
9595 {
9596 self.phrase_set = std::option::Option::Some(v.into());
9597 self
9598 }
9599
9600 /// Sets or clears the value of [phrase_set][crate::model::CreatePhraseSetRequest::phrase_set].
9601 ///
9602 /// # Example
9603 /// ```ignore,no_run
9604 /// # use google_cloud_speech_v2::model::CreatePhraseSetRequest;
9605 /// use google_cloud_speech_v2::model::PhraseSet;
9606 /// let x = CreatePhraseSetRequest::new().set_or_clear_phrase_set(Some(PhraseSet::default()/* use setters */));
9607 /// let x = CreatePhraseSetRequest::new().set_or_clear_phrase_set(None::<PhraseSet>);
9608 /// ```
9609 pub fn set_or_clear_phrase_set<T>(mut self, v: std::option::Option<T>) -> Self
9610 where
9611 T: std::convert::Into<crate::model::PhraseSet>,
9612 {
9613 self.phrase_set = v.map(|x| x.into());
9614 self
9615 }
9616
9617 /// Sets the value of [validate_only][crate::model::CreatePhraseSetRequest::validate_only].
9618 ///
9619 /// # Example
9620 /// ```ignore,no_run
9621 /// # use google_cloud_speech_v2::model::CreatePhraseSetRequest;
9622 /// let x = CreatePhraseSetRequest::new().set_validate_only(true);
9623 /// ```
9624 pub fn set_validate_only<T: std::convert::Into<bool>>(mut self, v: T) -> Self {
9625 self.validate_only = v.into();
9626 self
9627 }
9628
9629 /// Sets the value of [phrase_set_id][crate::model::CreatePhraseSetRequest::phrase_set_id].
9630 ///
9631 /// # Example
9632 /// ```ignore,no_run
9633 /// # use google_cloud_speech_v2::model::CreatePhraseSetRequest;
9634 /// let x = CreatePhraseSetRequest::new().set_phrase_set_id("example");
9635 /// ```
9636 pub fn set_phrase_set_id<T: std::convert::Into<std::string::String>>(mut self, v: T) -> Self {
9637 self.phrase_set_id = v.into();
9638 self
9639 }
9640
9641 /// Sets the value of [parent][crate::model::CreatePhraseSetRequest::parent].
9642 ///
9643 /// # Example
9644 /// ```ignore,no_run
9645 /// # use google_cloud_speech_v2::model::CreatePhraseSetRequest;
9646 /// let x = CreatePhraseSetRequest::new().set_parent("example");
9647 /// ```
9648 pub fn set_parent<T: std::convert::Into<std::string::String>>(mut self, v: T) -> Self {
9649 self.parent = v.into();
9650 self
9651 }
9652}
9653
9654impl wkt::message::Message for CreatePhraseSetRequest {
9655 fn typename() -> &'static str {
9656 "type.googleapis.com/google.cloud.speech.v2.CreatePhraseSetRequest"
9657 }
9658}
9659
9660/// Request message for the
9661/// [ListPhraseSets][google.cloud.speech.v2.Speech.ListPhraseSets] method.
9662///
9663/// [google.cloud.speech.v2.Speech.ListPhraseSets]: crate::client::Speech::list_phrase_sets
9664#[derive(Clone, Default, PartialEq)]
9665#[non_exhaustive]
9666pub struct ListPhraseSetsRequest {
9667 /// Required. The project and location of PhraseSet resources to list. The
9668 /// expected format is `projects/{project}/locations/{location}`.
9669 pub parent: std::string::String,
9670
9671 /// The maximum number of PhraseSets to return. The service may return fewer
9672 /// than this value. If unspecified, at most 5 PhraseSets will be returned.
9673 /// The maximum value is 100; values above 100 will be coerced to 100.
9674 pub page_size: i32,
9675
9676 /// A page token, received from a previous
9677 /// [ListPhraseSets][google.cloud.speech.v2.Speech.ListPhraseSets] call.
9678 /// Provide this to retrieve the subsequent page.
9679 ///
9680 /// When paginating, all other parameters provided to
9681 /// [ListPhraseSets][google.cloud.speech.v2.Speech.ListPhraseSets] must match
9682 /// the call that provided the page token.
9683 ///
9684 /// [google.cloud.speech.v2.Speech.ListPhraseSets]: crate::client::Speech::list_phrase_sets
9685 pub page_token: std::string::String,
9686
9687 /// Whether, or not, to show resources that have been deleted.
9688 pub show_deleted: bool,
9689
9690 pub(crate) _unknown_fields: serde_json::Map<std::string::String, serde_json::Value>,
9691}
9692
9693impl ListPhraseSetsRequest {
9694 pub fn new() -> Self {
9695 std::default::Default::default()
9696 }
9697
9698 /// Sets the value of [parent][crate::model::ListPhraseSetsRequest::parent].
9699 ///
9700 /// # Example
9701 /// ```ignore,no_run
9702 /// # use google_cloud_speech_v2::model::ListPhraseSetsRequest;
9703 /// let x = ListPhraseSetsRequest::new().set_parent("example");
9704 /// ```
9705 pub fn set_parent<T: std::convert::Into<std::string::String>>(mut self, v: T) -> Self {
9706 self.parent = v.into();
9707 self
9708 }
9709
9710 /// Sets the value of [page_size][crate::model::ListPhraseSetsRequest::page_size].
9711 ///
9712 /// # Example
9713 /// ```ignore,no_run
9714 /// # use google_cloud_speech_v2::model::ListPhraseSetsRequest;
9715 /// let x = ListPhraseSetsRequest::new().set_page_size(42);
9716 /// ```
9717 pub fn set_page_size<T: std::convert::Into<i32>>(mut self, v: T) -> Self {
9718 self.page_size = v.into();
9719 self
9720 }
9721
9722 /// Sets the value of [page_token][crate::model::ListPhraseSetsRequest::page_token].
9723 ///
9724 /// # Example
9725 /// ```ignore,no_run
9726 /// # use google_cloud_speech_v2::model::ListPhraseSetsRequest;
9727 /// let x = ListPhraseSetsRequest::new().set_page_token("example");
9728 /// ```
9729 pub fn set_page_token<T: std::convert::Into<std::string::String>>(mut self, v: T) -> Self {
9730 self.page_token = v.into();
9731 self
9732 }
9733
9734 /// Sets the value of [show_deleted][crate::model::ListPhraseSetsRequest::show_deleted].
9735 ///
9736 /// # Example
9737 /// ```ignore,no_run
9738 /// # use google_cloud_speech_v2::model::ListPhraseSetsRequest;
9739 /// let x = ListPhraseSetsRequest::new().set_show_deleted(true);
9740 /// ```
9741 pub fn set_show_deleted<T: std::convert::Into<bool>>(mut self, v: T) -> Self {
9742 self.show_deleted = v.into();
9743 self
9744 }
9745}
9746
9747impl wkt::message::Message for ListPhraseSetsRequest {
9748 fn typename() -> &'static str {
9749 "type.googleapis.com/google.cloud.speech.v2.ListPhraseSetsRequest"
9750 }
9751}
9752
9753/// Response message for the
9754/// [ListPhraseSets][google.cloud.speech.v2.Speech.ListPhraseSets] method.
9755///
9756/// [google.cloud.speech.v2.Speech.ListPhraseSets]: crate::client::Speech::list_phrase_sets
9757#[derive(Clone, Default, PartialEq)]
9758#[non_exhaustive]
9759pub struct ListPhraseSetsResponse {
9760 /// The list of requested PhraseSets.
9761 pub phrase_sets: std::vec::Vec<crate::model::PhraseSet>,
9762
9763 /// A token, which can be sent as
9764 /// [page_token][google.cloud.speech.v2.ListPhraseSetsRequest.page_token] to
9765 /// retrieve the next page. If this field is omitted, there are no subsequent
9766 /// pages. This token expires after 72 hours.
9767 ///
9768 /// [google.cloud.speech.v2.ListPhraseSetsRequest.page_token]: crate::model::ListPhraseSetsRequest::page_token
9769 pub next_page_token: std::string::String,
9770
9771 pub(crate) _unknown_fields: serde_json::Map<std::string::String, serde_json::Value>,
9772}
9773
9774impl ListPhraseSetsResponse {
9775 pub fn new() -> Self {
9776 std::default::Default::default()
9777 }
9778
9779 /// Sets the value of [phrase_sets][crate::model::ListPhraseSetsResponse::phrase_sets].
9780 ///
9781 /// # Example
9782 /// ```ignore,no_run
9783 /// # use google_cloud_speech_v2::model::ListPhraseSetsResponse;
9784 /// use google_cloud_speech_v2::model::PhraseSet;
9785 /// let x = ListPhraseSetsResponse::new()
9786 /// .set_phrase_sets([
9787 /// PhraseSet::default()/* use setters */,
9788 /// PhraseSet::default()/* use (different) setters */,
9789 /// ]);
9790 /// ```
9791 pub fn set_phrase_sets<T, V>(mut self, v: T) -> Self
9792 where
9793 T: std::iter::IntoIterator<Item = V>,
9794 V: std::convert::Into<crate::model::PhraseSet>,
9795 {
9796 use std::iter::Iterator;
9797 self.phrase_sets = v.into_iter().map(|i| i.into()).collect();
9798 self
9799 }
9800
9801 /// Sets the value of [next_page_token][crate::model::ListPhraseSetsResponse::next_page_token].
9802 ///
9803 /// # Example
9804 /// ```ignore,no_run
9805 /// # use google_cloud_speech_v2::model::ListPhraseSetsResponse;
9806 /// let x = ListPhraseSetsResponse::new().set_next_page_token("example");
9807 /// ```
9808 pub fn set_next_page_token<T: std::convert::Into<std::string::String>>(mut self, v: T) -> Self {
9809 self.next_page_token = v.into();
9810 self
9811 }
9812}
9813
9814impl wkt::message::Message for ListPhraseSetsResponse {
9815 fn typename() -> &'static str {
9816 "type.googleapis.com/google.cloud.speech.v2.ListPhraseSetsResponse"
9817 }
9818}
9819
9820#[doc(hidden)]
9821impl google_cloud_gax::paginator::internal::PageableResponse for ListPhraseSetsResponse {
9822 type PageItem = crate::model::PhraseSet;
9823
9824 fn items(self) -> std::vec::Vec<Self::PageItem> {
9825 self.phrase_sets
9826 }
9827
9828 fn next_page_token(&self) -> std::string::String {
9829 use std::clone::Clone;
9830 self.next_page_token.clone()
9831 }
9832}
9833
9834/// Request message for the
9835/// [GetPhraseSet][google.cloud.speech.v2.Speech.GetPhraseSet] method.
9836///
9837/// [google.cloud.speech.v2.Speech.GetPhraseSet]: crate::client::Speech::get_phrase_set
9838#[derive(Clone, Default, PartialEq)]
9839#[non_exhaustive]
9840pub struct GetPhraseSetRequest {
9841 /// Required. The name of the PhraseSet to retrieve. The expected format is
9842 /// `projects/{project}/locations/{location}/phraseSets/{phrase_set}`.
9843 pub name: std::string::String,
9844
9845 pub(crate) _unknown_fields: serde_json::Map<std::string::String, serde_json::Value>,
9846}
9847
9848impl GetPhraseSetRequest {
9849 pub fn new() -> Self {
9850 std::default::Default::default()
9851 }
9852
9853 /// Sets the value of [name][crate::model::GetPhraseSetRequest::name].
9854 ///
9855 /// # Example
9856 /// ```ignore,no_run
9857 /// # use google_cloud_speech_v2::model::GetPhraseSetRequest;
9858 /// let x = GetPhraseSetRequest::new().set_name("example");
9859 /// ```
9860 pub fn set_name<T: std::convert::Into<std::string::String>>(mut self, v: T) -> Self {
9861 self.name = v.into();
9862 self
9863 }
9864}
9865
9866impl wkt::message::Message for GetPhraseSetRequest {
9867 fn typename() -> &'static str {
9868 "type.googleapis.com/google.cloud.speech.v2.GetPhraseSetRequest"
9869 }
9870}
9871
/// Request message for the
/// [UpdatePhraseSet][google.cloud.speech.v2.Speech.UpdatePhraseSet] method.
///
/// [google.cloud.speech.v2.Speech.UpdatePhraseSet]: crate::client::Speech::update_phrase_set
#[derive(Clone, Default, PartialEq)]
#[non_exhaustive]
pub struct UpdatePhraseSetRequest {
    /// Required. The PhraseSet to update.
    ///
    /// The PhraseSet's `name` field is used to identify the PhraseSet to update.
    /// Format: `projects/{project}/locations/{location}/phraseSets/{phrase_set}`.
    pub phrase_set: std::option::Option<crate::model::PhraseSet>,

    /// The list of fields to update. If empty, all non-default valued fields are
    /// considered for update. Use `*` to update the entire PhraseSet resource.
    pub update_mask: std::option::Option<wkt::FieldMask>,

    /// If set, validate the request and preview the updated PhraseSet, but do not
    /// actually update it.
    pub validate_only: bool,

    // NOTE(review): presumably holds unrecognized JSON fields for round-trip
    // fidelity — confirm in the `serialize`/`deserialize` modules.
    pub(crate) _unknown_fields: serde_json::Map<std::string::String, serde_json::Value>,
}
9895
9896impl UpdatePhraseSetRequest {
9897 pub fn new() -> Self {
9898 std::default::Default::default()
9899 }
9900
9901 /// Sets the value of [phrase_set][crate::model::UpdatePhraseSetRequest::phrase_set].
9902 ///
9903 /// # Example
9904 /// ```ignore,no_run
9905 /// # use google_cloud_speech_v2::model::UpdatePhraseSetRequest;
9906 /// use google_cloud_speech_v2::model::PhraseSet;
9907 /// let x = UpdatePhraseSetRequest::new().set_phrase_set(PhraseSet::default()/* use setters */);
9908 /// ```
9909 pub fn set_phrase_set<T>(mut self, v: T) -> Self
9910 where
9911 T: std::convert::Into<crate::model::PhraseSet>,
9912 {
9913 self.phrase_set = std::option::Option::Some(v.into());
9914 self
9915 }
9916
9917 /// Sets or clears the value of [phrase_set][crate::model::UpdatePhraseSetRequest::phrase_set].
9918 ///
9919 /// # Example
9920 /// ```ignore,no_run
9921 /// # use google_cloud_speech_v2::model::UpdatePhraseSetRequest;
9922 /// use google_cloud_speech_v2::model::PhraseSet;
9923 /// let x = UpdatePhraseSetRequest::new().set_or_clear_phrase_set(Some(PhraseSet::default()/* use setters */));
9924 /// let x = UpdatePhraseSetRequest::new().set_or_clear_phrase_set(None::<PhraseSet>);
9925 /// ```
9926 pub fn set_or_clear_phrase_set<T>(mut self, v: std::option::Option<T>) -> Self
9927 where
9928 T: std::convert::Into<crate::model::PhraseSet>,
9929 {
9930 self.phrase_set = v.map(|x| x.into());
9931 self
9932 }
9933
9934 /// Sets the value of [update_mask][crate::model::UpdatePhraseSetRequest::update_mask].
9935 ///
9936 /// # Example
9937 /// ```ignore,no_run
9938 /// # use google_cloud_speech_v2::model::UpdatePhraseSetRequest;
9939 /// use wkt::FieldMask;
9940 /// let x = UpdatePhraseSetRequest::new().set_update_mask(FieldMask::default()/* use setters */);
9941 /// ```
9942 pub fn set_update_mask<T>(mut self, v: T) -> Self
9943 where
9944 T: std::convert::Into<wkt::FieldMask>,
9945 {
9946 self.update_mask = std::option::Option::Some(v.into());
9947 self
9948 }
9949
9950 /// Sets or clears the value of [update_mask][crate::model::UpdatePhraseSetRequest::update_mask].
9951 ///
9952 /// # Example
9953 /// ```ignore,no_run
9954 /// # use google_cloud_speech_v2::model::UpdatePhraseSetRequest;
9955 /// use wkt::FieldMask;
9956 /// let x = UpdatePhraseSetRequest::new().set_or_clear_update_mask(Some(FieldMask::default()/* use setters */));
9957 /// let x = UpdatePhraseSetRequest::new().set_or_clear_update_mask(None::<FieldMask>);
9958 /// ```
9959 pub fn set_or_clear_update_mask<T>(mut self, v: std::option::Option<T>) -> Self
9960 where
9961 T: std::convert::Into<wkt::FieldMask>,
9962 {
9963 self.update_mask = v.map(|x| x.into());
9964 self
9965 }
9966
9967 /// Sets the value of [validate_only][crate::model::UpdatePhraseSetRequest::validate_only].
9968 ///
9969 /// # Example
9970 /// ```ignore,no_run
9971 /// # use google_cloud_speech_v2::model::UpdatePhraseSetRequest;
9972 /// let x = UpdatePhraseSetRequest::new().set_validate_only(true);
9973 /// ```
9974 pub fn set_validate_only<T: std::convert::Into<bool>>(mut self, v: T) -> Self {
9975 self.validate_only = v.into();
9976 self
9977 }
9978}
9979
9980impl wkt::message::Message for UpdatePhraseSetRequest {
9981 fn typename() -> &'static str {
9982 "type.googleapis.com/google.cloud.speech.v2.UpdatePhraseSetRequest"
9983 }
9984}
9985
/// Request message for the
/// [DeletePhraseSet][google.cloud.speech.v2.Speech.DeletePhraseSet] method.
///
/// [google.cloud.speech.v2.Speech.DeletePhraseSet]: crate::client::Speech::delete_phrase_set
#[derive(Clone, Default, PartialEq)]
#[non_exhaustive]
pub struct DeletePhraseSetRequest {
    /// Required. The name of the PhraseSet to delete.
    /// Format: `projects/{project}/locations/{location}/phraseSets/{phrase_set}`
    pub name: std::string::String,

    /// If set, validate the request and preview the deleted PhraseSet, but do not
    /// actually delete it.
    pub validate_only: bool,

    /// If set to true, and the PhraseSet is not found, the request will succeed
    /// and be a no-op (no Operation is recorded in this case).
    pub allow_missing: bool,

    /// This checksum is computed by the server based on the value of other
    /// fields. This may be sent on update, undelete, and delete requests to ensure
    /// the client has an up-to-date value before proceeding.
    pub etag: std::string::String,

    // NOTE(review): presumably holds unrecognized JSON fields for round-trip
    // fidelity — confirm in the `serialize`/`deserialize` modules.
    pub(crate) _unknown_fields: serde_json::Map<std::string::String, serde_json::Value>,
}
10012
10013impl DeletePhraseSetRequest {
10014 pub fn new() -> Self {
10015 std::default::Default::default()
10016 }
10017
10018 /// Sets the value of [name][crate::model::DeletePhraseSetRequest::name].
10019 ///
10020 /// # Example
10021 /// ```ignore,no_run
10022 /// # use google_cloud_speech_v2::model::DeletePhraseSetRequest;
10023 /// let x = DeletePhraseSetRequest::new().set_name("example");
10024 /// ```
10025 pub fn set_name<T: std::convert::Into<std::string::String>>(mut self, v: T) -> Self {
10026 self.name = v.into();
10027 self
10028 }
10029
10030 /// Sets the value of [validate_only][crate::model::DeletePhraseSetRequest::validate_only].
10031 ///
10032 /// # Example
10033 /// ```ignore,no_run
10034 /// # use google_cloud_speech_v2::model::DeletePhraseSetRequest;
10035 /// let x = DeletePhraseSetRequest::new().set_validate_only(true);
10036 /// ```
10037 pub fn set_validate_only<T: std::convert::Into<bool>>(mut self, v: T) -> Self {
10038 self.validate_only = v.into();
10039 self
10040 }
10041
10042 /// Sets the value of [allow_missing][crate::model::DeletePhraseSetRequest::allow_missing].
10043 ///
10044 /// # Example
10045 /// ```ignore,no_run
10046 /// # use google_cloud_speech_v2::model::DeletePhraseSetRequest;
10047 /// let x = DeletePhraseSetRequest::new().set_allow_missing(true);
10048 /// ```
10049 pub fn set_allow_missing<T: std::convert::Into<bool>>(mut self, v: T) -> Self {
10050 self.allow_missing = v.into();
10051 self
10052 }
10053
10054 /// Sets the value of [etag][crate::model::DeletePhraseSetRequest::etag].
10055 ///
10056 /// # Example
10057 /// ```ignore,no_run
10058 /// # use google_cloud_speech_v2::model::DeletePhraseSetRequest;
10059 /// let x = DeletePhraseSetRequest::new().set_etag("example");
10060 /// ```
10061 pub fn set_etag<T: std::convert::Into<std::string::String>>(mut self, v: T) -> Self {
10062 self.etag = v.into();
10063 self
10064 }
10065}
10066
10067impl wkt::message::Message for DeletePhraseSetRequest {
10068 fn typename() -> &'static str {
10069 "type.googleapis.com/google.cloud.speech.v2.DeletePhraseSetRequest"
10070 }
10071}
10072
/// Request message for the
/// [UndeletePhraseSet][google.cloud.speech.v2.Speech.UndeletePhraseSet]
/// method.
///
/// [google.cloud.speech.v2.Speech.UndeletePhraseSet]: crate::client::Speech::undelete_phrase_set
#[derive(Clone, Default, PartialEq)]
#[non_exhaustive]
pub struct UndeletePhraseSetRequest {
    /// Required. The name of the PhraseSet to undelete.
    /// Format: `projects/{project}/locations/{location}/phraseSets/{phrase_set}`
    pub name: std::string::String,

    /// If set, validate the request and preview the undeleted PhraseSet, but do
    /// not actually undelete it.
    pub validate_only: bool,

    /// This checksum is computed by the server based on the value of other
    /// fields. This may be sent on update, undelete, and delete requests to ensure
    /// the client has an up-to-date value before proceeding.
    pub etag: std::string::String,

    // NOTE(review): presumably holds unrecognized JSON fields for round-trip
    // fidelity — confirm in the `serialize`/`deserialize` modules.
    pub(crate) _unknown_fields: serde_json::Map<std::string::String, serde_json::Value>,
}
10096
10097impl UndeletePhraseSetRequest {
10098 pub fn new() -> Self {
10099 std::default::Default::default()
10100 }
10101
10102 /// Sets the value of [name][crate::model::UndeletePhraseSetRequest::name].
10103 ///
10104 /// # Example
10105 /// ```ignore,no_run
10106 /// # use google_cloud_speech_v2::model::UndeletePhraseSetRequest;
10107 /// let x = UndeletePhraseSetRequest::new().set_name("example");
10108 /// ```
10109 pub fn set_name<T: std::convert::Into<std::string::String>>(mut self, v: T) -> Self {
10110 self.name = v.into();
10111 self
10112 }
10113
10114 /// Sets the value of [validate_only][crate::model::UndeletePhraseSetRequest::validate_only].
10115 ///
10116 /// # Example
10117 /// ```ignore,no_run
10118 /// # use google_cloud_speech_v2::model::UndeletePhraseSetRequest;
10119 /// let x = UndeletePhraseSetRequest::new().set_validate_only(true);
10120 /// ```
10121 pub fn set_validate_only<T: std::convert::Into<bool>>(mut self, v: T) -> Self {
10122 self.validate_only = v.into();
10123 self
10124 }
10125
10126 /// Sets the value of [etag][crate::model::UndeletePhraseSetRequest::etag].
10127 ///
10128 /// # Example
10129 /// ```ignore,no_run
10130 /// # use google_cloud_speech_v2::model::UndeletePhraseSetRequest;
10131 /// let x = UndeletePhraseSetRequest::new().set_etag("example");
10132 /// ```
10133 pub fn set_etag<T: std::convert::Into<std::string::String>>(mut self, v: T) -> Self {
10134 self.etag = v.into();
10135 self
10136 }
10137}
10138
10139impl wkt::message::Message for UndeletePhraseSetRequest {
10140 fn typename() -> &'static str {
10141 "type.googleapis.com/google.cloud.speech.v2.UndeletePhraseSetRequest"
10142 }
10143}
10144
/// Represents a singular feature of a model. If the feature is `recognizer`,
/// the release_state of the feature represents the release_state of the model
#[derive(Clone, Default, PartialEq)]
#[non_exhaustive]
pub struct ModelFeature {
    /// The name of the feature (Note: the feature can be `recognizer`)
    pub feature: std::string::String,

    /// The release state of the feature
    pub release_state: std::string::String,

    // NOTE(review): presumably holds unrecognized JSON fields for round-trip
    // fidelity — confirm in the `serialize`/`deserialize` modules.
    pub(crate) _unknown_fields: serde_json::Map<std::string::String, serde_json::Value>,
}
10158
10159impl ModelFeature {
10160 pub fn new() -> Self {
10161 std::default::Default::default()
10162 }
10163
10164 /// Sets the value of [feature][crate::model::ModelFeature::feature].
10165 ///
10166 /// # Example
10167 /// ```ignore,no_run
10168 /// # use google_cloud_speech_v2::model::ModelFeature;
10169 /// let x = ModelFeature::new().set_feature("example");
10170 /// ```
10171 pub fn set_feature<T: std::convert::Into<std::string::String>>(mut self, v: T) -> Self {
10172 self.feature = v.into();
10173 self
10174 }
10175
10176 /// Sets the value of [release_state][crate::model::ModelFeature::release_state].
10177 ///
10178 /// # Example
10179 /// ```ignore,no_run
10180 /// # use google_cloud_speech_v2::model::ModelFeature;
10181 /// let x = ModelFeature::new().set_release_state("example");
10182 /// ```
10183 pub fn set_release_state<T: std::convert::Into<std::string::String>>(mut self, v: T) -> Self {
10184 self.release_state = v.into();
10185 self
10186 }
10187}
10188
10189impl wkt::message::Message for ModelFeature {
10190 fn typename() -> &'static str {
10191 "type.googleapis.com/google.cloud.speech.v2.ModelFeature"
10192 }
10193}
10194
/// Represents the collection of features belonging to a model
#[derive(Clone, Default, PartialEq)]
#[non_exhaustive]
pub struct ModelFeatures {
    /// Repeated field that contains all features of the model
    pub model_feature: std::vec::Vec<crate::model::ModelFeature>,

    // NOTE(review): presumably holds unrecognized JSON fields for round-trip
    // fidelity — confirm in the `serialize`/`deserialize` modules.
    pub(crate) _unknown_fields: serde_json::Map<std::string::String, serde_json::Value>,
}
10204
10205impl ModelFeatures {
10206 pub fn new() -> Self {
10207 std::default::Default::default()
10208 }
10209
10210 /// Sets the value of [model_feature][crate::model::ModelFeatures::model_feature].
10211 ///
10212 /// # Example
10213 /// ```ignore,no_run
10214 /// # use google_cloud_speech_v2::model::ModelFeatures;
10215 /// use google_cloud_speech_v2::model::ModelFeature;
10216 /// let x = ModelFeatures::new()
10217 /// .set_model_feature([
10218 /// ModelFeature::default()/* use setters */,
10219 /// ModelFeature::default()/* use (different) setters */,
10220 /// ]);
10221 /// ```
10222 pub fn set_model_feature<T, V>(mut self, v: T) -> Self
10223 where
10224 T: std::iter::IntoIterator<Item = V>,
10225 V: std::convert::Into<crate::model::ModelFeature>,
10226 {
10227 use std::iter::Iterator;
10228 self.model_feature = v.into_iter().map(|i| i.into()).collect();
10229 self
10230 }
10231}
10232
10233impl wkt::message::Message for ModelFeatures {
10234 fn typename() -> &'static str {
10235 "type.googleapis.com/google.cloud.speech.v2.ModelFeatures"
10236 }
10237}
10238
/// The metadata about the models in a given region for a specific locale.
/// Currently this is just the features of the model
#[derive(Clone, Default, PartialEq)]
#[non_exhaustive]
pub struct ModelMetadata {
    /// Map of the model name -> features of that model
    pub model_features: std::collections::HashMap<std::string::String, crate::model::ModelFeatures>,

    // NOTE(review): presumably holds unrecognized JSON fields for round-trip
    // fidelity — confirm in the `serialize`/`deserialize` modules.
    pub(crate) _unknown_fields: serde_json::Map<std::string::String, serde_json::Value>,
}
10249
10250impl ModelMetadata {
10251 pub fn new() -> Self {
10252 std::default::Default::default()
10253 }
10254
10255 /// Sets the value of [model_features][crate::model::ModelMetadata::model_features].
10256 ///
10257 /// # Example
10258 /// ```ignore,no_run
10259 /// # use google_cloud_speech_v2::model::ModelMetadata;
10260 /// use google_cloud_speech_v2::model::ModelFeatures;
10261 /// let x = ModelMetadata::new().set_model_features([
10262 /// ("key0", ModelFeatures::default()/* use setters */),
10263 /// ("key1", ModelFeatures::default()/* use (different) setters */),
10264 /// ]);
10265 /// ```
10266 pub fn set_model_features<T, K, V>(mut self, v: T) -> Self
10267 where
10268 T: std::iter::IntoIterator<Item = (K, V)>,
10269 K: std::convert::Into<std::string::String>,
10270 V: std::convert::Into<crate::model::ModelFeatures>,
10271 {
10272 use std::iter::Iterator;
10273 self.model_features = v.into_iter().map(|(k, v)| (k.into(), v.into())).collect();
10274 self
10275 }
10276}
10277
10278impl wkt::message::Message for ModelMetadata {
10279 fn typename() -> &'static str {
10280 "type.googleapis.com/google.cloud.speech.v2.ModelMetadata"
10281 }
10282}
10283
/// The metadata about locales available in a given region. Currently this is
/// just the models that are available for each locale
#[derive(Clone, Default, PartialEq)]
#[non_exhaustive]
pub struct LanguageMetadata {
    /// Map of locale (language code) -> models
    pub models: std::collections::HashMap<std::string::String, crate::model::ModelMetadata>,

    // NOTE(review): presumably holds unrecognized JSON fields for round-trip
    // fidelity — confirm in the `serialize`/`deserialize` modules.
    pub(crate) _unknown_fields: serde_json::Map<std::string::String, serde_json::Value>,
}
10294
10295impl LanguageMetadata {
10296 pub fn new() -> Self {
10297 std::default::Default::default()
10298 }
10299
10300 /// Sets the value of [models][crate::model::LanguageMetadata::models].
10301 ///
10302 /// # Example
10303 /// ```ignore,no_run
10304 /// # use google_cloud_speech_v2::model::LanguageMetadata;
10305 /// use google_cloud_speech_v2::model::ModelMetadata;
10306 /// let x = LanguageMetadata::new().set_models([
10307 /// ("key0", ModelMetadata::default()/* use setters */),
10308 /// ("key1", ModelMetadata::default()/* use (different) setters */),
10309 /// ]);
10310 /// ```
10311 pub fn set_models<T, K, V>(mut self, v: T) -> Self
10312 where
10313 T: std::iter::IntoIterator<Item = (K, V)>,
10314 K: std::convert::Into<std::string::String>,
10315 V: std::convert::Into<crate::model::ModelMetadata>,
10316 {
10317 use std::iter::Iterator;
10318 self.models = v.into_iter().map(|(k, v)| (k.into(), v.into())).collect();
10319 self
10320 }
10321}
10322
10323impl wkt::message::Message for LanguageMetadata {
10324 fn typename() -> &'static str {
10325 "type.googleapis.com/google.cloud.speech.v2.LanguageMetadata"
10326 }
10327}
10328
/// The access metadata for a particular region. This can be applied if the org
/// policy for the given project disallows a particular region.
#[derive(Clone, Default, PartialEq)]
#[non_exhaustive]
pub struct AccessMetadata {
    /// Describes the different types of constraints that are applied.
    pub constraint_type: crate::model::access_metadata::ConstraintType,

    // NOTE(review): presumably holds unrecognized JSON fields for round-trip
    // fidelity — confirm in the `serialize`/`deserialize` modules.
    pub(crate) _unknown_fields: serde_json::Map<std::string::String, serde_json::Value>,
}
10339
10340impl AccessMetadata {
10341 pub fn new() -> Self {
10342 std::default::Default::default()
10343 }
10344
10345 /// Sets the value of [constraint_type][crate::model::AccessMetadata::constraint_type].
10346 ///
10347 /// # Example
10348 /// ```ignore,no_run
10349 /// # use google_cloud_speech_v2::model::AccessMetadata;
10350 /// use google_cloud_speech_v2::model::access_metadata::ConstraintType;
10351 /// let x0 = AccessMetadata::new().set_constraint_type(ConstraintType::ResourceLocationsOrgPolicyCreateConstraint);
10352 /// ```
10353 pub fn set_constraint_type<
10354 T: std::convert::Into<crate::model::access_metadata::ConstraintType>,
10355 >(
10356 mut self,
10357 v: T,
10358 ) -> Self {
10359 self.constraint_type = v.into();
10360 self
10361 }
10362}
10363
10364impl wkt::message::Message for AccessMetadata {
10365 fn typename() -> &'static str {
10366 "type.googleapis.com/google.cloud.speech.v2.AccessMetadata"
10367 }
10368}
10369
/// Defines additional types related to [AccessMetadata].
pub mod access_metadata {
    #[allow(unused_imports)]
    use super::*;

    /// Describes the different types of constraints that can be applied on a
    /// region.
    ///
    /// # Working with unknown values
    ///
    /// This enum is defined as `#[non_exhaustive]` because Google Cloud may add
    /// additional enum variants at any time. Adding new variants is not considered
    /// a breaking change. Applications should write their code in anticipation of:
    ///
    /// - New values appearing in future releases of the client library, **and**
    /// - New values received dynamically, without application changes.
    ///
    /// Please consult the [Working with enums] section in the user guide for some
    /// guidelines.
    ///
    /// [Working with enums]: https://google-cloud-rust.github.io/working_with_enums.html
    #[derive(Clone, Debug, PartialEq)]
    #[non_exhaustive]
    pub enum ConstraintType {
        /// Unspecified constraint applied.
        Unspecified,
        /// The project's org policy disallows the given region.
        ResourceLocationsOrgPolicyCreateConstraint,
        /// If set, the enum was initialized with an unknown value.
        ///
        /// Applications can examine the value using [ConstraintType::value] or
        /// [ConstraintType::name].
        UnknownValue(constraint_type::UnknownValue),
    }

    #[doc(hidden)]
    pub mod constraint_type {
        #[allow(unused_imports)]
        use super::*;
        /// Opaque wrapper for an enum value this client version does not know,
        /// preserving either its integer or string wire form.
        #[derive(Clone, Debug, PartialEq)]
        pub struct UnknownValue(pub(crate) wkt::internal::UnknownEnumValue);
    }

    impl ConstraintType {
        /// Gets the enum value.
        ///
        /// Returns `None` if the enum contains an unknown value deserialized from
        /// the string representation of enums.
        pub fn value(&self) -> std::option::Option<i32> {
            match self {
                Self::Unspecified => std::option::Option::Some(0),
                Self::ResourceLocationsOrgPolicyCreateConstraint => std::option::Option::Some(1),
                // Unknown values only have an integer when they arrived as one.
                Self::UnknownValue(u) => u.0.value(),
            }
        }

        /// Gets the enum value as a string.
        ///
        /// Returns `None` if the enum contains an unknown value deserialized from
        /// the integer representation of enums.
        pub fn name(&self) -> std::option::Option<&str> {
            match self {
                Self::Unspecified => std::option::Option::Some("CONSTRAINT_TYPE_UNSPECIFIED"),
                Self::ResourceLocationsOrgPolicyCreateConstraint => {
                    std::option::Option::Some("RESOURCE_LOCATIONS_ORG_POLICY_CREATE_CONSTRAINT")
                }
                // Unknown values only have a name when they arrived as a string.
                Self::UnknownValue(u) => u.0.name(),
            }
        }
    }

    impl std::default::Default for ConstraintType {
        // Default is the wire value 0, i.e. `Unspecified`.
        fn default() -> Self {
            use std::convert::From;
            Self::from(0)
        }
    }

    impl std::fmt::Display for ConstraintType {
        // Delegates to a shared helper that prefers the name and falls back to
        // the integer for unknown values.
        fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::result::Result<(), std::fmt::Error> {
            wkt::internal::display_enum(f, self.name(), self.value())
        }
    }

    impl std::convert::From<i32> for ConstraintType {
        // Total conversion: unrecognized integers are preserved, not dropped.
        fn from(value: i32) -> Self {
            match value {
                0 => Self::Unspecified,
                1 => Self::ResourceLocationsOrgPolicyCreateConstraint,
                _ => Self::UnknownValue(constraint_type::UnknownValue(
                    wkt::internal::UnknownEnumValue::Integer(value),
                )),
            }
        }
    }

    impl std::convert::From<&str> for ConstraintType {
        // Total conversion: unrecognized names are preserved, not dropped.
        fn from(value: &str) -> Self {
            use std::string::ToString;
            match value {
                "CONSTRAINT_TYPE_UNSPECIFIED" => Self::Unspecified,
                "RESOURCE_LOCATIONS_ORG_POLICY_CREATE_CONSTRAINT" => {
                    Self::ResourceLocationsOrgPolicyCreateConstraint
                }
                _ => Self::UnknownValue(constraint_type::UnknownValue(
                    wkt::internal::UnknownEnumValue::String(value.to_string()),
                )),
            }
        }
    }

    impl serde::ser::Serialize for ConstraintType {
        // Known variants serialize as their integer value; unknown values
        // re-serialize in whichever form (integer or string) they arrived.
        fn serialize<S>(&self, serializer: S) -> std::result::Result<S::Ok, S::Error>
        where
            S: serde::Serializer,
        {
            match self {
                Self::Unspecified => serializer.serialize_i32(0),
                Self::ResourceLocationsOrgPolicyCreateConstraint => serializer.serialize_i32(1),
                Self::UnknownValue(u) => u.0.serialize(serializer),
            }
        }
    }

    impl<'de> serde::de::Deserialize<'de> for ConstraintType {
        // Accepts either integer or string forms via `deserialize_any`; the
        // visitor is parameterized with the proto enum's full name for errors.
        fn deserialize<D>(deserializer: D) -> std::result::Result<Self, D::Error>
        where
            D: serde::Deserializer<'de>,
        {
            deserializer.deserialize_any(wkt::internal::EnumVisitor::<ConstraintType>::new(
                ".google.cloud.speech.v2.AccessMetadata.ConstraintType",
            ))
        }
    }
}
10505
/// Main metadata for the Locations API for STT V2. Currently this is just the
/// metadata about locales, models, and features
#[derive(Clone, Default, PartialEq)]
#[non_exhaustive]
pub struct LocationsMetadata {
    /// Information about available locales, models, and features represented in
    /// the hierarchical structure of locales -> models -> features
    pub languages: std::option::Option<crate::model::LanguageMetadata>,

    /// Information about access metadata for the region and given project.
    pub access_metadata: std::option::Option<crate::model::AccessMetadata>,

    // NOTE(review): presumably holds unrecognized JSON fields for round-trip
    // fidelity — confirm in the `serialize`/`deserialize` modules.
    pub(crate) _unknown_fields: serde_json::Map<std::string::String, serde_json::Value>,
}
10520
10521impl LocationsMetadata {
10522 pub fn new() -> Self {
10523 std::default::Default::default()
10524 }
10525
10526 /// Sets the value of [languages][crate::model::LocationsMetadata::languages].
10527 ///
10528 /// # Example
10529 /// ```ignore,no_run
10530 /// # use google_cloud_speech_v2::model::LocationsMetadata;
10531 /// use google_cloud_speech_v2::model::LanguageMetadata;
10532 /// let x = LocationsMetadata::new().set_languages(LanguageMetadata::default()/* use setters */);
10533 /// ```
10534 pub fn set_languages<T>(mut self, v: T) -> Self
10535 where
10536 T: std::convert::Into<crate::model::LanguageMetadata>,
10537 {
10538 self.languages = std::option::Option::Some(v.into());
10539 self
10540 }
10541
10542 /// Sets or clears the value of [languages][crate::model::LocationsMetadata::languages].
10543 ///
10544 /// # Example
10545 /// ```ignore,no_run
10546 /// # use google_cloud_speech_v2::model::LocationsMetadata;
10547 /// use google_cloud_speech_v2::model::LanguageMetadata;
10548 /// let x = LocationsMetadata::new().set_or_clear_languages(Some(LanguageMetadata::default()/* use setters */));
10549 /// let x = LocationsMetadata::new().set_or_clear_languages(None::<LanguageMetadata>);
10550 /// ```
10551 pub fn set_or_clear_languages<T>(mut self, v: std::option::Option<T>) -> Self
10552 where
10553 T: std::convert::Into<crate::model::LanguageMetadata>,
10554 {
10555 self.languages = v.map(|x| x.into());
10556 self
10557 }
10558
10559 /// Sets the value of [access_metadata][crate::model::LocationsMetadata::access_metadata].
10560 ///
10561 /// # Example
10562 /// ```ignore,no_run
10563 /// # use google_cloud_speech_v2::model::LocationsMetadata;
10564 /// use google_cloud_speech_v2::model::AccessMetadata;
10565 /// let x = LocationsMetadata::new().set_access_metadata(AccessMetadata::default()/* use setters */);
10566 /// ```
10567 pub fn set_access_metadata<T>(mut self, v: T) -> Self
10568 where
10569 T: std::convert::Into<crate::model::AccessMetadata>,
10570 {
10571 self.access_metadata = std::option::Option::Some(v.into());
10572 self
10573 }
10574
10575 /// Sets or clears the value of [access_metadata][crate::model::LocationsMetadata::access_metadata].
10576 ///
10577 /// # Example
10578 /// ```ignore,no_run
10579 /// # use google_cloud_speech_v2::model::LocationsMetadata;
10580 /// use google_cloud_speech_v2::model::AccessMetadata;
10581 /// let x = LocationsMetadata::new().set_or_clear_access_metadata(Some(AccessMetadata::default()/* use setters */));
10582 /// let x = LocationsMetadata::new().set_or_clear_access_metadata(None::<AccessMetadata>);
10583 /// ```
10584 pub fn set_or_clear_access_metadata<T>(mut self, v: std::option::Option<T>) -> Self
10585 where
10586 T: std::convert::Into<crate::model::AccessMetadata>,
10587 {
10588 self.access_metadata = v.map(|x| x.into());
10589 self
10590 }
10591}
10592
10593impl wkt::message::Message for LocationsMetadata {
10594 fn typename() -> &'static str {
10595 "type.googleapis.com/google.cloud.speech.v2.LocationsMetadata"
10596 }
10597}