// google_cloud_speech_v2/model.rs
// Copyright 2025 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// https://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
// Code generated by sidekick. DO NOT EDIT.

17#![allow(rustdoc::redundant_explicit_links)]
18#![allow(rustdoc::broken_intra_doc_links)]
19#![no_implicit_prelude]
20extern crate async_trait;
21extern crate bytes;
22extern crate gaxi;
23extern crate google_cloud_gax;
24extern crate google_cloud_location;
25extern crate google_cloud_longrunning;
26extern crate google_cloud_lro;
27extern crate google_cloud_rpc;
28extern crate lazy_static;
29extern crate serde;
30extern crate serde_json;
31extern crate serde_with;
32extern crate std;
33extern crate tracing;
34extern crate wkt;
35
36mod debug;
37mod deserialize;
38mod serialize;
39
40/// Request message for the
41/// [CreateRecognizer][google.cloud.speech.v2.Speech.CreateRecognizer] method.
42///
43/// [google.cloud.speech.v2.Speech.CreateRecognizer]: crate::client::Speech::create_recognizer
44#[derive(Clone, Default, PartialEq)]
45#[non_exhaustive]
46pub struct CreateRecognizerRequest {
47 /// Required. The Recognizer to create.
48 pub recognizer: std::option::Option<crate::model::Recognizer>,
49
50 /// If set, validate the request and preview the Recognizer, but do not
51 /// actually create it.
52 pub validate_only: bool,
53
54 /// The ID to use for the Recognizer, which will become the final component of
55 /// the Recognizer's resource name.
56 ///
57 /// This value should be 4-63 characters, and valid characters
58 /// are /[a-z][0-9]-/.
59 pub recognizer_id: std::string::String,
60
61 /// Required. The project and location where this Recognizer will be created.
62 /// The expected format is `projects/{project}/locations/{location}`.
63 pub parent: std::string::String,
64
65 pub(crate) _unknown_fields: serde_json::Map<std::string::String, serde_json::Value>,
66}
67
68impl CreateRecognizerRequest {
69 pub fn new() -> Self {
70 std::default::Default::default()
71 }
72
73 /// Sets the value of [recognizer][crate::model::CreateRecognizerRequest::recognizer].
74 ///
75 /// # Example
76 /// ```ignore,no_run
77 /// # use google_cloud_speech_v2::model::CreateRecognizerRequest;
78 /// use google_cloud_speech_v2::model::Recognizer;
79 /// let x = CreateRecognizerRequest::new().set_recognizer(Recognizer::default()/* use setters */);
80 /// ```
81 pub fn set_recognizer<T>(mut self, v: T) -> Self
82 where
83 T: std::convert::Into<crate::model::Recognizer>,
84 {
85 self.recognizer = std::option::Option::Some(v.into());
86 self
87 }
88
89 /// Sets or clears the value of [recognizer][crate::model::CreateRecognizerRequest::recognizer].
90 ///
91 /// # Example
92 /// ```ignore,no_run
93 /// # use google_cloud_speech_v2::model::CreateRecognizerRequest;
94 /// use google_cloud_speech_v2::model::Recognizer;
95 /// let x = CreateRecognizerRequest::new().set_or_clear_recognizer(Some(Recognizer::default()/* use setters */));
96 /// let x = CreateRecognizerRequest::new().set_or_clear_recognizer(None::<Recognizer>);
97 /// ```
98 pub fn set_or_clear_recognizer<T>(mut self, v: std::option::Option<T>) -> Self
99 where
100 T: std::convert::Into<crate::model::Recognizer>,
101 {
102 self.recognizer = v.map(|x| x.into());
103 self
104 }
105
106 /// Sets the value of [validate_only][crate::model::CreateRecognizerRequest::validate_only].
107 ///
108 /// # Example
109 /// ```ignore,no_run
110 /// # use google_cloud_speech_v2::model::CreateRecognizerRequest;
111 /// let x = CreateRecognizerRequest::new().set_validate_only(true);
112 /// ```
113 pub fn set_validate_only<T: std::convert::Into<bool>>(mut self, v: T) -> Self {
114 self.validate_only = v.into();
115 self
116 }
117
118 /// Sets the value of [recognizer_id][crate::model::CreateRecognizerRequest::recognizer_id].
119 ///
120 /// # Example
121 /// ```ignore,no_run
122 /// # use google_cloud_speech_v2::model::CreateRecognizerRequest;
123 /// let x = CreateRecognizerRequest::new().set_recognizer_id("example");
124 /// ```
125 pub fn set_recognizer_id<T: std::convert::Into<std::string::String>>(mut self, v: T) -> Self {
126 self.recognizer_id = v.into();
127 self
128 }
129
130 /// Sets the value of [parent][crate::model::CreateRecognizerRequest::parent].
131 ///
132 /// # Example
133 /// ```ignore,no_run
134 /// # use google_cloud_speech_v2::model::CreateRecognizerRequest;
135 /// let x = CreateRecognizerRequest::new().set_parent("example");
136 /// ```
137 pub fn set_parent<T: std::convert::Into<std::string::String>>(mut self, v: T) -> Self {
138 self.parent = v.into();
139 self
140 }
141}
142
143impl wkt::message::Message for CreateRecognizerRequest {
144 fn typename() -> &'static str {
145 "type.googleapis.com/google.cloud.speech.v2.CreateRecognizerRequest"
146 }
147}
148
149/// Represents the metadata of a long-running operation.
150#[derive(Clone, Default, PartialEq)]
151#[non_exhaustive]
152pub struct OperationMetadata {
153 /// The time the operation was created.
154 pub create_time: std::option::Option<wkt::Timestamp>,
155
156 /// The time the operation was last updated.
157 pub update_time: std::option::Option<wkt::Timestamp>,
158
159 /// The resource path for the target of the operation.
160 pub resource: std::string::String,
161
162 /// The method that triggered the operation.
163 pub method: std::string::String,
164
165 /// The [KMS key
166 /// name](https://cloud.google.com/kms/docs/resource-hierarchy#keys) with which
167 /// the content of the Operation is encrypted. The expected format is
168 /// `projects/{project}/locations/{location}/keyRings/{key_ring}/cryptoKeys/{crypto_key}`.
169 pub kms_key_name: std::string::String,
170
171 /// The [KMS key version
172 /// name](https://cloud.google.com/kms/docs/resource-hierarchy#key_versions)
173 /// with which content of the Operation is encrypted. The expected format is
174 /// `projects/{project}/locations/{location}/keyRings/{key_ring}/cryptoKeys/{crypto_key}/cryptoKeyVersions/{crypto_key_version}`.
175 pub kms_key_version_name: std::string::String,
176
177 /// The percent progress of the Operation. Values can range from 0-100. If the
178 /// value is 100, then the operation is finished.
179 pub progress_percent: i32,
180
181 /// The request that spawned the Operation.
182 pub request: std::option::Option<crate::model::operation_metadata::Request>,
183
184 /// Specific metadata per RPC.
185 pub metadata: std::option::Option<crate::model::operation_metadata::Metadata>,
186
187 pub(crate) _unknown_fields: serde_json::Map<std::string::String, serde_json::Value>,
188}
189
impl OperationMetadata {
    /// Creates a new, empty `OperationMetadata`. Populate it with the `set_*`
    /// builder methods, which consume and return `self` so calls can be chained.
    pub fn new() -> Self {
        std::default::Default::default()
    }

    /// Sets the value of [create_time][crate::model::OperationMetadata::create_time].
    pub fn set_create_time<T>(mut self, v: T) -> Self
    where
        T: std::convert::Into<wkt::Timestamp>,
    {
        self.create_time = std::option::Option::Some(v.into());
        self
    }

    /// Sets or clears the value of
    /// [create_time][crate::model::OperationMetadata::create_time].
    /// Passing `None` clears the field.
    pub fn set_or_clear_create_time<T>(mut self, v: std::option::Option<T>) -> Self
    where
        T: std::convert::Into<wkt::Timestamp>,
    {
        self.create_time = v.map(|x| x.into());
        self
    }

    /// Sets the value of [update_time][crate::model::OperationMetadata::update_time].
    pub fn set_update_time<T>(mut self, v: T) -> Self
    where
        T: std::convert::Into<wkt::Timestamp>,
    {
        self.update_time = std::option::Option::Some(v.into());
        self
    }

    /// Sets or clears the value of
    /// [update_time][crate::model::OperationMetadata::update_time].
    /// Passing `None` clears the field.
    pub fn set_or_clear_update_time<T>(mut self, v: std::option::Option<T>) -> Self
    where
        T: std::convert::Into<wkt::Timestamp>,
    {
        self.update_time = v.map(|x| x.into());
        self
    }

    /// Sets the value of [resource][crate::model::OperationMetadata::resource].
    pub fn set_resource<T: std::convert::Into<std::string::String>>(mut self, v: T) -> Self {
        self.resource = v.into();
        self
    }

    /// Sets the value of [method][crate::model::OperationMetadata::method].
    pub fn set_method<T: std::convert::Into<std::string::String>>(mut self, v: T) -> Self {
        self.method = v.into();
        self
    }

    /// Sets the value of [kms_key_name][crate::model::OperationMetadata::kms_key_name].
    pub fn set_kms_key_name<T: std::convert::Into<std::string::String>>(mut self, v: T) -> Self {
        self.kms_key_name = v.into();
        self
    }

    /// Sets the value of
    /// [kms_key_version_name][crate::model::OperationMetadata::kms_key_version_name].
    pub fn set_kms_key_version_name<T: std::convert::Into<std::string::String>>(
        mut self,
        v: T,
    ) -> Self {
        self.kms_key_version_name = v.into();
        self
    }

    /// Sets the value of
    /// [progress_percent][crate::model::OperationMetadata::progress_percent].
    pub fn set_progress_percent<T: std::convert::Into<i32>>(mut self, v: T) -> Self {
        self.progress_percent = v.into();
        self
    }

    /// Sets the value of [request][crate::model::OperationMetadata::request]
    /// directly to a oneof branch (or `None` to clear it).
    ///
    /// Note that all the setters affecting `request` are mutually exclusive:
    /// each one overwrites whatever branch was previously stored.
    pub fn set_request<
        T: std::convert::Into<std::option::Option<crate::model::operation_metadata::Request>>,
    >(
        mut self,
        v: T,
    ) -> Self {
        self.request = v.into();
        self
    }

346 /// The value of [request][crate::model::OperationMetadata::request]
347 /// if it holds a `BatchRecognizeRequest`, `None` if the field is not set or
348 /// holds a different branch.
349 pub fn batch_recognize_request(
350 &self,
351 ) -> std::option::Option<&std::boxed::Box<crate::model::BatchRecognizeRequest>> {
352 #[allow(unreachable_patterns)]
353 self.request.as_ref().and_then(|v| match v {
354 crate::model::operation_metadata::Request::BatchRecognizeRequest(v) => {
355 std::option::Option::Some(v)
356 }
357 _ => std::option::Option::None,
358 })
359 }
360
361 /// Sets the value of [request][crate::model::OperationMetadata::request]
362 /// to hold a `BatchRecognizeRequest`.
363 ///
364 /// Note that all the setters affecting `request` are
365 /// mutually exclusive.
366 ///
367 /// # Example
368 /// ```ignore,no_run
369 /// # use google_cloud_speech_v2::model::OperationMetadata;
370 /// use google_cloud_speech_v2::model::BatchRecognizeRequest;
371 /// let x = OperationMetadata::new().set_batch_recognize_request(BatchRecognizeRequest::default()/* use setters */);
372 /// assert!(x.batch_recognize_request().is_some());
373 /// assert!(x.create_recognizer_request().is_none());
374 /// assert!(x.update_recognizer_request().is_none());
375 /// assert!(x.delete_recognizer_request().is_none());
376 /// assert!(x.undelete_recognizer_request().is_none());
377 /// assert!(x.create_custom_class_request().is_none());
378 /// assert!(x.update_custom_class_request().is_none());
379 /// assert!(x.delete_custom_class_request().is_none());
380 /// assert!(x.undelete_custom_class_request().is_none());
381 /// assert!(x.create_phrase_set_request().is_none());
382 /// assert!(x.update_phrase_set_request().is_none());
383 /// assert!(x.delete_phrase_set_request().is_none());
384 /// assert!(x.undelete_phrase_set_request().is_none());
385 /// assert!(x.update_config_request().is_none());
386 /// ```
387 pub fn set_batch_recognize_request<
388 T: std::convert::Into<std::boxed::Box<crate::model::BatchRecognizeRequest>>,
389 >(
390 mut self,
391 v: T,
392 ) -> Self {
393 self.request = std::option::Option::Some(
394 crate::model::operation_metadata::Request::BatchRecognizeRequest(v.into()),
395 );
396 self
397 }
398
399 /// The value of [request][crate::model::OperationMetadata::request]
400 /// if it holds a `CreateRecognizerRequest`, `None` if the field is not set or
401 /// holds a different branch.
402 pub fn create_recognizer_request(
403 &self,
404 ) -> std::option::Option<&std::boxed::Box<crate::model::CreateRecognizerRequest>> {
405 #[allow(unreachable_patterns)]
406 self.request.as_ref().and_then(|v| match v {
407 crate::model::operation_metadata::Request::CreateRecognizerRequest(v) => {
408 std::option::Option::Some(v)
409 }
410 _ => std::option::Option::None,
411 })
412 }
413
414 /// Sets the value of [request][crate::model::OperationMetadata::request]
415 /// to hold a `CreateRecognizerRequest`.
416 ///
417 /// Note that all the setters affecting `request` are
418 /// mutually exclusive.
419 ///
420 /// # Example
421 /// ```ignore,no_run
422 /// # use google_cloud_speech_v2::model::OperationMetadata;
423 /// use google_cloud_speech_v2::model::CreateRecognizerRequest;
424 /// let x = OperationMetadata::new().set_create_recognizer_request(CreateRecognizerRequest::default()/* use setters */);
425 /// assert!(x.create_recognizer_request().is_some());
426 /// assert!(x.batch_recognize_request().is_none());
427 /// assert!(x.update_recognizer_request().is_none());
428 /// assert!(x.delete_recognizer_request().is_none());
429 /// assert!(x.undelete_recognizer_request().is_none());
430 /// assert!(x.create_custom_class_request().is_none());
431 /// assert!(x.update_custom_class_request().is_none());
432 /// assert!(x.delete_custom_class_request().is_none());
433 /// assert!(x.undelete_custom_class_request().is_none());
434 /// assert!(x.create_phrase_set_request().is_none());
435 /// assert!(x.update_phrase_set_request().is_none());
436 /// assert!(x.delete_phrase_set_request().is_none());
437 /// assert!(x.undelete_phrase_set_request().is_none());
438 /// assert!(x.update_config_request().is_none());
439 /// ```
440 pub fn set_create_recognizer_request<
441 T: std::convert::Into<std::boxed::Box<crate::model::CreateRecognizerRequest>>,
442 >(
443 mut self,
444 v: T,
445 ) -> Self {
446 self.request = std::option::Option::Some(
447 crate::model::operation_metadata::Request::CreateRecognizerRequest(v.into()),
448 );
449 self
450 }
451
452 /// The value of [request][crate::model::OperationMetadata::request]
453 /// if it holds a `UpdateRecognizerRequest`, `None` if the field is not set or
454 /// holds a different branch.
455 pub fn update_recognizer_request(
456 &self,
457 ) -> std::option::Option<&std::boxed::Box<crate::model::UpdateRecognizerRequest>> {
458 #[allow(unreachable_patterns)]
459 self.request.as_ref().and_then(|v| match v {
460 crate::model::operation_metadata::Request::UpdateRecognizerRequest(v) => {
461 std::option::Option::Some(v)
462 }
463 _ => std::option::Option::None,
464 })
465 }
466
467 /// Sets the value of [request][crate::model::OperationMetadata::request]
468 /// to hold a `UpdateRecognizerRequest`.
469 ///
470 /// Note that all the setters affecting `request` are
471 /// mutually exclusive.
472 ///
473 /// # Example
474 /// ```ignore,no_run
475 /// # use google_cloud_speech_v2::model::OperationMetadata;
476 /// use google_cloud_speech_v2::model::UpdateRecognizerRequest;
477 /// let x = OperationMetadata::new().set_update_recognizer_request(UpdateRecognizerRequest::default()/* use setters */);
478 /// assert!(x.update_recognizer_request().is_some());
479 /// assert!(x.batch_recognize_request().is_none());
480 /// assert!(x.create_recognizer_request().is_none());
481 /// assert!(x.delete_recognizer_request().is_none());
482 /// assert!(x.undelete_recognizer_request().is_none());
483 /// assert!(x.create_custom_class_request().is_none());
484 /// assert!(x.update_custom_class_request().is_none());
485 /// assert!(x.delete_custom_class_request().is_none());
486 /// assert!(x.undelete_custom_class_request().is_none());
487 /// assert!(x.create_phrase_set_request().is_none());
488 /// assert!(x.update_phrase_set_request().is_none());
489 /// assert!(x.delete_phrase_set_request().is_none());
490 /// assert!(x.undelete_phrase_set_request().is_none());
491 /// assert!(x.update_config_request().is_none());
492 /// ```
493 pub fn set_update_recognizer_request<
494 T: std::convert::Into<std::boxed::Box<crate::model::UpdateRecognizerRequest>>,
495 >(
496 mut self,
497 v: T,
498 ) -> Self {
499 self.request = std::option::Option::Some(
500 crate::model::operation_metadata::Request::UpdateRecognizerRequest(v.into()),
501 );
502 self
503 }
504
505 /// The value of [request][crate::model::OperationMetadata::request]
506 /// if it holds a `DeleteRecognizerRequest`, `None` if the field is not set or
507 /// holds a different branch.
508 pub fn delete_recognizer_request(
509 &self,
510 ) -> std::option::Option<&std::boxed::Box<crate::model::DeleteRecognizerRequest>> {
511 #[allow(unreachable_patterns)]
512 self.request.as_ref().and_then(|v| match v {
513 crate::model::operation_metadata::Request::DeleteRecognizerRequest(v) => {
514 std::option::Option::Some(v)
515 }
516 _ => std::option::Option::None,
517 })
518 }
519
520 /// Sets the value of [request][crate::model::OperationMetadata::request]
521 /// to hold a `DeleteRecognizerRequest`.
522 ///
523 /// Note that all the setters affecting `request` are
524 /// mutually exclusive.
525 ///
526 /// # Example
527 /// ```ignore,no_run
528 /// # use google_cloud_speech_v2::model::OperationMetadata;
529 /// use google_cloud_speech_v2::model::DeleteRecognizerRequest;
530 /// let x = OperationMetadata::new().set_delete_recognizer_request(DeleteRecognizerRequest::default()/* use setters */);
531 /// assert!(x.delete_recognizer_request().is_some());
532 /// assert!(x.batch_recognize_request().is_none());
533 /// assert!(x.create_recognizer_request().is_none());
534 /// assert!(x.update_recognizer_request().is_none());
535 /// assert!(x.undelete_recognizer_request().is_none());
536 /// assert!(x.create_custom_class_request().is_none());
537 /// assert!(x.update_custom_class_request().is_none());
538 /// assert!(x.delete_custom_class_request().is_none());
539 /// assert!(x.undelete_custom_class_request().is_none());
540 /// assert!(x.create_phrase_set_request().is_none());
541 /// assert!(x.update_phrase_set_request().is_none());
542 /// assert!(x.delete_phrase_set_request().is_none());
543 /// assert!(x.undelete_phrase_set_request().is_none());
544 /// assert!(x.update_config_request().is_none());
545 /// ```
546 pub fn set_delete_recognizer_request<
547 T: std::convert::Into<std::boxed::Box<crate::model::DeleteRecognizerRequest>>,
548 >(
549 mut self,
550 v: T,
551 ) -> Self {
552 self.request = std::option::Option::Some(
553 crate::model::operation_metadata::Request::DeleteRecognizerRequest(v.into()),
554 );
555 self
556 }
557
558 /// The value of [request][crate::model::OperationMetadata::request]
559 /// if it holds a `UndeleteRecognizerRequest`, `None` if the field is not set or
560 /// holds a different branch.
561 pub fn undelete_recognizer_request(
562 &self,
563 ) -> std::option::Option<&std::boxed::Box<crate::model::UndeleteRecognizerRequest>> {
564 #[allow(unreachable_patterns)]
565 self.request.as_ref().and_then(|v| match v {
566 crate::model::operation_metadata::Request::UndeleteRecognizerRequest(v) => {
567 std::option::Option::Some(v)
568 }
569 _ => std::option::Option::None,
570 })
571 }
572
573 /// Sets the value of [request][crate::model::OperationMetadata::request]
574 /// to hold a `UndeleteRecognizerRequest`.
575 ///
576 /// Note that all the setters affecting `request` are
577 /// mutually exclusive.
578 ///
579 /// # Example
580 /// ```ignore,no_run
581 /// # use google_cloud_speech_v2::model::OperationMetadata;
582 /// use google_cloud_speech_v2::model::UndeleteRecognizerRequest;
583 /// let x = OperationMetadata::new().set_undelete_recognizer_request(UndeleteRecognizerRequest::default()/* use setters */);
584 /// assert!(x.undelete_recognizer_request().is_some());
585 /// assert!(x.batch_recognize_request().is_none());
586 /// assert!(x.create_recognizer_request().is_none());
587 /// assert!(x.update_recognizer_request().is_none());
588 /// assert!(x.delete_recognizer_request().is_none());
589 /// assert!(x.create_custom_class_request().is_none());
590 /// assert!(x.update_custom_class_request().is_none());
591 /// assert!(x.delete_custom_class_request().is_none());
592 /// assert!(x.undelete_custom_class_request().is_none());
593 /// assert!(x.create_phrase_set_request().is_none());
594 /// assert!(x.update_phrase_set_request().is_none());
595 /// assert!(x.delete_phrase_set_request().is_none());
596 /// assert!(x.undelete_phrase_set_request().is_none());
597 /// assert!(x.update_config_request().is_none());
598 /// ```
599 pub fn set_undelete_recognizer_request<
600 T: std::convert::Into<std::boxed::Box<crate::model::UndeleteRecognizerRequest>>,
601 >(
602 mut self,
603 v: T,
604 ) -> Self {
605 self.request = std::option::Option::Some(
606 crate::model::operation_metadata::Request::UndeleteRecognizerRequest(v.into()),
607 );
608 self
609 }
610
611 /// The value of [request][crate::model::OperationMetadata::request]
612 /// if it holds a `CreateCustomClassRequest`, `None` if the field is not set or
613 /// holds a different branch.
614 pub fn create_custom_class_request(
615 &self,
616 ) -> std::option::Option<&std::boxed::Box<crate::model::CreateCustomClassRequest>> {
617 #[allow(unreachable_patterns)]
618 self.request.as_ref().and_then(|v| match v {
619 crate::model::operation_metadata::Request::CreateCustomClassRequest(v) => {
620 std::option::Option::Some(v)
621 }
622 _ => std::option::Option::None,
623 })
624 }
625
626 /// Sets the value of [request][crate::model::OperationMetadata::request]
627 /// to hold a `CreateCustomClassRequest`.
628 ///
629 /// Note that all the setters affecting `request` are
630 /// mutually exclusive.
631 ///
632 /// # Example
633 /// ```ignore,no_run
634 /// # use google_cloud_speech_v2::model::OperationMetadata;
635 /// use google_cloud_speech_v2::model::CreateCustomClassRequest;
636 /// let x = OperationMetadata::new().set_create_custom_class_request(CreateCustomClassRequest::default()/* use setters */);
637 /// assert!(x.create_custom_class_request().is_some());
638 /// assert!(x.batch_recognize_request().is_none());
639 /// assert!(x.create_recognizer_request().is_none());
640 /// assert!(x.update_recognizer_request().is_none());
641 /// assert!(x.delete_recognizer_request().is_none());
642 /// assert!(x.undelete_recognizer_request().is_none());
643 /// assert!(x.update_custom_class_request().is_none());
644 /// assert!(x.delete_custom_class_request().is_none());
645 /// assert!(x.undelete_custom_class_request().is_none());
646 /// assert!(x.create_phrase_set_request().is_none());
647 /// assert!(x.update_phrase_set_request().is_none());
648 /// assert!(x.delete_phrase_set_request().is_none());
649 /// assert!(x.undelete_phrase_set_request().is_none());
650 /// assert!(x.update_config_request().is_none());
651 /// ```
652 pub fn set_create_custom_class_request<
653 T: std::convert::Into<std::boxed::Box<crate::model::CreateCustomClassRequest>>,
654 >(
655 mut self,
656 v: T,
657 ) -> Self {
658 self.request = std::option::Option::Some(
659 crate::model::operation_metadata::Request::CreateCustomClassRequest(v.into()),
660 );
661 self
662 }
663
664 /// The value of [request][crate::model::OperationMetadata::request]
665 /// if it holds a `UpdateCustomClassRequest`, `None` if the field is not set or
666 /// holds a different branch.
667 pub fn update_custom_class_request(
668 &self,
669 ) -> std::option::Option<&std::boxed::Box<crate::model::UpdateCustomClassRequest>> {
670 #[allow(unreachable_patterns)]
671 self.request.as_ref().and_then(|v| match v {
672 crate::model::operation_metadata::Request::UpdateCustomClassRequest(v) => {
673 std::option::Option::Some(v)
674 }
675 _ => std::option::Option::None,
676 })
677 }
678
679 /// Sets the value of [request][crate::model::OperationMetadata::request]
680 /// to hold a `UpdateCustomClassRequest`.
681 ///
682 /// Note that all the setters affecting `request` are
683 /// mutually exclusive.
684 ///
685 /// # Example
686 /// ```ignore,no_run
687 /// # use google_cloud_speech_v2::model::OperationMetadata;
688 /// use google_cloud_speech_v2::model::UpdateCustomClassRequest;
689 /// let x = OperationMetadata::new().set_update_custom_class_request(UpdateCustomClassRequest::default()/* use setters */);
690 /// assert!(x.update_custom_class_request().is_some());
691 /// assert!(x.batch_recognize_request().is_none());
692 /// assert!(x.create_recognizer_request().is_none());
693 /// assert!(x.update_recognizer_request().is_none());
694 /// assert!(x.delete_recognizer_request().is_none());
695 /// assert!(x.undelete_recognizer_request().is_none());
696 /// assert!(x.create_custom_class_request().is_none());
697 /// assert!(x.delete_custom_class_request().is_none());
698 /// assert!(x.undelete_custom_class_request().is_none());
699 /// assert!(x.create_phrase_set_request().is_none());
700 /// assert!(x.update_phrase_set_request().is_none());
701 /// assert!(x.delete_phrase_set_request().is_none());
702 /// assert!(x.undelete_phrase_set_request().is_none());
703 /// assert!(x.update_config_request().is_none());
704 /// ```
705 pub fn set_update_custom_class_request<
706 T: std::convert::Into<std::boxed::Box<crate::model::UpdateCustomClassRequest>>,
707 >(
708 mut self,
709 v: T,
710 ) -> Self {
711 self.request = std::option::Option::Some(
712 crate::model::operation_metadata::Request::UpdateCustomClassRequest(v.into()),
713 );
714 self
715 }
716
717 /// The value of [request][crate::model::OperationMetadata::request]
718 /// if it holds a `DeleteCustomClassRequest`, `None` if the field is not set or
719 /// holds a different branch.
720 pub fn delete_custom_class_request(
721 &self,
722 ) -> std::option::Option<&std::boxed::Box<crate::model::DeleteCustomClassRequest>> {
723 #[allow(unreachable_patterns)]
724 self.request.as_ref().and_then(|v| match v {
725 crate::model::operation_metadata::Request::DeleteCustomClassRequest(v) => {
726 std::option::Option::Some(v)
727 }
728 _ => std::option::Option::None,
729 })
730 }
731
732 /// Sets the value of [request][crate::model::OperationMetadata::request]
733 /// to hold a `DeleteCustomClassRequest`.
734 ///
735 /// Note that all the setters affecting `request` are
736 /// mutually exclusive.
737 ///
738 /// # Example
739 /// ```ignore,no_run
740 /// # use google_cloud_speech_v2::model::OperationMetadata;
741 /// use google_cloud_speech_v2::model::DeleteCustomClassRequest;
742 /// let x = OperationMetadata::new().set_delete_custom_class_request(DeleteCustomClassRequest::default()/* use setters */);
743 /// assert!(x.delete_custom_class_request().is_some());
744 /// assert!(x.batch_recognize_request().is_none());
745 /// assert!(x.create_recognizer_request().is_none());
746 /// assert!(x.update_recognizer_request().is_none());
747 /// assert!(x.delete_recognizer_request().is_none());
748 /// assert!(x.undelete_recognizer_request().is_none());
749 /// assert!(x.create_custom_class_request().is_none());
750 /// assert!(x.update_custom_class_request().is_none());
751 /// assert!(x.undelete_custom_class_request().is_none());
752 /// assert!(x.create_phrase_set_request().is_none());
753 /// assert!(x.update_phrase_set_request().is_none());
754 /// assert!(x.delete_phrase_set_request().is_none());
755 /// assert!(x.undelete_phrase_set_request().is_none());
756 /// assert!(x.update_config_request().is_none());
757 /// ```
758 pub fn set_delete_custom_class_request<
759 T: std::convert::Into<std::boxed::Box<crate::model::DeleteCustomClassRequest>>,
760 >(
761 mut self,
762 v: T,
763 ) -> Self {
764 self.request = std::option::Option::Some(
765 crate::model::operation_metadata::Request::DeleteCustomClassRequest(v.into()),
766 );
767 self
768 }
769
770 /// The value of [request][crate::model::OperationMetadata::request]
771 /// if it holds a `UndeleteCustomClassRequest`, `None` if the field is not set or
772 /// holds a different branch.
773 pub fn undelete_custom_class_request(
774 &self,
775 ) -> std::option::Option<&std::boxed::Box<crate::model::UndeleteCustomClassRequest>> {
776 #[allow(unreachable_patterns)]
777 self.request.as_ref().and_then(|v| match v {
778 crate::model::operation_metadata::Request::UndeleteCustomClassRequest(v) => {
779 std::option::Option::Some(v)
780 }
781 _ => std::option::Option::None,
782 })
783 }
784
785 /// Sets the value of [request][crate::model::OperationMetadata::request]
786 /// to hold a `UndeleteCustomClassRequest`.
787 ///
788 /// Note that all the setters affecting `request` are
789 /// mutually exclusive.
790 ///
791 /// # Example
792 /// ```ignore,no_run
793 /// # use google_cloud_speech_v2::model::OperationMetadata;
794 /// use google_cloud_speech_v2::model::UndeleteCustomClassRequest;
795 /// let x = OperationMetadata::new().set_undelete_custom_class_request(UndeleteCustomClassRequest::default()/* use setters */);
796 /// assert!(x.undelete_custom_class_request().is_some());
797 /// assert!(x.batch_recognize_request().is_none());
798 /// assert!(x.create_recognizer_request().is_none());
799 /// assert!(x.update_recognizer_request().is_none());
800 /// assert!(x.delete_recognizer_request().is_none());
801 /// assert!(x.undelete_recognizer_request().is_none());
802 /// assert!(x.create_custom_class_request().is_none());
803 /// assert!(x.update_custom_class_request().is_none());
804 /// assert!(x.delete_custom_class_request().is_none());
805 /// assert!(x.create_phrase_set_request().is_none());
806 /// assert!(x.update_phrase_set_request().is_none());
807 /// assert!(x.delete_phrase_set_request().is_none());
808 /// assert!(x.undelete_phrase_set_request().is_none());
809 /// assert!(x.update_config_request().is_none());
810 /// ```
811 pub fn set_undelete_custom_class_request<
812 T: std::convert::Into<std::boxed::Box<crate::model::UndeleteCustomClassRequest>>,
813 >(
814 mut self,
815 v: T,
816 ) -> Self {
817 self.request = std::option::Option::Some(
818 crate::model::operation_metadata::Request::UndeleteCustomClassRequest(v.into()),
819 );
820 self
821 }
822
823 /// The value of [request][crate::model::OperationMetadata::request]
824 /// if it holds a `CreatePhraseSetRequest`, `None` if the field is not set or
825 /// holds a different branch.
826 pub fn create_phrase_set_request(
827 &self,
828 ) -> std::option::Option<&std::boxed::Box<crate::model::CreatePhraseSetRequest>> {
829 #[allow(unreachable_patterns)]
830 self.request.as_ref().and_then(|v| match v {
831 crate::model::operation_metadata::Request::CreatePhraseSetRequest(v) => {
832 std::option::Option::Some(v)
833 }
834 _ => std::option::Option::None,
835 })
836 }
837
838 /// Sets the value of [request][crate::model::OperationMetadata::request]
839 /// to hold a `CreatePhraseSetRequest`.
840 ///
841 /// Note that all the setters affecting `request` are
842 /// mutually exclusive.
843 ///
844 /// # Example
845 /// ```ignore,no_run
846 /// # use google_cloud_speech_v2::model::OperationMetadata;
847 /// use google_cloud_speech_v2::model::CreatePhraseSetRequest;
848 /// let x = OperationMetadata::new().set_create_phrase_set_request(CreatePhraseSetRequest::default()/* use setters */);
849 /// assert!(x.create_phrase_set_request().is_some());
850 /// assert!(x.batch_recognize_request().is_none());
851 /// assert!(x.create_recognizer_request().is_none());
852 /// assert!(x.update_recognizer_request().is_none());
853 /// assert!(x.delete_recognizer_request().is_none());
854 /// assert!(x.undelete_recognizer_request().is_none());
855 /// assert!(x.create_custom_class_request().is_none());
856 /// assert!(x.update_custom_class_request().is_none());
857 /// assert!(x.delete_custom_class_request().is_none());
858 /// assert!(x.undelete_custom_class_request().is_none());
859 /// assert!(x.update_phrase_set_request().is_none());
860 /// assert!(x.delete_phrase_set_request().is_none());
861 /// assert!(x.undelete_phrase_set_request().is_none());
862 /// assert!(x.update_config_request().is_none());
863 /// ```
864 pub fn set_create_phrase_set_request<
865 T: std::convert::Into<std::boxed::Box<crate::model::CreatePhraseSetRequest>>,
866 >(
867 mut self,
868 v: T,
869 ) -> Self {
870 self.request = std::option::Option::Some(
871 crate::model::operation_metadata::Request::CreatePhraseSetRequest(v.into()),
872 );
873 self
874 }
875
876 /// The value of [request][crate::model::OperationMetadata::request]
877 /// if it holds a `UpdatePhraseSetRequest`, `None` if the field is not set or
878 /// holds a different branch.
879 pub fn update_phrase_set_request(
880 &self,
881 ) -> std::option::Option<&std::boxed::Box<crate::model::UpdatePhraseSetRequest>> {
882 #[allow(unreachable_patterns)]
883 self.request.as_ref().and_then(|v| match v {
884 crate::model::operation_metadata::Request::UpdatePhraseSetRequest(v) => {
885 std::option::Option::Some(v)
886 }
887 _ => std::option::Option::None,
888 })
889 }
890
891 /// Sets the value of [request][crate::model::OperationMetadata::request]
892 /// to hold a `UpdatePhraseSetRequest`.
893 ///
894 /// Note that all the setters affecting `request` are
895 /// mutually exclusive.
896 ///
897 /// # Example
898 /// ```ignore,no_run
899 /// # use google_cloud_speech_v2::model::OperationMetadata;
900 /// use google_cloud_speech_v2::model::UpdatePhraseSetRequest;
901 /// let x = OperationMetadata::new().set_update_phrase_set_request(UpdatePhraseSetRequest::default()/* use setters */);
902 /// assert!(x.update_phrase_set_request().is_some());
903 /// assert!(x.batch_recognize_request().is_none());
904 /// assert!(x.create_recognizer_request().is_none());
905 /// assert!(x.update_recognizer_request().is_none());
906 /// assert!(x.delete_recognizer_request().is_none());
907 /// assert!(x.undelete_recognizer_request().is_none());
908 /// assert!(x.create_custom_class_request().is_none());
909 /// assert!(x.update_custom_class_request().is_none());
910 /// assert!(x.delete_custom_class_request().is_none());
911 /// assert!(x.undelete_custom_class_request().is_none());
912 /// assert!(x.create_phrase_set_request().is_none());
913 /// assert!(x.delete_phrase_set_request().is_none());
914 /// assert!(x.undelete_phrase_set_request().is_none());
915 /// assert!(x.update_config_request().is_none());
916 /// ```
917 pub fn set_update_phrase_set_request<
918 T: std::convert::Into<std::boxed::Box<crate::model::UpdatePhraseSetRequest>>,
919 >(
920 mut self,
921 v: T,
922 ) -> Self {
923 self.request = std::option::Option::Some(
924 crate::model::operation_metadata::Request::UpdatePhraseSetRequest(v.into()),
925 );
926 self
927 }
928
929 /// The value of [request][crate::model::OperationMetadata::request]
930 /// if it holds a `DeletePhraseSetRequest`, `None` if the field is not set or
931 /// holds a different branch.
932 pub fn delete_phrase_set_request(
933 &self,
934 ) -> std::option::Option<&std::boxed::Box<crate::model::DeletePhraseSetRequest>> {
935 #[allow(unreachable_patterns)]
936 self.request.as_ref().and_then(|v| match v {
937 crate::model::operation_metadata::Request::DeletePhraseSetRequest(v) => {
938 std::option::Option::Some(v)
939 }
940 _ => std::option::Option::None,
941 })
942 }
943
944 /// Sets the value of [request][crate::model::OperationMetadata::request]
945 /// to hold a `DeletePhraseSetRequest`.
946 ///
947 /// Note that all the setters affecting `request` are
948 /// mutually exclusive.
949 ///
950 /// # Example
951 /// ```ignore,no_run
952 /// # use google_cloud_speech_v2::model::OperationMetadata;
953 /// use google_cloud_speech_v2::model::DeletePhraseSetRequest;
954 /// let x = OperationMetadata::new().set_delete_phrase_set_request(DeletePhraseSetRequest::default()/* use setters */);
955 /// assert!(x.delete_phrase_set_request().is_some());
956 /// assert!(x.batch_recognize_request().is_none());
957 /// assert!(x.create_recognizer_request().is_none());
958 /// assert!(x.update_recognizer_request().is_none());
959 /// assert!(x.delete_recognizer_request().is_none());
960 /// assert!(x.undelete_recognizer_request().is_none());
961 /// assert!(x.create_custom_class_request().is_none());
962 /// assert!(x.update_custom_class_request().is_none());
963 /// assert!(x.delete_custom_class_request().is_none());
964 /// assert!(x.undelete_custom_class_request().is_none());
965 /// assert!(x.create_phrase_set_request().is_none());
966 /// assert!(x.update_phrase_set_request().is_none());
967 /// assert!(x.undelete_phrase_set_request().is_none());
968 /// assert!(x.update_config_request().is_none());
969 /// ```
970 pub fn set_delete_phrase_set_request<
971 T: std::convert::Into<std::boxed::Box<crate::model::DeletePhraseSetRequest>>,
972 >(
973 mut self,
974 v: T,
975 ) -> Self {
976 self.request = std::option::Option::Some(
977 crate::model::operation_metadata::Request::DeletePhraseSetRequest(v.into()),
978 );
979 self
980 }
981
982 /// The value of [request][crate::model::OperationMetadata::request]
983 /// if it holds a `UndeletePhraseSetRequest`, `None` if the field is not set or
984 /// holds a different branch.
985 pub fn undelete_phrase_set_request(
986 &self,
987 ) -> std::option::Option<&std::boxed::Box<crate::model::UndeletePhraseSetRequest>> {
988 #[allow(unreachable_patterns)]
989 self.request.as_ref().and_then(|v| match v {
990 crate::model::operation_metadata::Request::UndeletePhraseSetRequest(v) => {
991 std::option::Option::Some(v)
992 }
993 _ => std::option::Option::None,
994 })
995 }
996
997 /// Sets the value of [request][crate::model::OperationMetadata::request]
998 /// to hold a `UndeletePhraseSetRequest`.
999 ///
1000 /// Note that all the setters affecting `request` are
1001 /// mutually exclusive.
1002 ///
1003 /// # Example
1004 /// ```ignore,no_run
1005 /// # use google_cloud_speech_v2::model::OperationMetadata;
1006 /// use google_cloud_speech_v2::model::UndeletePhraseSetRequest;
1007 /// let x = OperationMetadata::new().set_undelete_phrase_set_request(UndeletePhraseSetRequest::default()/* use setters */);
1008 /// assert!(x.undelete_phrase_set_request().is_some());
1009 /// assert!(x.batch_recognize_request().is_none());
1010 /// assert!(x.create_recognizer_request().is_none());
1011 /// assert!(x.update_recognizer_request().is_none());
1012 /// assert!(x.delete_recognizer_request().is_none());
1013 /// assert!(x.undelete_recognizer_request().is_none());
1014 /// assert!(x.create_custom_class_request().is_none());
1015 /// assert!(x.update_custom_class_request().is_none());
1016 /// assert!(x.delete_custom_class_request().is_none());
1017 /// assert!(x.undelete_custom_class_request().is_none());
1018 /// assert!(x.create_phrase_set_request().is_none());
1019 /// assert!(x.update_phrase_set_request().is_none());
1020 /// assert!(x.delete_phrase_set_request().is_none());
1021 /// assert!(x.update_config_request().is_none());
1022 /// ```
1023 pub fn set_undelete_phrase_set_request<
1024 T: std::convert::Into<std::boxed::Box<crate::model::UndeletePhraseSetRequest>>,
1025 >(
1026 mut self,
1027 v: T,
1028 ) -> Self {
1029 self.request = std::option::Option::Some(
1030 crate::model::operation_metadata::Request::UndeletePhraseSetRequest(v.into()),
1031 );
1032 self
1033 }
1034
1035 /// The value of [request][crate::model::OperationMetadata::request]
1036 /// if it holds a `UpdateConfigRequest`, `None` if the field is not set or
1037 /// holds a different branch.
1038 #[deprecated]
1039 pub fn update_config_request(
1040 &self,
1041 ) -> std::option::Option<&std::boxed::Box<crate::model::UpdateConfigRequest>> {
1042 #[allow(unreachable_patterns)]
1043 self.request.as_ref().and_then(|v| match v {
1044 crate::model::operation_metadata::Request::UpdateConfigRequest(v) => {
1045 std::option::Option::Some(v)
1046 }
1047 _ => std::option::Option::None,
1048 })
1049 }
1050
1051 /// Sets the value of [request][crate::model::OperationMetadata::request]
1052 /// to hold a `UpdateConfigRequest`.
1053 ///
1054 /// Note that all the setters affecting `request` are
1055 /// mutually exclusive.
1056 ///
1057 /// # Example
1058 /// ```ignore,no_run
1059 /// # use google_cloud_speech_v2::model::OperationMetadata;
1060 /// use google_cloud_speech_v2::model::UpdateConfigRequest;
1061 /// let x = OperationMetadata::new().set_update_config_request(UpdateConfigRequest::default()/* use setters */);
1062 /// assert!(x.update_config_request().is_some());
1063 /// assert!(x.batch_recognize_request().is_none());
1064 /// assert!(x.create_recognizer_request().is_none());
1065 /// assert!(x.update_recognizer_request().is_none());
1066 /// assert!(x.delete_recognizer_request().is_none());
1067 /// assert!(x.undelete_recognizer_request().is_none());
1068 /// assert!(x.create_custom_class_request().is_none());
1069 /// assert!(x.update_custom_class_request().is_none());
1070 /// assert!(x.delete_custom_class_request().is_none());
1071 /// assert!(x.undelete_custom_class_request().is_none());
1072 /// assert!(x.create_phrase_set_request().is_none());
1073 /// assert!(x.update_phrase_set_request().is_none());
1074 /// assert!(x.delete_phrase_set_request().is_none());
1075 /// assert!(x.undelete_phrase_set_request().is_none());
1076 /// ```
1077 #[deprecated]
1078 pub fn set_update_config_request<
1079 T: std::convert::Into<std::boxed::Box<crate::model::UpdateConfigRequest>>,
1080 >(
1081 mut self,
1082 v: T,
1083 ) -> Self {
1084 self.request = std::option::Option::Some(
1085 crate::model::operation_metadata::Request::UpdateConfigRequest(v.into()),
1086 );
1087 self
1088 }
1089
1090 /// Sets the value of [metadata][crate::model::OperationMetadata::metadata].
1091 ///
1092 /// Note that all the setters affecting `metadata` are mutually
1093 /// exclusive.
1094 ///
1095 /// # Example
1096 /// ```ignore,no_run
1097 /// # use google_cloud_speech_v2::model::OperationMetadata;
1098 /// use google_cloud_speech_v2::model::BatchRecognizeMetadata;
1099 /// let x = OperationMetadata::new().set_metadata(Some(
1100 /// google_cloud_speech_v2::model::operation_metadata::Metadata::BatchRecognizeMetadata(BatchRecognizeMetadata::default().into())));
1101 /// ```
1102 pub fn set_metadata<
1103 T: std::convert::Into<std::option::Option<crate::model::operation_metadata::Metadata>>,
1104 >(
1105 mut self,
1106 v: T,
1107 ) -> Self {
1108 self.metadata = v.into();
1109 self
1110 }
1111
1112 /// The value of [metadata][crate::model::OperationMetadata::metadata]
1113 /// if it holds a `BatchRecognizeMetadata`, `None` if the field is not set or
1114 /// holds a different branch.
1115 pub fn batch_recognize_metadata(
1116 &self,
1117 ) -> std::option::Option<&std::boxed::Box<crate::model::BatchRecognizeMetadata>> {
1118 #[allow(unreachable_patterns)]
1119 self.metadata.as_ref().and_then(|v| match v {
1120 crate::model::operation_metadata::Metadata::BatchRecognizeMetadata(v) => {
1121 std::option::Option::Some(v)
1122 }
1123 _ => std::option::Option::None,
1124 })
1125 }
1126
1127 /// Sets the value of [metadata][crate::model::OperationMetadata::metadata]
1128 /// to hold a `BatchRecognizeMetadata`.
1129 ///
1130 /// Note that all the setters affecting `metadata` are
1131 /// mutually exclusive.
1132 ///
1133 /// # Example
1134 /// ```ignore,no_run
1135 /// # use google_cloud_speech_v2::model::OperationMetadata;
1136 /// use google_cloud_speech_v2::model::BatchRecognizeMetadata;
1137 /// let x = OperationMetadata::new().set_batch_recognize_metadata(BatchRecognizeMetadata::default()/* use setters */);
1138 /// assert!(x.batch_recognize_metadata().is_some());
1139 /// ```
1140 pub fn set_batch_recognize_metadata<
1141 T: std::convert::Into<std::boxed::Box<crate::model::BatchRecognizeMetadata>>,
1142 >(
1143 mut self,
1144 v: T,
1145 ) -> Self {
1146 self.metadata = std::option::Option::Some(
1147 crate::model::operation_metadata::Metadata::BatchRecognizeMetadata(v.into()),
1148 );
1149 self
1150 }
1151}
1152
impl wkt::message::Message for OperationMetadata {
    /// Returns the fully-qualified type URL identifying this message.
    fn typename() -> &'static str {
        "type.googleapis.com/google.cloud.speech.v2.OperationMetadata"
    }
}
1158
/// Defines additional types related to [OperationMetadata].
pub mod operation_metadata {
    #[allow(unused_imports)]
    use super::*;

    /// The request that spawned the Operation.
    ///
    /// At most one branch is held at a time; each variant boxes its payload
    /// (presumably to keep the enum itself small regardless of the request
    /// message sizes).
    #[derive(Clone, Debug, PartialEq)]
    #[non_exhaustive]
    pub enum Request {
        /// The BatchRecognizeRequest that spawned the Operation.
        BatchRecognizeRequest(std::boxed::Box<crate::model::BatchRecognizeRequest>),
        /// The CreateRecognizerRequest that spawned the Operation.
        CreateRecognizerRequest(std::boxed::Box<crate::model::CreateRecognizerRequest>),
        /// The UpdateRecognizerRequest that spawned the Operation.
        UpdateRecognizerRequest(std::boxed::Box<crate::model::UpdateRecognizerRequest>),
        /// The DeleteRecognizerRequest that spawned the Operation.
        DeleteRecognizerRequest(std::boxed::Box<crate::model::DeleteRecognizerRequest>),
        /// The UndeleteRecognizerRequest that spawned the Operation.
        UndeleteRecognizerRequest(std::boxed::Box<crate::model::UndeleteRecognizerRequest>),
        /// The CreateCustomClassRequest that spawned the Operation.
        CreateCustomClassRequest(std::boxed::Box<crate::model::CreateCustomClassRequest>),
        /// The UpdateCustomClassRequest that spawned the Operation.
        UpdateCustomClassRequest(std::boxed::Box<crate::model::UpdateCustomClassRequest>),
        /// The DeleteCustomClassRequest that spawned the Operation.
        DeleteCustomClassRequest(std::boxed::Box<crate::model::DeleteCustomClassRequest>),
        /// The UndeleteCustomClassRequest that spawned the Operation.
        UndeleteCustomClassRequest(std::boxed::Box<crate::model::UndeleteCustomClassRequest>),
        /// The CreatePhraseSetRequest that spawned the Operation.
        CreatePhraseSetRequest(std::boxed::Box<crate::model::CreatePhraseSetRequest>),
        /// The UpdatePhraseSetRequest that spawned the Operation.
        UpdatePhraseSetRequest(std::boxed::Box<crate::model::UpdatePhraseSetRequest>),
        /// The DeletePhraseSetRequest that spawned the Operation.
        DeletePhraseSetRequest(std::boxed::Box<crate::model::DeletePhraseSetRequest>),
        /// The UndeletePhraseSetRequest that spawned the Operation.
        UndeletePhraseSetRequest(std::boxed::Box<crate::model::UndeletePhraseSetRequest>),
        /// The UpdateConfigRequest that spawned the Operation.
        #[deprecated]
        UpdateConfigRequest(std::boxed::Box<crate::model::UpdateConfigRequest>),
    }

    /// Specific metadata per RPC.
    ///
    /// Currently only the BatchRecognize method reports branch-specific
    /// metadata; the enum is `#[non_exhaustive]` so new RPCs can be added.
    #[derive(Clone, Debug, PartialEq)]
    #[non_exhaustive]
    pub enum Metadata {
        /// Metadata specific to the BatchRecognize method.
        BatchRecognizeMetadata(std::boxed::Box<crate::model::BatchRecognizeMetadata>),
    }
}
1207
/// Request message for the
/// [ListRecognizers][google.cloud.speech.v2.Speech.ListRecognizers] method.
///
/// [google.cloud.speech.v2.Speech.ListRecognizers]: crate::client::Speech::list_recognizers
#[derive(Clone, Default, PartialEq)]
#[non_exhaustive]
pub struct ListRecognizersRequest {
    /// Required. The project and location of Recognizers to list. The expected
    /// format is `projects/{project}/locations/{location}`.
    pub parent: std::string::String,

    /// The maximum number of Recognizers to return. The service may return fewer
    /// than this value. If unspecified, at most 5 Recognizers will be returned.
    /// The maximum value is 100; values above 100 will be coerced to 100.
    pub page_size: i32,

    /// A page token, received from a previous
    /// [ListRecognizers][google.cloud.speech.v2.Speech.ListRecognizers] call.
    /// Provide this to retrieve the subsequent page.
    ///
    /// When paginating, all other parameters provided to
    /// [ListRecognizers][google.cloud.speech.v2.Speech.ListRecognizers] must match
    /// the call that provided the page token.
    ///
    /// [google.cloud.speech.v2.Speech.ListRecognizers]: crate::model::ListRecognizersRequest::page_token
    pub page_token: std::string::String,

    /// Whether, or not, to show resources that have been deleted.
    pub show_deleted: bool,

    // Wire properties this generated struct does not model, kept so they
    // survive a deserialize/serialize round trip. NOTE(review): handling
    // appears to live in the sibling `serialize`/`deserialize` modules —
    // confirm there.
    pub(crate) _unknown_fields: serde_json::Map<std::string::String, serde_json::Value>,
}
1240
1241impl ListRecognizersRequest {
1242 pub fn new() -> Self {
1243 std::default::Default::default()
1244 }
1245
1246 /// Sets the value of [parent][crate::model::ListRecognizersRequest::parent].
1247 ///
1248 /// # Example
1249 /// ```ignore,no_run
1250 /// # use google_cloud_speech_v2::model::ListRecognizersRequest;
1251 /// let x = ListRecognizersRequest::new().set_parent("example");
1252 /// ```
1253 pub fn set_parent<T: std::convert::Into<std::string::String>>(mut self, v: T) -> Self {
1254 self.parent = v.into();
1255 self
1256 }
1257
1258 /// Sets the value of [page_size][crate::model::ListRecognizersRequest::page_size].
1259 ///
1260 /// # Example
1261 /// ```ignore,no_run
1262 /// # use google_cloud_speech_v2::model::ListRecognizersRequest;
1263 /// let x = ListRecognizersRequest::new().set_page_size(42);
1264 /// ```
1265 pub fn set_page_size<T: std::convert::Into<i32>>(mut self, v: T) -> Self {
1266 self.page_size = v.into();
1267 self
1268 }
1269
1270 /// Sets the value of [page_token][crate::model::ListRecognizersRequest::page_token].
1271 ///
1272 /// # Example
1273 /// ```ignore,no_run
1274 /// # use google_cloud_speech_v2::model::ListRecognizersRequest;
1275 /// let x = ListRecognizersRequest::new().set_page_token("example");
1276 /// ```
1277 pub fn set_page_token<T: std::convert::Into<std::string::String>>(mut self, v: T) -> Self {
1278 self.page_token = v.into();
1279 self
1280 }
1281
1282 /// Sets the value of [show_deleted][crate::model::ListRecognizersRequest::show_deleted].
1283 ///
1284 /// # Example
1285 /// ```ignore,no_run
1286 /// # use google_cloud_speech_v2::model::ListRecognizersRequest;
1287 /// let x = ListRecognizersRequest::new().set_show_deleted(true);
1288 /// ```
1289 pub fn set_show_deleted<T: std::convert::Into<bool>>(mut self, v: T) -> Self {
1290 self.show_deleted = v.into();
1291 self
1292 }
1293}
1294
impl wkt::message::Message for ListRecognizersRequest {
    /// Returns the fully-qualified type URL identifying this message.
    fn typename() -> &'static str {
        "type.googleapis.com/google.cloud.speech.v2.ListRecognizersRequest"
    }
}
1300
/// Response message for the
/// [ListRecognizers][google.cloud.speech.v2.Speech.ListRecognizers] method.
///
/// [google.cloud.speech.v2.Speech.ListRecognizers]: crate::client::Speech::list_recognizers
#[derive(Clone, Default, PartialEq)]
#[non_exhaustive]
pub struct ListRecognizersResponse {
    /// The list of requested Recognizers.
    pub recognizers: std::vec::Vec<crate::model::Recognizer>,

    /// A token, which can be sent as
    /// [page_token][google.cloud.speech.v2.ListRecognizersRequest.page_token] to
    /// retrieve the next page. If this field is omitted, there are no subsequent
    /// pages. This token expires after 72 hours.
    ///
    /// [google.cloud.speech.v2.ListRecognizersRequest.page_token]: crate::model::ListRecognizersRequest::page_token
    pub next_page_token: std::string::String,

    // Wire properties this generated struct does not model, kept so they
    // survive a deserialize/serialize round trip. NOTE(review): handling
    // appears to live in the sibling `serialize`/`deserialize` modules —
    // confirm there.
    pub(crate) _unknown_fields: serde_json::Map<std::string::String, serde_json::Value>,
}
1321
1322impl ListRecognizersResponse {
1323 pub fn new() -> Self {
1324 std::default::Default::default()
1325 }
1326
1327 /// Sets the value of [recognizers][crate::model::ListRecognizersResponse::recognizers].
1328 ///
1329 /// # Example
1330 /// ```ignore,no_run
1331 /// # use google_cloud_speech_v2::model::ListRecognizersResponse;
1332 /// use google_cloud_speech_v2::model::Recognizer;
1333 /// let x = ListRecognizersResponse::new()
1334 /// .set_recognizers([
1335 /// Recognizer::default()/* use setters */,
1336 /// Recognizer::default()/* use (different) setters */,
1337 /// ]);
1338 /// ```
1339 pub fn set_recognizers<T, V>(mut self, v: T) -> Self
1340 where
1341 T: std::iter::IntoIterator<Item = V>,
1342 V: std::convert::Into<crate::model::Recognizer>,
1343 {
1344 use std::iter::Iterator;
1345 self.recognizers = v.into_iter().map(|i| i.into()).collect();
1346 self
1347 }
1348
1349 /// Sets the value of [next_page_token][crate::model::ListRecognizersResponse::next_page_token].
1350 ///
1351 /// # Example
1352 /// ```ignore,no_run
1353 /// # use google_cloud_speech_v2::model::ListRecognizersResponse;
1354 /// let x = ListRecognizersResponse::new().set_next_page_token("example");
1355 /// ```
1356 pub fn set_next_page_token<T: std::convert::Into<std::string::String>>(mut self, v: T) -> Self {
1357 self.next_page_token = v.into();
1358 self
1359 }
1360}
1361
impl wkt::message::Message for ListRecognizersResponse {
    /// Returns the fully-qualified type URL identifying this message.
    fn typename() -> &'static str {
        "type.googleapis.com/google.cloud.speech.v2.ListRecognizersResponse"
    }
}
1367
1368#[doc(hidden)]
1369impl google_cloud_gax::paginator::internal::PageableResponse for ListRecognizersResponse {
1370 type PageItem = crate::model::Recognizer;
1371
1372 fn items(self) -> std::vec::Vec<Self::PageItem> {
1373 self.recognizers
1374 }
1375
1376 fn next_page_token(&self) -> std::string::String {
1377 use std::clone::Clone;
1378 self.next_page_token.clone()
1379 }
1380}
1381
/// Request message for the
/// [GetRecognizer][google.cloud.speech.v2.Speech.GetRecognizer] method.
///
/// [google.cloud.speech.v2.Speech.GetRecognizer]: crate::client::Speech::get_recognizer
#[derive(Clone, Default, PartialEq)]
#[non_exhaustive]
pub struct GetRecognizerRequest {
    /// Required. The name of the Recognizer to retrieve. The expected format is
    /// `projects/{project}/locations/{location}/recognizers/{recognizer}`.
    pub name: std::string::String,

    // Wire properties this generated struct does not model, kept so they
    // survive a deserialize/serialize round trip. NOTE(review): handling
    // appears to live in the sibling `serialize`/`deserialize` modules —
    // confirm there.
    pub(crate) _unknown_fields: serde_json::Map<std::string::String, serde_json::Value>,
}
1395
1396impl GetRecognizerRequest {
1397 pub fn new() -> Self {
1398 std::default::Default::default()
1399 }
1400
1401 /// Sets the value of [name][crate::model::GetRecognizerRequest::name].
1402 ///
1403 /// # Example
1404 /// ```ignore,no_run
1405 /// # use google_cloud_speech_v2::model::GetRecognizerRequest;
1406 /// let x = GetRecognizerRequest::new().set_name("example");
1407 /// ```
1408 pub fn set_name<T: std::convert::Into<std::string::String>>(mut self, v: T) -> Self {
1409 self.name = v.into();
1410 self
1411 }
1412}
1413
impl wkt::message::Message for GetRecognizerRequest {
    /// Returns the fully-qualified type URL identifying this message.
    fn typename() -> &'static str {
        "type.googleapis.com/google.cloud.speech.v2.GetRecognizerRequest"
    }
}
1419
/// Request message for the
/// [UpdateRecognizer][google.cloud.speech.v2.Speech.UpdateRecognizer] method.
///
/// [google.cloud.speech.v2.Speech.UpdateRecognizer]: crate::client::Speech::update_recognizer
#[derive(Clone, Default, PartialEq)]
#[non_exhaustive]
pub struct UpdateRecognizerRequest {
    /// Required. The Recognizer to update.
    ///
    /// The Recognizer's `name` field is used to identify the Recognizer to update.
    /// Format: `projects/{project}/locations/{location}/recognizers/{recognizer}`.
    pub recognizer: std::option::Option<crate::model::Recognizer>,

    /// The list of fields to update. If empty, all non-default valued fields are
    /// considered for update. Use `*` to update the entire Recognizer resource.
    pub update_mask: std::option::Option<wkt::FieldMask>,

    /// If set, validate the request and preview the updated Recognizer, but do not
    /// actually update it.
    pub validate_only: bool,

    // Wire properties this generated struct does not model, kept so they
    // survive a deserialize/serialize round trip. NOTE(review): handling
    // appears to live in the sibling `serialize`/`deserialize` modules —
    // confirm there.
    pub(crate) _unknown_fields: serde_json::Map<std::string::String, serde_json::Value>,
}
1443
1444impl UpdateRecognizerRequest {
1445 pub fn new() -> Self {
1446 std::default::Default::default()
1447 }
1448
1449 /// Sets the value of [recognizer][crate::model::UpdateRecognizerRequest::recognizer].
1450 ///
1451 /// # Example
1452 /// ```ignore,no_run
1453 /// # use google_cloud_speech_v2::model::UpdateRecognizerRequest;
1454 /// use google_cloud_speech_v2::model::Recognizer;
1455 /// let x = UpdateRecognizerRequest::new().set_recognizer(Recognizer::default()/* use setters */);
1456 /// ```
1457 pub fn set_recognizer<T>(mut self, v: T) -> Self
1458 where
1459 T: std::convert::Into<crate::model::Recognizer>,
1460 {
1461 self.recognizer = std::option::Option::Some(v.into());
1462 self
1463 }
1464
1465 /// Sets or clears the value of [recognizer][crate::model::UpdateRecognizerRequest::recognizer].
1466 ///
1467 /// # Example
1468 /// ```ignore,no_run
1469 /// # use google_cloud_speech_v2::model::UpdateRecognizerRequest;
1470 /// use google_cloud_speech_v2::model::Recognizer;
1471 /// let x = UpdateRecognizerRequest::new().set_or_clear_recognizer(Some(Recognizer::default()/* use setters */));
1472 /// let x = UpdateRecognizerRequest::new().set_or_clear_recognizer(None::<Recognizer>);
1473 /// ```
1474 pub fn set_or_clear_recognizer<T>(mut self, v: std::option::Option<T>) -> Self
1475 where
1476 T: std::convert::Into<crate::model::Recognizer>,
1477 {
1478 self.recognizer = v.map(|x| x.into());
1479 self
1480 }
1481
1482 /// Sets the value of [update_mask][crate::model::UpdateRecognizerRequest::update_mask].
1483 ///
1484 /// # Example
1485 /// ```ignore,no_run
1486 /// # use google_cloud_speech_v2::model::UpdateRecognizerRequest;
1487 /// use wkt::FieldMask;
1488 /// let x = UpdateRecognizerRequest::new().set_update_mask(FieldMask::default()/* use setters */);
1489 /// ```
1490 pub fn set_update_mask<T>(mut self, v: T) -> Self
1491 where
1492 T: std::convert::Into<wkt::FieldMask>,
1493 {
1494 self.update_mask = std::option::Option::Some(v.into());
1495 self
1496 }
1497
1498 /// Sets or clears the value of [update_mask][crate::model::UpdateRecognizerRequest::update_mask].
1499 ///
1500 /// # Example
1501 /// ```ignore,no_run
1502 /// # use google_cloud_speech_v2::model::UpdateRecognizerRequest;
1503 /// use wkt::FieldMask;
1504 /// let x = UpdateRecognizerRequest::new().set_or_clear_update_mask(Some(FieldMask::default()/* use setters */));
1505 /// let x = UpdateRecognizerRequest::new().set_or_clear_update_mask(None::<FieldMask>);
1506 /// ```
1507 pub fn set_or_clear_update_mask<T>(mut self, v: std::option::Option<T>) -> Self
1508 where
1509 T: std::convert::Into<wkt::FieldMask>,
1510 {
1511 self.update_mask = v.map(|x| x.into());
1512 self
1513 }
1514
1515 /// Sets the value of [validate_only][crate::model::UpdateRecognizerRequest::validate_only].
1516 ///
1517 /// # Example
1518 /// ```ignore,no_run
1519 /// # use google_cloud_speech_v2::model::UpdateRecognizerRequest;
1520 /// let x = UpdateRecognizerRequest::new().set_validate_only(true);
1521 /// ```
1522 pub fn set_validate_only<T: std::convert::Into<bool>>(mut self, v: T) -> Self {
1523 self.validate_only = v.into();
1524 self
1525 }
1526}
1527
impl wkt::message::Message for UpdateRecognizerRequest {
    /// Returns the fully-qualified type URL identifying this message.
    fn typename() -> &'static str {
        "type.googleapis.com/google.cloud.speech.v2.UpdateRecognizerRequest"
    }
}
1533
/// Request message for the
/// [DeleteRecognizer][google.cloud.speech.v2.Speech.DeleteRecognizer] method.
///
/// [google.cloud.speech.v2.Speech.DeleteRecognizer]: crate::client::Speech::delete_recognizer
#[derive(Clone, Default, PartialEq)]
#[non_exhaustive]
pub struct DeleteRecognizerRequest {
    /// Required. The name of the Recognizer to delete.
    /// Format: `projects/{project}/locations/{location}/recognizers/{recognizer}`
    pub name: std::string::String,

    /// If set, validate the request and preview the deleted Recognizer, but do not
    /// actually delete it.
    pub validate_only: bool,

    /// If set to true, and the Recognizer is not found, the request will succeed
    /// and be a no-op (no Operation is recorded in this case).
    pub allow_missing: bool,

    /// This checksum is computed by the server based on the value of other
    /// fields. This may be sent on update, undelete, and delete requests to ensure
    /// the client has an up-to-date value before proceeding.
    pub etag: std::string::String,

    // Wire properties this generated struct does not model, kept so they
    // survive a deserialize/serialize round trip. NOTE(review): handling
    // appears to live in the sibling `serialize`/`deserialize` modules —
    // confirm there.
    pub(crate) _unknown_fields: serde_json::Map<std::string::String, serde_json::Value>,
}
1560
1561impl DeleteRecognizerRequest {
1562 pub fn new() -> Self {
1563 std::default::Default::default()
1564 }
1565
1566 /// Sets the value of [name][crate::model::DeleteRecognizerRequest::name].
1567 ///
1568 /// # Example
1569 /// ```ignore,no_run
1570 /// # use google_cloud_speech_v2::model::DeleteRecognizerRequest;
1571 /// let x = DeleteRecognizerRequest::new().set_name("example");
1572 /// ```
1573 pub fn set_name<T: std::convert::Into<std::string::String>>(mut self, v: T) -> Self {
1574 self.name = v.into();
1575 self
1576 }
1577
1578 /// Sets the value of [validate_only][crate::model::DeleteRecognizerRequest::validate_only].
1579 ///
1580 /// # Example
1581 /// ```ignore,no_run
1582 /// # use google_cloud_speech_v2::model::DeleteRecognizerRequest;
1583 /// let x = DeleteRecognizerRequest::new().set_validate_only(true);
1584 /// ```
1585 pub fn set_validate_only<T: std::convert::Into<bool>>(mut self, v: T) -> Self {
1586 self.validate_only = v.into();
1587 self
1588 }
1589
1590 /// Sets the value of [allow_missing][crate::model::DeleteRecognizerRequest::allow_missing].
1591 ///
1592 /// # Example
1593 /// ```ignore,no_run
1594 /// # use google_cloud_speech_v2::model::DeleteRecognizerRequest;
1595 /// let x = DeleteRecognizerRequest::new().set_allow_missing(true);
1596 /// ```
1597 pub fn set_allow_missing<T: std::convert::Into<bool>>(mut self, v: T) -> Self {
1598 self.allow_missing = v.into();
1599 self
1600 }
1601
1602 /// Sets the value of [etag][crate::model::DeleteRecognizerRequest::etag].
1603 ///
1604 /// # Example
1605 /// ```ignore,no_run
1606 /// # use google_cloud_speech_v2::model::DeleteRecognizerRequest;
1607 /// let x = DeleteRecognizerRequest::new().set_etag("example");
1608 /// ```
1609 pub fn set_etag<T: std::convert::Into<std::string::String>>(mut self, v: T) -> Self {
1610 self.etag = v.into();
1611 self
1612 }
1613}
1614
impl wkt::message::Message for DeleteRecognizerRequest {
    /// Returns the fully-qualified type URL identifying this message.
    fn typename() -> &'static str {
        "type.googleapis.com/google.cloud.speech.v2.DeleteRecognizerRequest"
    }
}
1620
/// Request message for the
/// [UndeleteRecognizer][google.cloud.speech.v2.Speech.UndeleteRecognizer]
/// method.
///
/// [google.cloud.speech.v2.Speech.UndeleteRecognizer]: crate::client::Speech::undelete_recognizer
#[derive(Clone, Default, PartialEq)]
#[non_exhaustive]
pub struct UndeleteRecognizerRequest {
    /// Required. The name of the Recognizer to undelete.
    /// Format: `projects/{project}/locations/{location}/recognizers/{recognizer}`
    pub name: std::string::String,

    /// If set, validate the request and preview the undeleted Recognizer, but do
    /// not actually undelete it.
    pub validate_only: bool,

    /// This checksum is computed by the server based on the value of other
    /// fields. This may be sent on update, undelete, and delete requests to ensure
    /// the client has an up-to-date value before proceeding.
    pub etag: std::string::String,

    // NOTE(review): presumably holds wire fields not modeled by the named
    // members above, handled by `mod serialize`/`mod deserialize` — confirm.
    pub(crate) _unknown_fields: serde_json::Map<std::string::String, serde_json::Value>,
}
1644
1645impl UndeleteRecognizerRequest {
1646 pub fn new() -> Self {
1647 std::default::Default::default()
1648 }
1649
1650 /// Sets the value of [name][crate::model::UndeleteRecognizerRequest::name].
1651 ///
1652 /// # Example
1653 /// ```ignore,no_run
1654 /// # use google_cloud_speech_v2::model::UndeleteRecognizerRequest;
1655 /// let x = UndeleteRecognizerRequest::new().set_name("example");
1656 /// ```
1657 pub fn set_name<T: std::convert::Into<std::string::String>>(mut self, v: T) -> Self {
1658 self.name = v.into();
1659 self
1660 }
1661
1662 /// Sets the value of [validate_only][crate::model::UndeleteRecognizerRequest::validate_only].
1663 ///
1664 /// # Example
1665 /// ```ignore,no_run
1666 /// # use google_cloud_speech_v2::model::UndeleteRecognizerRequest;
1667 /// let x = UndeleteRecognizerRequest::new().set_validate_only(true);
1668 /// ```
1669 pub fn set_validate_only<T: std::convert::Into<bool>>(mut self, v: T) -> Self {
1670 self.validate_only = v.into();
1671 self
1672 }
1673
1674 /// Sets the value of [etag][crate::model::UndeleteRecognizerRequest::etag].
1675 ///
1676 /// # Example
1677 /// ```ignore,no_run
1678 /// # use google_cloud_speech_v2::model::UndeleteRecognizerRequest;
1679 /// let x = UndeleteRecognizerRequest::new().set_etag("example");
1680 /// ```
1681 pub fn set_etag<T: std::convert::Into<std::string::String>>(mut self, v: T) -> Self {
1682 self.etag = v.into();
1683 self
1684 }
1685}
1686
impl wkt::message::Message for UndeleteRecognizerRequest {
    /// Returns the fully-qualified type URL identifying this message.
    fn typename() -> &'static str {
        "type.googleapis.com/google.cloud.speech.v2.UndeleteRecognizerRequest"
    }
}
1692
/// A Recognizer message. Stores recognition configuration and metadata.
#[derive(Clone, Default, PartialEq)]
#[non_exhaustive]
pub struct Recognizer {
    /// Output only. Identifier. The resource name of the Recognizer.
    /// Format: `projects/{project}/locations/{location}/recognizers/{recognizer}`.
    pub name: std::string::String,

    /// Output only. System-assigned unique identifier for the Recognizer.
    pub uid: std::string::String,

    /// User-settable, human-readable name for the Recognizer. Must be 63
    /// characters or less.
    pub display_name: std::string::String,

    /// Optional. This field is now deprecated. Prefer the
    /// [`model`][google.cloud.speech.v2.RecognitionConfig.model] field in the
    /// [`RecognitionConfig`][google.cloud.speech.v2.RecognitionConfig] message.
    ///
    /// Which model to use for recognition requests. Select the model best suited
    /// to your domain to get best results.
    ///
    /// Guidance for choosing which model to use can be found in the [Transcription
    /// Models
    /// Documentation](https://cloud.google.com/speech-to-text/v2/docs/transcription-model)
    /// and the models supported in each region can be found in the [Table Of
    /// Supported
    /// Models](https://cloud.google.com/speech-to-text/v2/docs/speech-to-text-supported-languages).
    ///
    /// [google.cloud.speech.v2.RecognitionConfig]: crate::model::RecognitionConfig
    /// [google.cloud.speech.v2.RecognitionConfig.model]: crate::model::RecognitionConfig::model
    #[deprecated]
    pub model: std::string::String,

    /// Optional. This field is now deprecated. Prefer the
    /// [`language_codes`][google.cloud.speech.v2.RecognitionConfig.language_codes]
    /// field in the
    /// [`RecognitionConfig`][google.cloud.speech.v2.RecognitionConfig] message.
    ///
    /// The language of the supplied audio as a
    /// [BCP-47](https://www.rfc-editor.org/rfc/bcp/bcp47.txt) language tag.
    ///
    /// Supported languages for each model are listed in the [Table of Supported
    /// Models](https://cloud.google.com/speech-to-text/v2/docs/speech-to-text-supported-languages).
    ///
    /// If additional languages are provided, recognition result will contain
    /// recognition in the most likely language detected. The recognition result
    /// will include the language tag of the language detected in the audio.
    /// When you create or update a Recognizer, these values are
    /// stored in normalized BCP-47 form. For example, "en-us" is stored as
    /// "en-US".
    ///
    /// [google.cloud.speech.v2.RecognitionConfig]: crate::model::RecognitionConfig
    /// [google.cloud.speech.v2.RecognitionConfig.language_codes]: crate::model::RecognitionConfig::language_codes
    #[deprecated]
    pub language_codes: std::vec::Vec<std::string::String>,

    /// Default configuration to use for requests with this Recognizer.
    /// This can be overwritten by inline configuration in the
    /// [RecognizeRequest.config][google.cloud.speech.v2.RecognizeRequest.config]
    /// field.
    ///
    /// [google.cloud.speech.v2.RecognizeRequest.config]: crate::model::RecognizeRequest::config
    pub default_recognition_config: std::option::Option<crate::model::RecognitionConfig>,

    /// Allows users to store small amounts of arbitrary data.
    /// Both the key and the value must be 63 characters or less each.
    /// At most 100 annotations.
    pub annotations: std::collections::HashMap<std::string::String, std::string::String>,

    /// Output only. The Recognizer lifecycle state.
    pub state: crate::model::recognizer::State,

    /// Output only. Creation time.
    pub create_time: std::option::Option<wkt::Timestamp>,

    /// Output only. The most recent time this Recognizer was modified.
    pub update_time: std::option::Option<wkt::Timestamp>,

    /// Output only. The time at which this Recognizer was requested for deletion.
    pub delete_time: std::option::Option<wkt::Timestamp>,

    /// Output only. The time at which this Recognizer will be purged.
    pub expire_time: std::option::Option<wkt::Timestamp>,

    /// Output only. This checksum is computed by the server based on the value of
    /// other fields. This may be sent on update, undelete, and delete requests to
    /// ensure the client has an up-to-date value before proceeding.
    pub etag: std::string::String,

    /// Output only. Whether or not this Recognizer is in the process of being
    /// updated.
    pub reconciling: bool,

    /// Output only. The [KMS key
    /// name](https://cloud.google.com/kms/docs/resource-hierarchy#keys) with which
    /// the Recognizer is encrypted. The expected format is
    /// `projects/{project}/locations/{location}/keyRings/{key_ring}/cryptoKeys/{crypto_key}`.
    pub kms_key_name: std::string::String,

    /// Output only. The [KMS key version
    /// name](https://cloud.google.com/kms/docs/resource-hierarchy#key_versions)
    /// with which the Recognizer is encrypted. The expected format is
    /// `projects/{project}/locations/{location}/keyRings/{key_ring}/cryptoKeys/{crypto_key}/cryptoKeyVersions/{crypto_key_version}`.
    pub kms_key_version_name: std::string::String,

    // NOTE(review): presumably holds wire fields not modeled by the named
    // members above, handled by `mod serialize`/`mod deserialize` — confirm.
    pub(crate) _unknown_fields: serde_json::Map<std::string::String, serde_json::Value>,
}
1801
1802impl Recognizer {
1803 pub fn new() -> Self {
1804 std::default::Default::default()
1805 }
1806
1807 /// Sets the value of [name][crate::model::Recognizer::name].
1808 ///
1809 /// # Example
1810 /// ```ignore,no_run
1811 /// # use google_cloud_speech_v2::model::Recognizer;
1812 /// let x = Recognizer::new().set_name("example");
1813 /// ```
1814 pub fn set_name<T: std::convert::Into<std::string::String>>(mut self, v: T) -> Self {
1815 self.name = v.into();
1816 self
1817 }
1818
1819 /// Sets the value of [uid][crate::model::Recognizer::uid].
1820 ///
1821 /// # Example
1822 /// ```ignore,no_run
1823 /// # use google_cloud_speech_v2::model::Recognizer;
1824 /// let x = Recognizer::new().set_uid("example");
1825 /// ```
1826 pub fn set_uid<T: std::convert::Into<std::string::String>>(mut self, v: T) -> Self {
1827 self.uid = v.into();
1828 self
1829 }
1830
1831 /// Sets the value of [display_name][crate::model::Recognizer::display_name].
1832 ///
1833 /// # Example
1834 /// ```ignore,no_run
1835 /// # use google_cloud_speech_v2::model::Recognizer;
1836 /// let x = Recognizer::new().set_display_name("example");
1837 /// ```
1838 pub fn set_display_name<T: std::convert::Into<std::string::String>>(mut self, v: T) -> Self {
1839 self.display_name = v.into();
1840 self
1841 }
1842
1843 /// Sets the value of [model][crate::model::Recognizer::model].
1844 ///
1845 /// # Example
1846 /// ```ignore,no_run
1847 /// # use google_cloud_speech_v2::model::Recognizer;
1848 /// let x = Recognizer::new().set_model("example");
1849 /// ```
1850 #[deprecated]
1851 pub fn set_model<T: std::convert::Into<std::string::String>>(mut self, v: T) -> Self {
1852 self.model = v.into();
1853 self
1854 }
1855
1856 /// Sets the value of [language_codes][crate::model::Recognizer::language_codes].
1857 ///
1858 /// # Example
1859 /// ```ignore,no_run
1860 /// # use google_cloud_speech_v2::model::Recognizer;
1861 /// let x = Recognizer::new().set_language_codes(["a", "b", "c"]);
1862 /// ```
1863 #[deprecated]
1864 pub fn set_language_codes<T, V>(mut self, v: T) -> Self
1865 where
1866 T: std::iter::IntoIterator<Item = V>,
1867 V: std::convert::Into<std::string::String>,
1868 {
1869 use std::iter::Iterator;
1870 self.language_codes = v.into_iter().map(|i| i.into()).collect();
1871 self
1872 }
1873
1874 /// Sets the value of [default_recognition_config][crate::model::Recognizer::default_recognition_config].
1875 ///
1876 /// # Example
1877 /// ```ignore,no_run
1878 /// # use google_cloud_speech_v2::model::Recognizer;
1879 /// use google_cloud_speech_v2::model::RecognitionConfig;
1880 /// let x = Recognizer::new().set_default_recognition_config(RecognitionConfig::default()/* use setters */);
1881 /// ```
1882 pub fn set_default_recognition_config<T>(mut self, v: T) -> Self
1883 where
1884 T: std::convert::Into<crate::model::RecognitionConfig>,
1885 {
1886 self.default_recognition_config = std::option::Option::Some(v.into());
1887 self
1888 }
1889
1890 /// Sets or clears the value of [default_recognition_config][crate::model::Recognizer::default_recognition_config].
1891 ///
1892 /// # Example
1893 /// ```ignore,no_run
1894 /// # use google_cloud_speech_v2::model::Recognizer;
1895 /// use google_cloud_speech_v2::model::RecognitionConfig;
1896 /// let x = Recognizer::new().set_or_clear_default_recognition_config(Some(RecognitionConfig::default()/* use setters */));
1897 /// let x = Recognizer::new().set_or_clear_default_recognition_config(None::<RecognitionConfig>);
1898 /// ```
1899 pub fn set_or_clear_default_recognition_config<T>(mut self, v: std::option::Option<T>) -> Self
1900 where
1901 T: std::convert::Into<crate::model::RecognitionConfig>,
1902 {
1903 self.default_recognition_config = v.map(|x| x.into());
1904 self
1905 }
1906
1907 /// Sets the value of [annotations][crate::model::Recognizer::annotations].
1908 ///
1909 /// # Example
1910 /// ```ignore,no_run
1911 /// # use google_cloud_speech_v2::model::Recognizer;
1912 /// let x = Recognizer::new().set_annotations([
1913 /// ("key0", "abc"),
1914 /// ("key1", "xyz"),
1915 /// ]);
1916 /// ```
1917 pub fn set_annotations<T, K, V>(mut self, v: T) -> Self
1918 where
1919 T: std::iter::IntoIterator<Item = (K, V)>,
1920 K: std::convert::Into<std::string::String>,
1921 V: std::convert::Into<std::string::String>,
1922 {
1923 use std::iter::Iterator;
1924 self.annotations = v.into_iter().map(|(k, v)| (k.into(), v.into())).collect();
1925 self
1926 }
1927
1928 /// Sets the value of [state][crate::model::Recognizer::state].
1929 ///
1930 /// # Example
1931 /// ```ignore,no_run
1932 /// # use google_cloud_speech_v2::model::Recognizer;
1933 /// use google_cloud_speech_v2::model::recognizer::State;
1934 /// let x0 = Recognizer::new().set_state(State::Active);
1935 /// let x1 = Recognizer::new().set_state(State::Deleted);
1936 /// ```
1937 pub fn set_state<T: std::convert::Into<crate::model::recognizer::State>>(
1938 mut self,
1939 v: T,
1940 ) -> Self {
1941 self.state = v.into();
1942 self
1943 }
1944
1945 /// Sets the value of [create_time][crate::model::Recognizer::create_time].
1946 ///
1947 /// # Example
1948 /// ```ignore,no_run
1949 /// # use google_cloud_speech_v2::model::Recognizer;
1950 /// use wkt::Timestamp;
1951 /// let x = Recognizer::new().set_create_time(Timestamp::default()/* use setters */);
1952 /// ```
1953 pub fn set_create_time<T>(mut self, v: T) -> Self
1954 where
1955 T: std::convert::Into<wkt::Timestamp>,
1956 {
1957 self.create_time = std::option::Option::Some(v.into());
1958 self
1959 }
1960
1961 /// Sets or clears the value of [create_time][crate::model::Recognizer::create_time].
1962 ///
1963 /// # Example
1964 /// ```ignore,no_run
1965 /// # use google_cloud_speech_v2::model::Recognizer;
1966 /// use wkt::Timestamp;
1967 /// let x = Recognizer::new().set_or_clear_create_time(Some(Timestamp::default()/* use setters */));
1968 /// let x = Recognizer::new().set_or_clear_create_time(None::<Timestamp>);
1969 /// ```
1970 pub fn set_or_clear_create_time<T>(mut self, v: std::option::Option<T>) -> Self
1971 where
1972 T: std::convert::Into<wkt::Timestamp>,
1973 {
1974 self.create_time = v.map(|x| x.into());
1975 self
1976 }
1977
1978 /// Sets the value of [update_time][crate::model::Recognizer::update_time].
1979 ///
1980 /// # Example
1981 /// ```ignore,no_run
1982 /// # use google_cloud_speech_v2::model::Recognizer;
1983 /// use wkt::Timestamp;
1984 /// let x = Recognizer::new().set_update_time(Timestamp::default()/* use setters */);
1985 /// ```
1986 pub fn set_update_time<T>(mut self, v: T) -> Self
1987 where
1988 T: std::convert::Into<wkt::Timestamp>,
1989 {
1990 self.update_time = std::option::Option::Some(v.into());
1991 self
1992 }
1993
1994 /// Sets or clears the value of [update_time][crate::model::Recognizer::update_time].
1995 ///
1996 /// # Example
1997 /// ```ignore,no_run
1998 /// # use google_cloud_speech_v2::model::Recognizer;
1999 /// use wkt::Timestamp;
2000 /// let x = Recognizer::new().set_or_clear_update_time(Some(Timestamp::default()/* use setters */));
2001 /// let x = Recognizer::new().set_or_clear_update_time(None::<Timestamp>);
2002 /// ```
2003 pub fn set_or_clear_update_time<T>(mut self, v: std::option::Option<T>) -> Self
2004 where
2005 T: std::convert::Into<wkt::Timestamp>,
2006 {
2007 self.update_time = v.map(|x| x.into());
2008 self
2009 }
2010
2011 /// Sets the value of [delete_time][crate::model::Recognizer::delete_time].
2012 ///
2013 /// # Example
2014 /// ```ignore,no_run
2015 /// # use google_cloud_speech_v2::model::Recognizer;
2016 /// use wkt::Timestamp;
2017 /// let x = Recognizer::new().set_delete_time(Timestamp::default()/* use setters */);
2018 /// ```
2019 pub fn set_delete_time<T>(mut self, v: T) -> Self
2020 where
2021 T: std::convert::Into<wkt::Timestamp>,
2022 {
2023 self.delete_time = std::option::Option::Some(v.into());
2024 self
2025 }
2026
2027 /// Sets or clears the value of [delete_time][crate::model::Recognizer::delete_time].
2028 ///
2029 /// # Example
2030 /// ```ignore,no_run
2031 /// # use google_cloud_speech_v2::model::Recognizer;
2032 /// use wkt::Timestamp;
2033 /// let x = Recognizer::new().set_or_clear_delete_time(Some(Timestamp::default()/* use setters */));
2034 /// let x = Recognizer::new().set_or_clear_delete_time(None::<Timestamp>);
2035 /// ```
2036 pub fn set_or_clear_delete_time<T>(mut self, v: std::option::Option<T>) -> Self
2037 where
2038 T: std::convert::Into<wkt::Timestamp>,
2039 {
2040 self.delete_time = v.map(|x| x.into());
2041 self
2042 }
2043
2044 /// Sets the value of [expire_time][crate::model::Recognizer::expire_time].
2045 ///
2046 /// # Example
2047 /// ```ignore,no_run
2048 /// # use google_cloud_speech_v2::model::Recognizer;
2049 /// use wkt::Timestamp;
2050 /// let x = Recognizer::new().set_expire_time(Timestamp::default()/* use setters */);
2051 /// ```
2052 pub fn set_expire_time<T>(mut self, v: T) -> Self
2053 where
2054 T: std::convert::Into<wkt::Timestamp>,
2055 {
2056 self.expire_time = std::option::Option::Some(v.into());
2057 self
2058 }
2059
2060 /// Sets or clears the value of [expire_time][crate::model::Recognizer::expire_time].
2061 ///
2062 /// # Example
2063 /// ```ignore,no_run
2064 /// # use google_cloud_speech_v2::model::Recognizer;
2065 /// use wkt::Timestamp;
2066 /// let x = Recognizer::new().set_or_clear_expire_time(Some(Timestamp::default()/* use setters */));
2067 /// let x = Recognizer::new().set_or_clear_expire_time(None::<Timestamp>);
2068 /// ```
2069 pub fn set_or_clear_expire_time<T>(mut self, v: std::option::Option<T>) -> Self
2070 where
2071 T: std::convert::Into<wkt::Timestamp>,
2072 {
2073 self.expire_time = v.map(|x| x.into());
2074 self
2075 }
2076
2077 /// Sets the value of [etag][crate::model::Recognizer::etag].
2078 ///
2079 /// # Example
2080 /// ```ignore,no_run
2081 /// # use google_cloud_speech_v2::model::Recognizer;
2082 /// let x = Recognizer::new().set_etag("example");
2083 /// ```
2084 pub fn set_etag<T: std::convert::Into<std::string::String>>(mut self, v: T) -> Self {
2085 self.etag = v.into();
2086 self
2087 }
2088
2089 /// Sets the value of [reconciling][crate::model::Recognizer::reconciling].
2090 ///
2091 /// # Example
2092 /// ```ignore,no_run
2093 /// # use google_cloud_speech_v2::model::Recognizer;
2094 /// let x = Recognizer::new().set_reconciling(true);
2095 /// ```
2096 pub fn set_reconciling<T: std::convert::Into<bool>>(mut self, v: T) -> Self {
2097 self.reconciling = v.into();
2098 self
2099 }
2100
2101 /// Sets the value of [kms_key_name][crate::model::Recognizer::kms_key_name].
2102 ///
2103 /// # Example
2104 /// ```ignore,no_run
2105 /// # use google_cloud_speech_v2::model::Recognizer;
2106 /// let x = Recognizer::new().set_kms_key_name("example");
2107 /// ```
2108 pub fn set_kms_key_name<T: std::convert::Into<std::string::String>>(mut self, v: T) -> Self {
2109 self.kms_key_name = v.into();
2110 self
2111 }
2112
2113 /// Sets the value of [kms_key_version_name][crate::model::Recognizer::kms_key_version_name].
2114 ///
2115 /// # Example
2116 /// ```ignore,no_run
2117 /// # use google_cloud_speech_v2::model::Recognizer;
2118 /// let x = Recognizer::new().set_kms_key_version_name("example");
2119 /// ```
2120 pub fn set_kms_key_version_name<T: std::convert::Into<std::string::String>>(
2121 mut self,
2122 v: T,
2123 ) -> Self {
2124 self.kms_key_version_name = v.into();
2125 self
2126 }
2127}
2128
impl wkt::message::Message for Recognizer {
    /// Returns the fully-qualified type URL identifying this message.
    fn typename() -> &'static str {
        "type.googleapis.com/google.cloud.speech.v2.Recognizer"
    }
}
2134
2135/// Defines additional types related to [Recognizer].
2136pub mod recognizer {
2137 #[allow(unused_imports)]
2138 use super::*;
2139
2140 /// Set of states that define the lifecycle of a Recognizer.
2141 ///
2142 /// # Working with unknown values
2143 ///
2144 /// This enum is defined as `#[non_exhaustive]` because Google Cloud may add
2145 /// additional enum variants at any time. Adding new variants is not considered
2146 /// a breaking change. Applications should write their code in anticipation of:
2147 ///
2148 /// - New values appearing in future releases of the client library, **and**
2149 /// - New values received dynamically, without application changes.
2150 ///
2151 /// Please consult the [Working with enums] section in the user guide for some
2152 /// guidelines.
2153 ///
2154 /// [Working with enums]: https://google-cloud-rust.github.io/working_with_enums.html
2155 #[derive(Clone, Debug, PartialEq)]
2156 #[non_exhaustive]
2157 pub enum State {
2158 /// The default value. This value is used if the state is omitted.
2159 Unspecified,
2160 /// The Recognizer is active and ready for use.
2161 Active,
2162 /// This Recognizer has been deleted.
2163 Deleted,
2164 /// If set, the enum was initialized with an unknown value.
2165 ///
2166 /// Applications can examine the value using [State::value] or
2167 /// [State::name].
2168 UnknownValue(state::UnknownValue),
2169 }
2170
2171 #[doc(hidden)]
2172 pub mod state {
2173 #[allow(unused_imports)]
2174 use super::*;
2175 #[derive(Clone, Debug, PartialEq)]
2176 pub struct UnknownValue(pub(crate) wkt::internal::UnknownEnumValue);
2177 }
2178
2179 impl State {
2180 /// Gets the enum value.
2181 ///
2182 /// Returns `None` if the enum contains an unknown value deserialized from
2183 /// the string representation of enums.
2184 pub fn value(&self) -> std::option::Option<i32> {
2185 match self {
2186 Self::Unspecified => std::option::Option::Some(0),
2187 Self::Active => std::option::Option::Some(2),
2188 Self::Deleted => std::option::Option::Some(4),
2189 Self::UnknownValue(u) => u.0.value(),
2190 }
2191 }
2192
2193 /// Gets the enum value as a string.
2194 ///
2195 /// Returns `None` if the enum contains an unknown value deserialized from
2196 /// the integer representation of enums.
2197 pub fn name(&self) -> std::option::Option<&str> {
2198 match self {
2199 Self::Unspecified => std::option::Option::Some("STATE_UNSPECIFIED"),
2200 Self::Active => std::option::Option::Some("ACTIVE"),
2201 Self::Deleted => std::option::Option::Some("DELETED"),
2202 Self::UnknownValue(u) => u.0.name(),
2203 }
2204 }
2205 }
2206
2207 impl std::default::Default for State {
2208 fn default() -> Self {
2209 use std::convert::From;
2210 Self::from(0)
2211 }
2212 }
2213
2214 impl std::fmt::Display for State {
2215 fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::result::Result<(), std::fmt::Error> {
2216 wkt::internal::display_enum(f, self.name(), self.value())
2217 }
2218 }
2219
2220 impl std::convert::From<i32> for State {
2221 fn from(value: i32) -> Self {
2222 match value {
2223 0 => Self::Unspecified,
2224 2 => Self::Active,
2225 4 => Self::Deleted,
2226 _ => Self::UnknownValue(state::UnknownValue(
2227 wkt::internal::UnknownEnumValue::Integer(value),
2228 )),
2229 }
2230 }
2231 }
2232
2233 impl std::convert::From<&str> for State {
2234 fn from(value: &str) -> Self {
2235 use std::string::ToString;
2236 match value {
2237 "STATE_UNSPECIFIED" => Self::Unspecified,
2238 "ACTIVE" => Self::Active,
2239 "DELETED" => Self::Deleted,
2240 _ => Self::UnknownValue(state::UnknownValue(
2241 wkt::internal::UnknownEnumValue::String(value.to_string()),
2242 )),
2243 }
2244 }
2245 }
2246
2247 impl serde::ser::Serialize for State {
2248 fn serialize<S>(&self, serializer: S) -> std::result::Result<S::Ok, S::Error>
2249 where
2250 S: serde::Serializer,
2251 {
2252 match self {
2253 Self::Unspecified => serializer.serialize_i32(0),
2254 Self::Active => serializer.serialize_i32(2),
2255 Self::Deleted => serializer.serialize_i32(4),
2256 Self::UnknownValue(u) => u.0.serialize(serializer),
2257 }
2258 }
2259 }
2260
2261 impl<'de> serde::de::Deserialize<'de> for State {
2262 fn deserialize<D>(deserializer: D) -> std::result::Result<Self, D::Error>
2263 where
2264 D: serde::Deserializer<'de>,
2265 {
2266 deserializer.deserialize_any(wkt::internal::EnumVisitor::<State>::new(
2267 ".google.cloud.speech.v2.Recognizer.State",
2268 ))
2269 }
2270 }
2271}
2272
/// Automatically detected decoding parameters.
/// Supported for the following encodings:
///
/// * WAV_LINEAR16: 16-bit signed little-endian PCM samples in a WAV container.
///
/// * WAV_MULAW: 8-bit companded mulaw samples in a WAV container.
///
/// * WAV_ALAW: 8-bit companded alaw samples in a WAV container.
///
/// * RFC4867_5_AMR: AMR frames with an rfc4867.5 header.
///
/// * RFC4867_5_AMRWB: AMR-WB frames with an rfc4867.5 header.
///
/// * FLAC: FLAC frames in the "native FLAC" container format.
///
/// * MP3: MPEG audio frames with optional (ignored) ID3 metadata.
///
/// * OGG_OPUS: Opus audio frames in an Ogg container.
///
/// * WEBM_OPUS: Opus audio frames in a WebM container.
///
/// * MP4_AAC: AAC audio frames in an MP4 container.
///
/// * M4A_AAC: AAC audio frames in an M4A container.
///
/// * MOV_AAC: AAC audio frames in an MOV container.
///
#[derive(Clone, Default, PartialEq)]
#[non_exhaustive]
pub struct AutoDetectDecodingConfig {
    // NOTE(review): this message has no user-settable fields; the map below
    // presumably holds unmodeled wire fields — confirm against `mod serialize`.
    pub(crate) _unknown_fields: serde_json::Map<std::string::String, serde_json::Value>,
}
2305
2306impl AutoDetectDecodingConfig {
2307 pub fn new() -> Self {
2308 std::default::Default::default()
2309 }
2310}
2311
impl wkt::message::Message for AutoDetectDecodingConfig {
    /// Returns the fully-qualified type URL identifying this message.
    fn typename() -> &'static str {
        "type.googleapis.com/google.cloud.speech.v2.AutoDetectDecodingConfig"
    }
}
2317
/// Explicitly specified decoding parameters.
#[derive(Clone, Default, PartialEq)]
#[non_exhaustive]
pub struct ExplicitDecodingConfig {
    /// Required. Encoding of the audio data sent for recognition.
    pub encoding: crate::model::explicit_decoding_config::AudioEncoding,

    /// Optional. Sample rate in Hertz of the audio data sent for recognition.
    /// Valid values are: 8000-48000, and 16000 is optimal. For best results, set
    /// the sampling rate of the audio source to 16000 Hz. If that's not possible,
    /// use the native sample rate of the audio source (instead of resampling).
    /// Note that this field is marked as OPTIONAL for backward compatibility
    /// reasons. It is (and has always been) effectively REQUIRED.
    pub sample_rate_hertz: i32,

    /// Optional. Number of channels present in the audio data sent for
    /// recognition. Note that this field is marked as OPTIONAL for backward
    /// compatibility reasons. It is (and has always been) effectively REQUIRED.
    ///
    /// The maximum allowed value is 8.
    pub audio_channel_count: i32,

    // NOTE(review): presumably holds wire fields not modeled by the named
    // members above, handled by `mod serialize`/`mod deserialize` — confirm.
    pub(crate) _unknown_fields: serde_json::Map<std::string::String, serde_json::Value>,
}
2342
2343impl ExplicitDecodingConfig {
2344 pub fn new() -> Self {
2345 std::default::Default::default()
2346 }
2347
2348 /// Sets the value of [encoding][crate::model::ExplicitDecodingConfig::encoding].
2349 ///
2350 /// # Example
2351 /// ```ignore,no_run
2352 /// # use google_cloud_speech_v2::model::ExplicitDecodingConfig;
2353 /// use google_cloud_speech_v2::model::explicit_decoding_config::AudioEncoding;
2354 /// let x0 = ExplicitDecodingConfig::new().set_encoding(AudioEncoding::Linear16);
2355 /// let x1 = ExplicitDecodingConfig::new().set_encoding(AudioEncoding::Mulaw);
2356 /// let x2 = ExplicitDecodingConfig::new().set_encoding(AudioEncoding::Alaw);
2357 /// ```
2358 pub fn set_encoding<
2359 T: std::convert::Into<crate::model::explicit_decoding_config::AudioEncoding>,
2360 >(
2361 mut self,
2362 v: T,
2363 ) -> Self {
2364 self.encoding = v.into();
2365 self
2366 }
2367
2368 /// Sets the value of [sample_rate_hertz][crate::model::ExplicitDecodingConfig::sample_rate_hertz].
2369 ///
2370 /// # Example
2371 /// ```ignore,no_run
2372 /// # use google_cloud_speech_v2::model::ExplicitDecodingConfig;
2373 /// let x = ExplicitDecodingConfig::new().set_sample_rate_hertz(42);
2374 /// ```
2375 pub fn set_sample_rate_hertz<T: std::convert::Into<i32>>(mut self, v: T) -> Self {
2376 self.sample_rate_hertz = v.into();
2377 self
2378 }
2379
2380 /// Sets the value of [audio_channel_count][crate::model::ExplicitDecodingConfig::audio_channel_count].
2381 ///
2382 /// # Example
2383 /// ```ignore,no_run
2384 /// # use google_cloud_speech_v2::model::ExplicitDecodingConfig;
2385 /// let x = ExplicitDecodingConfig::new().set_audio_channel_count(42);
2386 /// ```
2387 pub fn set_audio_channel_count<T: std::convert::Into<i32>>(mut self, v: T) -> Self {
2388 self.audio_channel_count = v.into();
2389 self
2390 }
2391}
2392
impl wkt::message::Message for ExplicitDecodingConfig {
    /// Returns the fully-qualified type URL identifying this message.
    fn typename() -> &'static str {
        "type.googleapis.com/google.cloud.speech.v2.ExplicitDecodingConfig"
    }
}
2398
/// Defines additional types related to [ExplicitDecodingConfig].
pub mod explicit_decoding_config {
    #[allow(unused_imports)]
    use super::*;

    /// Supported audio data encodings.
    ///
    /// # Working with unknown values
    ///
    /// This enum is defined as `#[non_exhaustive]` because Google Cloud may add
    /// additional enum variants at any time. Adding new variants is not considered
    /// a breaking change. Applications should write their code in anticipation of:
    ///
    /// - New values appearing in future releases of the client library, **and**
    /// - New values received dynamically, without application changes.
    ///
    /// Please consult the [Working with enums] section in the user guide for some
    /// guidelines.
    ///
    /// [Working with enums]: https://google-cloud-rust.github.io/working_with_enums.html
    #[derive(Clone, Debug, PartialEq)]
    #[non_exhaustive]
    pub enum AudioEncoding {
        /// Default value. This value is unused.
        Unspecified,
        /// Headerless 16-bit signed little-endian PCM samples.
        Linear16,
        /// Headerless 8-bit companded mulaw samples.
        Mulaw,
        /// Headerless 8-bit companded alaw samples.
        Alaw,
        /// AMR frames with an rfc4867.5 header.
        Amr,
        /// AMR-WB frames with an rfc4867.5 header.
        AmrWb,
        /// FLAC frames in the "native FLAC" container format.
        Flac,
        /// MPEG audio frames with optional (ignored) ID3 metadata.
        Mp3,
        /// Opus audio frames in an Ogg container.
        OggOpus,
        /// Opus audio frames in a WebM container.
        WebmOpus,
        /// AAC audio frames in an MP4 container.
        Mp4Aac,
        /// AAC audio frames in an M4A container.
        M4AAac,
        /// AAC audio frames in an MOV container.
        MovAac,
        /// If set, the enum was initialized with an unknown value.
        ///
        /// Applications can examine the value using [AudioEncoding::value] or
        /// [AudioEncoding::name].
        UnknownValue(audio_encoding::UnknownValue),
    }

    // Implementation detail: wraps the raw wire form (integer or string) of an
    // enum value this client library does not recognize, so the value can be
    // round-tripped without loss.
    #[doc(hidden)]
    pub mod audio_encoding {
        #[allow(unused_imports)]
        use super::*;
        #[derive(Clone, Debug, PartialEq)]
        pub struct UnknownValue(pub(crate) wkt::internal::UnknownEnumValue);
    }

    impl AudioEncoding {
        /// Gets the enum value.
        ///
        /// Returns `None` if the enum contains an unknown value deserialized from
        /// the string representation of enums.
        pub fn value(&self) -> std::option::Option<i32> {
            match self {
                Self::Unspecified => std::option::Option::Some(0),
                Self::Linear16 => std::option::Option::Some(1),
                Self::Mulaw => std::option::Option::Some(2),
                Self::Alaw => std::option::Option::Some(3),
                Self::Amr => std::option::Option::Some(4),
                Self::AmrWb => std::option::Option::Some(5),
                Self::Flac => std::option::Option::Some(6),
                Self::Mp3 => std::option::Option::Some(7),
                Self::OggOpus => std::option::Option::Some(8),
                Self::WebmOpus => std::option::Option::Some(9),
                Self::Mp4Aac => std::option::Option::Some(10),
                Self::M4AAac => std::option::Option::Some(11),
                Self::MovAac => std::option::Option::Some(12),
                Self::UnknownValue(u) => u.0.value(),
            }
        }

        /// Gets the enum value as a string.
        ///
        /// Returns `None` if the enum contains an unknown value deserialized from
        /// the integer representation of enums.
        pub fn name(&self) -> std::option::Option<&str> {
            match self {
                Self::Unspecified => std::option::Option::Some("AUDIO_ENCODING_UNSPECIFIED"),
                Self::Linear16 => std::option::Option::Some("LINEAR16"),
                Self::Mulaw => std::option::Option::Some("MULAW"),
                Self::Alaw => std::option::Option::Some("ALAW"),
                Self::Amr => std::option::Option::Some("AMR"),
                Self::AmrWb => std::option::Option::Some("AMR_WB"),
                Self::Flac => std::option::Option::Some("FLAC"),
                Self::Mp3 => std::option::Option::Some("MP3"),
                Self::OggOpus => std::option::Option::Some("OGG_OPUS"),
                Self::WebmOpus => std::option::Option::Some("WEBM_OPUS"),
                Self::Mp4Aac => std::option::Option::Some("MP4_AAC"),
                Self::M4AAac => std::option::Option::Some("M4A_AAC"),
                Self::MovAac => std::option::Option::Some("MOV_AAC"),
                Self::UnknownValue(u) => u.0.name(),
            }
        }
    }

    // Protobuf enums default to their zero value; `Self::from(0)` yields
    // `Unspecified`.
    impl std::default::Default for AudioEncoding {
        fn default() -> Self {
            use std::convert::From;
            Self::from(0)
        }
    }

    // Human-readable form built by `wkt::internal::display_enum` from the
    // symbolic name and/or numeric value.
    impl std::fmt::Display for AudioEncoding {
        fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::result::Result<(), std::fmt::Error> {
            wkt::internal::display_enum(f, self.name(), self.value())
        }
    }

    impl std::convert::From<i32> for AudioEncoding {
        fn from(value: i32) -> Self {
            match value {
                0 => Self::Unspecified,
                1 => Self::Linear16,
                2 => Self::Mulaw,
                3 => Self::Alaw,
                4 => Self::Amr,
                5 => Self::AmrWb,
                6 => Self::Flac,
                7 => Self::Mp3,
                8 => Self::OggOpus,
                9 => Self::WebmOpus,
                10 => Self::Mp4Aac,
                11 => Self::M4AAac,
                12 => Self::MovAac,
                // Out-of-range integers are preserved, not rejected, so they
                // survive a serialize/deserialize round trip.
                _ => Self::UnknownValue(audio_encoding::UnknownValue(
                    wkt::internal::UnknownEnumValue::Integer(value),
                )),
            }
        }
    }

    impl std::convert::From<&str> for AudioEncoding {
        fn from(value: &str) -> Self {
            use std::string::ToString;
            match value {
                "AUDIO_ENCODING_UNSPECIFIED" => Self::Unspecified,
                "LINEAR16" => Self::Linear16,
                "MULAW" => Self::Mulaw,
                "ALAW" => Self::Alaw,
                "AMR" => Self::Amr,
                "AMR_WB" => Self::AmrWb,
                "FLAC" => Self::Flac,
                "MP3" => Self::Mp3,
                "OGG_OPUS" => Self::OggOpus,
                "WEBM_OPUS" => Self::WebmOpus,
                "MP4_AAC" => Self::Mp4Aac,
                "M4A_AAC" => Self::M4AAac,
                "MOV_AAC" => Self::MovAac,
                // Unrecognized names are preserved as strings, mirroring the
                // integer case above.
                _ => Self::UnknownValue(audio_encoding::UnknownValue(
                    wkt::internal::UnknownEnumValue::String(value.to_string()),
                )),
            }
        }
    }

    // Known variants serialize as their protobuf integer value; unknown values
    // re-serialize in whatever form (integer or string) they were received.
    impl serde::ser::Serialize for AudioEncoding {
        fn serialize<S>(&self, serializer: S) -> std::result::Result<S::Ok, S::Error>
        where
            S: serde::Serializer,
        {
            match self {
                Self::Unspecified => serializer.serialize_i32(0),
                Self::Linear16 => serializer.serialize_i32(1),
                Self::Mulaw => serializer.serialize_i32(2),
                Self::Alaw => serializer.serialize_i32(3),
                Self::Amr => serializer.serialize_i32(4),
                Self::AmrWb => serializer.serialize_i32(5),
                Self::Flac => serializer.serialize_i32(6),
                Self::Mp3 => serializer.serialize_i32(7),
                Self::OggOpus => serializer.serialize_i32(8),
                Self::WebmOpus => serializer.serialize_i32(9),
                Self::Mp4Aac => serializer.serialize_i32(10),
                Self::M4AAac => serializer.serialize_i32(11),
                Self::MovAac => serializer.serialize_i32(12),
                Self::UnknownValue(u) => u.0.serialize(serializer),
            }
        }
    }

    // `deserialize_any` lets `EnumVisitor` accept either wire representation of
    // the enum (the `From<i32>` and `From<&str>` conversions above do the work).
    impl<'de> serde::de::Deserialize<'de> for AudioEncoding {
        fn deserialize<D>(deserializer: D) -> std::result::Result<Self, D::Error>
        where
            D: serde::Deserializer<'de>,
        {
            deserializer.deserialize_any(wkt::internal::EnumVisitor::<AudioEncoding>::new(
                ".google.cloud.speech.v2.ExplicitDecodingConfig.AudioEncoding",
            ))
        }
    }
}
2606
2607/// Configuration to enable speaker diarization.
2608#[derive(Clone, Default, PartialEq)]
2609#[non_exhaustive]
2610pub struct SpeakerDiarizationConfig {
2611 /// Optional. The system automatically determines the number of speakers. This
2612 /// value is not currently used.
2613 pub min_speaker_count: i32,
2614
2615 /// Optional. The system automatically determines the number of speakers. This
2616 /// value is not currently used.
2617 pub max_speaker_count: i32,
2618
2619 pub(crate) _unknown_fields: serde_json::Map<std::string::String, serde_json::Value>,
2620}
2621
2622impl SpeakerDiarizationConfig {
2623 pub fn new() -> Self {
2624 std::default::Default::default()
2625 }
2626
2627 /// Sets the value of [min_speaker_count][crate::model::SpeakerDiarizationConfig::min_speaker_count].
2628 ///
2629 /// # Example
2630 /// ```ignore,no_run
2631 /// # use google_cloud_speech_v2::model::SpeakerDiarizationConfig;
2632 /// let x = SpeakerDiarizationConfig::new().set_min_speaker_count(42);
2633 /// ```
2634 pub fn set_min_speaker_count<T: std::convert::Into<i32>>(mut self, v: T) -> Self {
2635 self.min_speaker_count = v.into();
2636 self
2637 }
2638
2639 /// Sets the value of [max_speaker_count][crate::model::SpeakerDiarizationConfig::max_speaker_count].
2640 ///
2641 /// # Example
2642 /// ```ignore,no_run
2643 /// # use google_cloud_speech_v2::model::SpeakerDiarizationConfig;
2644 /// let x = SpeakerDiarizationConfig::new().set_max_speaker_count(42);
2645 /// ```
2646 pub fn set_max_speaker_count<T: std::convert::Into<i32>>(mut self, v: T) -> Self {
2647 self.max_speaker_count = v.into();
2648 self
2649 }
2650}
2651
2652impl wkt::message::Message for SpeakerDiarizationConfig {
2653 fn typename() -> &'static str {
2654 "type.googleapis.com/google.cloud.speech.v2.SpeakerDiarizationConfig"
2655 }
2656}
2657
2658/// Configuration to enable custom prompt in chirp3.
2659#[derive(Clone, Default, PartialEq)]
2660#[non_exhaustive]
2661pub struct CustomPromptConfig {
2662 /// Optional. The custom instructions to override the existing instructions for
2663 /// chirp3.
2664 pub custom_prompt: std::string::String,
2665
2666 pub(crate) _unknown_fields: serde_json::Map<std::string::String, serde_json::Value>,
2667}
2668
2669impl CustomPromptConfig {
2670 pub fn new() -> Self {
2671 std::default::Default::default()
2672 }
2673
2674 /// Sets the value of [custom_prompt][crate::model::CustomPromptConfig::custom_prompt].
2675 ///
2676 /// # Example
2677 /// ```ignore,no_run
2678 /// # use google_cloud_speech_v2::model::CustomPromptConfig;
2679 /// let x = CustomPromptConfig::new().set_custom_prompt("example");
2680 /// ```
2681 pub fn set_custom_prompt<T: std::convert::Into<std::string::String>>(mut self, v: T) -> Self {
2682 self.custom_prompt = v.into();
2683 self
2684 }
2685}
2686
2687impl wkt::message::Message for CustomPromptConfig {
2688 fn typename() -> &'static str {
2689 "type.googleapis.com/google.cloud.speech.v2.CustomPromptConfig"
2690 }
2691}
2692
2693/// Available recognition features.
2694#[derive(Clone, Default, PartialEq)]
2695#[non_exhaustive]
2696pub struct RecognitionFeatures {
2697 /// If set to `true`, the server will attempt to filter out profanities,
2698 /// replacing all but the initial character in each filtered word with
2699 /// asterisks, for instance, "f***". If set to `false` or omitted, profanities
2700 /// won't be filtered out.
2701 pub profanity_filter: bool,
2702
2703 /// If `true`, the top result includes a list of words and the start and end
2704 /// time offsets (timestamps) for those words. If `false`, no word-level time
2705 /// offset information is returned. The default is `false`.
2706 pub enable_word_time_offsets: bool,
2707
2708 /// If `true`, the top result includes a list of words and the confidence for
2709 /// those words. If `false`, no word-level confidence information is returned.
2710 /// The default is `false`.
2711 pub enable_word_confidence: bool,
2712
2713 /// If `true`, adds punctuation to recognition result hypotheses. This feature
2714 /// is only available in select languages. The default `false` value does not
2715 /// add punctuation to result hypotheses.
2716 pub enable_automatic_punctuation: bool,
2717
2718 /// The spoken punctuation behavior for the call. If `true`, replaces spoken
2719 /// punctuation with the corresponding symbols in the request. For example,
2720 /// "how are you question mark" becomes "how are you?". See
2721 /// <https://cloud.google.com/speech-to-text/docs/spoken-punctuation> for
2722 /// support. If `false`, spoken punctuation is not replaced.
2723 pub enable_spoken_punctuation: bool,
2724
2725 /// The spoken emoji behavior for the call. If `true`, adds spoken emoji
2726 /// formatting for the request. This will replace spoken emojis with the
2727 /// corresponding Unicode symbols in the final transcript. If `false`, spoken
2728 /// emojis are not replaced.
2729 pub enable_spoken_emojis: bool,
2730
2731 /// Mode for recognizing multi-channel audio.
2732 pub multi_channel_mode: crate::model::recognition_features::MultiChannelMode,
2733
2734 /// Configuration to enable speaker diarization. To enable diarization, set
2735 /// this field to an empty SpeakerDiarizationConfig message.
2736 pub diarization_config: std::option::Option<crate::model::SpeakerDiarizationConfig>,
2737
2738 /// Maximum number of recognition hypotheses to be returned.
2739 /// The server may return fewer than `max_alternatives`.
2740 /// Valid values are `0`-`30`. A value of `0` or `1` will return a maximum of
2741 /// one. If omitted, will return a maximum of one.
2742 pub max_alternatives: i32,
2743
2744 /// Optional. Configuration to enable custom prompt for chirp3.
2745 pub custom_prompt_config: std::option::Option<crate::model::CustomPromptConfig>,
2746
2747 pub(crate) _unknown_fields: serde_json::Map<std::string::String, serde_json::Value>,
2748}
2749
2750impl RecognitionFeatures {
2751 pub fn new() -> Self {
2752 std::default::Default::default()
2753 }
2754
2755 /// Sets the value of [profanity_filter][crate::model::RecognitionFeatures::profanity_filter].
2756 ///
2757 /// # Example
2758 /// ```ignore,no_run
2759 /// # use google_cloud_speech_v2::model::RecognitionFeatures;
2760 /// let x = RecognitionFeatures::new().set_profanity_filter(true);
2761 /// ```
2762 pub fn set_profanity_filter<T: std::convert::Into<bool>>(mut self, v: T) -> Self {
2763 self.profanity_filter = v.into();
2764 self
2765 }
2766
2767 /// Sets the value of [enable_word_time_offsets][crate::model::RecognitionFeatures::enable_word_time_offsets].
2768 ///
2769 /// # Example
2770 /// ```ignore,no_run
2771 /// # use google_cloud_speech_v2::model::RecognitionFeatures;
2772 /// let x = RecognitionFeatures::new().set_enable_word_time_offsets(true);
2773 /// ```
2774 pub fn set_enable_word_time_offsets<T: std::convert::Into<bool>>(mut self, v: T) -> Self {
2775 self.enable_word_time_offsets = v.into();
2776 self
2777 }
2778
2779 /// Sets the value of [enable_word_confidence][crate::model::RecognitionFeatures::enable_word_confidence].
2780 ///
2781 /// # Example
2782 /// ```ignore,no_run
2783 /// # use google_cloud_speech_v2::model::RecognitionFeatures;
2784 /// let x = RecognitionFeatures::new().set_enable_word_confidence(true);
2785 /// ```
2786 pub fn set_enable_word_confidence<T: std::convert::Into<bool>>(mut self, v: T) -> Self {
2787 self.enable_word_confidence = v.into();
2788 self
2789 }
2790
2791 /// Sets the value of [enable_automatic_punctuation][crate::model::RecognitionFeatures::enable_automatic_punctuation].
2792 ///
2793 /// # Example
2794 /// ```ignore,no_run
2795 /// # use google_cloud_speech_v2::model::RecognitionFeatures;
2796 /// let x = RecognitionFeatures::new().set_enable_automatic_punctuation(true);
2797 /// ```
2798 pub fn set_enable_automatic_punctuation<T: std::convert::Into<bool>>(mut self, v: T) -> Self {
2799 self.enable_automatic_punctuation = v.into();
2800 self
2801 }
2802
2803 /// Sets the value of [enable_spoken_punctuation][crate::model::RecognitionFeatures::enable_spoken_punctuation].
2804 ///
2805 /// # Example
2806 /// ```ignore,no_run
2807 /// # use google_cloud_speech_v2::model::RecognitionFeatures;
2808 /// let x = RecognitionFeatures::new().set_enable_spoken_punctuation(true);
2809 /// ```
2810 pub fn set_enable_spoken_punctuation<T: std::convert::Into<bool>>(mut self, v: T) -> Self {
2811 self.enable_spoken_punctuation = v.into();
2812 self
2813 }
2814
2815 /// Sets the value of [enable_spoken_emojis][crate::model::RecognitionFeatures::enable_spoken_emojis].
2816 ///
2817 /// # Example
2818 /// ```ignore,no_run
2819 /// # use google_cloud_speech_v2::model::RecognitionFeatures;
2820 /// let x = RecognitionFeatures::new().set_enable_spoken_emojis(true);
2821 /// ```
2822 pub fn set_enable_spoken_emojis<T: std::convert::Into<bool>>(mut self, v: T) -> Self {
2823 self.enable_spoken_emojis = v.into();
2824 self
2825 }
2826
2827 /// Sets the value of [multi_channel_mode][crate::model::RecognitionFeatures::multi_channel_mode].
2828 ///
2829 /// # Example
2830 /// ```ignore,no_run
2831 /// # use google_cloud_speech_v2::model::RecognitionFeatures;
2832 /// use google_cloud_speech_v2::model::recognition_features::MultiChannelMode;
2833 /// let x0 = RecognitionFeatures::new().set_multi_channel_mode(MultiChannelMode::SeparateRecognitionPerChannel);
2834 /// ```
2835 pub fn set_multi_channel_mode<
2836 T: std::convert::Into<crate::model::recognition_features::MultiChannelMode>,
2837 >(
2838 mut self,
2839 v: T,
2840 ) -> Self {
2841 self.multi_channel_mode = v.into();
2842 self
2843 }
2844
2845 /// Sets the value of [diarization_config][crate::model::RecognitionFeatures::diarization_config].
2846 ///
2847 /// # Example
2848 /// ```ignore,no_run
2849 /// # use google_cloud_speech_v2::model::RecognitionFeatures;
2850 /// use google_cloud_speech_v2::model::SpeakerDiarizationConfig;
2851 /// let x = RecognitionFeatures::new().set_diarization_config(SpeakerDiarizationConfig::default()/* use setters */);
2852 /// ```
2853 pub fn set_diarization_config<T>(mut self, v: T) -> Self
2854 where
2855 T: std::convert::Into<crate::model::SpeakerDiarizationConfig>,
2856 {
2857 self.diarization_config = std::option::Option::Some(v.into());
2858 self
2859 }
2860
2861 /// Sets or clears the value of [diarization_config][crate::model::RecognitionFeatures::diarization_config].
2862 ///
2863 /// # Example
2864 /// ```ignore,no_run
2865 /// # use google_cloud_speech_v2::model::RecognitionFeatures;
2866 /// use google_cloud_speech_v2::model::SpeakerDiarizationConfig;
2867 /// let x = RecognitionFeatures::new().set_or_clear_diarization_config(Some(SpeakerDiarizationConfig::default()/* use setters */));
2868 /// let x = RecognitionFeatures::new().set_or_clear_diarization_config(None::<SpeakerDiarizationConfig>);
2869 /// ```
2870 pub fn set_or_clear_diarization_config<T>(mut self, v: std::option::Option<T>) -> Self
2871 where
2872 T: std::convert::Into<crate::model::SpeakerDiarizationConfig>,
2873 {
2874 self.diarization_config = v.map(|x| x.into());
2875 self
2876 }
2877
2878 /// Sets the value of [max_alternatives][crate::model::RecognitionFeatures::max_alternatives].
2879 ///
2880 /// # Example
2881 /// ```ignore,no_run
2882 /// # use google_cloud_speech_v2::model::RecognitionFeatures;
2883 /// let x = RecognitionFeatures::new().set_max_alternatives(42);
2884 /// ```
2885 pub fn set_max_alternatives<T: std::convert::Into<i32>>(mut self, v: T) -> Self {
2886 self.max_alternatives = v.into();
2887 self
2888 }
2889
2890 /// Sets the value of [custom_prompt_config][crate::model::RecognitionFeatures::custom_prompt_config].
2891 ///
2892 /// # Example
2893 /// ```ignore,no_run
2894 /// # use google_cloud_speech_v2::model::RecognitionFeatures;
2895 /// use google_cloud_speech_v2::model::CustomPromptConfig;
2896 /// let x = RecognitionFeatures::new().set_custom_prompt_config(CustomPromptConfig::default()/* use setters */);
2897 /// ```
2898 pub fn set_custom_prompt_config<T>(mut self, v: T) -> Self
2899 where
2900 T: std::convert::Into<crate::model::CustomPromptConfig>,
2901 {
2902 self.custom_prompt_config = std::option::Option::Some(v.into());
2903 self
2904 }
2905
2906 /// Sets or clears the value of [custom_prompt_config][crate::model::RecognitionFeatures::custom_prompt_config].
2907 ///
2908 /// # Example
2909 /// ```ignore,no_run
2910 /// # use google_cloud_speech_v2::model::RecognitionFeatures;
2911 /// use google_cloud_speech_v2::model::CustomPromptConfig;
2912 /// let x = RecognitionFeatures::new().set_or_clear_custom_prompt_config(Some(CustomPromptConfig::default()/* use setters */));
2913 /// let x = RecognitionFeatures::new().set_or_clear_custom_prompt_config(None::<CustomPromptConfig>);
2914 /// ```
2915 pub fn set_or_clear_custom_prompt_config<T>(mut self, v: std::option::Option<T>) -> Self
2916 where
2917 T: std::convert::Into<crate::model::CustomPromptConfig>,
2918 {
2919 self.custom_prompt_config = v.map(|x| x.into());
2920 self
2921 }
2922}
2923
2924impl wkt::message::Message for RecognitionFeatures {
2925 fn typename() -> &'static str {
2926 "type.googleapis.com/google.cloud.speech.v2.RecognitionFeatures"
2927 }
2928}
2929
/// Defines additional types related to [RecognitionFeatures].
pub mod recognition_features {
    #[allow(unused_imports)]
    use super::*;

    /// Options for how to recognize multi-channel audio.
    ///
    /// # Working with unknown values
    ///
    /// This enum is defined as `#[non_exhaustive]` because Google Cloud may add
    /// additional enum variants at any time. Adding new variants is not considered
    /// a breaking change. Applications should write their code in anticipation of:
    ///
    /// - New values appearing in future releases of the client library, **and**
    /// - New values received dynamically, without application changes.
    ///
    /// Please consult the [Working with enums] section in the user guide for some
    /// guidelines.
    ///
    /// [Working with enums]: https://google-cloud-rust.github.io/working_with_enums.html
    #[derive(Clone, Debug, PartialEq)]
    #[non_exhaustive]
    pub enum MultiChannelMode {
        /// Default value for the multi-channel mode. If the audio contains
        /// multiple channels, only the first channel will be transcribed; other
        /// channels will be ignored.
        Unspecified,
        /// If selected, each channel in the provided audio is transcribed
        /// independently. This cannot be selected if the selected
        /// [model][google.cloud.speech.v2.Recognizer.model] is `latest_short`.
        ///
        /// [google.cloud.speech.v2.Recognizer.model]: crate::model::Recognizer::model
        SeparateRecognitionPerChannel,
        /// If set, the enum was initialized with an unknown value.
        ///
        /// Applications can examine the value using [MultiChannelMode::value] or
        /// [MultiChannelMode::name].
        UnknownValue(multi_channel_mode::UnknownValue),
    }

    // Implementation detail: wraps the raw wire form (integer or string) of an
    // enum value this client library does not recognize, so the value can be
    // round-tripped without loss.
    #[doc(hidden)]
    pub mod multi_channel_mode {
        #[allow(unused_imports)]
        use super::*;
        #[derive(Clone, Debug, PartialEq)]
        pub struct UnknownValue(pub(crate) wkt::internal::UnknownEnumValue);
    }

    impl MultiChannelMode {
        /// Gets the enum value.
        ///
        /// Returns `None` if the enum contains an unknown value deserialized from
        /// the string representation of enums.
        pub fn value(&self) -> std::option::Option<i32> {
            match self {
                Self::Unspecified => std::option::Option::Some(0),
                Self::SeparateRecognitionPerChannel => std::option::Option::Some(1),
                Self::UnknownValue(u) => u.0.value(),
            }
        }

        /// Gets the enum value as a string.
        ///
        /// Returns `None` if the enum contains an unknown value deserialized from
        /// the integer representation of enums.
        pub fn name(&self) -> std::option::Option<&str> {
            match self {
                Self::Unspecified => std::option::Option::Some("MULTI_CHANNEL_MODE_UNSPECIFIED"),
                Self::SeparateRecognitionPerChannel => {
                    std::option::Option::Some("SEPARATE_RECOGNITION_PER_CHANNEL")
                }
                Self::UnknownValue(u) => u.0.name(),
            }
        }
    }

    // Protobuf enums default to their zero value; `Self::from(0)` yields
    // `Unspecified`.
    impl std::default::Default for MultiChannelMode {
        fn default() -> Self {
            use std::convert::From;
            Self::from(0)
        }
    }

    // Human-readable form built by `wkt::internal::display_enum` from the
    // symbolic name and/or numeric value.
    impl std::fmt::Display for MultiChannelMode {
        fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::result::Result<(), std::fmt::Error> {
            wkt::internal::display_enum(f, self.name(), self.value())
        }
    }

    impl std::convert::From<i32> for MultiChannelMode {
        fn from(value: i32) -> Self {
            match value {
                0 => Self::Unspecified,
                1 => Self::SeparateRecognitionPerChannel,
                // Out-of-range integers are preserved, not rejected, so they
                // survive a serialize/deserialize round trip.
                _ => Self::UnknownValue(multi_channel_mode::UnknownValue(
                    wkt::internal::UnknownEnumValue::Integer(value),
                )),
            }
        }
    }

    impl std::convert::From<&str> for MultiChannelMode {
        fn from(value: &str) -> Self {
            use std::string::ToString;
            match value {
                "MULTI_CHANNEL_MODE_UNSPECIFIED" => Self::Unspecified,
                "SEPARATE_RECOGNITION_PER_CHANNEL" => Self::SeparateRecognitionPerChannel,
                // Unrecognized names are preserved as strings, mirroring the
                // integer case above.
                _ => Self::UnknownValue(multi_channel_mode::UnknownValue(
                    wkt::internal::UnknownEnumValue::String(value.to_string()),
                )),
            }
        }
    }

    // Known variants serialize as their protobuf integer value; unknown values
    // re-serialize in whatever form (integer or string) they were received.
    impl serde::ser::Serialize for MultiChannelMode {
        fn serialize<S>(&self, serializer: S) -> std::result::Result<S::Ok, S::Error>
        where
            S: serde::Serializer,
        {
            match self {
                Self::Unspecified => serializer.serialize_i32(0),
                Self::SeparateRecognitionPerChannel => serializer.serialize_i32(1),
                Self::UnknownValue(u) => u.0.serialize(serializer),
            }
        }
    }

    // `deserialize_any` lets `EnumVisitor` accept either wire representation of
    // the enum (the `From<i32>` and `From<&str>` conversions above do the work).
    impl<'de> serde::de::Deserialize<'de> for MultiChannelMode {
        fn deserialize<D>(deserializer: D) -> std::result::Result<Self, D::Error>
        where
            D: serde::Deserializer<'de>,
        {
            deserializer.deserialize_any(wkt::internal::EnumVisitor::<MultiChannelMode>::new(
                ".google.cloud.speech.v2.RecognitionFeatures.MultiChannelMode",
            ))
        }
    }
}
3068
3069/// Transcription normalization configuration. Use transcription normalization
3070/// to automatically replace parts of the transcript with phrases of your
3071/// choosing. For StreamingRecognize, this normalization only applies to stable
3072/// partial transcripts (stability > 0.8) and final transcripts.
3073#[derive(Clone, Default, PartialEq)]
3074#[non_exhaustive]
3075pub struct TranscriptNormalization {
3076 /// A list of replacement entries. We will perform replacement with one entry
3077 /// at a time. For example, the second entry in ["cat" => "dog", "mountain cat"
3078 /// => "mountain dog"] will never be applied because we will always process the
3079 /// first entry before it. At most 100 entries.
3080 pub entries: std::vec::Vec<crate::model::transcript_normalization::Entry>,
3081
3082 pub(crate) _unknown_fields: serde_json::Map<std::string::String, serde_json::Value>,
3083}
3084
3085impl TranscriptNormalization {
3086 pub fn new() -> Self {
3087 std::default::Default::default()
3088 }
3089
3090 /// Sets the value of [entries][crate::model::TranscriptNormalization::entries].
3091 ///
3092 /// # Example
3093 /// ```ignore,no_run
3094 /// # use google_cloud_speech_v2::model::TranscriptNormalization;
3095 /// use google_cloud_speech_v2::model::transcript_normalization::Entry;
3096 /// let x = TranscriptNormalization::new()
3097 /// .set_entries([
3098 /// Entry::default()/* use setters */,
3099 /// Entry::default()/* use (different) setters */,
3100 /// ]);
3101 /// ```
3102 pub fn set_entries<T, V>(mut self, v: T) -> Self
3103 where
3104 T: std::iter::IntoIterator<Item = V>,
3105 V: std::convert::Into<crate::model::transcript_normalization::Entry>,
3106 {
3107 use std::iter::Iterator;
3108 self.entries = v.into_iter().map(|i| i.into()).collect();
3109 self
3110 }
3111}
3112
3113impl wkt::message::Message for TranscriptNormalization {
3114 fn typename() -> &'static str {
3115 "type.googleapis.com/google.cloud.speech.v2.TranscriptNormalization"
3116 }
3117}
3118
3119/// Defines additional types related to [TranscriptNormalization].
3120pub mod transcript_normalization {
3121 #[allow(unused_imports)]
3122 use super::*;
3123
3124 /// A single replacement configuration.
3125 #[derive(Clone, Default, PartialEq)]
3126 #[non_exhaustive]
3127 pub struct Entry {
3128 /// What to replace. Max length is 100 characters.
3129 pub search: std::string::String,
3130
3131 /// What to replace with. Max length is 100 characters.
3132 pub replace: std::string::String,
3133
3134 /// Whether the search is case sensitive.
3135 pub case_sensitive: bool,
3136
3137 pub(crate) _unknown_fields: serde_json::Map<std::string::String, serde_json::Value>,
3138 }
3139
3140 impl Entry {
3141 pub fn new() -> Self {
3142 std::default::Default::default()
3143 }
3144
3145 /// Sets the value of [search][crate::model::transcript_normalization::Entry::search].
3146 ///
3147 /// # Example
3148 /// ```ignore,no_run
3149 /// # use google_cloud_speech_v2::model::transcript_normalization::Entry;
3150 /// let x = Entry::new().set_search("example");
3151 /// ```
3152 pub fn set_search<T: std::convert::Into<std::string::String>>(mut self, v: T) -> Self {
3153 self.search = v.into();
3154 self
3155 }
3156
3157 /// Sets the value of [replace][crate::model::transcript_normalization::Entry::replace].
3158 ///
3159 /// # Example
3160 /// ```ignore,no_run
3161 /// # use google_cloud_speech_v2::model::transcript_normalization::Entry;
3162 /// let x = Entry::new().set_replace("example");
3163 /// ```
3164 pub fn set_replace<T: std::convert::Into<std::string::String>>(mut self, v: T) -> Self {
3165 self.replace = v.into();
3166 self
3167 }
3168
3169 /// Sets the value of [case_sensitive][crate::model::transcript_normalization::Entry::case_sensitive].
3170 ///
3171 /// # Example
3172 /// ```ignore,no_run
3173 /// # use google_cloud_speech_v2::model::transcript_normalization::Entry;
3174 /// let x = Entry::new().set_case_sensitive(true);
3175 /// ```
3176 pub fn set_case_sensitive<T: std::convert::Into<bool>>(mut self, v: T) -> Self {
3177 self.case_sensitive = v.into();
3178 self
3179 }
3180 }
3181
3182 impl wkt::message::Message for Entry {
3183 fn typename() -> &'static str {
3184 "type.googleapis.com/google.cloud.speech.v2.TranscriptNormalization.Entry"
3185 }
3186 }
3187}
3188
3189/// Translation configuration. Use to translate the given audio into text for the
3190/// desired language.
3191#[derive(Clone, Default, PartialEq)]
3192#[non_exhaustive]
3193pub struct TranslationConfig {
3194 /// Required. The language code to translate to.
3195 pub target_language: std::string::String,
3196
3197 pub(crate) _unknown_fields: serde_json::Map<std::string::String, serde_json::Value>,
3198}
3199
3200impl TranslationConfig {
3201 pub fn new() -> Self {
3202 std::default::Default::default()
3203 }
3204
3205 /// Sets the value of [target_language][crate::model::TranslationConfig::target_language].
3206 ///
3207 /// # Example
3208 /// ```ignore,no_run
3209 /// # use google_cloud_speech_v2::model::TranslationConfig;
3210 /// let x = TranslationConfig::new().set_target_language("example");
3211 /// ```
3212 pub fn set_target_language<T: std::convert::Into<std::string::String>>(mut self, v: T) -> Self {
3213 self.target_language = v.into();
3214 self
3215 }
3216}
3217
3218impl wkt::message::Message for TranslationConfig {
3219 fn typename() -> &'static str {
3220 "type.googleapis.com/google.cloud.speech.v2.TranslationConfig"
3221 }
3222}
3223
3224/// Provides "hints" to the speech recognizer to favor specific words and phrases
3225/// in the results. PhraseSets can be specified as an inline resource, or a
3226/// reference to an existing PhraseSet resource.
3227#[derive(Clone, Default, PartialEq)]
3228#[non_exhaustive]
3229pub struct SpeechAdaptation {
3230 /// A list of inline or referenced PhraseSets.
3231 pub phrase_sets: std::vec::Vec<crate::model::speech_adaptation::AdaptationPhraseSet>,
3232
3233 /// A list of inline CustomClasses. Existing CustomClass resources can be
3234 /// referenced directly in a PhraseSet.
3235 pub custom_classes: std::vec::Vec<crate::model::CustomClass>,
3236
3237 pub(crate) _unknown_fields: serde_json::Map<std::string::String, serde_json::Value>,
3238}
3239
3240impl SpeechAdaptation {
3241 pub fn new() -> Self {
3242 std::default::Default::default()
3243 }
3244
3245 /// Sets the value of [phrase_sets][crate::model::SpeechAdaptation::phrase_sets].
3246 ///
3247 /// # Example
3248 /// ```ignore,no_run
3249 /// # use google_cloud_speech_v2::model::SpeechAdaptation;
3250 /// use google_cloud_speech_v2::model::speech_adaptation::AdaptationPhraseSet;
3251 /// let x = SpeechAdaptation::new()
3252 /// .set_phrase_sets([
3253 /// AdaptationPhraseSet::default()/* use setters */,
3254 /// AdaptationPhraseSet::default()/* use (different) setters */,
3255 /// ]);
3256 /// ```
3257 pub fn set_phrase_sets<T, V>(mut self, v: T) -> Self
3258 where
3259 T: std::iter::IntoIterator<Item = V>,
3260 V: std::convert::Into<crate::model::speech_adaptation::AdaptationPhraseSet>,
3261 {
3262 use std::iter::Iterator;
3263 self.phrase_sets = v.into_iter().map(|i| i.into()).collect();
3264 self
3265 }
3266
3267 /// Sets the value of [custom_classes][crate::model::SpeechAdaptation::custom_classes].
3268 ///
3269 /// # Example
3270 /// ```ignore,no_run
3271 /// # use google_cloud_speech_v2::model::SpeechAdaptation;
3272 /// use google_cloud_speech_v2::model::CustomClass;
3273 /// let x = SpeechAdaptation::new()
3274 /// .set_custom_classes([
3275 /// CustomClass::default()/* use setters */,
3276 /// CustomClass::default()/* use (different) setters */,
3277 /// ]);
3278 /// ```
3279 pub fn set_custom_classes<T, V>(mut self, v: T) -> Self
3280 where
3281 T: std::iter::IntoIterator<Item = V>,
3282 V: std::convert::Into<crate::model::CustomClass>,
3283 {
3284 use std::iter::Iterator;
3285 self.custom_classes = v.into_iter().map(|i| i.into()).collect();
3286 self
3287 }
3288}
3289
3290impl wkt::message::Message for SpeechAdaptation {
3291 fn typename() -> &'static str {
3292 "type.googleapis.com/google.cloud.speech.v2.SpeechAdaptation"
3293 }
3294}
3295
3296/// Defines additional types related to [SpeechAdaptation].
3297pub mod speech_adaptation {
3298 #[allow(unused_imports)]
3299 use super::*;
3300
    /// A biasing PhraseSet, which can be either a string referencing the name of
    /// an existing PhraseSets resource, or an inline definition of a PhraseSet.
    #[derive(Clone, Default, PartialEq)]
    #[non_exhaustive]
    pub struct AdaptationPhraseSet {
        /// The PhraseSet to use: a oneof-style field holding either a resource
        /// reference or an inline definition. `None` means no branch is set.
        pub value:
            std::option::Option<crate::model::speech_adaptation::adaptation_phrase_set::Value>,

        // Preserves JSON fields this client version does not model, so unknown
        // data survives a deserialize/serialize round trip.
        pub(crate) _unknown_fields: serde_json::Map<std::string::String, serde_json::Value>,
    }
3311
3312 impl AdaptationPhraseSet {
3313 pub fn new() -> Self {
3314 std::default::Default::default()
3315 }
3316
3317 /// Sets the value of [value][crate::model::speech_adaptation::AdaptationPhraseSet::value].
3318 ///
3319 /// Note that all the setters affecting `value` are mutually
3320 /// exclusive.
3321 ///
3322 /// # Example
3323 /// ```ignore,no_run
3324 /// # use google_cloud_speech_v2::model::speech_adaptation::AdaptationPhraseSet;
3325 /// use google_cloud_speech_v2::model::speech_adaptation::adaptation_phrase_set::Value;
3326 /// let x = AdaptationPhraseSet::new().set_value(Some(Value::PhraseSet("example".to_string())));
3327 /// ```
3328 pub fn set_value<
3329 T: std::convert::Into<
3330 std::option::Option<
3331 crate::model::speech_adaptation::adaptation_phrase_set::Value,
3332 >,
3333 >,
3334 >(
3335 mut self,
3336 v: T,
3337 ) -> Self {
3338 self.value = v.into();
3339 self
3340 }
3341
3342 /// The value of [value][crate::model::speech_adaptation::AdaptationPhraseSet::value]
3343 /// if it holds a `PhraseSet`, `None` if the field is not set or
3344 /// holds a different branch.
3345 pub fn phrase_set(&self) -> std::option::Option<&std::string::String> {
3346 #[allow(unreachable_patterns)]
3347 self.value.as_ref().and_then(|v| match v {
3348 crate::model::speech_adaptation::adaptation_phrase_set::Value::PhraseSet(v) => {
3349 std::option::Option::Some(v)
3350 }
3351 _ => std::option::Option::None,
3352 })
3353 }
3354
3355 /// Sets the value of [value][crate::model::speech_adaptation::AdaptationPhraseSet::value]
3356 /// to hold a `PhraseSet`.
3357 ///
3358 /// Note that all the setters affecting `value` are
3359 /// mutually exclusive.
3360 ///
3361 /// # Example
3362 /// ```ignore,no_run
3363 /// # use google_cloud_speech_v2::model::speech_adaptation::AdaptationPhraseSet;
3364 /// let x = AdaptationPhraseSet::new().set_phrase_set("example");
3365 /// assert!(x.phrase_set().is_some());
3366 /// assert!(x.inline_phrase_set().is_none());
3367 /// ```
3368 pub fn set_phrase_set<T: std::convert::Into<std::string::String>>(mut self, v: T) -> Self {
3369 self.value = std::option::Option::Some(
3370 crate::model::speech_adaptation::adaptation_phrase_set::Value::PhraseSet(v.into()),
3371 );
3372 self
3373 }
3374
3375 /// The value of [value][crate::model::speech_adaptation::AdaptationPhraseSet::value]
3376 /// if it holds a `InlinePhraseSet`, `None` if the field is not set or
3377 /// holds a different branch.
3378 pub fn inline_phrase_set(
3379 &self,
3380 ) -> std::option::Option<&std::boxed::Box<crate::model::PhraseSet>> {
3381 #[allow(unreachable_patterns)]
3382 self.value.as_ref().and_then(|v| match v {
3383 crate::model::speech_adaptation::adaptation_phrase_set::Value::InlinePhraseSet(
3384 v,
3385 ) => std::option::Option::Some(v),
3386 _ => std::option::Option::None,
3387 })
3388 }
3389
3390 /// Sets the value of [value][crate::model::speech_adaptation::AdaptationPhraseSet::value]
3391 /// to hold a `InlinePhraseSet`.
3392 ///
3393 /// Note that all the setters affecting `value` are
3394 /// mutually exclusive.
3395 ///
3396 /// # Example
3397 /// ```ignore,no_run
3398 /// # use google_cloud_speech_v2::model::speech_adaptation::AdaptationPhraseSet;
3399 /// use google_cloud_speech_v2::model::PhraseSet;
3400 /// let x = AdaptationPhraseSet::new().set_inline_phrase_set(PhraseSet::default()/* use setters */);
3401 /// assert!(x.inline_phrase_set().is_some());
3402 /// assert!(x.phrase_set().is_none());
3403 /// ```
3404 pub fn set_inline_phrase_set<
3405 T: std::convert::Into<std::boxed::Box<crate::model::PhraseSet>>,
3406 >(
3407 mut self,
3408 v: T,
3409 ) -> Self {
3410 self.value = std::option::Option::Some(
3411 crate::model::speech_adaptation::adaptation_phrase_set::Value::InlinePhraseSet(
3412 v.into(),
3413 ),
3414 );
3415 self
3416 }
3417 }
3418
3419 impl wkt::message::Message for AdaptationPhraseSet {
3420 fn typename() -> &'static str {
3421 "type.googleapis.com/google.cloud.speech.v2.SpeechAdaptation.AdaptationPhraseSet"
3422 }
3423 }
3424
3425 /// Defines additional types related to [AdaptationPhraseSet].
3426 pub mod adaptation_phrase_set {
3427 #[allow(unused_imports)]
3428 use super::*;
3429
3430 #[derive(Clone, Debug, PartialEq)]
3431 #[non_exhaustive]
3432 pub enum Value {
3433 /// The name of an existing PhraseSet resource. The user must have read
3434 /// access to the resource and it must not be deleted.
3435 PhraseSet(std::string::String),
3436 /// An inline defined PhraseSet.
3437 InlinePhraseSet(std::boxed::Box<crate::model::PhraseSet>),
3438 }
3439 }
3440}
3441
/// Denoiser config. May not be supported for all models and may
/// have no effect.
#[derive(Clone, Default, PartialEq)]
#[non_exhaustive]
pub struct DenoiserConfig {
    /// Denoise audio before sending to the transcription model.
    pub denoise_audio: bool,

    /// Signal-to-Noise Ratio (SNR) threshold for the denoiser. Here SNR means the
    /// loudness of the speech signal. Audio with an SNR below this threshold,
    /// meaning the speech is too quiet, will be prevented from being sent to the
    /// transcription model.
    ///
    /// If snr_threshold=0, no filtering will be applied.
    pub snr_threshold: f32,

    // NOTE(review): holds wire fields not modeled above; presumably
    // round-tripped by the `serialize`/`deserialize` modules — confirm.
    pub(crate) _unknown_fields: serde_json::Map<std::string::String, serde_json::Value>,
}
3460
3461impl DenoiserConfig {
3462 pub fn new() -> Self {
3463 std::default::Default::default()
3464 }
3465
3466 /// Sets the value of [denoise_audio][crate::model::DenoiserConfig::denoise_audio].
3467 ///
3468 /// # Example
3469 /// ```ignore,no_run
3470 /// # use google_cloud_speech_v2::model::DenoiserConfig;
3471 /// let x = DenoiserConfig::new().set_denoise_audio(true);
3472 /// ```
3473 pub fn set_denoise_audio<T: std::convert::Into<bool>>(mut self, v: T) -> Self {
3474 self.denoise_audio = v.into();
3475 self
3476 }
3477
3478 /// Sets the value of [snr_threshold][crate::model::DenoiserConfig::snr_threshold].
3479 ///
3480 /// # Example
3481 /// ```ignore,no_run
3482 /// # use google_cloud_speech_v2::model::DenoiserConfig;
3483 /// let x = DenoiserConfig::new().set_snr_threshold(42.0);
3484 /// ```
3485 pub fn set_snr_threshold<T: std::convert::Into<f32>>(mut self, v: T) -> Self {
3486 self.snr_threshold = v.into();
3487 self
3488 }
3489}
3490
impl wkt::message::Message for DenoiserConfig {
    fn typename() -> &'static str {
        // Fully-qualified `type.googleapis.com/...` type URL identifying this
        // message on the wire.
        "type.googleapis.com/google.cloud.speech.v2.DenoiserConfig"
    }
}
3496
/// Provides information to the Recognizer that specifies how to process the
/// recognition request.
#[derive(Clone, Default, PartialEq)]
#[non_exhaustive]
pub struct RecognitionConfig {
    /// Optional. Which model to use for recognition requests. Select the model
    /// best suited to your domain to get best results.
    ///
    /// Guidance for choosing which model to use can be found in the [Transcription
    /// Models
    /// Documentation](https://cloud.google.com/speech-to-text/v2/docs/transcription-model)
    /// and the models supported in each region can be found in the [Table Of
    /// Supported
    /// Models](https://cloud.google.com/speech-to-text/v2/docs/speech-to-text-supported-languages).
    pub model: std::string::String,

    /// Optional. The language of the supplied audio as a
    /// [BCP-47](https://www.rfc-editor.org/rfc/bcp/bcp47.txt) language tag.
    /// Language tags are normalized to BCP-47 before they are used eg "en-us"
    /// becomes "en-US".
    ///
    /// Supported languages for each model are listed in the [Table of Supported
    /// Models](https://cloud.google.com/speech-to-text/v2/docs/speech-to-text-supported-languages).
    ///
    /// If additional languages are provided, recognition result will contain
    /// recognition in the most likely language detected. The recognition result
    /// will include the language tag of the language detected in the audio.
    pub language_codes: std::vec::Vec<std::string::String>,

    /// Speech recognition features to enable.
    pub features: std::option::Option<crate::model::RecognitionFeatures>,

    /// Speech adaptation context that weights recognizer predictions for specific
    /// words and phrases.
    pub adaptation: std::option::Option<crate::model::SpeechAdaptation>,

    /// Optional. Use transcription normalization to automatically replace parts of
    /// the transcript with phrases of your choosing. For StreamingRecognize, this
    /// normalization only applies to stable partial transcripts (stability > 0.8)
    /// and final transcripts.
    pub transcript_normalization: std::option::Option<crate::model::TranscriptNormalization>,

    /// Optional. Optional configuration used to automatically run translation on
    /// the given audio to the desired language for supported models.
    pub translation_config: std::option::Option<crate::model::TranslationConfig>,

    /// Optional. Optional denoiser config. May not be supported for all models
    /// and may have no effect.
    pub denoiser_config: std::option::Option<crate::model::DenoiserConfig>,

    /// Decoding parameters for audio being sent for recognition.
    pub decoding_config: std::option::Option<crate::model::recognition_config::DecodingConfig>,

    // NOTE(review): holds wire fields not modeled above; presumably
    // round-tripped by the `serialize`/`deserialize` modules — confirm.
    pub(crate) _unknown_fields: serde_json::Map<std::string::String, serde_json::Value>,
}
3552
3553impl RecognitionConfig {
3554 pub fn new() -> Self {
3555 std::default::Default::default()
3556 }
3557
3558 /// Sets the value of [model][crate::model::RecognitionConfig::model].
3559 ///
3560 /// # Example
3561 /// ```ignore,no_run
3562 /// # use google_cloud_speech_v2::model::RecognitionConfig;
3563 /// let x = RecognitionConfig::new().set_model("example");
3564 /// ```
3565 pub fn set_model<T: std::convert::Into<std::string::String>>(mut self, v: T) -> Self {
3566 self.model = v.into();
3567 self
3568 }
3569
3570 /// Sets the value of [language_codes][crate::model::RecognitionConfig::language_codes].
3571 ///
3572 /// # Example
3573 /// ```ignore,no_run
3574 /// # use google_cloud_speech_v2::model::RecognitionConfig;
3575 /// let x = RecognitionConfig::new().set_language_codes(["a", "b", "c"]);
3576 /// ```
3577 pub fn set_language_codes<T, V>(mut self, v: T) -> Self
3578 where
3579 T: std::iter::IntoIterator<Item = V>,
3580 V: std::convert::Into<std::string::String>,
3581 {
3582 use std::iter::Iterator;
3583 self.language_codes = v.into_iter().map(|i| i.into()).collect();
3584 self
3585 }
3586
3587 /// Sets the value of [features][crate::model::RecognitionConfig::features].
3588 ///
3589 /// # Example
3590 /// ```ignore,no_run
3591 /// # use google_cloud_speech_v2::model::RecognitionConfig;
3592 /// use google_cloud_speech_v2::model::RecognitionFeatures;
3593 /// let x = RecognitionConfig::new().set_features(RecognitionFeatures::default()/* use setters */);
3594 /// ```
3595 pub fn set_features<T>(mut self, v: T) -> Self
3596 where
3597 T: std::convert::Into<crate::model::RecognitionFeatures>,
3598 {
3599 self.features = std::option::Option::Some(v.into());
3600 self
3601 }
3602
3603 /// Sets or clears the value of [features][crate::model::RecognitionConfig::features].
3604 ///
3605 /// # Example
3606 /// ```ignore,no_run
3607 /// # use google_cloud_speech_v2::model::RecognitionConfig;
3608 /// use google_cloud_speech_v2::model::RecognitionFeatures;
3609 /// let x = RecognitionConfig::new().set_or_clear_features(Some(RecognitionFeatures::default()/* use setters */));
3610 /// let x = RecognitionConfig::new().set_or_clear_features(None::<RecognitionFeatures>);
3611 /// ```
3612 pub fn set_or_clear_features<T>(mut self, v: std::option::Option<T>) -> Self
3613 where
3614 T: std::convert::Into<crate::model::RecognitionFeatures>,
3615 {
3616 self.features = v.map(|x| x.into());
3617 self
3618 }
3619
3620 /// Sets the value of [adaptation][crate::model::RecognitionConfig::adaptation].
3621 ///
3622 /// # Example
3623 /// ```ignore,no_run
3624 /// # use google_cloud_speech_v2::model::RecognitionConfig;
3625 /// use google_cloud_speech_v2::model::SpeechAdaptation;
3626 /// let x = RecognitionConfig::new().set_adaptation(SpeechAdaptation::default()/* use setters */);
3627 /// ```
3628 pub fn set_adaptation<T>(mut self, v: T) -> Self
3629 where
3630 T: std::convert::Into<crate::model::SpeechAdaptation>,
3631 {
3632 self.adaptation = std::option::Option::Some(v.into());
3633 self
3634 }
3635
3636 /// Sets or clears the value of [adaptation][crate::model::RecognitionConfig::adaptation].
3637 ///
3638 /// # Example
3639 /// ```ignore,no_run
3640 /// # use google_cloud_speech_v2::model::RecognitionConfig;
3641 /// use google_cloud_speech_v2::model::SpeechAdaptation;
3642 /// let x = RecognitionConfig::new().set_or_clear_adaptation(Some(SpeechAdaptation::default()/* use setters */));
3643 /// let x = RecognitionConfig::new().set_or_clear_adaptation(None::<SpeechAdaptation>);
3644 /// ```
3645 pub fn set_or_clear_adaptation<T>(mut self, v: std::option::Option<T>) -> Self
3646 where
3647 T: std::convert::Into<crate::model::SpeechAdaptation>,
3648 {
3649 self.adaptation = v.map(|x| x.into());
3650 self
3651 }
3652
3653 /// Sets the value of [transcript_normalization][crate::model::RecognitionConfig::transcript_normalization].
3654 ///
3655 /// # Example
3656 /// ```ignore,no_run
3657 /// # use google_cloud_speech_v2::model::RecognitionConfig;
3658 /// use google_cloud_speech_v2::model::TranscriptNormalization;
3659 /// let x = RecognitionConfig::new().set_transcript_normalization(TranscriptNormalization::default()/* use setters */);
3660 /// ```
3661 pub fn set_transcript_normalization<T>(mut self, v: T) -> Self
3662 where
3663 T: std::convert::Into<crate::model::TranscriptNormalization>,
3664 {
3665 self.transcript_normalization = std::option::Option::Some(v.into());
3666 self
3667 }
3668
3669 /// Sets or clears the value of [transcript_normalization][crate::model::RecognitionConfig::transcript_normalization].
3670 ///
3671 /// # Example
3672 /// ```ignore,no_run
3673 /// # use google_cloud_speech_v2::model::RecognitionConfig;
3674 /// use google_cloud_speech_v2::model::TranscriptNormalization;
3675 /// let x = RecognitionConfig::new().set_or_clear_transcript_normalization(Some(TranscriptNormalization::default()/* use setters */));
3676 /// let x = RecognitionConfig::new().set_or_clear_transcript_normalization(None::<TranscriptNormalization>);
3677 /// ```
3678 pub fn set_or_clear_transcript_normalization<T>(mut self, v: std::option::Option<T>) -> Self
3679 where
3680 T: std::convert::Into<crate::model::TranscriptNormalization>,
3681 {
3682 self.transcript_normalization = v.map(|x| x.into());
3683 self
3684 }
3685
3686 /// Sets the value of [translation_config][crate::model::RecognitionConfig::translation_config].
3687 ///
3688 /// # Example
3689 /// ```ignore,no_run
3690 /// # use google_cloud_speech_v2::model::RecognitionConfig;
3691 /// use google_cloud_speech_v2::model::TranslationConfig;
3692 /// let x = RecognitionConfig::new().set_translation_config(TranslationConfig::default()/* use setters */);
3693 /// ```
3694 pub fn set_translation_config<T>(mut self, v: T) -> Self
3695 where
3696 T: std::convert::Into<crate::model::TranslationConfig>,
3697 {
3698 self.translation_config = std::option::Option::Some(v.into());
3699 self
3700 }
3701
3702 /// Sets or clears the value of [translation_config][crate::model::RecognitionConfig::translation_config].
3703 ///
3704 /// # Example
3705 /// ```ignore,no_run
3706 /// # use google_cloud_speech_v2::model::RecognitionConfig;
3707 /// use google_cloud_speech_v2::model::TranslationConfig;
3708 /// let x = RecognitionConfig::new().set_or_clear_translation_config(Some(TranslationConfig::default()/* use setters */));
3709 /// let x = RecognitionConfig::new().set_or_clear_translation_config(None::<TranslationConfig>);
3710 /// ```
3711 pub fn set_or_clear_translation_config<T>(mut self, v: std::option::Option<T>) -> Self
3712 where
3713 T: std::convert::Into<crate::model::TranslationConfig>,
3714 {
3715 self.translation_config = v.map(|x| x.into());
3716 self
3717 }
3718
3719 /// Sets the value of [denoiser_config][crate::model::RecognitionConfig::denoiser_config].
3720 ///
3721 /// # Example
3722 /// ```ignore,no_run
3723 /// # use google_cloud_speech_v2::model::RecognitionConfig;
3724 /// use google_cloud_speech_v2::model::DenoiserConfig;
3725 /// let x = RecognitionConfig::new().set_denoiser_config(DenoiserConfig::default()/* use setters */);
3726 /// ```
3727 pub fn set_denoiser_config<T>(mut self, v: T) -> Self
3728 where
3729 T: std::convert::Into<crate::model::DenoiserConfig>,
3730 {
3731 self.denoiser_config = std::option::Option::Some(v.into());
3732 self
3733 }
3734
3735 /// Sets or clears the value of [denoiser_config][crate::model::RecognitionConfig::denoiser_config].
3736 ///
3737 /// # Example
3738 /// ```ignore,no_run
3739 /// # use google_cloud_speech_v2::model::RecognitionConfig;
3740 /// use google_cloud_speech_v2::model::DenoiserConfig;
3741 /// let x = RecognitionConfig::new().set_or_clear_denoiser_config(Some(DenoiserConfig::default()/* use setters */));
3742 /// let x = RecognitionConfig::new().set_or_clear_denoiser_config(None::<DenoiserConfig>);
3743 /// ```
3744 pub fn set_or_clear_denoiser_config<T>(mut self, v: std::option::Option<T>) -> Self
3745 where
3746 T: std::convert::Into<crate::model::DenoiserConfig>,
3747 {
3748 self.denoiser_config = v.map(|x| x.into());
3749 self
3750 }
3751
3752 /// Sets the value of [decoding_config][crate::model::RecognitionConfig::decoding_config].
3753 ///
3754 /// Note that all the setters affecting `decoding_config` are mutually
3755 /// exclusive.
3756 ///
3757 /// # Example
3758 /// ```ignore,no_run
3759 /// # use google_cloud_speech_v2::model::RecognitionConfig;
3760 /// use google_cloud_speech_v2::model::AutoDetectDecodingConfig;
3761 /// let x = RecognitionConfig::new().set_decoding_config(Some(
3762 /// google_cloud_speech_v2::model::recognition_config::DecodingConfig::AutoDecodingConfig(AutoDetectDecodingConfig::default().into())));
3763 /// ```
3764 pub fn set_decoding_config<
3765 T: std::convert::Into<std::option::Option<crate::model::recognition_config::DecodingConfig>>,
3766 >(
3767 mut self,
3768 v: T,
3769 ) -> Self {
3770 self.decoding_config = v.into();
3771 self
3772 }
3773
3774 /// The value of [decoding_config][crate::model::RecognitionConfig::decoding_config]
3775 /// if it holds a `AutoDecodingConfig`, `None` if the field is not set or
3776 /// holds a different branch.
3777 pub fn auto_decoding_config(
3778 &self,
3779 ) -> std::option::Option<&std::boxed::Box<crate::model::AutoDetectDecodingConfig>> {
3780 #[allow(unreachable_patterns)]
3781 self.decoding_config.as_ref().and_then(|v| match v {
3782 crate::model::recognition_config::DecodingConfig::AutoDecodingConfig(v) => {
3783 std::option::Option::Some(v)
3784 }
3785 _ => std::option::Option::None,
3786 })
3787 }
3788
3789 /// Sets the value of [decoding_config][crate::model::RecognitionConfig::decoding_config]
3790 /// to hold a `AutoDecodingConfig`.
3791 ///
3792 /// Note that all the setters affecting `decoding_config` are
3793 /// mutually exclusive.
3794 ///
3795 /// # Example
3796 /// ```ignore,no_run
3797 /// # use google_cloud_speech_v2::model::RecognitionConfig;
3798 /// use google_cloud_speech_v2::model::AutoDetectDecodingConfig;
3799 /// let x = RecognitionConfig::new().set_auto_decoding_config(AutoDetectDecodingConfig::default()/* use setters */);
3800 /// assert!(x.auto_decoding_config().is_some());
3801 /// assert!(x.explicit_decoding_config().is_none());
3802 /// ```
3803 pub fn set_auto_decoding_config<
3804 T: std::convert::Into<std::boxed::Box<crate::model::AutoDetectDecodingConfig>>,
3805 >(
3806 mut self,
3807 v: T,
3808 ) -> Self {
3809 self.decoding_config = std::option::Option::Some(
3810 crate::model::recognition_config::DecodingConfig::AutoDecodingConfig(v.into()),
3811 );
3812 self
3813 }
3814
3815 /// The value of [decoding_config][crate::model::RecognitionConfig::decoding_config]
3816 /// if it holds a `ExplicitDecodingConfig`, `None` if the field is not set or
3817 /// holds a different branch.
3818 pub fn explicit_decoding_config(
3819 &self,
3820 ) -> std::option::Option<&std::boxed::Box<crate::model::ExplicitDecodingConfig>> {
3821 #[allow(unreachable_patterns)]
3822 self.decoding_config.as_ref().and_then(|v| match v {
3823 crate::model::recognition_config::DecodingConfig::ExplicitDecodingConfig(v) => {
3824 std::option::Option::Some(v)
3825 }
3826 _ => std::option::Option::None,
3827 })
3828 }
3829
3830 /// Sets the value of [decoding_config][crate::model::RecognitionConfig::decoding_config]
3831 /// to hold a `ExplicitDecodingConfig`.
3832 ///
3833 /// Note that all the setters affecting `decoding_config` are
3834 /// mutually exclusive.
3835 ///
3836 /// # Example
3837 /// ```ignore,no_run
3838 /// # use google_cloud_speech_v2::model::RecognitionConfig;
3839 /// use google_cloud_speech_v2::model::ExplicitDecodingConfig;
3840 /// let x = RecognitionConfig::new().set_explicit_decoding_config(ExplicitDecodingConfig::default()/* use setters */);
3841 /// assert!(x.explicit_decoding_config().is_some());
3842 /// assert!(x.auto_decoding_config().is_none());
3843 /// ```
3844 pub fn set_explicit_decoding_config<
3845 T: std::convert::Into<std::boxed::Box<crate::model::ExplicitDecodingConfig>>,
3846 >(
3847 mut self,
3848 v: T,
3849 ) -> Self {
3850 self.decoding_config = std::option::Option::Some(
3851 crate::model::recognition_config::DecodingConfig::ExplicitDecodingConfig(v.into()),
3852 );
3853 self
3854 }
3855}
3856
impl wkt::message::Message for RecognitionConfig {
    fn typename() -> &'static str {
        // Fully-qualified `type.googleapis.com/...` type URL identifying this
        // message on the wire.
        "type.googleapis.com/google.cloud.speech.v2.RecognitionConfig"
    }
}
3862
/// Defines additional types related to [RecognitionConfig].
pub mod recognition_config {
    #[allow(unused_imports)]
    use super::*;

    /// Decoding parameters for audio being sent for recognition.
    ///
    /// The `decoding_config` setters on [RecognitionConfig] are mutually
    /// exclusive, so at most one variant is held at a time. Variants are boxed
    /// to keep the enum (and its containing struct) small.
    #[derive(Clone, Debug, PartialEq)]
    #[non_exhaustive]
    pub enum DecodingConfig {
        /// Automatically detect decoding parameters.
        /// Preferred for supported formats.
        AutoDecodingConfig(std::boxed::Box<crate::model::AutoDetectDecodingConfig>),
        /// Explicitly specified decoding parameters.
        /// Required if using headerless PCM audio (linear16, mulaw, alaw).
        ExplicitDecodingConfig(std::boxed::Box<crate::model::ExplicitDecodingConfig>),
    }
}
3880
/// Request message for the
/// [Recognize][google.cloud.speech.v2.Speech.Recognize] method. Either
/// `content` or `uri` must be supplied. Supplying both or neither returns
/// [INVALID_ARGUMENT][google.rpc.Code.INVALID_ARGUMENT]. See [content
/// limits](https://cloud.google.com/speech-to-text/quotas#content).
///
/// [google.cloud.speech.v2.Speech.Recognize]: crate::client::Speech::recognize
#[derive(Clone, Default, PartialEq)]
#[non_exhaustive]
pub struct RecognizeRequest {
    /// Required. The name of the Recognizer to use during recognition. The
    /// expected format is
    /// `projects/{project}/locations/{location}/recognizers/{recognizer}`. The
    /// {recognizer} segment may be set to `_` to use an empty implicit Recognizer.
    pub recognizer: std::string::String,

    /// Features and audio metadata to use for the Automatic Speech Recognition.
    /// This field in combination with the
    /// [config_mask][google.cloud.speech.v2.RecognizeRequest.config_mask] field
    /// can be used to override parts of the
    /// [default_recognition_config][google.cloud.speech.v2.Recognizer.default_recognition_config]
    /// of the Recognizer resource.
    ///
    /// [google.cloud.speech.v2.RecognizeRequest.config_mask]: crate::model::RecognizeRequest::config_mask
    /// [google.cloud.speech.v2.Recognizer.default_recognition_config]: crate::model::Recognizer::default_recognition_config
    pub config: std::option::Option<crate::model::RecognitionConfig>,

    /// The list of fields in
    /// [config][google.cloud.speech.v2.RecognizeRequest.config] that override the
    /// values in the
    /// [default_recognition_config][google.cloud.speech.v2.Recognizer.default_recognition_config]
    /// of the recognizer during this recognition request. If no mask is provided,
    /// all non-default valued fields in
    /// [config][google.cloud.speech.v2.RecognizeRequest.config] override the
    /// values in the recognizer for this recognition request. If a mask is
    /// provided, only the fields listed in the mask override the config in the
    /// recognizer for this recognition request. If a wildcard (`*`) is provided,
    /// [config][google.cloud.speech.v2.RecognizeRequest.config] completely
    /// overrides and replaces the config in the recognizer for this recognition
    /// request.
    ///
    /// [google.cloud.speech.v2.RecognizeRequest.config]: crate::model::RecognizeRequest::config
    /// [google.cloud.speech.v2.Recognizer.default_recognition_config]: crate::model::Recognizer::default_recognition_config
    pub config_mask: std::option::Option<wkt::FieldMask>,

    /// The audio source, which is either inline content or a Google Cloud
    /// Storage URI.
    pub audio_source: std::option::Option<crate::model::recognize_request::AudioSource>,

    // NOTE(review): holds wire fields not modeled above; presumably
    // round-tripped by the `serialize`/`deserialize` modules — confirm.
    pub(crate) _unknown_fields: serde_json::Map<std::string::String, serde_json::Value>,
}
3932
3933impl RecognizeRequest {
3934 pub fn new() -> Self {
3935 std::default::Default::default()
3936 }
3937
3938 /// Sets the value of [recognizer][crate::model::RecognizeRequest::recognizer].
3939 ///
3940 /// # Example
3941 /// ```ignore,no_run
3942 /// # use google_cloud_speech_v2::model::RecognizeRequest;
3943 /// let x = RecognizeRequest::new().set_recognizer("example");
3944 /// ```
3945 pub fn set_recognizer<T: std::convert::Into<std::string::String>>(mut self, v: T) -> Self {
3946 self.recognizer = v.into();
3947 self
3948 }
3949
3950 /// Sets the value of [config][crate::model::RecognizeRequest::config].
3951 ///
3952 /// # Example
3953 /// ```ignore,no_run
3954 /// # use google_cloud_speech_v2::model::RecognizeRequest;
3955 /// use google_cloud_speech_v2::model::RecognitionConfig;
3956 /// let x = RecognizeRequest::new().set_config(RecognitionConfig::default()/* use setters */);
3957 /// ```
3958 pub fn set_config<T>(mut self, v: T) -> Self
3959 where
3960 T: std::convert::Into<crate::model::RecognitionConfig>,
3961 {
3962 self.config = std::option::Option::Some(v.into());
3963 self
3964 }
3965
3966 /// Sets or clears the value of [config][crate::model::RecognizeRequest::config].
3967 ///
3968 /// # Example
3969 /// ```ignore,no_run
3970 /// # use google_cloud_speech_v2::model::RecognizeRequest;
3971 /// use google_cloud_speech_v2::model::RecognitionConfig;
3972 /// let x = RecognizeRequest::new().set_or_clear_config(Some(RecognitionConfig::default()/* use setters */));
3973 /// let x = RecognizeRequest::new().set_or_clear_config(None::<RecognitionConfig>);
3974 /// ```
3975 pub fn set_or_clear_config<T>(mut self, v: std::option::Option<T>) -> Self
3976 where
3977 T: std::convert::Into<crate::model::RecognitionConfig>,
3978 {
3979 self.config = v.map(|x| x.into());
3980 self
3981 }
3982
3983 /// Sets the value of [config_mask][crate::model::RecognizeRequest::config_mask].
3984 ///
3985 /// # Example
3986 /// ```ignore,no_run
3987 /// # use google_cloud_speech_v2::model::RecognizeRequest;
3988 /// use wkt::FieldMask;
3989 /// let x = RecognizeRequest::new().set_config_mask(FieldMask::default()/* use setters */);
3990 /// ```
3991 pub fn set_config_mask<T>(mut self, v: T) -> Self
3992 where
3993 T: std::convert::Into<wkt::FieldMask>,
3994 {
3995 self.config_mask = std::option::Option::Some(v.into());
3996 self
3997 }
3998
3999 /// Sets or clears the value of [config_mask][crate::model::RecognizeRequest::config_mask].
4000 ///
4001 /// # Example
4002 /// ```ignore,no_run
4003 /// # use google_cloud_speech_v2::model::RecognizeRequest;
4004 /// use wkt::FieldMask;
4005 /// let x = RecognizeRequest::new().set_or_clear_config_mask(Some(FieldMask::default()/* use setters */));
4006 /// let x = RecognizeRequest::new().set_or_clear_config_mask(None::<FieldMask>);
4007 /// ```
4008 pub fn set_or_clear_config_mask<T>(mut self, v: std::option::Option<T>) -> Self
4009 where
4010 T: std::convert::Into<wkt::FieldMask>,
4011 {
4012 self.config_mask = v.map(|x| x.into());
4013 self
4014 }
4015
4016 /// Sets the value of [audio_source][crate::model::RecognizeRequest::audio_source].
4017 ///
4018 /// Note that all the setters affecting `audio_source` are mutually
4019 /// exclusive.
4020 ///
4021 /// # Example
4022 /// ```ignore,no_run
4023 /// # use google_cloud_speech_v2::model::RecognizeRequest;
4024 /// use google_cloud_speech_v2::model::recognize_request::AudioSource;
4025 /// let x = RecognizeRequest::new().set_audio_source(Some(AudioSource::Content(bytes::Bytes::from_static(b"example"))));
4026 /// ```
4027 pub fn set_audio_source<
4028 T: std::convert::Into<std::option::Option<crate::model::recognize_request::AudioSource>>,
4029 >(
4030 mut self,
4031 v: T,
4032 ) -> Self {
4033 self.audio_source = v.into();
4034 self
4035 }
4036
4037 /// The value of [audio_source][crate::model::RecognizeRequest::audio_source]
4038 /// if it holds a `Content`, `None` if the field is not set or
4039 /// holds a different branch.
4040 pub fn content(&self) -> std::option::Option<&::bytes::Bytes> {
4041 #[allow(unreachable_patterns)]
4042 self.audio_source.as_ref().and_then(|v| match v {
4043 crate::model::recognize_request::AudioSource::Content(v) => {
4044 std::option::Option::Some(v)
4045 }
4046 _ => std::option::Option::None,
4047 })
4048 }
4049
4050 /// Sets the value of [audio_source][crate::model::RecognizeRequest::audio_source]
4051 /// to hold a `Content`.
4052 ///
4053 /// Note that all the setters affecting `audio_source` are
4054 /// mutually exclusive.
4055 ///
4056 /// # Example
4057 /// ```ignore,no_run
4058 /// # use google_cloud_speech_v2::model::RecognizeRequest;
4059 /// let x = RecognizeRequest::new().set_content(bytes::Bytes::from_static(b"example"));
4060 /// assert!(x.content().is_some());
4061 /// assert!(x.uri().is_none());
4062 /// ```
4063 pub fn set_content<T: std::convert::Into<::bytes::Bytes>>(mut self, v: T) -> Self {
4064 self.audio_source = std::option::Option::Some(
4065 crate::model::recognize_request::AudioSource::Content(v.into()),
4066 );
4067 self
4068 }
4069
4070 /// The value of [audio_source][crate::model::RecognizeRequest::audio_source]
4071 /// if it holds a `Uri`, `None` if the field is not set or
4072 /// holds a different branch.
4073 pub fn uri(&self) -> std::option::Option<&std::string::String> {
4074 #[allow(unreachable_patterns)]
4075 self.audio_source.as_ref().and_then(|v| match v {
4076 crate::model::recognize_request::AudioSource::Uri(v) => std::option::Option::Some(v),
4077 _ => std::option::Option::None,
4078 })
4079 }
4080
4081 /// Sets the value of [audio_source][crate::model::RecognizeRequest::audio_source]
4082 /// to hold a `Uri`.
4083 ///
4084 /// Note that all the setters affecting `audio_source` are
4085 /// mutually exclusive.
4086 ///
4087 /// # Example
4088 /// ```ignore,no_run
4089 /// # use google_cloud_speech_v2::model::RecognizeRequest;
4090 /// let x = RecognizeRequest::new().set_uri("example");
4091 /// assert!(x.uri().is_some());
4092 /// assert!(x.content().is_none());
4093 /// ```
4094 pub fn set_uri<T: std::convert::Into<std::string::String>>(mut self, v: T) -> Self {
4095 self.audio_source =
4096 std::option::Option::Some(crate::model::recognize_request::AudioSource::Uri(v.into()));
4097 self
4098 }
4099}
4100
// Identifies this message by its fully qualified protobuf name, prefixed with
// the `type.googleapis.com` domain (see `wkt::message::Message`).
impl wkt::message::Message for RecognizeRequest {
    fn typename() -> &'static str {
        "type.googleapis.com/google.cloud.speech.v2.RecognizeRequest"
    }
}
4106
/// Defines additional types related to [RecognizeRequest].
pub mod recognize_request {
    #[allow(unused_imports)]
    use super::*;

    /// The audio source, which is either inline content or a Google Cloud
    /// Storage URI.
    ///
    /// This models a protobuf `oneof`: at most one branch is held at a time
    /// (the `set_content`/`set_uri` setters on [RecognizeRequest] are mutually
    /// exclusive). `#[non_exhaustive]` allows new branches to be added without
    /// a breaking change.
    #[derive(Clone, Debug, PartialEq)]
    #[non_exhaustive]
    pub enum AudioSource {
        /// The audio data bytes encoded as specified in
        /// [RecognitionConfig][google.cloud.speech.v2.RecognitionConfig]. As
        /// with all bytes fields, proto buffers use a pure binary representation,
        /// whereas JSON representations use base64.
        ///
        /// [google.cloud.speech.v2.RecognitionConfig]: crate::model::RecognitionConfig
        Content(::bytes::Bytes),
        /// URI that points to a file that contains audio data bytes as specified in
        /// [RecognitionConfig][google.cloud.speech.v2.RecognitionConfig]. The file
        /// must not be compressed (for example, gzip). Currently, only Google Cloud
        /// Storage URIs are supported, which must be specified in the following
        /// format: `gs://bucket_name/object_name` (other URI formats return
        /// [INVALID_ARGUMENT][google.rpc.Code.INVALID_ARGUMENT]). For more
        /// information, see [Request
        /// URIs](https://cloud.google.com/storage/docs/reference-uris).
        ///
        /// [google.cloud.speech.v2.RecognitionConfig]: crate::model::RecognitionConfig
        Uri(std::string::String),
    }
}
4137
/// Metadata about the recognition request and response.
#[derive(Clone, Default, PartialEq)]
#[non_exhaustive]
pub struct RecognitionResponseMetadata {
    /// Global request identifier auto-generated by the API.
    pub request_id: std::string::String,

    /// When available, billed audio seconds for the corresponding request.
    pub total_billed_duration: std::option::Option<wkt::Duration>,

    /// Optional. Output only. Provides the prompt used for the recognition
    /// request.
    pub prompt: std::option::Option<std::string::String>,

    // Catch-all for service fields this struct does not model; presumably
    // preserved by the `serialize`/`deserialize` modules so data isn't dropped.
    pub(crate) _unknown_fields: serde_json::Map<std::string::String, serde_json::Value>,
}
4154
4155impl RecognitionResponseMetadata {
4156 pub fn new() -> Self {
4157 std::default::Default::default()
4158 }
4159
4160 /// Sets the value of [request_id][crate::model::RecognitionResponseMetadata::request_id].
4161 ///
4162 /// # Example
4163 /// ```ignore,no_run
4164 /// # use google_cloud_speech_v2::model::RecognitionResponseMetadata;
4165 /// let x = RecognitionResponseMetadata::new().set_request_id("example");
4166 /// ```
4167 pub fn set_request_id<T: std::convert::Into<std::string::String>>(mut self, v: T) -> Self {
4168 self.request_id = v.into();
4169 self
4170 }
4171
4172 /// Sets the value of [total_billed_duration][crate::model::RecognitionResponseMetadata::total_billed_duration].
4173 ///
4174 /// # Example
4175 /// ```ignore,no_run
4176 /// # use google_cloud_speech_v2::model::RecognitionResponseMetadata;
4177 /// use wkt::Duration;
4178 /// let x = RecognitionResponseMetadata::new().set_total_billed_duration(Duration::default()/* use setters */);
4179 /// ```
4180 pub fn set_total_billed_duration<T>(mut self, v: T) -> Self
4181 where
4182 T: std::convert::Into<wkt::Duration>,
4183 {
4184 self.total_billed_duration = std::option::Option::Some(v.into());
4185 self
4186 }
4187
4188 /// Sets or clears the value of [total_billed_duration][crate::model::RecognitionResponseMetadata::total_billed_duration].
4189 ///
4190 /// # Example
4191 /// ```ignore,no_run
4192 /// # use google_cloud_speech_v2::model::RecognitionResponseMetadata;
4193 /// use wkt::Duration;
4194 /// let x = RecognitionResponseMetadata::new().set_or_clear_total_billed_duration(Some(Duration::default()/* use setters */));
4195 /// let x = RecognitionResponseMetadata::new().set_or_clear_total_billed_duration(None::<Duration>);
4196 /// ```
4197 pub fn set_or_clear_total_billed_duration<T>(mut self, v: std::option::Option<T>) -> Self
4198 where
4199 T: std::convert::Into<wkt::Duration>,
4200 {
4201 self.total_billed_duration = v.map(|x| x.into());
4202 self
4203 }
4204
4205 /// Sets the value of [prompt][crate::model::RecognitionResponseMetadata::prompt].
4206 ///
4207 /// # Example
4208 /// ```ignore,no_run
4209 /// # use google_cloud_speech_v2::model::RecognitionResponseMetadata;
4210 /// let x = RecognitionResponseMetadata::new().set_prompt("example");
4211 /// ```
4212 pub fn set_prompt<T>(mut self, v: T) -> Self
4213 where
4214 T: std::convert::Into<std::string::String>,
4215 {
4216 self.prompt = std::option::Option::Some(v.into());
4217 self
4218 }
4219
4220 /// Sets or clears the value of [prompt][crate::model::RecognitionResponseMetadata::prompt].
4221 ///
4222 /// # Example
4223 /// ```ignore,no_run
4224 /// # use google_cloud_speech_v2::model::RecognitionResponseMetadata;
4225 /// let x = RecognitionResponseMetadata::new().set_or_clear_prompt(Some("example"));
4226 /// let x = RecognitionResponseMetadata::new().set_or_clear_prompt(None::<String>);
4227 /// ```
4228 pub fn set_or_clear_prompt<T>(mut self, v: std::option::Option<T>) -> Self
4229 where
4230 T: std::convert::Into<std::string::String>,
4231 {
4232 self.prompt = v.map(|x| x.into());
4233 self
4234 }
4235}
4236
// Identifies this message by its fully qualified protobuf name, prefixed with
// the `type.googleapis.com` domain (see `wkt::message::Message`).
impl wkt::message::Message for RecognitionResponseMetadata {
    fn typename() -> &'static str {
        "type.googleapis.com/google.cloud.speech.v2.RecognitionResponseMetadata"
    }
}
4242
/// Alternative hypotheses (a.k.a. n-best list).
#[derive(Clone, Default, PartialEq)]
#[non_exhaustive]
pub struct SpeechRecognitionAlternative {
    /// Transcript text representing the words that the user spoke.
    pub transcript: std::string::String,

    /// The confidence estimate between 0.0 and 1.0. A higher number
    /// indicates an estimated greater likelihood that the recognized words are
    /// correct. This field is set only for the top alternative of a non-streaming
    /// result or, of a streaming result where
    /// [is_final][google.cloud.speech.v2.StreamingRecognitionResult.is_final] is
    /// set to `true`. This field is not guaranteed to be accurate and users should
    /// not rely on it to be always provided. The default of 0.0 is a sentinel
    /// value indicating `confidence` was not set.
    ///
    /// [google.cloud.speech.v2.StreamingRecognitionResult.is_final]: crate::model::StreamingRecognitionResult::is_final
    pub confidence: f32,

    /// A list of word-specific information for each recognized word.
    /// When the
    /// [SpeakerDiarizationConfig][google.cloud.speech.v2.SpeakerDiarizationConfig]
    /// is set, you will see all the words from the beginning of the audio.
    ///
    /// [google.cloud.speech.v2.SpeakerDiarizationConfig]: crate::model::SpeakerDiarizationConfig
    pub words: std::vec::Vec<crate::model::WordInfo>,

    // Catch-all for service fields this struct does not model; presumably
    // preserved by the `serialize`/`deserialize` modules so data isn't dropped.
    pub(crate) _unknown_fields: serde_json::Map<std::string::String, serde_json::Value>,
}
4272
4273impl SpeechRecognitionAlternative {
4274 pub fn new() -> Self {
4275 std::default::Default::default()
4276 }
4277
4278 /// Sets the value of [transcript][crate::model::SpeechRecognitionAlternative::transcript].
4279 ///
4280 /// # Example
4281 /// ```ignore,no_run
4282 /// # use google_cloud_speech_v2::model::SpeechRecognitionAlternative;
4283 /// let x = SpeechRecognitionAlternative::new().set_transcript("example");
4284 /// ```
4285 pub fn set_transcript<T: std::convert::Into<std::string::String>>(mut self, v: T) -> Self {
4286 self.transcript = v.into();
4287 self
4288 }
4289
4290 /// Sets the value of [confidence][crate::model::SpeechRecognitionAlternative::confidence].
4291 ///
4292 /// # Example
4293 /// ```ignore,no_run
4294 /// # use google_cloud_speech_v2::model::SpeechRecognitionAlternative;
4295 /// let x = SpeechRecognitionAlternative::new().set_confidence(42.0);
4296 /// ```
4297 pub fn set_confidence<T: std::convert::Into<f32>>(mut self, v: T) -> Self {
4298 self.confidence = v.into();
4299 self
4300 }
4301
4302 /// Sets the value of [words][crate::model::SpeechRecognitionAlternative::words].
4303 ///
4304 /// # Example
4305 /// ```ignore,no_run
4306 /// # use google_cloud_speech_v2::model::SpeechRecognitionAlternative;
4307 /// use google_cloud_speech_v2::model::WordInfo;
4308 /// let x = SpeechRecognitionAlternative::new()
4309 /// .set_words([
4310 /// WordInfo::default()/* use setters */,
4311 /// WordInfo::default()/* use (different) setters */,
4312 /// ]);
4313 /// ```
4314 pub fn set_words<T, V>(mut self, v: T) -> Self
4315 where
4316 T: std::iter::IntoIterator<Item = V>,
4317 V: std::convert::Into<crate::model::WordInfo>,
4318 {
4319 use std::iter::Iterator;
4320 self.words = v.into_iter().map(|i| i.into()).collect();
4321 self
4322 }
4323}
4324
// Identifies this message by its fully qualified protobuf name, prefixed with
// the `type.googleapis.com` domain (see `wkt::message::Message`).
impl wkt::message::Message for SpeechRecognitionAlternative {
    fn typename() -> &'static str {
        "type.googleapis.com/google.cloud.speech.v2.SpeechRecognitionAlternative"
    }
}
4330
/// Word-specific information for recognized words.
#[derive(Clone, Default, PartialEq)]
#[non_exhaustive]
pub struct WordInfo {
    /// Time offset relative to the beginning of the audio,
    /// and corresponding to the start of the spoken word.
    /// This field is only set if
    /// [enable_word_time_offsets][google.cloud.speech.v2.RecognitionFeatures.enable_word_time_offsets]
    /// is `true` and only in the top hypothesis. This is an experimental feature
    /// and the accuracy of the time offset can vary.
    ///
    /// [google.cloud.speech.v2.RecognitionFeatures.enable_word_time_offsets]: crate::model::RecognitionFeatures::enable_word_time_offsets
    pub start_offset: std::option::Option<wkt::Duration>,

    /// Time offset relative to the beginning of the audio,
    /// and corresponding to the end of the spoken word.
    /// This field is only set if
    /// [enable_word_time_offsets][google.cloud.speech.v2.RecognitionFeatures.enable_word_time_offsets]
    /// is `true` and only in the top hypothesis. This is an experimental feature
    /// and the accuracy of the time offset can vary.
    ///
    /// [google.cloud.speech.v2.RecognitionFeatures.enable_word_time_offsets]: crate::model::RecognitionFeatures::enable_word_time_offsets
    pub end_offset: std::option::Option<wkt::Duration>,

    /// The word corresponding to this set of information.
    pub word: std::string::String,

    /// The confidence estimate between 0.0 and 1.0. A higher number
    /// indicates an estimated greater likelihood that the recognized words are
    /// correct. This field is set only for the top alternative of a non-streaming
    /// result or, of a streaming result where
    /// [is_final][google.cloud.speech.v2.StreamingRecognitionResult.is_final] is
    /// set to `true`. This field is not guaranteed to be accurate and users should
    /// not rely on it to be always provided. The default of 0.0 is a sentinel
    /// value indicating `confidence` was not set.
    ///
    /// [google.cloud.speech.v2.StreamingRecognitionResult.is_final]: crate::model::StreamingRecognitionResult::is_final
    pub confidence: f32,

    /// A distinct label is assigned for every speaker within the audio. This field
    /// specifies which one of those speakers was detected to have spoken this
    /// word. `speaker_label` is set if
    /// [SpeakerDiarizationConfig][google.cloud.speech.v2.SpeakerDiarizationConfig]
    /// is given and only in the top alternative.
    ///
    /// [google.cloud.speech.v2.SpeakerDiarizationConfig]: crate::model::SpeakerDiarizationConfig
    pub speaker_label: std::string::String,

    // Catch-all for service fields this struct does not model; presumably
    // preserved by the `serialize`/`deserialize` modules so data isn't dropped.
    pub(crate) _unknown_fields: serde_json::Map<std::string::String, serde_json::Value>,
}
4381
4382impl WordInfo {
4383 pub fn new() -> Self {
4384 std::default::Default::default()
4385 }
4386
4387 /// Sets the value of [start_offset][crate::model::WordInfo::start_offset].
4388 ///
4389 /// # Example
4390 /// ```ignore,no_run
4391 /// # use google_cloud_speech_v2::model::WordInfo;
4392 /// use wkt::Duration;
4393 /// let x = WordInfo::new().set_start_offset(Duration::default()/* use setters */);
4394 /// ```
4395 pub fn set_start_offset<T>(mut self, v: T) -> Self
4396 where
4397 T: std::convert::Into<wkt::Duration>,
4398 {
4399 self.start_offset = std::option::Option::Some(v.into());
4400 self
4401 }
4402
4403 /// Sets or clears the value of [start_offset][crate::model::WordInfo::start_offset].
4404 ///
4405 /// # Example
4406 /// ```ignore,no_run
4407 /// # use google_cloud_speech_v2::model::WordInfo;
4408 /// use wkt::Duration;
4409 /// let x = WordInfo::new().set_or_clear_start_offset(Some(Duration::default()/* use setters */));
4410 /// let x = WordInfo::new().set_or_clear_start_offset(None::<Duration>);
4411 /// ```
4412 pub fn set_or_clear_start_offset<T>(mut self, v: std::option::Option<T>) -> Self
4413 where
4414 T: std::convert::Into<wkt::Duration>,
4415 {
4416 self.start_offset = v.map(|x| x.into());
4417 self
4418 }
4419
4420 /// Sets the value of [end_offset][crate::model::WordInfo::end_offset].
4421 ///
4422 /// # Example
4423 /// ```ignore,no_run
4424 /// # use google_cloud_speech_v2::model::WordInfo;
4425 /// use wkt::Duration;
4426 /// let x = WordInfo::new().set_end_offset(Duration::default()/* use setters */);
4427 /// ```
4428 pub fn set_end_offset<T>(mut self, v: T) -> Self
4429 where
4430 T: std::convert::Into<wkt::Duration>,
4431 {
4432 self.end_offset = std::option::Option::Some(v.into());
4433 self
4434 }
4435
4436 /// Sets or clears the value of [end_offset][crate::model::WordInfo::end_offset].
4437 ///
4438 /// # Example
4439 /// ```ignore,no_run
4440 /// # use google_cloud_speech_v2::model::WordInfo;
4441 /// use wkt::Duration;
4442 /// let x = WordInfo::new().set_or_clear_end_offset(Some(Duration::default()/* use setters */));
4443 /// let x = WordInfo::new().set_or_clear_end_offset(None::<Duration>);
4444 /// ```
4445 pub fn set_or_clear_end_offset<T>(mut self, v: std::option::Option<T>) -> Self
4446 where
4447 T: std::convert::Into<wkt::Duration>,
4448 {
4449 self.end_offset = v.map(|x| x.into());
4450 self
4451 }
4452
4453 /// Sets the value of [word][crate::model::WordInfo::word].
4454 ///
4455 /// # Example
4456 /// ```ignore,no_run
4457 /// # use google_cloud_speech_v2::model::WordInfo;
4458 /// let x = WordInfo::new().set_word("example");
4459 /// ```
4460 pub fn set_word<T: std::convert::Into<std::string::String>>(mut self, v: T) -> Self {
4461 self.word = v.into();
4462 self
4463 }
4464
4465 /// Sets the value of [confidence][crate::model::WordInfo::confidence].
4466 ///
4467 /// # Example
4468 /// ```ignore,no_run
4469 /// # use google_cloud_speech_v2::model::WordInfo;
4470 /// let x = WordInfo::new().set_confidence(42.0);
4471 /// ```
4472 pub fn set_confidence<T: std::convert::Into<f32>>(mut self, v: T) -> Self {
4473 self.confidence = v.into();
4474 self
4475 }
4476
4477 /// Sets the value of [speaker_label][crate::model::WordInfo::speaker_label].
4478 ///
4479 /// # Example
4480 /// ```ignore,no_run
4481 /// # use google_cloud_speech_v2::model::WordInfo;
4482 /// let x = WordInfo::new().set_speaker_label("example");
4483 /// ```
4484 pub fn set_speaker_label<T: std::convert::Into<std::string::String>>(mut self, v: T) -> Self {
4485 self.speaker_label = v.into();
4486 self
4487 }
4488}
4489
// Identifies this message by its fully qualified protobuf name, prefixed with
// the `type.googleapis.com` domain (see `wkt::message::Message`).
impl wkt::message::Message for WordInfo {
    fn typename() -> &'static str {
        "type.googleapis.com/google.cloud.speech.v2.WordInfo"
    }
}
4495
/// A speech recognition result corresponding to a portion of the audio.
#[derive(Clone, Default, PartialEq)]
#[non_exhaustive]
pub struct SpeechRecognitionResult {
    /// May contain one or more recognition hypotheses. These alternatives are
    /// ordered in terms of accuracy, with the top (first) alternative being the
    /// most probable, as ranked by the recognizer.
    pub alternatives: std::vec::Vec<crate::model::SpeechRecognitionAlternative>,

    /// For multi-channel audio, this is the channel number corresponding to the
    /// recognized result for the audio from that channel.
    /// For `audio_channel_count` = `N`, its output values can range from `1` to
    /// `N`.
    pub channel_tag: i32,

    /// Time offset of the end of this result relative to the beginning of the
    /// audio.
    pub result_end_offset: std::option::Option<wkt::Duration>,

    /// Output only. The [BCP-47](https://www.rfc-editor.org/rfc/bcp/bcp47.txt)
    /// language tag of the language in this result. This language code was
    /// detected to have the most likelihood of being spoken in the audio.
    pub language_code: std::string::String,

    // Catch-all for service fields this struct does not model; presumably
    // preserved by the `serialize`/`deserialize` modules so data isn't dropped.
    pub(crate) _unknown_fields: serde_json::Map<std::string::String, serde_json::Value>,
}
4522
4523impl SpeechRecognitionResult {
4524 pub fn new() -> Self {
4525 std::default::Default::default()
4526 }
4527
4528 /// Sets the value of [alternatives][crate::model::SpeechRecognitionResult::alternatives].
4529 ///
4530 /// # Example
4531 /// ```ignore,no_run
4532 /// # use google_cloud_speech_v2::model::SpeechRecognitionResult;
4533 /// use google_cloud_speech_v2::model::SpeechRecognitionAlternative;
4534 /// let x = SpeechRecognitionResult::new()
4535 /// .set_alternatives([
4536 /// SpeechRecognitionAlternative::default()/* use setters */,
4537 /// SpeechRecognitionAlternative::default()/* use (different) setters */,
4538 /// ]);
4539 /// ```
4540 pub fn set_alternatives<T, V>(mut self, v: T) -> Self
4541 where
4542 T: std::iter::IntoIterator<Item = V>,
4543 V: std::convert::Into<crate::model::SpeechRecognitionAlternative>,
4544 {
4545 use std::iter::Iterator;
4546 self.alternatives = v.into_iter().map(|i| i.into()).collect();
4547 self
4548 }
4549
4550 /// Sets the value of [channel_tag][crate::model::SpeechRecognitionResult::channel_tag].
4551 ///
4552 /// # Example
4553 /// ```ignore,no_run
4554 /// # use google_cloud_speech_v2::model::SpeechRecognitionResult;
4555 /// let x = SpeechRecognitionResult::new().set_channel_tag(42);
4556 /// ```
4557 pub fn set_channel_tag<T: std::convert::Into<i32>>(mut self, v: T) -> Self {
4558 self.channel_tag = v.into();
4559 self
4560 }
4561
4562 /// Sets the value of [result_end_offset][crate::model::SpeechRecognitionResult::result_end_offset].
4563 ///
4564 /// # Example
4565 /// ```ignore,no_run
4566 /// # use google_cloud_speech_v2::model::SpeechRecognitionResult;
4567 /// use wkt::Duration;
4568 /// let x = SpeechRecognitionResult::new().set_result_end_offset(Duration::default()/* use setters */);
4569 /// ```
4570 pub fn set_result_end_offset<T>(mut self, v: T) -> Self
4571 where
4572 T: std::convert::Into<wkt::Duration>,
4573 {
4574 self.result_end_offset = std::option::Option::Some(v.into());
4575 self
4576 }
4577
4578 /// Sets or clears the value of [result_end_offset][crate::model::SpeechRecognitionResult::result_end_offset].
4579 ///
4580 /// # Example
4581 /// ```ignore,no_run
4582 /// # use google_cloud_speech_v2::model::SpeechRecognitionResult;
4583 /// use wkt::Duration;
4584 /// let x = SpeechRecognitionResult::new().set_or_clear_result_end_offset(Some(Duration::default()/* use setters */));
4585 /// let x = SpeechRecognitionResult::new().set_or_clear_result_end_offset(None::<Duration>);
4586 /// ```
4587 pub fn set_or_clear_result_end_offset<T>(mut self, v: std::option::Option<T>) -> Self
4588 where
4589 T: std::convert::Into<wkt::Duration>,
4590 {
4591 self.result_end_offset = v.map(|x| x.into());
4592 self
4593 }
4594
4595 /// Sets the value of [language_code][crate::model::SpeechRecognitionResult::language_code].
4596 ///
4597 /// # Example
4598 /// ```ignore,no_run
4599 /// # use google_cloud_speech_v2::model::SpeechRecognitionResult;
4600 /// let x = SpeechRecognitionResult::new().set_language_code("example");
4601 /// ```
4602 pub fn set_language_code<T: std::convert::Into<std::string::String>>(mut self, v: T) -> Self {
4603 self.language_code = v.into();
4604 self
4605 }
4606}
4607
// Identifies this message by its fully qualified protobuf name, prefixed with
// the `type.googleapis.com` domain (see `wkt::message::Message`).
impl wkt::message::Message for SpeechRecognitionResult {
    fn typename() -> &'static str {
        "type.googleapis.com/google.cloud.speech.v2.SpeechRecognitionResult"
    }
}
4613
/// Response message for the
/// [Recognize][google.cloud.speech.v2.Speech.Recognize] method.
///
/// [google.cloud.speech.v2.Speech.Recognize]: crate::client::Speech::recognize
#[derive(Clone, Default, PartialEq)]
#[non_exhaustive]
pub struct RecognizeResponse {
    /// Sequential list of transcription results corresponding to sequential
    /// portions of audio.
    pub results: std::vec::Vec<crate::model::SpeechRecognitionResult>,

    /// Metadata about the recognition.
    pub metadata: std::option::Option<crate::model::RecognitionResponseMetadata>,

    // Catch-all for service fields this struct does not model; presumably
    // preserved by the `serialize`/`deserialize` modules so data isn't dropped.
    pub(crate) _unknown_fields: serde_json::Map<std::string::String, serde_json::Value>,
}
4630
4631impl RecognizeResponse {
4632 pub fn new() -> Self {
4633 std::default::Default::default()
4634 }
4635
4636 /// Sets the value of [results][crate::model::RecognizeResponse::results].
4637 ///
4638 /// # Example
4639 /// ```ignore,no_run
4640 /// # use google_cloud_speech_v2::model::RecognizeResponse;
4641 /// use google_cloud_speech_v2::model::SpeechRecognitionResult;
4642 /// let x = RecognizeResponse::new()
4643 /// .set_results([
4644 /// SpeechRecognitionResult::default()/* use setters */,
4645 /// SpeechRecognitionResult::default()/* use (different) setters */,
4646 /// ]);
4647 /// ```
4648 pub fn set_results<T, V>(mut self, v: T) -> Self
4649 where
4650 T: std::iter::IntoIterator<Item = V>,
4651 V: std::convert::Into<crate::model::SpeechRecognitionResult>,
4652 {
4653 use std::iter::Iterator;
4654 self.results = v.into_iter().map(|i| i.into()).collect();
4655 self
4656 }
4657
4658 /// Sets the value of [metadata][crate::model::RecognizeResponse::metadata].
4659 ///
4660 /// # Example
4661 /// ```ignore,no_run
4662 /// # use google_cloud_speech_v2::model::RecognizeResponse;
4663 /// use google_cloud_speech_v2::model::RecognitionResponseMetadata;
4664 /// let x = RecognizeResponse::new().set_metadata(RecognitionResponseMetadata::default()/* use setters */);
4665 /// ```
4666 pub fn set_metadata<T>(mut self, v: T) -> Self
4667 where
4668 T: std::convert::Into<crate::model::RecognitionResponseMetadata>,
4669 {
4670 self.metadata = std::option::Option::Some(v.into());
4671 self
4672 }
4673
4674 /// Sets or clears the value of [metadata][crate::model::RecognizeResponse::metadata].
4675 ///
4676 /// # Example
4677 /// ```ignore,no_run
4678 /// # use google_cloud_speech_v2::model::RecognizeResponse;
4679 /// use google_cloud_speech_v2::model::RecognitionResponseMetadata;
4680 /// let x = RecognizeResponse::new().set_or_clear_metadata(Some(RecognitionResponseMetadata::default()/* use setters */));
4681 /// let x = RecognizeResponse::new().set_or_clear_metadata(None::<RecognitionResponseMetadata>);
4682 /// ```
4683 pub fn set_or_clear_metadata<T>(mut self, v: std::option::Option<T>) -> Self
4684 where
4685 T: std::convert::Into<crate::model::RecognitionResponseMetadata>,
4686 {
4687 self.metadata = v.map(|x| x.into());
4688 self
4689 }
4690}
4691
// Identifies this message by its fully qualified protobuf name, prefixed with
// the `type.googleapis.com` domain (see `wkt::message::Message`).
impl wkt::message::Message for RecognizeResponse {
    fn typename() -> &'static str {
        "type.googleapis.com/google.cloud.speech.v2.RecognizeResponse"
    }
}
4697
/// Available recognition features specific to streaming recognition requests.
#[derive(Clone, Default, PartialEq)]
#[non_exhaustive]
pub struct StreamingRecognitionFeatures {
    /// If `true`, responses with voice activity speech events will be returned as
    /// they are detected.
    pub enable_voice_activity_events: bool,

    /// Whether or not to stream interim results to the client. If set to true,
    /// interim results will be streamed to the client. Otherwise, only the final
    /// response will be streamed back.
    pub interim_results: bool,

    /// If set, the server will automatically close the stream after the specified
    /// duration has elapsed after the last VOICE_ACTIVITY speech event has been
    /// sent. The field `voice_activity_events` must also be set to true.
    pub voice_activity_timeout:
        std::option::Option<crate::model::streaming_recognition_features::VoiceActivityTimeout>,

    // Catch-all for service fields this struct does not model; presumably
    // preserved by the `serialize`/`deserialize` modules so data isn't dropped.
    pub(crate) _unknown_fields: serde_json::Map<std::string::String, serde_json::Value>,
}
4719
4720impl StreamingRecognitionFeatures {
4721 pub fn new() -> Self {
4722 std::default::Default::default()
4723 }
4724
4725 /// Sets the value of [enable_voice_activity_events][crate::model::StreamingRecognitionFeatures::enable_voice_activity_events].
4726 ///
4727 /// # Example
4728 /// ```ignore,no_run
4729 /// # use google_cloud_speech_v2::model::StreamingRecognitionFeatures;
4730 /// let x = StreamingRecognitionFeatures::new().set_enable_voice_activity_events(true);
4731 /// ```
4732 pub fn set_enable_voice_activity_events<T: std::convert::Into<bool>>(mut self, v: T) -> Self {
4733 self.enable_voice_activity_events = v.into();
4734 self
4735 }
4736
4737 /// Sets the value of [interim_results][crate::model::StreamingRecognitionFeatures::interim_results].
4738 ///
4739 /// # Example
4740 /// ```ignore,no_run
4741 /// # use google_cloud_speech_v2::model::StreamingRecognitionFeatures;
4742 /// let x = StreamingRecognitionFeatures::new().set_interim_results(true);
4743 /// ```
4744 pub fn set_interim_results<T: std::convert::Into<bool>>(mut self, v: T) -> Self {
4745 self.interim_results = v.into();
4746 self
4747 }
4748
4749 /// Sets the value of [voice_activity_timeout][crate::model::StreamingRecognitionFeatures::voice_activity_timeout].
4750 ///
4751 /// # Example
4752 /// ```ignore,no_run
4753 /// # use google_cloud_speech_v2::model::StreamingRecognitionFeatures;
4754 /// use google_cloud_speech_v2::model::streaming_recognition_features::VoiceActivityTimeout;
4755 /// let x = StreamingRecognitionFeatures::new().set_voice_activity_timeout(VoiceActivityTimeout::default()/* use setters */);
4756 /// ```
4757 pub fn set_voice_activity_timeout<T>(mut self, v: T) -> Self
4758 where
4759 T: std::convert::Into<crate::model::streaming_recognition_features::VoiceActivityTimeout>,
4760 {
4761 self.voice_activity_timeout = std::option::Option::Some(v.into());
4762 self
4763 }
4764
4765 /// Sets or clears the value of [voice_activity_timeout][crate::model::StreamingRecognitionFeatures::voice_activity_timeout].
4766 ///
4767 /// # Example
4768 /// ```ignore,no_run
4769 /// # use google_cloud_speech_v2::model::StreamingRecognitionFeatures;
4770 /// use google_cloud_speech_v2::model::streaming_recognition_features::VoiceActivityTimeout;
4771 /// let x = StreamingRecognitionFeatures::new().set_or_clear_voice_activity_timeout(Some(VoiceActivityTimeout::default()/* use setters */));
4772 /// let x = StreamingRecognitionFeatures::new().set_or_clear_voice_activity_timeout(None::<VoiceActivityTimeout>);
4773 /// ```
4774 pub fn set_or_clear_voice_activity_timeout<T>(mut self, v: std::option::Option<T>) -> Self
4775 where
4776 T: std::convert::Into<crate::model::streaming_recognition_features::VoiceActivityTimeout>,
4777 {
4778 self.voice_activity_timeout = v.map(|x| x.into());
4779 self
4780 }
4781}
4782
// Identifies this message by its fully qualified protobuf name, prefixed with
// the `type.googleapis.com` domain (see `wkt::message::Message`).
impl wkt::message::Message for StreamingRecognitionFeatures {
    fn typename() -> &'static str {
        "type.googleapis.com/google.cloud.speech.v2.StreamingRecognitionFeatures"
    }
}
4788
4789/// Defines additional types related to [StreamingRecognitionFeatures].
4790pub mod streaming_recognition_features {
4791 #[allow(unused_imports)]
4792 use super::*;
4793
4794 /// Events that a timeout can be set on for voice activity.
4795 #[derive(Clone, Default, PartialEq)]
4796 #[non_exhaustive]
4797 pub struct VoiceActivityTimeout {
4798 /// Duration to timeout the stream if no speech begins. If this is set and
4799 /// no speech is detected in this duration at the start of the stream, the
4800 /// server will close the stream.
4801 pub speech_start_timeout: std::option::Option<wkt::Duration>,
4802
4803 /// Duration to timeout the stream after speech ends. If this is set and no
4804 /// speech is detected in this duration after speech was detected, the server
4805 /// will close the stream.
4806 pub speech_end_timeout: std::option::Option<wkt::Duration>,
4807
4808 pub(crate) _unknown_fields: serde_json::Map<std::string::String, serde_json::Value>,
4809 }
4810
4811 impl VoiceActivityTimeout {
4812 pub fn new() -> Self {
4813 std::default::Default::default()
4814 }
4815
4816 /// Sets the value of [speech_start_timeout][crate::model::streaming_recognition_features::VoiceActivityTimeout::speech_start_timeout].
4817 ///
4818 /// # Example
4819 /// ```ignore,no_run
4820 /// # use google_cloud_speech_v2::model::streaming_recognition_features::VoiceActivityTimeout;
4821 /// use wkt::Duration;
4822 /// let x = VoiceActivityTimeout::new().set_speech_start_timeout(Duration::default()/* use setters */);
4823 /// ```
4824 pub fn set_speech_start_timeout<T>(mut self, v: T) -> Self
4825 where
4826 T: std::convert::Into<wkt::Duration>,
4827 {
4828 self.speech_start_timeout = std::option::Option::Some(v.into());
4829 self
4830 }
4831
4832 /// Sets or clears the value of [speech_start_timeout][crate::model::streaming_recognition_features::VoiceActivityTimeout::speech_start_timeout].
4833 ///
4834 /// # Example
4835 /// ```ignore,no_run
4836 /// # use google_cloud_speech_v2::model::streaming_recognition_features::VoiceActivityTimeout;
4837 /// use wkt::Duration;
4838 /// let x = VoiceActivityTimeout::new().set_or_clear_speech_start_timeout(Some(Duration::default()/* use setters */));
4839 /// let x = VoiceActivityTimeout::new().set_or_clear_speech_start_timeout(None::<Duration>);
4840 /// ```
4841 pub fn set_or_clear_speech_start_timeout<T>(mut self, v: std::option::Option<T>) -> Self
4842 where
4843 T: std::convert::Into<wkt::Duration>,
4844 {
4845 self.speech_start_timeout = v.map(|x| x.into());
4846 self
4847 }
4848
4849 /// Sets the value of [speech_end_timeout][crate::model::streaming_recognition_features::VoiceActivityTimeout::speech_end_timeout].
4850 ///
4851 /// # Example
4852 /// ```ignore,no_run
4853 /// # use google_cloud_speech_v2::model::streaming_recognition_features::VoiceActivityTimeout;
4854 /// use wkt::Duration;
4855 /// let x = VoiceActivityTimeout::new().set_speech_end_timeout(Duration::default()/* use setters */);
4856 /// ```
4857 pub fn set_speech_end_timeout<T>(mut self, v: T) -> Self
4858 where
4859 T: std::convert::Into<wkt::Duration>,
4860 {
4861 self.speech_end_timeout = std::option::Option::Some(v.into());
4862 self
4863 }
4864
4865 /// Sets or clears the value of [speech_end_timeout][crate::model::streaming_recognition_features::VoiceActivityTimeout::speech_end_timeout].
4866 ///
4867 /// # Example
4868 /// ```ignore,no_run
4869 /// # use google_cloud_speech_v2::model::streaming_recognition_features::VoiceActivityTimeout;
4870 /// use wkt::Duration;
4871 /// let x = VoiceActivityTimeout::new().set_or_clear_speech_end_timeout(Some(Duration::default()/* use setters */));
4872 /// let x = VoiceActivityTimeout::new().set_or_clear_speech_end_timeout(None::<Duration>);
4873 /// ```
4874 pub fn set_or_clear_speech_end_timeout<T>(mut self, v: std::option::Option<T>) -> Self
4875 where
4876 T: std::convert::Into<wkt::Duration>,
4877 {
4878 self.speech_end_timeout = v.map(|x| x.into());
4879 self
4880 }
4881 }
4882
    impl wkt::message::Message for VoiceActivityTimeout {
        // The `type.googleapis.com/...` type URL identifying this message.
        fn typename() -> &'static str {
            "type.googleapis.com/google.cloud.speech.v2.StreamingRecognitionFeatures.VoiceActivityTimeout"
        }
    }
4888}
4889
/// Provides configuration information for the StreamingRecognize request.
#[derive(Clone, Default, PartialEq)]
#[non_exhaustive]
pub struct StreamingRecognitionConfig {
    /// Required. Features and audio metadata to use for the Automatic Speech
    /// Recognition. This field in combination with the
    /// [config_mask][google.cloud.speech.v2.StreamingRecognitionConfig.config_mask]
    /// field can be used to override parts of the
    /// [default_recognition_config][google.cloud.speech.v2.Recognizer.default_recognition_config]
    /// of the Recognizer resource.
    ///
    /// [google.cloud.speech.v2.Recognizer.default_recognition_config]: crate::model::Recognizer::default_recognition_config
    /// [google.cloud.speech.v2.StreamingRecognitionConfig.config_mask]: crate::model::StreamingRecognitionConfig::config_mask
    pub config: std::option::Option<crate::model::RecognitionConfig>,

    /// The list of fields in
    /// [config][google.cloud.speech.v2.StreamingRecognitionConfig.config] that
    /// override the values in the
    /// [default_recognition_config][google.cloud.speech.v2.Recognizer.default_recognition_config]
    /// of the recognizer during this recognition request. If no mask is provided,
    /// all non-default valued fields in
    /// [config][google.cloud.speech.v2.StreamingRecognitionConfig.config] override
    /// the values in the Recognizer for this recognition request. If a mask is
    /// provided, only the fields listed in the mask override the config in the
    /// Recognizer for this recognition request. If a wildcard (`*`) is provided,
    /// [config][google.cloud.speech.v2.StreamingRecognitionConfig.config]
    /// completely overrides and replaces the config in the recognizer for this
    /// recognition request.
    ///
    /// [google.cloud.speech.v2.Recognizer.default_recognition_config]: crate::model::Recognizer::default_recognition_config
    /// [google.cloud.speech.v2.StreamingRecognitionConfig.config]: crate::model::StreamingRecognitionConfig::config
    pub config_mask: std::option::Option<wkt::FieldMask>,

    /// Speech recognition features to enable specific to streaming audio
    /// recognition requests.
    pub streaming_features: std::option::Option<crate::model::StreamingRecognitionFeatures>,

    // JSON fields received on the wire but not modeled by this struct.
    pub(crate) _unknown_fields: serde_json::Map<std::string::String, serde_json::Value>,
}
4929
4930impl StreamingRecognitionConfig {
4931 pub fn new() -> Self {
4932 std::default::Default::default()
4933 }
4934
4935 /// Sets the value of [config][crate::model::StreamingRecognitionConfig::config].
4936 ///
4937 /// # Example
4938 /// ```ignore,no_run
4939 /// # use google_cloud_speech_v2::model::StreamingRecognitionConfig;
4940 /// use google_cloud_speech_v2::model::RecognitionConfig;
4941 /// let x = StreamingRecognitionConfig::new().set_config(RecognitionConfig::default()/* use setters */);
4942 /// ```
4943 pub fn set_config<T>(mut self, v: T) -> Self
4944 where
4945 T: std::convert::Into<crate::model::RecognitionConfig>,
4946 {
4947 self.config = std::option::Option::Some(v.into());
4948 self
4949 }
4950
4951 /// Sets or clears the value of [config][crate::model::StreamingRecognitionConfig::config].
4952 ///
4953 /// # Example
4954 /// ```ignore,no_run
4955 /// # use google_cloud_speech_v2::model::StreamingRecognitionConfig;
4956 /// use google_cloud_speech_v2::model::RecognitionConfig;
4957 /// let x = StreamingRecognitionConfig::new().set_or_clear_config(Some(RecognitionConfig::default()/* use setters */));
4958 /// let x = StreamingRecognitionConfig::new().set_or_clear_config(None::<RecognitionConfig>);
4959 /// ```
4960 pub fn set_or_clear_config<T>(mut self, v: std::option::Option<T>) -> Self
4961 where
4962 T: std::convert::Into<crate::model::RecognitionConfig>,
4963 {
4964 self.config = v.map(|x| x.into());
4965 self
4966 }
4967
4968 /// Sets the value of [config_mask][crate::model::StreamingRecognitionConfig::config_mask].
4969 ///
4970 /// # Example
4971 /// ```ignore,no_run
4972 /// # use google_cloud_speech_v2::model::StreamingRecognitionConfig;
4973 /// use wkt::FieldMask;
4974 /// let x = StreamingRecognitionConfig::new().set_config_mask(FieldMask::default()/* use setters */);
4975 /// ```
4976 pub fn set_config_mask<T>(mut self, v: T) -> Self
4977 where
4978 T: std::convert::Into<wkt::FieldMask>,
4979 {
4980 self.config_mask = std::option::Option::Some(v.into());
4981 self
4982 }
4983
4984 /// Sets or clears the value of [config_mask][crate::model::StreamingRecognitionConfig::config_mask].
4985 ///
4986 /// # Example
4987 /// ```ignore,no_run
4988 /// # use google_cloud_speech_v2::model::StreamingRecognitionConfig;
4989 /// use wkt::FieldMask;
4990 /// let x = StreamingRecognitionConfig::new().set_or_clear_config_mask(Some(FieldMask::default()/* use setters */));
4991 /// let x = StreamingRecognitionConfig::new().set_or_clear_config_mask(None::<FieldMask>);
4992 /// ```
4993 pub fn set_or_clear_config_mask<T>(mut self, v: std::option::Option<T>) -> Self
4994 where
4995 T: std::convert::Into<wkt::FieldMask>,
4996 {
4997 self.config_mask = v.map(|x| x.into());
4998 self
4999 }
5000
5001 /// Sets the value of [streaming_features][crate::model::StreamingRecognitionConfig::streaming_features].
5002 ///
5003 /// # Example
5004 /// ```ignore,no_run
5005 /// # use google_cloud_speech_v2::model::StreamingRecognitionConfig;
5006 /// use google_cloud_speech_v2::model::StreamingRecognitionFeatures;
5007 /// let x = StreamingRecognitionConfig::new().set_streaming_features(StreamingRecognitionFeatures::default()/* use setters */);
5008 /// ```
5009 pub fn set_streaming_features<T>(mut self, v: T) -> Self
5010 where
5011 T: std::convert::Into<crate::model::StreamingRecognitionFeatures>,
5012 {
5013 self.streaming_features = std::option::Option::Some(v.into());
5014 self
5015 }
5016
5017 /// Sets or clears the value of [streaming_features][crate::model::StreamingRecognitionConfig::streaming_features].
5018 ///
5019 /// # Example
5020 /// ```ignore,no_run
5021 /// # use google_cloud_speech_v2::model::StreamingRecognitionConfig;
5022 /// use google_cloud_speech_v2::model::StreamingRecognitionFeatures;
5023 /// let x = StreamingRecognitionConfig::new().set_or_clear_streaming_features(Some(StreamingRecognitionFeatures::default()/* use setters */));
5024 /// let x = StreamingRecognitionConfig::new().set_or_clear_streaming_features(None::<StreamingRecognitionFeatures>);
5025 /// ```
5026 pub fn set_or_clear_streaming_features<T>(mut self, v: std::option::Option<T>) -> Self
5027 where
5028 T: std::convert::Into<crate::model::StreamingRecognitionFeatures>,
5029 {
5030 self.streaming_features = v.map(|x| x.into());
5031 self
5032 }
5033}
5034
impl wkt::message::Message for StreamingRecognitionConfig {
    // The `type.googleapis.com/...` type URL identifying this message.
    fn typename() -> &'static str {
        "type.googleapis.com/google.cloud.speech.v2.StreamingRecognitionConfig"
    }
}
5040
/// Request message for the
/// [StreamingRecognize][google.cloud.speech.v2.Speech.StreamingRecognize]
/// method. Multiple
/// [StreamingRecognizeRequest][google.cloud.speech.v2.StreamingRecognizeRequest]
/// messages are sent in one call.
///
/// If the [Recognizer][google.cloud.speech.v2.Recognizer] referenced by
/// [recognizer][google.cloud.speech.v2.StreamingRecognizeRequest.recognizer]
/// contains a fully specified request configuration then the stream may only
/// contain messages with only
/// [audio][google.cloud.speech.v2.StreamingRecognizeRequest.audio] set.
///
/// Otherwise the first message must contain a
/// [recognizer][google.cloud.speech.v2.StreamingRecognizeRequest.recognizer] and
/// a
/// [streaming_config][google.cloud.speech.v2.StreamingRecognizeRequest.streaming_config]
/// message that together fully specify the request configuration and must not
/// contain [audio][google.cloud.speech.v2.StreamingRecognizeRequest.audio]. All
/// subsequent messages must only have
/// [audio][google.cloud.speech.v2.StreamingRecognizeRequest.audio] set.
///
/// [google.cloud.speech.v2.Recognizer]: crate::model::Recognizer
/// [google.cloud.speech.v2.StreamingRecognizeRequest]: crate::model::StreamingRecognizeRequest
/// [google.cloud.speech.v2.StreamingRecognizeRequest.audio]: crate::model::StreamingRecognizeRequest::streaming_request
/// [google.cloud.speech.v2.StreamingRecognizeRequest.recognizer]: crate::model::StreamingRecognizeRequest::recognizer
/// [google.cloud.speech.v2.StreamingRecognizeRequest.streaming_config]: crate::model::StreamingRecognizeRequest::streaming_request
#[derive(Clone, Default, PartialEq)]
#[non_exhaustive]
pub struct StreamingRecognizeRequest {
    /// Required. The name of the Recognizer to use during recognition. The
    /// expected format is
    /// `projects/{project}/locations/{location}/recognizers/{recognizer}`. The
    /// {recognizer} segment may be set to `_` to use an empty implicit Recognizer.
    pub recognizer: std::string::String,

    /// The streaming payload: either a `StreamingConfig` message or inline
    /// `Audio` bytes, never both. See the struct-level documentation for the
    /// required ordering of these variants within a stream.
    pub streaming_request:
        std::option::Option<crate::model::streaming_recognize_request::StreamingRequest>,

    // JSON fields received on the wire but not modeled by this struct.
    pub(crate) _unknown_fields: serde_json::Map<std::string::String, serde_json::Value>,
}
5081
5082impl StreamingRecognizeRequest {
5083 pub fn new() -> Self {
5084 std::default::Default::default()
5085 }
5086
5087 /// Sets the value of [recognizer][crate::model::StreamingRecognizeRequest::recognizer].
5088 ///
5089 /// # Example
5090 /// ```ignore,no_run
5091 /// # use google_cloud_speech_v2::model::StreamingRecognizeRequest;
5092 /// let x = StreamingRecognizeRequest::new().set_recognizer("example");
5093 /// ```
5094 pub fn set_recognizer<T: std::convert::Into<std::string::String>>(mut self, v: T) -> Self {
5095 self.recognizer = v.into();
5096 self
5097 }
5098
5099 /// Sets the value of [streaming_request][crate::model::StreamingRecognizeRequest::streaming_request].
5100 ///
5101 /// Note that all the setters affecting `streaming_request` are mutually
5102 /// exclusive.
5103 ///
5104 /// # Example
5105 /// ```ignore,no_run
5106 /// # use google_cloud_speech_v2::model::StreamingRecognizeRequest;
5107 /// use google_cloud_speech_v2::model::streaming_recognize_request::StreamingRequest;
5108 /// let x = StreamingRecognizeRequest::new().set_streaming_request(Some(StreamingRequest::Audio(bytes::Bytes::from_static(b"example"))));
5109 /// ```
5110 pub fn set_streaming_request<
5111 T: std::convert::Into<
5112 std::option::Option<crate::model::streaming_recognize_request::StreamingRequest>,
5113 >,
5114 >(
5115 mut self,
5116 v: T,
5117 ) -> Self {
5118 self.streaming_request = v.into();
5119 self
5120 }
5121
5122 /// The value of [streaming_request][crate::model::StreamingRecognizeRequest::streaming_request]
5123 /// if it holds a `StreamingConfig`, `None` if the field is not set or
5124 /// holds a different branch.
5125 pub fn streaming_config(
5126 &self,
5127 ) -> std::option::Option<&std::boxed::Box<crate::model::StreamingRecognitionConfig>> {
5128 #[allow(unreachable_patterns)]
5129 self.streaming_request.as_ref().and_then(|v| match v {
5130 crate::model::streaming_recognize_request::StreamingRequest::StreamingConfig(v) => {
5131 std::option::Option::Some(v)
5132 }
5133 _ => std::option::Option::None,
5134 })
5135 }
5136
5137 /// Sets the value of [streaming_request][crate::model::StreamingRecognizeRequest::streaming_request]
5138 /// to hold a `StreamingConfig`.
5139 ///
5140 /// Note that all the setters affecting `streaming_request` are
5141 /// mutually exclusive.
5142 ///
5143 /// # Example
5144 /// ```ignore,no_run
5145 /// # use google_cloud_speech_v2::model::StreamingRecognizeRequest;
5146 /// use google_cloud_speech_v2::model::StreamingRecognitionConfig;
5147 /// let x = StreamingRecognizeRequest::new().set_streaming_config(StreamingRecognitionConfig::default()/* use setters */);
5148 /// assert!(x.streaming_config().is_some());
5149 /// assert!(x.audio().is_none());
5150 /// ```
5151 pub fn set_streaming_config<
5152 T: std::convert::Into<std::boxed::Box<crate::model::StreamingRecognitionConfig>>,
5153 >(
5154 mut self,
5155 v: T,
5156 ) -> Self {
5157 self.streaming_request = std::option::Option::Some(
5158 crate::model::streaming_recognize_request::StreamingRequest::StreamingConfig(v.into()),
5159 );
5160 self
5161 }
5162
5163 /// The value of [streaming_request][crate::model::StreamingRecognizeRequest::streaming_request]
5164 /// if it holds a `Audio`, `None` if the field is not set or
5165 /// holds a different branch.
5166 pub fn audio(&self) -> std::option::Option<&::bytes::Bytes> {
5167 #[allow(unreachable_patterns)]
5168 self.streaming_request.as_ref().and_then(|v| match v {
5169 crate::model::streaming_recognize_request::StreamingRequest::Audio(v) => {
5170 std::option::Option::Some(v)
5171 }
5172 _ => std::option::Option::None,
5173 })
5174 }
5175
5176 /// Sets the value of [streaming_request][crate::model::StreamingRecognizeRequest::streaming_request]
5177 /// to hold a `Audio`.
5178 ///
5179 /// Note that all the setters affecting `streaming_request` are
5180 /// mutually exclusive.
5181 ///
5182 /// # Example
5183 /// ```ignore,no_run
5184 /// # use google_cloud_speech_v2::model::StreamingRecognizeRequest;
5185 /// let x = StreamingRecognizeRequest::new().set_audio(bytes::Bytes::from_static(b"example"));
5186 /// assert!(x.audio().is_some());
5187 /// assert!(x.streaming_config().is_none());
5188 /// ```
5189 pub fn set_audio<T: std::convert::Into<::bytes::Bytes>>(mut self, v: T) -> Self {
5190 self.streaming_request = std::option::Option::Some(
5191 crate::model::streaming_recognize_request::StreamingRequest::Audio(v.into()),
5192 );
5193 self
5194 }
5195}
5196
impl wkt::message::Message for StreamingRecognizeRequest {
    // The `type.googleapis.com/...` type URL identifying this message.
    fn typename() -> &'static str {
        "type.googleapis.com/google.cloud.speech.v2.StreamingRecognizeRequest"
    }
}
5202
/// Defines additional types related to [StreamingRecognizeRequest].
pub mod streaming_recognize_request {
    #[allow(unused_imports)]
    use super::*;

    /// The payload of one streaming message: either the streaming
    /// configuration or a chunk of audio, never both.
    #[derive(Clone, Debug, PartialEq)]
    #[non_exhaustive]
    pub enum StreamingRequest {
        /// StreamingRecognitionConfig to be used in this recognition attempt.
        /// If provided, it will override the default RecognitionConfig stored in the
        /// Recognizer.
        StreamingConfig(std::boxed::Box<crate::model::StreamingRecognitionConfig>),
        /// Inline audio bytes to be Recognized.
        /// Maximum size for this field is 15 KB per request.
        Audio(::bytes::Bytes),
    }
}
5220
/// Request message for the
/// [BatchRecognize][google.cloud.speech.v2.Speech.BatchRecognize]
/// method.
///
/// [google.cloud.speech.v2.Speech.BatchRecognize]: crate::client::Speech::batch_recognize
#[derive(Clone, Default, PartialEq)]
#[non_exhaustive]
pub struct BatchRecognizeRequest {
    /// Required. The name of the Recognizer to use during recognition. The
    /// expected format is
    /// `projects/{project}/locations/{location}/recognizers/{recognizer}`. The
    /// {recognizer} segment may be set to `_` to use an empty implicit Recognizer.
    pub recognizer: std::string::String,

    /// Features and audio metadata to use for the Automatic Speech Recognition.
    /// This field in combination with the
    /// [config_mask][google.cloud.speech.v2.BatchRecognizeRequest.config_mask]
    /// field can be used to override parts of the
    /// [default_recognition_config][google.cloud.speech.v2.Recognizer.default_recognition_config]
    /// of the Recognizer resource.
    ///
    /// [google.cloud.speech.v2.BatchRecognizeRequest.config_mask]: crate::model::BatchRecognizeRequest::config_mask
    /// [google.cloud.speech.v2.Recognizer.default_recognition_config]: crate::model::Recognizer::default_recognition_config
    pub config: std::option::Option<crate::model::RecognitionConfig>,

    /// The list of fields in
    /// [config][google.cloud.speech.v2.BatchRecognizeRequest.config] that override
    /// the values in the
    /// [default_recognition_config][google.cloud.speech.v2.Recognizer.default_recognition_config]
    /// of the recognizer during this recognition request. If no mask is provided,
    /// all given fields in
    /// [config][google.cloud.speech.v2.BatchRecognizeRequest.config] override the
    /// values in the recognizer for this recognition request. If a mask is
    /// provided, only the fields listed in the mask override the config in the
    /// recognizer for this recognition request. If a wildcard (`*`) is provided,
    /// [config][google.cloud.speech.v2.BatchRecognizeRequest.config] completely
    /// overrides and replaces the config in the recognizer for this recognition
    /// request.
    ///
    /// [google.cloud.speech.v2.BatchRecognizeRequest.config]: crate::model::BatchRecognizeRequest::config
    /// [google.cloud.speech.v2.Recognizer.default_recognition_config]: crate::model::Recognizer::default_recognition_config
    pub config_mask: std::option::Option<wkt::FieldMask>,

    /// Audio files with file metadata for ASR.
    /// The maximum number of files allowed to be specified is 15.
    pub files: std::vec::Vec<crate::model::BatchRecognizeFileMetadata>,

    /// Configuration options for where to output the transcripts of each file.
    pub recognition_output_config: std::option::Option<crate::model::RecognitionOutputConfig>,

    /// Processing strategy to use for this request.
    pub processing_strategy: crate::model::batch_recognize_request::ProcessingStrategy,

    // JSON fields received on the wire but not modeled by this struct.
    pub(crate) _unknown_fields: serde_json::Map<std::string::String, serde_json::Value>,
}
5276
5277impl BatchRecognizeRequest {
5278 pub fn new() -> Self {
5279 std::default::Default::default()
5280 }
5281
5282 /// Sets the value of [recognizer][crate::model::BatchRecognizeRequest::recognizer].
5283 ///
5284 /// # Example
5285 /// ```ignore,no_run
5286 /// # use google_cloud_speech_v2::model::BatchRecognizeRequest;
5287 /// let x = BatchRecognizeRequest::new().set_recognizer("example");
5288 /// ```
5289 pub fn set_recognizer<T: std::convert::Into<std::string::String>>(mut self, v: T) -> Self {
5290 self.recognizer = v.into();
5291 self
5292 }
5293
5294 /// Sets the value of [config][crate::model::BatchRecognizeRequest::config].
5295 ///
5296 /// # Example
5297 /// ```ignore,no_run
5298 /// # use google_cloud_speech_v2::model::BatchRecognizeRequest;
5299 /// use google_cloud_speech_v2::model::RecognitionConfig;
5300 /// let x = BatchRecognizeRequest::new().set_config(RecognitionConfig::default()/* use setters */);
5301 /// ```
5302 pub fn set_config<T>(mut self, v: T) -> Self
5303 where
5304 T: std::convert::Into<crate::model::RecognitionConfig>,
5305 {
5306 self.config = std::option::Option::Some(v.into());
5307 self
5308 }
5309
5310 /// Sets or clears the value of [config][crate::model::BatchRecognizeRequest::config].
5311 ///
5312 /// # Example
5313 /// ```ignore,no_run
5314 /// # use google_cloud_speech_v2::model::BatchRecognizeRequest;
5315 /// use google_cloud_speech_v2::model::RecognitionConfig;
5316 /// let x = BatchRecognizeRequest::new().set_or_clear_config(Some(RecognitionConfig::default()/* use setters */));
5317 /// let x = BatchRecognizeRequest::new().set_or_clear_config(None::<RecognitionConfig>);
5318 /// ```
5319 pub fn set_or_clear_config<T>(mut self, v: std::option::Option<T>) -> Self
5320 where
5321 T: std::convert::Into<crate::model::RecognitionConfig>,
5322 {
5323 self.config = v.map(|x| x.into());
5324 self
5325 }
5326
5327 /// Sets the value of [config_mask][crate::model::BatchRecognizeRequest::config_mask].
5328 ///
5329 /// # Example
5330 /// ```ignore,no_run
5331 /// # use google_cloud_speech_v2::model::BatchRecognizeRequest;
5332 /// use wkt::FieldMask;
5333 /// let x = BatchRecognizeRequest::new().set_config_mask(FieldMask::default()/* use setters */);
5334 /// ```
5335 pub fn set_config_mask<T>(mut self, v: T) -> Self
5336 where
5337 T: std::convert::Into<wkt::FieldMask>,
5338 {
5339 self.config_mask = std::option::Option::Some(v.into());
5340 self
5341 }
5342
5343 /// Sets or clears the value of [config_mask][crate::model::BatchRecognizeRequest::config_mask].
5344 ///
5345 /// # Example
5346 /// ```ignore,no_run
5347 /// # use google_cloud_speech_v2::model::BatchRecognizeRequest;
5348 /// use wkt::FieldMask;
5349 /// let x = BatchRecognizeRequest::new().set_or_clear_config_mask(Some(FieldMask::default()/* use setters */));
5350 /// let x = BatchRecognizeRequest::new().set_or_clear_config_mask(None::<FieldMask>);
5351 /// ```
5352 pub fn set_or_clear_config_mask<T>(mut self, v: std::option::Option<T>) -> Self
5353 where
5354 T: std::convert::Into<wkt::FieldMask>,
5355 {
5356 self.config_mask = v.map(|x| x.into());
5357 self
5358 }
5359
5360 /// Sets the value of [files][crate::model::BatchRecognizeRequest::files].
5361 ///
5362 /// # Example
5363 /// ```ignore,no_run
5364 /// # use google_cloud_speech_v2::model::BatchRecognizeRequest;
5365 /// use google_cloud_speech_v2::model::BatchRecognizeFileMetadata;
5366 /// let x = BatchRecognizeRequest::new()
5367 /// .set_files([
5368 /// BatchRecognizeFileMetadata::default()/* use setters */,
5369 /// BatchRecognizeFileMetadata::default()/* use (different) setters */,
5370 /// ]);
5371 /// ```
5372 pub fn set_files<T, V>(mut self, v: T) -> Self
5373 where
5374 T: std::iter::IntoIterator<Item = V>,
5375 V: std::convert::Into<crate::model::BatchRecognizeFileMetadata>,
5376 {
5377 use std::iter::Iterator;
5378 self.files = v.into_iter().map(|i| i.into()).collect();
5379 self
5380 }
5381
5382 /// Sets the value of [recognition_output_config][crate::model::BatchRecognizeRequest::recognition_output_config].
5383 ///
5384 /// # Example
5385 /// ```ignore,no_run
5386 /// # use google_cloud_speech_v2::model::BatchRecognizeRequest;
5387 /// use google_cloud_speech_v2::model::RecognitionOutputConfig;
5388 /// let x = BatchRecognizeRequest::new().set_recognition_output_config(RecognitionOutputConfig::default()/* use setters */);
5389 /// ```
5390 pub fn set_recognition_output_config<T>(mut self, v: T) -> Self
5391 where
5392 T: std::convert::Into<crate::model::RecognitionOutputConfig>,
5393 {
5394 self.recognition_output_config = std::option::Option::Some(v.into());
5395 self
5396 }
5397
5398 /// Sets or clears the value of [recognition_output_config][crate::model::BatchRecognizeRequest::recognition_output_config].
5399 ///
5400 /// # Example
5401 /// ```ignore,no_run
5402 /// # use google_cloud_speech_v2::model::BatchRecognizeRequest;
5403 /// use google_cloud_speech_v2::model::RecognitionOutputConfig;
5404 /// let x = BatchRecognizeRequest::new().set_or_clear_recognition_output_config(Some(RecognitionOutputConfig::default()/* use setters */));
5405 /// let x = BatchRecognizeRequest::new().set_or_clear_recognition_output_config(None::<RecognitionOutputConfig>);
5406 /// ```
5407 pub fn set_or_clear_recognition_output_config<T>(mut self, v: std::option::Option<T>) -> Self
5408 where
5409 T: std::convert::Into<crate::model::RecognitionOutputConfig>,
5410 {
5411 self.recognition_output_config = v.map(|x| x.into());
5412 self
5413 }
5414
5415 /// Sets the value of [processing_strategy][crate::model::BatchRecognizeRequest::processing_strategy].
5416 ///
5417 /// # Example
5418 /// ```ignore,no_run
5419 /// # use google_cloud_speech_v2::model::BatchRecognizeRequest;
5420 /// use google_cloud_speech_v2::model::batch_recognize_request::ProcessingStrategy;
5421 /// let x0 = BatchRecognizeRequest::new().set_processing_strategy(ProcessingStrategy::DynamicBatching);
5422 /// ```
5423 pub fn set_processing_strategy<
5424 T: std::convert::Into<crate::model::batch_recognize_request::ProcessingStrategy>,
5425 >(
5426 mut self,
5427 v: T,
5428 ) -> Self {
5429 self.processing_strategy = v.into();
5430 self
5431 }
5432}
5433
impl wkt::message::Message for BatchRecognizeRequest {
    // The `type.googleapis.com/...` type URL identifying this message.
    fn typename() -> &'static str {
        "type.googleapis.com/google.cloud.speech.v2.BatchRecognizeRequest"
    }
}
5439
/// Defines additional types related to [BatchRecognizeRequest].
pub mod batch_recognize_request {
    #[allow(unused_imports)]
    use super::*;

    /// Possible processing strategies for batch requests.
    ///
    /// # Working with unknown values
    ///
    /// This enum is defined as `#[non_exhaustive]` because Google Cloud may add
    /// additional enum variants at any time. Adding new variants is not considered
    /// a breaking change. Applications should write their code in anticipation of:
    ///
    /// - New values appearing in future releases of the client library, **and**
    /// - New values received dynamically, without application changes.
    ///
    /// Please consult the [Working with enums] section in the user guide for some
    /// guidelines.
    ///
    /// [Working with enums]: https://google-cloud-rust.github.io/working_with_enums.html
    #[derive(Clone, Debug, PartialEq)]
    #[non_exhaustive]
    pub enum ProcessingStrategy {
        /// Default value for the processing strategy. The request is processed as
        /// soon as it is received.
        Unspecified,
        /// If selected, processes the request during lower utilization periods for a
        /// price discount. The request is fulfilled within 24 hours.
        DynamicBatching,
        /// If set, the enum was initialized with an unknown value.
        ///
        /// Applications can examine the value using [ProcessingStrategy::value] or
        /// [ProcessingStrategy::name].
        UnknownValue(processing_strategy::UnknownValue),
    }

    #[doc(hidden)]
    pub mod processing_strategy {
        #[allow(unused_imports)]
        use super::*;
        /// Wraps an enum value (integer or string form) that this version of
        /// the client library does not recognize.
        #[derive(Clone, Debug, PartialEq)]
        pub struct UnknownValue(pub(crate) wkt::internal::UnknownEnumValue);
    }

    impl ProcessingStrategy {
        /// Gets the enum value.
        ///
        /// Returns `None` if the enum contains an unknown value deserialized from
        /// the string representation of enums.
        pub fn value(&self) -> std::option::Option<i32> {
            match self {
                Self::Unspecified => std::option::Option::Some(0),
                Self::DynamicBatching => std::option::Option::Some(1),
                Self::UnknownValue(u) => u.0.value(),
            }
        }

        /// Gets the enum value as a string.
        ///
        /// Returns `None` if the enum contains an unknown value deserialized from
        /// the integer representation of enums.
        pub fn name(&self) -> std::option::Option<&str> {
            match self {
                Self::Unspecified => std::option::Option::Some("PROCESSING_STRATEGY_UNSPECIFIED"),
                Self::DynamicBatching => std::option::Option::Some("DYNAMIC_BATCHING"),
                Self::UnknownValue(u) => u.0.name(),
            }
        }
    }

    impl std::default::Default for ProcessingStrategy {
        // The default is the protobuf zero value, `Unspecified`.
        fn default() -> Self {
            use std::convert::From;
            Self::from(0)
        }
    }

    impl std::fmt::Display for ProcessingStrategy {
        fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::result::Result<(), std::fmt::Error> {
            wkt::internal::display_enum(f, self.name(), self.value())
        }
    }

    impl std::convert::From<i32> for ProcessingStrategy {
        // Unknown integers are preserved (not dropped) so values survive a
        // deserialize/serialize round trip.
        fn from(value: i32) -> Self {
            match value {
                0 => Self::Unspecified,
                1 => Self::DynamicBatching,
                _ => Self::UnknownValue(processing_strategy::UnknownValue(
                    wkt::internal::UnknownEnumValue::Integer(value),
                )),
            }
        }
    }

    impl std::convert::From<&str> for ProcessingStrategy {
        // Unknown names are preserved as strings for the same round-trip reason.
        fn from(value: &str) -> Self {
            use std::string::ToString;
            match value {
                "PROCESSING_STRATEGY_UNSPECIFIED" => Self::Unspecified,
                "DYNAMIC_BATCHING" => Self::DynamicBatching,
                _ => Self::UnknownValue(processing_strategy::UnknownValue(
                    wkt::internal::UnknownEnumValue::String(value.to_string()),
                )),
            }
        }
    }

    impl serde::ser::Serialize for ProcessingStrategy {
        // Known variants serialize as their protobuf integer value; unknown
        // values delegate to the wrapped representation.
        fn serialize<S>(&self, serializer: S) -> std::result::Result<S::Ok, S::Error>
        where
            S: serde::Serializer,
        {
            match self {
                Self::Unspecified => serializer.serialize_i32(0),
                Self::DynamicBatching => serializer.serialize_i32(1),
                Self::UnknownValue(u) => u.0.serialize(serializer),
            }
        }
    }

    impl<'de> serde::de::Deserialize<'de> for ProcessingStrategy {
        // `deserialize_any` lets the visitor accept the enum in either its
        // integer or its string representation.
        fn deserialize<D>(deserializer: D) -> std::result::Result<Self, D::Error>
        where
            D: serde::Deserializer<'de>,
        {
            deserializer.deserialize_any(wkt::internal::EnumVisitor::<ProcessingStrategy>::new(
                ".google.cloud.speech.v2.BatchRecognizeRequest.ProcessingStrategy",
            ))
        }
    }
}
5572
/// Output configurations for Cloud Storage.
#[derive(Clone, Default, PartialEq)]
#[non_exhaustive]
pub struct GcsOutputConfig {
    /// The Cloud Storage URI prefix with which recognition results will be
    /// written.
    pub uri: std::string::String,

    // JSON fields received on the wire but not modeled by this struct.
    pub(crate) _unknown_fields: serde_json::Map<std::string::String, serde_json::Value>,
}
5583
5584impl GcsOutputConfig {
5585 pub fn new() -> Self {
5586 std::default::Default::default()
5587 }
5588
5589 /// Sets the value of [uri][crate::model::GcsOutputConfig::uri].
5590 ///
5591 /// # Example
5592 /// ```ignore,no_run
5593 /// # use google_cloud_speech_v2::model::GcsOutputConfig;
5594 /// let x = GcsOutputConfig::new().set_uri("example");
5595 /// ```
5596 pub fn set_uri<T: std::convert::Into<std::string::String>>(mut self, v: T) -> Self {
5597 self.uri = v.into();
5598 self
5599 }
5600}
5601
impl wkt::message::Message for GcsOutputConfig {
    // The `type.googleapis.com/...` type URL identifying this message.
    fn typename() -> &'static str {
        "type.googleapis.com/google.cloud.speech.v2.GcsOutputConfig"
    }
}
5607
/// Output configurations for inline response.
#[derive(Clone, Default, PartialEq)]
#[non_exhaustive]
pub struct InlineOutputConfig {
    // This message has no modeled fields; this map captures any JSON fields
    // received on the wire.
    pub(crate) _unknown_fields: serde_json::Map<std::string::String, serde_json::Value>,
}
5614
5615impl InlineOutputConfig {
5616 pub fn new() -> Self {
5617 std::default::Default::default()
5618 }
5619}
5620
impl wkt::message::Message for InlineOutputConfig {
    // The `type.googleapis.com/...` type URL identifying this message.
    fn typename() -> &'static str {
        "type.googleapis.com/google.cloud.speech.v2.InlineOutputConfig"
    }
}
5626
/// Output configurations for serialized `BatchRecognizeResults` protos.
#[derive(Clone, Default, PartialEq)]
#[non_exhaustive]
pub struct NativeOutputFileFormatConfig {
    // This message has no modeled fields; this map captures any JSON fields
    // received on the wire.
    pub(crate) _unknown_fields: serde_json::Map<std::string::String, serde_json::Value>,
}
5633
5634impl NativeOutputFileFormatConfig {
5635 pub fn new() -> Self {
5636 std::default::Default::default()
5637 }
5638}
5639
impl wkt::message::Message for NativeOutputFileFormatConfig {
    // The `type.googleapis.com/...` type URL identifying this message.
    fn typename() -> &'static str {
        "type.googleapis.com/google.cloud.speech.v2.NativeOutputFileFormatConfig"
    }
}
5645
5646/// Output configurations for [WebVTT](https://www.w3.org/TR/webvtt1/) formatted
5647/// subtitle file.
5648#[derive(Clone, Default, PartialEq)]
5649#[non_exhaustive]
5650pub struct VttOutputFileFormatConfig {
5651 pub(crate) _unknown_fields: serde_json::Map<std::string::String, serde_json::Value>,
5652}
5653
5654impl VttOutputFileFormatConfig {
5655 pub fn new() -> Self {
5656 std::default::Default::default()
5657 }
5658}
5659
impl wkt::message::Message for VttOutputFileFormatConfig {
    // The `type.googleapis.com/...` type URL identifying this message.
    fn typename() -> &'static str {
        "type.googleapis.com/google.cloud.speech.v2.VttOutputFileFormatConfig"
    }
}
5665
/// Output configurations [SubRip
/// Text](https://www.matroska.org/technical/subtitles.html#srt-subtitles)
/// formatted subtitle file.
#[derive(Clone, Default, PartialEq)]
#[non_exhaustive]
pub struct SrtOutputFileFormatConfig {
    // This message has no modeled fields; this map captures any JSON fields
    // received on the wire.
    pub(crate) _unknown_fields: serde_json::Map<std::string::String, serde_json::Value>,
}
5674
5675impl SrtOutputFileFormatConfig {
5676 pub fn new() -> Self {
5677 std::default::Default::default()
5678 }
5679}
5680
impl wkt::message::Message for SrtOutputFileFormatConfig {
    /// The fully qualified protobuf message name, in type-URL form.
    fn typename() -> &'static str {
        "type.googleapis.com/google.cloud.speech.v2.SrtOutputFileFormatConfig"
    }
}
5686
/// Configuration for the format of the results stored to `output`.
#[derive(Clone, Default, PartialEq)]
#[non_exhaustive]
pub struct OutputFormatConfig {
    /// Configuration for the native output format. If this field is set or if no
    /// other output format field is set, then transcripts will be written to the
    /// sink in the native format.
    pub native: std::option::Option<crate::model::NativeOutputFileFormatConfig>,

    /// Configuration for the VTT output format. If this field is set, then
    /// transcripts will be written to the sink in the VTT format.
    pub vtt: std::option::Option<crate::model::VttOutputFileFormatConfig>,

    /// Configuration for the SRT output format. If this field is set, then
    /// transcripts will be written to the sink in the SRT format.
    pub srt: std::option::Option<crate::model::SrtOutputFileFormatConfig>,

    /// Wire fields not modeled by this client version; presumably retained so
    /// they round-trip — confirm in the `serialize`/`deserialize` modules.
    pub(crate) _unknown_fields: serde_json::Map<std::string::String, serde_json::Value>,
}
5706
5707impl OutputFormatConfig {
5708 pub fn new() -> Self {
5709 std::default::Default::default()
5710 }
5711
5712 /// Sets the value of [native][crate::model::OutputFormatConfig::native].
5713 ///
5714 /// # Example
5715 /// ```ignore,no_run
5716 /// # use google_cloud_speech_v2::model::OutputFormatConfig;
5717 /// use google_cloud_speech_v2::model::NativeOutputFileFormatConfig;
5718 /// let x = OutputFormatConfig::new().set_native(NativeOutputFileFormatConfig::default()/* use setters */);
5719 /// ```
5720 pub fn set_native<T>(mut self, v: T) -> Self
5721 where
5722 T: std::convert::Into<crate::model::NativeOutputFileFormatConfig>,
5723 {
5724 self.native = std::option::Option::Some(v.into());
5725 self
5726 }
5727
5728 /// Sets or clears the value of [native][crate::model::OutputFormatConfig::native].
5729 ///
5730 /// # Example
5731 /// ```ignore,no_run
5732 /// # use google_cloud_speech_v2::model::OutputFormatConfig;
5733 /// use google_cloud_speech_v2::model::NativeOutputFileFormatConfig;
5734 /// let x = OutputFormatConfig::new().set_or_clear_native(Some(NativeOutputFileFormatConfig::default()/* use setters */));
5735 /// let x = OutputFormatConfig::new().set_or_clear_native(None::<NativeOutputFileFormatConfig>);
5736 /// ```
5737 pub fn set_or_clear_native<T>(mut self, v: std::option::Option<T>) -> Self
5738 where
5739 T: std::convert::Into<crate::model::NativeOutputFileFormatConfig>,
5740 {
5741 self.native = v.map(|x| x.into());
5742 self
5743 }
5744
5745 /// Sets the value of [vtt][crate::model::OutputFormatConfig::vtt].
5746 ///
5747 /// # Example
5748 /// ```ignore,no_run
5749 /// # use google_cloud_speech_v2::model::OutputFormatConfig;
5750 /// use google_cloud_speech_v2::model::VttOutputFileFormatConfig;
5751 /// let x = OutputFormatConfig::new().set_vtt(VttOutputFileFormatConfig::default()/* use setters */);
5752 /// ```
5753 pub fn set_vtt<T>(mut self, v: T) -> Self
5754 where
5755 T: std::convert::Into<crate::model::VttOutputFileFormatConfig>,
5756 {
5757 self.vtt = std::option::Option::Some(v.into());
5758 self
5759 }
5760
5761 /// Sets or clears the value of [vtt][crate::model::OutputFormatConfig::vtt].
5762 ///
5763 /// # Example
5764 /// ```ignore,no_run
5765 /// # use google_cloud_speech_v2::model::OutputFormatConfig;
5766 /// use google_cloud_speech_v2::model::VttOutputFileFormatConfig;
5767 /// let x = OutputFormatConfig::new().set_or_clear_vtt(Some(VttOutputFileFormatConfig::default()/* use setters */));
5768 /// let x = OutputFormatConfig::new().set_or_clear_vtt(None::<VttOutputFileFormatConfig>);
5769 /// ```
5770 pub fn set_or_clear_vtt<T>(mut self, v: std::option::Option<T>) -> Self
5771 where
5772 T: std::convert::Into<crate::model::VttOutputFileFormatConfig>,
5773 {
5774 self.vtt = v.map(|x| x.into());
5775 self
5776 }
5777
5778 /// Sets the value of [srt][crate::model::OutputFormatConfig::srt].
5779 ///
5780 /// # Example
5781 /// ```ignore,no_run
5782 /// # use google_cloud_speech_v2::model::OutputFormatConfig;
5783 /// use google_cloud_speech_v2::model::SrtOutputFileFormatConfig;
5784 /// let x = OutputFormatConfig::new().set_srt(SrtOutputFileFormatConfig::default()/* use setters */);
5785 /// ```
5786 pub fn set_srt<T>(mut self, v: T) -> Self
5787 where
5788 T: std::convert::Into<crate::model::SrtOutputFileFormatConfig>,
5789 {
5790 self.srt = std::option::Option::Some(v.into());
5791 self
5792 }
5793
5794 /// Sets or clears the value of [srt][crate::model::OutputFormatConfig::srt].
5795 ///
5796 /// # Example
5797 /// ```ignore,no_run
5798 /// # use google_cloud_speech_v2::model::OutputFormatConfig;
5799 /// use google_cloud_speech_v2::model::SrtOutputFileFormatConfig;
5800 /// let x = OutputFormatConfig::new().set_or_clear_srt(Some(SrtOutputFileFormatConfig::default()/* use setters */));
5801 /// let x = OutputFormatConfig::new().set_or_clear_srt(None::<SrtOutputFileFormatConfig>);
5802 /// ```
5803 pub fn set_or_clear_srt<T>(mut self, v: std::option::Option<T>) -> Self
5804 where
5805 T: std::convert::Into<crate::model::SrtOutputFileFormatConfig>,
5806 {
5807 self.srt = v.map(|x| x.into());
5808 self
5809 }
5810}
5811
impl wkt::message::Message for OutputFormatConfig {
    /// The fully qualified protobuf message name, in type-URL form.
    fn typename() -> &'static str {
        "type.googleapis.com/google.cloud.speech.v2.OutputFormatConfig"
    }
}
5817
/// Configuration options for the output(s) of recognition.
#[derive(Clone, Default, PartialEq)]
#[non_exhaustive]
pub struct RecognitionOutputConfig {
    /// Optional. Configuration for the format of the results stored to `output`.
    /// If unspecified transcripts will be written in the `NATIVE` format only.
    pub output_format_config: std::option::Option<crate::model::OutputFormatConfig>,

    /// Where recognition results are delivered; see
    /// [Output][crate::model::recognition_output_config::Output] for the
    /// available destinations (Cloud Storage or inline response).
    pub output: std::option::Option<crate::model::recognition_output_config::Output>,

    /// Wire fields not modeled by this client version; presumably retained so
    /// they round-trip — confirm in the `serialize`/`deserialize` modules.
    pub(crate) _unknown_fields: serde_json::Map<std::string::String, serde_json::Value>,
}
5830
5831impl RecognitionOutputConfig {
5832 pub fn new() -> Self {
5833 std::default::Default::default()
5834 }
5835
5836 /// Sets the value of [output_format_config][crate::model::RecognitionOutputConfig::output_format_config].
5837 ///
5838 /// # Example
5839 /// ```ignore,no_run
5840 /// # use google_cloud_speech_v2::model::RecognitionOutputConfig;
5841 /// use google_cloud_speech_v2::model::OutputFormatConfig;
5842 /// let x = RecognitionOutputConfig::new().set_output_format_config(OutputFormatConfig::default()/* use setters */);
5843 /// ```
5844 pub fn set_output_format_config<T>(mut self, v: T) -> Self
5845 where
5846 T: std::convert::Into<crate::model::OutputFormatConfig>,
5847 {
5848 self.output_format_config = std::option::Option::Some(v.into());
5849 self
5850 }
5851
5852 /// Sets or clears the value of [output_format_config][crate::model::RecognitionOutputConfig::output_format_config].
5853 ///
5854 /// # Example
5855 /// ```ignore,no_run
5856 /// # use google_cloud_speech_v2::model::RecognitionOutputConfig;
5857 /// use google_cloud_speech_v2::model::OutputFormatConfig;
5858 /// let x = RecognitionOutputConfig::new().set_or_clear_output_format_config(Some(OutputFormatConfig::default()/* use setters */));
5859 /// let x = RecognitionOutputConfig::new().set_or_clear_output_format_config(None::<OutputFormatConfig>);
5860 /// ```
5861 pub fn set_or_clear_output_format_config<T>(mut self, v: std::option::Option<T>) -> Self
5862 where
5863 T: std::convert::Into<crate::model::OutputFormatConfig>,
5864 {
5865 self.output_format_config = v.map(|x| x.into());
5866 self
5867 }
5868
5869 /// Sets the value of [output][crate::model::RecognitionOutputConfig::output].
5870 ///
5871 /// Note that all the setters affecting `output` are mutually
5872 /// exclusive.
5873 ///
5874 /// # Example
5875 /// ```ignore,no_run
5876 /// # use google_cloud_speech_v2::model::RecognitionOutputConfig;
5877 /// use google_cloud_speech_v2::model::GcsOutputConfig;
5878 /// let x = RecognitionOutputConfig::new().set_output(Some(
5879 /// google_cloud_speech_v2::model::recognition_output_config::Output::GcsOutputConfig(GcsOutputConfig::default().into())));
5880 /// ```
5881 pub fn set_output<
5882 T: std::convert::Into<std::option::Option<crate::model::recognition_output_config::Output>>,
5883 >(
5884 mut self,
5885 v: T,
5886 ) -> Self {
5887 self.output = v.into();
5888 self
5889 }
5890
5891 /// The value of [output][crate::model::RecognitionOutputConfig::output]
5892 /// if it holds a `GcsOutputConfig`, `None` if the field is not set or
5893 /// holds a different branch.
5894 pub fn gcs_output_config(
5895 &self,
5896 ) -> std::option::Option<&std::boxed::Box<crate::model::GcsOutputConfig>> {
5897 #[allow(unreachable_patterns)]
5898 self.output.as_ref().and_then(|v| match v {
5899 crate::model::recognition_output_config::Output::GcsOutputConfig(v) => {
5900 std::option::Option::Some(v)
5901 }
5902 _ => std::option::Option::None,
5903 })
5904 }
5905
5906 /// Sets the value of [output][crate::model::RecognitionOutputConfig::output]
5907 /// to hold a `GcsOutputConfig`.
5908 ///
5909 /// Note that all the setters affecting `output` are
5910 /// mutually exclusive.
5911 ///
5912 /// # Example
5913 /// ```ignore,no_run
5914 /// # use google_cloud_speech_v2::model::RecognitionOutputConfig;
5915 /// use google_cloud_speech_v2::model::GcsOutputConfig;
5916 /// let x = RecognitionOutputConfig::new().set_gcs_output_config(GcsOutputConfig::default()/* use setters */);
5917 /// assert!(x.gcs_output_config().is_some());
5918 /// assert!(x.inline_response_config().is_none());
5919 /// ```
5920 pub fn set_gcs_output_config<
5921 T: std::convert::Into<std::boxed::Box<crate::model::GcsOutputConfig>>,
5922 >(
5923 mut self,
5924 v: T,
5925 ) -> Self {
5926 self.output = std::option::Option::Some(
5927 crate::model::recognition_output_config::Output::GcsOutputConfig(v.into()),
5928 );
5929 self
5930 }
5931
5932 /// The value of [output][crate::model::RecognitionOutputConfig::output]
5933 /// if it holds a `InlineResponseConfig`, `None` if the field is not set or
5934 /// holds a different branch.
5935 pub fn inline_response_config(
5936 &self,
5937 ) -> std::option::Option<&std::boxed::Box<crate::model::InlineOutputConfig>> {
5938 #[allow(unreachable_patterns)]
5939 self.output.as_ref().and_then(|v| match v {
5940 crate::model::recognition_output_config::Output::InlineResponseConfig(v) => {
5941 std::option::Option::Some(v)
5942 }
5943 _ => std::option::Option::None,
5944 })
5945 }
5946
5947 /// Sets the value of [output][crate::model::RecognitionOutputConfig::output]
5948 /// to hold a `InlineResponseConfig`.
5949 ///
5950 /// Note that all the setters affecting `output` are
5951 /// mutually exclusive.
5952 ///
5953 /// # Example
5954 /// ```ignore,no_run
5955 /// # use google_cloud_speech_v2::model::RecognitionOutputConfig;
5956 /// use google_cloud_speech_v2::model::InlineOutputConfig;
5957 /// let x = RecognitionOutputConfig::new().set_inline_response_config(InlineOutputConfig::default()/* use setters */);
5958 /// assert!(x.inline_response_config().is_some());
5959 /// assert!(x.gcs_output_config().is_none());
5960 /// ```
5961 pub fn set_inline_response_config<
5962 T: std::convert::Into<std::boxed::Box<crate::model::InlineOutputConfig>>,
5963 >(
5964 mut self,
5965 v: T,
5966 ) -> Self {
5967 self.output = std::option::Option::Some(
5968 crate::model::recognition_output_config::Output::InlineResponseConfig(v.into()),
5969 );
5970 self
5971 }
5972}
5973
impl wkt::message::Message for RecognitionOutputConfig {
    /// The fully qualified protobuf message name, in type-URL form.
    fn typename() -> &'static str {
        "type.googleapis.com/google.cloud.speech.v2.RecognitionOutputConfig"
    }
}
5979
/// Defines additional types related to [RecognitionOutputConfig].
pub mod recognition_output_config {
    #[allow(unused_imports)]
    use super::*;

    /// The destination for recognition results; stored in
    /// [RecognitionOutputConfig::output][crate::model::RecognitionOutputConfig::output].
    #[derive(Clone, Debug, PartialEq)]
    #[non_exhaustive]
    pub enum Output {
        /// If this message is populated, recognition results are written to the
        /// provided Google Cloud Storage URI.
        GcsOutputConfig(std::boxed::Box<crate::model::GcsOutputConfig>),
        /// If this message is populated, recognition results are provided in the
        /// [BatchRecognizeResponse][google.cloud.speech.v2.BatchRecognizeResponse]
        /// message of the Operation when completed. This is only supported when
        /// calling [BatchRecognize][google.cloud.speech.v2.Speech.BatchRecognize]
        /// with just one audio file.
        ///
        /// [google.cloud.speech.v2.BatchRecognizeResponse]: crate::model::BatchRecognizeResponse
        /// [google.cloud.speech.v2.Speech.BatchRecognize]: crate::client::Speech::batch_recognize
        InlineResponseConfig(std::boxed::Box<crate::model::InlineOutputConfig>),
    }
}
6002
/// Response message for
/// [BatchRecognize][google.cloud.speech.v2.Speech.BatchRecognize] that is
/// packaged into a longrunning [Operation][google.longrunning.Operation].
///
/// [google.cloud.speech.v2.Speech.BatchRecognize]: crate::client::Speech::batch_recognize
/// [google.longrunning.Operation]: google_cloud_longrunning::model::Operation
#[derive(Clone, Default, PartialEq)]
#[non_exhaustive]
pub struct BatchRecognizeResponse {
    /// Map from filename to the final result for that file.
    pub results:
        std::collections::HashMap<std::string::String, crate::model::BatchRecognizeFileResult>,

    /// When available, billed audio seconds for the corresponding request.
    pub total_billed_duration: std::option::Option<wkt::Duration>,

    /// Wire fields not modeled by this client version; presumably retained so
    /// they round-trip — confirm in the `serialize`/`deserialize` modules.
    pub(crate) _unknown_fields: serde_json::Map<std::string::String, serde_json::Value>,
}
6021
6022impl BatchRecognizeResponse {
6023 pub fn new() -> Self {
6024 std::default::Default::default()
6025 }
6026
6027 /// Sets the value of [results][crate::model::BatchRecognizeResponse::results].
6028 ///
6029 /// # Example
6030 /// ```ignore,no_run
6031 /// # use google_cloud_speech_v2::model::BatchRecognizeResponse;
6032 /// use google_cloud_speech_v2::model::BatchRecognizeFileResult;
6033 /// let x = BatchRecognizeResponse::new().set_results([
6034 /// ("key0", BatchRecognizeFileResult::default()/* use setters */),
6035 /// ("key1", BatchRecognizeFileResult::default()/* use (different) setters */),
6036 /// ]);
6037 /// ```
6038 pub fn set_results<T, K, V>(mut self, v: T) -> Self
6039 where
6040 T: std::iter::IntoIterator<Item = (K, V)>,
6041 K: std::convert::Into<std::string::String>,
6042 V: std::convert::Into<crate::model::BatchRecognizeFileResult>,
6043 {
6044 use std::iter::Iterator;
6045 self.results = v.into_iter().map(|(k, v)| (k.into(), v.into())).collect();
6046 self
6047 }
6048
6049 /// Sets the value of [total_billed_duration][crate::model::BatchRecognizeResponse::total_billed_duration].
6050 ///
6051 /// # Example
6052 /// ```ignore,no_run
6053 /// # use google_cloud_speech_v2::model::BatchRecognizeResponse;
6054 /// use wkt::Duration;
6055 /// let x = BatchRecognizeResponse::new().set_total_billed_duration(Duration::default()/* use setters */);
6056 /// ```
6057 pub fn set_total_billed_duration<T>(mut self, v: T) -> Self
6058 where
6059 T: std::convert::Into<wkt::Duration>,
6060 {
6061 self.total_billed_duration = std::option::Option::Some(v.into());
6062 self
6063 }
6064
6065 /// Sets or clears the value of [total_billed_duration][crate::model::BatchRecognizeResponse::total_billed_duration].
6066 ///
6067 /// # Example
6068 /// ```ignore,no_run
6069 /// # use google_cloud_speech_v2::model::BatchRecognizeResponse;
6070 /// use wkt::Duration;
6071 /// let x = BatchRecognizeResponse::new().set_or_clear_total_billed_duration(Some(Duration::default()/* use setters */));
6072 /// let x = BatchRecognizeResponse::new().set_or_clear_total_billed_duration(None::<Duration>);
6073 /// ```
6074 pub fn set_or_clear_total_billed_duration<T>(mut self, v: std::option::Option<T>) -> Self
6075 where
6076 T: std::convert::Into<wkt::Duration>,
6077 {
6078 self.total_billed_duration = v.map(|x| x.into());
6079 self
6080 }
6081}
6082
impl wkt::message::Message for BatchRecognizeResponse {
    /// The fully qualified protobuf message name, in type-URL form.
    fn typename() -> &'static str {
        "type.googleapis.com/google.cloud.speech.v2.BatchRecognizeResponse"
    }
}
6088
/// Output type for Cloud Storage of BatchRecognize transcripts. Though this
/// proto isn't returned in this API anywhere, the Cloud Storage transcripts will
/// be this proto serialized and should be parsed as such.
#[derive(Clone, Default, PartialEq)]
#[non_exhaustive]
pub struct BatchRecognizeResults {
    /// Sequential list of transcription results corresponding to sequential
    /// portions of audio.
    pub results: std::vec::Vec<crate::model::SpeechRecognitionResult>,

    /// Metadata about the recognition.
    pub metadata: std::option::Option<crate::model::RecognitionResponseMetadata>,

    /// Wire fields not modeled by this client version; presumably retained so
    /// they round-trip — confirm in the `serialize`/`deserialize` modules.
    pub(crate) _unknown_fields: serde_json::Map<std::string::String, serde_json::Value>,
}
6104
6105impl BatchRecognizeResults {
6106 pub fn new() -> Self {
6107 std::default::Default::default()
6108 }
6109
6110 /// Sets the value of [results][crate::model::BatchRecognizeResults::results].
6111 ///
6112 /// # Example
6113 /// ```ignore,no_run
6114 /// # use google_cloud_speech_v2::model::BatchRecognizeResults;
6115 /// use google_cloud_speech_v2::model::SpeechRecognitionResult;
6116 /// let x = BatchRecognizeResults::new()
6117 /// .set_results([
6118 /// SpeechRecognitionResult::default()/* use setters */,
6119 /// SpeechRecognitionResult::default()/* use (different) setters */,
6120 /// ]);
6121 /// ```
6122 pub fn set_results<T, V>(mut self, v: T) -> Self
6123 where
6124 T: std::iter::IntoIterator<Item = V>,
6125 V: std::convert::Into<crate::model::SpeechRecognitionResult>,
6126 {
6127 use std::iter::Iterator;
6128 self.results = v.into_iter().map(|i| i.into()).collect();
6129 self
6130 }
6131
6132 /// Sets the value of [metadata][crate::model::BatchRecognizeResults::metadata].
6133 ///
6134 /// # Example
6135 /// ```ignore,no_run
6136 /// # use google_cloud_speech_v2::model::BatchRecognizeResults;
6137 /// use google_cloud_speech_v2::model::RecognitionResponseMetadata;
6138 /// let x = BatchRecognizeResults::new().set_metadata(RecognitionResponseMetadata::default()/* use setters */);
6139 /// ```
6140 pub fn set_metadata<T>(mut self, v: T) -> Self
6141 where
6142 T: std::convert::Into<crate::model::RecognitionResponseMetadata>,
6143 {
6144 self.metadata = std::option::Option::Some(v.into());
6145 self
6146 }
6147
6148 /// Sets or clears the value of [metadata][crate::model::BatchRecognizeResults::metadata].
6149 ///
6150 /// # Example
6151 /// ```ignore,no_run
6152 /// # use google_cloud_speech_v2::model::BatchRecognizeResults;
6153 /// use google_cloud_speech_v2::model::RecognitionResponseMetadata;
6154 /// let x = BatchRecognizeResults::new().set_or_clear_metadata(Some(RecognitionResponseMetadata::default()/* use setters */));
6155 /// let x = BatchRecognizeResults::new().set_or_clear_metadata(None::<RecognitionResponseMetadata>);
6156 /// ```
6157 pub fn set_or_clear_metadata<T>(mut self, v: std::option::Option<T>) -> Self
6158 where
6159 T: std::convert::Into<crate::model::RecognitionResponseMetadata>,
6160 {
6161 self.metadata = v.map(|x| x.into());
6162 self
6163 }
6164}
6165
impl wkt::message::Message for BatchRecognizeResults {
    /// The fully qualified protobuf message name, in type-URL form.
    fn typename() -> &'static str {
        "type.googleapis.com/google.cloud.speech.v2.BatchRecognizeResults"
    }
}
6171
/// Final results written to Cloud Storage.
#[derive(Clone, Default, PartialEq)]
#[non_exhaustive]
pub struct CloudStorageResult {
    /// The Cloud Storage URI to which recognition results were written.
    pub uri: std::string::String,

    /// The Cloud Storage URI to which recognition results were written as VTT
    /// formatted captions. This is populated only when `VTT` output is requested.
    pub vtt_format_uri: std::string::String,

    /// The Cloud Storage URI to which recognition results were written as SRT
    /// formatted captions. This is populated only when `SRT` output is requested.
    pub srt_format_uri: std::string::String,

    /// Wire fields not modeled by this client version; presumably retained so
    /// they round-trip — confirm in the `serialize`/`deserialize` modules.
    pub(crate) _unknown_fields: serde_json::Map<std::string::String, serde_json::Value>,
}
6189
6190impl CloudStorageResult {
6191 pub fn new() -> Self {
6192 std::default::Default::default()
6193 }
6194
6195 /// Sets the value of [uri][crate::model::CloudStorageResult::uri].
6196 ///
6197 /// # Example
6198 /// ```ignore,no_run
6199 /// # use google_cloud_speech_v2::model::CloudStorageResult;
6200 /// let x = CloudStorageResult::new().set_uri("example");
6201 /// ```
6202 pub fn set_uri<T: std::convert::Into<std::string::String>>(mut self, v: T) -> Self {
6203 self.uri = v.into();
6204 self
6205 }
6206
6207 /// Sets the value of [vtt_format_uri][crate::model::CloudStorageResult::vtt_format_uri].
6208 ///
6209 /// # Example
6210 /// ```ignore,no_run
6211 /// # use google_cloud_speech_v2::model::CloudStorageResult;
6212 /// let x = CloudStorageResult::new().set_vtt_format_uri("example");
6213 /// ```
6214 pub fn set_vtt_format_uri<T: std::convert::Into<std::string::String>>(mut self, v: T) -> Self {
6215 self.vtt_format_uri = v.into();
6216 self
6217 }
6218
6219 /// Sets the value of [srt_format_uri][crate::model::CloudStorageResult::srt_format_uri].
6220 ///
6221 /// # Example
6222 /// ```ignore,no_run
6223 /// # use google_cloud_speech_v2::model::CloudStorageResult;
6224 /// let x = CloudStorageResult::new().set_srt_format_uri("example");
6225 /// ```
6226 pub fn set_srt_format_uri<T: std::convert::Into<std::string::String>>(mut self, v: T) -> Self {
6227 self.srt_format_uri = v.into();
6228 self
6229 }
6230}
6231
impl wkt::message::Message for CloudStorageResult {
    /// The fully qualified protobuf message name, in type-URL form.
    fn typename() -> &'static str {
        "type.googleapis.com/google.cloud.speech.v2.CloudStorageResult"
    }
}
6237
/// Final results returned inline in the recognition response.
#[derive(Clone, Default, PartialEq)]
#[non_exhaustive]
pub struct InlineResult {
    /// The transcript for the audio file.
    pub transcript: std::option::Option<crate::model::BatchRecognizeResults>,

    /// The transcript for the audio file as VTT formatted captions. This is
    /// populated only when `VTT` output is requested.
    pub vtt_captions: std::string::String,

    /// The transcript for the audio file as SRT formatted captions. This is
    /// populated only when `SRT` output is requested.
    pub srt_captions: std::string::String,

    /// Wire fields not modeled by this client version; presumably retained so
    /// they round-trip — confirm in the `serialize`/`deserialize` modules.
    pub(crate) _unknown_fields: serde_json::Map<std::string::String, serde_json::Value>,
}
6255
6256impl InlineResult {
6257 pub fn new() -> Self {
6258 std::default::Default::default()
6259 }
6260
6261 /// Sets the value of [transcript][crate::model::InlineResult::transcript].
6262 ///
6263 /// # Example
6264 /// ```ignore,no_run
6265 /// # use google_cloud_speech_v2::model::InlineResult;
6266 /// use google_cloud_speech_v2::model::BatchRecognizeResults;
6267 /// let x = InlineResult::new().set_transcript(BatchRecognizeResults::default()/* use setters */);
6268 /// ```
6269 pub fn set_transcript<T>(mut self, v: T) -> Self
6270 where
6271 T: std::convert::Into<crate::model::BatchRecognizeResults>,
6272 {
6273 self.transcript = std::option::Option::Some(v.into());
6274 self
6275 }
6276
6277 /// Sets or clears the value of [transcript][crate::model::InlineResult::transcript].
6278 ///
6279 /// # Example
6280 /// ```ignore,no_run
6281 /// # use google_cloud_speech_v2::model::InlineResult;
6282 /// use google_cloud_speech_v2::model::BatchRecognizeResults;
6283 /// let x = InlineResult::new().set_or_clear_transcript(Some(BatchRecognizeResults::default()/* use setters */));
6284 /// let x = InlineResult::new().set_or_clear_transcript(None::<BatchRecognizeResults>);
6285 /// ```
6286 pub fn set_or_clear_transcript<T>(mut self, v: std::option::Option<T>) -> Self
6287 where
6288 T: std::convert::Into<crate::model::BatchRecognizeResults>,
6289 {
6290 self.transcript = v.map(|x| x.into());
6291 self
6292 }
6293
6294 /// Sets the value of [vtt_captions][crate::model::InlineResult::vtt_captions].
6295 ///
6296 /// # Example
6297 /// ```ignore,no_run
6298 /// # use google_cloud_speech_v2::model::InlineResult;
6299 /// let x = InlineResult::new().set_vtt_captions("example");
6300 /// ```
6301 pub fn set_vtt_captions<T: std::convert::Into<std::string::String>>(mut self, v: T) -> Self {
6302 self.vtt_captions = v.into();
6303 self
6304 }
6305
6306 /// Sets the value of [srt_captions][crate::model::InlineResult::srt_captions].
6307 ///
6308 /// # Example
6309 /// ```ignore,no_run
6310 /// # use google_cloud_speech_v2::model::InlineResult;
6311 /// let x = InlineResult::new().set_srt_captions("example");
6312 /// ```
6313 pub fn set_srt_captions<T: std::convert::Into<std::string::String>>(mut self, v: T) -> Self {
6314 self.srt_captions = v.into();
6315 self
6316 }
6317}
6318
impl wkt::message::Message for InlineResult {
    /// The fully qualified protobuf message name, in type-URL form.
    fn typename() -> &'static str {
        "type.googleapis.com/google.cloud.speech.v2.InlineResult"
    }
}
6324
/// Final results for a single file.
#[derive(Clone, Default, PartialEq)]
#[non_exhaustive]
pub struct BatchRecognizeFileResult {
    /// Error if one was encountered.
    pub error: std::option::Option<google_cloud_rpc::model::Status>,

    /// Metadata about the recognition.
    pub metadata: std::option::Option<crate::model::RecognitionResponseMetadata>,

    /// Deprecated. Use `cloud_storage_result.native_format_uri` instead.
    #[deprecated]
    pub uri: std::string::String,

    /// Deprecated. Use `inline_result.transcript` instead.
    #[deprecated]
    pub transcript: std::option::Option<crate::model::BatchRecognizeResults>,

    /// Where the final result was delivered; see
    /// [Result][crate::model::batch_recognize_file_result::Result] for the
    /// possible branches (Cloud Storage or inline).
    pub result: std::option::Option<crate::model::batch_recognize_file_result::Result>,

    /// Wire fields not modeled by this client version; presumably retained so
    /// they round-trip — confirm in the `serialize`/`deserialize` modules.
    pub(crate) _unknown_fields: serde_json::Map<std::string::String, serde_json::Value>,
}
6346
6347impl BatchRecognizeFileResult {
6348 pub fn new() -> Self {
6349 std::default::Default::default()
6350 }
6351
6352 /// Sets the value of [error][crate::model::BatchRecognizeFileResult::error].
6353 ///
6354 /// # Example
6355 /// ```ignore,no_run
6356 /// # use google_cloud_speech_v2::model::BatchRecognizeFileResult;
6357 /// use google_cloud_rpc::model::Status;
6358 /// let x = BatchRecognizeFileResult::new().set_error(Status::default()/* use setters */);
6359 /// ```
6360 pub fn set_error<T>(mut self, v: T) -> Self
6361 where
6362 T: std::convert::Into<google_cloud_rpc::model::Status>,
6363 {
6364 self.error = std::option::Option::Some(v.into());
6365 self
6366 }
6367
6368 /// Sets or clears the value of [error][crate::model::BatchRecognizeFileResult::error].
6369 ///
6370 /// # Example
6371 /// ```ignore,no_run
6372 /// # use google_cloud_speech_v2::model::BatchRecognizeFileResult;
6373 /// use google_cloud_rpc::model::Status;
6374 /// let x = BatchRecognizeFileResult::new().set_or_clear_error(Some(Status::default()/* use setters */));
6375 /// let x = BatchRecognizeFileResult::new().set_or_clear_error(None::<Status>);
6376 /// ```
6377 pub fn set_or_clear_error<T>(mut self, v: std::option::Option<T>) -> Self
6378 where
6379 T: std::convert::Into<google_cloud_rpc::model::Status>,
6380 {
6381 self.error = v.map(|x| x.into());
6382 self
6383 }
6384
6385 /// Sets the value of [metadata][crate::model::BatchRecognizeFileResult::metadata].
6386 ///
6387 /// # Example
6388 /// ```ignore,no_run
6389 /// # use google_cloud_speech_v2::model::BatchRecognizeFileResult;
6390 /// use google_cloud_speech_v2::model::RecognitionResponseMetadata;
6391 /// let x = BatchRecognizeFileResult::new().set_metadata(RecognitionResponseMetadata::default()/* use setters */);
6392 /// ```
6393 pub fn set_metadata<T>(mut self, v: T) -> Self
6394 where
6395 T: std::convert::Into<crate::model::RecognitionResponseMetadata>,
6396 {
6397 self.metadata = std::option::Option::Some(v.into());
6398 self
6399 }
6400
6401 /// Sets or clears the value of [metadata][crate::model::BatchRecognizeFileResult::metadata].
6402 ///
6403 /// # Example
6404 /// ```ignore,no_run
6405 /// # use google_cloud_speech_v2::model::BatchRecognizeFileResult;
6406 /// use google_cloud_speech_v2::model::RecognitionResponseMetadata;
6407 /// let x = BatchRecognizeFileResult::new().set_or_clear_metadata(Some(RecognitionResponseMetadata::default()/* use setters */));
6408 /// let x = BatchRecognizeFileResult::new().set_or_clear_metadata(None::<RecognitionResponseMetadata>);
6409 /// ```
6410 pub fn set_or_clear_metadata<T>(mut self, v: std::option::Option<T>) -> Self
6411 where
6412 T: std::convert::Into<crate::model::RecognitionResponseMetadata>,
6413 {
6414 self.metadata = v.map(|x| x.into());
6415 self
6416 }
6417
6418 /// Sets the value of [uri][crate::model::BatchRecognizeFileResult::uri].
6419 ///
6420 /// # Example
6421 /// ```ignore,no_run
6422 /// # use google_cloud_speech_v2::model::BatchRecognizeFileResult;
6423 /// let x = BatchRecognizeFileResult::new().set_uri("example");
6424 /// ```
6425 #[deprecated]
6426 pub fn set_uri<T: std::convert::Into<std::string::String>>(mut self, v: T) -> Self {
6427 self.uri = v.into();
6428 self
6429 }
6430
6431 /// Sets the value of [transcript][crate::model::BatchRecognizeFileResult::transcript].
6432 ///
6433 /// # Example
6434 /// ```ignore,no_run
6435 /// # use google_cloud_speech_v2::model::BatchRecognizeFileResult;
6436 /// use google_cloud_speech_v2::model::BatchRecognizeResults;
6437 /// let x = BatchRecognizeFileResult::new().set_transcript(BatchRecognizeResults::default()/* use setters */);
6438 /// ```
6439 #[deprecated]
6440 pub fn set_transcript<T>(mut self, v: T) -> Self
6441 where
6442 T: std::convert::Into<crate::model::BatchRecognizeResults>,
6443 {
6444 self.transcript = std::option::Option::Some(v.into());
6445 self
6446 }
6447
6448 /// Sets or clears the value of [transcript][crate::model::BatchRecognizeFileResult::transcript].
6449 ///
6450 /// # Example
6451 /// ```ignore,no_run
6452 /// # use google_cloud_speech_v2::model::BatchRecognizeFileResult;
6453 /// use google_cloud_speech_v2::model::BatchRecognizeResults;
6454 /// let x = BatchRecognizeFileResult::new().set_or_clear_transcript(Some(BatchRecognizeResults::default()/* use setters */));
6455 /// let x = BatchRecognizeFileResult::new().set_or_clear_transcript(None::<BatchRecognizeResults>);
6456 /// ```
6457 #[deprecated]
6458 pub fn set_or_clear_transcript<T>(mut self, v: std::option::Option<T>) -> Self
6459 where
6460 T: std::convert::Into<crate::model::BatchRecognizeResults>,
6461 {
6462 self.transcript = v.map(|x| x.into());
6463 self
6464 }
6465
6466 /// Sets the value of [result][crate::model::BatchRecognizeFileResult::result].
6467 ///
6468 /// Note that all the setters affecting `result` are mutually
6469 /// exclusive.
6470 ///
6471 /// # Example
6472 /// ```ignore,no_run
6473 /// # use google_cloud_speech_v2::model::BatchRecognizeFileResult;
6474 /// use google_cloud_speech_v2::model::CloudStorageResult;
6475 /// let x = BatchRecognizeFileResult::new().set_result(Some(
6476 /// google_cloud_speech_v2::model::batch_recognize_file_result::Result::CloudStorageResult(CloudStorageResult::default().into())));
6477 /// ```
6478 pub fn set_result<
6479 T: std::convert::Into<std::option::Option<crate::model::batch_recognize_file_result::Result>>,
6480 >(
6481 mut self,
6482 v: T,
6483 ) -> Self {
6484 self.result = v.into();
6485 self
6486 }
6487
6488 /// The value of [result][crate::model::BatchRecognizeFileResult::result]
6489 /// if it holds a `CloudStorageResult`, `None` if the field is not set or
6490 /// holds a different branch.
6491 pub fn cloud_storage_result(
6492 &self,
6493 ) -> std::option::Option<&std::boxed::Box<crate::model::CloudStorageResult>> {
6494 #[allow(unreachable_patterns)]
6495 self.result.as_ref().and_then(|v| match v {
6496 crate::model::batch_recognize_file_result::Result::CloudStorageResult(v) => {
6497 std::option::Option::Some(v)
6498 }
6499 _ => std::option::Option::None,
6500 })
6501 }
6502
6503 /// Sets the value of [result][crate::model::BatchRecognizeFileResult::result]
6504 /// to hold a `CloudStorageResult`.
6505 ///
6506 /// Note that all the setters affecting `result` are
6507 /// mutually exclusive.
6508 ///
6509 /// # Example
6510 /// ```ignore,no_run
6511 /// # use google_cloud_speech_v2::model::BatchRecognizeFileResult;
6512 /// use google_cloud_speech_v2::model::CloudStorageResult;
6513 /// let x = BatchRecognizeFileResult::new().set_cloud_storage_result(CloudStorageResult::default()/* use setters */);
6514 /// assert!(x.cloud_storage_result().is_some());
6515 /// assert!(x.inline_result().is_none());
6516 /// ```
6517 pub fn set_cloud_storage_result<
6518 T: std::convert::Into<std::boxed::Box<crate::model::CloudStorageResult>>,
6519 >(
6520 mut self,
6521 v: T,
6522 ) -> Self {
6523 self.result = std::option::Option::Some(
6524 crate::model::batch_recognize_file_result::Result::CloudStorageResult(v.into()),
6525 );
6526 self
6527 }
6528
6529 /// The value of [result][crate::model::BatchRecognizeFileResult::result]
6530 /// if it holds a `InlineResult`, `None` if the field is not set or
6531 /// holds a different branch.
6532 pub fn inline_result(
6533 &self,
6534 ) -> std::option::Option<&std::boxed::Box<crate::model::InlineResult>> {
6535 #[allow(unreachable_patterns)]
6536 self.result.as_ref().and_then(|v| match v {
6537 crate::model::batch_recognize_file_result::Result::InlineResult(v) => {
6538 std::option::Option::Some(v)
6539 }
6540 _ => std::option::Option::None,
6541 })
6542 }
6543
6544 /// Sets the value of [result][crate::model::BatchRecognizeFileResult::result]
6545 /// to hold a `InlineResult`.
6546 ///
6547 /// Note that all the setters affecting `result` are
6548 /// mutually exclusive.
6549 ///
6550 /// # Example
6551 /// ```ignore,no_run
6552 /// # use google_cloud_speech_v2::model::BatchRecognizeFileResult;
6553 /// use google_cloud_speech_v2::model::InlineResult;
6554 /// let x = BatchRecognizeFileResult::new().set_inline_result(InlineResult::default()/* use setters */);
6555 /// assert!(x.inline_result().is_some());
6556 /// assert!(x.cloud_storage_result().is_none());
6557 /// ```
6558 pub fn set_inline_result<T: std::convert::Into<std::boxed::Box<crate::model::InlineResult>>>(
6559 mut self,
6560 v: T,
6561 ) -> Self {
6562 self.result = std::option::Option::Some(
6563 crate::model::batch_recognize_file_result::Result::InlineResult(v.into()),
6564 );
6565 self
6566 }
6567}
6568
// Identifies this message by its fully-qualified protobuf type URL.
impl wkt::message::Message for BatchRecognizeFileResult {
    fn typename() -> &'static str {
        "type.googleapis.com/google.cloud.speech.v2.BatchRecognizeFileResult"
    }
}
6574
/// Defines additional types related to [BatchRecognizeFileResult].
pub mod batch_recognize_file_result {
    #[allow(unused_imports)]
    use super::*;

    /// Where the recognition results for a single file were written:
    /// either to Cloud Storage or inline in the response.
    #[derive(Clone, Debug, PartialEq)]
    #[non_exhaustive]
    pub enum Result {
        /// Recognition results written to Cloud Storage. This is
        /// populated only when
        /// [GcsOutputConfig][google.cloud.speech.v2.GcsOutputConfig] is set in
        /// the
        /// [RecognitionOutputConfig][google.cloud.speech.v2.RecognitionOutputConfig].
        ///
        /// [google.cloud.speech.v2.GcsOutputConfig]: crate::model::GcsOutputConfig
        /// [google.cloud.speech.v2.RecognitionOutputConfig]: crate::model::RecognitionOutputConfig
        CloudStorageResult(std::boxed::Box<crate::model::CloudStorageResult>),
        /// Recognition results. This is populated only when
        /// [InlineOutputConfig][google.cloud.speech.v2.InlineOutputConfig] is set in
        /// the
        /// [RecognitionOutputConfig][google.cloud.speech.v2.RecognitionOutputConfig].
        ///
        /// [google.cloud.speech.v2.InlineOutputConfig]: crate::model::InlineOutputConfig
        /// [google.cloud.speech.v2.RecognitionOutputConfig]: crate::model::RecognitionOutputConfig
        InlineResult(std::boxed::Box<crate::model::InlineResult>),
    }
}
6600
/// Metadata about transcription for a single file (for example, progress
/// percent).
#[derive(Clone, Default, PartialEq)]
#[non_exhaustive]
pub struct BatchRecognizeTranscriptionMetadata {
    /// How much of the file has been transcribed so far.
    pub progress_percent: i32,

    /// Error if one was encountered.
    pub error: std::option::Option<google_cloud_rpc::model::Status>,

    /// The Cloud Storage URI to which recognition results will be written.
    pub uri: std::string::String,

    // NOTE(review): crate-internal map of JSON fields not modeled above;
    // presumably retained so unrecognized fields survive a deserialize/serialize
    // round-trip (serde impls live in the `serialize`/`deserialize` modules) — confirm.
    pub(crate) _unknown_fields: serde_json::Map<std::string::String, serde_json::Value>,
}
6617
6618impl BatchRecognizeTranscriptionMetadata {
6619 pub fn new() -> Self {
6620 std::default::Default::default()
6621 }
6622
6623 /// Sets the value of [progress_percent][crate::model::BatchRecognizeTranscriptionMetadata::progress_percent].
6624 ///
6625 /// # Example
6626 /// ```ignore,no_run
6627 /// # use google_cloud_speech_v2::model::BatchRecognizeTranscriptionMetadata;
6628 /// let x = BatchRecognizeTranscriptionMetadata::new().set_progress_percent(42);
6629 /// ```
6630 pub fn set_progress_percent<T: std::convert::Into<i32>>(mut self, v: T) -> Self {
6631 self.progress_percent = v.into();
6632 self
6633 }
6634
6635 /// Sets the value of [error][crate::model::BatchRecognizeTranscriptionMetadata::error].
6636 ///
6637 /// # Example
6638 /// ```ignore,no_run
6639 /// # use google_cloud_speech_v2::model::BatchRecognizeTranscriptionMetadata;
6640 /// use google_cloud_rpc::model::Status;
6641 /// let x = BatchRecognizeTranscriptionMetadata::new().set_error(Status::default()/* use setters */);
6642 /// ```
6643 pub fn set_error<T>(mut self, v: T) -> Self
6644 where
6645 T: std::convert::Into<google_cloud_rpc::model::Status>,
6646 {
6647 self.error = std::option::Option::Some(v.into());
6648 self
6649 }
6650
6651 /// Sets or clears the value of [error][crate::model::BatchRecognizeTranscriptionMetadata::error].
6652 ///
6653 /// # Example
6654 /// ```ignore,no_run
6655 /// # use google_cloud_speech_v2::model::BatchRecognizeTranscriptionMetadata;
6656 /// use google_cloud_rpc::model::Status;
6657 /// let x = BatchRecognizeTranscriptionMetadata::new().set_or_clear_error(Some(Status::default()/* use setters */));
6658 /// let x = BatchRecognizeTranscriptionMetadata::new().set_or_clear_error(None::<Status>);
6659 /// ```
6660 pub fn set_or_clear_error<T>(mut self, v: std::option::Option<T>) -> Self
6661 where
6662 T: std::convert::Into<google_cloud_rpc::model::Status>,
6663 {
6664 self.error = v.map(|x| x.into());
6665 self
6666 }
6667
6668 /// Sets the value of [uri][crate::model::BatchRecognizeTranscriptionMetadata::uri].
6669 ///
6670 /// # Example
6671 /// ```ignore,no_run
6672 /// # use google_cloud_speech_v2::model::BatchRecognizeTranscriptionMetadata;
6673 /// let x = BatchRecognizeTranscriptionMetadata::new().set_uri("example");
6674 /// ```
6675 pub fn set_uri<T: std::convert::Into<std::string::String>>(mut self, v: T) -> Self {
6676 self.uri = v.into();
6677 self
6678 }
6679}
6680
// Identifies this message by its fully-qualified protobuf type URL.
impl wkt::message::Message for BatchRecognizeTranscriptionMetadata {
    fn typename() -> &'static str {
        "type.googleapis.com/google.cloud.speech.v2.BatchRecognizeTranscriptionMetadata"
    }
}
6686
/// Operation metadata for
/// [BatchRecognize][google.cloud.speech.v2.Speech.BatchRecognize].
///
/// [google.cloud.speech.v2.Speech.BatchRecognize]: crate::client::Speech::batch_recognize
#[derive(Clone, Default, PartialEq)]
#[non_exhaustive]
pub struct BatchRecognizeMetadata {
    /// Map from provided filename to the transcription metadata for that file.
    pub transcription_metadata: std::collections::HashMap<
        std::string::String,
        crate::model::BatchRecognizeTranscriptionMetadata,
    >,

    // NOTE(review): crate-internal map of JSON fields not modeled above;
    // presumably retained so unrecognized fields survive a deserialize/serialize
    // round-trip (serde impls live in the `serialize`/`deserialize` modules) — confirm.
    pub(crate) _unknown_fields: serde_json::Map<std::string::String, serde_json::Value>,
}
6702
6703impl BatchRecognizeMetadata {
6704 pub fn new() -> Self {
6705 std::default::Default::default()
6706 }
6707
6708 /// Sets the value of [transcription_metadata][crate::model::BatchRecognizeMetadata::transcription_metadata].
6709 ///
6710 /// # Example
6711 /// ```ignore,no_run
6712 /// # use google_cloud_speech_v2::model::BatchRecognizeMetadata;
6713 /// use google_cloud_speech_v2::model::BatchRecognizeTranscriptionMetadata;
6714 /// let x = BatchRecognizeMetadata::new().set_transcription_metadata([
6715 /// ("key0", BatchRecognizeTranscriptionMetadata::default()/* use setters */),
6716 /// ("key1", BatchRecognizeTranscriptionMetadata::default()/* use (different) setters */),
6717 /// ]);
6718 /// ```
6719 pub fn set_transcription_metadata<T, K, V>(mut self, v: T) -> Self
6720 where
6721 T: std::iter::IntoIterator<Item = (K, V)>,
6722 K: std::convert::Into<std::string::String>,
6723 V: std::convert::Into<crate::model::BatchRecognizeTranscriptionMetadata>,
6724 {
6725 use std::iter::Iterator;
6726 self.transcription_metadata = v.into_iter().map(|(k, v)| (k.into(), v.into())).collect();
6727 self
6728 }
6729}
6730
// Identifies this message by its fully-qualified protobuf type URL.
impl wkt::message::Message for BatchRecognizeMetadata {
    fn typename() -> &'static str {
        "type.googleapis.com/google.cloud.speech.v2.BatchRecognizeMetadata"
    }
}
6736
/// Metadata about a single file in a batch for BatchRecognize.
#[derive(Clone, Default, PartialEq)]
#[non_exhaustive]
pub struct BatchRecognizeFileMetadata {
    /// Features and audio metadata to use for the Automatic Speech Recognition.
    /// This field in combination with the
    /// [config_mask][google.cloud.speech.v2.BatchRecognizeFileMetadata.config_mask]
    /// field can be used to override parts of the
    /// [default_recognition_config][google.cloud.speech.v2.Recognizer.default_recognition_config]
    /// of the Recognizer resource as well as the
    /// [config][google.cloud.speech.v2.BatchRecognizeRequest.config] at the
    /// request level.
    ///
    /// [google.cloud.speech.v2.BatchRecognizeFileMetadata.config_mask]: crate::model::BatchRecognizeFileMetadata::config_mask
    /// [google.cloud.speech.v2.BatchRecognizeRequest.config]: crate::model::BatchRecognizeRequest::config
    /// [google.cloud.speech.v2.Recognizer.default_recognition_config]: crate::model::Recognizer::default_recognition_config
    pub config: std::option::Option<crate::model::RecognitionConfig>,

    /// The list of fields in
    /// [config][google.cloud.speech.v2.BatchRecognizeFileMetadata.config] that
    /// override the values in the
    /// [default_recognition_config][google.cloud.speech.v2.Recognizer.default_recognition_config]
    /// of the recognizer during this recognition request. If no mask is provided,
    /// all non-default valued fields in
    /// [config][google.cloud.speech.v2.BatchRecognizeFileMetadata.config] override
    /// the values in the recognizer for this recognition request. If a mask is
    /// provided, only the fields listed in the mask override the config in the
    /// recognizer for this recognition request. If a wildcard (`*`) is provided,
    /// [config][google.cloud.speech.v2.BatchRecognizeFileMetadata.config]
    /// completely overrides and replaces the config in the recognizer for this
    /// recognition request.
    ///
    /// [google.cloud.speech.v2.BatchRecognizeFileMetadata.config]: crate::model::BatchRecognizeFileMetadata::config
    /// [google.cloud.speech.v2.Recognizer.default_recognition_config]: crate::model::Recognizer::default_recognition_config
    pub config_mask: std::option::Option<wkt::FieldMask>,

    /// The audio source, which is a Google Cloud Storage URI.
    pub audio_source: std::option::Option<crate::model::batch_recognize_file_metadata::AudioSource>,

    // NOTE(review): crate-internal map of JSON fields not modeled above;
    // presumably retained so unrecognized fields survive a deserialize/serialize
    // round-trip (serde impls live in the `serialize`/`deserialize` modules) — confirm.
    pub(crate) _unknown_fields: serde_json::Map<std::string::String, serde_json::Value>,
}
6778
6779impl BatchRecognizeFileMetadata {
6780 pub fn new() -> Self {
6781 std::default::Default::default()
6782 }
6783
6784 /// Sets the value of [config][crate::model::BatchRecognizeFileMetadata::config].
6785 ///
6786 /// # Example
6787 /// ```ignore,no_run
6788 /// # use google_cloud_speech_v2::model::BatchRecognizeFileMetadata;
6789 /// use google_cloud_speech_v2::model::RecognitionConfig;
6790 /// let x = BatchRecognizeFileMetadata::new().set_config(RecognitionConfig::default()/* use setters */);
6791 /// ```
6792 pub fn set_config<T>(mut self, v: T) -> Self
6793 where
6794 T: std::convert::Into<crate::model::RecognitionConfig>,
6795 {
6796 self.config = std::option::Option::Some(v.into());
6797 self
6798 }
6799
6800 /// Sets or clears the value of [config][crate::model::BatchRecognizeFileMetadata::config].
6801 ///
6802 /// # Example
6803 /// ```ignore,no_run
6804 /// # use google_cloud_speech_v2::model::BatchRecognizeFileMetadata;
6805 /// use google_cloud_speech_v2::model::RecognitionConfig;
6806 /// let x = BatchRecognizeFileMetadata::new().set_or_clear_config(Some(RecognitionConfig::default()/* use setters */));
6807 /// let x = BatchRecognizeFileMetadata::new().set_or_clear_config(None::<RecognitionConfig>);
6808 /// ```
6809 pub fn set_or_clear_config<T>(mut self, v: std::option::Option<T>) -> Self
6810 where
6811 T: std::convert::Into<crate::model::RecognitionConfig>,
6812 {
6813 self.config = v.map(|x| x.into());
6814 self
6815 }
6816
6817 /// Sets the value of [config_mask][crate::model::BatchRecognizeFileMetadata::config_mask].
6818 ///
6819 /// # Example
6820 /// ```ignore,no_run
6821 /// # use google_cloud_speech_v2::model::BatchRecognizeFileMetadata;
6822 /// use wkt::FieldMask;
6823 /// let x = BatchRecognizeFileMetadata::new().set_config_mask(FieldMask::default()/* use setters */);
6824 /// ```
6825 pub fn set_config_mask<T>(mut self, v: T) -> Self
6826 where
6827 T: std::convert::Into<wkt::FieldMask>,
6828 {
6829 self.config_mask = std::option::Option::Some(v.into());
6830 self
6831 }
6832
6833 /// Sets or clears the value of [config_mask][crate::model::BatchRecognizeFileMetadata::config_mask].
6834 ///
6835 /// # Example
6836 /// ```ignore,no_run
6837 /// # use google_cloud_speech_v2::model::BatchRecognizeFileMetadata;
6838 /// use wkt::FieldMask;
6839 /// let x = BatchRecognizeFileMetadata::new().set_or_clear_config_mask(Some(FieldMask::default()/* use setters */));
6840 /// let x = BatchRecognizeFileMetadata::new().set_or_clear_config_mask(None::<FieldMask>);
6841 /// ```
6842 pub fn set_or_clear_config_mask<T>(mut self, v: std::option::Option<T>) -> Self
6843 where
6844 T: std::convert::Into<wkt::FieldMask>,
6845 {
6846 self.config_mask = v.map(|x| x.into());
6847 self
6848 }
6849
6850 /// Sets the value of [audio_source][crate::model::BatchRecognizeFileMetadata::audio_source].
6851 ///
6852 /// Note that all the setters affecting `audio_source` are mutually
6853 /// exclusive.
6854 ///
6855 /// # Example
6856 /// ```ignore,no_run
6857 /// # use google_cloud_speech_v2::model::BatchRecognizeFileMetadata;
6858 /// use google_cloud_speech_v2::model::batch_recognize_file_metadata::AudioSource;
6859 /// let x = BatchRecognizeFileMetadata::new().set_audio_source(Some(AudioSource::Uri("example".to_string())));
6860 /// ```
6861 pub fn set_audio_source<
6862 T: std::convert::Into<
6863 std::option::Option<crate::model::batch_recognize_file_metadata::AudioSource>,
6864 >,
6865 >(
6866 mut self,
6867 v: T,
6868 ) -> Self {
6869 self.audio_source = v.into();
6870 self
6871 }
6872
6873 /// The value of [audio_source][crate::model::BatchRecognizeFileMetadata::audio_source]
6874 /// if it holds a `Uri`, `None` if the field is not set or
6875 /// holds a different branch.
6876 pub fn uri(&self) -> std::option::Option<&std::string::String> {
6877 #[allow(unreachable_patterns)]
6878 self.audio_source.as_ref().and_then(|v| match v {
6879 crate::model::batch_recognize_file_metadata::AudioSource::Uri(v) => {
6880 std::option::Option::Some(v)
6881 }
6882 _ => std::option::Option::None,
6883 })
6884 }
6885
6886 /// Sets the value of [audio_source][crate::model::BatchRecognizeFileMetadata::audio_source]
6887 /// to hold a `Uri`.
6888 ///
6889 /// Note that all the setters affecting `audio_source` are
6890 /// mutually exclusive.
6891 ///
6892 /// # Example
6893 /// ```ignore,no_run
6894 /// # use google_cloud_speech_v2::model::BatchRecognizeFileMetadata;
6895 /// let x = BatchRecognizeFileMetadata::new().set_uri("example");
6896 /// assert!(x.uri().is_some());
6897 /// ```
6898 pub fn set_uri<T: std::convert::Into<std::string::String>>(mut self, v: T) -> Self {
6899 self.audio_source = std::option::Option::Some(
6900 crate::model::batch_recognize_file_metadata::AudioSource::Uri(v.into()),
6901 );
6902 self
6903 }
6904}
6905
// Identifies this message by its fully-qualified protobuf type URL.
impl wkt::message::Message for BatchRecognizeFileMetadata {
    fn typename() -> &'static str {
        "type.googleapis.com/google.cloud.speech.v2.BatchRecognizeFileMetadata"
    }
}
6911
/// Defines additional types related to [BatchRecognizeFileMetadata].
pub mod batch_recognize_file_metadata {
    #[allow(unused_imports)]
    use super::*;

    /// The audio source, which is a Google Cloud Storage URI.
    ///
    /// Marked `#[non_exhaustive]`, so additional source kinds may be added
    /// in later releases without a breaking change.
    #[derive(Clone, Debug, PartialEq)]
    #[non_exhaustive]
    pub enum AudioSource {
        /// Cloud Storage URI for the audio file.
        Uri(std::string::String),
    }
}
6925
/// A streaming speech recognition result corresponding to a portion of the audio
/// that is currently being processed.
#[derive(Clone, Default, PartialEq)]
#[non_exhaustive]
pub struct StreamingRecognitionResult {
    /// May contain one or more recognition hypotheses. These alternatives are
    /// ordered in terms of accuracy, with the top (first) alternative being the
    /// most probable, as ranked by the recognizer.
    pub alternatives: std::vec::Vec<crate::model::SpeechRecognitionAlternative>,

    /// If `false`, this
    /// [StreamingRecognitionResult][google.cloud.speech.v2.StreamingRecognitionResult]
    /// represents an interim result that may change. If `true`, this is the final
    /// time the speech service will return this particular
    /// [StreamingRecognitionResult][google.cloud.speech.v2.StreamingRecognitionResult],
    /// the recognizer will not return any further hypotheses for this portion of
    /// the transcript and corresponding audio.
    ///
    /// [google.cloud.speech.v2.StreamingRecognitionResult]: crate::model::StreamingRecognitionResult
    pub is_final: bool,

    /// An estimate of the likelihood that the recognizer will not change its guess
    /// about this interim result. Values range from 0.0 (completely unstable)
    /// to 1.0 (completely stable). This field is only provided for interim results
    /// ([is_final][google.cloud.speech.v2.StreamingRecognitionResult.is_final]=`false`).
    /// The default of 0.0 is a sentinel value indicating `stability` was not set.
    ///
    /// [google.cloud.speech.v2.StreamingRecognitionResult.is_final]: crate::model::StreamingRecognitionResult::is_final
    pub stability: f32,

    /// Time offset of the end of this result relative to the beginning of the
    /// audio.
    pub result_end_offset: std::option::Option<wkt::Duration>,

    /// For multi-channel audio, this is the channel number corresponding to the
    /// recognized result for the audio from that channel.
    /// For
    /// `audio_channel_count` = `N`, its output values can range from `1` to `N`.
    pub channel_tag: i32,

    /// Output only. The [BCP-47](https://www.rfc-editor.org/rfc/bcp/bcp47.txt)
    /// language tag of the language in this result. This language code was
    /// detected to have the most likelihood of being spoken in the audio.
    pub language_code: std::string::String,

    // NOTE(review): crate-internal map of JSON fields not modeled above;
    // presumably retained so unrecognized fields survive a deserialize/serialize
    // round-trip (serde impls live in the `serialize`/`deserialize` modules) — confirm.
    pub(crate) _unknown_fields: serde_json::Map<std::string::String, serde_json::Value>,
}
6973
6974impl StreamingRecognitionResult {
6975 pub fn new() -> Self {
6976 std::default::Default::default()
6977 }
6978
6979 /// Sets the value of [alternatives][crate::model::StreamingRecognitionResult::alternatives].
6980 ///
6981 /// # Example
6982 /// ```ignore,no_run
6983 /// # use google_cloud_speech_v2::model::StreamingRecognitionResult;
6984 /// use google_cloud_speech_v2::model::SpeechRecognitionAlternative;
6985 /// let x = StreamingRecognitionResult::new()
6986 /// .set_alternatives([
6987 /// SpeechRecognitionAlternative::default()/* use setters */,
6988 /// SpeechRecognitionAlternative::default()/* use (different) setters */,
6989 /// ]);
6990 /// ```
6991 pub fn set_alternatives<T, V>(mut self, v: T) -> Self
6992 where
6993 T: std::iter::IntoIterator<Item = V>,
6994 V: std::convert::Into<crate::model::SpeechRecognitionAlternative>,
6995 {
6996 use std::iter::Iterator;
6997 self.alternatives = v.into_iter().map(|i| i.into()).collect();
6998 self
6999 }
7000
7001 /// Sets the value of [is_final][crate::model::StreamingRecognitionResult::is_final].
7002 ///
7003 /// # Example
7004 /// ```ignore,no_run
7005 /// # use google_cloud_speech_v2::model::StreamingRecognitionResult;
7006 /// let x = StreamingRecognitionResult::new().set_is_final(true);
7007 /// ```
7008 pub fn set_is_final<T: std::convert::Into<bool>>(mut self, v: T) -> Self {
7009 self.is_final = v.into();
7010 self
7011 }
7012
7013 /// Sets the value of [stability][crate::model::StreamingRecognitionResult::stability].
7014 ///
7015 /// # Example
7016 /// ```ignore,no_run
7017 /// # use google_cloud_speech_v2::model::StreamingRecognitionResult;
7018 /// let x = StreamingRecognitionResult::new().set_stability(42.0);
7019 /// ```
7020 pub fn set_stability<T: std::convert::Into<f32>>(mut self, v: T) -> Self {
7021 self.stability = v.into();
7022 self
7023 }
7024
7025 /// Sets the value of [result_end_offset][crate::model::StreamingRecognitionResult::result_end_offset].
7026 ///
7027 /// # Example
7028 /// ```ignore,no_run
7029 /// # use google_cloud_speech_v2::model::StreamingRecognitionResult;
7030 /// use wkt::Duration;
7031 /// let x = StreamingRecognitionResult::new().set_result_end_offset(Duration::default()/* use setters */);
7032 /// ```
7033 pub fn set_result_end_offset<T>(mut self, v: T) -> Self
7034 where
7035 T: std::convert::Into<wkt::Duration>,
7036 {
7037 self.result_end_offset = std::option::Option::Some(v.into());
7038 self
7039 }
7040
7041 /// Sets or clears the value of [result_end_offset][crate::model::StreamingRecognitionResult::result_end_offset].
7042 ///
7043 /// # Example
7044 /// ```ignore,no_run
7045 /// # use google_cloud_speech_v2::model::StreamingRecognitionResult;
7046 /// use wkt::Duration;
7047 /// let x = StreamingRecognitionResult::new().set_or_clear_result_end_offset(Some(Duration::default()/* use setters */));
7048 /// let x = StreamingRecognitionResult::new().set_or_clear_result_end_offset(None::<Duration>);
7049 /// ```
7050 pub fn set_or_clear_result_end_offset<T>(mut self, v: std::option::Option<T>) -> Self
7051 where
7052 T: std::convert::Into<wkt::Duration>,
7053 {
7054 self.result_end_offset = v.map(|x| x.into());
7055 self
7056 }
7057
7058 /// Sets the value of [channel_tag][crate::model::StreamingRecognitionResult::channel_tag].
7059 ///
7060 /// # Example
7061 /// ```ignore,no_run
7062 /// # use google_cloud_speech_v2::model::StreamingRecognitionResult;
7063 /// let x = StreamingRecognitionResult::new().set_channel_tag(42);
7064 /// ```
7065 pub fn set_channel_tag<T: std::convert::Into<i32>>(mut self, v: T) -> Self {
7066 self.channel_tag = v.into();
7067 self
7068 }
7069
7070 /// Sets the value of [language_code][crate::model::StreamingRecognitionResult::language_code].
7071 ///
7072 /// # Example
7073 /// ```ignore,no_run
7074 /// # use google_cloud_speech_v2::model::StreamingRecognitionResult;
7075 /// let x = StreamingRecognitionResult::new().set_language_code("example");
7076 /// ```
7077 pub fn set_language_code<T: std::convert::Into<std::string::String>>(mut self, v: T) -> Self {
7078 self.language_code = v.into();
7079 self
7080 }
7081}
7082
// Identifies this message by its fully-qualified protobuf type URL.
impl wkt::message::Message for StreamingRecognitionResult {
    fn typename() -> &'static str {
        "type.googleapis.com/google.cloud.speech.v2.StreamingRecognitionResult"
    }
}
7088
7089/// `StreamingRecognizeResponse` is the only message returned to the client by
7090/// `StreamingRecognize`. A series of zero or more `StreamingRecognizeResponse`
7091/// messages are streamed back to the client. If there is no recognizable
7092/// audio then no messages are streamed back to the client.
7093///
7094/// Here are some examples of `StreamingRecognizeResponse`s that might
7095/// be returned while processing audio:
7096///
7097/// 1. results { alternatives { transcript: "tube" } stability: 0.01 }
7098///
7099/// 1. results { alternatives { transcript: "to be a" } stability: 0.01 }
7100///
7101/// 1. results { alternatives { transcript: "to be" } stability: 0.9 }
7102/// results { alternatives { transcript: " or not to be" } stability: 0.01 }
7103///
7104/// 1. results { alternatives { transcript: "to be or not to be"
7105/// confidence: 0.92 }
7106/// alternatives { transcript: "to bee or not to bee" }
7107/// is_final: true }
7108///
7109/// 1. results { alternatives { transcript: " that's" } stability: 0.01 }
7110///
7111/// 1. results { alternatives { transcript: " that is" } stability: 0.9 }
7112/// results { alternatives { transcript: " the question" } stability: 0.01 }
7113///
7114/// 1. results { alternatives { transcript: " that is the question"
7115/// confidence: 0.98 }
7116/// alternatives { transcript: " that was the question" }
7117/// is_final: true }
7118///
7119///
7120/// Notes:
7121///
7122/// - Only two of the above responses #4 and #7 contain final results; they are
7123/// indicated by `is_final: true`. Concatenating these together generates the
7124/// full transcript: "to be or not to be that is the question".
7125///
7126/// - The others contain interim `results`. #3 and #6 contain two interim
7127/// `results`: the first portion has a high stability and is less likely to
7128/// change; the second portion has a low stability and is very likely to
7129/// change. A UI designer might choose to show only high stability `results`.
7130///
7131/// - The specific `stability` and `confidence` values shown above are only for
7132/// illustrative purposes. Actual values may vary.
7133///
7134/// - In each response, only one of these fields will be set:
7135/// `error`,
7136/// `speech_event_type`, or
7137/// one or more (repeated) `results`.
7138///
#[derive(Clone, Default, PartialEq)]
#[non_exhaustive]
pub struct StreamingRecognizeResponse {
    /// This repeated list contains zero or more results that
    /// correspond to consecutive portions of the audio currently being processed.
    /// It contains zero or one
    /// [is_final][google.cloud.speech.v2.StreamingRecognitionResult.is_final]=`true`
    /// result (the newly settled portion), followed by zero or more
    /// [is_final][google.cloud.speech.v2.StreamingRecognitionResult.is_final]=`false`
    /// results (the interim results).
    ///
    /// [google.cloud.speech.v2.StreamingRecognitionResult.is_final]: crate::model::StreamingRecognitionResult::is_final
    pub results: std::vec::Vec<crate::model::StreamingRecognitionResult>,

    /// Indicates the type of speech event.
    pub speech_event_type: crate::model::streaming_recognize_response::SpeechEventType,

    /// Time offset between the beginning of the audio and event emission.
    pub speech_event_offset: std::option::Option<wkt::Duration>,

    /// Metadata about the recognition.
    pub metadata: std::option::Option<crate::model::RecognitionResponseMetadata>,

    // NOTE(review): crate-internal map of JSON fields not modeled above;
    // presumably retained so unrecognized fields survive a deserialize/serialize
    // round-trip (serde impls live in the `serialize`/`deserialize` modules) — confirm.
    pub(crate) _unknown_fields: serde_json::Map<std::string::String, serde_json::Value>,
}
7164
7165impl StreamingRecognizeResponse {
7166 pub fn new() -> Self {
7167 std::default::Default::default()
7168 }
7169
7170 /// Sets the value of [results][crate::model::StreamingRecognizeResponse::results].
7171 ///
7172 /// # Example
7173 /// ```ignore,no_run
7174 /// # use google_cloud_speech_v2::model::StreamingRecognizeResponse;
7175 /// use google_cloud_speech_v2::model::StreamingRecognitionResult;
7176 /// let x = StreamingRecognizeResponse::new()
7177 /// .set_results([
7178 /// StreamingRecognitionResult::default()/* use setters */,
7179 /// StreamingRecognitionResult::default()/* use (different) setters */,
7180 /// ]);
7181 /// ```
7182 pub fn set_results<T, V>(mut self, v: T) -> Self
7183 where
7184 T: std::iter::IntoIterator<Item = V>,
7185 V: std::convert::Into<crate::model::StreamingRecognitionResult>,
7186 {
7187 use std::iter::Iterator;
7188 self.results = v.into_iter().map(|i| i.into()).collect();
7189 self
7190 }
7191
7192 /// Sets the value of [speech_event_type][crate::model::StreamingRecognizeResponse::speech_event_type].
7193 ///
7194 /// # Example
7195 /// ```ignore,no_run
7196 /// # use google_cloud_speech_v2::model::StreamingRecognizeResponse;
7197 /// use google_cloud_speech_v2::model::streaming_recognize_response::SpeechEventType;
7198 /// let x0 = StreamingRecognizeResponse::new().set_speech_event_type(SpeechEventType::EndOfSingleUtterance);
7199 /// let x1 = StreamingRecognizeResponse::new().set_speech_event_type(SpeechEventType::SpeechActivityBegin);
7200 /// let x2 = StreamingRecognizeResponse::new().set_speech_event_type(SpeechEventType::SpeechActivityEnd);
7201 /// ```
7202 pub fn set_speech_event_type<
7203 T: std::convert::Into<crate::model::streaming_recognize_response::SpeechEventType>,
7204 >(
7205 mut self,
7206 v: T,
7207 ) -> Self {
7208 self.speech_event_type = v.into();
7209 self
7210 }
7211
7212 /// Sets the value of [speech_event_offset][crate::model::StreamingRecognizeResponse::speech_event_offset].
7213 ///
7214 /// # Example
7215 /// ```ignore,no_run
7216 /// # use google_cloud_speech_v2::model::StreamingRecognizeResponse;
7217 /// use wkt::Duration;
7218 /// let x = StreamingRecognizeResponse::new().set_speech_event_offset(Duration::default()/* use setters */);
7219 /// ```
7220 pub fn set_speech_event_offset<T>(mut self, v: T) -> Self
7221 where
7222 T: std::convert::Into<wkt::Duration>,
7223 {
7224 self.speech_event_offset = std::option::Option::Some(v.into());
7225 self
7226 }
7227
7228 /// Sets or clears the value of [speech_event_offset][crate::model::StreamingRecognizeResponse::speech_event_offset].
7229 ///
7230 /// # Example
7231 /// ```ignore,no_run
7232 /// # use google_cloud_speech_v2::model::StreamingRecognizeResponse;
7233 /// use wkt::Duration;
7234 /// let x = StreamingRecognizeResponse::new().set_or_clear_speech_event_offset(Some(Duration::default()/* use setters */));
7235 /// let x = StreamingRecognizeResponse::new().set_or_clear_speech_event_offset(None::<Duration>);
7236 /// ```
7237 pub fn set_or_clear_speech_event_offset<T>(mut self, v: std::option::Option<T>) -> Self
7238 where
7239 T: std::convert::Into<wkt::Duration>,
7240 {
7241 self.speech_event_offset = v.map(|x| x.into());
7242 self
7243 }
7244
7245 /// Sets the value of [metadata][crate::model::StreamingRecognizeResponse::metadata].
7246 ///
7247 /// # Example
7248 /// ```ignore,no_run
7249 /// # use google_cloud_speech_v2::model::StreamingRecognizeResponse;
7250 /// use google_cloud_speech_v2::model::RecognitionResponseMetadata;
7251 /// let x = StreamingRecognizeResponse::new().set_metadata(RecognitionResponseMetadata::default()/* use setters */);
7252 /// ```
7253 pub fn set_metadata<T>(mut self, v: T) -> Self
7254 where
7255 T: std::convert::Into<crate::model::RecognitionResponseMetadata>,
7256 {
7257 self.metadata = std::option::Option::Some(v.into());
7258 self
7259 }
7260
7261 /// Sets or clears the value of [metadata][crate::model::StreamingRecognizeResponse::metadata].
7262 ///
7263 /// # Example
7264 /// ```ignore,no_run
7265 /// # use google_cloud_speech_v2::model::StreamingRecognizeResponse;
7266 /// use google_cloud_speech_v2::model::RecognitionResponseMetadata;
7267 /// let x = StreamingRecognizeResponse::new().set_or_clear_metadata(Some(RecognitionResponseMetadata::default()/* use setters */));
7268 /// let x = StreamingRecognizeResponse::new().set_or_clear_metadata(None::<RecognitionResponseMetadata>);
7269 /// ```
7270 pub fn set_or_clear_metadata<T>(mut self, v: std::option::Option<T>) -> Self
7271 where
7272 T: std::convert::Into<crate::model::RecognitionResponseMetadata>,
7273 {
7274 self.metadata = v.map(|x| x.into());
7275 self
7276 }
7277}
7278
// Associates this message with its protobuf type URL for the `wkt` runtime.
impl wkt::message::Message for StreamingRecognizeResponse {
    /// The fully-qualified `type.googleapis.com` type URL for this message.
    fn typename() -> &'static str {
        "type.googleapis.com/google.cloud.speech.v2.StreamingRecognizeResponse"
    }
}
7284
/// Defines additional types related to [StreamingRecognizeResponse].
pub mod streaming_recognize_response {
    #[allow(unused_imports)]
    use super::*;

    /// Indicates the type of speech event.
    ///
    /// # Working with unknown values
    ///
    /// This enum is defined as `#[non_exhaustive]` because Google Cloud may add
    /// additional enum variants at any time. Adding new variants is not considered
    /// a breaking change. Applications should write their code in anticipation of:
    ///
    /// - New values appearing in future releases of the client library, **and**
    /// - New values received dynamically, without application changes.
    ///
    /// Please consult the [Working with enums] section in the user guide for some
    /// guidelines.
    ///
    /// [Working with enums]: https://google-cloud-rust.github.io/working_with_enums.html
    #[derive(Clone, Debug, PartialEq)]
    #[non_exhaustive]
    pub enum SpeechEventType {
        /// No speech event specified.
        Unspecified,
        /// This event indicates that the server has detected the end of the user's
        /// speech utterance and expects no additional speech. Therefore, the server
        /// will not process additional audio and will close the gRPC bidirectional
        /// stream. This event is only sent if there was a force cutoff due to
        /// silence being detected early. This event is only available through the
        /// `latest_short` [model][google.cloud.speech.v2.Recognizer.model].
        ///
        /// [google.cloud.speech.v2.Recognizer.model]: crate::model::Recognizer::model
        EndOfSingleUtterance,
        /// This event indicates that the server has detected the beginning of human
        /// voice activity in the stream. This event can be returned multiple times
        /// if speech starts and stops repeatedly throughout the stream. This event
        /// is only sent if `voice_activity_events` is set to true.
        SpeechActivityBegin,
        /// This event indicates that the server has detected the end of human voice
        /// activity in the stream. This event can be returned multiple times if
        /// speech starts and stops repeatedly throughout the stream. This event is
        /// only sent if `voice_activity_events` is set to true.
        SpeechActivityEnd,
        /// If set, the enum was initialized with an unknown value.
        ///
        /// Applications can examine the value using [SpeechEventType::value] or
        /// [SpeechEventType::name].
        UnknownValue(speech_event_type::UnknownValue),
    }

    // Hidden module so `UnknownValue`'s payload stays crate-private while the
    // enum variant itself remains publicly matchable.
    #[doc(hidden)]
    pub mod speech_event_type {
        #[allow(unused_imports)]
        use super::*;
        #[derive(Clone, Debug, PartialEq)]
        pub struct UnknownValue(pub(crate) wkt::internal::UnknownEnumValue);
    }

    impl SpeechEventType {
        /// Gets the enum value.
        ///
        /// Returns `None` if the enum contains an unknown value deserialized from
        /// the string representation of enums.
        pub fn value(&self) -> std::option::Option<i32> {
            match self {
                Self::Unspecified => std::option::Option::Some(0),
                Self::EndOfSingleUtterance => std::option::Option::Some(1),
                Self::SpeechActivityBegin => std::option::Option::Some(2),
                Self::SpeechActivityEnd => std::option::Option::Some(3),
                Self::UnknownValue(u) => u.0.value(),
            }
        }

        /// Gets the enum value as a string.
        ///
        /// Returns `None` if the enum contains an unknown value deserialized from
        /// the integer representation of enums.
        pub fn name(&self) -> std::option::Option<&str> {
            match self {
                Self::Unspecified => std::option::Option::Some("SPEECH_EVENT_TYPE_UNSPECIFIED"),
                Self::EndOfSingleUtterance => std::option::Option::Some("END_OF_SINGLE_UTTERANCE"),
                Self::SpeechActivityBegin => std::option::Option::Some("SPEECH_ACTIVITY_BEGIN"),
                Self::SpeechActivityEnd => std::option::Option::Some("SPEECH_ACTIVITY_END"),
                Self::UnknownValue(u) => u.0.name(),
            }
        }
    }

    // The default is the proto zero value, i.e. `Unspecified`.
    impl std::default::Default for SpeechEventType {
        fn default() -> Self {
            use std::convert::From;
            Self::from(0)
        }
    }

    impl std::fmt::Display for SpeechEventType {
        fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::result::Result<(), std::fmt::Error> {
            wkt::internal::display_enum(f, self.name(), self.value())
        }
    }

    // Integers outside the known set are preserved as `UnknownValue`, not dropped.
    impl std::convert::From<i32> for SpeechEventType {
        fn from(value: i32) -> Self {
            match value {
                0 => Self::Unspecified,
                1 => Self::EndOfSingleUtterance,
                2 => Self::SpeechActivityBegin,
                3 => Self::SpeechActivityEnd,
                _ => Self::UnknownValue(speech_event_type::UnknownValue(
                    wkt::internal::UnknownEnumValue::Integer(value),
                )),
            }
        }
    }

    // Unrecognized names are likewise preserved, in string form.
    impl std::convert::From<&str> for SpeechEventType {
        fn from(value: &str) -> Self {
            use std::string::ToString;
            match value {
                "SPEECH_EVENT_TYPE_UNSPECIFIED" => Self::Unspecified,
                "END_OF_SINGLE_UTTERANCE" => Self::EndOfSingleUtterance,
                "SPEECH_ACTIVITY_BEGIN" => Self::SpeechActivityBegin,
                "SPEECH_ACTIVITY_END" => Self::SpeechActivityEnd,
                _ => Self::UnknownValue(speech_event_type::UnknownValue(
                    wkt::internal::UnknownEnumValue::String(value.to_string()),
                )),
            }
        }
    }

    // Known variants serialize as their integer value; unknown values serialize
    // back in whichever form (integer or string) they were captured.
    impl serde::ser::Serialize for SpeechEventType {
        fn serialize<S>(&self, serializer: S) -> std::result::Result<S::Ok, S::Error>
        where
            S: serde::Serializer,
        {
            match self {
                Self::Unspecified => serializer.serialize_i32(0),
                Self::EndOfSingleUtterance => serializer.serialize_i32(1),
                Self::SpeechActivityBegin => serializer.serialize_i32(2),
                Self::SpeechActivityEnd => serializer.serialize_i32(3),
                Self::UnknownValue(u) => u.0.serialize(serializer),
            }
        }
    }

    impl<'de> serde::de::Deserialize<'de> for SpeechEventType {
        fn deserialize<D>(deserializer: D) -> std::result::Result<Self, D::Error>
        where
            D: serde::Deserializer<'de>,
        {
            deserializer.deserialize_any(wkt::internal::EnumVisitor::<SpeechEventType>::new(
                ".google.cloud.speech.v2.StreamingRecognizeResponse.SpeechEventType",
            ))
        }
    }
}
7442
/// Message representing the config for the Speech-to-Text API. This includes an
/// optional [KMS key](https://cloud.google.com/kms/docs/resource-hierarchy#keys)
/// with which incoming data will be encrypted.
#[derive(Clone, Default, PartialEq)]
#[non_exhaustive]
pub struct Config {
    /// Output only. Identifier. The name of the config resource. There is exactly
    /// one config resource per project per location. The expected format is
    /// `projects/{project}/locations/{location}/config`.
    pub name: std::string::String,

    /// Optional. An optional [KMS key
    /// name](https://cloud.google.com/kms/docs/resource-hierarchy#keys) that if
    /// present, will be used to encrypt Speech-to-Text resources at-rest. Updating
    /// this key will not encrypt existing resources using this key; only new
    /// resources will be encrypted using this key. The expected format is
    /// `projects/{project}/locations/{location}/keyRings/{key_ring}/cryptoKeys/{crypto_key}`.
    pub kms_key_name: std::string::String,

    /// Output only. The most recent time this resource was modified.
    pub update_time: std::option::Option<wkt::Timestamp>,

    // NOTE(review): presumably holds service fields not modeled above — see the
    // crate's `serialize`/`deserialize` modules; confirm against the generator.
    pub(crate) _unknown_fields: serde_json::Map<std::string::String, serde_json::Value>,
}
7467
7468impl Config {
7469 pub fn new() -> Self {
7470 std::default::Default::default()
7471 }
7472
7473 /// Sets the value of [name][crate::model::Config::name].
7474 ///
7475 /// # Example
7476 /// ```ignore,no_run
7477 /// # use google_cloud_speech_v2::model::Config;
7478 /// let x = Config::new().set_name("example");
7479 /// ```
7480 pub fn set_name<T: std::convert::Into<std::string::String>>(mut self, v: T) -> Self {
7481 self.name = v.into();
7482 self
7483 }
7484
7485 /// Sets the value of [kms_key_name][crate::model::Config::kms_key_name].
7486 ///
7487 /// # Example
7488 /// ```ignore,no_run
7489 /// # use google_cloud_speech_v2::model::Config;
7490 /// let x = Config::new().set_kms_key_name("example");
7491 /// ```
7492 pub fn set_kms_key_name<T: std::convert::Into<std::string::String>>(mut self, v: T) -> Self {
7493 self.kms_key_name = v.into();
7494 self
7495 }
7496
7497 /// Sets the value of [update_time][crate::model::Config::update_time].
7498 ///
7499 /// # Example
7500 /// ```ignore,no_run
7501 /// # use google_cloud_speech_v2::model::Config;
7502 /// use wkt::Timestamp;
7503 /// let x = Config::new().set_update_time(Timestamp::default()/* use setters */);
7504 /// ```
7505 pub fn set_update_time<T>(mut self, v: T) -> Self
7506 where
7507 T: std::convert::Into<wkt::Timestamp>,
7508 {
7509 self.update_time = std::option::Option::Some(v.into());
7510 self
7511 }
7512
7513 /// Sets or clears the value of [update_time][crate::model::Config::update_time].
7514 ///
7515 /// # Example
7516 /// ```ignore,no_run
7517 /// # use google_cloud_speech_v2::model::Config;
7518 /// use wkt::Timestamp;
7519 /// let x = Config::new().set_or_clear_update_time(Some(Timestamp::default()/* use setters */));
7520 /// let x = Config::new().set_or_clear_update_time(None::<Timestamp>);
7521 /// ```
7522 pub fn set_or_clear_update_time<T>(mut self, v: std::option::Option<T>) -> Self
7523 where
7524 T: std::convert::Into<wkt::Timestamp>,
7525 {
7526 self.update_time = v.map(|x| x.into());
7527 self
7528 }
7529}
7530
// Associates this message with its protobuf type URL for the `wkt` runtime.
impl wkt::message::Message for Config {
    /// The fully-qualified `type.googleapis.com` type URL for this message.
    fn typename() -> &'static str {
        "type.googleapis.com/google.cloud.speech.v2.Config"
    }
}
7536
/// Request message for the
/// [GetConfig][google.cloud.speech.v2.Speech.GetConfig] method.
///
/// [google.cloud.speech.v2.Speech.GetConfig]: crate::client::Speech::get_config
#[derive(Clone, Default, PartialEq)]
#[non_exhaustive]
pub struct GetConfigRequest {
    /// Required. The name of the config to retrieve. There is exactly one config
    /// resource per project per location. The expected format is
    /// `projects/{project}/locations/{location}/config`.
    pub name: std::string::String,

    // NOTE(review): presumably holds service fields not modeled above — see the
    // crate's `serialize`/`deserialize` modules; confirm against the generator.
    pub(crate) _unknown_fields: serde_json::Map<std::string::String, serde_json::Value>,
}
7551
7552impl GetConfigRequest {
7553 pub fn new() -> Self {
7554 std::default::Default::default()
7555 }
7556
7557 /// Sets the value of [name][crate::model::GetConfigRequest::name].
7558 ///
7559 /// # Example
7560 /// ```ignore,no_run
7561 /// # use google_cloud_speech_v2::model::GetConfigRequest;
7562 /// let x = GetConfigRequest::new().set_name("example");
7563 /// ```
7564 pub fn set_name<T: std::convert::Into<std::string::String>>(mut self, v: T) -> Self {
7565 self.name = v.into();
7566 self
7567 }
7568}
7569
// Associates this message with its protobuf type URL for the `wkt` runtime.
impl wkt::message::Message for GetConfigRequest {
    /// The fully-qualified `type.googleapis.com` type URL for this message.
    fn typename() -> &'static str {
        "type.googleapis.com/google.cloud.speech.v2.GetConfigRequest"
    }
}
7575
/// Request message for the
/// [UpdateConfig][google.cloud.speech.v2.Speech.UpdateConfig] method.
///
/// [google.cloud.speech.v2.Speech.UpdateConfig]: crate::client::Speech::update_config
#[derive(Clone, Default, PartialEq)]
#[non_exhaustive]
pub struct UpdateConfigRequest {
    /// Required. The config to update.
    ///
    /// The config's `name` field is used to identify the config to be updated.
    /// The expected format is `projects/{project}/locations/{location}/config`.
    pub config: std::option::Option<crate::model::Config>,

    /// The list of fields to be updated.
    pub update_mask: std::option::Option<wkt::FieldMask>,

    // NOTE(review): presumably holds service fields not modeled above — see the
    // crate's `serialize`/`deserialize` modules; confirm against the generator.
    pub(crate) _unknown_fields: serde_json::Map<std::string::String, serde_json::Value>,
}
7594
7595impl UpdateConfigRequest {
7596 pub fn new() -> Self {
7597 std::default::Default::default()
7598 }
7599
7600 /// Sets the value of [config][crate::model::UpdateConfigRequest::config].
7601 ///
7602 /// # Example
7603 /// ```ignore,no_run
7604 /// # use google_cloud_speech_v2::model::UpdateConfigRequest;
7605 /// use google_cloud_speech_v2::model::Config;
7606 /// let x = UpdateConfigRequest::new().set_config(Config::default()/* use setters */);
7607 /// ```
7608 pub fn set_config<T>(mut self, v: T) -> Self
7609 where
7610 T: std::convert::Into<crate::model::Config>,
7611 {
7612 self.config = std::option::Option::Some(v.into());
7613 self
7614 }
7615
7616 /// Sets or clears the value of [config][crate::model::UpdateConfigRequest::config].
7617 ///
7618 /// # Example
7619 /// ```ignore,no_run
7620 /// # use google_cloud_speech_v2::model::UpdateConfigRequest;
7621 /// use google_cloud_speech_v2::model::Config;
7622 /// let x = UpdateConfigRequest::new().set_or_clear_config(Some(Config::default()/* use setters */));
7623 /// let x = UpdateConfigRequest::new().set_or_clear_config(None::<Config>);
7624 /// ```
7625 pub fn set_or_clear_config<T>(mut self, v: std::option::Option<T>) -> Self
7626 where
7627 T: std::convert::Into<crate::model::Config>,
7628 {
7629 self.config = v.map(|x| x.into());
7630 self
7631 }
7632
7633 /// Sets the value of [update_mask][crate::model::UpdateConfigRequest::update_mask].
7634 ///
7635 /// # Example
7636 /// ```ignore,no_run
7637 /// # use google_cloud_speech_v2::model::UpdateConfigRequest;
7638 /// use wkt::FieldMask;
7639 /// let x = UpdateConfigRequest::new().set_update_mask(FieldMask::default()/* use setters */);
7640 /// ```
7641 pub fn set_update_mask<T>(mut self, v: T) -> Self
7642 where
7643 T: std::convert::Into<wkt::FieldMask>,
7644 {
7645 self.update_mask = std::option::Option::Some(v.into());
7646 self
7647 }
7648
7649 /// Sets or clears the value of [update_mask][crate::model::UpdateConfigRequest::update_mask].
7650 ///
7651 /// # Example
7652 /// ```ignore,no_run
7653 /// # use google_cloud_speech_v2::model::UpdateConfigRequest;
7654 /// use wkt::FieldMask;
7655 /// let x = UpdateConfigRequest::new().set_or_clear_update_mask(Some(FieldMask::default()/* use setters */));
7656 /// let x = UpdateConfigRequest::new().set_or_clear_update_mask(None::<FieldMask>);
7657 /// ```
7658 pub fn set_or_clear_update_mask<T>(mut self, v: std::option::Option<T>) -> Self
7659 where
7660 T: std::convert::Into<wkt::FieldMask>,
7661 {
7662 self.update_mask = v.map(|x| x.into());
7663 self
7664 }
7665}
7666
// Associates this message with its protobuf type URL for the `wkt` runtime.
impl wkt::message::Message for UpdateConfigRequest {
    /// The fully-qualified `type.googleapis.com` type URL for this message.
    fn typename() -> &'static str {
        "type.googleapis.com/google.cloud.speech.v2.UpdateConfigRequest"
    }
}
7672
/// CustomClass for biasing in speech recognition. Used to define a set of words
/// or phrases that represents a common concept or theme likely to appear in your
/// audio, for example a list of passenger ship names.
#[derive(Clone, Default, PartialEq)]
#[non_exhaustive]
pub struct CustomClass {
    /// Output only. Identifier. The resource name of the CustomClass.
    /// Format:
    /// `projects/{project}/locations/{location}/customClasses/{custom_class}`.
    pub name: std::string::String,

    /// Output only. System-assigned unique identifier for the CustomClass.
    pub uid: std::string::String,

    /// Optional. User-settable, human-readable name for the CustomClass. Must be
    /// 63 characters or less.
    pub display_name: std::string::String,

    /// A collection of class items.
    pub items: std::vec::Vec<crate::model::custom_class::ClassItem>,

    /// Output only. The CustomClass lifecycle state.
    pub state: crate::model::custom_class::State,

    /// Output only. Creation time.
    pub create_time: std::option::Option<wkt::Timestamp>,

    /// Output only. The most recent time this resource was modified.
    pub update_time: std::option::Option<wkt::Timestamp>,

    /// Output only. The time at which this resource was requested for deletion.
    pub delete_time: std::option::Option<wkt::Timestamp>,

    /// Output only. The time at which this resource will be purged.
    pub expire_time: std::option::Option<wkt::Timestamp>,

    /// Optional. Allows users to store small amounts of arbitrary data.
    /// Both the key and the value must be 63 characters or less each.
    /// At most 100 annotations.
    pub annotations: std::collections::HashMap<std::string::String, std::string::String>,

    /// Output only. This checksum is computed by the server based on the value of
    /// other fields. This may be sent on update, undelete, and delete requests to
    /// ensure the client has an up-to-date value before proceeding.
    pub etag: std::string::String,

    /// Output only. Whether or not this CustomClass is in the process of being
    /// updated.
    pub reconciling: bool,

    /// Output only. The [KMS key
    /// name](https://cloud.google.com/kms/docs/resource-hierarchy#keys) with which
    /// the CustomClass is encrypted. The expected format is
    /// `projects/{project}/locations/{location}/keyRings/{key_ring}/cryptoKeys/{crypto_key}`.
    pub kms_key_name: std::string::String,

    /// Output only. The [KMS key version
    /// name](https://cloud.google.com/kms/docs/resource-hierarchy#key_versions)
    /// with which the CustomClass is encrypted. The expected format is
    /// `projects/{project}/locations/{location}/keyRings/{key_ring}/cryptoKeys/{crypto_key}/cryptoKeyVersions/{crypto_key_version}`.
    pub kms_key_version_name: std::string::String,

    // NOTE(review): presumably holds service fields not modeled above — see the
    // crate's `serialize`/`deserialize` modules; confirm against the generator.
    pub(crate) _unknown_fields: serde_json::Map<std::string::String, serde_json::Value>,
}
7737
impl CustomClass {
    /// Creates an empty [CustomClass] with every field defaulted.
    pub fn new() -> Self {
        std::default::Default::default()
    }

    /// Sets the value of [name][crate::model::CustomClass::name].
    ///
    /// # Example
    /// ```ignore,no_run
    /// # use google_cloud_speech_v2::model::CustomClass;
    /// let x = CustomClass::new().set_name("example");
    /// ```
    pub fn set_name<T: std::convert::Into<std::string::String>>(mut self, v: T) -> Self {
        self.name = v.into();
        self
    }

    /// Sets the value of [uid][crate::model::CustomClass::uid].
    ///
    /// # Example
    /// ```ignore,no_run
    /// # use google_cloud_speech_v2::model::CustomClass;
    /// let x = CustomClass::new().set_uid("example");
    /// ```
    pub fn set_uid<T: std::convert::Into<std::string::String>>(mut self, v: T) -> Self {
        self.uid = v.into();
        self
    }

    /// Sets the value of [display_name][crate::model::CustomClass::display_name].
    ///
    /// # Example
    /// ```ignore,no_run
    /// # use google_cloud_speech_v2::model::CustomClass;
    /// let x = CustomClass::new().set_display_name("example");
    /// ```
    pub fn set_display_name<T: std::convert::Into<std::string::String>>(mut self, v: T) -> Self {
        self.display_name = v.into();
        self
    }

    /// Sets the value of [items][crate::model::CustomClass::items].
    ///
    /// Note: this replaces the entire list; it does not append.
    ///
    /// # Example
    /// ```ignore,no_run
    /// # use google_cloud_speech_v2::model::CustomClass;
    /// use google_cloud_speech_v2::model::custom_class::ClassItem;
    /// let x = CustomClass::new()
    ///     .set_items([
    ///         ClassItem::default()/* use setters */,
    ///         ClassItem::default()/* use (different) setters */,
    ///     ]);
    /// ```
    pub fn set_items<T, V>(mut self, v: T) -> Self
    where
        T: std::iter::IntoIterator<Item = V>,
        V: std::convert::Into<crate::model::custom_class::ClassItem>,
    {
        use std::iter::Iterator;
        self.items = v.into_iter().map(|i| i.into()).collect();
        self
    }

    /// Sets the value of [state][crate::model::CustomClass::state].
    ///
    /// # Example
    /// ```ignore,no_run
    /// # use google_cloud_speech_v2::model::CustomClass;
    /// use google_cloud_speech_v2::model::custom_class::State;
    /// let x0 = CustomClass::new().set_state(State::Active);
    /// let x1 = CustomClass::new().set_state(State::Deleted);
    /// ```
    pub fn set_state<T: std::convert::Into<crate::model::custom_class::State>>(
        mut self,
        v: T,
    ) -> Self {
        self.state = v.into();
        self
    }

    /// Sets the value of [create_time][crate::model::CustomClass::create_time].
    ///
    /// # Example
    /// ```ignore,no_run
    /// # use google_cloud_speech_v2::model::CustomClass;
    /// use wkt::Timestamp;
    /// let x = CustomClass::new().set_create_time(Timestamp::default()/* use setters */);
    /// ```
    pub fn set_create_time<T>(mut self, v: T) -> Self
    where
        T: std::convert::Into<wkt::Timestamp>,
    {
        self.create_time = std::option::Option::Some(v.into());
        self
    }

    /// Sets or clears the value of [create_time][crate::model::CustomClass::create_time].
    ///
    /// # Example
    /// ```ignore,no_run
    /// # use google_cloud_speech_v2::model::CustomClass;
    /// use wkt::Timestamp;
    /// let x = CustomClass::new().set_or_clear_create_time(Some(Timestamp::default()/* use setters */));
    /// let x = CustomClass::new().set_or_clear_create_time(None::<Timestamp>);
    /// ```
    pub fn set_or_clear_create_time<T>(mut self, v: std::option::Option<T>) -> Self
    where
        T: std::convert::Into<wkt::Timestamp>,
    {
        self.create_time = v.map(|x| x.into());
        self
    }

    /// Sets the value of [update_time][crate::model::CustomClass::update_time].
    ///
    /// # Example
    /// ```ignore,no_run
    /// # use google_cloud_speech_v2::model::CustomClass;
    /// use wkt::Timestamp;
    /// let x = CustomClass::new().set_update_time(Timestamp::default()/* use setters */);
    /// ```
    pub fn set_update_time<T>(mut self, v: T) -> Self
    where
        T: std::convert::Into<wkt::Timestamp>,
    {
        self.update_time = std::option::Option::Some(v.into());
        self
    }

    /// Sets or clears the value of [update_time][crate::model::CustomClass::update_time].
    ///
    /// # Example
    /// ```ignore,no_run
    /// # use google_cloud_speech_v2::model::CustomClass;
    /// use wkt::Timestamp;
    /// let x = CustomClass::new().set_or_clear_update_time(Some(Timestamp::default()/* use setters */));
    /// let x = CustomClass::new().set_or_clear_update_time(None::<Timestamp>);
    /// ```
    pub fn set_or_clear_update_time<T>(mut self, v: std::option::Option<T>) -> Self
    where
        T: std::convert::Into<wkt::Timestamp>,
    {
        self.update_time = v.map(|x| x.into());
        self
    }

    /// Sets the value of [delete_time][crate::model::CustomClass::delete_time].
    ///
    /// # Example
    /// ```ignore,no_run
    /// # use google_cloud_speech_v2::model::CustomClass;
    /// use wkt::Timestamp;
    /// let x = CustomClass::new().set_delete_time(Timestamp::default()/* use setters */);
    /// ```
    pub fn set_delete_time<T>(mut self, v: T) -> Self
    where
        T: std::convert::Into<wkt::Timestamp>,
    {
        self.delete_time = std::option::Option::Some(v.into());
        self
    }

    /// Sets or clears the value of [delete_time][crate::model::CustomClass::delete_time].
    ///
    /// # Example
    /// ```ignore,no_run
    /// # use google_cloud_speech_v2::model::CustomClass;
    /// use wkt::Timestamp;
    /// let x = CustomClass::new().set_or_clear_delete_time(Some(Timestamp::default()/* use setters */));
    /// let x = CustomClass::new().set_or_clear_delete_time(None::<Timestamp>);
    /// ```
    pub fn set_or_clear_delete_time<T>(mut self, v: std::option::Option<T>) -> Self
    where
        T: std::convert::Into<wkt::Timestamp>,
    {
        self.delete_time = v.map(|x| x.into());
        self
    }

    /// Sets the value of [expire_time][crate::model::CustomClass::expire_time].
    ///
    /// # Example
    /// ```ignore,no_run
    /// # use google_cloud_speech_v2::model::CustomClass;
    /// use wkt::Timestamp;
    /// let x = CustomClass::new().set_expire_time(Timestamp::default()/* use setters */);
    /// ```
    pub fn set_expire_time<T>(mut self, v: T) -> Self
    where
        T: std::convert::Into<wkt::Timestamp>,
    {
        self.expire_time = std::option::Option::Some(v.into());
        self
    }

    /// Sets or clears the value of [expire_time][crate::model::CustomClass::expire_time].
    ///
    /// # Example
    /// ```ignore,no_run
    /// # use google_cloud_speech_v2::model::CustomClass;
    /// use wkt::Timestamp;
    /// let x = CustomClass::new().set_or_clear_expire_time(Some(Timestamp::default()/* use setters */));
    /// let x = CustomClass::new().set_or_clear_expire_time(None::<Timestamp>);
    /// ```
    pub fn set_or_clear_expire_time<T>(mut self, v: std::option::Option<T>) -> Self
    where
        T: std::convert::Into<wkt::Timestamp>,
    {
        self.expire_time = v.map(|x| x.into());
        self
    }

    /// Sets the value of [annotations][crate::model::CustomClass::annotations].
    ///
    /// Note: this replaces the entire map; it does not merge with existing entries.
    ///
    /// # Example
    /// ```ignore,no_run
    /// # use google_cloud_speech_v2::model::CustomClass;
    /// let x = CustomClass::new().set_annotations([
    ///     ("key0", "abc"),
    ///     ("key1", "xyz"),
    /// ]);
    /// ```
    pub fn set_annotations<T, K, V>(mut self, v: T) -> Self
    where
        T: std::iter::IntoIterator<Item = (K, V)>,
        K: std::convert::Into<std::string::String>,
        V: std::convert::Into<std::string::String>,
    {
        use std::iter::Iterator;
        self.annotations = v.into_iter().map(|(k, v)| (k.into(), v.into())).collect();
        self
    }

    /// Sets the value of [etag][crate::model::CustomClass::etag].
    ///
    /// # Example
    /// ```ignore,no_run
    /// # use google_cloud_speech_v2::model::CustomClass;
    /// let x = CustomClass::new().set_etag("example");
    /// ```
    pub fn set_etag<T: std::convert::Into<std::string::String>>(mut self, v: T) -> Self {
        self.etag = v.into();
        self
    }

    /// Sets the value of [reconciling][crate::model::CustomClass::reconciling].
    ///
    /// # Example
    /// ```ignore,no_run
    /// # use google_cloud_speech_v2::model::CustomClass;
    /// let x = CustomClass::new().set_reconciling(true);
    /// ```
    pub fn set_reconciling<T: std::convert::Into<bool>>(mut self, v: T) -> Self {
        self.reconciling = v.into();
        self
    }

    /// Sets the value of [kms_key_name][crate::model::CustomClass::kms_key_name].
    ///
    /// # Example
    /// ```ignore,no_run
    /// # use google_cloud_speech_v2::model::CustomClass;
    /// let x = CustomClass::new().set_kms_key_name("example");
    /// ```
    pub fn set_kms_key_name<T: std::convert::Into<std::string::String>>(mut self, v: T) -> Self {
        self.kms_key_name = v.into();
        self
    }

    /// Sets the value of [kms_key_version_name][crate::model::CustomClass::kms_key_version_name].
    ///
    /// # Example
    /// ```ignore,no_run
    /// # use google_cloud_speech_v2::model::CustomClass;
    /// let x = CustomClass::new().set_kms_key_version_name("example");
    /// ```
    pub fn set_kms_key_version_name<T: std::convert::Into<std::string::String>>(
        mut self,
        v: T,
    ) -> Self {
        self.kms_key_version_name = v.into();
        self
    }
}
8022
// Associates this message with its protobuf type URL for the `wkt` runtime.
impl wkt::message::Message for CustomClass {
    /// The fully-qualified `type.googleapis.com` type URL for this message.
    fn typename() -> &'static str {
        "type.googleapis.com/google.cloud.speech.v2.CustomClass"
    }
}
8028
8029/// Defines additional types related to [CustomClass].
8030pub mod custom_class {
8031 #[allow(unused_imports)]
8032 use super::*;
8033
8034 /// An item of the class.
8035 #[derive(Clone, Default, PartialEq)]
8036 #[non_exhaustive]
8037 pub struct ClassItem {
8038 /// The class item's value.
8039 pub value: std::string::String,
8040
8041 pub(crate) _unknown_fields: serde_json::Map<std::string::String, serde_json::Value>,
8042 }
8043
8044 impl ClassItem {
8045 pub fn new() -> Self {
8046 std::default::Default::default()
8047 }
8048
8049 /// Sets the value of [value][crate::model::custom_class::ClassItem::value].
8050 ///
8051 /// # Example
8052 /// ```ignore,no_run
8053 /// # use google_cloud_speech_v2::model::custom_class::ClassItem;
8054 /// let x = ClassItem::new().set_value("example");
8055 /// ```
8056 pub fn set_value<T: std::convert::Into<std::string::String>>(mut self, v: T) -> Self {
8057 self.value = v.into();
8058 self
8059 }
8060 }
8061
8062 impl wkt::message::Message for ClassItem {
8063 fn typename() -> &'static str {
8064 "type.googleapis.com/google.cloud.speech.v2.CustomClass.ClassItem"
8065 }
8066 }
8067
8068 /// Set of states that define the lifecycle of a CustomClass.
8069 ///
8070 /// # Working with unknown values
8071 ///
8072 /// This enum is defined as `#[non_exhaustive]` because Google Cloud may add
8073 /// additional enum variants at any time. Adding new variants is not considered
8074 /// a breaking change. Applications should write their code in anticipation of:
8075 ///
8076 /// - New values appearing in future releases of the client library, **and**
8077 /// - New values received dynamically, without application changes.
8078 ///
8079 /// Please consult the [Working with enums] section in the user guide for some
8080 /// guidelines.
8081 ///
8082 /// [Working with enums]: https://google-cloud-rust.github.io/working_with_enums.html
8083 #[derive(Clone, Debug, PartialEq)]
8084 #[non_exhaustive]
8085 pub enum State {
8086 /// Unspecified state. This is only used/useful for distinguishing
8087 /// unset values.
8088 Unspecified,
8089 /// The normal and active state.
8090 Active,
8091 /// This CustomClass has been deleted.
8092 Deleted,
8093 /// If set, the enum was initialized with an unknown value.
8094 ///
8095 /// Applications can examine the value using [State::value] or
8096 /// [State::name].
8097 UnknownValue(state::UnknownValue),
8098 }
8099
8100 #[doc(hidden)]
8101 pub mod state {
8102 #[allow(unused_imports)]
8103 use super::*;
8104 #[derive(Clone, Debug, PartialEq)]
8105 pub struct UnknownValue(pub(crate) wkt::internal::UnknownEnumValue);
8106 }
8107
8108 impl State {
8109 /// Gets the enum value.
8110 ///
8111 /// Returns `None` if the enum contains an unknown value deserialized from
8112 /// the string representation of enums.
8113 pub fn value(&self) -> std::option::Option<i32> {
8114 match self {
8115 Self::Unspecified => std::option::Option::Some(0),
8116 Self::Active => std::option::Option::Some(2),
8117 Self::Deleted => std::option::Option::Some(4),
8118 Self::UnknownValue(u) => u.0.value(),
8119 }
8120 }
8121
8122 /// Gets the enum value as a string.
8123 ///
8124 /// Returns `None` if the enum contains an unknown value deserialized from
8125 /// the integer representation of enums.
8126 pub fn name(&self) -> std::option::Option<&str> {
8127 match self {
8128 Self::Unspecified => std::option::Option::Some("STATE_UNSPECIFIED"),
8129 Self::Active => std::option::Option::Some("ACTIVE"),
8130 Self::Deleted => std::option::Option::Some("DELETED"),
8131 Self::UnknownValue(u) => u.0.name(),
8132 }
8133 }
8134 }
8135
8136 impl std::default::Default for State {
8137 fn default() -> Self {
8138 use std::convert::From;
8139 Self::from(0)
8140 }
8141 }
8142
impl std::fmt::Display for State {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::result::Result<(), std::fmt::Error> {
        // Delegate to the shared helper, passing both the string and the
        // integer representation (either may be `None` for unknown values).
        wkt::internal::display_enum(f, self.name(), self.value())
    }
}
8148
8149 impl std::convert::From<i32> for State {
8150 fn from(value: i32) -> Self {
8151 match value {
8152 0 => Self::Unspecified,
8153 2 => Self::Active,
8154 4 => Self::Deleted,
8155 _ => Self::UnknownValue(state::UnknownValue(
8156 wkt::internal::UnknownEnumValue::Integer(value),
8157 )),
8158 }
8159 }
8160 }
8161
8162 impl std::convert::From<&str> for State {
8163 fn from(value: &str) -> Self {
8164 use std::string::ToString;
8165 match value {
8166 "STATE_UNSPECIFIED" => Self::Unspecified,
8167 "ACTIVE" => Self::Active,
8168 "DELETED" => Self::Deleted,
8169 _ => Self::UnknownValue(state::UnknownValue(
8170 wkt::internal::UnknownEnumValue::String(value.to_string()),
8171 )),
8172 }
8173 }
8174 }
8175
impl serde::ser::Serialize for State {
    fn serialize<S>(&self, serializer: S) -> std::result::Result<S::Ok, S::Error>
    where
        S: serde::Serializer,
    {
        // Known variants serialize as their integer wire value; unknown
        // values delegate to the wrapped `UnknownEnumValue`.
        match self {
            Self::Unspecified => serializer.serialize_i32(0),
            Self::Active => serializer.serialize_i32(2),
            Self::Deleted => serializer.serialize_i32(4),
            Self::UnknownValue(u) => u.0.serialize(serializer),
        }
    }
}
8189
impl<'de> serde::de::Deserialize<'de> for State {
    fn deserialize<D>(deserializer: D) -> std::result::Result<Self, D::Error>
    where
        D: serde::Deserializer<'de>,
    {
        // `deserialize_any` lets the shared visitor accept either the integer
        // or the string form; the argument is the fully qualified proto enum
        // name identifying this type to the visitor.
        deserializer.deserialize_any(wkt::internal::EnumVisitor::<State>::new(
            ".google.cloud.speech.v2.CustomClass.State",
        ))
    }
}
8200}
8201
/// PhraseSet for biasing in speech recognition. A PhraseSet is used to provide
/// "hints" to the speech recognizer to favor specific words and phrases in the
/// results.
#[derive(Clone, Default, PartialEq)]
#[non_exhaustive]
pub struct PhraseSet {
    /// Output only. Identifier. The resource name of the PhraseSet.
    /// Format: `projects/{project}/locations/{location}/phraseSets/{phrase_set}`.
    pub name: std::string::String,

    /// Output only. System-assigned unique identifier for the PhraseSet.
    pub uid: std::string::String,

    /// A list of word and phrases.
    pub phrases: std::vec::Vec<crate::model::phrase_set::Phrase>,

    /// Hint Boost. Positive value will increase the probability that a specific
    /// phrase will be recognized over other similar sounding phrases. The higher
    /// the boost, the higher the chance of false positive recognition as well.
    /// Valid `boost` values are between 0 (exclusive) and 20. We recommend using a
    /// binary search approach to finding the optimal value for your use case as
    /// well as adding phrases both with and without boost to your requests.
    pub boost: f32,

    /// User-settable, human-readable name for the PhraseSet. Must be 63
    /// characters or less.
    pub display_name: std::string::String,

    /// Output only. The PhraseSet lifecycle state.
    pub state: crate::model::phrase_set::State,

    /// Output only. Creation time.
    pub create_time: std::option::Option<wkt::Timestamp>,

    /// Output only. The most recent time this resource was modified.
    pub update_time: std::option::Option<wkt::Timestamp>,

    /// Output only. The time at which this resource was requested for deletion.
    pub delete_time: std::option::Option<wkt::Timestamp>,

    /// Output only. The time at which this resource will be purged.
    pub expire_time: std::option::Option<wkt::Timestamp>,

    /// Allows users to store small amounts of arbitrary data.
    /// Both the key and the value must be 63 characters or less each.
    /// At most 100 annotations.
    pub annotations: std::collections::HashMap<std::string::String, std::string::String>,

    /// Output only. This checksum is computed by the server based on the value of
    /// other fields. This may be sent on update, undelete, and delete requests to
    /// ensure the client has an up-to-date value before proceeding.
    pub etag: std::string::String,

    /// Output only. Whether or not this PhraseSet is in the process of being
    /// updated.
    pub reconciling: bool,

    /// Output only. The [KMS key
    /// name](https://cloud.google.com/kms/docs/resource-hierarchy#keys) with which
    /// the PhraseSet is encrypted. The expected format is
    /// `projects/{project}/locations/{location}/keyRings/{key_ring}/cryptoKeys/{crypto_key}`.
    pub kms_key_name: std::string::String,

    /// Output only. The [KMS key version
    /// name](https://cloud.google.com/kms/docs/resource-hierarchy#key_versions)
    /// with which the PhraseSet is encrypted. The expected format is
    /// `projects/{project}/locations/{location}/keyRings/{key_ring}/cryptoKeys/{crypto_key}/cryptoKeyVersions/{crypto_key_version}`.
    pub kms_key_version_name: std::string::String,

    // Fields received on the wire that this client version does not know
    // about, preserved so they survive a serialize/deserialize round trip.
    pub(crate) _unknown_fields: serde_json::Map<std::string::String, serde_json::Value>,
}
8273
8274impl PhraseSet {
8275 pub fn new() -> Self {
8276 std::default::Default::default()
8277 }
8278
8279 /// Sets the value of [name][crate::model::PhraseSet::name].
8280 ///
8281 /// # Example
8282 /// ```ignore,no_run
8283 /// # use google_cloud_speech_v2::model::PhraseSet;
8284 /// let x = PhraseSet::new().set_name("example");
8285 /// ```
8286 pub fn set_name<T: std::convert::Into<std::string::String>>(mut self, v: T) -> Self {
8287 self.name = v.into();
8288 self
8289 }
8290
8291 /// Sets the value of [uid][crate::model::PhraseSet::uid].
8292 ///
8293 /// # Example
8294 /// ```ignore,no_run
8295 /// # use google_cloud_speech_v2::model::PhraseSet;
8296 /// let x = PhraseSet::new().set_uid("example");
8297 /// ```
8298 pub fn set_uid<T: std::convert::Into<std::string::String>>(mut self, v: T) -> Self {
8299 self.uid = v.into();
8300 self
8301 }
8302
8303 /// Sets the value of [phrases][crate::model::PhraseSet::phrases].
8304 ///
8305 /// # Example
8306 /// ```ignore,no_run
8307 /// # use google_cloud_speech_v2::model::PhraseSet;
8308 /// use google_cloud_speech_v2::model::phrase_set::Phrase;
8309 /// let x = PhraseSet::new()
8310 /// .set_phrases([
8311 /// Phrase::default()/* use setters */,
8312 /// Phrase::default()/* use (different) setters */,
8313 /// ]);
8314 /// ```
8315 pub fn set_phrases<T, V>(mut self, v: T) -> Self
8316 where
8317 T: std::iter::IntoIterator<Item = V>,
8318 V: std::convert::Into<crate::model::phrase_set::Phrase>,
8319 {
8320 use std::iter::Iterator;
8321 self.phrases = v.into_iter().map(|i| i.into()).collect();
8322 self
8323 }
8324
8325 /// Sets the value of [boost][crate::model::PhraseSet::boost].
8326 ///
8327 /// # Example
8328 /// ```ignore,no_run
8329 /// # use google_cloud_speech_v2::model::PhraseSet;
8330 /// let x = PhraseSet::new().set_boost(42.0);
8331 /// ```
8332 pub fn set_boost<T: std::convert::Into<f32>>(mut self, v: T) -> Self {
8333 self.boost = v.into();
8334 self
8335 }
8336
8337 /// Sets the value of [display_name][crate::model::PhraseSet::display_name].
8338 ///
8339 /// # Example
8340 /// ```ignore,no_run
8341 /// # use google_cloud_speech_v2::model::PhraseSet;
8342 /// let x = PhraseSet::new().set_display_name("example");
8343 /// ```
8344 pub fn set_display_name<T: std::convert::Into<std::string::String>>(mut self, v: T) -> Self {
8345 self.display_name = v.into();
8346 self
8347 }
8348
8349 /// Sets the value of [state][crate::model::PhraseSet::state].
8350 ///
8351 /// # Example
8352 /// ```ignore,no_run
8353 /// # use google_cloud_speech_v2::model::PhraseSet;
8354 /// use google_cloud_speech_v2::model::phrase_set::State;
8355 /// let x0 = PhraseSet::new().set_state(State::Active);
8356 /// let x1 = PhraseSet::new().set_state(State::Deleted);
8357 /// ```
8358 pub fn set_state<T: std::convert::Into<crate::model::phrase_set::State>>(
8359 mut self,
8360 v: T,
8361 ) -> Self {
8362 self.state = v.into();
8363 self
8364 }
8365
8366 /// Sets the value of [create_time][crate::model::PhraseSet::create_time].
8367 ///
8368 /// # Example
8369 /// ```ignore,no_run
8370 /// # use google_cloud_speech_v2::model::PhraseSet;
8371 /// use wkt::Timestamp;
8372 /// let x = PhraseSet::new().set_create_time(Timestamp::default()/* use setters */);
8373 /// ```
8374 pub fn set_create_time<T>(mut self, v: T) -> Self
8375 where
8376 T: std::convert::Into<wkt::Timestamp>,
8377 {
8378 self.create_time = std::option::Option::Some(v.into());
8379 self
8380 }
8381
8382 /// Sets or clears the value of [create_time][crate::model::PhraseSet::create_time].
8383 ///
8384 /// # Example
8385 /// ```ignore,no_run
8386 /// # use google_cloud_speech_v2::model::PhraseSet;
8387 /// use wkt::Timestamp;
8388 /// let x = PhraseSet::new().set_or_clear_create_time(Some(Timestamp::default()/* use setters */));
8389 /// let x = PhraseSet::new().set_or_clear_create_time(None::<Timestamp>);
8390 /// ```
8391 pub fn set_or_clear_create_time<T>(mut self, v: std::option::Option<T>) -> Self
8392 where
8393 T: std::convert::Into<wkt::Timestamp>,
8394 {
8395 self.create_time = v.map(|x| x.into());
8396 self
8397 }
8398
8399 /// Sets the value of [update_time][crate::model::PhraseSet::update_time].
8400 ///
8401 /// # Example
8402 /// ```ignore,no_run
8403 /// # use google_cloud_speech_v2::model::PhraseSet;
8404 /// use wkt::Timestamp;
8405 /// let x = PhraseSet::new().set_update_time(Timestamp::default()/* use setters */);
8406 /// ```
8407 pub fn set_update_time<T>(mut self, v: T) -> Self
8408 where
8409 T: std::convert::Into<wkt::Timestamp>,
8410 {
8411 self.update_time = std::option::Option::Some(v.into());
8412 self
8413 }
8414
8415 /// Sets or clears the value of [update_time][crate::model::PhraseSet::update_time].
8416 ///
8417 /// # Example
8418 /// ```ignore,no_run
8419 /// # use google_cloud_speech_v2::model::PhraseSet;
8420 /// use wkt::Timestamp;
8421 /// let x = PhraseSet::new().set_or_clear_update_time(Some(Timestamp::default()/* use setters */));
8422 /// let x = PhraseSet::new().set_or_clear_update_time(None::<Timestamp>);
8423 /// ```
8424 pub fn set_or_clear_update_time<T>(mut self, v: std::option::Option<T>) -> Self
8425 where
8426 T: std::convert::Into<wkt::Timestamp>,
8427 {
8428 self.update_time = v.map(|x| x.into());
8429 self
8430 }
8431
8432 /// Sets the value of [delete_time][crate::model::PhraseSet::delete_time].
8433 ///
8434 /// # Example
8435 /// ```ignore,no_run
8436 /// # use google_cloud_speech_v2::model::PhraseSet;
8437 /// use wkt::Timestamp;
8438 /// let x = PhraseSet::new().set_delete_time(Timestamp::default()/* use setters */);
8439 /// ```
8440 pub fn set_delete_time<T>(mut self, v: T) -> Self
8441 where
8442 T: std::convert::Into<wkt::Timestamp>,
8443 {
8444 self.delete_time = std::option::Option::Some(v.into());
8445 self
8446 }
8447
8448 /// Sets or clears the value of [delete_time][crate::model::PhraseSet::delete_time].
8449 ///
8450 /// # Example
8451 /// ```ignore,no_run
8452 /// # use google_cloud_speech_v2::model::PhraseSet;
8453 /// use wkt::Timestamp;
8454 /// let x = PhraseSet::new().set_or_clear_delete_time(Some(Timestamp::default()/* use setters */));
8455 /// let x = PhraseSet::new().set_or_clear_delete_time(None::<Timestamp>);
8456 /// ```
8457 pub fn set_or_clear_delete_time<T>(mut self, v: std::option::Option<T>) -> Self
8458 where
8459 T: std::convert::Into<wkt::Timestamp>,
8460 {
8461 self.delete_time = v.map(|x| x.into());
8462 self
8463 }
8464
8465 /// Sets the value of [expire_time][crate::model::PhraseSet::expire_time].
8466 ///
8467 /// # Example
8468 /// ```ignore,no_run
8469 /// # use google_cloud_speech_v2::model::PhraseSet;
8470 /// use wkt::Timestamp;
8471 /// let x = PhraseSet::new().set_expire_time(Timestamp::default()/* use setters */);
8472 /// ```
8473 pub fn set_expire_time<T>(mut self, v: T) -> Self
8474 where
8475 T: std::convert::Into<wkt::Timestamp>,
8476 {
8477 self.expire_time = std::option::Option::Some(v.into());
8478 self
8479 }
8480
8481 /// Sets or clears the value of [expire_time][crate::model::PhraseSet::expire_time].
8482 ///
8483 /// # Example
8484 /// ```ignore,no_run
8485 /// # use google_cloud_speech_v2::model::PhraseSet;
8486 /// use wkt::Timestamp;
8487 /// let x = PhraseSet::new().set_or_clear_expire_time(Some(Timestamp::default()/* use setters */));
8488 /// let x = PhraseSet::new().set_or_clear_expire_time(None::<Timestamp>);
8489 /// ```
8490 pub fn set_or_clear_expire_time<T>(mut self, v: std::option::Option<T>) -> Self
8491 where
8492 T: std::convert::Into<wkt::Timestamp>,
8493 {
8494 self.expire_time = v.map(|x| x.into());
8495 self
8496 }
8497
8498 /// Sets the value of [annotations][crate::model::PhraseSet::annotations].
8499 ///
8500 /// # Example
8501 /// ```ignore,no_run
8502 /// # use google_cloud_speech_v2::model::PhraseSet;
8503 /// let x = PhraseSet::new().set_annotations([
8504 /// ("key0", "abc"),
8505 /// ("key1", "xyz"),
8506 /// ]);
8507 /// ```
8508 pub fn set_annotations<T, K, V>(mut self, v: T) -> Self
8509 where
8510 T: std::iter::IntoIterator<Item = (K, V)>,
8511 K: std::convert::Into<std::string::String>,
8512 V: std::convert::Into<std::string::String>,
8513 {
8514 use std::iter::Iterator;
8515 self.annotations = v.into_iter().map(|(k, v)| (k.into(), v.into())).collect();
8516 self
8517 }
8518
8519 /// Sets the value of [etag][crate::model::PhraseSet::etag].
8520 ///
8521 /// # Example
8522 /// ```ignore,no_run
8523 /// # use google_cloud_speech_v2::model::PhraseSet;
8524 /// let x = PhraseSet::new().set_etag("example");
8525 /// ```
8526 pub fn set_etag<T: std::convert::Into<std::string::String>>(mut self, v: T) -> Self {
8527 self.etag = v.into();
8528 self
8529 }
8530
8531 /// Sets the value of [reconciling][crate::model::PhraseSet::reconciling].
8532 ///
8533 /// # Example
8534 /// ```ignore,no_run
8535 /// # use google_cloud_speech_v2::model::PhraseSet;
8536 /// let x = PhraseSet::new().set_reconciling(true);
8537 /// ```
8538 pub fn set_reconciling<T: std::convert::Into<bool>>(mut self, v: T) -> Self {
8539 self.reconciling = v.into();
8540 self
8541 }
8542
8543 /// Sets the value of [kms_key_name][crate::model::PhraseSet::kms_key_name].
8544 ///
8545 /// # Example
8546 /// ```ignore,no_run
8547 /// # use google_cloud_speech_v2::model::PhraseSet;
8548 /// let x = PhraseSet::new().set_kms_key_name("example");
8549 /// ```
8550 pub fn set_kms_key_name<T: std::convert::Into<std::string::String>>(mut self, v: T) -> Self {
8551 self.kms_key_name = v.into();
8552 self
8553 }
8554
8555 /// Sets the value of [kms_key_version_name][crate::model::PhraseSet::kms_key_version_name].
8556 ///
8557 /// # Example
8558 /// ```ignore,no_run
8559 /// # use google_cloud_speech_v2::model::PhraseSet;
8560 /// let x = PhraseSet::new().set_kms_key_version_name("example");
8561 /// ```
8562 pub fn set_kms_key_version_name<T: std::convert::Into<std::string::String>>(
8563 mut self,
8564 v: T,
8565 ) -> Self {
8566 self.kms_key_version_name = v.into();
8567 self
8568 }
8569}
8570
impl wkt::message::Message for PhraseSet {
    fn typename() -> &'static str {
        // Fully qualified type URL identifying this message on the wire.
        "type.googleapis.com/google.cloud.speech.v2.PhraseSet"
    }
}
8576
8577/// Defines additional types related to [PhraseSet].
pub mod phrase_set {
    #[allow(unused_imports)]
    use super::*;

    /// A Phrase contains words and phrase "hints" so that the speech recognition
    /// is more likely to recognize them. This can be used to improve the accuracy
    /// for specific words and phrases, for example, if specific commands are
    /// typically spoken by the user. This can also be used to add additional words
    /// to the vocabulary of the recognizer.
    ///
    /// List items can also include CustomClass references containing groups of
    /// words that represent common concepts that occur in natural language.
    #[derive(Clone, Default, PartialEq)]
    #[non_exhaustive]
    pub struct Phrase {
        /// The phrase itself.
        pub value: std::string::String,

        /// Hint Boost. Overrides the boost set at the phrase set level.
        /// Positive value will increase the probability that a specific phrase will
        /// be recognized over other similar sounding phrases. The higher the boost,
        /// the higher the chance of false positive recognition as well. Negative
        /// boost values would correspond to anti-biasing. Anti-biasing is not
        /// enabled, so negative boost values will return an error. Boost values must
        /// be between 0 and 20. Any values outside that range will return an error.
        /// We recommend using a binary search approach to finding the optimal value
        /// for your use case as well as adding phrases both with and without boost
        /// to your requests.
        pub boost: f32,

        // Fields received on the wire that this client version does not know
        // about, preserved so they survive a serialize/deserialize round trip.
        pub(crate) _unknown_fields: serde_json::Map<std::string::String, serde_json::Value>,
    }

    impl Phrase {
        /// Creates a new `Phrase` with every field at its default value.
        pub fn new() -> Self {
            std::default::Default::default()
        }

        /// Sets the value of [value][crate::model::phrase_set::Phrase::value].
        ///
        /// # Example
        /// ```ignore,no_run
        /// # use google_cloud_speech_v2::model::phrase_set::Phrase;
        /// let x = Phrase::new().set_value("example");
        /// ```
        pub fn set_value<T: std::convert::Into<std::string::String>>(mut self, v: T) -> Self {
            self.value = v.into();
            self
        }

        /// Sets the value of [boost][crate::model::phrase_set::Phrase::boost].
        ///
        /// # Example
        /// ```ignore,no_run
        /// # use google_cloud_speech_v2::model::phrase_set::Phrase;
        /// let x = Phrase::new().set_boost(42.0);
        /// ```
        pub fn set_boost<T: std::convert::Into<f32>>(mut self, v: T) -> Self {
            self.boost = v.into();
            self
        }
    }

    impl wkt::message::Message for Phrase {
        fn typename() -> &'static str {
            // Fully qualified type URL identifying this message on the wire.
            "type.googleapis.com/google.cloud.speech.v2.PhraseSet.Phrase"
        }
    }

    /// Set of states that define the lifecycle of a PhraseSet.
    ///
    /// # Working with unknown values
    ///
    /// This enum is defined as `#[non_exhaustive]` because Google Cloud may add
    /// additional enum variants at any time. Adding new variants is not considered
    /// a breaking change. Applications should write their code in anticipation of:
    ///
    /// - New values appearing in future releases of the client library, **and**
    /// - New values received dynamically, without application changes.
    ///
    /// Please consult the [Working with enums] section in the user guide for some
    /// guidelines.
    ///
    /// [Working with enums]: https://google-cloud-rust.github.io/working_with_enums.html
    #[derive(Clone, Debug, PartialEq)]
    #[non_exhaustive]
    pub enum State {
        /// Unspecified state. This is only used/useful for distinguishing
        /// unset values. Wire value `0` (`STATE_UNSPECIFIED`).
        Unspecified,
        /// The normal and active state. Wire value `2` (`ACTIVE`).
        Active,
        /// This PhraseSet has been deleted. Wire value `4` (`DELETED`).
        Deleted,
        /// If set, the enum was initialized with an unknown value.
        ///
        /// Applications can examine the value using [State::value] or
        /// [State::name].
        UnknownValue(state::UnknownValue),
    }

    #[doc(hidden)]
    pub mod state {
        #[allow(unused_imports)]
        use super::*;
        // Wrapper for an enum value this client version does not recognize.
        // Public (but hidden) only so `State::UnknownValue` can name it.
        #[derive(Clone, Debug, PartialEq)]
        pub struct UnknownValue(pub(crate) wkt::internal::UnknownEnumValue);
    }

    impl State {
        /// Gets the enum value.
        ///
        /// Returns `None` if the enum contains an unknown value deserialized from
        /// the string representation of enums.
        pub fn value(&self) -> std::option::Option<i32> {
            match self {
                Self::Unspecified => std::option::Option::Some(0),
                Self::Active => std::option::Option::Some(2),
                Self::Deleted => std::option::Option::Some(4),
                Self::UnknownValue(u) => u.0.value(),
            }
        }

        /// Gets the enum value as a string.
        ///
        /// Returns `None` if the enum contains an unknown value deserialized from
        /// the integer representation of enums.
        pub fn name(&self) -> std::option::Option<&str> {
            match self {
                Self::Unspecified => std::option::Option::Some("STATE_UNSPECIFIED"),
                Self::Active => std::option::Option::Some("ACTIVE"),
                Self::Deleted => std::option::Option::Some("DELETED"),
                Self::UnknownValue(u) => u.0.name(),
            }
        }
    }

    impl std::default::Default for State {
        fn default() -> Self {
            // Zero is the wire value mapped to `Unspecified`.
            use std::convert::From;
            Self::from(0)
        }
    }

    impl std::fmt::Display for State {
        fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::result::Result<(), std::fmt::Error> {
            // Delegate to the shared helper, passing both representations.
            wkt::internal::display_enum(f, self.name(), self.value())
        }
    }

    impl std::convert::From<i32> for State {
        fn from(value: i32) -> Self {
            // Unrecognized integers are preserved as unknown values so they
            // round-trip through serialization unchanged.
            match value {
                0 => Self::Unspecified,
                2 => Self::Active,
                4 => Self::Deleted,
                _ => Self::UnknownValue(state::UnknownValue(
                    wkt::internal::UnknownEnumValue::Integer(value),
                )),
            }
        }
    }

    impl std::convert::From<&str> for State {
        fn from(value: &str) -> Self {
            use std::string::ToString;
            // Unrecognized names are preserved as unknown values so they
            // round-trip through serialization unchanged.
            match value {
                "STATE_UNSPECIFIED" => Self::Unspecified,
                "ACTIVE" => Self::Active,
                "DELETED" => Self::Deleted,
                _ => Self::UnknownValue(state::UnknownValue(
                    wkt::internal::UnknownEnumValue::String(value.to_string()),
                )),
            }
        }
    }

    impl serde::ser::Serialize for State {
        fn serialize<S>(&self, serializer: S) -> std::result::Result<S::Ok, S::Error>
        where
            S: serde::Serializer,
        {
            // Known variants serialize as their integer wire value; unknown
            // values delegate to the wrapped `UnknownEnumValue`.
            match self {
                Self::Unspecified => serializer.serialize_i32(0),
                Self::Active => serializer.serialize_i32(2),
                Self::Deleted => serializer.serialize_i32(4),
                Self::UnknownValue(u) => u.0.serialize(serializer),
            }
        }
    }

    impl<'de> serde::de::Deserialize<'de> for State {
        fn deserialize<D>(deserializer: D) -> std::result::Result<Self, D::Error>
        where
            D: serde::Deserializer<'de>,
        {
            // `deserialize_any` lets the shared visitor accept either the
            // integer or the string form of the enum.
            deserializer.deserialize_any(wkt::internal::EnumVisitor::<State>::new(
                ".google.cloud.speech.v2.PhraseSet.State",
            ))
        }
    }
}
8780
/// Request message for the
/// [CreateCustomClass][google.cloud.speech.v2.Speech.CreateCustomClass] method.
///
/// [google.cloud.speech.v2.Speech.CreateCustomClass]: crate::client::Speech::create_custom_class
#[derive(Clone, Default, PartialEq)]
#[non_exhaustive]
pub struct CreateCustomClassRequest {
    /// Required. The CustomClass to create.
    pub custom_class: std::option::Option<crate::model::CustomClass>,

    /// If set, validate the request and preview the CustomClass, but do not
    /// actually create it.
    pub validate_only: bool,

    /// The ID to use for the CustomClass, which will become the final component of
    /// the CustomClass's resource name.
    ///
    /// This value should be 4-63 characters, and valid characters
    /// are /[a-z][0-9]-/.
    pub custom_class_id: std::string::String,

    /// Required. The project and location where this CustomClass will be created.
    /// The expected format is `projects/{project}/locations/{location}`.
    pub parent: std::string::String,

    // Fields received on the wire that this client version does not know
    // about, preserved so they survive a serialize/deserialize round trip.
    pub(crate) _unknown_fields: serde_json::Map<std::string::String, serde_json::Value>,
}
8808
8809impl CreateCustomClassRequest {
8810 pub fn new() -> Self {
8811 std::default::Default::default()
8812 }
8813
8814 /// Sets the value of [custom_class][crate::model::CreateCustomClassRequest::custom_class].
8815 ///
8816 /// # Example
8817 /// ```ignore,no_run
8818 /// # use google_cloud_speech_v2::model::CreateCustomClassRequest;
8819 /// use google_cloud_speech_v2::model::CustomClass;
8820 /// let x = CreateCustomClassRequest::new().set_custom_class(CustomClass::default()/* use setters */);
8821 /// ```
8822 pub fn set_custom_class<T>(mut self, v: T) -> Self
8823 where
8824 T: std::convert::Into<crate::model::CustomClass>,
8825 {
8826 self.custom_class = std::option::Option::Some(v.into());
8827 self
8828 }
8829
8830 /// Sets or clears the value of [custom_class][crate::model::CreateCustomClassRequest::custom_class].
8831 ///
8832 /// # Example
8833 /// ```ignore,no_run
8834 /// # use google_cloud_speech_v2::model::CreateCustomClassRequest;
8835 /// use google_cloud_speech_v2::model::CustomClass;
8836 /// let x = CreateCustomClassRequest::new().set_or_clear_custom_class(Some(CustomClass::default()/* use setters */));
8837 /// let x = CreateCustomClassRequest::new().set_or_clear_custom_class(None::<CustomClass>);
8838 /// ```
8839 pub fn set_or_clear_custom_class<T>(mut self, v: std::option::Option<T>) -> Self
8840 where
8841 T: std::convert::Into<crate::model::CustomClass>,
8842 {
8843 self.custom_class = v.map(|x| x.into());
8844 self
8845 }
8846
8847 /// Sets the value of [validate_only][crate::model::CreateCustomClassRequest::validate_only].
8848 ///
8849 /// # Example
8850 /// ```ignore,no_run
8851 /// # use google_cloud_speech_v2::model::CreateCustomClassRequest;
8852 /// let x = CreateCustomClassRequest::new().set_validate_only(true);
8853 /// ```
8854 pub fn set_validate_only<T: std::convert::Into<bool>>(mut self, v: T) -> Self {
8855 self.validate_only = v.into();
8856 self
8857 }
8858
8859 /// Sets the value of [custom_class_id][crate::model::CreateCustomClassRequest::custom_class_id].
8860 ///
8861 /// # Example
8862 /// ```ignore,no_run
8863 /// # use google_cloud_speech_v2::model::CreateCustomClassRequest;
8864 /// let x = CreateCustomClassRequest::new().set_custom_class_id("example");
8865 /// ```
8866 pub fn set_custom_class_id<T: std::convert::Into<std::string::String>>(mut self, v: T) -> Self {
8867 self.custom_class_id = v.into();
8868 self
8869 }
8870
8871 /// Sets the value of [parent][crate::model::CreateCustomClassRequest::parent].
8872 ///
8873 /// # Example
8874 /// ```ignore,no_run
8875 /// # use google_cloud_speech_v2::model::CreateCustomClassRequest;
8876 /// let x = CreateCustomClassRequest::new().set_parent("example");
8877 /// ```
8878 pub fn set_parent<T: std::convert::Into<std::string::String>>(mut self, v: T) -> Self {
8879 self.parent = v.into();
8880 self
8881 }
8882}
8883
impl wkt::message::Message for CreateCustomClassRequest {
    fn typename() -> &'static str {
        // Fully qualified type URL identifying this message on the wire.
        "type.googleapis.com/google.cloud.speech.v2.CreateCustomClassRequest"
    }
}
8889
/// Request message for the
/// [ListCustomClasses][google.cloud.speech.v2.Speech.ListCustomClasses] method.
///
/// [google.cloud.speech.v2.Speech.ListCustomClasses]: crate::client::Speech::list_custom_classes
#[derive(Clone, Default, PartialEq)]
#[non_exhaustive]
pub struct ListCustomClassesRequest {
    /// Required. The project and location of CustomClass resources to list. The
    /// expected format is `projects/{project}/locations/{location}`.
    pub parent: std::string::String,

    /// Number of results per requests. A valid page_size ranges from 0 to 100
    /// inclusive. If the page_size is zero or unspecified, a page size of 5 will
    /// be chosen. If the page size exceeds 100, it will be coerced down to 100.
    /// Note that a call might return fewer results than the requested page size.
    pub page_size: i32,

    /// A page token, received from a previous
    /// [ListCustomClasses][google.cloud.speech.v2.Speech.ListCustomClasses] call.
    /// Provide this to retrieve the subsequent page.
    ///
    /// When paginating, all other parameters provided to
    /// [ListCustomClasses][google.cloud.speech.v2.Speech.ListCustomClasses] must
    /// match the call that provided the page token.
    ///
    /// [google.cloud.speech.v2.Speech.ListCustomClasses]: crate::client::Speech::list_custom_classes
    pub page_token: std::string::String,

    /// Whether, or not, to show resources that have been deleted.
    pub show_deleted: bool,

    // Fields received on the wire that this client version does not know
    // about, preserved so they survive a serialize/deserialize round trip.
    pub(crate) _unknown_fields: serde_json::Map<std::string::String, serde_json::Value>,
}
8923
8924impl ListCustomClassesRequest {
8925 pub fn new() -> Self {
8926 std::default::Default::default()
8927 }
8928
8929 /// Sets the value of [parent][crate::model::ListCustomClassesRequest::parent].
8930 ///
8931 /// # Example
8932 /// ```ignore,no_run
8933 /// # use google_cloud_speech_v2::model::ListCustomClassesRequest;
8934 /// let x = ListCustomClassesRequest::new().set_parent("example");
8935 /// ```
8936 pub fn set_parent<T: std::convert::Into<std::string::String>>(mut self, v: T) -> Self {
8937 self.parent = v.into();
8938 self
8939 }
8940
8941 /// Sets the value of [page_size][crate::model::ListCustomClassesRequest::page_size].
8942 ///
8943 /// # Example
8944 /// ```ignore,no_run
8945 /// # use google_cloud_speech_v2::model::ListCustomClassesRequest;
8946 /// let x = ListCustomClassesRequest::new().set_page_size(42);
8947 /// ```
8948 pub fn set_page_size<T: std::convert::Into<i32>>(mut self, v: T) -> Self {
8949 self.page_size = v.into();
8950 self
8951 }
8952
8953 /// Sets the value of [page_token][crate::model::ListCustomClassesRequest::page_token].
8954 ///
8955 /// # Example
8956 /// ```ignore,no_run
8957 /// # use google_cloud_speech_v2::model::ListCustomClassesRequest;
8958 /// let x = ListCustomClassesRequest::new().set_page_token("example");
8959 /// ```
8960 pub fn set_page_token<T: std::convert::Into<std::string::String>>(mut self, v: T) -> Self {
8961 self.page_token = v.into();
8962 self
8963 }
8964
8965 /// Sets the value of [show_deleted][crate::model::ListCustomClassesRequest::show_deleted].
8966 ///
8967 /// # Example
8968 /// ```ignore,no_run
8969 /// # use google_cloud_speech_v2::model::ListCustomClassesRequest;
8970 /// let x = ListCustomClassesRequest::new().set_show_deleted(true);
8971 /// ```
8972 pub fn set_show_deleted<T: std::convert::Into<bool>>(mut self, v: T) -> Self {
8973 self.show_deleted = v.into();
8974 self
8975 }
8976}
8977
impl wkt::message::Message for ListCustomClassesRequest {
    fn typename() -> &'static str {
        // Fully qualified type URL identifying this message on the wire.
        "type.googleapis.com/google.cloud.speech.v2.ListCustomClassesRequest"
    }
}
8983
/// Response message for the
/// [ListCustomClasses][google.cloud.speech.v2.Speech.ListCustomClasses] method.
///
/// [google.cloud.speech.v2.Speech.ListCustomClasses]: crate::client::Speech::list_custom_classes
#[derive(Clone, Default, PartialEq)]
#[non_exhaustive]
pub struct ListCustomClassesResponse {
    /// The list of requested CustomClasses.
    pub custom_classes: std::vec::Vec<crate::model::CustomClass>,

    /// A token, which can be sent as
    /// [page_token][google.cloud.speech.v2.ListCustomClassesRequest.page_token] to
    /// retrieve the next page. If this field is omitted, there are no subsequent
    /// pages. This token expires after 72 hours.
    ///
    /// [google.cloud.speech.v2.ListCustomClassesRequest.page_token]: crate::model::ListCustomClassesRequest::page_token
    pub next_page_token: std::string::String,

    // Fields received on the wire that this client version does not know
    // about, preserved so they survive a serialize/deserialize round trip.
    pub(crate) _unknown_fields: serde_json::Map<std::string::String, serde_json::Value>,
}
9004
9005impl ListCustomClassesResponse {
9006 pub fn new() -> Self {
9007 std::default::Default::default()
9008 }
9009
9010 /// Sets the value of [custom_classes][crate::model::ListCustomClassesResponse::custom_classes].
9011 ///
9012 /// # Example
9013 /// ```ignore,no_run
9014 /// # use google_cloud_speech_v2::model::ListCustomClassesResponse;
9015 /// use google_cloud_speech_v2::model::CustomClass;
9016 /// let x = ListCustomClassesResponse::new()
9017 /// .set_custom_classes([
9018 /// CustomClass::default()/* use setters */,
9019 /// CustomClass::default()/* use (different) setters */,
9020 /// ]);
9021 /// ```
9022 pub fn set_custom_classes<T, V>(mut self, v: T) -> Self
9023 where
9024 T: std::iter::IntoIterator<Item = V>,
9025 V: std::convert::Into<crate::model::CustomClass>,
9026 {
9027 use std::iter::Iterator;
9028 self.custom_classes = v.into_iter().map(|i| i.into()).collect();
9029 self
9030 }
9031
9032 /// Sets the value of [next_page_token][crate::model::ListCustomClassesResponse::next_page_token].
9033 ///
9034 /// # Example
9035 /// ```ignore,no_run
9036 /// # use google_cloud_speech_v2::model::ListCustomClassesResponse;
9037 /// let x = ListCustomClassesResponse::new().set_next_page_token("example");
9038 /// ```
9039 pub fn set_next_page_token<T: std::convert::Into<std::string::String>>(mut self, v: T) -> Self {
9040 self.next_page_token = v.into();
9041 self
9042 }
9043}
9044
9045impl wkt::message::Message for ListCustomClassesResponse {
9046 fn typename() -> &'static str {
9047 "type.googleapis.com/google.cloud.speech.v2.ListCustomClassesResponse"
9048 }
9049}
9050
9051#[doc(hidden)]
9052impl google_cloud_gax::paginator::internal::PageableResponse for ListCustomClassesResponse {
9053 type PageItem = crate::model::CustomClass;
9054
9055 fn items(self) -> std::vec::Vec<Self::PageItem> {
9056 self.custom_classes
9057 }
9058
9059 fn next_page_token(&self) -> std::string::String {
9060 use std::clone::Clone;
9061 self.next_page_token.clone()
9062 }
9063}
9064
/// Request message for the
/// [GetCustomClass][google.cloud.speech.v2.Speech.GetCustomClass] method.
///
/// [google.cloud.speech.v2.Speech.GetCustomClass]: crate::client::Speech::get_custom_class
#[derive(Clone, Default, PartialEq)]
#[non_exhaustive]
pub struct GetCustomClassRequest {
    /// Required. The name of the CustomClass to retrieve. The expected format is
    /// `projects/{project}/locations/{location}/customClasses/{custom_class}`.
    pub name: std::string::String,

    // NOTE(review): presumably preserves JSON properties not modeled by the fields
    // above (see `mod serialize`/`mod deserialize`) — confirm with the generator.
    pub(crate) _unknown_fields: serde_json::Map<std::string::String, serde_json::Value>,
}
9078
9079impl GetCustomClassRequest {
9080 pub fn new() -> Self {
9081 std::default::Default::default()
9082 }
9083
9084 /// Sets the value of [name][crate::model::GetCustomClassRequest::name].
9085 ///
9086 /// # Example
9087 /// ```ignore,no_run
9088 /// # use google_cloud_speech_v2::model::GetCustomClassRequest;
9089 /// let x = GetCustomClassRequest::new().set_name("example");
9090 /// ```
9091 pub fn set_name<T: std::convert::Into<std::string::String>>(mut self, v: T) -> Self {
9092 self.name = v.into();
9093 self
9094 }
9095}
9096
9097impl wkt::message::Message for GetCustomClassRequest {
9098 fn typename() -> &'static str {
9099 "type.googleapis.com/google.cloud.speech.v2.GetCustomClassRequest"
9100 }
9101}
9102
/// Request message for the
/// [UpdateCustomClass][google.cloud.speech.v2.Speech.UpdateCustomClass] method.
///
/// [google.cloud.speech.v2.Speech.UpdateCustomClass]: crate::client::Speech::update_custom_class
#[derive(Clone, Default, PartialEq)]
#[non_exhaustive]
pub struct UpdateCustomClassRequest {
    /// Required. The CustomClass to update.
    ///
    /// The CustomClass's `name` field is used to identify the CustomClass to
    /// update. Format:
    /// `projects/{project}/locations/{location}/customClasses/{custom_class}`.
    pub custom_class: std::option::Option<crate::model::CustomClass>,

    /// The list of fields to be updated. If empty, all fields are considered for
    /// update.
    pub update_mask: std::option::Option<wkt::FieldMask>,

    /// If set, validate the request and preview the updated CustomClass, but do
    /// not actually update it.
    pub validate_only: bool,

    // NOTE(review): presumably preserves JSON properties not modeled by the fields
    // above (see `mod serialize`/`mod deserialize`) — confirm with the generator.
    pub(crate) _unknown_fields: serde_json::Map<std::string::String, serde_json::Value>,
}
9127
9128impl UpdateCustomClassRequest {
9129 pub fn new() -> Self {
9130 std::default::Default::default()
9131 }
9132
9133 /// Sets the value of [custom_class][crate::model::UpdateCustomClassRequest::custom_class].
9134 ///
9135 /// # Example
9136 /// ```ignore,no_run
9137 /// # use google_cloud_speech_v2::model::UpdateCustomClassRequest;
9138 /// use google_cloud_speech_v2::model::CustomClass;
9139 /// let x = UpdateCustomClassRequest::new().set_custom_class(CustomClass::default()/* use setters */);
9140 /// ```
9141 pub fn set_custom_class<T>(mut self, v: T) -> Self
9142 where
9143 T: std::convert::Into<crate::model::CustomClass>,
9144 {
9145 self.custom_class = std::option::Option::Some(v.into());
9146 self
9147 }
9148
9149 /// Sets or clears the value of [custom_class][crate::model::UpdateCustomClassRequest::custom_class].
9150 ///
9151 /// # Example
9152 /// ```ignore,no_run
9153 /// # use google_cloud_speech_v2::model::UpdateCustomClassRequest;
9154 /// use google_cloud_speech_v2::model::CustomClass;
9155 /// let x = UpdateCustomClassRequest::new().set_or_clear_custom_class(Some(CustomClass::default()/* use setters */));
9156 /// let x = UpdateCustomClassRequest::new().set_or_clear_custom_class(None::<CustomClass>);
9157 /// ```
9158 pub fn set_or_clear_custom_class<T>(mut self, v: std::option::Option<T>) -> Self
9159 where
9160 T: std::convert::Into<crate::model::CustomClass>,
9161 {
9162 self.custom_class = v.map(|x| x.into());
9163 self
9164 }
9165
9166 /// Sets the value of [update_mask][crate::model::UpdateCustomClassRequest::update_mask].
9167 ///
9168 /// # Example
9169 /// ```ignore,no_run
9170 /// # use google_cloud_speech_v2::model::UpdateCustomClassRequest;
9171 /// use wkt::FieldMask;
9172 /// let x = UpdateCustomClassRequest::new().set_update_mask(FieldMask::default()/* use setters */);
9173 /// ```
9174 pub fn set_update_mask<T>(mut self, v: T) -> Self
9175 where
9176 T: std::convert::Into<wkt::FieldMask>,
9177 {
9178 self.update_mask = std::option::Option::Some(v.into());
9179 self
9180 }
9181
9182 /// Sets or clears the value of [update_mask][crate::model::UpdateCustomClassRequest::update_mask].
9183 ///
9184 /// # Example
9185 /// ```ignore,no_run
9186 /// # use google_cloud_speech_v2::model::UpdateCustomClassRequest;
9187 /// use wkt::FieldMask;
9188 /// let x = UpdateCustomClassRequest::new().set_or_clear_update_mask(Some(FieldMask::default()/* use setters */));
9189 /// let x = UpdateCustomClassRequest::new().set_or_clear_update_mask(None::<FieldMask>);
9190 /// ```
9191 pub fn set_or_clear_update_mask<T>(mut self, v: std::option::Option<T>) -> Self
9192 where
9193 T: std::convert::Into<wkt::FieldMask>,
9194 {
9195 self.update_mask = v.map(|x| x.into());
9196 self
9197 }
9198
9199 /// Sets the value of [validate_only][crate::model::UpdateCustomClassRequest::validate_only].
9200 ///
9201 /// # Example
9202 /// ```ignore,no_run
9203 /// # use google_cloud_speech_v2::model::UpdateCustomClassRequest;
9204 /// let x = UpdateCustomClassRequest::new().set_validate_only(true);
9205 /// ```
9206 pub fn set_validate_only<T: std::convert::Into<bool>>(mut self, v: T) -> Self {
9207 self.validate_only = v.into();
9208 self
9209 }
9210}
9211
9212impl wkt::message::Message for UpdateCustomClassRequest {
9213 fn typename() -> &'static str {
9214 "type.googleapis.com/google.cloud.speech.v2.UpdateCustomClassRequest"
9215 }
9216}
9217
/// Request message for the
/// [DeleteCustomClass][google.cloud.speech.v2.Speech.DeleteCustomClass] method.
///
/// [google.cloud.speech.v2.Speech.DeleteCustomClass]: crate::client::Speech::delete_custom_class
#[derive(Clone, Default, PartialEq)]
#[non_exhaustive]
pub struct DeleteCustomClassRequest {
    /// Required. The name of the CustomClass to delete.
    /// Format:
    /// `projects/{project}/locations/{location}/customClasses/{custom_class}`
    pub name: std::string::String,

    /// If set, validate the request and preview the deleted CustomClass, but do
    /// not actually delete it.
    pub validate_only: bool,

    /// If set to true, and the CustomClass is not found, the request will succeed
    /// and be a no-op (no Operation is recorded in this case).
    pub allow_missing: bool,

    /// This checksum is computed by the server based on the value of other
    /// fields. This may be sent on update, undelete, and delete requests to ensure
    /// the client has an up-to-date value before proceeding.
    pub etag: std::string::String,

    // NOTE(review): presumably preserves JSON properties not modeled by the fields
    // above (see `mod serialize`/`mod deserialize`) — confirm with the generator.
    pub(crate) _unknown_fields: serde_json::Map<std::string::String, serde_json::Value>,
}
9245
9246impl DeleteCustomClassRequest {
9247 pub fn new() -> Self {
9248 std::default::Default::default()
9249 }
9250
9251 /// Sets the value of [name][crate::model::DeleteCustomClassRequest::name].
9252 ///
9253 /// # Example
9254 /// ```ignore,no_run
9255 /// # use google_cloud_speech_v2::model::DeleteCustomClassRequest;
9256 /// let x = DeleteCustomClassRequest::new().set_name("example");
9257 /// ```
9258 pub fn set_name<T: std::convert::Into<std::string::String>>(mut self, v: T) -> Self {
9259 self.name = v.into();
9260 self
9261 }
9262
9263 /// Sets the value of [validate_only][crate::model::DeleteCustomClassRequest::validate_only].
9264 ///
9265 /// # Example
9266 /// ```ignore,no_run
9267 /// # use google_cloud_speech_v2::model::DeleteCustomClassRequest;
9268 /// let x = DeleteCustomClassRequest::new().set_validate_only(true);
9269 /// ```
9270 pub fn set_validate_only<T: std::convert::Into<bool>>(mut self, v: T) -> Self {
9271 self.validate_only = v.into();
9272 self
9273 }
9274
9275 /// Sets the value of [allow_missing][crate::model::DeleteCustomClassRequest::allow_missing].
9276 ///
9277 /// # Example
9278 /// ```ignore,no_run
9279 /// # use google_cloud_speech_v2::model::DeleteCustomClassRequest;
9280 /// let x = DeleteCustomClassRequest::new().set_allow_missing(true);
9281 /// ```
9282 pub fn set_allow_missing<T: std::convert::Into<bool>>(mut self, v: T) -> Self {
9283 self.allow_missing = v.into();
9284 self
9285 }
9286
9287 /// Sets the value of [etag][crate::model::DeleteCustomClassRequest::etag].
9288 ///
9289 /// # Example
9290 /// ```ignore,no_run
9291 /// # use google_cloud_speech_v2::model::DeleteCustomClassRequest;
9292 /// let x = DeleteCustomClassRequest::new().set_etag("example");
9293 /// ```
9294 pub fn set_etag<T: std::convert::Into<std::string::String>>(mut self, v: T) -> Self {
9295 self.etag = v.into();
9296 self
9297 }
9298}
9299
9300impl wkt::message::Message for DeleteCustomClassRequest {
9301 fn typename() -> &'static str {
9302 "type.googleapis.com/google.cloud.speech.v2.DeleteCustomClassRequest"
9303 }
9304}
9305
/// Request message for the
/// [UndeleteCustomClass][google.cloud.speech.v2.Speech.UndeleteCustomClass]
/// method.
///
/// [google.cloud.speech.v2.Speech.UndeleteCustomClass]: crate::client::Speech::undelete_custom_class
#[derive(Clone, Default, PartialEq)]
#[non_exhaustive]
pub struct UndeleteCustomClassRequest {
    /// Required. The name of the CustomClass to undelete.
    /// Format:
    /// `projects/{project}/locations/{location}/customClasses/{custom_class}`
    pub name: std::string::String,

    /// If set, validate the request and preview the undeleted CustomClass, but do
    /// not actually undelete it.
    pub validate_only: bool,

    /// This checksum is computed by the server based on the value of other
    /// fields. This may be sent on update, undelete, and delete requests to ensure
    /// the client has an up-to-date value before proceeding.
    pub etag: std::string::String,

    // NOTE(review): presumably preserves JSON properties not modeled by the fields
    // above (see `mod serialize`/`mod deserialize`) — confirm with the generator.
    pub(crate) _unknown_fields: serde_json::Map<std::string::String, serde_json::Value>,
}
9330
9331impl UndeleteCustomClassRequest {
9332 pub fn new() -> Self {
9333 std::default::Default::default()
9334 }
9335
9336 /// Sets the value of [name][crate::model::UndeleteCustomClassRequest::name].
9337 ///
9338 /// # Example
9339 /// ```ignore,no_run
9340 /// # use google_cloud_speech_v2::model::UndeleteCustomClassRequest;
9341 /// let x = UndeleteCustomClassRequest::new().set_name("example");
9342 /// ```
9343 pub fn set_name<T: std::convert::Into<std::string::String>>(mut self, v: T) -> Self {
9344 self.name = v.into();
9345 self
9346 }
9347
9348 /// Sets the value of [validate_only][crate::model::UndeleteCustomClassRequest::validate_only].
9349 ///
9350 /// # Example
9351 /// ```ignore,no_run
9352 /// # use google_cloud_speech_v2::model::UndeleteCustomClassRequest;
9353 /// let x = UndeleteCustomClassRequest::new().set_validate_only(true);
9354 /// ```
9355 pub fn set_validate_only<T: std::convert::Into<bool>>(mut self, v: T) -> Self {
9356 self.validate_only = v.into();
9357 self
9358 }
9359
9360 /// Sets the value of [etag][crate::model::UndeleteCustomClassRequest::etag].
9361 ///
9362 /// # Example
9363 /// ```ignore,no_run
9364 /// # use google_cloud_speech_v2::model::UndeleteCustomClassRequest;
9365 /// let x = UndeleteCustomClassRequest::new().set_etag("example");
9366 /// ```
9367 pub fn set_etag<T: std::convert::Into<std::string::String>>(mut self, v: T) -> Self {
9368 self.etag = v.into();
9369 self
9370 }
9371}
9372
9373impl wkt::message::Message for UndeleteCustomClassRequest {
9374 fn typename() -> &'static str {
9375 "type.googleapis.com/google.cloud.speech.v2.UndeleteCustomClassRequest"
9376 }
9377}
9378
/// Request message for the
/// [CreatePhraseSet][google.cloud.speech.v2.Speech.CreatePhraseSet] method.
///
/// [google.cloud.speech.v2.Speech.CreatePhraseSet]: crate::client::Speech::create_phrase_set
#[derive(Clone, Default, PartialEq)]
#[non_exhaustive]
pub struct CreatePhraseSetRequest {
    /// Required. The PhraseSet to create.
    pub phrase_set: std::option::Option<crate::model::PhraseSet>,

    /// If set, validate the request and preview the PhraseSet, but do not
    /// actually create it.
    pub validate_only: bool,

    /// The ID to use for the PhraseSet, which will become the final component of
    /// the PhraseSet's resource name.
    ///
    /// This value should be 4-63 characters, and valid characters
    /// are /[a-z][0-9]-/.
    pub phrase_set_id: std::string::String,

    /// Required. The project and location where this PhraseSet will be created.
    /// The expected format is `projects/{project}/locations/{location}`.
    pub parent: std::string::String,

    // NOTE(review): presumably preserves JSON properties not modeled by the fields
    // above (see `mod serialize`/`mod deserialize`) — confirm with the generator.
    pub(crate) _unknown_fields: serde_json::Map<std::string::String, serde_json::Value>,
}
9406
9407impl CreatePhraseSetRequest {
9408 pub fn new() -> Self {
9409 std::default::Default::default()
9410 }
9411
9412 /// Sets the value of [phrase_set][crate::model::CreatePhraseSetRequest::phrase_set].
9413 ///
9414 /// # Example
9415 /// ```ignore,no_run
9416 /// # use google_cloud_speech_v2::model::CreatePhraseSetRequest;
9417 /// use google_cloud_speech_v2::model::PhraseSet;
9418 /// let x = CreatePhraseSetRequest::new().set_phrase_set(PhraseSet::default()/* use setters */);
9419 /// ```
9420 pub fn set_phrase_set<T>(mut self, v: T) -> Self
9421 where
9422 T: std::convert::Into<crate::model::PhraseSet>,
9423 {
9424 self.phrase_set = std::option::Option::Some(v.into());
9425 self
9426 }
9427
9428 /// Sets or clears the value of [phrase_set][crate::model::CreatePhraseSetRequest::phrase_set].
9429 ///
9430 /// # Example
9431 /// ```ignore,no_run
9432 /// # use google_cloud_speech_v2::model::CreatePhraseSetRequest;
9433 /// use google_cloud_speech_v2::model::PhraseSet;
9434 /// let x = CreatePhraseSetRequest::new().set_or_clear_phrase_set(Some(PhraseSet::default()/* use setters */));
9435 /// let x = CreatePhraseSetRequest::new().set_or_clear_phrase_set(None::<PhraseSet>);
9436 /// ```
9437 pub fn set_or_clear_phrase_set<T>(mut self, v: std::option::Option<T>) -> Self
9438 where
9439 T: std::convert::Into<crate::model::PhraseSet>,
9440 {
9441 self.phrase_set = v.map(|x| x.into());
9442 self
9443 }
9444
9445 /// Sets the value of [validate_only][crate::model::CreatePhraseSetRequest::validate_only].
9446 ///
9447 /// # Example
9448 /// ```ignore,no_run
9449 /// # use google_cloud_speech_v2::model::CreatePhraseSetRequest;
9450 /// let x = CreatePhraseSetRequest::new().set_validate_only(true);
9451 /// ```
9452 pub fn set_validate_only<T: std::convert::Into<bool>>(mut self, v: T) -> Self {
9453 self.validate_only = v.into();
9454 self
9455 }
9456
9457 /// Sets the value of [phrase_set_id][crate::model::CreatePhraseSetRequest::phrase_set_id].
9458 ///
9459 /// # Example
9460 /// ```ignore,no_run
9461 /// # use google_cloud_speech_v2::model::CreatePhraseSetRequest;
9462 /// let x = CreatePhraseSetRequest::new().set_phrase_set_id("example");
9463 /// ```
9464 pub fn set_phrase_set_id<T: std::convert::Into<std::string::String>>(mut self, v: T) -> Self {
9465 self.phrase_set_id = v.into();
9466 self
9467 }
9468
9469 /// Sets the value of [parent][crate::model::CreatePhraseSetRequest::parent].
9470 ///
9471 /// # Example
9472 /// ```ignore,no_run
9473 /// # use google_cloud_speech_v2::model::CreatePhraseSetRequest;
9474 /// let x = CreatePhraseSetRequest::new().set_parent("example");
9475 /// ```
9476 pub fn set_parent<T: std::convert::Into<std::string::String>>(mut self, v: T) -> Self {
9477 self.parent = v.into();
9478 self
9479 }
9480}
9481
9482impl wkt::message::Message for CreatePhraseSetRequest {
9483 fn typename() -> &'static str {
9484 "type.googleapis.com/google.cloud.speech.v2.CreatePhraseSetRequest"
9485 }
9486}
9487
/// Request message for the
/// [ListPhraseSets][google.cloud.speech.v2.Speech.ListPhraseSets] method.
///
/// [google.cloud.speech.v2.Speech.ListPhraseSets]: crate::client::Speech::list_phrase_sets
#[derive(Clone, Default, PartialEq)]
#[non_exhaustive]
pub struct ListPhraseSetsRequest {
    /// Required. The project and location of PhraseSet resources to list. The
    /// expected format is `projects/{project}/locations/{location}`.
    pub parent: std::string::String,

    /// The maximum number of PhraseSets to return. The service may return fewer
    /// than this value. If unspecified, at most 5 PhraseSets will be returned.
    /// The maximum value is 100; values above 100 will be coerced to 100.
    pub page_size: i32,

    /// A page token, received from a previous
    /// [ListPhraseSets][google.cloud.speech.v2.Speech.ListPhraseSets] call.
    /// Provide this to retrieve the subsequent page.
    ///
    /// When paginating, all other parameters provided to
    /// [ListPhraseSets][google.cloud.speech.v2.Speech.ListPhraseSets] must match
    /// the call that provided the page token.
    ///
    /// [google.cloud.speech.v2.Speech.ListPhraseSets]: crate::client::Speech::list_phrase_sets
    pub page_token: std::string::String,

    /// Whether, or not, to show resources that have been deleted.
    pub show_deleted: bool,

    // NOTE(review): presumably preserves JSON properties not modeled by the fields
    // above (see `mod serialize`/`mod deserialize`) — confirm with the generator.
    pub(crate) _unknown_fields: serde_json::Map<std::string::String, serde_json::Value>,
}
9520
9521impl ListPhraseSetsRequest {
9522 pub fn new() -> Self {
9523 std::default::Default::default()
9524 }
9525
9526 /// Sets the value of [parent][crate::model::ListPhraseSetsRequest::parent].
9527 ///
9528 /// # Example
9529 /// ```ignore,no_run
9530 /// # use google_cloud_speech_v2::model::ListPhraseSetsRequest;
9531 /// let x = ListPhraseSetsRequest::new().set_parent("example");
9532 /// ```
9533 pub fn set_parent<T: std::convert::Into<std::string::String>>(mut self, v: T) -> Self {
9534 self.parent = v.into();
9535 self
9536 }
9537
9538 /// Sets the value of [page_size][crate::model::ListPhraseSetsRequest::page_size].
9539 ///
9540 /// # Example
9541 /// ```ignore,no_run
9542 /// # use google_cloud_speech_v2::model::ListPhraseSetsRequest;
9543 /// let x = ListPhraseSetsRequest::new().set_page_size(42);
9544 /// ```
9545 pub fn set_page_size<T: std::convert::Into<i32>>(mut self, v: T) -> Self {
9546 self.page_size = v.into();
9547 self
9548 }
9549
9550 /// Sets the value of [page_token][crate::model::ListPhraseSetsRequest::page_token].
9551 ///
9552 /// # Example
9553 /// ```ignore,no_run
9554 /// # use google_cloud_speech_v2::model::ListPhraseSetsRequest;
9555 /// let x = ListPhraseSetsRequest::new().set_page_token("example");
9556 /// ```
9557 pub fn set_page_token<T: std::convert::Into<std::string::String>>(mut self, v: T) -> Self {
9558 self.page_token = v.into();
9559 self
9560 }
9561
9562 /// Sets the value of [show_deleted][crate::model::ListPhraseSetsRequest::show_deleted].
9563 ///
9564 /// # Example
9565 /// ```ignore,no_run
9566 /// # use google_cloud_speech_v2::model::ListPhraseSetsRequest;
9567 /// let x = ListPhraseSetsRequest::new().set_show_deleted(true);
9568 /// ```
9569 pub fn set_show_deleted<T: std::convert::Into<bool>>(mut self, v: T) -> Self {
9570 self.show_deleted = v.into();
9571 self
9572 }
9573}
9574
9575impl wkt::message::Message for ListPhraseSetsRequest {
9576 fn typename() -> &'static str {
9577 "type.googleapis.com/google.cloud.speech.v2.ListPhraseSetsRequest"
9578 }
9579}
9580
/// Response message for the
/// [ListPhraseSets][google.cloud.speech.v2.Speech.ListPhraseSets] method.
///
/// [google.cloud.speech.v2.Speech.ListPhraseSets]: crate::client::Speech::list_phrase_sets
#[derive(Clone, Default, PartialEq)]
#[non_exhaustive]
pub struct ListPhraseSetsResponse {
    /// The list of requested PhraseSets.
    pub phrase_sets: std::vec::Vec<crate::model::PhraseSet>,

    /// A token, which can be sent as
    /// [page_token][google.cloud.speech.v2.ListPhraseSetsRequest.page_token] to
    /// retrieve the next page. If this field is omitted, there are no subsequent
    /// pages. This token expires after 72 hours.
    ///
    /// [google.cloud.speech.v2.ListPhraseSetsRequest.page_token]: crate::model::ListPhraseSetsRequest::page_token
    pub next_page_token: std::string::String,

    // NOTE(review): presumably preserves JSON properties not modeled by the fields
    // above (see `mod serialize`/`mod deserialize`) — confirm with the generator.
    pub(crate) _unknown_fields: serde_json::Map<std::string::String, serde_json::Value>,
}
9601
9602impl ListPhraseSetsResponse {
9603 pub fn new() -> Self {
9604 std::default::Default::default()
9605 }
9606
9607 /// Sets the value of [phrase_sets][crate::model::ListPhraseSetsResponse::phrase_sets].
9608 ///
9609 /// # Example
9610 /// ```ignore,no_run
9611 /// # use google_cloud_speech_v2::model::ListPhraseSetsResponse;
9612 /// use google_cloud_speech_v2::model::PhraseSet;
9613 /// let x = ListPhraseSetsResponse::new()
9614 /// .set_phrase_sets([
9615 /// PhraseSet::default()/* use setters */,
9616 /// PhraseSet::default()/* use (different) setters */,
9617 /// ]);
9618 /// ```
9619 pub fn set_phrase_sets<T, V>(mut self, v: T) -> Self
9620 where
9621 T: std::iter::IntoIterator<Item = V>,
9622 V: std::convert::Into<crate::model::PhraseSet>,
9623 {
9624 use std::iter::Iterator;
9625 self.phrase_sets = v.into_iter().map(|i| i.into()).collect();
9626 self
9627 }
9628
9629 /// Sets the value of [next_page_token][crate::model::ListPhraseSetsResponse::next_page_token].
9630 ///
9631 /// # Example
9632 /// ```ignore,no_run
9633 /// # use google_cloud_speech_v2::model::ListPhraseSetsResponse;
9634 /// let x = ListPhraseSetsResponse::new().set_next_page_token("example");
9635 /// ```
9636 pub fn set_next_page_token<T: std::convert::Into<std::string::String>>(mut self, v: T) -> Self {
9637 self.next_page_token = v.into();
9638 self
9639 }
9640}
9641
9642impl wkt::message::Message for ListPhraseSetsResponse {
9643 fn typename() -> &'static str {
9644 "type.googleapis.com/google.cloud.speech.v2.ListPhraseSetsResponse"
9645 }
9646}
9647
9648#[doc(hidden)]
9649impl google_cloud_gax::paginator::internal::PageableResponse for ListPhraseSetsResponse {
9650 type PageItem = crate::model::PhraseSet;
9651
9652 fn items(self) -> std::vec::Vec<Self::PageItem> {
9653 self.phrase_sets
9654 }
9655
9656 fn next_page_token(&self) -> std::string::String {
9657 use std::clone::Clone;
9658 self.next_page_token.clone()
9659 }
9660}
9661
/// Request message for the
/// [GetPhraseSet][google.cloud.speech.v2.Speech.GetPhraseSet] method.
///
/// [google.cloud.speech.v2.Speech.GetPhraseSet]: crate::client::Speech::get_phrase_set
#[derive(Clone, Default, PartialEq)]
#[non_exhaustive]
pub struct GetPhraseSetRequest {
    /// Required. The name of the PhraseSet to retrieve. The expected format is
    /// `projects/{project}/locations/{location}/phraseSets/{phrase_set}`.
    pub name: std::string::String,

    // NOTE(review): presumably preserves JSON properties not modeled by the fields
    // above (see `mod serialize`/`mod deserialize`) — confirm with the generator.
    pub(crate) _unknown_fields: serde_json::Map<std::string::String, serde_json::Value>,
}
9675
9676impl GetPhraseSetRequest {
9677 pub fn new() -> Self {
9678 std::default::Default::default()
9679 }
9680
9681 /// Sets the value of [name][crate::model::GetPhraseSetRequest::name].
9682 ///
9683 /// # Example
9684 /// ```ignore,no_run
9685 /// # use google_cloud_speech_v2::model::GetPhraseSetRequest;
9686 /// let x = GetPhraseSetRequest::new().set_name("example");
9687 /// ```
9688 pub fn set_name<T: std::convert::Into<std::string::String>>(mut self, v: T) -> Self {
9689 self.name = v.into();
9690 self
9691 }
9692}
9693
9694impl wkt::message::Message for GetPhraseSetRequest {
9695 fn typename() -> &'static str {
9696 "type.googleapis.com/google.cloud.speech.v2.GetPhraseSetRequest"
9697 }
9698}
9699
/// Request message for the
/// [UpdatePhraseSet][google.cloud.speech.v2.Speech.UpdatePhraseSet] method.
///
/// [google.cloud.speech.v2.Speech.UpdatePhraseSet]: crate::client::Speech::update_phrase_set
#[derive(Clone, Default, PartialEq)]
#[non_exhaustive]
pub struct UpdatePhraseSetRequest {
    /// Required. The PhraseSet to update.
    ///
    /// The PhraseSet's `name` field is used to identify the PhraseSet to update.
    /// Format: `projects/{project}/locations/{location}/phraseSets/{phrase_set}`.
    pub phrase_set: std::option::Option<crate::model::PhraseSet>,

    /// The list of fields to update. If empty, all non-default valued fields are
    /// considered for update. Use `*` to update the entire PhraseSet resource.
    pub update_mask: std::option::Option<wkt::FieldMask>,

    /// If set, validate the request and preview the updated PhraseSet, but do not
    /// actually update it.
    pub validate_only: bool,

    // NOTE(review): presumably preserves JSON properties not modeled by the fields
    // above (see `mod serialize`/`mod deserialize`) — confirm with the generator.
    pub(crate) _unknown_fields: serde_json::Map<std::string::String, serde_json::Value>,
}
9723
9724impl UpdatePhraseSetRequest {
9725 pub fn new() -> Self {
9726 std::default::Default::default()
9727 }
9728
9729 /// Sets the value of [phrase_set][crate::model::UpdatePhraseSetRequest::phrase_set].
9730 ///
9731 /// # Example
9732 /// ```ignore,no_run
9733 /// # use google_cloud_speech_v2::model::UpdatePhraseSetRequest;
9734 /// use google_cloud_speech_v2::model::PhraseSet;
9735 /// let x = UpdatePhraseSetRequest::new().set_phrase_set(PhraseSet::default()/* use setters */);
9736 /// ```
9737 pub fn set_phrase_set<T>(mut self, v: T) -> Self
9738 where
9739 T: std::convert::Into<crate::model::PhraseSet>,
9740 {
9741 self.phrase_set = std::option::Option::Some(v.into());
9742 self
9743 }
9744
9745 /// Sets or clears the value of [phrase_set][crate::model::UpdatePhraseSetRequest::phrase_set].
9746 ///
9747 /// # Example
9748 /// ```ignore,no_run
9749 /// # use google_cloud_speech_v2::model::UpdatePhraseSetRequest;
9750 /// use google_cloud_speech_v2::model::PhraseSet;
9751 /// let x = UpdatePhraseSetRequest::new().set_or_clear_phrase_set(Some(PhraseSet::default()/* use setters */));
9752 /// let x = UpdatePhraseSetRequest::new().set_or_clear_phrase_set(None::<PhraseSet>);
9753 /// ```
9754 pub fn set_or_clear_phrase_set<T>(mut self, v: std::option::Option<T>) -> Self
9755 where
9756 T: std::convert::Into<crate::model::PhraseSet>,
9757 {
9758 self.phrase_set = v.map(|x| x.into());
9759 self
9760 }
9761
9762 /// Sets the value of [update_mask][crate::model::UpdatePhraseSetRequest::update_mask].
9763 ///
9764 /// # Example
9765 /// ```ignore,no_run
9766 /// # use google_cloud_speech_v2::model::UpdatePhraseSetRequest;
9767 /// use wkt::FieldMask;
9768 /// let x = UpdatePhraseSetRequest::new().set_update_mask(FieldMask::default()/* use setters */);
9769 /// ```
9770 pub fn set_update_mask<T>(mut self, v: T) -> Self
9771 where
9772 T: std::convert::Into<wkt::FieldMask>,
9773 {
9774 self.update_mask = std::option::Option::Some(v.into());
9775 self
9776 }
9777
9778 /// Sets or clears the value of [update_mask][crate::model::UpdatePhraseSetRequest::update_mask].
9779 ///
9780 /// # Example
9781 /// ```ignore,no_run
9782 /// # use google_cloud_speech_v2::model::UpdatePhraseSetRequest;
9783 /// use wkt::FieldMask;
9784 /// let x = UpdatePhraseSetRequest::new().set_or_clear_update_mask(Some(FieldMask::default()/* use setters */));
9785 /// let x = UpdatePhraseSetRequest::new().set_or_clear_update_mask(None::<FieldMask>);
9786 /// ```
9787 pub fn set_or_clear_update_mask<T>(mut self, v: std::option::Option<T>) -> Self
9788 where
9789 T: std::convert::Into<wkt::FieldMask>,
9790 {
9791 self.update_mask = v.map(|x| x.into());
9792 self
9793 }
9794
9795 /// Sets the value of [validate_only][crate::model::UpdatePhraseSetRequest::validate_only].
9796 ///
9797 /// # Example
9798 /// ```ignore,no_run
9799 /// # use google_cloud_speech_v2::model::UpdatePhraseSetRequest;
9800 /// let x = UpdatePhraseSetRequest::new().set_validate_only(true);
9801 /// ```
9802 pub fn set_validate_only<T: std::convert::Into<bool>>(mut self, v: T) -> Self {
9803 self.validate_only = v.into();
9804 self
9805 }
9806}
9807
9808impl wkt::message::Message for UpdatePhraseSetRequest {
9809 fn typename() -> &'static str {
9810 "type.googleapis.com/google.cloud.speech.v2.UpdatePhraseSetRequest"
9811 }
9812}
9813
/// Request message for the
/// [DeletePhraseSet][google.cloud.speech.v2.Speech.DeletePhraseSet] method.
///
/// [google.cloud.speech.v2.Speech.DeletePhraseSet]: crate::client::Speech::delete_phrase_set
#[derive(Clone, Default, PartialEq)]
#[non_exhaustive]
pub struct DeletePhraseSetRequest {
    /// Required. The name of the PhraseSet to delete.
    /// Format: `projects/{project}/locations/{location}/phraseSets/{phrase_set}`
    pub name: std::string::String,

    /// If set, validate the request and preview the deleted PhraseSet, but do not
    /// actually delete it.
    pub validate_only: bool,

    /// If set to true, and the PhraseSet is not found, the request will succeed
    /// and be a no-op (no Operation is recorded in this case).
    pub allow_missing: bool,

    /// This checksum is computed by the server based on the value of other
    /// fields. This may be sent on update, undelete, and delete requests to ensure
    /// the client has an up-to-date value before proceeding.
    pub etag: std::string::String,

    // NOTE(review): presumably preserves JSON properties not modeled by the fields
    // above (see `mod serialize`/`mod deserialize`) — confirm with the generator.
    pub(crate) _unknown_fields: serde_json::Map<std::string::String, serde_json::Value>,
}
9840
9841impl DeletePhraseSetRequest {
9842 pub fn new() -> Self {
9843 std::default::Default::default()
9844 }
9845
9846 /// Sets the value of [name][crate::model::DeletePhraseSetRequest::name].
9847 ///
9848 /// # Example
9849 /// ```ignore,no_run
9850 /// # use google_cloud_speech_v2::model::DeletePhraseSetRequest;
9851 /// let x = DeletePhraseSetRequest::new().set_name("example");
9852 /// ```
9853 pub fn set_name<T: std::convert::Into<std::string::String>>(mut self, v: T) -> Self {
9854 self.name = v.into();
9855 self
9856 }
9857
9858 /// Sets the value of [validate_only][crate::model::DeletePhraseSetRequest::validate_only].
9859 ///
9860 /// # Example
9861 /// ```ignore,no_run
9862 /// # use google_cloud_speech_v2::model::DeletePhraseSetRequest;
9863 /// let x = DeletePhraseSetRequest::new().set_validate_only(true);
9864 /// ```
9865 pub fn set_validate_only<T: std::convert::Into<bool>>(mut self, v: T) -> Self {
9866 self.validate_only = v.into();
9867 self
9868 }
9869
9870 /// Sets the value of [allow_missing][crate::model::DeletePhraseSetRequest::allow_missing].
9871 ///
9872 /// # Example
9873 /// ```ignore,no_run
9874 /// # use google_cloud_speech_v2::model::DeletePhraseSetRequest;
9875 /// let x = DeletePhraseSetRequest::new().set_allow_missing(true);
9876 /// ```
9877 pub fn set_allow_missing<T: std::convert::Into<bool>>(mut self, v: T) -> Self {
9878 self.allow_missing = v.into();
9879 self
9880 }
9881
9882 /// Sets the value of [etag][crate::model::DeletePhraseSetRequest::etag].
9883 ///
9884 /// # Example
9885 /// ```ignore,no_run
9886 /// # use google_cloud_speech_v2::model::DeletePhraseSetRequest;
9887 /// let x = DeletePhraseSetRequest::new().set_etag("example");
9888 /// ```
9889 pub fn set_etag<T: std::convert::Into<std::string::String>>(mut self, v: T) -> Self {
9890 self.etag = v.into();
9891 self
9892 }
9893}
9894
impl wkt::message::Message for DeletePhraseSetRequest {
    /// Returns the fully qualified `type.googleapis.com` type URL for this message.
    fn typename() -> &'static str {
        "type.googleapis.com/google.cloud.speech.v2.DeletePhraseSetRequest"
    }
}
9900
/// Request message for the
/// [UndeletePhraseSet][google.cloud.speech.v2.Speech.UndeletePhraseSet]
/// method.
///
/// [google.cloud.speech.v2.Speech.UndeletePhraseSet]: crate::client::Speech::undelete_phrase_set
#[derive(Clone, Default, PartialEq)]
#[non_exhaustive]
pub struct UndeletePhraseSetRequest {
    /// Required. The name of the PhraseSet to undelete.
    /// Format: `projects/{project}/locations/{location}/phraseSets/{phrase_set}`
    pub name: std::string::String,

    /// If set, validate the request and preview the undeleted PhraseSet, but do
    /// not actually undelete it.
    pub validate_only: bool,

    /// This checksum is computed by the server based on the value of other
    /// fields. This may be sent on update, undelete, and delete requests to ensure
    /// the client has an up-to-date value before proceeding.
    pub etag: std::string::String,

    // Wire fields not modeled by this struct; presumably round-tripped by the
    // generated `serialize`/`deserialize` modules so they are not lost.
    pub(crate) _unknown_fields: serde_json::Map<std::string::String, serde_json::Value>,
}
9924
9925impl UndeletePhraseSetRequest {
9926 pub fn new() -> Self {
9927 std::default::Default::default()
9928 }
9929
9930 /// Sets the value of [name][crate::model::UndeletePhraseSetRequest::name].
9931 ///
9932 /// # Example
9933 /// ```ignore,no_run
9934 /// # use google_cloud_speech_v2::model::UndeletePhraseSetRequest;
9935 /// let x = UndeletePhraseSetRequest::new().set_name("example");
9936 /// ```
9937 pub fn set_name<T: std::convert::Into<std::string::String>>(mut self, v: T) -> Self {
9938 self.name = v.into();
9939 self
9940 }
9941
9942 /// Sets the value of [validate_only][crate::model::UndeletePhraseSetRequest::validate_only].
9943 ///
9944 /// # Example
9945 /// ```ignore,no_run
9946 /// # use google_cloud_speech_v2::model::UndeletePhraseSetRequest;
9947 /// let x = UndeletePhraseSetRequest::new().set_validate_only(true);
9948 /// ```
9949 pub fn set_validate_only<T: std::convert::Into<bool>>(mut self, v: T) -> Self {
9950 self.validate_only = v.into();
9951 self
9952 }
9953
9954 /// Sets the value of [etag][crate::model::UndeletePhraseSetRequest::etag].
9955 ///
9956 /// # Example
9957 /// ```ignore,no_run
9958 /// # use google_cloud_speech_v2::model::UndeletePhraseSetRequest;
9959 /// let x = UndeletePhraseSetRequest::new().set_etag("example");
9960 /// ```
9961 pub fn set_etag<T: std::convert::Into<std::string::String>>(mut self, v: T) -> Self {
9962 self.etag = v.into();
9963 self
9964 }
9965}
9966
impl wkt::message::Message for UndeletePhraseSetRequest {
    /// Returns the fully qualified `type.googleapis.com` type URL for this message.
    fn typename() -> &'static str {
        "type.googleapis.com/google.cloud.speech.v2.UndeletePhraseSetRequest"
    }
}
9972
/// Represents a singular feature of a model. If the feature is `recognizer`,
/// the release_state of the feature represents the release_state of the model
#[derive(Clone, Default, PartialEq)]
#[non_exhaustive]
pub struct ModelFeature {
    /// The name of the feature (Note: the feature can be `recognizer`)
    pub feature: std::string::String,

    /// The release state of the feature
    pub release_state: std::string::String,

    // Wire fields not modeled by this struct; presumably round-tripped by the
    // generated `serialize`/`deserialize` modules so they are not lost.
    pub(crate) _unknown_fields: serde_json::Map<std::string::String, serde_json::Value>,
}
9986
9987impl ModelFeature {
9988 pub fn new() -> Self {
9989 std::default::Default::default()
9990 }
9991
9992 /// Sets the value of [feature][crate::model::ModelFeature::feature].
9993 ///
9994 /// # Example
9995 /// ```ignore,no_run
9996 /// # use google_cloud_speech_v2::model::ModelFeature;
9997 /// let x = ModelFeature::new().set_feature("example");
9998 /// ```
9999 pub fn set_feature<T: std::convert::Into<std::string::String>>(mut self, v: T) -> Self {
10000 self.feature = v.into();
10001 self
10002 }
10003
10004 /// Sets the value of [release_state][crate::model::ModelFeature::release_state].
10005 ///
10006 /// # Example
10007 /// ```ignore,no_run
10008 /// # use google_cloud_speech_v2::model::ModelFeature;
10009 /// let x = ModelFeature::new().set_release_state("example");
10010 /// ```
10011 pub fn set_release_state<T: std::convert::Into<std::string::String>>(mut self, v: T) -> Self {
10012 self.release_state = v.into();
10013 self
10014 }
10015}
10016
impl wkt::message::Message for ModelFeature {
    /// Returns the fully qualified `type.googleapis.com` type URL for this message.
    fn typename() -> &'static str {
        "type.googleapis.com/google.cloud.speech.v2.ModelFeature"
    }
}
10022
/// Represents the collection of features belonging to a model
#[derive(Clone, Default, PartialEq)]
#[non_exhaustive]
pub struct ModelFeatures {
    /// Repeated field that contains all features of the model
    pub model_feature: std::vec::Vec<crate::model::ModelFeature>,

    // Wire fields not modeled by this struct; presumably round-tripped by the
    // generated `serialize`/`deserialize` modules so they are not lost.
    pub(crate) _unknown_fields: serde_json::Map<std::string::String, serde_json::Value>,
}
10032
10033impl ModelFeatures {
10034 pub fn new() -> Self {
10035 std::default::Default::default()
10036 }
10037
10038 /// Sets the value of [model_feature][crate::model::ModelFeatures::model_feature].
10039 ///
10040 /// # Example
10041 /// ```ignore,no_run
10042 /// # use google_cloud_speech_v2::model::ModelFeatures;
10043 /// use google_cloud_speech_v2::model::ModelFeature;
10044 /// let x = ModelFeatures::new()
10045 /// .set_model_feature([
10046 /// ModelFeature::default()/* use setters */,
10047 /// ModelFeature::default()/* use (different) setters */,
10048 /// ]);
10049 /// ```
10050 pub fn set_model_feature<T, V>(mut self, v: T) -> Self
10051 where
10052 T: std::iter::IntoIterator<Item = V>,
10053 V: std::convert::Into<crate::model::ModelFeature>,
10054 {
10055 use std::iter::Iterator;
10056 self.model_feature = v.into_iter().map(|i| i.into()).collect();
10057 self
10058 }
10059}
10060
impl wkt::message::Message for ModelFeatures {
    /// Returns the fully qualified `type.googleapis.com` type URL for this message.
    fn typename() -> &'static str {
        "type.googleapis.com/google.cloud.speech.v2.ModelFeatures"
    }
}
10066
/// The metadata about the models in a given region for a specific locale.
/// Currently this is just the features of the model
#[derive(Clone, Default, PartialEq)]
#[non_exhaustive]
pub struct ModelMetadata {
    /// Map of the model name -> features of that model
    pub model_features: std::collections::HashMap<std::string::String, crate::model::ModelFeatures>,

    // Wire fields not modeled by this struct; presumably round-tripped by the
    // generated `serialize`/`deserialize` modules so they are not lost.
    pub(crate) _unknown_fields: serde_json::Map<std::string::String, serde_json::Value>,
}
10077
10078impl ModelMetadata {
10079 pub fn new() -> Self {
10080 std::default::Default::default()
10081 }
10082
10083 /// Sets the value of [model_features][crate::model::ModelMetadata::model_features].
10084 ///
10085 /// # Example
10086 /// ```ignore,no_run
10087 /// # use google_cloud_speech_v2::model::ModelMetadata;
10088 /// use google_cloud_speech_v2::model::ModelFeatures;
10089 /// let x = ModelMetadata::new().set_model_features([
10090 /// ("key0", ModelFeatures::default()/* use setters */),
10091 /// ("key1", ModelFeatures::default()/* use (different) setters */),
10092 /// ]);
10093 /// ```
10094 pub fn set_model_features<T, K, V>(mut self, v: T) -> Self
10095 where
10096 T: std::iter::IntoIterator<Item = (K, V)>,
10097 K: std::convert::Into<std::string::String>,
10098 V: std::convert::Into<crate::model::ModelFeatures>,
10099 {
10100 use std::iter::Iterator;
10101 self.model_features = v.into_iter().map(|(k, v)| (k.into(), v.into())).collect();
10102 self
10103 }
10104}
10105
impl wkt::message::Message for ModelMetadata {
    /// Returns the fully qualified `type.googleapis.com` type URL for this message.
    fn typename() -> &'static str {
        "type.googleapis.com/google.cloud.speech.v2.ModelMetadata"
    }
}
10111
/// The metadata about locales available in a given region. Currently this is
/// just the models that are available for each locale
#[derive(Clone, Default, PartialEq)]
#[non_exhaustive]
pub struct LanguageMetadata {
    /// Map of locale (language code) -> models
    pub models: std::collections::HashMap<std::string::String, crate::model::ModelMetadata>,

    // Wire fields not modeled by this struct; presumably round-tripped by the
    // generated `serialize`/`deserialize` modules so they are not lost.
    pub(crate) _unknown_fields: serde_json::Map<std::string::String, serde_json::Value>,
}
10122
10123impl LanguageMetadata {
10124 pub fn new() -> Self {
10125 std::default::Default::default()
10126 }
10127
10128 /// Sets the value of [models][crate::model::LanguageMetadata::models].
10129 ///
10130 /// # Example
10131 /// ```ignore,no_run
10132 /// # use google_cloud_speech_v2::model::LanguageMetadata;
10133 /// use google_cloud_speech_v2::model::ModelMetadata;
10134 /// let x = LanguageMetadata::new().set_models([
10135 /// ("key0", ModelMetadata::default()/* use setters */),
10136 /// ("key1", ModelMetadata::default()/* use (different) setters */),
10137 /// ]);
10138 /// ```
10139 pub fn set_models<T, K, V>(mut self, v: T) -> Self
10140 where
10141 T: std::iter::IntoIterator<Item = (K, V)>,
10142 K: std::convert::Into<std::string::String>,
10143 V: std::convert::Into<crate::model::ModelMetadata>,
10144 {
10145 use std::iter::Iterator;
10146 self.models = v.into_iter().map(|(k, v)| (k.into(), v.into())).collect();
10147 self
10148 }
10149}
10150
impl wkt::message::Message for LanguageMetadata {
    /// Returns the fully qualified `type.googleapis.com` type URL for this message.
    fn typename() -> &'static str {
        "type.googleapis.com/google.cloud.speech.v2.LanguageMetadata"
    }
}
10156
/// The access metadata for a particular region. This can be applied if the org
/// policy for the given project disallows a particular region.
#[derive(Clone, Default, PartialEq)]
#[non_exhaustive]
pub struct AccessMetadata {
    /// Describes the different types of constraints that are applied.
    pub constraint_type: crate::model::access_metadata::ConstraintType,

    // Wire fields not modeled by this struct; presumably round-tripped by the
    // generated `serialize`/`deserialize` modules so they are not lost.
    pub(crate) _unknown_fields: serde_json::Map<std::string::String, serde_json::Value>,
}
10167
10168impl AccessMetadata {
10169 pub fn new() -> Self {
10170 std::default::Default::default()
10171 }
10172
10173 /// Sets the value of [constraint_type][crate::model::AccessMetadata::constraint_type].
10174 ///
10175 /// # Example
10176 /// ```ignore,no_run
10177 /// # use google_cloud_speech_v2::model::AccessMetadata;
10178 /// use google_cloud_speech_v2::model::access_metadata::ConstraintType;
10179 /// let x0 = AccessMetadata::new().set_constraint_type(ConstraintType::ResourceLocationsOrgPolicyCreateConstraint);
10180 /// ```
10181 pub fn set_constraint_type<
10182 T: std::convert::Into<crate::model::access_metadata::ConstraintType>,
10183 >(
10184 mut self,
10185 v: T,
10186 ) -> Self {
10187 self.constraint_type = v.into();
10188 self
10189 }
10190}
10191
impl wkt::message::Message for AccessMetadata {
    /// Returns the fully qualified `type.googleapis.com` type URL for this message.
    fn typename() -> &'static str {
        "type.googleapis.com/google.cloud.speech.v2.AccessMetadata"
    }
}
10197
/// Defines additional types related to [AccessMetadata].
pub mod access_metadata {
    #[allow(unused_imports)]
    use super::*;

    /// Describes the different types of constraints that can be applied on a
    /// region.
    ///
    /// # Working with unknown values
    ///
    /// This enum is defined as `#[non_exhaustive]` because Google Cloud may add
    /// additional enum variants at any time. Adding new variants is not considered
    /// a breaking change. Applications should write their code in anticipation of:
    ///
    /// - New values appearing in future releases of the client library, **and**
    /// - New values received dynamically, without application changes.
    ///
    /// Please consult the [Working with enums] section in the user guide for some
    /// guidelines.
    ///
    /// [Working with enums]: https://google-cloud-rust.github.io/working_with_enums.html
    #[derive(Clone, Debug, PartialEq)]
    #[non_exhaustive]
    pub enum ConstraintType {
        /// Unspecified constraint applied.
        Unspecified,
        /// The project's org policy disallows the given region.
        ResourceLocationsOrgPolicyCreateConstraint,
        /// If set, the enum was initialized with an unknown value.
        ///
        /// Applications can examine the value using [ConstraintType::value] or
        /// [ConstraintType::name].
        UnknownValue(constraint_type::UnknownValue),
    }

    #[doc(hidden)]
    pub mod constraint_type {
        #[allow(unused_imports)]
        use super::*;
        // Wrapper over the raw unknown representation. The inner field is
        // `pub(crate)`, so unknown values can only be created inside this
        // crate (e.g. by the `From` conversions below).
        #[derive(Clone, Debug, PartialEq)]
        pub struct UnknownValue(pub(crate) wkt::internal::UnknownEnumValue);
    }

    impl ConstraintType {
        /// Gets the enum value.
        ///
        /// Returns `None` if the enum contains an unknown value deserialized from
        /// the string representation of enums.
        pub fn value(&self) -> std::option::Option<i32> {
            match self {
                Self::Unspecified => std::option::Option::Some(0),
                Self::ResourceLocationsOrgPolicyCreateConstraint => std::option::Option::Some(1),
                Self::UnknownValue(u) => u.0.value(),
            }
        }

        /// Gets the enum value as a string.
        ///
        /// Returns `None` if the enum contains an unknown value deserialized from
        /// the integer representation of enums.
        pub fn name(&self) -> std::option::Option<&str> {
            match self {
                Self::Unspecified => std::option::Option::Some("CONSTRAINT_TYPE_UNSPECIFIED"),
                Self::ResourceLocationsOrgPolicyCreateConstraint => {
                    std::option::Option::Some("RESOURCE_LOCATIONS_ORG_POLICY_CREATE_CONSTRAINT")
                }
                Self::UnknownValue(u) => u.0.name(),
            }
        }
    }

    // The default is whatever 0 maps to in `From<i32>`, i.e. `Unspecified`.
    impl std::default::Default for ConstraintType {
        fn default() -> Self {
            use std::convert::From;
            Self::from(0)
        }
    }

    impl std::fmt::Display for ConstraintType {
        fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::result::Result<(), std::fmt::Error> {
            wkt::internal::display_enum(f, self.name(), self.value())
        }
    }

    // Integers outside the known range are preserved as `UnknownValue` rather
    // than rejected, so values added server-side still round-trip.
    impl std::convert::From<i32> for ConstraintType {
        fn from(value: i32) -> Self {
            match value {
                0 => Self::Unspecified,
                1 => Self::ResourceLocationsOrgPolicyCreateConstraint,
                _ => Self::UnknownValue(constraint_type::UnknownValue(
                    wkt::internal::UnknownEnumValue::Integer(value),
                )),
            }
        }
    }

    // Unrecognized strings are likewise preserved as `UnknownValue`.
    impl std::convert::From<&str> for ConstraintType {
        fn from(value: &str) -> Self {
            use std::string::ToString;
            match value {
                "CONSTRAINT_TYPE_UNSPECIFIED" => Self::Unspecified,
                "RESOURCE_LOCATIONS_ORG_POLICY_CREATE_CONSTRAINT" => {
                    Self::ResourceLocationsOrgPolicyCreateConstraint
                }
                _ => Self::UnknownValue(constraint_type::UnknownValue(
                    wkt::internal::UnknownEnumValue::String(value.to_string()),
                )),
            }
        }
    }

    // Known variants serialize as their integer values; unknown values delegate
    // to the wrapped representation (presumably re-emitting the original
    // integer or string form it was decoded from).
    impl serde::ser::Serialize for ConstraintType {
        fn serialize<S>(&self, serializer: S) -> std::result::Result<S::Ok, S::Error>
        where
            S: serde::Serializer,
        {
            match self {
                Self::Unspecified => serializer.serialize_i32(0),
                Self::ResourceLocationsOrgPolicyCreateConstraint => serializer.serialize_i32(1),
                Self::UnknownValue(u) => u.0.serialize(serializer),
            }
        }
    }

    // `deserialize_any` lets the visitor see whichever form (integer or string)
    // is on the wire; the fully qualified enum name is handed to `EnumVisitor`,
    // presumably for error reporting. TODO(review): confirm against EnumVisitor.
    impl<'de> serde::de::Deserialize<'de> for ConstraintType {
        fn deserialize<D>(deserializer: D) -> std::result::Result<Self, D::Error>
        where
            D: serde::Deserializer<'de>,
        {
            deserializer.deserialize_any(wkt::internal::EnumVisitor::<ConstraintType>::new(
                ".google.cloud.speech.v2.AccessMetadata.ConstraintType",
            ))
        }
    }
}
10333
/// Main metadata for the Locations API for STT V2. Currently this is just the
/// metadata about locales, models, and features
#[derive(Clone, Default, PartialEq)]
#[non_exhaustive]
pub struct LocationsMetadata {
    /// Information about available locales, models, and features represented in
    /// the hierarchical structure of locales -> models -> features
    pub languages: std::option::Option<crate::model::LanguageMetadata>,

    /// Information about access metadata for the region and given project.
    pub access_metadata: std::option::Option<crate::model::AccessMetadata>,

    // Wire fields not modeled by this struct; presumably round-tripped by the
    // generated `serialize`/`deserialize` modules so they are not lost.
    pub(crate) _unknown_fields: serde_json::Map<std::string::String, serde_json::Value>,
}
10348
10349impl LocationsMetadata {
10350 pub fn new() -> Self {
10351 std::default::Default::default()
10352 }
10353
10354 /// Sets the value of [languages][crate::model::LocationsMetadata::languages].
10355 ///
10356 /// # Example
10357 /// ```ignore,no_run
10358 /// # use google_cloud_speech_v2::model::LocationsMetadata;
10359 /// use google_cloud_speech_v2::model::LanguageMetadata;
10360 /// let x = LocationsMetadata::new().set_languages(LanguageMetadata::default()/* use setters */);
10361 /// ```
10362 pub fn set_languages<T>(mut self, v: T) -> Self
10363 where
10364 T: std::convert::Into<crate::model::LanguageMetadata>,
10365 {
10366 self.languages = std::option::Option::Some(v.into());
10367 self
10368 }
10369
10370 /// Sets or clears the value of [languages][crate::model::LocationsMetadata::languages].
10371 ///
10372 /// # Example
10373 /// ```ignore,no_run
10374 /// # use google_cloud_speech_v2::model::LocationsMetadata;
10375 /// use google_cloud_speech_v2::model::LanguageMetadata;
10376 /// let x = LocationsMetadata::new().set_or_clear_languages(Some(LanguageMetadata::default()/* use setters */));
10377 /// let x = LocationsMetadata::new().set_or_clear_languages(None::<LanguageMetadata>);
10378 /// ```
10379 pub fn set_or_clear_languages<T>(mut self, v: std::option::Option<T>) -> Self
10380 where
10381 T: std::convert::Into<crate::model::LanguageMetadata>,
10382 {
10383 self.languages = v.map(|x| x.into());
10384 self
10385 }
10386
10387 /// Sets the value of [access_metadata][crate::model::LocationsMetadata::access_metadata].
10388 ///
10389 /// # Example
10390 /// ```ignore,no_run
10391 /// # use google_cloud_speech_v2::model::LocationsMetadata;
10392 /// use google_cloud_speech_v2::model::AccessMetadata;
10393 /// let x = LocationsMetadata::new().set_access_metadata(AccessMetadata::default()/* use setters */);
10394 /// ```
10395 pub fn set_access_metadata<T>(mut self, v: T) -> Self
10396 where
10397 T: std::convert::Into<crate::model::AccessMetadata>,
10398 {
10399 self.access_metadata = std::option::Option::Some(v.into());
10400 self
10401 }
10402
10403 /// Sets or clears the value of [access_metadata][crate::model::LocationsMetadata::access_metadata].
10404 ///
10405 /// # Example
10406 /// ```ignore,no_run
10407 /// # use google_cloud_speech_v2::model::LocationsMetadata;
10408 /// use google_cloud_speech_v2::model::AccessMetadata;
10409 /// let x = LocationsMetadata::new().set_or_clear_access_metadata(Some(AccessMetadata::default()/* use setters */));
10410 /// let x = LocationsMetadata::new().set_or_clear_access_metadata(None::<AccessMetadata>);
10411 /// ```
10412 pub fn set_or_clear_access_metadata<T>(mut self, v: std::option::Option<T>) -> Self
10413 where
10414 T: std::convert::Into<crate::model::AccessMetadata>,
10415 {
10416 self.access_metadata = v.map(|x| x.into());
10417 self
10418 }
10419}
10420
impl wkt::message::Message for LocationsMetadata {
    /// Returns the fully qualified `type.googleapis.com` type URL for this message.
    fn typename() -> &'static str {
        "type.googleapis.com/google.cloud.speech.v2.LocationsMetadata"
    }
}