aws_sdk_bedrock/client/create_model_invocation_job.rs
// Code generated by software.amazon.smithy.rust.codegen.smithy-rs. DO NOT EDIT.
2impl super::Client {
3 /// Constructs a fluent builder for the [`CreateModelInvocationJob`](crate::operation::create_model_invocation_job::builders::CreateModelInvocationJobFluentBuilder) operation.
4 ///
5 /// - The fluent builder is configurable:
6 /// - [`job_name(impl Into<String>)`](crate::operation::create_model_invocation_job::builders::CreateModelInvocationJobFluentBuilder::job_name) / [`set_job_name(Option<String>)`](crate::operation::create_model_invocation_job::builders::CreateModelInvocationJobFluentBuilder::set_job_name):<br>required: **true**<br><p>A name to give the batch inference job.</p><br>
7 /// - [`role_arn(impl Into<String>)`](crate::operation::create_model_invocation_job::builders::CreateModelInvocationJobFluentBuilder::role_arn) / [`set_role_arn(Option<String>)`](crate::operation::create_model_invocation_job::builders::CreateModelInvocationJobFluentBuilder::set_role_arn):<br>required: **true**<br><p>The Amazon Resource Name (ARN) of the service role with permissions to carry out and manage batch inference. You can use the console to create a default service role or follow the steps at <a href="https://docs.aws.amazon.com/bedrock/latest/userguide/batch-iam-sr.html">Create a service role for batch inference</a>.</p><br>
8 /// - [`client_request_token(impl Into<String>)`](crate::operation::create_model_invocation_job::builders::CreateModelInvocationJobFluentBuilder::client_request_token) / [`set_client_request_token(Option<String>)`](crate::operation::create_model_invocation_job::builders::CreateModelInvocationJobFluentBuilder::set_client_request_token):<br>required: **false**<br><p>A unique, case-sensitive identifier to ensure that the API request completes no more than one time. If this token matches a previous request, Amazon Bedrock ignores the request, but does not return an error. For more information, see <a href="https://docs.aws.amazon.com/AWSEC2/latest/APIReference/Run_Instance_Idempotency.html">Ensuring idempotency</a>.</p><br>
9 /// - [`model_id(impl Into<String>)`](crate::operation::create_model_invocation_job::builders::CreateModelInvocationJobFluentBuilder::model_id) / [`set_model_id(Option<String>)`](crate::operation::create_model_invocation_job::builders::CreateModelInvocationJobFluentBuilder::set_model_id):<br>required: **true**<br><p>The unique identifier of the foundation model to use for the batch inference job.</p><br>
10 /// - [`input_data_config(ModelInvocationJobInputDataConfig)`](crate::operation::create_model_invocation_job::builders::CreateModelInvocationJobFluentBuilder::input_data_config) / [`set_input_data_config(Option<ModelInvocationJobInputDataConfig>)`](crate::operation::create_model_invocation_job::builders::CreateModelInvocationJobFluentBuilder::set_input_data_config):<br>required: **true**<br><p>Details about the location of the input to the batch inference job.</p><br>
11 /// - [`output_data_config(ModelInvocationJobOutputDataConfig)`](crate::operation::create_model_invocation_job::builders::CreateModelInvocationJobFluentBuilder::output_data_config) / [`set_output_data_config(Option<ModelInvocationJobOutputDataConfig>)`](crate::operation::create_model_invocation_job::builders::CreateModelInvocationJobFluentBuilder::set_output_data_config):<br>required: **true**<br><p>Details about the location of the output of the batch inference job.</p><br>
12 /// - [`vpc_config(VpcConfig)`](crate::operation::create_model_invocation_job::builders::CreateModelInvocationJobFluentBuilder::vpc_config) / [`set_vpc_config(Option<VpcConfig>)`](crate::operation::create_model_invocation_job::builders::CreateModelInvocationJobFluentBuilder::set_vpc_config):<br>required: **false**<br><p>The configuration of the Virtual Private Cloud (VPC) for the data in the batch inference job. For more information, see <a href="https://docs.aws.amazon.com/bedrock/latest/userguide/batch-vpc">Protect batch inference jobs using a VPC</a>.</p><br>
13 /// - [`timeout_duration_in_hours(i32)`](crate::operation::create_model_invocation_job::builders::CreateModelInvocationJobFluentBuilder::timeout_duration_in_hours) / [`set_timeout_duration_in_hours(Option<i32>)`](crate::operation::create_model_invocation_job::builders::CreateModelInvocationJobFluentBuilder::set_timeout_duration_in_hours):<br>required: **false**<br><p>The number of hours after which to force the batch inference job to time out.</p><br>
14 /// - [`tags(Tag)`](crate::operation::create_model_invocation_job::builders::CreateModelInvocationJobFluentBuilder::tags) / [`set_tags(Option<Vec::<Tag>>)`](crate::operation::create_model_invocation_job::builders::CreateModelInvocationJobFluentBuilder::set_tags):<br>required: **false**<br><p>Any tags to associate with the batch inference job. For more information, see <a href="https://docs.aws.amazon.com/bedrock/latest/userguide/tagging.html">Tagging Amazon Bedrock resources</a>.</p><br>
15 /// - On success, responds with [`CreateModelInvocationJobOutput`](crate::operation::create_model_invocation_job::CreateModelInvocationJobOutput) with field(s):
16 /// - [`job_arn(String)`](crate::operation::create_model_invocation_job::CreateModelInvocationJobOutput::job_arn): <p>The Amazon Resource Name (ARN) of the batch inference job.</p>
17 /// - On failure, responds with [`SdkError<CreateModelInvocationJobError>`](crate::operation::create_model_invocation_job::CreateModelInvocationJobError)
18 pub fn create_model_invocation_job(&self) -> crate::operation::create_model_invocation_job::builders::CreateModelInvocationJobFluentBuilder {
19 crate::operation::create_model_invocation_job::builders::CreateModelInvocationJobFluentBuilder::new(self.handle.clone())
20 }
21}