// aws_sdk_cleanroomsml/client/start_trained_model_inference_job.rs
// Code generated by software.amazon.smithy.rust.codegen.smithy-rs. DO NOT EDIT.
2impl super::Client {
3 /// Constructs a fluent builder for the [`StartTrainedModelInferenceJob`](crate::operation::start_trained_model_inference_job::builders::StartTrainedModelInferenceJobFluentBuilder) operation.
4 ///
5 /// - The fluent builder is configurable:
6 /// - [`membership_identifier(impl Into<String>)`](crate::operation::start_trained_model_inference_job::builders::StartTrainedModelInferenceJobFluentBuilder::membership_identifier) / [`set_membership_identifier(Option<String>)`](crate::operation::start_trained_model_inference_job::builders::StartTrainedModelInferenceJobFluentBuilder::set_membership_identifier):<br>required: **true**<br><p>The membership ID of the membership that contains the trained model inference job.</p><br>
7 /// - [`name(impl Into<String>)`](crate::operation::start_trained_model_inference_job::builders::StartTrainedModelInferenceJobFluentBuilder::name) / [`set_name(Option<String>)`](crate::operation::start_trained_model_inference_job::builders::StartTrainedModelInferenceJobFluentBuilder::set_name):<br>required: **true**<br><p>The name of the trained model inference job.</p><br>
8 /// - [`trained_model_arn(impl Into<String>)`](crate::operation::start_trained_model_inference_job::builders::StartTrainedModelInferenceJobFluentBuilder::trained_model_arn) / [`set_trained_model_arn(Option<String>)`](crate::operation::start_trained_model_inference_job::builders::StartTrainedModelInferenceJobFluentBuilder::set_trained_model_arn):<br>required: **true**<br><p>The Amazon Resource Name (ARN) of the trained model that is used for this trained model inference job.</p><br>
9 /// - [`trained_model_version_identifier(impl Into<String>)`](crate::operation::start_trained_model_inference_job::builders::StartTrainedModelInferenceJobFluentBuilder::trained_model_version_identifier) / [`set_trained_model_version_identifier(Option<String>)`](crate::operation::start_trained_model_inference_job::builders::StartTrainedModelInferenceJobFluentBuilder::set_trained_model_version_identifier):<br>required: **false**<br><p>The version identifier of the trained model to use for inference. This specifies which version of the trained model should be used to generate predictions on the input data.</p><br>
10 /// - [`configured_model_algorithm_association_arn(impl Into<String>)`](crate::operation::start_trained_model_inference_job::builders::StartTrainedModelInferenceJobFluentBuilder::configured_model_algorithm_association_arn) / [`set_configured_model_algorithm_association_arn(Option<String>)`](crate::operation::start_trained_model_inference_job::builders::StartTrainedModelInferenceJobFluentBuilder::set_configured_model_algorithm_association_arn):<br>required: **false**<br><p>The Amazon Resource Name (ARN) of the configured model algorithm association that is used for this trained model inference job.</p><br>
11 /// - [`resource_config(InferenceResourceConfig)`](crate::operation::start_trained_model_inference_job::builders::StartTrainedModelInferenceJobFluentBuilder::resource_config) / [`set_resource_config(Option<InferenceResourceConfig>)`](crate::operation::start_trained_model_inference_job::builders::StartTrainedModelInferenceJobFluentBuilder::set_resource_config):<br>required: **true**<br><p>Defines the resource configuration for the trained model inference job.</p><br>
12 /// - [`output_configuration(InferenceOutputConfiguration)`](crate::operation::start_trained_model_inference_job::builders::StartTrainedModelInferenceJobFluentBuilder::output_configuration) / [`set_output_configuration(Option<InferenceOutputConfiguration>)`](crate::operation::start_trained_model_inference_job::builders::StartTrainedModelInferenceJobFluentBuilder::set_output_configuration):<br>required: **true**<br><p>Defines the output configuration information for the trained model inference job.</p><br>
13 /// - [`data_source(ModelInferenceDataSource)`](crate::operation::start_trained_model_inference_job::builders::StartTrainedModelInferenceJobFluentBuilder::data_source) / [`set_data_source(Option<ModelInferenceDataSource>)`](crate::operation::start_trained_model_inference_job::builders::StartTrainedModelInferenceJobFluentBuilder::set_data_source):<br>required: **true**<br><p>Defines the data source that is used for the trained model inference job.</p><br>
14 /// - [`description(impl Into<String>)`](crate::operation::start_trained_model_inference_job::builders::StartTrainedModelInferenceJobFluentBuilder::description) / [`set_description(Option<String>)`](crate::operation::start_trained_model_inference_job::builders::StartTrainedModelInferenceJobFluentBuilder::set_description):<br>required: **false**<br><p>The description of the trained model inference job.</p><br>
15 /// - [`container_execution_parameters(InferenceContainerExecutionParameters)`](crate::operation::start_trained_model_inference_job::builders::StartTrainedModelInferenceJobFluentBuilder::container_execution_parameters) / [`set_container_execution_parameters(Option<InferenceContainerExecutionParameters>)`](crate::operation::start_trained_model_inference_job::builders::StartTrainedModelInferenceJobFluentBuilder::set_container_execution_parameters):<br>required: **false**<br><p>The execution parameters for the container.</p><br>
16 /// - [`environment(impl Into<String>, impl Into<String>)`](crate::operation::start_trained_model_inference_job::builders::StartTrainedModelInferenceJobFluentBuilder::environment) / [`set_environment(Option<HashMap::<String, String>>)`](crate::operation::start_trained_model_inference_job::builders::StartTrainedModelInferenceJobFluentBuilder::set_environment):<br>required: **false**<br><p>The environment variables to set in the Docker container.</p><br>
17 /// - [`kms_key_arn(impl Into<String>)`](crate::operation::start_trained_model_inference_job::builders::StartTrainedModelInferenceJobFluentBuilder::kms_key_arn) / [`set_kms_key_arn(Option<String>)`](crate::operation::start_trained_model_inference_job::builders::StartTrainedModelInferenceJobFluentBuilder::set_kms_key_arn):<br>required: **false**<br><p>The Amazon Resource Name (ARN) of the KMS key. This key is used to encrypt and decrypt customer-owned data in the ML inference job and associated data.</p><br>
18 /// - [`tags(impl Into<String>, impl Into<String>)`](crate::operation::start_trained_model_inference_job::builders::StartTrainedModelInferenceJobFluentBuilder::tags) / [`set_tags(Option<HashMap::<String, String>>)`](crate::operation::start_trained_model_inference_job::builders::StartTrainedModelInferenceJobFluentBuilder::set_tags):<br>required: **false**<br><p>The optional metadata that you apply to the resource to help you categorize and organize them. Each tag consists of a key and an optional value, both of which you define.</p> <p>The following basic restrictions apply to tags:</p> <ul> <li> <p>Maximum number of tags per resource - 50.</p></li> <li> <p>For each resource, each tag key must be unique, and each tag key can have only one value.</p></li> <li> <p>Maximum key length - 128 Unicode characters in UTF-8.</p></li> <li> <p>Maximum value length - 256 Unicode characters in UTF-8.</p></li> <li> <p>If your tagging schema is used across multiple services and resources, remember that other services may have restrictions on allowed characters. Generally allowed characters are: letters, numbers, and spaces representable in UTF-8, and the following characters: + - = . _ : / @.</p></li> <li> <p>Tag keys and values are case sensitive.</p></li> <li> <p>Do not use aws:, AWS:, or any upper or lowercase combination of such as a prefix for keys as it is reserved for AWS use. You cannot edit or delete tag keys with this prefix. Values can have this prefix. If a tag value has aws as its prefix but the key does not, then Clean Rooms ML considers it to be a user tag and will count against the limit of 50 tags. Tags with only the key prefix of aws do not count against your tags per resource limit.</p></li> </ul><br>
19 /// - On success, responds with [`StartTrainedModelInferenceJobOutput`](crate::operation::start_trained_model_inference_job::StartTrainedModelInferenceJobOutput) with field(s):
20 /// - [`trained_model_inference_job_arn(String)`](crate::operation::start_trained_model_inference_job::StartTrainedModelInferenceJobOutput::trained_model_inference_job_arn): <p>The Amazon Resource Name (ARN) of the trained model inference job.</p>
21 /// - On failure, responds with [`SdkError<StartTrainedModelInferenceJobError>`](crate::operation::start_trained_model_inference_job::StartTrainedModelInferenceJobError)
22 pub fn start_trained_model_inference_job(
23 &self,
24 ) -> crate::operation::start_trained_model_inference_job::builders::StartTrainedModelInferenceJobFluentBuilder {
25 crate::operation::start_trained_model_inference_job::builders::StartTrainedModelInferenceJobFluentBuilder::new(self.handle.clone())
26 }
27}