//! aws-sdk-sagemaker 1.196.0
//!
//! AWS SDK for Amazon SageMaker Service.
//! See the crate-level documentation for usage details.
// Code generated by software.amazon.smithy.rust.codegen.smithy-rs. DO NOT EDIT.
pub use crate::operation::create_ai_recommendation_job::_create_ai_recommendation_job_input::CreateAiRecommendationJobInputBuilder;

pub use crate::operation::create_ai_recommendation_job::_create_ai_recommendation_job_output::CreateAiRecommendationJobOutputBuilder;

impl crate::operation::create_ai_recommendation_job::builders::CreateAiRecommendationJobInputBuilder {
    /// Sends a request with this input using the given client.
    pub async fn send_with(
        self,
        client: &crate::Client,
    ) -> ::std::result::Result<
        crate::operation::create_ai_recommendation_job::CreateAiRecommendationJobOutput,
        ::aws_smithy_runtime_api::client::result::SdkError<
            crate::operation::create_ai_recommendation_job::CreateAIRecommendationJobError,
            ::aws_smithy_runtime_api::client::orchestrator::HttpResponse,
        >,
    > {
        // Obtain a fluent builder from the client, swap in this pre-populated
        // input builder as its state, and dispatch the request.
        let mut builder = client.create_ai_recommendation_job();
        builder.inner = self;
        builder.send().await
    }
}
/// Fluent builder constructing a request to `CreateAIRecommendationJob`.
///
/// <p>Creates a recommendation job that generates intelligent optimization recommendations for generative AI inference deployments. The job analyzes your model, workload configuration, and performance targets to recommend optimal instance types, model optimization techniques (such as quantization and speculative decoding), and deployment configurations.</p>
#[derive(::std::clone::Clone, ::std::fmt::Debug)]
pub struct CreateAIRecommendationJobFluentBuilder {
    // Shared client state; `send` reads its runtime plugins and resolved config.
    handle: ::std::sync::Arc<crate::client::Handle>,
    // Operation input accumulated by the fluent setter methods; built in `send`.
    inner: crate::operation::create_ai_recommendation_job::builders::CreateAiRecommendationJobInputBuilder,
    // Optional per-operation config override, layered onto the client config when sending.
    config_override: ::std::option::Option<crate::config::Builder>,
}
impl
    crate::client::customize::internal::CustomizableSend<
        crate::operation::create_ai_recommendation_job::CreateAiRecommendationJobOutput,
        crate::operation::create_ai_recommendation_job::CreateAIRecommendationJobError,
    > for CreateAIRecommendationJobFluentBuilder
{
    fn send(
        self,
        config_override: crate::config::Builder,
    ) -> crate::client::customize::internal::BoxFuture<
        crate::client::customize::internal::SendResult<
            crate::operation::create_ai_recommendation_job::CreateAiRecommendationJobOutput,
            crate::operation::create_ai_recommendation_job::CreateAIRecommendationJobError,
        >,
    > {
        // Apply the per-operation config override first, then box the send
        // future to satisfy the `CustomizableSend` return type.
        let configured = self.config_override(config_override);
        ::std::boxed::Box::pin(async move { configured.send().await })
    }
}
impl CreateAIRecommendationJobFluentBuilder {
    /// Constructs a fresh `CreateAIRecommendationJobFluentBuilder` backed by the given client handle.
    pub(crate) fn new(handle: ::std::sync::Arc<crate::client::Handle>) -> Self {
        Self {
            inner: ::std::default::Default::default(),
            config_override: ::std::option::Option::None,
            handle,
        }
    }
    /// Access the CreateAIRecommendationJob as a reference.
    pub fn as_input(&self) -> &crate::operation::create_ai_recommendation_job::builders::CreateAiRecommendationJobInputBuilder {
        &self.inner
    }
    /// Dispatches the request and awaits the response.
    ///
    /// When the call fails, an `SdkError` is returned carrying additional
    /// detail that can be matched against.
    ///
    /// Retryable failures are retried twice by default; this is configurable
    /// through the [RetryConfig](aws_smithy_types::retry::RetryConfig) set
    /// when the client is configured.
    pub async fn send(
        self,
    ) -> ::std::result::Result<
        crate::operation::create_ai_recommendation_job::CreateAiRecommendationJobOutput,
        ::aws_smithy_runtime_api::client::result::SdkError<
            crate::operation::create_ai_recommendation_job::CreateAIRecommendationJobError,
            ::aws_smithy_runtime_api::client::orchestrator::HttpResponse,
        >,
    > {
        let Self { handle, inner, config_override } = self;
        // Finalize the input; builder validation errors surface as construction failures.
        let op_input = inner
            .build()
            .map_err(::aws_smithy_runtime_api::client::result::SdkError::construction_failure)?;
        // Layer the per-operation override (if any) onto the client-level plugins and config.
        let plugins = crate::operation::create_ai_recommendation_job::CreateAIRecommendationJob::operation_runtime_plugins(
            handle.runtime_plugins.clone(),
            &handle.conf,
            config_override,
        );
        crate::operation::create_ai_recommendation_job::CreateAIRecommendationJob::orchestrate(&plugins, op_input).await
    }

    /// Consumes this builder, creating a customizable operation that can be modified before being sent.
    pub fn customize(
        self,
    ) -> crate::client::customize::CustomizableOperation<
        crate::operation::create_ai_recommendation_job::CreateAiRecommendationJobOutput,
        crate::operation::create_ai_recommendation_job::CreateAIRecommendationJobError,
        Self,
    > {
        crate::client::customize::CustomizableOperation::new(self)
    }
    // Record a per-operation config override; overwrites any previous one.
    pub(crate) fn config_override(mut self, config_override: impl ::std::convert::Into<crate::config::Builder>) -> Self {
        let layer = ::std::option::Option::Some(config_override.into());
        self.set_config_override(layer);
        self
    }

    // Replace (or clear, with `None`) the stored per-operation config override.
    pub(crate) fn set_config_override(&mut self, config_override: ::std::option::Option<crate::config::Builder>) -> &mut Self {
        self.config_override = config_override;
        self
    }
    /// <p>A name for the AI recommendation job. The name must be unique per Amazon Web Services account within the current Amazon Web Services Region.</p>
    pub fn ai_recommendation_job_name(self, input: impl ::std::convert::Into<::std::string::String>) -> Self {
        let Self { handle, inner, config_override } = self;
        Self {
            inner: inner.ai_recommendation_job_name(input.into()),
            handle,
            config_override,
        }
    }
    /// <p>A name for the AI recommendation job. The name must be unique per Amazon Web Services account within the current Amazon Web Services Region.</p>
    pub fn set_ai_recommendation_job_name(self, input: ::std::option::Option<::std::string::String>) -> Self {
        let Self { handle, inner, config_override } = self;
        Self {
            inner: inner.set_ai_recommendation_job_name(input),
            handle,
            config_override,
        }
    }
    /// <p>A name for the AI recommendation job. The name must be unique per Amazon Web Services account within the current Amazon Web Services Region.</p>
    pub fn get_ai_recommendation_job_name(&self) -> &::std::option::Option<::std::string::String> {
        self.as_input().get_ai_recommendation_job_name()
    }
    /// <p>Identifies the model to optimize via the Amazon S3 location of its model artifacts.</p>
    pub fn model_source(self, input: crate::types::AiModelSource) -> Self {
        let Self { handle, inner, config_override } = self;
        Self {
            inner: inner.model_source(input),
            handle,
            config_override,
        }
    }
    /// <p>Identifies the model to optimize via the Amazon S3 location of its model artifacts.</p>
    pub fn set_model_source(self, input: ::std::option::Option<crate::types::AiModelSource>) -> Self {
        let Self { handle, inner, config_override } = self;
        Self {
            inner: inner.set_model_source(input),
            handle,
            config_override,
        }
    }
    /// <p>Identifies the model to optimize via the Amazon S3 location of its model artifacts.</p>
    pub fn get_model_source(&self) -> &::std::option::Option<crate::types::AiModelSource> {
        self.as_input().get_model_source()
    }
    /// <p>Where the recommendation job writes its output: an Amazon S3 location for results, plus an optional model package group in which the optimized model is registered.</p>
    pub fn output_config(self, input: crate::types::AiRecommendationOutputConfig) -> Self {
        let Self { handle, inner, config_override } = self;
        Self {
            inner: inner.output_config(input),
            handle,
            config_override,
        }
    }
    /// <p>Where the recommendation job writes its output: an Amazon S3 location for results, plus an optional model package group in which the optimized model is registered.</p>
    pub fn set_output_config(self, input: ::std::option::Option<crate::types::AiRecommendationOutputConfig>) -> Self {
        let Self { handle, inner, config_override } = self;
        Self {
            inner: inner.set_output_config(input),
            handle,
            config_override,
        }
    }
    /// <p>Where the recommendation job writes its output: an Amazon S3 location for results, plus an optional model package group in which the optimized model is registered.</p>
    pub fn get_output_config(&self) -> &::std::option::Option<crate::types::AiRecommendationOutputConfig> {
        self.as_input().get_output_config()
    }
    /// <p>The name or Amazon Resource Name (ARN) of the AI workload configuration that this recommendation job uses.</p>
    pub fn ai_workload_config_identifier(self, input: impl ::std::convert::Into<::std::string::String>) -> Self {
        let Self { handle, inner, config_override } = self;
        Self {
            inner: inner.ai_workload_config_identifier(input.into()),
            handle,
            config_override,
        }
    }
    /// <p>The name or Amazon Resource Name (ARN) of the AI workload configuration that this recommendation job uses.</p>
    pub fn set_ai_workload_config_identifier(self, input: ::std::option::Option<::std::string::String>) -> Self {
        let Self { handle, inner, config_override } = self;
        Self {
            inner: inner.set_ai_workload_config_identifier(input),
            handle,
            config_override,
        }
    }
    /// <p>The name or Amazon Resource Name (ARN) of the AI workload configuration that this recommendation job uses.</p>
    pub fn get_ai_workload_config_identifier(&self) -> &::std::option::Option<::std::string::String> {
        self.as_input().get_ai_workload_config_identifier()
    }
    /// <p>Performance targets for the recommendation job: constraints on metrics such as time to first token (<code>ttft-ms</code>), <code>throughput</code>, or <code>cost</code>.</p>
    pub fn performance_target(self, input: crate::types::AiRecommendationPerformanceTarget) -> Self {
        let Self { handle, inner, config_override } = self;
        Self {
            inner: inner.performance_target(input),
            handle,
            config_override,
        }
    }
    /// <p>Performance targets for the recommendation job: constraints on metrics such as time to first token (<code>ttft-ms</code>), <code>throughput</code>, or <code>cost</code>.</p>
    pub fn set_performance_target(self, input: ::std::option::Option<crate::types::AiRecommendationPerformanceTarget>) -> Self {
        let Self { handle, inner, config_override } = self;
        Self {
            inner: inner.set_performance_target(input),
            handle,
            config_override,
        }
    }
    /// <p>Performance targets for the recommendation job: constraints on metrics such as time to first token (<code>ttft-ms</code>), <code>throughput</code>, or <code>cost</code>.</p>
    pub fn get_performance_target(&self) -> &::std::option::Option<crate::types::AiRecommendationPerformanceTarget> {
        self.as_input().get_performance_target()
    }
    /// <p>The Amazon Resource Name (ARN) of an IAM role that lets Amazon SageMaker AI perform tasks on your behalf.</p>
    pub fn role_arn(self, input: impl ::std::convert::Into<::std::string::String>) -> Self {
        let Self { handle, inner, config_override } = self;
        Self {
            inner: inner.role_arn(input.into()),
            handle,
            config_override,
        }
    }
    /// <p>The Amazon Resource Name (ARN) of an IAM role that lets Amazon SageMaker AI perform tasks on your behalf.</p>
    pub fn set_role_arn(self, input: ::std::option::Option<::std::string::String>) -> Self {
        let Self { handle, inner, config_override } = self;
        Self {
            inner: inner.set_role_arn(input),
            handle,
            config_override,
        }
    }
    /// <p>The Amazon Resource Name (ARN) of an IAM role that lets Amazon SageMaker AI perform tasks on your behalf.</p>
    pub fn get_role_arn(&self) -> &::std::option::Option<::std::string::String> {
        self.as_input().get_role_arn()
    }
    /// <p>The inference framework configuration — the framework (such as LMI or vLLM) the recommendation job should use.</p>
    pub fn inference_specification(self, input: crate::types::AiRecommendationInferenceSpecification) -> Self {
        let Self { handle, inner, config_override } = self;
        Self {
            inner: inner.inference_specification(input),
            handle,
            config_override,
        }
    }
    /// <p>The inference framework configuration — the framework (such as LMI or vLLM) the recommendation job should use.</p>
    pub fn set_inference_specification(self, input: ::std::option::Option<crate::types::AiRecommendationInferenceSpecification>) -> Self {
        let Self { handle, inner, config_override } = self;
        Self {
            inner: inner.set_inference_specification(input),
            handle,
            config_override,
        }
    }
    /// <p>The inference framework configuration — the framework (such as LMI or vLLM) the recommendation job should use.</p>
    pub fn get_inference_specification(&self) -> &::std::option::Option<crate::types::AiRecommendationInferenceSpecification> {
        self.as_input().get_inference_specification()
    }
    /// <p>Whether model optimization techniques such as quantization, speculative decoding, and kernel tuning are allowed. The default is <code>true</code>.</p>
    pub fn optimize_model(self, input: bool) -> Self {
        let Self { handle, inner, config_override } = self;
        Self {
            inner: inner.optimize_model(input),
            handle,
            config_override,
        }
    }
    /// <p>Whether model optimization techniques such as quantization, speculative decoding, and kernel tuning are allowed. The default is <code>true</code>.</p>
    pub fn set_optimize_model(self, input: ::std::option::Option<bool>) -> Self {
        let Self { handle, inner, config_override } = self;
        Self {
            inner: inner.set_optimize_model(input),
            handle,
            config_override,
        }
    }
    /// <p>Whether model optimization techniques such as quantization, speculative decoding, and kernel tuning are allowed. The default is <code>true</code>.</p>
    pub fn get_optimize_model(&self) -> &::std::option::Option<bool> {
        self.as_input().get_optimize_model()
    }
    /// <p>The compute resource specification for the recommendation job: up to 3 candidate instance types, plus optional capacity reservation configuration.</p>
    pub fn compute_spec(self, input: crate::types::AiRecommendationComputeSpec) -> Self {
        let Self { handle, inner, config_override } = self;
        Self {
            inner: inner.compute_spec(input),
            handle,
            config_override,
        }
    }
    /// <p>The compute resource specification for the recommendation job: up to 3 candidate instance types, plus optional capacity reservation configuration.</p>
    pub fn set_compute_spec(self, input: ::std::option::Option<crate::types::AiRecommendationComputeSpec>) -> Self {
        let Self { handle, inner, config_override } = self;
        Self {
            inner: inner.set_compute_spec(input),
            handle,
            config_override,
        }
    }
    /// <p>The compute resource specification for the recommendation job: up to 3 candidate instance types, plus optional capacity reservation configuration.</p>
    pub fn get_compute_spec(&self) -> &::std::option::Option<crate::types::AiRecommendationComputeSpec> {
        self.as_input().get_compute_spec()
    }
    ///
    /// Appends an item to `Tags`.
    ///
    /// To override the contents of this collection use [`set_tags`](Self::set_tags).
    ///
    /// <p>Metadata applied to Amazon Web Services resources to help you categorize and organize them.</p>
    pub fn tags(self, input: crate::types::Tag) -> Self {
        let Self { handle, inner, config_override } = self;
        Self {
            inner: inner.tags(input),
            handle,
            config_override,
        }
    }
    /// <p>Metadata applied to Amazon Web Services resources to help you categorize and organize them.</p>
    pub fn set_tags(self, input: ::std::option::Option<::std::vec::Vec<crate::types::Tag>>) -> Self {
        let Self { handle, inner, config_override } = self;
        Self {
            inner: inner.set_tags(input),
            handle,
            config_override,
        }
    }
    /// <p>Metadata applied to Amazon Web Services resources to help you categorize and organize them.</p>
    pub fn get_tags(&self) -> &::std::option::Option<::std::vec::Vec<crate::types::Tag>> {
        self.as_input().get_tags()
    }
}