#[non_exhaustive]
pub struct CreateOptimizationJobInputBuilder { /* private fields */ }
A builder for CreateOptimizationJobInput.

Implementations

impl CreateOptimizationJobInputBuilder

pub fn optimization_job_name(self, input: impl Into<String>) -> Self

A custom name for the new optimization job.

This field is required.

pub fn set_optimization_job_name(self, input: Option<String>) -> Self

A custom name for the new optimization job.

pub fn get_optimization_job_name(&self) -> &Option<String>

A custom name for the new optimization job.
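
For example, a minimal sketch (assuming CreateOptimizationJobInputBuilder is in scope) of how the fluent setter, the set_* variant, and the get_* accessor relate for this field; the job names are illustrative placeholders:

  // Fluent setter takes anything convertible into String.
  let builder = CreateOptimizationJobInputBuilder::default()
      .optimization_job_name("quantize-llama-demo"); // illustrative name
  // set_* replaces the stored value outright (None clears it).
  let builder = builder.set_optimization_job_name(Some("quantize-llama-demo-v2".to_string()));
  // get_* returns a reference to the currently stored Option<String>.
  assert_eq!(builder.get_optimization_job_name().as_deref(), Some("quantize-llama-demo-v2"));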

pub fn role_arn(self, input: impl Into<String>) -> Self

The Amazon Resource Name (ARN) of an IAM role that enables Amazon SageMaker to perform tasks on your behalf.

During model optimization, Amazon SageMaker needs your permission to:

  • Read input data from an S3 bucket

  • Write model artifacts to an S3 bucket

  • Write logs to Amazon CloudWatch Logs

  • Publish metrics to Amazon CloudWatch

You grant permissions for all of these tasks to an IAM role. To pass this role to Amazon SageMaker, the caller of this API must have the iam:PassRole permission. For more information, see Amazon SageMaker Roles.

This field is required.

pub fn set_role_arn(self, input: Option<String>) -> Self

The Amazon Resource Name (ARN) of an IAM role that enables Amazon SageMaker to perform tasks on your behalf.

During model optimization, Amazon SageMaker needs your permission to:

  • Read input data from an S3 bucket

  • Write model artifacts to an S3 bucket

  • Write logs to Amazon CloudWatch Logs

  • Publish metrics to Amazon CloudWatch

You grant permissions for all of these tasks to an IAM role. To pass this role to Amazon SageMaker, the caller of this API must have the iam:PassRole permission. For more information, see Amazon SageMaker Roles.

pub fn get_role_arn(&self) -> &Option<String>

The Amazon Resource Name (ARN) of an IAM role that enables Amazon SageMaker to perform tasks on your behalf.

During model optimization, Amazon SageMaker needs your permission to:

  • Read input data from an S3 bucket

  • Write model artifacts to an S3 bucket

  • Write logs to Amazon CloudWatch Logs

  • Publish metrics to Amazon CloudWatch

You grant permissions for all of these tasks to an IAM role. To pass this role to Amazon SageMaker, the caller of this API must have the iam:PassRole permission. For more information, see Amazon SageMaker Roles.

pub fn model_source(self, input: OptimizationJobModelSource) -> Self

The location of the source model to optimize with an optimization job.

This field is required.

pub fn set_model_source(self, input: Option<OptimizationJobModelSource>) -> Self

The location of the source model to optimize with an optimization job.

pub fn get_model_source(&self) -> &Option<OptimizationJobModelSource>

The location of the source model to optimize with an optimization job.

pub fn deployment_instance_type(self, input: OptimizationJobDeploymentInstanceType) -> Self

The type of instance that hosts the optimized model that you create with the optimization job.

This field is required.

pub fn set_deployment_instance_type(self, input: Option<OptimizationJobDeploymentInstanceType>) -> Self

The type of instance that hosts the optimized model that you create with the optimization job.

pub fn get_deployment_instance_type(&self) -> &Option<OptimizationJobDeploymentInstanceType>

The type of instance that hosts the optimized model that you create with the optimization job.

pub fn optimization_environment(self, k: impl Into<String>, v: impl Into<String>) -> Self

Adds a key-value pair to optimization_environment.

To override the contents of this collection use set_optimization_environment.

The environment variables to set in the model container.
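
A short sketch of the adder semantics, assuming the builder is in scope; the environment variable names and values below are illustrative only:

  // Each call adds one key-value pair to the map; use
  // set_optimization_environment to replace the whole map at once.
  let builder = CreateOptimizationJobInputBuilder::default()
      .optimization_environment("OPTION_DTYPE", "fp16")                // illustrative pair
      .optimization_environment("OPTION_TENSOR_PARALLEL_DEGREE", "2"); // illustrative pair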

pub fn set_optimization_environment(self, input: Option<HashMap<String, String>>) -> Self

The environment variables to set in the model container.

pub fn get_optimization_environment(&self) -> &Option<HashMap<String, String>>

The environment variables to set in the model container.

pub fn optimization_configs(self, input: OptimizationConfig) -> Self

Appends an item to optimization_configs.

To override the contents of this collection use set_optimization_configs.

Settings for each of the optimization techniques that the job applies.
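
A hedged sketch of the append semantics; the OptimizationConfig values are taken as parameters rather than constructed here, since their variants and fields are documented on the OptimizationConfig page:

  // Each call appends one optimization technique to the list;
  // set_optimization_configs would replace the entire Vec instead.
  fn with_techniques(
      builder: CreateOptimizationJobInputBuilder,
      quantization: OptimizationConfig,
      compilation: OptimizationConfig,
  ) -> CreateOptimizationJobInputBuilder {
      builder
          .optimization_configs(quantization)
          .optimization_configs(compilation)
  }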

pub fn set_optimization_configs(self, input: Option<Vec<OptimizationConfig>>) -> Self

Settings for each of the optimization techniques that the job applies.

pub fn get_optimization_configs(&self) -> &Option<Vec<OptimizationConfig>>

Settings for each of the optimization techniques that the job applies.

pub fn output_config(self, input: OptimizationJobOutputConfig) -> Self

Details for where to store the optimized model that you create with the optimization job.

This field is required.

pub fn set_output_config(self, input: Option<OptimizationJobOutputConfig>) -> Self

Details for where to store the optimized model that you create with the optimization job.

pub fn get_output_config(&self) -> &Option<OptimizationJobOutputConfig>

Details for where to store the optimized model that you create with the optimization job.

pub fn stopping_condition(self, input: StoppingCondition) -> Self

Specifies a limit to how long a job can run. When the job reaches the time limit, SageMaker ends the job. Use this API to cap costs.

To stop a training job, SageMaker sends the algorithm the SIGTERM signal, which delays job termination for 120 seconds. Algorithms can use this 120-second window to save the model artifacts, so the results of training are not lost.

The training algorithms provided by SageMaker automatically save the intermediate results of a model training job when possible. This attempt to save artifacts is only a best effort, because the model might not be in a state from which it can be saved. For example, if training has just started, the model might not be ready to save. When saved, this intermediate data is a valid model artifact. You can use it to create a model with CreateModel.

The Neural Topic Model (NTM) currently does not support saving intermediate model artifacts. When training NTMs, make sure that the maximum runtime is sufficient for the training job to complete.

This field is required.
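
A minimal sketch, assuming StoppingCondition::builder() exposes a max_runtime_in_seconds setter (verify against the StoppingCondition docs), that caps the job at one hour:

  // Assumption: StoppingCondition::builder().max_runtime_in_seconds(...).build().
  // Caps the optimization job at 3600 seconds so a stalled job cannot run up costs.
  let stopping_condition = StoppingCondition::builder()
      .max_runtime_in_seconds(3600)
      .build();
  let builder = CreateOptimizationJobInputBuilder::default()
      .stopping_condition(stopping_condition);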

pub fn set_stopping_condition(self, input: Option<StoppingCondition>) -> Self

Specifies a limit to how long a job can run. When the job reaches the time limit, SageMaker ends the job. Use this API to cap costs.

To stop a training job, SageMaker sends the algorithm the SIGTERM signal, which delays job termination for 120 seconds. Algorithms can use this 120-second window to save the model artifacts, so the results of training are not lost.

The training algorithms provided by SageMaker automatically save the intermediate results of a model training job when possible. This attempt to save artifacts is only a best effort, because the model might not be in a state from which it can be saved. For example, if training has just started, the model might not be ready to save. When saved, this intermediate data is a valid model artifact. You can use it to create a model with CreateModel.

The Neural Topic Model (NTM) currently does not support saving intermediate model artifacts. When training NTMs, make sure that the maximum runtime is sufficient for the training job to complete.

pub fn get_stopping_condition(&self) -> &Option<StoppingCondition>

Specifies a limit to how long a job can run. When the job reaches the time limit, SageMaker ends the job. Use this API to cap costs.

To stop a training job, SageMaker sends the algorithm the SIGTERM signal, which delays job termination for 120 seconds. Algorithms can use this 120-second window to save the model artifacts, so the results of training are not lost.

The training algorithms provided by SageMaker automatically save the intermediate results of a model training job when possible. This attempt to save artifacts is only a best effort, because the model might not be in a state from which it can be saved. For example, if training has just started, the model might not be ready to save. When saved, this intermediate data is a valid model artifact. You can use it to create a model with CreateModel.

The Neural Topic Model (NTM) currently does not support saving intermediate model artifacts. When training NTMs, make sure that the maximum runtime is sufficient for the training job to complete.

pub fn tags(self, input: Tag) -> Self

Appends an item to tags.

To override the contents of this collection use set_tags.

A list of key-value pairs associated with the optimization job. For more information, see Tagging Amazon Web Services resources in the Amazon Web Services General Reference Guide.
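
A hedged sketch, assuming Tag::builder() with key and value setters and a fallible build() (check the Tag docs); each call to tags appends one entry:

  // Appends a single illustrative tag; set_tags would replace the whole list.
  fn tag_job(
      builder: CreateOptimizationJobInputBuilder,
  ) -> Result<CreateOptimizationJobInputBuilder, BuildError> {
      let cost_center = Tag::builder()
          .key("CostCenter")    // illustrative tag key
          .value("ml-research") // illustrative tag value
          .build()?;            // assumed to validate the required key/value pair
      Ok(builder.tags(cost_center))
  }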

pub fn set_tags(self, input: Option<Vec<Tag>>) -> Self

A list of key-value pairs associated with the optimization job. For more information, see Tagging Amazon Web Services resources in the Amazon Web Services General Reference Guide.

pub fn get_tags(&self) -> &Option<Vec<Tag>>

A list of key-value pairs associated with the optimization job. For more information, see Tagging Amazon Web Services resources in the Amazon Web Services General Reference Guide.

pub fn vpc_config(self, input: OptimizationVpcConfig) -> Self

A VPC in Amazon VPC that your optimized model has access to.

pub fn set_vpc_config(self, input: Option<OptimizationVpcConfig>) -> Self

A VPC in Amazon VPC that your optimized model has access to.

pub fn get_vpc_config(&self) -> &Option<OptimizationVpcConfig>

A VPC in Amazon VPC that your optimized model has access to.

pub fn build(self) -> Result<CreateOptimizationJobInput, BuildError>

Consumes the builder and constructs a CreateOptimizationJobInput.
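
A minimal end-to-end sketch under stated assumptions: the use paths reflect the current aws-sdk-sagemaker module layout and may differ by crate version, the job name and role ARN are placeholders, and the nested configuration values are taken as parameters because their own builders are documented separately:

  // Paths below are assumptions; adjust them to your aws-sdk-sagemaker version.
  use aws_sdk_sagemaker::error::BuildError;
  use aws_sdk_sagemaker::operation::create_optimization_job::CreateOptimizationJobInput;
  use aws_sdk_sagemaker::operation::create_optimization_job::builders::CreateOptimizationJobInputBuilder;
  use aws_sdk_sagemaker::types::{
      OptimizationConfig, OptimizationJobDeploymentInstanceType, OptimizationJobModelSource,
      OptimizationJobOutputConfig, StoppingCondition,
  };

  // Assembles the required fields and builds the input; build() returns a
  // BuildError if a required field was never set.
  fn make_input(
      model_source: OptimizationJobModelSource,
      config: OptimizationConfig,
      output_config: OptimizationJobOutputConfig,
      stopping_condition: StoppingCondition,
      instance_type: OptimizationJobDeploymentInstanceType,
  ) -> Result<CreateOptimizationJobInput, BuildError> {
      CreateOptimizationJobInputBuilder::default()
          .optimization_job_name("my-optimization-job") // placeholder name
          .role_arn("arn:aws:iam::111122223333:role/SageMakerExecutionRole") // placeholder ARN
          .model_source(model_source)
          .deployment_instance_type(instance_type)
          .optimization_configs(config)
          .output_config(output_config)
          .stopping_condition(stopping_condition)
          .build()
  }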

impl CreateOptimizationJobInputBuilder

pub async fn send_with(self, client: &Client) -> Result<CreateOptimizationJobOutput, SdkError<CreateOptimizationJobError, HttpResponse>>

Sends a request with this input using the given client.
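
A brief sketch of driving this builder against an already-configured client; the async runtime setup and the construction of the builder itself are assumed to happen elsewhere:

  // Sends CreateOptimizationJob using this input builder and the given client.
  async fn start_job(
      client: &Client,
      builder: CreateOptimizationJobInputBuilder,
  ) -> Result<CreateOptimizationJobOutput, SdkError<CreateOptimizationJobError, HttpResponse>> {
      builder.send_with(client).await
  }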

Trait Implementations

impl Clone for CreateOptimizationJobInputBuilder

fn clone(&self) -> CreateOptimizationJobInputBuilder

Returns a copy of the value.

fn clone_from(&mut self, source: &Self)

Performs copy-assignment from source.

impl Debug for CreateOptimizationJobInputBuilder

fn fmt(&self, f: &mut Formatter<'_>) -> Result

Formats the value using the given formatter.

impl Default for CreateOptimizationJobInputBuilder

fn default() -> CreateOptimizationJobInputBuilder

Returns the “default value” for a type.

impl PartialEq for CreateOptimizationJobInputBuilder

fn eq(&self, other: &CreateOptimizationJobInputBuilder) -> bool

Tests for self and other values to be equal, and is used by ==.

fn ne(&self, other: &Rhs) -> bool

Tests for !=. The default implementation is almost always sufficient, and should not be overridden without very good reason.

impl StructuralPartialEq for CreateOptimizationJobInputBuilder

Auto Trait Implementations

Blanket Implementations

impl<T> Any for T
where T: 'static + ?Sized,

fn type_id(&self) -> TypeId

Gets the TypeId of self.

impl<T> Borrow<T> for T
where T: ?Sized,

fn borrow(&self) -> &T

Immutably borrows from an owned value.

impl<T> BorrowMut<T> for T
where T: ?Sized,

fn borrow_mut(&mut self) -> &mut T

Mutably borrows from an owned value.

impl<T> CloneToUninit for T
where T: Clone,

default unsafe fn clone_to_uninit(&self, dst: *mut T)

This is a nightly-only experimental API (clone_to_uninit). Performs copy-assignment from self to dst.

impl<T> From<T> for T

fn from(t: T) -> T

Returns the argument unchanged.

impl<T> Instrument for T

fn instrument(self, span: Span) -> Instrumented<Self>

Instruments this type with the provided Span, returning an Instrumented wrapper.

fn in_current_span(self) -> Instrumented<Self>

Instruments this type with the current Span, returning an Instrumented wrapper.

impl<T, U> Into<U> for T
where U: From<T>,

fn into(self) -> U

Calls U::from(self).

That is, this conversion is whatever the implementation of From<T> for U chooses to do.

impl<T> IntoEither for T

fn into_either(self, into_left: bool) -> Either<Self, Self>

Converts self into a Left variant of Either<Self, Self> if into_left is true. Converts self into a Right variant of Either<Self, Self> otherwise.

fn into_either_with<F>(self, into_left: F) -> Either<Self, Self>
where F: FnOnce(&Self) -> bool,

Converts self into a Left variant of Either<Self, Self> if into_left(&self) returns true. Converts self into a Right variant of Either<Self, Self> otherwise.

impl<Unshared, Shared> IntoShared<Shared> for Unshared
where Shared: FromUnshared<Unshared>,

fn into_shared(self) -> Shared

Creates a shared type from an unshared type.

impl<T> Same for T

type Output = T

Should always be Self

impl<T> ToOwned for T
where T: Clone,

type Owned = T

The resulting type after obtaining ownership.

fn to_owned(&self) -> T

Creates owned data from borrowed data, usually by cloning.

fn clone_into(&self, target: &mut T)

Uses borrowed data to replace owned data, usually by cloning.

impl<T, U> TryFrom<U> for T
where U: Into<T>,

type Error = Infallible

The type returned in the event of a conversion error.

fn try_from(value: U) -> Result<T, <T as TryFrom<U>>::Error>

Performs the conversion.

impl<T, U> TryInto<U> for T
where U: TryFrom<T>,

type Error = <U as TryFrom<T>>::Error

The type returned in the event of a conversion error.

fn try_into(self) -> Result<U, <U as TryFrom<T>>::Error>

Performs the conversion.

impl<T> WithSubscriber for T

fn with_subscriber<S>(self, subscriber: S) -> WithDispatch<Self>
where S: Into<Dispatch>,

Attaches the provided Subscriber to this type, returning a WithDispatch wrapper.

fn with_current_subscriber(self) -> WithDispatch<Self>

Attaches the current default Subscriber to this type, returning a WithDispatch wrapper.