#[non_exhaustive]pub struct GetTrainedModelInferenceJobOutputBuilder { /* private fields */ }Expand description
A builder for GetTrainedModelInferenceJobOutput.
Implementations§
Source§impl GetTrainedModelInferenceJobOutputBuilder
impl GetTrainedModelInferenceJobOutputBuilder
Sourcepub fn create_time(self, input: DateTime) -> Self
pub fn create_time(self, input: DateTime) -> Self
The time at which the trained model inference job was created.
This field is required.Sourcepub fn set_create_time(self, input: Option<DateTime>) -> Self
pub fn set_create_time(self, input: Option<DateTime>) -> Self
The time at which the trained model inference job was created.
Sourcepub fn get_create_time(&self) -> &Option<DateTime>
pub fn get_create_time(&self) -> &Option<DateTime>
The time at which the trained model inference job was created.
Sourcepub fn update_time(self, input: DateTime) -> Self
pub fn update_time(self, input: DateTime) -> Self
The most recent time at which the trained model inference job was updated.
This field is required.Sourcepub fn set_update_time(self, input: Option<DateTime>) -> Self
pub fn set_update_time(self, input: Option<DateTime>) -> Self
The most recent time at which the trained model inference job was updated.
Sourcepub fn get_update_time(&self) -> &Option<DateTime>
pub fn get_update_time(&self) -> &Option<DateTime>
The most recent time at which the trained model inference job was updated.
Sourcepub fn trained_model_inference_job_arn(self, input: impl Into<String>) -> Self
pub fn trained_model_inference_job_arn(self, input: impl Into<String>) -> Self
The Amazon Resource Name (ARN) of the trained model inference job.
This field is required.Sourcepub fn set_trained_model_inference_job_arn(self, input: Option<String>) -> Self
pub fn set_trained_model_inference_job_arn(self, input: Option<String>) -> Self
The Amazon Resource Name (ARN) of the trained model inference job.
Sourcepub fn get_trained_model_inference_job_arn(&self) -> &Option<String>
pub fn get_trained_model_inference_job_arn(&self) -> &Option<String>
The Amazon Resource Name (ARN) of the trained model inference job.
Sourcepub fn configured_model_algorithm_association_arn(
self,
input: impl Into<String>,
) -> Self
pub fn configured_model_algorithm_association_arn( self, input: impl Into<String>, ) -> Self
The Amazon Resource Name (ARN) of the configured model algorithm association that was used for the trained model inference job.
Sourcepub fn set_configured_model_algorithm_association_arn(
self,
input: Option<String>,
) -> Self
pub fn set_configured_model_algorithm_association_arn( self, input: Option<String>, ) -> Self
The Amazon Resource Name (ARN) of the configured model algorithm association that was used for the trained model inference job.
Sourcepub fn get_configured_model_algorithm_association_arn(&self) -> &Option<String>
pub fn get_configured_model_algorithm_association_arn(&self) -> &Option<String>
The Amazon Resource Name (ARN) of the configured model algorithm association that was used for the trained model inference job.
Sourcepub fn name(self, input: impl Into<String>) -> Self
pub fn name(self, input: impl Into<String>) -> Self
The name of the trained model inference job.
This field is required.Sourcepub fn set_name(self, input: Option<String>) -> Self
pub fn set_name(self, input: Option<String>) -> Self
The name of the trained model inference job.
Sourcepub fn status(self, input: TrainedModelInferenceJobStatus) -> Self
pub fn status(self, input: TrainedModelInferenceJobStatus) -> Self
The status of the trained model inference job.
This field is required.Sourcepub fn set_status(self, input: Option<TrainedModelInferenceJobStatus>) -> Self
pub fn set_status(self, input: Option<TrainedModelInferenceJobStatus>) -> Self
The status of the trained model inference job.
Sourcepub fn get_status(&self) -> &Option<TrainedModelInferenceJobStatus>
pub fn get_status(&self) -> &Option<TrainedModelInferenceJobStatus>
The status of the trained model inference job.
Sourcepub fn trained_model_arn(self, input: impl Into<String>) -> Self
pub fn trained_model_arn(self, input: impl Into<String>) -> Self
The Amazon Resource Name (ARN) for the trained model that was used for the trained model inference job.
This field is required.Sourcepub fn set_trained_model_arn(self, input: Option<String>) -> Self
pub fn set_trained_model_arn(self, input: Option<String>) -> Self
The Amazon Resource Name (ARN) for the trained model that was used for the trained model inference job.
Sourcepub fn get_trained_model_arn(&self) -> &Option<String>
pub fn get_trained_model_arn(&self) -> &Option<String>
The Amazon Resource Name (ARN) for the trained model that was used for the trained model inference job.
Sourcepub fn trained_model_version_identifier(self, input: impl Into<String>) -> Self
pub fn trained_model_version_identifier(self, input: impl Into<String>) -> Self
The version identifier of the trained model used for this inference job. This identifies the specific version of the trained model that was used to generate the inference results.
Sourcepub fn set_trained_model_version_identifier(self, input: Option<String>) -> Self
pub fn set_trained_model_version_identifier(self, input: Option<String>) -> Self
The version identifier of the trained model used for this inference job. This identifies the specific version of the trained model that was used to generate the inference results.
Sourcepub fn get_trained_model_version_identifier(&self) -> &Option<String>
pub fn get_trained_model_version_identifier(&self) -> &Option<String>
The version identifier of the trained model used for this inference job. This identifies the specific version of the trained model that was used to generate the inference results.
Sourcepub fn resource_config(self, input: InferenceResourceConfig) -> Self
pub fn resource_config(self, input: InferenceResourceConfig) -> Self
The resource configuration information for the trained model inference job.
This field is required.Sourcepub fn set_resource_config(self, input: Option<InferenceResourceConfig>) -> Self
pub fn set_resource_config(self, input: Option<InferenceResourceConfig>) -> Self
The resource configuration information for the trained model inference job.
Sourcepub fn get_resource_config(&self) -> &Option<InferenceResourceConfig>
pub fn get_resource_config(&self) -> &Option<InferenceResourceConfig>
The resource configuration information for the trained model inference job.
Sourcepub fn output_configuration(self, input: InferenceOutputConfiguration) -> Self
pub fn output_configuration(self, input: InferenceOutputConfiguration) -> Self
The output configuration information for the trained model inference job.
This field is required.Sourcepub fn set_output_configuration(
self,
input: Option<InferenceOutputConfiguration>,
) -> Self
pub fn set_output_configuration( self, input: Option<InferenceOutputConfiguration>, ) -> Self
The output configuration information for the trained model inference job.
Sourcepub fn get_output_configuration(&self) -> &Option<InferenceOutputConfiguration>
pub fn get_output_configuration(&self) -> &Option<InferenceOutputConfiguration>
The output configuration information for the trained model inference job.
Sourcepub fn membership_identifier(self, input: impl Into<String>) -> Self
pub fn membership_identifier(self, input: impl Into<String>) -> Self
The membership ID of the membership that contains the trained model inference job.
This field is required.Sourcepub fn set_membership_identifier(self, input: Option<String>) -> Self
pub fn set_membership_identifier(self, input: Option<String>) -> Self
The membership ID of the membership that contains the trained model inference job.
Sourcepub fn get_membership_identifier(&self) -> &Option<String>
pub fn get_membership_identifier(&self) -> &Option<String>
The membership ID of the membership that contains the trained model inference job.
Sourcepub fn data_source(self, input: ModelInferenceDataSource) -> Self
pub fn data_source(self, input: ModelInferenceDataSource) -> Self
The data source that was used for the trained model inference job.
This field is required.Sourcepub fn set_data_source(self, input: Option<ModelInferenceDataSource>) -> Self
pub fn set_data_source(self, input: Option<ModelInferenceDataSource>) -> Self
The data source that was used for the trained model inference job.
Sourcepub fn get_data_source(&self) -> &Option<ModelInferenceDataSource>
pub fn get_data_source(&self) -> &Option<ModelInferenceDataSource>
The data source that was used for the trained model inference job.
Sourcepub fn container_execution_parameters(
self,
input: InferenceContainerExecutionParameters,
) -> Self
pub fn container_execution_parameters( self, input: InferenceContainerExecutionParameters, ) -> Self
The execution parameters for the model inference job container.
Sourcepub fn set_container_execution_parameters(
self,
input: Option<InferenceContainerExecutionParameters>,
) -> Self
pub fn set_container_execution_parameters( self, input: Option<InferenceContainerExecutionParameters>, ) -> Self
The execution parameters for the model inference job container.
Sourcepub fn get_container_execution_parameters(
&self,
) -> &Option<InferenceContainerExecutionParameters>
pub fn get_container_execution_parameters( &self, ) -> &Option<InferenceContainerExecutionParameters>
The execution parameters for the model inference job container.
Sourcepub fn status_details(self, input: StatusDetails) -> Self
pub fn status_details(self, input: StatusDetails) -> Self
Details about the status of a resource.
Sourcepub fn set_status_details(self, input: Option<StatusDetails>) -> Self
pub fn set_status_details(self, input: Option<StatusDetails>) -> Self
Details about the status of a resource.
Sourcepub fn get_status_details(&self) -> &Option<StatusDetails>
pub fn get_status_details(&self) -> &Option<StatusDetails>
Details about the status of a resource.
Sourcepub fn description(self, input: impl Into<String>) -> Self
pub fn description(self, input: impl Into<String>) -> Self
The description of the trained model inference job.
Sourcepub fn set_description(self, input: Option<String>) -> Self
pub fn set_description(self, input: Option<String>) -> Self
The description of the trained model inference job.
Sourcepub fn get_description(&self) -> &Option<String>
pub fn get_description(&self) -> &Option<String>
The description of the trained model inference job.
Sourcepub fn inference_container_image_digest(self, input: impl Into<String>) -> Self
pub fn inference_container_image_digest(self, input: impl Into<String>) -> Self
Information about the inference container image.
Sourcepub fn set_inference_container_image_digest(self, input: Option<String>) -> Self
pub fn set_inference_container_image_digest(self, input: Option<String>) -> Self
Information about the inference container image.
Sourcepub fn get_inference_container_image_digest(&self) -> &Option<String>
pub fn get_inference_container_image_digest(&self) -> &Option<String>
Information about the inference container image.
Sourcepub fn environment(self, k: impl Into<String>, v: impl Into<String>) -> Self
pub fn environment(self, k: impl Into<String>, v: impl Into<String>) -> Self
Adds a key-value pair to environment.
To override the contents of this collection use set_environment.
The environment variables to set in the Docker container.
Sourcepub fn set_environment(self, input: Option<HashMap<String, String>>) -> Self
pub fn set_environment(self, input: Option<HashMap<String, String>>) -> Self
The environment variables to set in the Docker container.
Sourcepub fn get_environment(&self) -> &Option<HashMap<String, String>>
pub fn get_environment(&self) -> &Option<HashMap<String, String>>
The environment variables to set in the Docker container.
Sourcepub fn kms_key_arn(self, input: impl Into<String>) -> Self
pub fn kms_key_arn(self, input: impl Into<String>) -> Self
The Amazon Resource Name (ARN) of the KMS key. This key is used to encrypt and decrypt customer-owned data in the ML inference job and associated data.
Sourcepub fn set_kms_key_arn(self, input: Option<String>) -> Self
pub fn set_kms_key_arn(self, input: Option<String>) -> Self
The Amazon Resource Name (ARN) of the KMS key. This key is used to encrypt and decrypt customer-owned data in the ML inference job and associated data.
Sourcepub fn get_kms_key_arn(&self) -> &Option<String>
pub fn get_kms_key_arn(&self) -> &Option<String>
The Amazon Resource Name (ARN) of the KMS key. This key is used to encrypt and decrypt customer-owned data in the ML inference job and associated data.
Sourcepub fn metrics_status(self, input: MetricsStatus) -> Self
pub fn metrics_status(self, input: MetricsStatus) -> Self
The metrics status for the trained model inference job.
Sourcepub fn set_metrics_status(self, input: Option<MetricsStatus>) -> Self
pub fn set_metrics_status(self, input: Option<MetricsStatus>) -> Self
The metrics status for the trained model inference job.
Sourcepub fn get_metrics_status(&self) -> &Option<MetricsStatus>
pub fn get_metrics_status(&self) -> &Option<MetricsStatus>
The metrics status for the trained model inference job.
Sourcepub fn metrics_status_details(self, input: impl Into<String>) -> Self
pub fn metrics_status_details(self, input: impl Into<String>) -> Self
Details about the metrics status for the trained model inference job.
Sourcepub fn set_metrics_status_details(self, input: Option<String>) -> Self
pub fn set_metrics_status_details(self, input: Option<String>) -> Self
Details about the metrics status for the trained model inference job.
Sourcepub fn get_metrics_status_details(&self) -> &Option<String>
pub fn get_metrics_status_details(&self) -> &Option<String>
Details about the metrics status for the trained model inference job.
Sourcepub fn logs_status(self, input: LogsStatus) -> Self
pub fn logs_status(self, input: LogsStatus) -> Self
The logs status for the trained model inference job.
Sourcepub fn set_logs_status(self, input: Option<LogsStatus>) -> Self
pub fn set_logs_status(self, input: Option<LogsStatus>) -> Self
The logs status for the trained model inference job.
Sourcepub fn get_logs_status(&self) -> &Option<LogsStatus>
pub fn get_logs_status(&self) -> &Option<LogsStatus>
The logs status for the trained model inference job.
Sourcepub fn logs_status_details(self, input: impl Into<String>) -> Self
pub fn logs_status_details(self, input: impl Into<String>) -> Self
Details about the logs status for the trained model inference job.
Sourcepub fn set_logs_status_details(self, input: Option<String>) -> Self
pub fn set_logs_status_details(self, input: Option<String>) -> Self
Details about the logs status for the trained model inference job.
Sourcepub fn get_logs_status_details(&self) -> &Option<String>
pub fn get_logs_status_details(&self) -> &Option<String>
Details about the logs status for the trained model inference job.
Adds a key-value pair to tags.
To override the contents of this collection use set_tags.
The optional metadata that you applied to the resource to help you categorize and organize them. Each tag consists of a key and an optional value, both of which you define.
The following basic restrictions apply to tags:
-
Maximum number of tags per resource - 50.
-
For each resource, each tag key must be unique, and each tag key can have only one value.
-
Maximum key length - 128 Unicode characters in UTF-8.
-
Maximum value length - 256 Unicode characters in UTF-8.
-
If your tagging schema is used across multiple services and resources, remember that other services may have restrictions on allowed characters. Generally allowed characters are: letters, numbers, and spaces representable in UTF-8, and the following characters: + - = . _ : / @.
-
Tag keys and values are case sensitive.
-
Do not use aws:, AWS:, or any upper or lowercase combination of such as a prefix for keys as it is reserved for AWS use. You cannot edit or delete tag keys with this prefix. Values can have this prefix. If a tag value has aws as its prefix but the key does not, then Clean Rooms ML considers it to be a user tag and will count against the limit of 50 tags. Tags with only the key prefix of aws do not count against your tags per resource limit.
The optional metadata that you applied to the resource to help you categorize and organize them. Each tag consists of a key and an optional value, both of which you define.
The following basic restrictions apply to tags:
-
Maximum number of tags per resource - 50.
-
For each resource, each tag key must be unique, and each tag key can have only one value.
-
Maximum key length - 128 Unicode characters in UTF-8.
-
Maximum value length - 256 Unicode characters in UTF-8.
-
If your tagging schema is used across multiple services and resources, remember that other services may have restrictions on allowed characters. Generally allowed characters are: letters, numbers, and spaces representable in UTF-8, and the following characters: + - = . _ : / @.
-
Tag keys and values are case sensitive.
-
Do not use aws:, AWS:, or any upper or lowercase combination of such as a prefix for keys as it is reserved for AWS use. You cannot edit or delete tag keys with this prefix. Values can have this prefix. If a tag value has aws as its prefix but the key does not, then Clean Rooms ML considers it to be a user tag and will count against the limit of 50 tags. Tags with only the key prefix of aws do not count against your tags per resource limit.
The optional metadata that you applied to the resource to help you categorize and organize them. Each tag consists of a key and an optional value, both of which you define.
The following basic restrictions apply to tags:
-
Maximum number of tags per resource - 50.
-
For each resource, each tag key must be unique, and each tag key can have only one value.
-
Maximum key length - 128 Unicode characters in UTF-8.
-
Maximum value length - 256 Unicode characters in UTF-8.
-
If your tagging schema is used across multiple services and resources, remember that other services may have restrictions on allowed characters. Generally allowed characters are: letters, numbers, and spaces representable in UTF-8, and the following characters: + - = . _ : / @.
-
Tag keys and values are case sensitive.
-
Do not use aws:, AWS:, or any upper or lowercase combination of such as a prefix for keys as it is reserved for AWS use. You cannot edit or delete tag keys with this prefix. Values can have this prefix. If a tag value has aws as its prefix but the key does not, then Clean Rooms ML considers it to be a user tag and will count against the limit of 50 tags. Tags with only the key prefix of aws do not count against your tags per resource limit.
Sourcepub fn build(self) -> Result<GetTrainedModelInferenceJobOutput, BuildError>
pub fn build(self) -> Result<GetTrainedModelInferenceJobOutput, BuildError>
Consumes the builder and constructs a GetTrainedModelInferenceJobOutput.
This method will fail if any of the following fields are not set: create_time, update_time, trained_model_inference_job_arn, name, status, trained_model_arn, resource_config, output_configuration, membership_identifier, data_source.
Trait Implementations§
Source§impl Clone for GetTrainedModelInferenceJobOutputBuilder
impl Clone for GetTrainedModelInferenceJobOutputBuilder
Source§fn clone(&self) -> GetTrainedModelInferenceJobOutputBuilder
fn clone(&self) -> GetTrainedModelInferenceJobOutputBuilder
1.0.0 · Source§fn clone_from(&mut self, source: &Self)
fn clone_from(&mut self, source: &Self)
Performs copy-assignment from source. Read more
Source§impl Default for GetTrainedModelInferenceJobOutputBuilder
impl Default for GetTrainedModelInferenceJobOutputBuilder
Source§fn default() -> GetTrainedModelInferenceJobOutputBuilder
fn default() -> GetTrainedModelInferenceJobOutputBuilder
Source§impl PartialEq for GetTrainedModelInferenceJobOutputBuilder
impl PartialEq for GetTrainedModelInferenceJobOutputBuilder
Source§fn eq(&self, other: &GetTrainedModelInferenceJobOutputBuilder) -> bool
fn eq(&self, other: &GetTrainedModelInferenceJobOutputBuilder) -> bool
Tests for self and other values to be equal, and is used by ==.
Source§impl StructuralPartialEq for GetTrainedModelInferenceJobOutputBuilder
Auto Trait Implementations§
impl Freeze for GetTrainedModelInferenceJobOutputBuilder
impl RefUnwindSafe for GetTrainedModelInferenceJobOutputBuilder
impl Send for GetTrainedModelInferenceJobOutputBuilder
impl Sync for GetTrainedModelInferenceJobOutputBuilder
impl Unpin for GetTrainedModelInferenceJobOutputBuilder
impl UnwindSafe for GetTrainedModelInferenceJobOutputBuilder
Blanket Implementations§
Source§impl<T> BorrowMut<T> for T where
T: ?Sized,
impl<T> BorrowMut<T> for T where
T: ?Sized,
Source§fn borrow_mut(&mut self) -> &mut T
fn borrow_mut(&mut self) -> &mut T
Source§impl<T> CloneToUninit for T where
T: Clone,
impl<T> CloneToUninit for T where
T: Clone,
Source§impl<T> Instrument for T
impl<T> Instrument for T
Source§fn instrument(self, span: Span) -> Instrumented<Self>
fn instrument(self, span: Span) -> Instrumented<Self>
Source§fn in_current_span(self) -> Instrumented<Self>
fn in_current_span(self) -> Instrumented<Self>
Source§impl<T> IntoEither for T
impl<T> IntoEither for T
Source§fn into_either(self, into_left: bool) -> Either<Self, Self>
fn into_either(self, into_left: bool) -> Either<Self, Self>
Converts self into a Left variant of Either<Self, Self>
if into_left is true.
Converts self into a Right variant of Either<Self, Self>
otherwise. Read more
Source§fn into_either_with<F>(self, into_left: F) -> Either<Self, Self>
fn into_either_with<F>(self, into_left: F) -> Either<Self, Self>
Converts self into a Left variant of Either<Self, Self>
if into_left(&self) returns true.
Converts self into a Right variant of Either<Self, Self>
otherwise. Read more
Source§impl<T> Paint for T where
T: ?Sized,
impl<T> Paint for T where
T: ?Sized,
Source§fn fg(&self, value: Color) -> Painted<&T>
fn fg(&self, value: Color) -> Painted<&T>
Returns a styled value derived from self with the foreground set to
value.
This method should be used rarely. Instead, prefer to use color-specific
builder methods like red() and
green(), which have the same functionality but are
pithier.
§Example
Set foreground color to white using fg():
use yansi::{Paint, Color};
painted.fg(Color::White);Set foreground color to white using white().
use yansi::Paint;
painted.white();Source§fn bright_black(&self) -> Painted<&T>
fn bright_black(&self) -> Painted<&T>
Source§fn bright_red(&self) -> Painted<&T>
fn bright_red(&self) -> Painted<&T>
Source§fn bright_green(&self) -> Painted<&T>
fn bright_green(&self) -> Painted<&T>
Source§fn bright_yellow(&self) -> Painted<&T>
fn bright_yellow(&self) -> Painted<&T>
Source§fn bright_blue(&self) -> Painted<&T>
fn bright_blue(&self) -> Painted<&T>
Source§fn bright_magenta(&self) -> Painted<&T>
fn bright_magenta(&self) -> Painted<&T>
Source§fn bright_cyan(&self) -> Painted<&T>
fn bright_cyan(&self) -> Painted<&T>
Source§fn bright_white(&self) -> Painted<&T>
fn bright_white(&self) -> Painted<&T>
Source§fn bg(&self, value: Color) -> Painted<&T>
fn bg(&self, value: Color) -> Painted<&T>
Returns a styled value derived from self with the background set to
value.
This method should be used rarely. Instead, prefer to use color-specific
builder methods like on_red() and
on_green(), which have the same functionality but
are pithier.
§Example
Set background color to red using bg():
use yansi::{Paint, Color};
painted.bg(Color::Red);Set background color to red using on_red().
use yansi::Paint;
painted.on_red();Source§fn on_primary(&self) -> Painted<&T>
fn on_primary(&self) -> Painted<&T>
Source§fn on_magenta(&self) -> Painted<&T>
fn on_magenta(&self) -> Painted<&T>
Source§fn on_bright_black(&self) -> Painted<&T>
fn on_bright_black(&self) -> Painted<&T>
Source§fn on_bright_red(&self) -> Painted<&T>
fn on_bright_red(&self) -> Painted<&T>
Source§fn on_bright_green(&self) -> Painted<&T>
fn on_bright_green(&self) -> Painted<&T>
Source§fn on_bright_yellow(&self) -> Painted<&T>
fn on_bright_yellow(&self) -> Painted<&T>
Source§fn on_bright_blue(&self) -> Painted<&T>
fn on_bright_blue(&self) -> Painted<&T>
Source§fn on_bright_magenta(&self) -> Painted<&T>
fn on_bright_magenta(&self) -> Painted<&T>
Source§fn on_bright_cyan(&self) -> Painted<&T>
fn on_bright_cyan(&self) -> Painted<&T>
Source§fn on_bright_white(&self) -> Painted<&T>
fn on_bright_white(&self) -> Painted<&T>
Source§fn attr(&self, value: Attribute) -> Painted<&T>
fn attr(&self, value: Attribute) -> Painted<&T>
Enables the styling Attribute value.
This method should be used rarely. Instead, prefer to use
attribute-specific builder methods like bold() and
underline(), which have the same functionality
but are pithier.
§Example
Make text bold using attr():
use yansi::{Paint, Attribute};
painted.attr(Attribute::Bold);Make text bold using bold().
use yansi::Paint;
painted.bold();Source§fn rapid_blink(&self) -> Painted<&T>
fn rapid_blink(&self) -> Painted<&T>
Source§fn quirk(&self, value: Quirk) -> Painted<&T>
fn quirk(&self, value: Quirk) -> Painted<&T>
Enables the yansi Quirk value.
This method should be used rarely. Instead, prefer to use quirk-specific
builder methods like mask() and
wrap(), which have the same functionality but are
pithier.
§Example
Enable wrapping using .quirk():
use yansi::{Paint, Quirk};
painted.quirk(Quirk::Wrap);Enable wrapping using wrap().
use yansi::Paint;
painted.wrap();Source§fn clear(&self) -> Painted<&T>
👎Deprecated since 1.0.1: renamed to resetting() due to conflicts with Vec::clear().
The clear() method will be removed in a future release.
fn clear(&self) -> Painted<&T>
Deprecated since 1.0.1: renamed to resetting() due to conflicts with Vec::clear().
The clear() method will be removed in a future release.
Source§fn whenever(&self, value: Condition) -> Painted<&T>
fn whenever(&self, value: Condition) -> Painted<&T>
Conditionally enable styling based on whether the Condition value
applies. Replaces any previous condition.
See the crate level docs for more details.
§Example
Enable styling painted only when both stdout and stderr are TTYs:
use yansi::{Paint, Condition};
painted.red().on_yellow().whenever(Condition::STDOUTERR_ARE_TTY);