// Code generated by software.amazon.smithy.rust.codegen.smithy-rs. DO NOT EDIT.
#[allow(missing_docs)] // documentation missing in model
#[non_exhaustive]
#[derive(::std::clone::Clone, ::std::cmp::PartialEq)]
pub struct InvokeEndpointInput {
    /// <p>The name of the endpoint that you specified when you created the endpoint using the <a href="https://docs.aws.amazon.com/sagemaker/latest/dg/API_CreateEndpoint.html">CreateEndpoint</a> API.</p>
    pub endpoint_name: ::std::option::Option<::std::string::String>,
    /// <p>Provides input data, in the format specified in the <code>ContentType</code> request header. Amazon SageMaker AI passes all of the data in the body to the model.</p>
    /// <p>For information about the format of the request body, see <a href="https://docs.aws.amazon.com/sagemaker/latest/dg/cdf-inference.html">Common Data Formats-Inference</a>.</p>
    pub body: ::std::option::Option<::aws_smithy_types::Blob>,
    /// <p>The MIME type of the input data in the request body.</p>
    pub content_type: ::std::option::Option<::std::string::String>,
    /// <p>The desired MIME type of the inference response from the model container.</p>
    pub accept: ::std::option::Option<::std::string::String>,
    /// <p>Provides additional information about a request for an inference submitted to a model hosted at an Amazon SageMaker AI endpoint. The information is an opaque value that is forwarded verbatim. You could use this value, for example, to provide an ID that you can use to track a request or to provide other metadata that a service endpoint was programmed to process. The value must consist of no more than 1024 visible US-ASCII characters as specified in <a href="https://datatracker.ietf.org/doc/html/rfc7230#section-3.2.6">Section 3.2.6. Field Value Components</a> of the Hypertext Transfer Protocol (HTTP/1.1).</p>
    /// <p>The code in your model is responsible for setting or updating any custom attributes in the response. If your code does not set this value in the response, an empty value is returned. For example, if a custom attribute represents the trace ID, your model can prepend the custom attribute with <code>Trace ID:</code> in your post-processing function.</p>
    /// <p>This feature is currently supported in the Amazon Web Services SDKs but not in the Amazon SageMaker AI Python SDK.</p>
    pub custom_attributes: ::std::option::Option<::std::string::String>,
    /// <p>The model to request for inference when invoking a multi-model endpoint.</p>
    pub target_model: ::std::option::Option<::std::string::String>,
    /// <p>Specify the production variant to send the inference request to when invoking an endpoint that is running two or more variants. Note that this parameter overrides the default behavior for the endpoint, which is to distribute the invocation traffic based on the variant weights.</p>
    /// <p>For information about how to use variant targeting to perform a/b testing, see <a href="https://docs.aws.amazon.com/sagemaker/latest/dg/model-ab-testing.html">Test models in production</a></p>
    pub target_variant: ::std::option::Option<::std::string::String>,
    /// <p>If the endpoint hosts multiple containers and is configured to use direct invocation, this parameter specifies the host name of the container to invoke.</p>
    pub target_container_hostname: ::std::option::Option<::std::string::String>,
    /// <p>If you provide a value, it is added to the captured data when you enable data capture on the endpoint. For information about data capture, see <a href="https://docs.aws.amazon.com/sagemaker/latest/dg/model-monitor-data-capture.html">Capture Data</a>.</p>
    pub inference_id: ::std::option::Option<::std::string::String>,
    /// <p>An optional JMESPath expression used to override the <code>EnableExplanations</code> parameter of the <code>ClarifyExplainerConfig</code> API. See the <a href="https://docs.aws.amazon.com/sagemaker/latest/dg/clarify-online-explainability-create-endpoint.html#clarify-online-explainability-create-endpoint-enable">EnableExplanations</a> section in the developer guide for more information.</p>
    pub enable_explanations: ::std::option::Option<::std::string::String>,
    /// <p>If the endpoint hosts one or more inference components, this parameter specifies the name of inference component to invoke.</p>
    pub inference_component_name: ::std::option::Option<::std::string::String>,
    /// <p>Creates a stateful session or identifies an existing one. You can do one of the following:</p>
    /// <ul>
    /// <li>
    /// <p>Create a stateful session by specifying the value <code>NEW_SESSION</code>.</p></li>
    /// <li>
    /// <p>Send your request to an existing stateful session by specifying the ID of that session.</p></li>
    /// </ul>
    /// <p>With a stateful session, you can send multiple requests to a stateful model. When you create a session with a stateful model, the model must create the session ID and set the expiration time. The model must also provide that information in the response to your request. You can get the ID and timestamp from the <code>NewSessionId</code> response parameter. For any subsequent request where you specify that session ID, SageMaker AI routes the request to the same instance that supports the session.</p>
    pub session_id: ::std::option::Option<::std::string::String>,
}
impl InvokeEndpointInput {
    /// <p>The name of the endpoint that you specified when you created the endpoint using the <a href="https://docs.aws.amazon.com/sagemaker/latest/dg/API_CreateEndpoint.html">CreateEndpoint</a> API.</p>
    pub fn endpoint_name(&self) -> ::std::option::Option<&str> {
        self.endpoint_name.as_deref()
    }
    /// <p>Provides input data, in the format specified in the <code>ContentType</code> request header. Amazon SageMaker AI passes all of the data in the body to the model.</p>
    /// <p>For information about the format of the request body, see <a href="https://docs.aws.amazon.com/sagemaker/latest/dg/cdf-inference.html">Common Data Formats-Inference</a>.</p>
    pub fn body(&self) -> ::std::option::Option<&::aws_smithy_types::Blob> {
        self.body.as_ref()
    }
    /// <p>The MIME type of the input data in the request body.</p>
    pub fn content_type(&self) -> ::std::option::Option<&str> {
        self.content_type.as_deref()
    }
    /// <p>The desired MIME type of the inference response from the model container.</p>
    pub fn accept(&self) -> ::std::option::Option<&str> {
        self.accept.as_deref()
    }
    /// <p>Provides additional information about a request for an inference submitted to a model hosted at an Amazon SageMaker AI endpoint. The information is an opaque value that is forwarded verbatim. You could use this value, for example, to provide an ID that you can use to track a request or to provide other metadata that a service endpoint was programmed to process. The value must consist of no more than 1024 visible US-ASCII characters as specified in <a href="https://datatracker.ietf.org/doc/html/rfc7230#section-3.2.6">Section 3.2.6. Field Value Components</a> of the Hypertext Transfer Protocol (HTTP/1.1).</p>
    /// <p>The code in your model is responsible for setting or updating any custom attributes in the response. If your code does not set this value in the response, an empty value is returned. For example, if a custom attribute represents the trace ID, your model can prepend the custom attribute with <code>Trace ID:</code> in your post-processing function.</p>
    /// <p>This feature is currently supported in the Amazon Web Services SDKs but not in the Amazon SageMaker AI Python SDK.</p>
    pub fn custom_attributes(&self) -> ::std::option::Option<&str> {
        self.custom_attributes.as_deref()
    }
    /// <p>The model to request for inference when invoking a multi-model endpoint.</p>
    pub fn target_model(&self) -> ::std::option::Option<&str> {
        self.target_model.as_deref()
    }
    /// <p>Specify the production variant to send the inference request to when invoking an endpoint that is running two or more variants. Note that this parameter overrides the default behavior for the endpoint, which is to distribute the invocation traffic based on the variant weights.</p>
    /// <p>For information about how to use variant targeting to perform a/b testing, see <a href="https://docs.aws.amazon.com/sagemaker/latest/dg/model-ab-testing.html">Test models in production</a></p>
    pub fn target_variant(&self) -> ::std::option::Option<&str> {
        self.target_variant.as_deref()
    }
    /// <p>If the endpoint hosts multiple containers and is configured to use direct invocation, this parameter specifies the host name of the container to invoke.</p>
    pub fn target_container_hostname(&self) -> ::std::option::Option<&str> {
        self.target_container_hostname.as_deref()
    }
    /// <p>If you provide a value, it is added to the captured data when you enable data capture on the endpoint. For information about data capture, see <a href="https://docs.aws.amazon.com/sagemaker/latest/dg/model-monitor-data-capture.html">Capture Data</a>.</p>
    pub fn inference_id(&self) -> ::std::option::Option<&str> {
        self.inference_id.as_deref()
    }
    /// <p>An optional JMESPath expression used to override the <code>EnableExplanations</code> parameter of the <code>ClarifyExplainerConfig</code> API. See the <a href="https://docs.aws.amazon.com/sagemaker/latest/dg/clarify-online-explainability-create-endpoint.html#clarify-online-explainability-create-endpoint-enable">EnableExplanations</a> section in the developer guide for more information.</p>
    pub fn enable_explanations(&self) -> ::std::option::Option<&str> {
        self.enable_explanations.as_deref()
    }
    /// <p>If the endpoint hosts one or more inference components, this parameter specifies the name of inference component to invoke.</p>
    pub fn inference_component_name(&self) -> ::std::option::Option<&str> {
        self.inference_component_name.as_deref()
    }
    /// <p>Creates a stateful session or identifies an existing one. You can do one of the following:</p>
    /// <ul>
    /// <li>
    /// <p>Create a stateful session by specifying the value <code>NEW_SESSION</code>.</p></li>
    /// <li>
    /// <p>Send your request to an existing stateful session by specifying the ID of that session.</p></li>
    /// </ul>
    /// <p>With a stateful session, you can send multiple requests to a stateful model. When you create a session with a stateful model, the model must create the session ID and set the expiration time. The model must also provide that information in the response to your request. You can get the ID and timestamp from the <code>NewSessionId</code> response parameter. For any subsequent request where you specify that session ID, SageMaker AI routes the request to the same instance that supports the session.</p>
    pub fn session_id(&self) -> ::std::option::Option<&str> {
        self.session_id.as_deref()
    }
}
impl ::std::fmt::Debug for InvokeEndpointInput {
    /// Formats the input for debugging, redacting the `body` and
    /// `custom_attributes` fields, which are marked sensitive in the model.
    fn fmt(&self, f: &mut ::std::fmt::Formatter<'_>) -> ::std::fmt::Result {
        f.debug_struct("InvokeEndpointInput")
            .field("endpoint_name", &self.endpoint_name)
            .field("body", &"*** Sensitive Data Redacted ***")
            .field("content_type", &self.content_type)
            .field("accept", &self.accept)
            .field("custom_attributes", &"*** Sensitive Data Redacted ***")
            .field("target_model", &self.target_model)
            .field("target_variant", &self.target_variant)
            .field("target_container_hostname", &self.target_container_hostname)
            .field("inference_id", &self.inference_id)
            .field("enable_explanations", &self.enable_explanations)
            .field("inference_component_name", &self.inference_component_name)
            .field("session_id", &self.session_id)
            .finish()
    }
}
impl InvokeEndpointInput {
    /// Creates a new builder-style object to manufacture [`InvokeEndpointInput`](crate::operation::invoke_endpoint::InvokeEndpointInput).
    pub fn builder() -> crate::operation::invoke_endpoint::builders::InvokeEndpointInputBuilder {
        // Return-type-driven inference selects the builder's derived `Default` impl.
        ::std::default::Default::default()
    }
}
/// A builder for [`InvokeEndpointInput`](crate::operation::invoke_endpoint::InvokeEndpointInput).
#[derive(::std::clone::Clone, ::std::cmp::PartialEq, ::std::default::Default)]
#[non_exhaustive]
pub struct InvokeEndpointInputBuilder {
    // Each field is the `Option`-wrapped mirror of the corresponding
    // `InvokeEndpointInput` member; `build()` moves them across unchanged.
    pub(crate) endpoint_name: ::std::option::Option<::std::string::String>,
    pub(crate) body: ::std::option::Option<::aws_smithy_types::Blob>,
    pub(crate) content_type: ::std::option::Option<::std::string::String>,
    pub(crate) accept: ::std::option::Option<::std::string::String>,
    pub(crate) custom_attributes: ::std::option::Option<::std::string::String>,
    pub(crate) target_model: ::std::option::Option<::std::string::String>,
    pub(crate) target_variant: ::std::option::Option<::std::string::String>,
    pub(crate) target_container_hostname: ::std::option::Option<::std::string::String>,
    pub(crate) inference_id: ::std::option::Option<::std::string::String>,
    pub(crate) enable_explanations: ::std::option::Option<::std::string::String>,
    pub(crate) inference_component_name: ::std::option::Option<::std::string::String>,
    pub(crate) session_id: ::std::option::Option<::std::string::String>,
}
impl InvokeEndpointInputBuilder {
    /// <p>The name of the endpoint that you specified when you created the endpoint using the <a href="https://docs.aws.amazon.com/sagemaker/latest/dg/API_CreateEndpoint.html">CreateEndpoint</a> API.</p>
    /// This field is required.
    pub fn endpoint_name(mut self, input: impl ::std::convert::Into<::std::string::String>) -> Self {
        self.endpoint_name = ::std::option::Option::Some(input.into());
        self
    }
    /// <p>The name of the endpoint that you specified when you created the endpoint using the <a href="https://docs.aws.amazon.com/sagemaker/latest/dg/API_CreateEndpoint.html">CreateEndpoint</a> API.</p>
    pub fn set_endpoint_name(mut self, input: ::std::option::Option<::std::string::String>) -> Self {
        self.endpoint_name = input;
        self
    }
    /// <p>The name of the endpoint that you specified when you created the endpoint using the <a href="https://docs.aws.amazon.com/sagemaker/latest/dg/API_CreateEndpoint.html">CreateEndpoint</a> API.</p>
    pub fn get_endpoint_name(&self) -> &::std::option::Option<::std::string::String> {
        &self.endpoint_name
    }
    /// <p>Provides input data, in the format specified in the <code>ContentType</code> request header. Amazon SageMaker AI passes all of the data in the body to the model.</p>
    /// <p>For information about the format of the request body, see <a href="https://docs.aws.amazon.com/sagemaker/latest/dg/cdf-inference.html">Common Data Formats-Inference</a>.</p>
    /// This field is required.
    pub fn body(mut self, input: ::aws_smithy_types::Blob) -> Self {
        self.body = ::std::option::Option::Some(input);
        self
    }
    /// <p>Provides input data, in the format specified in the <code>ContentType</code> request header. Amazon SageMaker AI passes all of the data in the body to the model.</p>
    /// <p>For information about the format of the request body, see <a href="https://docs.aws.amazon.com/sagemaker/latest/dg/cdf-inference.html">Common Data Formats-Inference</a>.</p>
    pub fn set_body(mut self, input: ::std::option::Option<::aws_smithy_types::Blob>) -> Self {
        self.body = input;
        self
    }
    /// <p>Provides input data, in the format specified in the <code>ContentType</code> request header. Amazon SageMaker AI passes all of the data in the body to the model.</p>
    /// <p>For information about the format of the request body, see <a href="https://docs.aws.amazon.com/sagemaker/latest/dg/cdf-inference.html">Common Data Formats-Inference</a>.</p>
    pub fn get_body(&self) -> &::std::option::Option<::aws_smithy_types::Blob> {
        &self.body
    }
    /// <p>The MIME type of the input data in the request body.</p>
    pub fn content_type(mut self, input: impl ::std::convert::Into<::std::string::String>) -> Self {
        self.content_type = ::std::option::Option::Some(input.into());
        self
    }
    /// <p>The MIME type of the input data in the request body.</p>
    pub fn set_content_type(mut self, input: ::std::option::Option<::std::string::String>) -> Self {
        self.content_type = input;
        self
    }
    /// <p>The MIME type of the input data in the request body.</p>
    pub fn get_content_type(&self) -> &::std::option::Option<::std::string::String> {
        &self.content_type
    }
    /// <p>The desired MIME type of the inference response from the model container.</p>
    pub fn accept(mut self, input: impl ::std::convert::Into<::std::string::String>) -> Self {
        self.accept = ::std::option::Option::Some(input.into());
        self
    }
    /// <p>The desired MIME type of the inference response from the model container.</p>
    pub fn set_accept(mut self, input: ::std::option::Option<::std::string::String>) -> Self {
        self.accept = input;
        self
    }
    /// <p>The desired MIME type of the inference response from the model container.</p>
    pub fn get_accept(&self) -> &::std::option::Option<::std::string::String> {
        &self.accept
    }
    /// <p>Provides additional information about a request for an inference submitted to a model hosted at an Amazon SageMaker AI endpoint. The information is an opaque value that is forwarded verbatim. You could use this value, for example, to provide an ID that you can use to track a request or to provide other metadata that a service endpoint was programmed to process. The value must consist of no more than 1024 visible US-ASCII characters as specified in <a href="https://datatracker.ietf.org/doc/html/rfc7230#section-3.2.6">Section 3.2.6. Field Value Components</a> of the Hypertext Transfer Protocol (HTTP/1.1).</p>
    /// <p>The code in your model is responsible for setting or updating any custom attributes in the response. If your code does not set this value in the response, an empty value is returned. For example, if a custom attribute represents the trace ID, your model can prepend the custom attribute with <code>Trace ID:</code> in your post-processing function.</p>
    /// <p>This feature is currently supported in the Amazon Web Services SDKs but not in the Amazon SageMaker AI Python SDK.</p>
    pub fn custom_attributes(mut self, input: impl ::std::convert::Into<::std::string::String>) -> Self {
        self.custom_attributes = ::std::option::Option::Some(input.into());
        self
    }
    /// <p>Provides additional information about a request for an inference submitted to a model hosted at an Amazon SageMaker AI endpoint. The information is an opaque value that is forwarded verbatim. You could use this value, for example, to provide an ID that you can use to track a request or to provide other metadata that a service endpoint was programmed to process. The value must consist of no more than 1024 visible US-ASCII characters as specified in <a href="https://datatracker.ietf.org/doc/html/rfc7230#section-3.2.6">Section 3.2.6. Field Value Components</a> of the Hypertext Transfer Protocol (HTTP/1.1).</p>
    /// <p>The code in your model is responsible for setting or updating any custom attributes in the response. If your code does not set this value in the response, an empty value is returned. For example, if a custom attribute represents the trace ID, your model can prepend the custom attribute with <code>Trace ID:</code> in your post-processing function.</p>
    /// <p>This feature is currently supported in the Amazon Web Services SDKs but not in the Amazon SageMaker AI Python SDK.</p>
    pub fn set_custom_attributes(mut self, input: ::std::option::Option<::std::string::String>) -> Self {
        self.custom_attributes = input;
        self
    }
    /// <p>Provides additional information about a request for an inference submitted to a model hosted at an Amazon SageMaker AI endpoint. The information is an opaque value that is forwarded verbatim. You could use this value, for example, to provide an ID that you can use to track a request or to provide other metadata that a service endpoint was programmed to process. The value must consist of no more than 1024 visible US-ASCII characters as specified in <a href="https://datatracker.ietf.org/doc/html/rfc7230#section-3.2.6">Section 3.2.6. Field Value Components</a> of the Hypertext Transfer Protocol (HTTP/1.1).</p>
    /// <p>The code in your model is responsible for setting or updating any custom attributes in the response. If your code does not set this value in the response, an empty value is returned. For example, if a custom attribute represents the trace ID, your model can prepend the custom attribute with <code>Trace ID:</code> in your post-processing function.</p>
    /// <p>This feature is currently supported in the Amazon Web Services SDKs but not in the Amazon SageMaker AI Python SDK.</p>
    pub fn get_custom_attributes(&self) -> &::std::option::Option<::std::string::String> {
        &self.custom_attributes
    }
    /// <p>The model to request for inference when invoking a multi-model endpoint.</p>
    pub fn target_model(mut self, input: impl ::std::convert::Into<::std::string::String>) -> Self {
        self.target_model = ::std::option::Option::Some(input.into());
        self
    }
    /// <p>The model to request for inference when invoking a multi-model endpoint.</p>
    pub fn set_target_model(mut self, input: ::std::option::Option<::std::string::String>) -> Self {
        self.target_model = input;
        self
    }
    /// <p>The model to request for inference when invoking a multi-model endpoint.</p>
    pub fn get_target_model(&self) -> &::std::option::Option<::std::string::String> {
        &self.target_model
    }
    /// <p>Specify the production variant to send the inference request to when invoking an endpoint that is running two or more variants. Note that this parameter overrides the default behavior for the endpoint, which is to distribute the invocation traffic based on the variant weights.</p>
    /// <p>For information about how to use variant targeting to perform a/b testing, see <a href="https://docs.aws.amazon.com/sagemaker/latest/dg/model-ab-testing.html">Test models in production</a></p>
    pub fn target_variant(mut self, input: impl ::std::convert::Into<::std::string::String>) -> Self {
        self.target_variant = ::std::option::Option::Some(input.into());
        self
    }
    /// <p>Specify the production variant to send the inference request to when invoking an endpoint that is running two or more variants. Note that this parameter overrides the default behavior for the endpoint, which is to distribute the invocation traffic based on the variant weights.</p>
    /// <p>For information about how to use variant targeting to perform a/b testing, see <a href="https://docs.aws.amazon.com/sagemaker/latest/dg/model-ab-testing.html">Test models in production</a></p>
    pub fn set_target_variant(mut self, input: ::std::option::Option<::std::string::String>) -> Self {
        self.target_variant = input;
        self
    }
    /// <p>Specify the production variant to send the inference request to when invoking an endpoint that is running two or more variants. Note that this parameter overrides the default behavior for the endpoint, which is to distribute the invocation traffic based on the variant weights.</p>
    /// <p>For information about how to use variant targeting to perform a/b testing, see <a href="https://docs.aws.amazon.com/sagemaker/latest/dg/model-ab-testing.html">Test models in production</a></p>
    pub fn get_target_variant(&self) -> &::std::option::Option<::std::string::String> {
        &self.target_variant
    }
    /// <p>If the endpoint hosts multiple containers and is configured to use direct invocation, this parameter specifies the host name of the container to invoke.</p>
    pub fn target_container_hostname(mut self, input: impl ::std::convert::Into<::std::string::String>) -> Self {
        self.target_container_hostname = ::std::option::Option::Some(input.into());
        self
    }
    /// <p>If the endpoint hosts multiple containers and is configured to use direct invocation, this parameter specifies the host name of the container to invoke.</p>
    pub fn set_target_container_hostname(mut self, input: ::std::option::Option<::std::string::String>) -> Self {
        self.target_container_hostname = input;
        self
    }
    /// <p>If the endpoint hosts multiple containers and is configured to use direct invocation, this parameter specifies the host name of the container to invoke.</p>
    pub fn get_target_container_hostname(&self) -> &::std::option::Option<::std::string::String> {
        &self.target_container_hostname
    }
    /// <p>If you provide a value, it is added to the captured data when you enable data capture on the endpoint. For information about data capture, see <a href="https://docs.aws.amazon.com/sagemaker/latest/dg/model-monitor-data-capture.html">Capture Data</a>.</p>
    pub fn inference_id(mut self, input: impl ::std::convert::Into<::std::string::String>) -> Self {
        self.inference_id = ::std::option::Option::Some(input.into());
        self
    }
    /// <p>If you provide a value, it is added to the captured data when you enable data capture on the endpoint. For information about data capture, see <a href="https://docs.aws.amazon.com/sagemaker/latest/dg/model-monitor-data-capture.html">Capture Data</a>.</p>
    pub fn set_inference_id(mut self, input: ::std::option::Option<::std::string::String>) -> Self {
        self.inference_id = input;
        self
    }
    /// <p>If you provide a value, it is added to the captured data when you enable data capture on the endpoint. For information about data capture, see <a href="https://docs.aws.amazon.com/sagemaker/latest/dg/model-monitor-data-capture.html">Capture Data</a>.</p>
    pub fn get_inference_id(&self) -> &::std::option::Option<::std::string::String> {
        &self.inference_id
    }
    /// <p>An optional JMESPath expression used to override the <code>EnableExplanations</code> parameter of the <code>ClarifyExplainerConfig</code> API. See the <a href="https://docs.aws.amazon.com/sagemaker/latest/dg/clarify-online-explainability-create-endpoint.html#clarify-online-explainability-create-endpoint-enable">EnableExplanations</a> section in the developer guide for more information.</p>
    pub fn enable_explanations(mut self, input: impl ::std::convert::Into<::std::string::String>) -> Self {
        self.enable_explanations = ::std::option::Option::Some(input.into());
        self
    }
    /// <p>An optional JMESPath expression used to override the <code>EnableExplanations</code> parameter of the <code>ClarifyExplainerConfig</code> API. See the <a href="https://docs.aws.amazon.com/sagemaker/latest/dg/clarify-online-explainability-create-endpoint.html#clarify-online-explainability-create-endpoint-enable">EnableExplanations</a> section in the developer guide for more information.</p>
    pub fn set_enable_explanations(mut self, input: ::std::option::Option<::std::string::String>) -> Self {
        self.enable_explanations = input;
        self
    }
    /// <p>An optional JMESPath expression used to override the <code>EnableExplanations</code> parameter of the <code>ClarifyExplainerConfig</code> API. See the <a href="https://docs.aws.amazon.com/sagemaker/latest/dg/clarify-online-explainability-create-endpoint.html#clarify-online-explainability-create-endpoint-enable">EnableExplanations</a> section in the developer guide for more information.</p>
    pub fn get_enable_explanations(&self) -> &::std::option::Option<::std::string::String> {
        &self.enable_explanations
    }
    /// <p>If the endpoint hosts one or more inference components, this parameter specifies the name of inference component to invoke.</p>
    pub fn inference_component_name(mut self, input: impl ::std::convert::Into<::std::string::String>) -> Self {
        self.inference_component_name = ::std::option::Option::Some(input.into());
        self
    }
    /// <p>If the endpoint hosts one or more inference components, this parameter specifies the name of inference component to invoke.</p>
    pub fn set_inference_component_name(mut self, input: ::std::option::Option<::std::string::String>) -> Self {
        self.inference_component_name = input;
        self
    }
    /// <p>If the endpoint hosts one or more inference components, this parameter specifies the name of inference component to invoke.</p>
    pub fn get_inference_component_name(&self) -> &::std::option::Option<::std::string::String> {
        &self.inference_component_name
    }
    /// <p>Creates a stateful session or identifies an existing one. You can do one of the following:</p>
    /// <ul>
    /// <li>
    /// <p>Create a stateful session by specifying the value <code>NEW_SESSION</code>.</p></li>
    /// <li>
    /// <p>Send your request to an existing stateful session by specifying the ID of that session.</p></li>
    /// </ul>
    /// <p>With a stateful session, you can send multiple requests to a stateful model. When you create a session with a stateful model, the model must create the session ID and set the expiration time. The model must also provide that information in the response to your request. You can get the ID and timestamp from the <code>NewSessionId</code> response parameter. For any subsequent request where you specify that session ID, SageMaker AI routes the request to the same instance that supports the session.</p>
    pub fn session_id(mut self, input: impl ::std::convert::Into<::std::string::String>) -> Self {
        self.session_id = ::std::option::Option::Some(input.into());
        self
    }
    /// <p>Creates a stateful session or identifies an existing one. You can do one of the following:</p>
    /// <ul>
    /// <li>
    /// <p>Create a stateful session by specifying the value <code>NEW_SESSION</code>.</p></li>
    /// <li>
    /// <p>Send your request to an existing stateful session by specifying the ID of that session.</p></li>
    /// </ul>
    /// <p>With a stateful session, you can send multiple requests to a stateful model. When you create a session with a stateful model, the model must create the session ID and set the expiration time. The model must also provide that information in the response to your request. You can get the ID and timestamp from the <code>NewSessionId</code> response parameter. For any subsequent request where you specify that session ID, SageMaker AI routes the request to the same instance that supports the session.</p>
    pub fn set_session_id(mut self, input: ::std::option::Option<::std::string::String>) -> Self {
        self.session_id = input;
        self
    }
    /// <p>Creates a stateful session or identifies an existing one. You can do one of the following:</p>
    /// <ul>
    /// <li>
    /// <p>Create a stateful session by specifying the value <code>NEW_SESSION</code>.</p></li>
    /// <li>
    /// <p>Send your request to an existing stateful session by specifying the ID of that session.</p></li>
    /// </ul>
    /// <p>With a stateful session, you can send multiple requests to a stateful model. When you create a session with a stateful model, the model must create the session ID and set the expiration time. The model must also provide that information in the response to your request. You can get the ID and timestamp from the <code>NewSessionId</code> response parameter. For any subsequent request where you specify that session ID, SageMaker AI routes the request to the same instance that supports the session.</p>
    pub fn get_session_id(&self) -> &::std::option::Option<::std::string::String> {
        &self.session_id
    }
    /// Consumes the builder and constructs a [`InvokeEndpointInput`](crate::operation::invoke_endpoint::InvokeEndpointInput).
    // NOTE: as generated, this never returns `Err` — required-field checks
    // (e.g. `endpoint_name`, `body`) are enforced at request-serialization
    // time, not here; the `Result` signature is kept for API stability.
    pub fn build(
        self,
    ) -> ::std::result::Result<crate::operation::invoke_endpoint::InvokeEndpointInput, ::aws_smithy_types::error::operation::BuildError> {
        ::std::result::Result::Ok(crate::operation::invoke_endpoint::InvokeEndpointInput {
            endpoint_name: self.endpoint_name,
            body: self.body,
            content_type: self.content_type,
            accept: self.accept,
            custom_attributes: self.custom_attributes,
            target_model: self.target_model,
            target_variant: self.target_variant,
            target_container_hostname: self.target_container_hostname,
            inference_id: self.inference_id,
            enable_explanations: self.enable_explanations,
            inference_component_name: self.inference_component_name,
            session_id: self.session_id,
        })
    }
}
impl ::std::fmt::Debug for InvokeEndpointInputBuilder {
    /// Formats the builder for debugging, redacting the `body` and
    /// `custom_attributes` fields, which are marked sensitive in the model.
    fn fmt(&self, f: &mut ::std::fmt::Formatter<'_>) -> ::std::fmt::Result {
        f.debug_struct("InvokeEndpointInputBuilder")
            .field("endpoint_name", &self.endpoint_name)
            .field("body", &"*** Sensitive Data Redacted ***")
            .field("content_type", &self.content_type)
            .field("accept", &self.accept)
            .field("custom_attributes", &"*** Sensitive Data Redacted ***")
            .field("target_model", &self.target_model)
            .field("target_variant", &self.target_variant)
            .field("target_container_hostname", &self.target_container_hostname)
            .field("inference_id", &self.inference_id)
            .field("enable_explanations", &self.enable_explanations)
            .field("inference_component_name", &self.inference_component_name)
            .field("session_id", &self.session_id)
            .finish()
    }
}