aws_sdk_kinesis/operation/create_stream/_create_stream_input.rs

1// Code generated by software.amazon.smithy.rust.codegen.smithy-rs. DO NOT EDIT.
2
/// <p>Represents the input for <code>CreateStream</code>.</p>
#[non_exhaustive]
#[derive(::std::clone::Clone, ::std::cmp::PartialEq, ::std::fmt::Debug)]
pub struct CreateStreamInput {
    /// <p>A name to identify the stream. The stream name is scoped to the Amazon Web Services account used by the application that creates the stream. It is also scoped by Amazon Web Services Region. That is, two streams in two different Amazon Web Services accounts can have the same name. Two streams in the same Amazon Web Services account but in two different Regions can also have the same name.</p>
    pub stream_name: ::std::option::Option<::std::string::String>,
    /// <p>The number of shards that the stream will use. The throughput of the stream is a function of the number of shards; more shards are required for greater provisioned throughput.</p>
    pub shard_count: ::std::option::Option<i32>,
    /// <p>Indicates the capacity mode of the data stream. Currently, in Kinesis Data Streams, you can choose between an <b>on-demand</b> capacity mode and a <b>provisioned</b> capacity mode for your data streams.</p>
    pub stream_mode_details: ::std::option::Option<crate::types::StreamModeDetails>,
    /// <p>A set of up to 50 key-value pairs to use to create the tags. A tag consists of a required key and an optional value.</p>
    pub tags: ::std::option::Option<::std::collections::HashMap<::std::string::String, ::std::string::String>>,
    /// <p>The target warm throughput in MB/s that the stream should be scaled to handle. This represents the throughput capacity that will be immediately available for write operations.</p>
    // NOTE(review): the doc text says "MB/s" while the field-name suffix `mibps` reads as MiB/s —
    // confirm the intended unit against the upstream Smithy service model before relying on it.
    pub warm_throughput_mibps: ::std::option::Option<i32>,
    /// <p>The maximum record size of a single record in kibibyte (KiB) that you can write to, and read from a stream.</p>
    pub max_record_size_in_kib: ::std::option::Option<i32>,
}
20impl CreateStreamInput {
21    /// <p>A name to identify the stream. The stream name is scoped to the Amazon Web Services account used by the application that creates the stream. It is also scoped by Amazon Web Services Region. That is, two streams in two different Amazon Web Services accounts can have the same name. Two streams in the same Amazon Web Services account but in two different Regions can also have the same name.</p>
22    pub fn stream_name(&self) -> ::std::option::Option<&str> {
23        self.stream_name.as_deref()
24    }
25    /// <p>The number of shards that the stream will use. The throughput of the stream is a function of the number of shards; more shards are required for greater provisioned throughput.</p>
26    pub fn shard_count(&self) -> ::std::option::Option<i32> {
27        self.shard_count
28    }
29    /// <p>Indicates the capacity mode of the data stream. Currently, in Kinesis Data Streams, you can choose between an <b>on-demand</b> capacity mode and a <b>provisioned</b> capacity mode for your data streams.</p>
30    pub fn stream_mode_details(&self) -> ::std::option::Option<&crate::types::StreamModeDetails> {
31        self.stream_mode_details.as_ref()
32    }
33    /// <p>A set of up to 50 key-value pairs to use to create the tags. A tag consists of a required key and an optional value.</p>
34    pub fn tags(&self) -> ::std::option::Option<&::std::collections::HashMap<::std::string::String, ::std::string::String>> {
35        self.tags.as_ref()
36    }
37    /// <p>The target warm throughput in MB/s that the stream should be scaled to handle. This represents the throughput capacity that will be immediately available for write operations.</p>
38    pub fn warm_throughput_mibps(&self) -> ::std::option::Option<i32> {
39        self.warm_throughput_mibps
40    }
41    /// <p>The maximum record size of a single record in kibibyte (KiB) that you can write to, and read from a stream.</p>
42    pub fn max_record_size_in_kib(&self) -> ::std::option::Option<i32> {
43        self.max_record_size_in_kib
44    }
45}
46impl CreateStreamInput {
47    /// Creates a new builder-style object to manufacture [`CreateStreamInput`](crate::operation::create_stream::CreateStreamInput).
48    pub fn builder() -> crate::operation::create_stream::builders::CreateStreamInputBuilder {
49        crate::operation::create_stream::builders::CreateStreamInputBuilder::default()
50    }
51}
52
/// A builder for [`CreateStreamInput`](crate::operation::create_stream::CreateStreamInput).
#[derive(::std::clone::Clone, ::std::cmp::PartialEq, ::std::default::Default, ::std::fmt::Debug)]
#[non_exhaustive]
pub struct CreateStreamInputBuilder {
    // Documented as "required" on the setter, but `build()` in this file returns `Ok`
    // unconditionally — a missing name is not rejected here.
    pub(crate) stream_name: ::std::option::Option<::std::string::String>,
    // Shard count; see the field docs on `CreateStreamInput`.
    pub(crate) shard_count: ::std::option::Option<i32>,
    // On-demand vs. provisioned capacity mode.
    pub(crate) stream_mode_details: ::std::option::Option<crate::types::StreamModeDetails>,
    // Up to 50 key-value tags applied at stream creation.
    pub(crate) tags: ::std::option::Option<::std::collections::HashMap<::std::string::String, ::std::string::String>>,
    // Target warm throughput (unit per service model; field name suggests MiB/s).
    pub(crate) warm_throughput_mibps: ::std::option::Option<i32>,
    // Maximum record size in KiB.
    pub(crate) max_record_size_in_kib: ::std::option::Option<i32>,
}
64impl CreateStreamInputBuilder {
65    /// <p>A name to identify the stream. The stream name is scoped to the Amazon Web Services account used by the application that creates the stream. It is also scoped by Amazon Web Services Region. That is, two streams in two different Amazon Web Services accounts can have the same name. Two streams in the same Amazon Web Services account but in two different Regions can also have the same name.</p>
66    /// This field is required.
67    pub fn stream_name(mut self, input: impl ::std::convert::Into<::std::string::String>) -> Self {
68        self.stream_name = ::std::option::Option::Some(input.into());
69        self
70    }
71    /// <p>A name to identify the stream. The stream name is scoped to the Amazon Web Services account used by the application that creates the stream. It is also scoped by Amazon Web Services Region. That is, two streams in two different Amazon Web Services accounts can have the same name. Two streams in the same Amazon Web Services account but in two different Regions can also have the same name.</p>
72    pub fn set_stream_name(mut self, input: ::std::option::Option<::std::string::String>) -> Self {
73        self.stream_name = input;
74        self
75    }
76    /// <p>A name to identify the stream. The stream name is scoped to the Amazon Web Services account used by the application that creates the stream. It is also scoped by Amazon Web Services Region. That is, two streams in two different Amazon Web Services accounts can have the same name. Two streams in the same Amazon Web Services account but in two different Regions can also have the same name.</p>
77    pub fn get_stream_name(&self) -> &::std::option::Option<::std::string::String> {
78        &self.stream_name
79    }
80    /// <p>The number of shards that the stream will use. The throughput of the stream is a function of the number of shards; more shards are required for greater provisioned throughput.</p>
81    pub fn shard_count(mut self, input: i32) -> Self {
82        self.shard_count = ::std::option::Option::Some(input);
83        self
84    }
85    /// <p>The number of shards that the stream will use. The throughput of the stream is a function of the number of shards; more shards are required for greater provisioned throughput.</p>
86    pub fn set_shard_count(mut self, input: ::std::option::Option<i32>) -> Self {
87        self.shard_count = input;
88        self
89    }
90    /// <p>The number of shards that the stream will use. The throughput of the stream is a function of the number of shards; more shards are required for greater provisioned throughput.</p>
91    pub fn get_shard_count(&self) -> &::std::option::Option<i32> {
92        &self.shard_count
93    }
94    /// <p>Indicates the capacity mode of the data stream. Currently, in Kinesis Data Streams, you can choose between an <b>on-demand</b> capacity mode and a <b>provisioned</b> capacity mode for your data streams.</p>
95    pub fn stream_mode_details(mut self, input: crate::types::StreamModeDetails) -> Self {
96        self.stream_mode_details = ::std::option::Option::Some(input);
97        self
98    }
99    /// <p>Indicates the capacity mode of the data stream. Currently, in Kinesis Data Streams, you can choose between an <b>on-demand</b> capacity mode and a <b>provisioned</b> capacity mode for your data streams.</p>
100    pub fn set_stream_mode_details(mut self, input: ::std::option::Option<crate::types::StreamModeDetails>) -> Self {
101        self.stream_mode_details = input;
102        self
103    }
104    /// <p>Indicates the capacity mode of the data stream. Currently, in Kinesis Data Streams, you can choose between an <b>on-demand</b> capacity mode and a <b>provisioned</b> capacity mode for your data streams.</p>
105    pub fn get_stream_mode_details(&self) -> &::std::option::Option<crate::types::StreamModeDetails> {
106        &self.stream_mode_details
107    }
108    /// Adds a key-value pair to `tags`.
109    ///
110    /// To override the contents of this collection use [`set_tags`](Self::set_tags).
111    ///
112    /// <p>A set of up to 50 key-value pairs to use to create the tags. A tag consists of a required key and an optional value.</p>
113    pub fn tags(mut self, k: impl ::std::convert::Into<::std::string::String>, v: impl ::std::convert::Into<::std::string::String>) -> Self {
114        let mut hash_map = self.tags.unwrap_or_default();
115        hash_map.insert(k.into(), v.into());
116        self.tags = ::std::option::Option::Some(hash_map);
117        self
118    }
119    /// <p>A set of up to 50 key-value pairs to use to create the tags. A tag consists of a required key and an optional value.</p>
120    pub fn set_tags(mut self, input: ::std::option::Option<::std::collections::HashMap<::std::string::String, ::std::string::String>>) -> Self {
121        self.tags = input;
122        self
123    }
124    /// <p>A set of up to 50 key-value pairs to use to create the tags. A tag consists of a required key and an optional value.</p>
125    pub fn get_tags(&self) -> &::std::option::Option<::std::collections::HashMap<::std::string::String, ::std::string::String>> {
126        &self.tags
127    }
128    /// <p>The target warm throughput in MB/s that the stream should be scaled to handle. This represents the throughput capacity that will be immediately available for write operations.</p>
129    pub fn warm_throughput_mibps(mut self, input: i32) -> Self {
130        self.warm_throughput_mibps = ::std::option::Option::Some(input);
131        self
132    }
133    /// <p>The target warm throughput in MB/s that the stream should be scaled to handle. This represents the throughput capacity that will be immediately available for write operations.</p>
134    pub fn set_warm_throughput_mibps(mut self, input: ::std::option::Option<i32>) -> Self {
135        self.warm_throughput_mibps = input;
136        self
137    }
138    /// <p>The target warm throughput in MB/s that the stream should be scaled to handle. This represents the throughput capacity that will be immediately available for write operations.</p>
139    pub fn get_warm_throughput_mibps(&self) -> &::std::option::Option<i32> {
140        &self.warm_throughput_mibps
141    }
142    /// <p>The maximum record size of a single record in kibibyte (KiB) that you can write to, and read from a stream.</p>
143    pub fn max_record_size_in_kib(mut self, input: i32) -> Self {
144        self.max_record_size_in_kib = ::std::option::Option::Some(input);
145        self
146    }
147    /// <p>The maximum record size of a single record in kibibyte (KiB) that you can write to, and read from a stream.</p>
148    pub fn set_max_record_size_in_kib(mut self, input: ::std::option::Option<i32>) -> Self {
149        self.max_record_size_in_kib = input;
150        self
151    }
152    /// <p>The maximum record size of a single record in kibibyte (KiB) that you can write to, and read from a stream.</p>
153    pub fn get_max_record_size_in_kib(&self) -> &::std::option::Option<i32> {
154        &self.max_record_size_in_kib
155    }
156    /// Consumes the builder and constructs a [`CreateStreamInput`](crate::operation::create_stream::CreateStreamInput).
157    pub fn build(
158        self,
159    ) -> ::std::result::Result<crate::operation::create_stream::CreateStreamInput, ::aws_smithy_types::error::operation::BuildError> {
160        ::std::result::Result::Ok(crate::operation::create_stream::CreateStreamInput {
161            stream_name: self.stream_name,
162            shard_count: self.shard_count,
163            stream_mode_details: self.stream_mode_details,
164            tags: self.tags,
165            warm_throughput_mibps: self.warm_throughput_mibps,
166            max_record_size_in_kib: self.max_record_size_in_kib,
167        })
168    }
169}