#[non_exhaustive]
pub struct KafkaSettingsBuilder { /* private fields */ }
Expand description

A builder for KafkaSettings.

Implementations§

source§

impl KafkaSettingsBuilder

source

pub fn broker(self, input: impl Into<String>) -> Self

A comma-separated list of one or more broker locations in your Kafka cluster that host your Kafka instance. Specify each broker location in the form broker-hostname-or-ip:port. For example, "ec2-12-345-678-901.compute-1.amazonaws.com:2345". For more information and examples of specifying a list of broker locations, see Using Apache Kafka as a target for Database Migration Service in the Database Migration Service User Guide.

source

pub fn set_broker(self, input: Option<String>) -> Self

A comma-separated list of one or more broker locations in your Kafka cluster that host your Kafka instance. Specify each broker location in the form broker-hostname-or-ip:port. For example, "ec2-12-345-678-901.compute-1.amazonaws.com:2345". For more information and examples of specifying a list of broker locations, see Using Apache Kafka as a target for Database Migration Service in the Database Migration Service User Guide.

source

pub fn get_broker(&self) -> &Option<String>

A comma-separated list of one or more broker locations in your Kafka cluster that host your Kafka instance. Specify each broker location in the form broker-hostname-or-ip:port. For example, "ec2-12-345-678-901.compute-1.amazonaws.com:2345". For more information and examples of specifying a list of broker locations, see Using Apache Kafka as a target for Database Migration Service in the Database Migration Service User Guide.

source

pub fn topic(self, input: impl Into<String>) -> Self

The topic to which you migrate the data. If you don't specify a topic, DMS specifies "kafka-default-topic" as the migration topic.

source

pub fn set_topic(self, input: Option<String>) -> Self

The topic to which you migrate the data. If you don't specify a topic, DMS specifies "kafka-default-topic" as the migration topic.

source

pub fn get_topic(&self) -> &Option<String>

The topic to which you migrate the data. If you don't specify a topic, DMS specifies "kafka-default-topic" as the migration topic.

source

pub fn message_format(self, input: MessageFormatValue) -> Self

The output format for the records created on the endpoint. The message format is JSON (default) or JSON_UNFORMATTED (a single line with no tab).

source

pub fn set_message_format(self, input: Option<MessageFormatValue>) -> Self

The output format for the records created on the endpoint. The message format is JSON (default) or JSON_UNFORMATTED (a single line with no tab).

source

pub fn get_message_format(&self) -> &Option<MessageFormatValue>

The output format for the records created on the endpoint. The message format is JSON (default) or JSON_UNFORMATTED (a single line with no tab).

source

pub fn include_transaction_details(self, input: bool) -> Self

Provides detailed transaction information from the source database. This information includes a commit timestamp, a log position, and values for transaction_id, previous transaction_id, and transaction_record_id (the record offset within a transaction). The default is false.

source

pub fn set_include_transaction_details(self, input: Option<bool>) -> Self

Provides detailed transaction information from the source database. This information includes a commit timestamp, a log position, and values for transaction_id, previous transaction_id, and transaction_record_id (the record offset within a transaction). The default is false.

source

pub fn get_include_transaction_details(&self) -> &Option<bool>

Provides detailed transaction information from the source database. This information includes a commit timestamp, a log position, and values for transaction_id, previous transaction_id, and transaction_record_id (the record offset within a transaction). The default is false.

source

pub fn include_partition_value(self, input: bool) -> Self

Shows the partition value within the Kafka message output unless the partition type is schema-table-type. The default is false.

source

pub fn set_include_partition_value(self, input: Option<bool>) -> Self

Shows the partition value within the Kafka message output unless the partition type is schema-table-type. The default is false.

source

pub fn get_include_partition_value(&self) -> &Option<bool>

Shows the partition value within the Kafka message output unless the partition type is schema-table-type. The default is false.

source

pub fn partition_include_schema_table(self, input: bool) -> Self

Prefixes schema and table names to partition values, when the partition type is primary-key-type. Doing this increases data distribution among Kafka partitions. For example, suppose that a SysBench schema has thousands of tables and each table has only limited range for a primary key. In this case, the same primary key is sent from thousands of tables to the same partition, which causes throttling. The default is false.

source

pub fn set_partition_include_schema_table(self, input: Option<bool>) -> Self

Prefixes schema and table names to partition values, when the partition type is primary-key-type. Doing this increases data distribution among Kafka partitions. For example, suppose that a SysBench schema has thousands of tables and each table has only limited range for a primary key. In this case, the same primary key is sent from thousands of tables to the same partition, which causes throttling. The default is false.

source

pub fn get_partition_include_schema_table(&self) -> &Option<bool>

Prefixes schema and table names to partition values, when the partition type is primary-key-type. Doing this increases data distribution among Kafka partitions. For example, suppose that a SysBench schema has thousands of tables and each table has only limited range for a primary key. In this case, the same primary key is sent from thousands of tables to the same partition, which causes throttling. The default is false.

source

pub fn include_table_alter_operations(self, input: bool) -> Self

Includes any data definition language (DDL) operations that change the table in the control data, such as rename-table, drop-table, add-column, drop-column, and rename-column. The default is false.

source

pub fn set_include_table_alter_operations(self, input: Option<bool>) -> Self

Includes any data definition language (DDL) operations that change the table in the control data, such as rename-table, drop-table, add-column, drop-column, and rename-column. The default is false.

source

pub fn get_include_table_alter_operations(&self) -> &Option<bool>

Includes any data definition language (DDL) operations that change the table in the control data, such as rename-table, drop-table, add-column, drop-column, and rename-column. The default is false.

source

pub fn include_control_details(self, input: bool) -> Self

Shows detailed control information for table definition, column definition, and table and column changes in the Kafka message output. The default is false.

source

pub fn set_include_control_details(self, input: Option<bool>) -> Self

Shows detailed control information for table definition, column definition, and table and column changes in the Kafka message output. The default is false.

source

pub fn get_include_control_details(&self) -> &Option<bool>

Shows detailed control information for table definition, column definition, and table and column changes in the Kafka message output. The default is false.

source

pub fn message_max_bytes(self, input: i32) -> Self

The maximum size in bytes for records created on the endpoint. The default is 1,000,000.

source

pub fn set_message_max_bytes(self, input: Option<i32>) -> Self

The maximum size in bytes for records created on the endpoint. The default is 1,000,000.

source

pub fn get_message_max_bytes(&self) -> &Option<i32>

The maximum size in bytes for records created on the endpoint. The default is 1,000,000.

source

pub fn include_null_and_empty(self, input: bool) -> Self

Include NULL and empty columns for records migrated to the endpoint. The default is false.

source

pub fn set_include_null_and_empty(self, input: Option<bool>) -> Self

Include NULL and empty columns for records migrated to the endpoint. The default is false.

source

pub fn get_include_null_and_empty(&self) -> &Option<bool>

Include NULL and empty columns for records migrated to the endpoint. The default is false.

source

pub fn security_protocol(self, input: KafkaSecurityProtocol) -> Self

Set secure connection to a Kafka target endpoint using Transport Layer Security (TLS). Options include ssl-encryption, ssl-authentication, and sasl-ssl. sasl-ssl requires SaslUsername and SaslPassword.

source

pub fn set_security_protocol(self, input: Option<KafkaSecurityProtocol>) -> Self

Set secure connection to a Kafka target endpoint using Transport Layer Security (TLS). Options include ssl-encryption, ssl-authentication, and sasl-ssl. sasl-ssl requires SaslUsername and SaslPassword.

source

pub fn get_security_protocol(&self) -> &Option<KafkaSecurityProtocol>

Set secure connection to a Kafka target endpoint using Transport Layer Security (TLS). Options include ssl-encryption, ssl-authentication, and sasl-ssl. sasl-ssl requires SaslUsername and SaslPassword.

source

pub fn ssl_client_certificate_arn(self, input: impl Into<String>) -> Self

The Amazon Resource Name (ARN) of the client certificate used to securely connect to a Kafka target endpoint.

source

pub fn set_ssl_client_certificate_arn(self, input: Option<String>) -> Self

The Amazon Resource Name (ARN) of the client certificate used to securely connect to a Kafka target endpoint.

source

pub fn get_ssl_client_certificate_arn(&self) -> &Option<String>

The Amazon Resource Name (ARN) of the client certificate used to securely connect to a Kafka target endpoint.

source

pub fn ssl_client_key_arn(self, input: impl Into<String>) -> Self

The Amazon Resource Name (ARN) for the client private key used to securely connect to a Kafka target endpoint.

source

pub fn set_ssl_client_key_arn(self, input: Option<String>) -> Self

The Amazon Resource Name (ARN) for the client private key used to securely connect to a Kafka target endpoint.

source

pub fn get_ssl_client_key_arn(&self) -> &Option<String>

The Amazon Resource Name (ARN) for the client private key used to securely connect to a Kafka target endpoint.

source

pub fn ssl_client_key_password(self, input: impl Into<String>) -> Self

The password for the client private key used to securely connect to a Kafka target endpoint.

source

pub fn set_ssl_client_key_password(self, input: Option<String>) -> Self

The password for the client private key used to securely connect to a Kafka target endpoint.

source

pub fn get_ssl_client_key_password(&self) -> &Option<String>

The password for the client private key used to securely connect to a Kafka target endpoint.

source

pub fn ssl_ca_certificate_arn(self, input: impl Into<String>) -> Self

The Amazon Resource Name (ARN) for the private certificate authority (CA) cert that DMS uses to securely connect to your Kafka target endpoint.

source

pub fn set_ssl_ca_certificate_arn(self, input: Option<String>) -> Self

The Amazon Resource Name (ARN) for the private certificate authority (CA) cert that DMS uses to securely connect to your Kafka target endpoint.

source

pub fn get_ssl_ca_certificate_arn(&self) -> &Option<String>

The Amazon Resource Name (ARN) for the private certificate authority (CA) cert that DMS uses to securely connect to your Kafka target endpoint.

source

pub fn sasl_username(self, input: impl Into<String>) -> Self

The secure user name you created when you first set up your MSK cluster to validate a client identity and make an encrypted connection between server and client using SASL-SSL authentication.

source

pub fn set_sasl_username(self, input: Option<String>) -> Self

The secure user name you created when you first set up your MSK cluster to validate a client identity and make an encrypted connection between server and client using SASL-SSL authentication.

source

pub fn get_sasl_username(&self) -> &Option<String>

The secure user name you created when you first set up your MSK cluster to validate a client identity and make an encrypted connection between server and client using SASL-SSL authentication.

source

pub fn sasl_password(self, input: impl Into<String>) -> Self

The secure password you created when you first set up your MSK cluster to validate a client identity and make an encrypted connection between server and client using SASL-SSL authentication.

source

pub fn set_sasl_password(self, input: Option<String>) -> Self

The secure password you created when you first set up your MSK cluster to validate a client identity and make an encrypted connection between server and client using SASL-SSL authentication.

source

pub fn get_sasl_password(&self) -> &Option<String>

The secure password you created when you first set up your MSK cluster to validate a client identity and make an encrypted connection between server and client using SASL-SSL authentication.

source

pub fn no_hex_prefix(self, input: bool) -> Self

Set this optional parameter to true to avoid adding a '0x' prefix to raw data in hexadecimal format. For example, by default, DMS adds a '0x' prefix to the LOB column type in hexadecimal format moving from an Oracle source to a Kafka target. Use the NoHexPrefix endpoint setting to enable migration of RAW data type columns without adding the '0x' prefix.

source

pub fn set_no_hex_prefix(self, input: Option<bool>) -> Self

Set this optional parameter to true to avoid adding a '0x' prefix to raw data in hexadecimal format. For example, by default, DMS adds a '0x' prefix to the LOB column type in hexadecimal format moving from an Oracle source to a Kafka target. Use the NoHexPrefix endpoint setting to enable migration of RAW data type columns without adding the '0x' prefix.

source

pub fn get_no_hex_prefix(&self) -> &Option<bool>

Set this optional parameter to true to avoid adding a '0x' prefix to raw data in hexadecimal format. For example, by default, DMS adds a '0x' prefix to the LOB column type in hexadecimal format moving from an Oracle source to a Kafka target. Use the NoHexPrefix endpoint setting to enable migration of RAW data type columns without adding the '0x' prefix.

source

pub fn sasl_mechanism(self, input: KafkaSaslMechanism) -> Self

For SASL/SSL authentication, DMS supports the SCRAM-SHA-512 mechanism by default. DMS versions 3.5.0 and later also support the PLAIN mechanism. To use the PLAIN mechanism, set this parameter to PLAIN.

source

pub fn set_sasl_mechanism(self, input: Option<KafkaSaslMechanism>) -> Self

For SASL/SSL authentication, DMS supports the SCRAM-SHA-512 mechanism by default. DMS versions 3.5.0 and later also support the PLAIN mechanism. To use the PLAIN mechanism, set this parameter to PLAIN.

source

pub fn get_sasl_mechanism(&self) -> &Option<KafkaSaslMechanism>

For SASL/SSL authentication, DMS supports the SCRAM-SHA-512 mechanism by default. DMS versions 3.5.0 and later also support the PLAIN mechanism. To use the PLAIN mechanism, set this parameter to PLAIN.

source

pub fn ssl_endpoint_identification_algorithm( self, input: KafkaSslEndpointIdentificationAlgorithm ) -> Self

Sets hostname verification for the certificate. This setting is supported in DMS version 3.5.1 and later.

source

pub fn set_ssl_endpoint_identification_algorithm( self, input: Option<KafkaSslEndpointIdentificationAlgorithm> ) -> Self

Sets hostname verification for the certificate. This setting is supported in DMS version 3.5.1 and later.

source

pub fn get_ssl_endpoint_identification_algorithm( &self ) -> &Option<KafkaSslEndpointIdentificationAlgorithm>

Sets hostname verification for the certificate. This setting is supported in DMS version 3.5.1 and later.

source

pub fn build(self) -> KafkaSettings

Consumes the builder and constructs a KafkaSettings.

Trait Implementations§

source§

impl Clone for KafkaSettingsBuilder

source§

fn clone(&self) -> KafkaSettingsBuilder

Returns a copy of the value. Read more
1.0.0 · source§

fn clone_from(&mut self, source: &Self)

Performs copy-assignment from source. Read more
source§

impl Debug for KafkaSettingsBuilder

source§

fn fmt(&self, f: &mut Formatter<'_>) -> Result

Formats the value using the given formatter. Read more
source§

impl Default for KafkaSettingsBuilder

source§

fn default() -> KafkaSettingsBuilder

Returns the “default value” for a type. Read more
source§

impl PartialEq for KafkaSettingsBuilder

source§

fn eq(&self, other: &KafkaSettingsBuilder) -> bool

This method tests for self and other values to be equal, and is used by ==.
1.0.0 · source§

fn ne(&self, other: &Rhs) -> bool

This method tests for !=. The default implementation is almost always sufficient, and should not be overridden without very good reason.
source§

impl StructuralPartialEq for KafkaSettingsBuilder

Auto Trait Implementations§

Blanket Implementations§

source§

impl<T> Any for T
where T: 'static + ?Sized,

source§

fn type_id(&self) -> TypeId

Gets the TypeId of self. Read more
source§

impl<T> Borrow<T> for T
where T: ?Sized,

source§

fn borrow(&self) -> &T

Immutably borrows from an owned value. Read more
source§

impl<T> BorrowMut<T> for T
where T: ?Sized,

source§

fn borrow_mut(&mut self) -> &mut T

Mutably borrows from an owned value. Read more
source§

impl<T> From<T> for T

source§

fn from(t: T) -> T

Returns the argument unchanged.

source§

impl<T> Instrument for T

source§

fn instrument(self, span: Span) -> Instrumented<Self>

Instruments this type with the provided Span, returning an Instrumented wrapper. Read more
source§

fn in_current_span(self) -> Instrumented<Self>

Instruments this type with the current Span, returning an Instrumented wrapper. Read more
source§

impl<T, U> Into<U> for T
where U: From<T>,

source§

fn into(self) -> U

Calls U::from(self).

That is, this conversion is whatever the implementation of From<T> for U chooses to do.

source§

impl<Unshared, Shared> IntoShared<Shared> for Unshared
where Shared: FromUnshared<Unshared>,

source§

fn into_shared(self) -> Shared

Creates a shared type from an unshared type.
source§

impl<T> Same for T

§

type Output = T

Should always be Self
source§

impl<T> ToOwned for T
where T: Clone,

§

type Owned = T

The resulting type after obtaining ownership.
source§

fn to_owned(&self) -> T

Creates owned data from borrowed data, usually by cloning. Read more
source§

fn clone_into(&self, target: &mut T)

Uses borrowed data to replace owned data, usually by cloning. Read more
source§

impl<T, U> TryFrom<U> for T
where U: Into<T>,

§

type Error = Infallible

The type returned in the event of a conversion error.
source§

fn try_from(value: U) -> Result<T, <T as TryFrom<U>>::Error>

Performs the conversion.
source§

impl<T, U> TryInto<U> for T
where U: TryFrom<T>,

§

type Error = <U as TryFrom<T>>::Error

The type returned in the event of a conversion error.
source§

fn try_into(self) -> Result<U, <U as TryFrom<T>>::Error>

Performs the conversion.
source§

impl<T> WithSubscriber for T

source§

fn with_subscriber<S>(self, subscriber: S) -> WithDispatch<Self>
where S: Into<Dispatch>,

Attaches the provided Subscriber to this type, returning a WithDispatch wrapper. Read more
source§

fn with_current_subscriber(self) -> WithDispatch<Self>

Attaches the current default Subscriber to this type, returning a WithDispatch wrapper. Read more