/// Serializes a `SparkSql` transform node into the JSON object `object`.
///
/// Required members (`Name`, `Inputs`, `SqlQuery`, `SqlAliases`) are always
/// written; the optional `OutputSchemas` list is written only when present.
pub fn ser_spark_sql(
    object: &mut ::aws_smithy_json::serialize::JsonObjectWriter,
    input: &crate::types::SparkSql,
) -> Result<(), ::aws_smithy_types::error::operation::SerializationError> {
    // Scalar string member: node name.
    object.key("Name").string(input.name.as_str());

    // List of plain strings: names of the data inputs feeding this node.
    let mut inputs_writer = object.key("Inputs").start_array();
    for input_name in &input.inputs {
        inputs_writer.value().string(input_name.as_str());
    }
    inputs_writer.finish();

    // Scalar string member: the SQL text itself.
    object.key("SqlQuery").string(input.sql_query.as_str());

    // List of structures; each element is serialized by its shape serializer.
    let mut aliases_writer = object.key("SqlAliases").start_array();
    for alias in &input.sql_aliases {
        #[allow(unused_mut)]
        let mut alias_object = aliases_writer.value().start_object();
        crate::protocol_serde::shape_sql_alias::ser_sql_alias(&mut alias_object, alias)?;
        alias_object.finish();
    }
    aliases_writer.finish();

    // Optional list of structures: omitted entirely when `None`.
    if let Some(schemas) = &input.output_schemas {
        let mut schemas_writer = object.key("OutputSchemas").start_array();
        for schema in schemas {
            #[allow(unused_mut)]
            let mut schema_object = schemas_writer.value().start_object();
            crate::protocol_serde::shape_glue_schema::ser_glue_schema(&mut schema_object, schema)?;
            schema_object.finish();
        }
        schemas_writer.finish();
    }
    Ok(())
}
/// Deserializes a `SparkSql` structure from the JSON token stream.
///
/// Returns `Ok(None)` for a JSON `null`, `Ok(Some(_))` for a well-formed
/// object, and an error for any other leading token or for a malformed body.
/// Unrecognized object keys are skipped for forward compatibility.
pub(crate) fn de_spark_sql<'a, I>(
    tokens: &mut ::std::iter::Peekable<I>,
) -> Result<Option<crate::types::SparkSql>, ::aws_smithy_json::deserialize::error::DeserializeError>
where
    I: Iterator<Item = Result<::aws_smithy_json::deserialize::Token<'a>, ::aws_smithy_json::deserialize::error::DeserializeError>>,
{
    match tokens.next().transpose()? {
        Some(::aws_smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
        Some(::aws_smithy_json::deserialize::Token::StartObject { .. }) => {
            #[allow(unused_mut)]
            let mut builder = crate::types::builders::SparkSqlBuilder::default();
            // Consume key/value pairs until the matching end-of-object token.
            loop {
                let token = tokens.next().transpose()?;
                match token {
                    Some(::aws_smithy_json::deserialize::Token::EndObject { .. }) => break,
                    Some(::aws_smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
                        match key.to_unescaped()?.as_ref() {
                            "Name" => {
                                let name = ::aws_smithy_json::deserialize::token::expect_string_or_null(tokens.next())?
                                    .map(|s| s.to_unescaped().map(|u| u.into_owned()))
                                    .transpose()?;
                                builder = builder.set_name(name);
                            }
                            "Inputs" => {
                                let inputs = crate::protocol_serde::shape_many_inputs::de_many_inputs(tokens)?;
                                builder = builder.set_inputs(inputs);
                            }
                            "SqlQuery" => {
                                let sql_query = ::aws_smithy_json::deserialize::token::expect_string_or_null(tokens.next())?
                                    .map(|s| s.to_unescaped().map(|u| u.into_owned()))
                                    .transpose()?;
                                builder = builder.set_sql_query(sql_query);
                            }
                            "SqlAliases" => {
                                let aliases = crate::protocol_serde::shape_sql_aliases::de_sql_aliases(tokens)?;
                                builder = builder.set_sql_aliases(aliases);
                            }
                            "OutputSchemas" => {
                                let schemas = crate::protocol_serde::shape_glue_schemas::de_glue_schemas(tokens)?;
                                builder = builder.set_output_schemas(schemas);
                            }
                            // Unknown member: skip its entire value.
                            _ => ::aws_smithy_json::deserialize::token::skip_value(tokens)?,
                        }
                    }
                    other => {
                        return Err(::aws_smithy_json::deserialize::error::DeserializeError::custom(format!(
                            "expected object key or end object, found: {:?}",
                            other
                        )))
                    }
                }
            }
            // Backfill server-side defaults, then build; a build failure means
            // the response was missing required members.
            Ok(Some(crate::serde_util::spark_sql_correct_errors(builder).build().map_err(|err| {
                ::aws_smithy_json::deserialize::error::DeserializeError::custom_source("Response was invalid", err)
            })?))
        }
        _ => Err(::aws_smithy_json::deserialize::error::DeserializeError::custom(
            "expected start object or null",
        )),
    }
}