use arrow_schema::ArrowError;
use datafusion_common::DataFusionError;
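
/// Spark-compatible error type. The messages mirror Spark's error classes
/// (e.g. `CAST_INVALID_INPUT`, `CAST_OVERFLOW`) and reference
/// `spark.sql.ansi.enabled` so that ANSI-mode failures surface with the
/// same wording Spark produces.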
#[derive(thiserror::Error, Debug)]
pub enum SparkError {
    #[error("[CAST_INVALID_INPUT] The value '{value}' of the type \"{from_type}\" cannot be cast to \"{to_type}\" \
        because it is malformed. Correct the value as per the syntax, or change its target type. \
        Use `try_cast` to tolerate malformed input and return NULL instead. If necessary \
        set \"spark.sql.ansi.enabled\" to \"false\" to bypass this error.")]
    CastInvalidValue {
        value: String,
        from_type: String,
        to_type: String,
    },

    #[error("[NUMERIC_VALUE_OUT_OF_RANGE] {value} cannot be represented as Decimal({precision}, {scale}). If necessary set \"spark.sql.ansi.enabled\" to \"false\" to bypass this error, and return NULL instead.")]
    NumericValueOutOfRange {
        value: String,
        precision: u8,
        scale: i8,
    },

    #[error("[CAST_OVERFLOW] The value {value} of the type \"{from_type}\" cannot be cast to \"{to_type}\" \
        due to an overflow. Use `try_cast` to tolerate overflow and return NULL instead. If necessary \
        set \"spark.sql.ansi.enabled\" to \"false\" to bypass this error.")]
    CastOverFlow {
        value: String,
        from_type: String,
        to_type: String,
    },

    #[error("[ARITHMETIC_OVERFLOW] {from_type} overflow. If necessary set \"spark.sql.ansi.enabled\" to \"false\" to bypass this error.")]
    ArithmeticOverflow { from_type: String },

    #[error("ArrowError: {0}.")]
    Arrow(ArrowError),

    #[error("InternalError: {0}.")]
    Internal(String),
}

pub type SparkResult<T> = Result<T, SparkError>;

impl From<ArrowError> for SparkError {
    fn from(value: ArrowError) -> Self {
        SparkError::Arrow(value)
    }
}

impl From<SparkError> for DataFusionError {
    fn from(value: SparkError) -> Self {
        DataFusionError::External(Box::new(value))
    }
}
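
// A minimal usage sketch (not part of the original file): it shows a
// `SparkError` being rendered through the `#[error(...)]` Display impl and
// converted into a `DataFusionError` via the `From` impl above. The test
// name and the example values ("abc", "STRING", "INT") are illustrative only.
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn cast_invalid_value_converts_to_datafusion_error() {
        let err = SparkError::CastInvalidValue {
            value: "abc".to_string(),
            from_type: "STRING".to_string(),
            to_type: "INT".to_string(),
        };

        // Display comes from the #[error(...)] attribute on the variant.
        assert!(err.to_string().contains("[CAST_INVALID_INPUT]"));

        // `SparkError` implements std::error::Error (via thiserror), so it is
        // boxed into `DataFusionError::External` by the From impl above.
        let df_err: DataFusionError = err.into();
        assert!(matches!(df_err, DataFusionError::External(_)));
    }
}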