use serde::{Deserialize, Serialize};
use serde_json::Value as JsonValue;
use crate::error::{QueryError, QueryResult};
use crate::filter::FilterValue;
use crate::sql::DatabaseType;
/// A typed path into a JSON document stored in a database column.
///
/// Built either programmatically (`JsonPath::new("col").field("a").index(0)`)
/// or parsed from a JSONPath-like string via [`JsonPath::from_path`].
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
pub struct JsonPath {
    /// Name of the database column holding the JSON value.
    pub column: String,
    /// Ordered path segments applied to the column, outermost first.
    pub segments: Vec<PathSegment>,
    /// When true, SQL renderers extract the value as text (e.g. `->>` in
    /// PostgreSQL, `JSON_UNQUOTE` in MySQL) rather than as a JSON value.
    pub as_text: bool,
}
/// One step of a [`JsonPath`].
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
pub enum PathSegment {
    /// Object key access (`.name`).
    Field(String),
    /// Array element access (`[0]`); the value is passed through verbatim.
    Index(i64),
    /// All elements of an array (`[*]`).
    Wildcard,
    /// Recursive descent (`..`); only some renderers support it.
    RecursiveDescent,
}
impl JsonPath {
pub fn new(column: impl Into<String>) -> Self {
Self {
column: column.into(),
segments: Vec::new(),
as_text: false,
}
}
/// Parses a JSONPath-like string (e.g. `"$.user.addresses[0].city"`) into a
/// path rooted at `column`.
///
/// Supports dotted field names, numeric indices (`[3]`), wildcards (`[*]`),
/// and multiple consecutive bracket groups in one segment (`matrix[0][1]`).
/// Malformed or unparsable bracket groups are skipped.
pub fn from_path(column: impl Into<String>, path: &str) -> Self {
    let mut json_path = Self::new(column);
    let path = path.trim_start_matches('$').trim_start_matches('.');
    for segment in path.split('.') {
        if segment.is_empty() {
            continue;
        }
        if let Some(bracket_pos) = segment.find('[') {
            let field_name = &segment[..bracket_pos];
            if !field_name.is_empty() {
                json_path
                    .segments
                    .push(PathSegment::Field(field_name.to_string()));
            }
            // Walk every `[...]` group so `a[0][1]` yields two Index
            // segments (previously only the first group was parsed).
            let mut rest = &segment[bracket_pos..];
            while let Some(open) = rest.find('[') {
                let close = match rest[open..].find(']') {
                    Some(off) => open + off,
                    None => break, // unterminated group: stop parsing brackets
                };
                let idx_str = &rest[open + 1..close];
                if idx_str == "*" {
                    json_path.segments.push(PathSegment::Wildcard);
                } else if let Ok(i) = idx_str.parse::<i64>() {
                    json_path.segments.push(PathSegment::Index(i));
                }
                rest = &rest[close + 1..];
            }
        } else {
            json_path
                .segments
                .push(PathSegment::Field(segment.to_string()));
        }
    }
    json_path
}
/// Appends an object-key segment and returns the updated path.
pub fn field(mut self, name: impl Into<String>) -> Self {
    let seg = PathSegment::Field(name.into());
    self.segments.push(seg);
    self
}

/// Appends an array-index segment and returns the updated path.
pub fn index(mut self, idx: i64) -> Self {
    let seg = PathSegment::Index(idx);
    self.segments.push(seg);
    self
}

/// Appends a wildcard (`[*]`) segment selecting every array element.
pub fn all(mut self) -> Self {
    let seg = PathSegment::Wildcard;
    self.segments.push(seg);
    self
}

/// Marks the path for extraction as text rather than as a JSON value.
pub fn text(mut self) -> Self {
    self.as_text = true;
    self
}
/// Renders the path as a PostgreSQL expression chaining `->` / `->>`.
///
/// When `as_text` is set, only the final segment uses the text-returning
/// `->>` operator. `Wildcard` wraps the expression in
/// `jsonb_array_elements`, `RecursiveDescent` in `jsonb_path_query`.
pub fn to_postgres_expr(&self) -> String {
    let mut expr = self.column.clone();
    let last_idx = self.segments.len().saturating_sub(1);
    for (i, segment) in self.segments.iter().enumerate() {
        // Compare positions, not values: the previous
        // `self.segments.last() == Some(segment)` check misfired when an
        // earlier segment equaled the final one (e.g.
        // `.field("a").field("a")` emitted `->>` for both).
        let is_last = i == last_idx;
        match segment {
            PathSegment::Field(name) => {
                if self.as_text && is_last {
                    expr.push_str(" ->> '");
                } else {
                    expr.push_str(" -> '");
                }
                expr.push_str(name);
                expr.push('\'');
            }
            PathSegment::Index(idx) => {
                if self.as_text && is_last {
                    expr.push_str(" ->> ");
                } else {
                    expr.push_str(" -> ");
                }
                expr.push_str(&idx.to_string());
            }
            PathSegment::Wildcard => {
                expr = format!("jsonb_array_elements({})", expr);
            }
            PathSegment::RecursiveDescent => {
                expr = format!("jsonb_path_query({}, '$.**')", expr);
            }
        }
    }
    expr
}
/// Renders the path for MySQL via `JSON_EXTRACT`, wrapping the result in
/// `JSON_UNQUOTE` when text extraction was requested.
pub fn to_mysql_expr(&self) -> String {
    let path = self.to_jsonpath_string();
    let extracted = format!("JSON_EXTRACT({}, '{}')", self.column, path);
    if self.as_text {
        format!("JSON_UNQUOTE({})", extracted)
    } else {
        extracted
    }
}
/// Renders the path for SQLite.
///
/// Text extraction uses `json_extract`, which returns SQL values for
/// scalars. JSON extraction uses the `->` operator (SQLite 3.38+), which
/// always returns a JSON representation — the previous `json(col, path)`
/// form was invalid SQL, since SQLite's `json()` takes a single argument.
pub fn to_sqlite_expr(&self) -> String {
    let path = self.to_jsonpath_string();
    if self.as_text {
        format!("json_extract({}, '{}')", self.column, path)
    } else {
        format!("{} -> '{}'", self.column, path)
    }
}
/// Renders the path for SQL Server.
///
/// NOTE(review): `JSON_VALUE` (text) returns scalar values only, while
/// `JSON_QUERY` (JSON) returns objects/arrays only and yields NULL for a
/// scalar target — callers must pick `as_text` to match the target's kind.
pub fn to_mssql_expr(&self) -> String {
    let path = self.to_jsonpath_string();
    if self.as_text {
        format!("JSON_VALUE({}, '{}')", self.column, path)
    } else {
        format!("JSON_QUERY({}, '{}')", self.column, path)
    }
}
/// Renders the segments as a standard JSONPath string rooted at `$`,
/// e.g. `$.user.addresses[0].city`.
pub fn to_jsonpath_string(&self) -> String {
    let mut path = String::from("$");
    for segment in &self.segments {
        match segment {
            PathSegment::Field(name) => {
                // A recursive-descent segment already leaves the string
                // ending in ".."; unconditionally appending another '.'
                // produced "$...name" before.
                if !path.ends_with('.') {
                    path.push('.');
                }
                path.push_str(name);
            }
            PathSegment::Index(idx) => {
                path.push('[');
                path.push_str(&idx.to_string());
                path.push(']');
            }
            PathSegment::Wildcard => {
                path.push_str("[*]");
            }
            PathSegment::RecursiveDescent => {
                path.push_str("..");
            }
        }
    }
    path
}
/// Renders the path in MongoDB dot notation (`column.field.0`).
///
/// Wildcards become `$`; recursive descent has no dot-notation form and is
/// silently skipped.
pub fn to_mongodb_path(&self) -> String {
    let mut parts = Vec::with_capacity(self.segments.len() + 1);
    parts.push(self.column.clone());
    for segment in &self.segments {
        let part = match segment {
            PathSegment::Field(name) => name.clone(),
            PathSegment::Index(idx) => idx.to_string(),
            PathSegment::Wildcard => "$".to_string(),
            PathSegment::RecursiveDescent => continue,
        };
        parts.push(part);
    }
    parts.join(".")
}
pub fn to_sql(&self, db_type: DatabaseType) -> String {
match db_type {
DatabaseType::PostgreSQL => self.to_postgres_expr(),
DatabaseType::MySQL => self.to_mysql_expr(),
DatabaseType::SQLite => self.to_sqlite_expr(),
DatabaseType::MSSQL => self.to_mssql_expr(),
}
}
}
/// A database-agnostic JSON predicate for WHERE clauses.
///
/// Variants carrying a [`JsonPath`] target a value inside the document;
/// variants carrying a `String` target the column as a whole.
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
pub enum JsonFilter {
    /// Value at path equals the JSON value.
    Equals(JsonPath, JsonValue),
    /// Value at path differs from the JSON value.
    NotEquals(JsonPath, JsonValue),
    /// Column document contains the given document (PG `@>`).
    Contains(String, JsonValue),
    /// Column document is contained by the given document (PG `<@`).
    ContainedBy(String, JsonValue),
    /// Column object has at least one of the listed top-level keys (PG `?|`).
    HasAnyKey(String, Vec<String>),
    /// Column object has all of the listed top-level keys (PG `?&`).
    HasAllKeys(String, Vec<String>),
    /// Column object has the given top-level key (PG `?`).
    HasKey(String, String),
    /// Numeric comparison of the value at path against the JSON value.
    GreaterThan(JsonPath, JsonValue),
    /// Numeric comparison of the value at path against the JSON value.
    LessThan(JsonPath, JsonValue),
    /// Value at path exists (rendered as IS NOT NULL).
    Exists(JsonPath),
    /// Value at path is SQL NULL.
    IsNull(JsonPath),
    /// Value at path is not SQL NULL.
    IsNotNull(JsonPath),
    /// Array at path contains the JSON value.
    ArrayContains(JsonPath, JsonValue),
    /// Raw jsonpath predicate matched against the column (PG `@?`).
    PathMatch(String, String),
}
impl JsonFilter {
pub fn equals(path: JsonPath, value: impl Into<JsonValue>) -> Self {
Self::Equals(path, value.into())
}
pub fn contains(column: impl Into<String>, value: impl Into<JsonValue>) -> Self {
Self::Contains(column.into(), value.into())
}
pub fn has_key(column: impl Into<String>, key: impl Into<String>) -> Self {
Self::HasKey(column.into(), key.into())
}
pub fn exists(path: JsonPath) -> Self {
Self::Exists(path)
}
/// Renders the filter as PostgreSQL SQL plus its bind parameters.
///
/// NOTE(review): placeholders are hard-coded as `$1`, `$2`, … relative to
/// this fragment; a caller composing several filters into one statement
/// must renumber them.
pub fn to_postgres_sql(&self) -> (String, Vec<FilterValue>) {
    let mut params = Vec::new();
    let sql = match self {
        Self::Equals(path, value) => {
            let expr = path.to_postgres_expr();
            params.push(FilterValue::Json(value.clone()));
            format!("{} = $1::jsonb", expr)
        }
        Self::NotEquals(path, value) => {
            let expr = path.to_postgres_expr();
            params.push(FilterValue::Json(value.clone()));
            format!("{} <> $1::jsonb", expr)
        }
        // `@>`: the column's document contains the given document.
        Self::Contains(col, value) => {
            params.push(FilterValue::Json(value.clone()));
            format!("{} @> $1::jsonb", col)
        }
        // `<@`: the column's document is contained by the given document.
        Self::ContainedBy(col, value) => {
            params.push(FilterValue::Json(value.clone()));
            format!("{} <@ $1::jsonb", col)
        }
        // `?`: top-level key existence.
        Self::HasKey(col, key) => {
            params.push(FilterValue::String(key.clone()));
            format!("{} ? $1", col)
        }
        // `?|`: any of the listed keys exists; one placeholder per key.
        Self::HasAnyKey(col, keys) => {
            let placeholders: Vec<String> =
                (1..=keys.len()).map(|i| format!("${}", i)).collect();
            for key in keys {
                params.push(FilterValue::String(key.clone()));
            }
            format!("{} ?| ARRAY[{}]", col, placeholders.join(", "))
        }
        // `?&`: all of the listed keys exist.
        Self::HasAllKeys(col, keys) => {
            let placeholders: Vec<String> =
                (1..=keys.len()).map(|i| format!("${}", i)).collect();
            for key in keys {
                params.push(FilterValue::String(key.clone()));
            }
            format!("{} ?& ARRAY[{}]", col, placeholders.join(", "))
        }
        // Numeric comparisons cast both sides to `numeric`; a non-numeric
        // JSON value at the path makes the cast fail at query time.
        Self::GreaterThan(path, value) => {
            let expr = path.to_postgres_expr();
            params.push(FilterValue::Json(value.clone()));
            format!("({})::numeric > ($1::jsonb)::numeric", expr)
        }
        Self::LessThan(path, value) => {
            let expr = path.to_postgres_expr();
            params.push(FilterValue::Json(value.clone()));
            format!("({})::numeric < ($1::jsonb)::numeric", expr)
        }
        // Exists and IsNotNull render identically; both kept for API clarity.
        Self::Exists(path) => {
            format!("{} IS NOT NULL", path.to_postgres_expr())
        }
        Self::IsNull(path) => {
            format!("{} IS NULL", path.to_postgres_expr())
        }
        Self::IsNotNull(path) => {
            format!("{} IS NOT NULL", path.to_postgres_expr())
        }
        Self::ArrayContains(path, value) => {
            params.push(FilterValue::Json(value.clone()));
            format!("{} @> $1::jsonb", path.to_postgres_expr())
        }
        // `@?`: does the jsonpath predicate match anything in the column.
        Self::PathMatch(col, predicate) => {
            params.push(FilterValue::String(predicate.clone()));
            format!("{} @? $1::jsonpath", col)
        }
    };
    (sql, params)
}
/// Renders the filter as MySQL SQL with `?` placeholders.
///
/// NOTE(review): unsupported variants fall through to the tautology
/// `"1=1"`, silently matching every row instead of erroring — use
/// [`JsonFilter::to_sql`] if unsupported filters should be surfaced.
pub fn to_mysql_sql(&self) -> (String, Vec<FilterValue>) {
    let mut params = Vec::new();
    let sql = match self {
        Self::Equals(path, value) => {
            let expr = path.to_mysql_expr();
            params.push(FilterValue::Json(value.clone()));
            format!("{} = CAST(? AS JSON)", expr)
        }
        Self::NotEquals(path, value) => {
            let expr = path.to_mysql_expr();
            params.push(FilterValue::Json(value.clone()));
            format!("{} <> CAST(? AS JSON)", expr)
        }
        Self::Contains(col, value) => {
            params.push(FilterValue::Json(value.clone()));
            format!("JSON_CONTAINS({}, ?)", col)
        }
        // Key existence is expressed as a path probe on `$.key`.
        Self::HasKey(col, key) => {
            params.push(FilterValue::String(format!("$.{}", key)));
            format!("JSON_CONTAINS_PATH({}, 'one', ?)", col)
        }
        Self::Exists(path) => {
            format!("{} IS NOT NULL", path.to_mysql_expr())
        }
        Self::IsNull(path) => {
            format!("{} IS NULL", path.to_mysql_expr())
        }
        Self::IsNotNull(path) => {
            format!("{} IS NOT NULL", path.to_mysql_expr())
        }
        // NOTE(review): matches against the whole column and ignores the
        // path's segments — confirm whether this should target the path.
        Self::ArrayContains(path, value) => {
            params.push(FilterValue::Json(value.clone()));
            format!("JSON_CONTAINS({}, ?)", path.column)
        }
        // Fallback: match-all; see the NOTE in the doc comment above.
        _ => "1=1".to_string(),
    };
    (sql, params)
}
/// Renders the filter for `db_type`, or `QueryError::unsupported` when the
/// variant has no translation for that backend.
pub fn to_sql(&self, db_type: DatabaseType) -> QueryResult<(String, Vec<FilterValue>)> {
    match db_type {
        DatabaseType::PostgreSQL => Ok(self.to_postgres_sql()),
        DatabaseType::MySQL => Ok(self.to_mysql_sql()),
        // SQLite and MSSQL support only equality and NULL checks here.
        DatabaseType::SQLite => match self {
            Self::Equals(path, value) => Ok((
                format!("{} = json(?)", path.to_sqlite_expr()),
                vec![FilterValue::Json(value.clone())],
            )),
            Self::IsNull(path) => {
                Ok((format!("{} IS NULL", path.to_sqlite_expr()), vec![]))
            }
            Self::IsNotNull(path) => {
                Ok((format!("{} IS NOT NULL", path.to_sqlite_expr()), vec![]))
            }
            _ => Err(QueryError::unsupported(
                "This JSON filter is not supported in SQLite",
            )),
        },
        DatabaseType::MSSQL => match self {
            Self::Equals(path, value) => Ok((
                format!("{} = ?", path.to_mssql_expr()),
                vec![FilterValue::Json(value.clone())],
            )),
            Self::IsNull(path) => {
                Ok((format!("{} IS NULL", path.to_mssql_expr()), vec![]))
            }
            Self::IsNotNull(path) => {
                Ok((format!("{} IS NOT NULL", path.to_mssql_expr()), vec![]))
            }
            _ => Err(QueryError::unsupported(
                "This JSON filter is not supported in MSSQL",
            )),
        },
    }
}
}
/// A database-agnostic mutation of a JSON column, rendered into the
/// backend's in-place JSON update functions.
///
/// `path` fields are dotted (`"a.b"`); an empty path or `"$"` means the
/// document root where a variant supports it.
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
pub enum JsonOp {
    /// Set the value at `path`, creating or replacing it.
    Set {
        column: String,
        path: String,
        value: JsonValue,
    },
    /// Set the value at `path` only if it does not already exist.
    Insert {
        column: String,
        path: String,
        value: JsonValue,
    },
    /// Set the value at `path` only if it already exists.
    Replace {
        column: String,
        path: String,
        value: JsonValue,
    },
    /// Delete the value at `path`.
    Remove { column: String, path: String },
    /// Append `value` to the array at `path`.
    ArrayAppend {
        column: String,
        path: String,
        value: JsonValue,
    },
    /// Prepend `value` to the array at `path`.
    ArrayPrepend {
        column: String,
        path: String,
        value: JsonValue,
    },
    /// Shallow-merge `value` into the document root.
    Merge { column: String, value: JsonValue },
    /// Add `amount` to the numeric value at `path`.
    Increment {
        column: String,
        path: String,
        amount: f64,
    },
}
impl JsonOp {
/// Builds a [`JsonOp::Set`] (upsert the value at `path`).
pub fn set(
    column: impl Into<String>,
    path: impl Into<String>,
    value: impl Into<JsonValue>,
) -> Self {
    Self::Set {
        column: column.into(),
        path: path.into(),
        value: value.into(),
    }
}
/// Builds a [`JsonOp::Insert`] (create-only set).
pub fn insert(
    column: impl Into<String>,
    path: impl Into<String>,
    value: impl Into<JsonValue>,
) -> Self {
    Self::Insert {
        column: column.into(),
        path: path.into(),
        value: value.into(),
    }
}
/// Builds a [`JsonOp::Remove`].
pub fn remove(column: impl Into<String>, path: impl Into<String>) -> Self {
    Self::Remove {
        column: column.into(),
        path: path.into(),
    }
}
/// Builds a [`JsonOp::ArrayAppend`]; pass `"$"` or `""` to append at the root.
pub fn array_append(
    column: impl Into<String>,
    path: impl Into<String>,
    value: impl Into<JsonValue>,
) -> Self {
    Self::ArrayAppend {
        column: column.into(),
        path: path.into(),
        value: value.into(),
    }
}
/// Builds a [`JsonOp::Merge`] (shallow root merge).
pub fn merge(column: impl Into<String>, value: impl Into<JsonValue>) -> Self {
    Self::Merge {
        column: column.into(),
        value: value.into(),
    }
}
/// Builds a [`JsonOp::Increment`].
pub fn increment(column: impl Into<String>, path: impl Into<String>, amount: f64) -> Self {
    Self::Increment {
        column: column.into(),
        path: path.into(),
        amount,
    }
}
/// Renders the mutation as a PostgreSQL expression producing the updated
/// jsonb value, plus its bind parameters (placeholder hard-coded as `$1`).
///
/// Dotted paths are converted to text-array form (`'{a,b}'`) for
/// `jsonb_set` / `#-` / `#>`.
///
/// NOTE(review): column names and paths are interpolated into the SQL
/// unescaped; they must come from trusted (developer) input, not users.
pub fn to_postgres_expr(&self) -> (String, Vec<FilterValue>) {
    let mut params = Vec::new();
    let expr = match self {
        Self::Set { column, path, value } => {
            params.push(FilterValue::Json(value.clone()));
            format!(
                "jsonb_set({}, '{{{}}}', $1::jsonb)",
                column,
                path.replace('.', ",")
            )
        }
        // `create_missing = true`: creates the key if absent.
        Self::Insert { column, path, value } => {
            params.push(FilterValue::Json(value.clone()));
            format!(
                "jsonb_set({}, '{{{}}}', $1::jsonb, true)",
                column,
                path.replace('.', ",")
            )
        }
        // `create_missing = false`: only replaces an existing key.
        Self::Replace { column, path, value } => {
            params.push(FilterValue::Json(value.clone()));
            format!(
                "jsonb_set({}, '{{{}}}', $1::jsonb, false)",
                column,
                path.replace('.', ",")
            )
        }
        Self::Remove { column, path } => {
            // (stray trailing space after the quoted path removed)
            format!("{} #- '{{{}}}'", column, path.replace('.', ","))
        }
        Self::ArrayAppend { column, path, value } => {
            params.push(FilterValue::Json(value.clone()));
            if path.is_empty() || path == "$" {
                format!("{} || $1::jsonb", column)
            } else {
                // Read the current array back with `#>`, which walks the
                // full dotted path; the previous `-> '{path}'` treated
                // "a.b" as one literal key and broke nested paths.
                format!(
                    "jsonb_set({}, '{{{}}}', ({} #> '{{{}}}') || $1::jsonb)",
                    column,
                    path.replace('.', ","),
                    column,
                    path.replace('.', ",")
                )
            }
        }
        Self::ArrayPrepend { column, path, value } => {
            params.push(FilterValue::Json(value.clone()));
            if path.is_empty() || path == "$" {
                format!("$1::jsonb || {}", column)
            } else {
                format!(
                    "jsonb_set({}, '{{{}}}', $1::jsonb || ({} #> '{{{}}}'))",
                    column,
                    path.replace('.', ","),
                    column,
                    path.replace('.', ",")
                )
            }
        }
        // Shallow merge via the `||` concatenation operator.
        Self::Merge { column, value } => {
            params.push(FilterValue::Json(value.clone()));
            format!("{} || $1::jsonb", column)
        }
        Self::Increment { column, path, amount } => {
            params.push(FilterValue::Float(*amount));
            format!(
                "jsonb_set({}, '{{{}}}', to_jsonb((({} #> '{{{}}}')::numeric + $1)))",
                column,
                path.replace('.', ","),
                column,
                path.replace('.', ",")
            )
        }
    };
    (expr, params)
}
/// Renders the mutation as a MySQL expression producing the updated JSON
/// value, with `?` bind placeholders. Dotted paths map to `'$.{path}'`,
/// which MySQL interprets as nested keys.
///
/// NOTE(review): unsupported variants (`ArrayPrepend`, `Increment`) fall
/// through to the bare column name — a silent no-op assignment; use
/// [`JsonOp::to_sql`] if an error is preferred.
pub fn to_mysql_expr(&self) -> (String, Vec<FilterValue>) {
    let mut params = Vec::new();
    let expr = match self {
        Self::Set {
            column,
            path,
            value,
        } => {
            params.push(FilterValue::Json(value.clone()));
            format!("JSON_SET({}, '$.{}', CAST(? AS JSON))", column, path)
        }
        Self::Insert {
            column,
            path,
            value,
        } => {
            params.push(FilterValue::Json(value.clone()));
            format!("JSON_INSERT({}, '$.{}', CAST(? AS JSON))", column, path)
        }
        Self::Replace {
            column,
            path,
            value,
        } => {
            params.push(FilterValue::Json(value.clone()));
            format!("JSON_REPLACE({}, '$.{}', CAST(? AS JSON))", column, path)
        }
        Self::Remove { column, path } => {
            format!("JSON_REMOVE({}, '$.{}')", column, path)
        }
        Self::ArrayAppend {
            column,
            path,
            value,
        } => {
            params.push(FilterValue::Json(value.clone()));
            if path.is_empty() || path == "$" {
                format!("JSON_ARRAY_APPEND({}, '$', CAST(? AS JSON))", column)
            } else {
                format!(
                    "JSON_ARRAY_APPEND({}, '$.{}', CAST(? AS JSON))",
                    column, path
                )
            }
        }
        Self::Merge { column, value } => {
            params.push(FilterValue::Json(value.clone()));
            format!("JSON_MERGE_PATCH({}, CAST(? AS JSON))", column)
        }
        // Fallback: evaluates to the unchanged column (no-op update).
        _ => column_name_from_op(self).to_string(),
    };
    (expr, params)
}
/// Renders the operation for `db_type`; combinations without a translation
/// yield `QueryError::unsupported`.
pub fn to_sql(&self, db_type: DatabaseType) -> QueryResult<(String, Vec<FilterValue>)> {
    match db_type {
        DatabaseType::PostgreSQL => Ok(self.to_postgres_expr()),
        DatabaseType::MySQL => Ok(self.to_mysql_expr()),
        // SQLite supports set/remove only.
        DatabaseType::SQLite => match self {
            Self::Set { column, path, value } => Ok((
                format!("json_set({}, '$.{}', json(?))", column, path),
                vec![FilterValue::Json(value.clone())],
            )),
            Self::Remove { column, path } => Ok((
                format!("json_remove({}, '$.{}')", column, path),
                vec![],
            )),
            _ => Err(QueryError::unsupported(
                "This JSON operation is not supported in SQLite",
            )),
        },
        // MSSQL supports set only, via JSON_MODIFY.
        DatabaseType::MSSQL => match self {
            Self::Set { column, path, value } => Ok((
                format!("JSON_MODIFY({}, '$.{}', JSON_QUERY(?))", column, path),
                vec![FilterValue::Json(value.clone())],
            )),
            _ => Err(QueryError::unsupported(
                "This JSON operation is not supported in MSSQL",
            )),
        },
    }
}
}
fn column_name_from_op(op: &JsonOp) -> &str {
match op {
JsonOp::Set { column, .. }
| JsonOp::Insert { column, .. }
| JsonOp::Replace { column, .. }
| JsonOp::Remove { column, .. }
| JsonOp::ArrayAppend { column, .. }
| JsonOp::ArrayPrepend { column, .. }
| JsonOp::Merge { column, .. }
| JsonOp::Increment { column, .. } => column,
}
}
/// SQL JSON aggregate / constructor expressions.
///
/// `BuildObject` pairs are `(key literal, value SQL expression)`;
/// `BuildArray` elements are raw SQL expressions. Neither is escaped.
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
pub enum JsonAgg {
    /// Aggregate a column's rows into a JSON array.
    ArrayAgg {
        column: String,
        distinct: bool,
        order_by: Option<String>,
    },
    /// Aggregate two columns' rows into a JSON object.
    ObjectAgg {
        key_column: String,
        value_column: String,
    },
    /// Build a JSON object from literal-key / expression pairs.
    BuildObject { pairs: Vec<(String, String)> },
    /// Build a JSON array from SQL expressions.
    BuildArray { elements: Vec<String> },
}
impl JsonAgg {
pub fn array_agg(column: impl Into<String>) -> Self {
Self::ArrayAgg {
column: column.into(),
distinct: false,
order_by: None,
}
}
pub fn object_agg(key: impl Into<String>, value: impl Into<String>) -> Self {
Self::ObjectAgg {
key_column: key.into(),
value_column: value.into(),
}
}
pub fn to_postgres_sql(&self) -> String {
match self {
Self::ArrayAgg {
column,
distinct,
order_by,
} => {
let mut sql = String::from("jsonb_agg(");
if *distinct {
sql.push_str("DISTINCT ");
}
sql.push_str(column);
if let Some(order) = order_by {
sql.push_str(" ORDER BY ");
sql.push_str(order);
}
sql.push(')');
sql
}
Self::ObjectAgg {
key_column,
value_column,
} => {
format!("jsonb_object_agg({}, {})", key_column, value_column)
}
Self::BuildObject { pairs } => {
let args: Vec<String> = pairs
.iter()
.flat_map(|(k, v)| vec![format!("'{}'", k), v.clone()])
.collect();
format!("jsonb_build_object({})", args.join(", "))
}
Self::BuildArray { elements } => {
format!("jsonb_build_array({})", elements.join(", "))
}
}
}
/// Renders the aggregate for MySQL.
///
/// NOTE(review): `distinct` and `order_by` are silently ignored for
/// `ArrayAgg` here, so results can differ from the PostgreSQL rendering —
/// confirm whether that limitation should be surfaced to callers.
pub fn to_mysql_sql(&self) -> String {
    match self {
        Self::ArrayAgg { column, .. } => {
            format!("JSON_ARRAYAGG({})", column)
        }
        Self::ObjectAgg {
            key_column,
            value_column,
        } => {
            format!("JSON_OBJECTAGG({}, {})", key_column, value_column)
        }
        Self::BuildObject { pairs } => {
            // Keys become single-quoted literals; values are emitted verbatim.
            let args: Vec<String> = pairs
                .iter()
                .flat_map(|(k, v)| vec![format!("'{}'", k), v.clone()])
                .collect();
            format!("JSON_OBJECT({})", args.join(", "))
        }
        Self::BuildArray { elements } => {
            format!("JSON_ARRAY({})", elements.join(", "))
        }
    }
}
/// Renders the aggregate for `db_type`. MSSQL has no direct equivalents,
/// so a SQL comment suggesting `FOR JSON` is returned instead.
pub fn to_sql(&self, db_type: DatabaseType) -> String {
    match db_type {
        DatabaseType::PostgreSQL => self.to_postgres_sql(),
        DatabaseType::MySQL => self.to_mysql_sql(),
        DatabaseType::SQLite => match self {
            Self::ArrayAgg { column, .. } => format!("json_group_array({})", column),
            Self::ObjectAgg {
                key_column,
                value_column,
            } => format!("json_group_object({}, {})", key_column, value_column),
            Self::BuildObject { pairs } => {
                let args = pairs
                    .iter()
                    .map(|(k, v)| format!("'{}', {}", k, v))
                    .collect::<Vec<_>>()
                    .join(", ");
                format!("json_object({})", args)
            }
            Self::BuildArray { elements } => {
                format!("json_array({})", elements.join(", "))
            }
        },
        DatabaseType::MSSQL => {
            if matches!(self, Self::ArrayAgg { .. }) {
                "-- Use FOR JSON AUTO".to_string()
            } else {
                "-- Use FOR JSON PATH".to_string()
            }
        }
    }
}
}
pub mod mongodb {
use serde::{Deserialize, Serialize};
use serde_json::Value as JsonValue;
/// MongoDB field-level update operators (`$set`, `$inc`, …).
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
pub enum UpdateOp {
    /// `$set`: assign a value to a field.
    Set(String, JsonValue),
    /// `$unset`: remove a field.
    Unset(String),
    /// `$inc`: add to a numeric field.
    Inc(String, f64),
    /// `$mul`: multiply a numeric field.
    Mul(String, f64),
    /// `$rename`: rename a field (old name, new name).
    Rename(String, String),
    /// `$currentDate`: set the field to the current date.
    CurrentDate(String),
    /// `$min`: keep the smaller of the current and given values.
    Min(String, JsonValue),
    /// `$max`: keep the larger of the current and given values.
    Max(String, JsonValue),
    /// `$setOnInsert`: set only when an upsert performs an insert.
    SetOnInsert(String, JsonValue),
}
/// MongoDB array update operators (`$push`, `$pull`, …).
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
pub enum ArrayOp {
    /// `$push` one value, optionally at a position (`$position` modifier).
    Push {
        field: String,
        value: JsonValue,
        position: Option<i32>,
    },
    /// `$push` with `$each`: append several values.
    PushAll {
        field: String,
        values: Vec<JsonValue>,
    },
    /// `$pull`: remove all elements matching the value.
    Pull { field: String, value: JsonValue },
    /// `$pullAll`: remove every listed value.
    PullAll {
        field: String,
        values: Vec<JsonValue>,
    },
    /// `$addToSet`: append the value unless already present.
    AddToSet { field: String, value: JsonValue },
    /// `$addToSet` with `$each`: set-append several values.
    AddToSetAll {
        field: String,
        values: Vec<JsonValue>,
    },
    /// `$pop`: remove the first (`first == true`) or last element.
    Pop { field: String, first: bool },
}
impl UpdateOp {
    /// `$set`: assign `value` to `field`.
    pub fn set(field: impl Into<String>, value: impl Into<JsonValue>) -> Self {
        Self::Set(field.into(), value.into())
    }
    /// `$unset`: remove `field` from the document.
    pub fn unset(field: impl Into<String>) -> Self {
        Self::Unset(field.into())
    }
    /// `$inc`: add `amount` to the numeric value at `field`.
    pub fn inc(field: impl Into<String>, amount: f64) -> Self {
        Self::Inc(field.into(), amount)
    }
    /// Converts the operator into its MongoDB update-document form,
    /// e.g. `{"$set": {"name": value}}`.
    pub fn to_bson(&self) -> serde_json::Value {
        match self {
            Self::Set(field, value) => serde_json::json!({ "$set": { field: value } }),
            // `$unset` ignores its payload; "" is the MongoDB convention.
            Self::Unset(field) => serde_json::json!({ "$unset": { field: "" } }),
            Self::Inc(field, amount) => serde_json::json!({ "$inc": { field: amount } }),
            Self::Mul(field, amount) => serde_json::json!({ "$mul": { field: amount } }),
            Self::Rename(old, new) => serde_json::json!({ "$rename": { old: new } }),
            Self::CurrentDate(field) => serde_json::json!({ "$currentDate": { field: true } }),
            Self::Min(field, value) => serde_json::json!({ "$min": { field: value } }),
            Self::Max(field, value) => serde_json::json!({ "$max": { field: value } }),
            Self::SetOnInsert(field, value) => {
                serde_json::json!({ "$setOnInsert": { field: value } })
            }
        }
    }
}
impl ArrayOp {
    /// `$push`: append `value` to the array at `field`.
    pub fn push(field: impl Into<String>, value: impl Into<JsonValue>) -> Self {
        Self::Push {
            field: field.into(),
            value: value.into(),
            position: None,
        }
    }
    /// `$pull`: remove all elements matching `value`.
    pub fn pull(field: impl Into<String>, value: impl Into<JsonValue>) -> Self {
        Self::Pull {
            field: field.into(),
            value: value.into(),
        }
    }
    /// `$addToSet`: append `value` unless it is already present.
    pub fn add_to_set(field: impl Into<String>, value: impl Into<JsonValue>) -> Self {
        Self::AddToSet {
            field: field.into(),
            value: value.into(),
        }
    }
    /// Converts the operator into its MongoDB update-document form.
    pub fn to_bson(&self) -> serde_json::Value {
        match self {
            Self::Push {
                field,
                value,
                position,
            } => {
                // A positional push needs the `$each` + `$position` modifiers.
                if let Some(pos) = position {
                    serde_json::json!({
                        "$push": { field: { "$each": [value], "$position": pos } }
                    })
                } else {
                    serde_json::json!({ "$push": { field: value } })
                }
            }
            Self::PushAll { field, values } => {
                serde_json::json!({ "$push": { field: { "$each": values } } })
            }
            Self::Pull { field, value } => {
                serde_json::json!({ "$pull": { field: value } })
            }
            Self::PullAll { field, values } => {
                serde_json::json!({ "$pullAll": { field: values } })
            }
            Self::AddToSet { field, value } => {
                serde_json::json!({ "$addToSet": { field: value } })
            }
            Self::AddToSetAll { field, values } => {
                serde_json::json!({ "$addToSet": { field: { "$each": values } } })
            }
            // `$pop`: -1 removes the first element, 1 removes the last.
            Self::Pop { field, first } => {
                let direction = if *first { -1 } else { 1 };
                serde_json::json!({ "$pop": { field: direction } })
            }
        }
    }
}
/// Accumulates MongoDB update operators and merges them into a single
/// update document via [`UpdateBuilder::build`].
#[derive(Debug, Clone, Default)]
pub struct UpdateBuilder {
    // Each entry is one operator document, e.g. `{"$set": {"a": 1}}`.
    ops: Vec<serde_json::Value>,
}
impl UpdateBuilder {
    /// Creates an empty builder.
    pub fn new() -> Self {
        Self::default()
    }
    /// Queues a `$set` for `field`.
    pub fn set(mut self, field: impl Into<String>, value: impl Into<JsonValue>) -> Self {
        self.ops.push(UpdateOp::set(field, value).to_bson());
        self
    }
    /// Queues a `$unset` for `field`.
    pub fn unset(mut self, field: impl Into<String>) -> Self {
        self.ops.push(UpdateOp::unset(field).to_bson());
        self
    }
    /// Queues a `$inc` for `field`.
    pub fn inc(mut self, field: impl Into<String>, amount: f64) -> Self {
        self.ops.push(UpdateOp::inc(field, amount).to_bson());
        self
    }
    /// Queues a `$push` for `field`.
    pub fn push(mut self, field: impl Into<String>, value: impl Into<JsonValue>) -> Self {
        self.ops.push(ArrayOp::push(field, value).to_bson());
        self
    }
    /// Queues a `$pull` for `field`.
    pub fn pull(mut self, field: impl Into<String>, value: impl Into<JsonValue>) -> Self {
        self.ops.push(ArrayOp::pull(field, value).to_bson());
        self
    }
    /// Queues an `$addToSet` for `field`.
    pub fn add_to_set(mut self, field: impl Into<String>, value: impl Into<JsonValue>) -> Self {
        self.ops.push(ArrayOp::add_to_set(field, value).to_bson());
        self
    }
    /// Merges all queued operations into one update document, combining
    /// entries that share an operator key (e.g. two `$set`s become one).
    ///
    /// NOTE(review): when an operator key repeats and either side is not a
    /// JSON object, the later value is silently dropped. That cannot occur
    /// with documents produced by this builder, which are always of the
    /// form `{op: {field: value}}`.
    pub fn build(self) -> serde_json::Value {
        let mut result = serde_json::Map::new();
        for op in self.ops {
            if let serde_json::Value::Object(map) = op {
                for (key, value) in map {
                    if let Some(existing) = result.get_mut(&key) {
                        if let (
                            serde_json::Value::Object(existing_map),
                            serde_json::Value::Object(new_map),
                        ) = (existing, value)
                        {
                            for (k, v) in new_map {
                                existing_map.insert(k, v);
                            }
                        }
                    } else {
                        result.insert(key, value);
                    }
                }
            }
        }
        serde_json::Value::Object(result)
    }
}
/// Convenience entry point: `update().set(...).inc(...).build()`.
pub fn update() -> UpdateBuilder {
    UpdateBuilder::new()
}
}
/// Definition of a database index over a JSON column (optionally over a
/// single path inside it).
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
pub struct JsonIndex {
    /// Index name.
    pub name: String,
    /// Table the index is created on.
    pub table: String,
    /// JSON column being indexed.
    pub column: String,
    /// Optional dotted path inside the column; `None` indexes the whole value.
    pub path: Option<String>,
    /// PostgreSQL access method: GIN when true, BTREE otherwise.
    pub gin: bool,
}
impl JsonIndex {
    /// Starts a [`JsonIndexBuilder`] with the given index name.
    pub fn builder(name: impl Into<String>) -> JsonIndexBuilder {
        JsonIndexBuilder::new(name)
    }
    /// Renders a `CREATE INDEX` statement for PostgreSQL.
    ///
    /// With a path, indexes the extracted sub-value `(column -> 'path')`
    /// using GIN or BTREE per `self.gin`; without one, always uses GIN on
    /// the whole column. NOTE(review): identifiers and the path are
    /// interpolated unescaped — trusted input only.
    pub fn to_postgres_sql(&self) -> String {
        if let Some(ref path) = self.path {
            format!(
                "CREATE INDEX {} ON {} USING {} (({} -> '{}'));",
                self.name,
                self.table,
                if self.gin { "GIN" } else { "BTREE" },
                self.column,
                path
            )
        } else {
            format!(
                "CREATE INDEX {} ON {} USING GIN ({});",
                self.name, self.table, self.column
            )
        }
    }
    /// Renders the MySQL equivalent: a stored generated column extracting
    /// the path, then a plain index on that column. The two statements are
    /// returned in execution order. Without a path, returns a comment —
    /// MySQL cannot index a whole JSON column directly.
    pub fn to_mysql_sql(&self) -> Vec<String> {
        if let Some(ref path) = self.path {
            // Generated-column name derived from table/column/path.
            let gen_col = format!("{}_{}_{}", self.table, self.column, path.replace('.', "_"));
            vec![
                format!(
                    "ALTER TABLE {} ADD COLUMN {} VARCHAR(255) GENERATED ALWAYS AS (JSON_UNQUOTE(JSON_EXTRACT({}, '$.{}'))) STORED;",
                    self.table, gen_col, self.column, path
                ),
                format!(
                    "CREATE INDEX {} ON {} ({});",
                    self.name, self.table, gen_col
                ),
            ]
        } else {
            vec!["-- MySQL requires generated columns for JSON indexing".to_string()]
        }
    }
}
/// Fluent builder for [`JsonIndex`]; `table` and `column` are mandatory.
#[derive(Debug, Clone)]
pub struct JsonIndexBuilder {
    // Index name, fixed at construction.
    name: String,
    // Required: target table.
    table: Option<String>,
    // Required: JSON column.
    column: Option<String>,
    // Optional sub-path to index.
    path: Option<String>,
    // Access method flag; defaults to GIN.
    gin: bool,
}
impl JsonIndexBuilder {
    /// Creates a builder for an index called `name` (GIN by default).
    pub fn new(name: impl Into<String>) -> Self {
        Self {
            name: name.into(),
            table: None,
            column: None,
            path: None,
            gin: true,
        }
    }
    /// Sets the target table (required).
    pub fn on_table(mut self, table: impl Into<String>) -> Self {
        self.table = Some(table.into());
        self
    }
    /// Sets the JSON column (required).
    pub fn column(mut self, column: impl Into<String>) -> Self {
        self.column = Some(column.into());
        self
    }
    /// Restricts the index to a dotted path inside the column.
    pub fn path(mut self, path: impl Into<String>) -> Self {
        self.path = Some(path.into());
        self
    }
    /// Selects the GIN access method (the default).
    pub fn gin(mut self) -> Self {
        self.gin = true;
        self
    }
    /// Selects the BTREE access method.
    pub fn btree(mut self) -> Self {
        self.gin = false;
        self
    }
    /// Validates required fields and produces the [`JsonIndex`].
    ///
    /// # Errors
    /// Returns `QueryError::invalid_input` when `table` or `column` is unset.
    pub fn build(self) -> QueryResult<JsonIndex> {
        let table = self.table.ok_or_else(|| {
            QueryError::invalid_input("table", "Must specify table with on_table()")
        })?;
        let column = self.column.ok_or_else(|| {
            QueryError::invalid_input("column", "Must specify column with column()")
        })?;
        Ok(JsonIndex {
            name: self.name,
            table,
            column,
            path: self.path,
            gin: self.gin,
        })
    }
}
// Unit tests: path construction/rendering, filter and op SQL generation,
// aggregates, index DDL, and the MongoDB update-document builders.
#[cfg(test)]
mod tests {
    use super::*;
    // --- JsonPath construction and rendering ---
    #[test]
    fn test_json_path_basic() {
        let path = JsonPath::new("metadata").field("user").field("name");
        assert_eq!(path.to_jsonpath_string(), "$.user.name");
    }
    #[test]
    fn test_json_path_with_index() {
        let path = JsonPath::new("items").field("tags").index(0);
        assert_eq!(path.to_jsonpath_string(), "$.tags[0]");
    }
    #[test]
    fn test_json_path_from_string() {
        let path = JsonPath::from_path("data", "$.user.addresses[0].city");
        assert_eq!(path.segments.len(), 4);
        assert_eq!(path.to_jsonpath_string(), "$.user.addresses[0].city");
    }
    #[test]
    fn test_postgres_path_expr() {
        // `text()` should switch the final operator to `->>`.
        let path = JsonPath::new("metadata").field("role").text();
        let expr = path.to_postgres_expr();
        assert!(expr.contains(" ->> "));
    }
    #[test]
    fn test_mysql_path_expr() {
        let path = JsonPath::new("data").field("name").text();
        let expr = path.to_mysql_expr();
        assert!(expr.contains("JSON_UNQUOTE"));
        assert!(expr.contains("JSON_EXTRACT"));
    }
    #[test]
    fn test_mongodb_path() {
        let path = JsonPath::new("address").field("city");
        assert_eq!(path.to_mongodb_path(), "address.city");
    }
    // --- JsonFilter SQL generation ---
    #[test]
    fn test_json_filter_contains() {
        let filter = JsonFilter::contains("metadata", serde_json::json!({"role": "admin"}));
        let (sql, params) = filter.to_postgres_sql();
        assert!(sql.contains("@>"));
        assert_eq!(params.len(), 1);
    }
    #[test]
    fn test_json_filter_has_key() {
        let filter = JsonFilter::has_key("settings", "theme");
        let (sql, params) = filter.to_postgres_sql();
        assert!(sql.contains("?"));
        assert_eq!(params.len(), 1);
    }
    // --- JsonOp SQL generation ---
    #[test]
    fn test_json_op_set() {
        let op = JsonOp::set("metadata", "theme", serde_json::json!("dark"));
        let (expr, params) = op.to_postgres_expr();
        assert!(expr.contains("jsonb_set"));
        assert_eq!(params.len(), 1);
    }
    #[test]
    fn test_json_op_remove() {
        let op = JsonOp::remove("metadata", "old_field");
        let (expr, _) = op.to_postgres_expr();
        assert!(expr.contains("#-"));
    }
    #[test]
    fn test_json_op_array_append() {
        let op = JsonOp::array_append("tags", "$", serde_json::json!("new_tag"));
        let (expr, params) = op.to_postgres_expr();
        assert!(expr.contains("||"));
        assert_eq!(params.len(), 1);
    }
    #[test]
    fn test_json_op_merge() {
        let op = JsonOp::merge("settings", serde_json::json!({"new_key": "value"}));
        let (expr, params) = op.to_postgres_expr();
        assert!(expr.contains("||"));
        assert_eq!(params.len(), 1);
    }
    // --- JsonAgg rendering ---
    #[test]
    fn test_json_agg_array() {
        let agg = JsonAgg::array_agg("name");
        let sql = agg.to_postgres_sql();
        assert_eq!(sql, "jsonb_agg(name)");
    }
    #[test]
    fn test_json_agg_object() {
        let agg = JsonAgg::object_agg("key", "value");
        let sql = agg.to_postgres_sql();
        assert_eq!(sql, "jsonb_object_agg(key, value)");
    }
    // --- JsonIndex DDL ---
    #[test]
    fn test_json_index_postgres() {
        let index = JsonIndex::builder("users_metadata_idx")
            .on_table("users")
            .column("metadata")
            .gin()
            .build()
            .unwrap();
        let sql = index.to_postgres_sql();
        assert!(sql.contains("USING GIN"));
    }
    #[test]
    fn test_json_index_with_path() {
        let index = JsonIndex::builder("users_role_idx")
            .on_table("users")
            .column("metadata")
            .path("role")
            .btree()
            .build()
            .unwrap();
        let sql = index.to_postgres_sql();
        assert!(sql.contains("USING BTREE"));
        assert!(sql.contains("-> 'role'"));
    }
    // --- MongoDB update-document builders ---
    mod mongodb_tests {
        use super::super::mongodb::*;
        #[test]
        fn test_update_set() {
            let op = UpdateOp::set("name", "John");
            let bson = op.to_bson();
            assert!(bson["$set"]["name"].is_string());
        }
        #[test]
        fn test_update_inc() {
            let op = UpdateOp::inc("count", 1.0);
            let bson = op.to_bson();
            assert_eq!(bson["$inc"]["count"], 1.0);
        }
        #[test]
        fn test_array_push() {
            let op = ArrayOp::push("tags", "new_tag");
            let bson = op.to_bson();
            assert!(bson["$push"]["tags"].is_string());
        }
        #[test]
        fn test_array_add_to_set() {
            let op = ArrayOp::add_to_set("roles", "admin");
            let bson = op.to_bson();
            assert!(bson["$addToSet"]["roles"].is_string());
        }
        #[test]
        fn test_update_builder() {
            // Distinct operators should land under their own top-level keys.
            let update = update()
                .set("name", "John")
                .inc("visits", 1.0)
                .push("tags", "active")
                .build();
            assert!(update["$set"].is_object());
            assert!(update["$inc"].is_object());
            assert!(update["$push"].is_object());
        }
    }
}