use std::collections::HashMap;
use std::fmt::Write;
use std::path::Path;
use scythe_backend::manifest::{BackendManifest, load_manifest};
use scythe_backend::naming::{
enum_type_name, enum_variant_name, fn_name, row_struct_name, to_pascal_case, to_snake_case,
};
use scythe_backend::types::resolve_type;
use scythe_core::analyzer::{AnalyzedQuery, CompositeInfo, EnumInfo};
use scythe_core::errors::{ErrorCode, ScytheError};
use scythe_core::parser::QueryCommand;
use crate::backend_trait::{CodegenBackend, ResolvedColumn, ResolvedParam};
use crate::singularize;
use super::python_common::PythonRowType;
/// Manifest compiled into the binary; used as a fallback when no on-disk
/// manifest override exists (see `PythonAsyncpgBackend::new`).
const DEFAULT_MANIFEST_TOML: &str = include_str!("../../manifests/python-asyncpg.toml");
/// Codegen backend that emits asyncpg-based async Python for PostgreSQL.
pub struct PythonAsyncpgBackend {
    // Type-mapping and naming rules, loaded from disk or the embedded default.
    manifest: BackendManifest,
    // Flavor of Python class emitted for row/composite types
    // (configurable via the `row_type` backend option).
    row_type: PythonRowType,
}
impl PythonAsyncpgBackend {
    /// Build a backend for the given database engine.
    ///
    /// Only PostgreSQL is accepted (aliases `"postgresql"`, `"postgres"`,
    /// `"pg"`); any other engine name yields an `InternalError`. The manifest
    /// is read from `backends/python-asyncpg/manifest.toml` when that path
    /// exists (relative to the current working directory — NOTE(review):
    /// CWD-dependent), otherwise the copy embedded at compile time is parsed.
    pub fn new(engine: &str) -> Result<Self, ScytheError> {
        // Guard clause: reject every engine except the PostgreSQL aliases.
        if !matches!(engine, "postgresql" | "postgres" | "pg") {
            return Err(ScytheError::new(
                ErrorCode::InternalError,
                format!(
                    "python-asyncpg only supports PostgreSQL, got engine '{}'",
                    engine
                ),
            ));
        }
        // Prefer an on-disk manifest override; fall back to the embedded TOML.
        let manifest_path = Path::new("backends/python-asyncpg/manifest.toml");
        let manifest = if manifest_path.exists() {
            load_manifest(manifest_path)
                .map_err(|e| ScytheError::new(ErrorCode::InternalError, format!("manifest: {e}")))?
        } else {
            toml::from_str(DEFAULT_MANIFEST_TOML)
                .map_err(|e| ScytheError::new(ErrorCode::InternalError, format!("manifest: {e}")))?
        };
        Ok(Self {
            manifest,
            row_type: PythonRowType::default(),
        })
    }
}
impl CodegenBackend for PythonAsyncpgBackend {
    /// Stable identifier for this backend.
    fn name(&self) -> &str {
        "python-asyncpg"
    }

    /// The manifest (type mappings + naming rules) this backend was built with.
    fn manifest(&self) -> &scythe_backend::manifest::BackendManifest {
        &self.manifest
    }

    /// Apply user-supplied backend options.
    ///
    /// Only `row_type` is recognized; unrecognized keys are ignored without
    /// error. Fails if the `row_type` value is not a valid `PythonRowType`.
    fn apply_options(&mut self, options: &HashMap<String, String>) -> Result<(), ScytheError> {
        if let Some(rt) = options.get("row_type") {
            self.row_type = PythonRowType::from_option(rt)?;
        }
        Ok(())
    }

    /// Emit the generated module's preamble: module docstring plus imports.
    ///
    /// When the row type's import is a stdlib one it is interleaved with the
    /// other stdlib imports; otherwise the row type merges and sorts its
    /// third-party imports together with the asyncpg import.
    fn file_header(&self) -> String {
        let import_line = self.row_type.import_line();
        if self.row_type.is_stdlib_import() {
            format!(
                "\"\"\"Auto-generated by scythe. Do not edit.\"\"\"\n\
                \n\
                import datetime # noqa: F401\n\
                import decimal # noqa: F401\n\
                {import_line}\n\
                from enum import Enum # noqa: F401\n\
                \n\
                from asyncpg import Connection # noqa: F401\n\
                \n",
            )
        } else {
            // Third-party row type: let it produce the sorted import group,
            // folding in the asyncpg import so ordering stays consistent.
            let third_party = self
                .row_type
                .sorted_third_party_imports("from asyncpg import Connection # noqa: F401");
            format!(
                "\"\"\"Auto-generated by scythe. Do not edit.\"\"\"\n\
                \n\
                import datetime # noqa: F401\n\
                import decimal # noqa: F401\n\
                from enum import Enum # noqa: F401\n\
                \n\
                {third_party}\n\
                \n",
            )
        }
    }

    /// Emit a Python class describing one result row of `query_name`.
    ///
    /// The class flavor (dataclass, NamedTuple, etc. — whatever the
    /// configured `PythonRowType` produces) determines the decorator and
    /// class-definition lines.
    fn generate_row_struct(
        &self,
        query_name: &str,
        columns: &[ResolvedColumn],
    ) -> Result<String, ScytheError> {
        let struct_name = row_struct_name(query_name, &self.manifest.naming);
        let mut out = String::new();
        let _ = write!(out, "{}", self.row_type.decorator());
        let _ = writeln!(out, "{}", self.row_type.class_def(&struct_name));
        let _ = writeln!(out, " \"\"\"Row type for {} query.\"\"\"", query_name);
        if columns.is_empty() {
            // A Python class body must contain at least one statement.
            let _ = writeln!(out, " pass");
        } else {
            let _ = writeln!(out);
            for col in columns {
                let _ = writeln!(out, " {}: {}", col.field_name, col.full_type);
            }
        }
        Ok(out)
    }

    /// Emit a row class for a whole table: the table name is singularized
    /// and PascalCased, then the row-struct generator is reused.
    fn generate_model_struct(
        &self,
        table_name: &str,
        columns: &[ResolvedColumn],
    ) -> Result<String, ScytheError> {
        let singular = singularize(table_name);
        let name = to_pascal_case(&singular);
        self.generate_row_struct(&name, columns)
    }

    /// Emit the async Python function for one analyzed query.
    ///
    /// Shape depends on the query command:
    /// - `One`        -> `conn.fetchrow`, returns `Row | None`
    /// - `Many`       -> `conn.fetch`, returns `list[Row]`
    /// - `Batch`      -> `conn.executemany` over an `items` list
    /// - `Exec`       -> `conn.execute`, returns `None`
    /// - `ExecResult`/`ExecRows` -> `conn.execute`, returns affected-row count
    /// - `Grouped`    -> unreachable (rewritten to `Many` before codegen)
    fn generate_query_fn(
        &self,
        analyzed: &AnalyzedQuery,
        struct_name: &str,
        columns: &[ResolvedColumn],
        params: &[ResolvedParam],
    ) -> Result<String, ScytheError> {
        let func_name = fn_name(&analyzed.name, &self.manifest.naming);
        let mut out = String::new();
        let param_list = params
            .iter()
            .map(|p| format!("{}: {}", p.field_name, p.full_type))
            .collect::<Vec<_>>()
            .join(", ");
        // The bare `*` makes every query parameter keyword-only in Python.
        let kw_sep = if param_list.is_empty() { "" } else { ", *, " };
        let sql = super::clean_sql_with_optional(
            &analyzed.sql,
            &analyzed.optional_params,
            &analyzed.params,
        );
        match &analyzed.command {
            QueryCommand::One => {
                let _ = writeln!(
                    out,
                    "async def {}(conn: Connection{}{}) -> {} | None:",
                    func_name, kw_sep, param_list, struct_name
                );
                let _ = writeln!(out, " \"\"\"Execute {} query.\"\"\"", analyzed.name);
                let _ = writeln!(out, " row = await conn.fetchrow(");
                let _ = writeln!(out, " \"\"\"{}\"\"\",", sql);
                if !params.is_empty() {
                    let args: Vec<String> = params.iter().map(|p| p.field_name.clone()).collect();
                    let _ = writeln!(out, " {},", args.join(", "));
                }
                let _ = writeln!(out, " )");
                let _ = writeln!(out, " if row is None:");
                let _ = writeln!(out, " return None");
                // Construct the row object field-by-field from the record.
                let field_assignments: Vec<String> = columns
                    .iter()
                    .map(|col| format!("{}=row[\"{}\"]", col.field_name, col.name))
                    .collect();
                let oneliner = format!(
                    " return {}({})",
                    struct_name,
                    field_assignments.join(", ")
                );
                // 88 chars matches Black's default line length; spill the
                // constructor over multiple lines when it would exceed that.
                if oneliner.len() <= 88 {
                    let _ = writeln!(out, "{}", oneliner);
                } else {
                    let _ = writeln!(out, " return {}(", struct_name);
                    for fa in &field_assignments {
                        let _ = writeln!(out, " {},", fa);
                    }
                    let _ = writeln!(out, " )");
                }
            }
            QueryCommand::Many => {
                let _ = writeln!(
                    out,
                    "async def {}(conn: Connection{}{}) -> list[{}]:",
                    func_name, kw_sep, param_list, struct_name
                );
                let _ = writeln!(out, " \"\"\"Execute {} query.\"\"\"", analyzed.name);
                let _ = writeln!(out, " rows = await conn.fetch(");
                let _ = writeln!(out, " \"\"\"{}\"\"\",", sql);
                if !params.is_empty() {
                    let args: Vec<String> = params.iter().map(|p| p.field_name.clone()).collect();
                    let _ = writeln!(out, " {},", args.join(", "));
                }
                let _ = writeln!(out, " )");
                let field_assignments: Vec<String> = columns
                    .iter()
                    .map(|col| format!("{}=r[\"{}\"]", col.field_name, col.name))
                    .collect();
                // Prefer a single-line list comprehension when it fits in 88
                // columns (Black's default); otherwise emit a wrapped one.
                let oneliner = format!(
                    " return [{}({}) for r in rows]",
                    struct_name,
                    field_assignments.join(", ")
                );
                if oneliner.len() <= 88 {
                    let _ = writeln!(out, "{}", oneliner);
                } else {
                    let _ = writeln!(out, " return [");
                    let _ = writeln!(out, " {}(", struct_name);
                    for fa in &field_assignments {
                        let _ = writeln!(out, " {},", fa);
                    }
                    let _ = writeln!(out, " )");
                    let _ = writeln!(out, " for r in rows");
                    let _ = writeln!(out, " ]");
                }
            }
            QueryCommand::Batch => {
                let batch_fn_name = format!("{}_batch", func_name);
                // Shape of `items` depends on arity:
                //   >1 params -> list of tuples, 1 param -> list of values,
                //   0 params  -> an int repeat count.
                let items_type = if params.len() > 1 {
                    let tuple_types: Vec<String> =
                        params.iter().map(|p| p.full_type.clone()).collect();
                    format!("list[tuple[{}]]", tuple_types.join(", "))
                } else if params.len() == 1 {
                    format!("list[{}]", params[0].full_type)
                } else {
                    "int".to_string()
                };
                let _ = writeln!(
                    out,
                    "async def {}(conn: Connection, *, items: {}) -> None:",
                    batch_fn_name, items_type
                );
                let _ = writeln!(
                    out,
                    " \"\"\"Execute {} query for each item in the batch.\"\"\"",
                    analyzed.name
                );
                if params.is_empty() {
                    // No parameters: run the statement `items` times.
                    let _ = writeln!(out, " for _ in range(items):");
                    let _ = writeln!(out, " await conn.execute(");
                    let _ = writeln!(out, " \"\"\"{}\"\"\",", sql);
                    let _ = writeln!(out, " )");
                } else {
                    if params.len() == 1 {
                        // executemany expects a sequence per row, so wrap
                        // each scalar in a 1-tuple.
                        let _ = writeln!(out, " args = [(item,) for item in items]");
                    } else {
                        let _ = writeln!(out, " args = items");
                    }
                    let _ = writeln!(out, " await conn.executemany(");
                    let _ = writeln!(out, " \"\"\"{}\"\"\",", sql);
                    let _ = writeln!(out, " args,");
                    let _ = writeln!(out, " )");
                }
            }
            QueryCommand::Exec => {
                let _ = writeln!(
                    out,
                    "async def {}(conn: Connection{}{}) -> None:",
                    func_name, kw_sep, param_list
                );
                let _ = writeln!(out, " \"\"\"Execute {} query.\"\"\"", analyzed.name);
                let _ = writeln!(out, " await conn.execute(");
                let _ = writeln!(out, " \"\"\"{}\"\"\",", sql);
                if !params.is_empty() {
                    let args: Vec<String> = params.iter().map(|p| p.field_name.clone()).collect();
                    let _ = writeln!(out, " {},", args.join(", "));
                }
                let _ = writeln!(out, " )");
            }
            QueryCommand::ExecResult | QueryCommand::ExecRows => {
                let _ = writeln!(
                    out,
                    "async def {}(conn: Connection{}{}) -> int:",
                    func_name, kw_sep, param_list
                );
                let _ = writeln!(out, " \"\"\"Execute {} query.\"\"\"", analyzed.name);
                let _ = writeln!(out, " result = await conn.execute(");
                let _ = writeln!(out, " \"\"\"{}\"\"\",", sql);
                if !params.is_empty() {
                    let args: Vec<String> = params.iter().map(|p| p.field_name.clone()).collect();
                    let _ = writeln!(out, " {},", args.join(", "));
                }
                let _ = writeln!(out, " )");
                // asyncpg's execute returns a status string like "UPDATE 5";
                // the trailing token is the affected-row count.
                let _ = writeln!(out, " return int(result.split()[-1])");
            }
            QueryCommand::Grouped => {
                unreachable!("Grouped is rewritten to Many before codegen")
            }
        }
        Ok(out)
    }

    /// Emit a Python `str`-valued `Enum` mirroring a database enum type.
    fn generate_enum_def(&self, enum_info: &EnumInfo) -> Result<String, ScytheError> {
        let type_name = enum_type_name(&enum_info.sql_name, &self.manifest.naming);
        let mut out = String::new();
        // Subclassing (str, Enum) keeps members usable as plain strings.
        let _ = writeln!(out, "class {}(str, Enum):", type_name);
        let _ = writeln!(
            out,
            " \"\"\"Database enum type {}.\"\"\"",
            enum_info.sql_name
        );
        if enum_info.values.is_empty() {
            // A Python class body must contain at least one statement.
            let _ = writeln!(out, " pass");
        } else {
            let _ = writeln!(out);
            for value in &enum_info.values {
                let variant = enum_variant_name(value, &self.manifest.naming);
                let _ = writeln!(out, " {} = \"{}\"", variant, value);
            }
        }
        Ok(out)
    }

    /// Emit a Python class mirroring a database composite type.
    fn generate_composite_def(&self, composite: &CompositeInfo) -> Result<String, ScytheError> {
        let name = to_pascal_case(&composite.sql_name);
        let mut out = String::new();
        let _ = write!(out, "{}", self.row_type.decorator());
        let _ = writeln!(out, "{}", self.row_type.class_def(&name));
        let _ = writeln!(
            out,
            " \"\"\"Composite type {}.\"\"\"",
            composite.sql_name
        );
        if composite.fields.is_empty() {
            // A Python class body must contain at least one statement.
            let _ = writeln!(out, " pass");
        } else {
            let _ = writeln!(out);
            for field in &composite.fields {
                // Map the neutral type to a Python type via the manifest.
                // NOTE(review): third `resolve_type` arg is `false` —
                // presumably a nullability flag; confirm in scythe_backend.
                let py_type = resolve_type(&field.neutral_type, &self.manifest, false)
                    .map(|t| t.into_owned())
                    .map_err(|e| {
                        ScytheError::new(
                            ErrorCode::InternalError,
                            format!("composite field type error: {}", e),
                        )
                    })?;
                let _ = writeln!(out, " {}: {}", to_snake_case(&field.name), py_type);
            }
        }
        Ok(out)
    }
}