ormlite_macro/lib.rs

#![allow(unused)]
#![allow(non_snake_case)]

use codegen::insert::impl_Insert;
use convert_case::{Case, Casing};
use ormlite_attr::InsertMeta;
use proc_macro::TokenStream;
use std::borrow::Borrow;
use std::cell::OnceCell;
use std::collections::HashMap;
use std::env;
use std::env::var;
use std::ops::Deref;
use std::sync::OnceLock;
use syn::DataEnum;

use quote::quote;
use syn::{parse_macro_input, Data, DeriveInput};

use codegen::into_arguments::impl_IntoArguments;
use ormlite_attr::schema_from_filepaths;
use ormlite_attr::DeriveInputExt;
use ormlite_attr::ModelMeta;
use ormlite_attr::TableMeta;
use ormlite_core::config::get_var_model_folders;

use crate::codegen::common::OrmliteCodegen;
use crate::codegen::from_row::{impl_FromRow, impl_from_row_using_aliases};
use crate::codegen::insert::impl_InsertModel;
use crate::codegen::insert_model::struct_InsertModel;
use crate::codegen::join_description::static_join_descriptions;
use crate::codegen::meta::{impl_JoinMeta, impl_TableMeta};
use crate::codegen::model::impl_Model;
use crate::codegen::model_builder::{impl_ModelBuilder, struct_ModelBuilder};

mod codegen;
mod util;

/// Mapping from StructName -> ModelMeta
pub(crate) type MetadataCache = HashMap<String, ModelMeta>;

static TABLES: OnceLock<MetadataCache> = OnceLock::new();

fn get_tables() -> &'static MetadataCache {
    TABLES.get_or_init(|| load_metadata_cache())
}

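/// Walks the configured model folders once and caches each discovered `ModelMeta` by struct name,
/// so later derives can resolve joins against other models.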
fn load_metadata_cache() -> MetadataCache {
    let mut tables = HashMap::new();
    let paths = get_var_model_folders();
    let paths = paths.iter().map(|p| p.as_path()).collect::<Vec<_>>();
    let schema = schema_from_filepaths(&paths).expect("Failed to preload models");
    for meta in schema.tables {
        let name = meta.ident.to_string();
        tables.insert(name, meta);
    }
    tables
}

/// For a given struct, determine what codegen to use.
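///
/// Backends are chosen in this order: the databases listed on the struct with
/// `#[ormlite(database = "...")]`, then any enabled `default-*` feature, then the single enabled
/// backend feature. Ambiguous configurations (several backends enabled but none specified) and
/// configurations with no backend at all panic with guidance.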
fn get_databases(table_meta: &TableMeta) -> Vec<Box<dyn OrmliteCodegen>> {
    let mut databases: Vec<Box<dyn OrmliteCodegen>> = Vec::new();
    let dbs = table_meta.databases.clone();
    if dbs.is_empty() {
        #[cfg(feature = "default-sqlite")]
        databases.push(Box::new(codegen::sqlite::SqliteBackend {}));
        #[cfg(feature = "default-postgres")]
        databases.push(Box::new(codegen::postgres::PostgresBackend));
        #[cfg(feature = "default-mysql")]
        databases.push(Box::new(codegen::mysql::MysqlBackend {}));
    } else {
        for db in dbs {
            match db.as_str() {
                #[cfg(feature = "sqlite")]
                "sqlite" => databases.push(Box::new(codegen::sqlite::SqliteBackend {})),
                #[cfg(feature = "postgres")]
                "postgres" => databases.push(Box::new(codegen::postgres::PostgresBackend)),
                #[cfg(feature = "mysql")]
                "mysql" => databases.push(Box::new(codegen::mysql::MysqlBackend {})),
                "sqlite" | "postgres" | "mysql" => {
                    panic!("Database {} is not enabled. Enable it with features = [\"{}\"]", db, db)
                }
                _ => panic!("Unknown database: {}", db),
            }
        }
    }
    if databases.is_empty() {
        let mut count = 0;
        #[cfg(feature = "sqlite")]
        {
            count += 1;
        }
        #[cfg(feature = "postgres")]
        {
            count += 1;
        }
        #[cfg(feature = "mysql")]
        {
            count += 1;
        }
        if count > 1 {
            panic!("You have more than one database configured using features, but no database is specified for this model. \
            Specify a database for the model like this:\n\n#[ormlite(database = \"<db>\")]\n\nOr you can enable \
            a default database feature:\n\n # Cargo.toml\normlite = {{ features = [\"default-<db>\"] }}");
        }
    }
    if databases.is_empty() {
        #[cfg(feature = "sqlite")]
        databases.push(Box::new(codegen::sqlite::SqliteBackend {}));
        #[cfg(feature = "postgres")]
        databases.push(Box::new(codegen::postgres::PostgresBackend));
        #[cfg(feature = "mysql")]
        databases.push(Box::new(codegen::mysql::MysqlBackend {}));
    }
    if databases.is_empty() {
        panic!(
            r#"No database is enabled. Enable one of these features for the ormlite crate: postgres, mysql, sqlite"#
        );
    }
    databases
}

/// Derive macro for `#[derive(Model)]`. It additionally generates `FromRow` for the struct, since
/// `Model` requires `FromRow`.
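///
/// A minimal, illustrative sketch (the struct, fields, and import path are examples, not part of
/// this crate):
///
/// ```rust,ignore
/// use ormlite::model::*;
///
/// #[derive(Model)]
/// struct User {
///     id: i32,
///     name: String,
/// }
/// ```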
#[proc_macro_derive(Model, attributes(ormlite))]
pub fn expand_ormlite_model(input: TokenStream) -> TokenStream {
    let ast = parse_macro_input!(input as DeriveInput);
    let meta = ModelMeta::from_derive(&ast);
    let mut databases = get_databases(&meta.table);
    let tables = get_tables();
    let first = databases.remove(0);

    let primary = {
        let db = first.as_ref();
        let impl_TableMeta = impl_TableMeta(&meta.table, Some(meta.pkey.name.as_str()));
        let impl_JoinMeta = impl_JoinMeta(&meta);
        let static_join_descriptions = static_join_descriptions(&meta.table, &tables);
        let impl_Model = impl_Model(db, &meta, tables);
        let impl_FromRow = impl_FromRow(db, &meta.table, &tables);
        let impl_from_row_using_aliases = impl_from_row_using_aliases(db, &meta.table, &tables);

        let struct_ModelBuilder = struct_ModelBuilder(&ast, &meta);
        let impl_ModelBuilder = impl_ModelBuilder(db, &meta);

        let struct_InsertModel = struct_InsertModel(&ast, &meta);
        let impl_InsertModel = impl_InsertModel(db, &meta);

        quote! {
            #impl_TableMeta
            #impl_JoinMeta

            #static_join_descriptions
            #impl_Model
            #impl_FromRow
            #impl_from_row_using_aliases

            #struct_ModelBuilder
            #impl_ModelBuilder

            #struct_InsertModel
            #impl_InsertModel
        }
    };

    let rest = databases.iter().map(|db| {
        let impl_Model = impl_Model(db.as_ref(), &meta, tables);
        quote! {
            #impl_Model
        }
    });

    TokenStream::from(quote! {
        #primary
        #(#rest)*
    })
}

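/// Derive macro for `Insert`: generates an insert implementation for a struct whose `returns`
/// attribute names the model to return. If the struct does not set a table name, it is borrowed
/// from that model's metadata. An illustrative sketch (struct, fields, and attribute values are
/// examples, not part of this crate):
///
/// ```rust,ignore
/// #[derive(Insert)]
/// #[ormlite(returns = "User")]
/// struct InsertUser {
///     name: String,
/// }
/// ```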
#[proc_macro_derive(Insert, attributes(ormlite))]
pub fn expand_ormlite_insert(input: TokenStream) -> TokenStream {
    let ast = parse_macro_input!(input as DeriveInput);
    let mut meta = InsertMeta::from_derive(&ast);
    let mut databases = get_databases(&meta.table);
    let tables = get_tables();
    if meta.name.is_none() {
        if let Some(m) = tables.get(meta.returns.as_ref()) {
            meta.table.name = m.name.clone();
        }
    }
    let first = databases.remove(0);
    TokenStream::from(impl_Insert(first.as_ref(), &meta.table, &meta.ident, &meta.returns))
}

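/// Derive macro for `FromRow`: generates row-decoding impls, including alias-aware decoding,
/// for each database configured for the struct.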
#[proc_macro_derive(FromRow, attributes(ormlite))]
pub fn expand_derive_fromrow(input: TokenStream) -> TokenStream {
    let ast = parse_macro_input!(input as DeriveInput);
    let meta = TableMeta::from_derive(&ast);

    let databases = get_databases(&meta);
    let tables = get_tables();

    let expanded = databases.iter().map(|db| {
        let db = db.as_ref();
        let impl_FromRow = impl_FromRow(db, &meta, &tables);
        let impl_from_row_using_aliases = impl_from_row_using_aliases(db, &meta, &tables);
        quote! {
            #impl_FromRow
            #impl_from_row_using_aliases
        }
    });

    TokenStream::from(quote! {
        #(#expanded)*
    })
}

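/// Derive macro for `TableMeta`: generates only the table metadata impl, without `FromRow`
/// or `Model`.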
#[proc_macro_derive(TableMeta, attributes(ormlite))]
pub fn expand_derive_table_meta(input: TokenStream) -> TokenStream {
    let ast = parse_macro_input!(input as DeriveInput);
    let Data::Struct(data) = &ast.data else {
        panic!("Only structs can derive TableMeta");
    };

    let table_meta = TableMeta::from_derive(&ast);
    let databases = get_databases(&table_meta);
    let impl_TableMeta = impl_TableMeta(&table_meta, table_meta.pkey.as_deref());
    TokenStream::from(impl_TableMeta)
}

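/// Derive macro for `IntoArguments`: generates an `IntoArguments` impl for each configured
/// database, so the struct's fields can be bound as query arguments.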
#[proc_macro_derive(IntoArguments, attributes(ormlite))]
pub fn expand_derive_into_arguments(input: TokenStream) -> TokenStream {
    let ast = parse_macro_input!(input as DeriveInput);
    let Data::Struct(data) = &ast.data else {
        panic!("Only structs can derive IntoArguments");
    };

    let meta = TableMeta::from_derive(&ast);
    let databases = get_databases(&meta);

    let expanded = databases.iter().map(|db| {
        let impl_IntoArguments = impl_IntoArguments(db.as_ref(), &meta);
        impl_IntoArguments
    });
    TokenStream::from(quote! {
        #(#expanded)*
    })
}

/// A no-op marker derive that lets the migration tool know when a user has manually
/// implemented a type.
///
/// This is useful for data that is stored as a string in the database but is a `strum::EnumString` in code.
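///
/// An illustrative sketch (the enum and its `strum` derives are examples, not part of this crate):
///
/// ```rust,ignore
/// #[derive(ManualType, strum::EnumString, strum::Display)]
/// enum Status {
///     Active,
///     Inactive,
/// }
/// ```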
#[proc_macro_derive(ManualType)]
pub fn expand_derive_manual_type(input: TokenStream) -> TokenStream {
    TokenStream::new()
}

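/// Derive macro for `Enum`: generates `Display`, `FromStr`, and `TryFrom<&str>` using the
/// snake_case variant names, plus Postgres `Decode`/`Encode`/`Type` impls that store the value
/// as `VARCHAR`. An illustrative sketch (the enum is an example, not part of this crate):
///
/// ```rust,ignore
/// #[derive(Enum)]
/// enum UserRole {
///     Admin,    // stored and parsed as "admin"
///     ReadOnly, // stored and parsed as "read_only"
/// }
/// ```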
#[proc_macro_derive(Enum)]
pub fn derive_ormlite_enum(input: TokenStream) -> TokenStream {
    let input = parse_macro_input!(input as DeriveInput);

    let enum_name = input.ident;

    let variants = match input.data {
        Data::Enum(DataEnum { variants, .. }) => variants,
        _ => panic!("#[derive(Enum)] is only supported on enums"),
    };

    // Collect variant names and strings into vectors
    let variant_names: Vec<_> = variants.iter().map(|v| &v.ident).collect();
    let variant_strings: Vec<_> = variant_names
        .iter()
        .map(|v| v.to_string().to_case(Case::Snake))
        .collect();

    let gen = quote! {
        impl std::fmt::Display for #enum_name {
            fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
                match self {
                    #(Self::#variant_names => write!(f, "{}", #variant_strings)),*
                }
            }
        }

        impl std::str::FromStr for #enum_name {
            type Err = String;
            fn from_str(s: &str) -> Result<Self, <Self as std::str::FromStr>::Err> {
                match s {
                    #(#variant_strings => Ok(Self::#variant_names)),*,
                    _ => Err(format!("Invalid {} value: {}", stringify!(#enum_name), s))
                }
            }
        }

        impl std::convert::TryFrom<&str> for #enum_name {
            type Error = String;
            fn try_from(value: &str) -> Result<Self, Self::Error> {
                <Self as std::str::FromStr>::from_str(value)
            }
        }

        impl sqlx::Decode<'_, sqlx::Postgres> for #enum_name {
            fn decode(
                value: sqlx::postgres::PgValueRef<'_>,
            ) -> Result<Self, sqlx::error::BoxDynError> {
                let s = value.as_str()?;
                <Self as std::str::FromStr>::from_str(s).map_err(|e| sqlx::error::BoxDynError::from(
                    std::io::Error::new(std::io::ErrorKind::InvalidData, e)
                ))
            }
        }

        impl sqlx::Encode<'_, sqlx::Postgres> for #enum_name {
            fn encode_by_ref(
                &self,
                buf: &mut sqlx::postgres::PgArgumentBuffer
            ) -> Result<sqlx::encode::IsNull, sqlx::error::BoxDynError> {
                let s = self.to_string();
                <String as sqlx::Encode<sqlx::Postgres>>::encode(s, buf)
            }
        }

        impl sqlx::Type<sqlx::Postgres> for #enum_name {
            fn type_info() -> <sqlx::Postgres as sqlx::Database>::TypeInfo {
                sqlx::postgres::PgTypeInfo::with_name("VARCHAR")
            }

            fn compatible(ty: &<sqlx::Postgres as sqlx::Database>::TypeInfo) -> bool {
                ty.to_string() == "VARCHAR"
            }
        }
    };

    gen.into()
}