1use proc_macro2::TokenStream;
11use quote::{format_ident, quote};
12
13use crate::state::{
14 ArchitectureState, ConnectorDef, ConnectorDirection, RecordDef, SerializationType, TaskDef,
15 TaskType,
16};
17
18pub fn generate_rust(state: &ArchitectureState) -> String {
28 let formatted = generate_rust_inner(state);
29
30 let header = "\
31// @generated — do not edit manually.\n\
32// Source: .aimdb/state.toml — edit via `aimdb generate` or the architecture agent.\n\
33// Regenerate: `aimdb generate` or confirm a proposal in the architecture agent.\n\n";
34
35 format!("{header}{formatted}")
36}
37
/// Renders only the record type definitions (value structs, key enums, and
/// trait impls) for `state` — no file header and no `configure_schema`
/// function. Intended for the common crate's `schema.rs`.
pub fn generate_schema_rs(state: &ArchitectureState) -> String {
    generate_types_inner(state)
}
46
47fn generate_types_inner(state: &ArchitectureState) -> String {
49 let imports = emit_imports_types_only(state);
50
51 let record_items: Vec<TokenStream> = state
52 .records
53 .iter()
54 .flat_map(|rec| {
55 let mut items = vec![emit_value_struct(rec), emit_key_enum(rec)];
56 items.push(emit_schema_type_impl(rec));
57 let linkable = emit_linkable_impl(rec);
58 if !linkable.is_empty() {
59 items.push(linkable);
60 }
61 if let Some(obs) = emit_observable_impl(rec) {
62 items.push(obs);
63 }
64 if let Some(set) = emit_settable_impl(rec) {
65 items.push(set);
66 }
67 items
68 })
69 .collect();
70
71 let file_tokens = quote! {
72 #imports
73 #(#record_items)*
74 };
75
76 let syntax_tree = syn::parse2(file_tokens).expect("generated tokens should be valid Rust");
77 prettyplease::unparse(&syntax_tree)
78}
79
80fn generate_rust_inner(state: &ArchitectureState) -> String {
81 let imports = emit_imports(state);
82
83 let record_items: Vec<TokenStream> = state
84 .records
85 .iter()
86 .flat_map(|rec| {
87 let mut items = vec![emit_value_struct(rec), emit_key_enum(rec)];
88 items.push(emit_schema_type_impl(rec));
89 let linkable = emit_linkable_impl(rec);
90 if !linkable.is_empty() {
91 items.push(linkable);
92 }
93 if let Some(obs) = emit_observable_impl(rec) {
94 items.push(obs);
95 }
96 if let Some(set) = emit_settable_impl(rec) {
97 items.push(set);
98 }
99 items
100 })
101 .collect();
102
103 let configure_fn = emit_configure_schema(state);
104
105 let file_tokens = quote! {
106 #imports
107 #(#record_items)*
108 #configure_fn
109 };
110
111 let syntax_tree = syn::parse2(file_tokens).expect("generated tokens should be valid Rust");
112 prettyplease::unparse(&syntax_tree)
113}
114
115pub fn generate_cargo_toml(state: &ArchitectureState) -> String {
120 let project = state
121 .project
122 .as_ref()
123 .expect("generate_cargo_toml requires [project] block in state.toml");
124 let crate_name = format!("{}-common", project.name);
125 let edition = project.edition.as_deref().unwrap_or("2024");
126
127 let has_non_custom_ser = state.records.iter().any(|r| {
128 r.serialization.as_ref().unwrap_or(&SerializationType::Json) != &SerializationType::Custom
129 });
130 let has_postcard = state
131 .records
132 .iter()
133 .any(|r| r.serialization.as_ref() == Some(&SerializationType::Postcard));
134 let has_observable = state.records.iter().any(|r| r.observable.is_some());
135
136 let mut data_contracts_features = Vec::new();
137 if has_non_custom_ser {
138 data_contracts_features.push("\"linkable\"");
139 }
140
141 let dc_features_str = if data_contracts_features.is_empty() {
142 String::new()
143 } else {
144 format!(", features = [{}]", data_contracts_features.join(", "))
145 };
146
147 let mut std_deps = vec!["\"aimdb-data-contracts/std\"".to_string()];
149 if has_non_custom_ser && !has_postcard {
150 std_deps.push("\"serde_json\"".to_string());
151 }
152 if has_observable {
153 std_deps.push("\"aimdb-data-contracts/observable\"".to_string());
154 }
155 let std_features = std_deps.join(", ");
156
157 let mut optional_deps = String::new();
158 if has_non_custom_ser && !has_postcard {
159 optional_deps.push_str("serde_json = { version = \"1.0\", optional = true }\n");
160 }
161 if has_postcard {
162 optional_deps.push_str(
163 "postcard = { version = \"1.0\", default-features = false, features = [\"alloc\"] }\n",
164 );
165 }
166
167 format!(
168 r#"# Regenerate with `aimdb generate --common-crate`
169[package]
170name = "{crate_name}"
171version = "0.1.0"
172edition = "{edition}"
173
174[features]
175default = ["std"]
176std = [{std_features}]
177alloc = []
178
179[dependencies]
180aimdb-core = {{ version = "0.5", default-features = false, features = ["derive", "alloc"] }}
181aimdb-data-contracts = {{ version = "0.5", default-features = false{dc_features_str} }}
182serde = {{ version = "1.0", default-features = false, features = ["derive", "alloc"] }}
183{optional_deps}"#
184 )
185}
186
/// Returns the fixed `lib.rs` contents for the generated common crate:
/// a no_std-capable shell that declares `mod schema` and re-exports it.
pub fn generate_lib_rs() -> String {
    [
        "// Regenerate with `aimdb generate --common-crate`",
        "#![cfg_attr(not(feature = \"std\"), no_std)]",
        "extern crate alloc;",
        "",
        "mod schema;",
        "",
        "// Re-export all public types for downstream crates",
        "pub use schema::*;",
        // Trailing empty entry keeps the file newline-terminated after join.
        "",
    ]
    .join("\n")
}
201
/// Generates `src/main.rs` for the binary named `binary_name`.
///
/// Returns `None` when no binary with that name is defined in `state`.
/// The emitted program: initializes tracing, reads each external connector's
/// address from its env var (falling back to the configured default),
/// constructs the connectors, registers them on an `AimDbBuilder` with a
/// `TokioAdapter` runtime, applies the common crate's `configure_schema`,
/// and registers each task as a source on every key variant of every record
/// the task outputs.
pub fn generate_main_rs(state: &ArchitectureState, binary_name: &str) -> Option<String> {
    let bin = state.binaries.iter().find(|b| b.name == binary_name)?;
    let project_name = state
        .project
        .as_ref()
        .map(|p| p.name.as_str())
        .unwrap_or("project");
    // Crate idents use underscores even when the project name is kebab-case.
    let common_crate = format_ident!("{}", format!("{}_common", project_name.replace('-', "_")));

    // Resolve the binary's task names against the global task list; names
    // with no matching TaskDef are silently dropped here (but still appear
    // in `task_use_idents` below, which is built from `bin.tasks` directly —
    // NOTE(review): confirm state validation guarantees every name resolves).
    let tasks: Vec<&TaskDef> = bin
        .tasks
        .iter()
        .filter_map(|tname| state.tasks.iter().find(|t| &t.name == tname))
        .collect();

    let task_use_idents: Vec<syn::Ident> = bin
        .tasks
        .iter()
        .map(|name| format_ident!("{}", name))
        .collect();

    // `use` lines for the connector crates actually referenced; protocols
    // outside mqtt/knx/ws get no import (their construction is a todo!()).
    let connector_use_stmts: Vec<TokenStream> = bin
        .external_connectors
        .iter()
        .filter_map(|c| match c.protocol.as_str() {
            "mqtt" => Some(quote! { use aimdb_mqtt_connector::MqttConnector; }),
            "knx" => Some(quote! { use aimdb_knx_connector::KnxConnector; }),
            "ws" => Some(quote! { use aimdb_websocket_connector::WebSocketConnector; }),
            _ => None,
        })
        .collect();

    // One `let <env_var_lower> = std::env::var(..)` plus a constructor call
    // per connector. NOTE(review): the binding names derive from env_var and
    // protocol, so two connectors sharing a protocol (or env vars differing
    // only by case) would emit colliding `let` bindings — confirm state
    // validation forbids duplicates.
    let connector_let_stmts: Vec<TokenStream> = bin
        .external_connectors
        .iter()
        .map(|c| {
            let var_ident = format_ident!("{}", c.env_var.to_lowercase());
            let var_name = &c.env_var;
            let default = &c.default;
            let ctor: TokenStream = match c.protocol.as_str() {
                "mqtt" => quote! { MqttConnector::new(&#var_ident) },
                "knx" => quote! { KnxConnector::new(&#var_ident) },
                "ws" => quote! {
                    WebSocketConnector::new()
                        .bind(#var_ident.parse::<std::net::SocketAddr>()
                            .expect("invalid WebSocket bind address"))
                        .path("/ws")
                },
                _ => {
                    // Unknown protocol: leave an explicit todo!() in the
                    // generated binary rather than failing generation.
                    let msg = format!("build connector for protocol '{}'", c.protocol);
                    quote! { todo!(#msg) }
                }
            };
            let connector_ident = format_ident!("{}_connector", c.protocol);
            quote! {
                let #var_ident = std::env::var(#var_name)
                    .unwrap_or_else(|_| #default.to_string());
                let #connector_ident = #ctor;
            }
        })
        .collect();

    let with_connector_calls: Vec<TokenStream> = bin
        .external_connectors
        .iter()
        .map(|c| {
            let connector_ident = format_ident!("{}_connector", c.protocol);
            quote! { .with_connector(#connector_ident) }
        })
        .collect();

    // One `builder.configure(..).source(task)` call per (task, output record,
    // key variant). An output with no explicit variants covers every variant
    // of the record.
    let task_registrations: Vec<TokenStream> = tasks
        .iter()
        .flat_map(|task| {
            task.outputs.iter().flat_map(move |output| {
                let variants: Vec<String> = if output.variants.is_empty() {
                    state
                        .records
                        .iter()
                        .find(|r| r.name == output.record)
                        .map(|r| r.key_variants.clone())
                        .unwrap_or_default()
                } else {
                    output.variants.clone()
                };

                let value_type = format_ident!("{}Value", output.record);
                let key_type = format_ident!("{}Key", output.record);
                let task_fn = format_ident!("{}", task.name);

                variants.into_iter().map(move |variant| {
                    let variant_ident = format_ident!("{}", to_pascal_case(&variant));
                    quote! {
                        builder.configure::<#value_type>(#key_type::#variant_ident, |reg| {
                            reg.source(#task_fn);
                        });
                    }
                })
            })
        })
        .collect();

    let file_tokens = quote! {
        use aimdb_core::{AimDbBuilder, DbResult};
        use aimdb_tokio_adapter::TokioAdapter;
        #(#connector_use_stmts)*
        use std::sync::Arc;
        use #common_crate::configure_schema;

        mod tasks;
        use tasks::{#(#task_use_idents),*};

        #[tokio::main]
        async fn main() -> DbResult<()> {
            tracing_subscriber::fmt::init();

            #(#connector_let_stmts)*

            let runtime = Arc::new(TokioAdapter::new());

            let mut builder = AimDbBuilder::new()
                .runtime(runtime)
                #(#with_connector_calls)*
                ;

            configure_schema(&mut builder);

            #(#task_registrations)*

            builder.run().await
        }
    };

    let header = format!(
        "// @generated — do not edit manually.\n\
         // Source: .aimdb/state.toml\n\
         // Regenerate: `aimdb generate --binary {binary_name}`\n\n"
    );

    let syntax_tree =
        syn::parse2(file_tokens).expect("generate_main_rs: tokens should be valid Rust");
    Some(format!("{header}{}", prettyplease::unparse(&syntax_tree)))
}
357
/// Generates the `tasks.rs` scaffold for the binary named `binary_name`:
/// one `pub async fn` stub per task, with a `Consumer` parameter per input
/// record and a `Producer` parameter per output record, each body a `todo!()`.
///
/// Returns `None` when no binary with that name is defined in `state`.
pub fn generate_tasks_rs(state: &ArchitectureState, binary_name: &str) -> Option<String> {
    let bin = state.binaries.iter().find(|b| b.name == binary_name)?;
    let project_name = state
        .project
        .as_ref()
        .map(|p| p.name.as_str())
        .unwrap_or("project");
    // Crate idents use underscores even when the project name is kebab-case.
    let common_crate = format_ident!("{}", format!("{}_common", project_name.replace('-', "_")));

    // Resolve the binary's task names against the global task list; unknown
    // names are silently skipped.
    let tasks: Vec<&TaskDef> = bin
        .tasks
        .iter()
        .filter_map(|tname| state.tasks.iter().find(|t| &t.name == tname))
        .collect();

    let task_fns: Vec<TokenStream> = tasks
        .iter()
        .map(|task| {
            let fn_name = format_ident!("{}", task.name);

            // First parameter is always the runtime context; then one
            // snake_cased parameter per input and output record.
            // NOTE(review): a task that both reads and writes the same record
            // would emit two parameters with the same name — confirm state
            // validation prevents that.
            let mut params: Vec<TokenStream> = vec![quote! { ctx: RuntimeContext<TokioAdapter> }];
            for input in &task.inputs {
                let arg_name = format_ident!("{}", to_snake_case(&input.record));
                let value_type = format_ident!("{}Value", input.record);
                params.push(quote! { #arg_name: Consumer<#value_type, TokioAdapter> });
            }
            for output in &task.outputs {
                let arg_name = format_ident!("{}", to_snake_case(&output.record));
                let value_type = format_ident!("{}Value", output.record);
                params.push(quote! { #arg_name: Producer<#value_type, TokioAdapter> });
            }

            // Agent tasks get a dedicated stub message; everything else
            // echoes the task description.
            let todo_msg = match &task.task_type {
                TaskType::Agent => "LLM agent stub — implement reasoning loop".to_string(),
                _ => format!("implement: {}", task.description),
            };

            let doc_attr = if task.description.is_empty() {
                quote! {}
            } else {
                let desc = &task.description;
                quote! { #[doc = #desc] }
            };

            quote! {
                #doc_attr
                pub async fn #fn_name(#(#params),*) -> DbResult<()> {
                    todo!(#todo_msg)
                }
            }
        })
        .collect();

    let file_tokens = quote! {
        use aimdb_core::{Consumer, DbResult, Producer, RuntimeContext};
        use aimdb_tokio_adapter::TokioAdapter;
        use #common_crate::*;

        #(#task_fns)*
    };

    let header = format!(
        "// Implement the task bodies; signatures must not change.\n\
         // Regenerate with `aimdb generate --binary {binary_name} --tasks-scaffold`\n\
         // (only writes this file if it does not already exist)\n\n"
    );

    let syntax_tree =
        syn::parse2(file_tokens).expect("generate_tasks_rs: tokens should be valid Rust");
    Some(format!("{header}{}", prettyplease::unparse(&syntax_tree)))
}
437
438pub fn generate_binary_cargo_toml(state: &ArchitectureState, binary_name: &str) -> Option<String> {
443 let bin = state.binaries.iter().find(|b| b.name == binary_name)?;
444 let project_name = state
445 .project
446 .as_ref()
447 .map(|p| p.name.as_str())
448 .unwrap_or("project");
449 let common_crate_name = format!("{project_name}-common");
450 let common_crate_dep = common_crate_name.replace('-', "_");
451 let edition = state
452 .project
453 .as_ref()
454 .and_then(|p| p.edition.as_deref())
455 .unwrap_or("2024");
456
457 let has_mqtt = bin.external_connectors.iter().any(|c| c.protocol == "mqtt");
458 let has_knx = bin.external_connectors.iter().any(|c| c.protocol == "knx");
459 let has_ws = bin.external_connectors.iter().any(|c| c.protocol == "ws");
460
461 let mut optional_connector_deps = String::new();
462 if has_mqtt {
463 optional_connector_deps.push_str(
464 "aimdb-mqtt-connector = { version = \"0.5\", features = [\"tokio-runtime\"] }\n",
465 );
466 }
467 if has_knx {
468 optional_connector_deps.push_str(
469 "aimdb-knx-connector = { version = \"0.5\", features = [\"tokio-runtime\"] }\n",
470 );
471 }
472 if has_ws {
473 optional_connector_deps.push_str(
474 "aimdb-websocket-connector = { version = \"0.5\", features = [\"tokio-runtime\"] }\n",
475 );
476 }
477
478 let out = format!(
479 "# @generated — do not edit manually.\n\
480# Source: .aimdb/state.toml — regenerate with `aimdb generate --binary {binary_name}`\n\
481[package]\n\
482name = \"{binary_name}\"\n\
483version = \"0.1.0\"\n\
484edition = \"{edition}\"\n\
485\n\
486[[bin]]\n\
487name = \"{binary_name}\"\n\
488path = \"src/main.rs\"\n\
489\n\
490[dependencies]\n\
491{common_crate_dep} = {{ path = \"../{common_crate_name}\" }}\n\
492aimdb-core = {{ version = \"0.5\" }}\n\
493aimdb-tokio-adapter = {{ version = \"0.5\", features = [\"tokio-runtime\"] }}\n\
494{optional_connector_deps}\
495tokio = {{ version = \"1\", features = [\"full\"] }}\n\
496tracing = \"0.1\"\n\
497tracing-subscriber = {{ version = \"0.3\", features = [\"env-filter\"] }}\n"
498 );
499
500 Some(out)
501}
502
503fn emit_imports_types_only(state: &ArchitectureState) -> TokenStream {
505 let has_non_custom_ser = state.records.iter().any(|r| {
506 r.serialization.as_ref().unwrap_or(&SerializationType::Json) != &SerializationType::Custom
507 });
508 let has_observable = state.records.iter().any(|r| r.observable.is_some());
509 let has_settable = state
510 .records
511 .iter()
512 .any(|r| r.fields.iter().any(|f| f.settable));
513
514 let mut contract_traits: Vec<TokenStream> = vec![quote! { SchemaType }];
515 if has_non_custom_ser {
516 contract_traits.push(quote! { Linkable });
517 }
518 if has_observable {
519 contract_traits.push(quote! { Observable });
520 }
521 if has_settable {
522 contract_traits.push(quote! { Settable });
523 }
524
525 quote! {
526 use aimdb_core::RecordKey;
527 use aimdb_data_contracts::{#(#contract_traits),*};
528 use serde::{Deserialize, Serialize};
529 }
530}
531
532fn emit_imports(state: &ArchitectureState) -> TokenStream {
534 let has_non_custom_ser = state.records.iter().any(|r| {
535 r.serialization.as_ref().unwrap_or(&SerializationType::Json) != &SerializationType::Custom
536 });
537 let has_observable = state.records.iter().any(|r| r.observable.is_some());
538 let has_settable = state
539 .records
540 .iter()
541 .any(|r| r.fields.iter().any(|f| f.settable));
542
543 let mut contract_traits: Vec<TokenStream> = vec![quote! { SchemaType }];
545 if has_non_custom_ser {
546 contract_traits.push(quote! { Linkable });
547 }
548 if has_observable {
549 contract_traits.push(quote! { Observable });
550 }
551 if has_settable {
552 contract_traits.push(quote! { Settable });
553 }
554
555 quote! {
556 use aimdb_core::buffer::BufferCfg;
557 use aimdb_core::builder::AimDbBuilder;
558 use aimdb_core::RecordKey;
559 use aimdb_data_contracts::{#(#contract_traits),*};
560 use aimdb_executor::Spawn;
561 use serde::{Deserialize, Serialize};
562 }
563}
564
565fn emit_value_struct(rec: &RecordDef) -> TokenStream {
568 let struct_name = format_ident!("{}Value", rec.name);
569 let doc = format!("Value type for `{}`.", rec.name);
570
571 let fields: Vec<TokenStream> = if rec.fields.is_empty() {
572 vec![emit_todo_field(
573 "add fields — use `propose_record` to define them via the architecture agent",
574 )]
575 } else {
576 rec.fields
577 .iter()
578 .map(|f| {
579 let fname = format_ident!("{}", f.name);
580 let ftype: syn::Type = syn::parse_str(&f.field_type).unwrap_or_else(|_| {
581 panic!("invalid type `{}` for field `{}`", f.field_type, f.name)
582 });
583 if f.description.is_empty() {
584 quote! { pub #fname: #ftype, }
585 } else {
586 let desc = &f.description;
587 quote! {
588 #[doc = #desc]
589 pub #fname: #ftype,
590 }
591 }
592 })
593 .collect()
594 };
595
596 quote! {
597 #[doc = #doc]
598 #[derive(Debug, Clone, Serialize, Deserialize)]
599 pub struct #struct_name {
600 #(#fields)*
601 }
602 }
603}
604
605fn emit_todo_field(msg: &str) -> TokenStream {
607 let doc = format!("TODO: {msg}");
608 quote! {
609 #[doc = #doc]
610 pub _placeholder: (),
611 }
612}
613
614fn emit_key_enum(rec: &RecordDef) -> TokenStream {
617 let enum_name = format_ident!("{}Key", rec.name);
618 let connector = rec.connectors.first();
622
623 let key_prefix_attr = if !rec.key_prefix.is_empty() {
624 let prefix = &rec.key_prefix;
625 quote! { #[key_prefix = #prefix] }
626 } else {
627 quote! {}
628 };
629
630 let variants: Vec<TokenStream> = if rec.key_variants.is_empty() {
631 let doc = "TODO: add key variants — use the architecture agent to resolve them";
632 vec![quote! {
633 #[doc = #doc]
634 _Placeholder,
635 }]
636 } else {
637 rec.key_variants
638 .iter()
639 .map(|variant_str| {
640 let variant_name = format_ident!("{}", to_pascal_case(variant_str));
641 let link_attr = connector.map(|conn| {
642 let addr = conn.url.replace("{variant}", variant_str);
643 quote! { #[link_address = #addr] }
644 });
645 quote! {
646 #[key = #variant_str]
647 #link_attr
648 #variant_name,
649 }
650 })
651 .collect()
652 };
653
654 let address_fns = emit_connector_address_fns(rec);
655
656 quote! {
657 #[derive(Debug, RecordKey, Clone, Copy, PartialEq, Eq)]
658 #key_prefix_attr
659 pub enum #enum_name {
660 #(#variants)*
661 }
662
663 #address_fns
664 }
665}
666
667fn emit_connector_address_fns(rec: &RecordDef) -> TokenStream {
674 if rec.connectors.len() <= 1 || rec.key_variants.is_empty() {
675 return quote! {};
676 }
677
678 let key_type = format_ident!("{}Key", rec.name);
679 let record_snake = to_snake_case(&rec.name);
680
681 let fns: Vec<TokenStream> = rec
682 .connectors
683 .iter()
684 .skip(1) .map(|conn| {
686 let fn_name = format_ident!("{}_{}_address", record_snake, conn.protocol);
687 let doc = format!(
688 "Link address for `{}` — {} connector (`{}`).",
689 rec.name,
690 conn.protocol,
691 conn.direction_label(),
692 );
693
694 let arms: Vec<TokenStream> = rec
695 .key_variants
696 .iter()
697 .map(|variant_str| {
698 let variant_ident = format_ident!("{}", to_pascal_case(variant_str));
699 let addr = conn.url.replace("{variant}", variant_str);
700 quote! { #key_type::#variant_ident => Some(#addr), }
701 })
702 .collect();
703
704 quote! {
705 #[doc = #doc]
706 pub fn #fn_name(key: &#key_type) -> Option<&'static str> {
707 match key {
708 #(#arms)*
709 }
710 }
711 }
712 })
713 .collect();
714
715 quote! { #(#fns)* }
716}
717
718fn emit_configure_schema(state: &ArchitectureState) -> TokenStream {
721 let record_blocks: Vec<TokenStream> = state
722 .records
723 .iter()
724 .map(emit_record_configure_block)
725 .collect();
726
727 quote! {
728 pub fn configure_schema<R: Spawn + 'static>(builder: &mut AimDbBuilder<R>) {
735 #(#record_blocks)*
736 }
737 }
738}
739
/// Emits the `configure_schema` fragment for a single record: a loop over its
/// key variants that registers the buffer config and, when every connector's
/// link address resolves, the serializer/deserializer link chain.
fn emit_record_configure_block(rec: &RecordDef) -> TokenStream {
    // Without key variants there is nothing to register; emit a marker
    // statement so the TODO survives into the generated source.
    if rec.key_variants.is_empty() {
        let msg = format!("TODO: {}: no key variants defined yet", rec.name);
        return quote! {
            let _ = (#msg,);
        };
    }

    let value_type = format_ident!("{}Value", rec.name);
    let key_type = format_ident!("{}Key", rec.name);
    let buffer_tokens = rec.buffer.to_tokens(rec.capacity);

    let variant_idents: Vec<syn::Ident> = rec
        .key_variants
        .iter()
        .map(|v| format_ident!("{}", to_pascal_case(v)))
        .collect();

    // serialization = "custom" means the user supplies the (de)serializers,
    // so connector chains become TODO markers (see emit_connector_chain).
    let is_custom = rec
        .serialization
        .as_ref()
        .map(|s| s == &SerializationType::Custom)
        .unwrap_or(false);

    // No connectors: plain buffer registration for every key variant.
    if rec.connectors.is_empty() {
        return quote! {
            for key in [
                #(#key_type::#variant_idents,)*
            ] {
                builder.configure::<#value_type>(key, |reg| {
                    reg.buffer(#buffer_tokens);
                });
            }
        };
    }

    let record_snake = to_snake_case(&rec.name);

    // addr_0 comes from the key's own `link_address` attribute; addr_1..n
    // use the per-protocol resolver fns from emit_connector_address_fns.
    // These `let` bindings are emitted inside the generated `for key` loop,
    // before the `builder.configure` closure.
    let addr_extractions: Vec<TokenStream> = rec
        .connectors
        .iter()
        .enumerate()
        .map(|(i, conn)| {
            let addr_var = format_ident!("addr_{}", i);
            if i == 0 {
                quote! {
                    let #addr_var = key.link_address().map(|s| s.to_string());
                }
            } else {
                let resolver_fn = format_ident!("{}_{}_address", record_snake, conn.protocol);
                quote! {
                    let #addr_var = #resolver_fn(&key).map(|s| s.to_string());
                }
            }
        })
        .collect();

    // The chain references addr_0..addr_n by name; those idents are bound by
    // the if-let / match below.
    let linked_chain =
        emit_connector_chain(&rec.connectors, &value_type, &buffer_tokens, is_custom);
    let addr_conditions: Vec<TokenStream> = (0..rec.connectors.len())
        .map(|i| {
            let addr_var = format_ident!("addr_{}", i);
            quote! { #addr_var.as_deref() }
        })
        .collect();

    // Single connector: if-let on addr_0 (re-binding it as &str). Multiple:
    // match on the tuple so the chain runs only when every address resolved;
    // otherwise fall back to a plain buffer registration.
    let body = if rec.connectors.len() == 1 {
        let cond = &addr_conditions[0];
        quote! {
            if let Some(addr_0) = #cond {
                #linked_chain
            } else {
                reg.buffer(#buffer_tokens);
            }
        }
    } else {
        let some_bindings: Vec<TokenStream> = (0..rec.connectors.len())
            .map(|i| {
                let binding = format_ident!("addr_{}", i);
                quote! { Some(#binding) }
            })
            .collect();
        quote! {
            match (#(#addr_conditions),*) {
                (#(#some_bindings),*) => {
                    #linked_chain
                }
                _ => {
                    reg.buffer(#buffer_tokens);
                }
            }
        }
    };

    quote! {
        for key in [
            #(#key_type::#variant_idents,)*
        ] {
            #(#addr_extractions)*
            builder.configure::<#value_type>(key, |reg| {
                #body
            });
        }
    }
}
861
/// Builds the registration expression for a record's connectors: starts from
/// `reg.buffer(..)` and appends one `.link_from(..)` / `.link_to(..)` segment
/// per connector, referencing the `addr_0..addr_n` bindings created by the
/// caller (emit_record_configure_block).
///
/// With `is_custom`, no link segments are emitted; each connector instead
/// contributes a `let _ = ("TODO…", addr_i)` marker statement so the user
/// knows to wire serialization manually.
fn emit_connector_chain(
    connectors: &[ConnectorDef],
    value_type: &syn::Ident,
    buffer_tokens: &TokenStream,
    is_custom: bool,
) -> TokenStream {
    let mut chain = quote! { reg.buffer(#buffer_tokens) };

    for (i, conn) in connectors.iter().enumerate() {
        let addr_var = format_ident!("addr_{}", i);

        if is_custom {
            let todo_comment = match conn.direction {
                ConnectorDirection::Outbound => {
                    "TODO: chain .link_to(...).with_serializer(...) — serialization = \"custom\""
                }
                ConnectorDirection::Inbound => {
                    "TODO: chain .link_from(...).with_deserializer(...) — serialization = \"custom\""
                }
            };
            // Terminate the accumulated expression and append the marker.
            chain = quote! {
                #chain;
                let _ = (#todo_comment, #addr_var)
            };
        } else {
            match conn.direction {
                ConnectorDirection::Inbound => {
                    chain = quote! {
                        #chain
                        .link_from(#addr_var)
                        .with_deserializer(#value_type::from_bytes)
                        .finish()
                    };
                }
                ConnectorDirection::Outbound => {
                    chain = quote! {
                        #chain
                        .link_to(#addr_var)
                        .with_serializer(|v: &#value_type| {
                            v.to_bytes()
                                .map_err(|_| aimdb_core::connector::SerializeError::InvalidData)
                        })
                        .finish()
                    };
                }
            }
        }
    }

    // Trailing semicolon turns the accumulated expression into a statement.
    quote! { #chain; }
}
922
923fn emit_schema_type_impl(rec: &RecordDef) -> TokenStream {
926 let struct_name = format_ident!("{}Value", rec.name);
927 let schema_name = to_snake_case(&rec.name);
928 let version = proc_macro2::Literal::u32_unsuffixed(rec.schema_version.unwrap_or(1));
929
930 quote! {
931 impl SchemaType for #struct_name {
932 const NAME: &'static str = #schema_name;
933 const VERSION: u32 = #version;
934 }
935 }
936}
937
938fn emit_linkable_impl(rec: &RecordDef) -> TokenStream {
939 let ser = rec
940 .serialization
941 .as_ref()
942 .unwrap_or(&SerializationType::Json);
943
944 match ser {
945 SerializationType::Custom => quote! {},
946 SerializationType::Json => emit_linkable_json(rec),
947 SerializationType::Postcard => emit_linkable_postcard(rec),
948 }
949}
950
/// Emits a serde_json-backed `Linkable` impl for the record's value struct.
///
/// The generated methods are gated on the `std` feature: under `no_std` they
/// return an error string directing the user to enable `std` or switch the
/// record to postcard serialization.
fn emit_linkable_json(rec: &RecordDef) -> TokenStream {
    let struct_name = format_ident!("{}Value", rec.name);
    quote! {
        impl Linkable for #struct_name {
            fn to_bytes(&self) -> Result<alloc::vec::Vec<u8>, alloc::string::String> {
                #[cfg(feature = "std")]
                {
                    serde_json::to_vec(self)
                        .map_err(|e| alloc::format!("serialize {}: {e}", Self::NAME))
                }
                #[cfg(not(feature = "std"))]
                {
                    Err(alloc::string::String::from(
                        "no_std serialization not available — enable the std feature or use postcard",
                    ))
                }
            }

            fn from_bytes(data: &[u8]) -> Result<Self, alloc::string::String> {
                #[cfg(feature = "std")]
                {
                    serde_json::from_slice(data)
                        .map_err(|e| alloc::format!("deserialize {}: {e}", Self::NAME))
                }
                #[cfg(not(feature = "std"))]
                {
                    let _ = data;
                    Err(alloc::string::String::from(
                        "no_std deserialization not available — enable the std feature or use postcard",
                    ))
                }
            }
        }
    }
}
986
/// Emits a postcard-backed `Linkable` impl for the record's value struct.
/// Unlike the JSON variant, postcard works under both `std` and `no_std`,
/// so no feature gating is emitted.
fn emit_linkable_postcard(rec: &RecordDef) -> TokenStream {
    let struct_name = format_ident!("{}Value", rec.name);
    quote! {
        impl Linkable for #struct_name {
            fn to_bytes(&self) -> Result<alloc::vec::Vec<u8>, alloc::string::String> {
                postcard::to_allocvec(self)
                    .map_err(|e| alloc::format!("serialize {}: {e}", Self::NAME))
            }

            fn from_bytes(data: &[u8]) -> Result<Self, alloc::string::String> {
                postcard::from_bytes(data)
                    .map_err(|e| alloc::format!("deserialize {}: {e}", Self::NAME))
            }
        }
    }
}
1003
/// Emits an `Observable` impl for records that declare an observable block.
///
/// Returns `None` when the record is not observable, when the configured
/// `signal_field` does not exist on the record, or when the signal field's
/// type fails to parse — in those cases the impl is silently skipped.
fn emit_observable_impl(rec: &RecordDef) -> Option<TokenStream> {
    let obs = rec.observable.as_ref()?;
    let struct_name = format_ident!("{}Value", rec.name);

    let signal_field = rec.fields.iter().find(|f| f.name == obs.signal_field)?;
    let signal_type: syn::Type = syn::parse_str(&signal_field.field_type).ok()?;
    let signal_ident = format_ident!("{}", obs.signal_field);

    let icon = &obs.icon;
    let unit = &obs.unit;

    // Heuristic: a `u64` field with one of these names is treated as the
    // record's timestamp and appended to the generated log line.
    let timestamp_names = ["timestamp", "computed_at", "fetched_at"];
    let timestamp_field = rec
        .fields
        .iter()
        .find(|f| f.field_type == "u64" && timestamp_names.contains(&f.name.as_str()));

    // Two log formats: with and without the trailing "at <timestamp>".
    // Note the `{:.1}` — the signal is formatted with one decimal place.
    let format_log_body = if let Some(ts) = timestamp_field {
        let ts_ident = format_ident!("{}", ts.name);
        quote! {
            alloc::format!(
                "{} [{}] {}: {:.1}{} at {}",
                Self::ICON,
                node_id,
                Self::NAME,
                self.signal(),
                Self::UNIT,
                self.#ts_ident,
            )
        }
    } else {
        quote! {
            alloc::format!(
                "{} [{}] {}: {:.1}{}",
                Self::ICON,
                node_id,
                Self::NAME,
                self.signal(),
                Self::UNIT,
            )
        }
    };

    Some(quote! {
        impl Observable for #struct_name {
            type Signal = #signal_type;
            const ICON: &'static str = #icon;
            const UNIT: &'static str = #unit;

            fn signal(&self) -> #signal_type {
                self.#signal_ident
            }

            fn format_log(&self, node_id: &str) -> alloc::string::String {
                #format_log_body
            }
        }
    })
}
1065
1066fn emit_settable_impl(rec: &RecordDef) -> Option<TokenStream> {
1067 let settable_fields: Vec<_> = rec.fields.iter().filter(|f| f.settable).collect();
1068 if settable_fields.is_empty() {
1069 return None;
1070 }
1071
1072 let struct_name = format_ident!("{}Value", rec.name);
1073
1074 let settable_types: Vec<syn::Type> = settable_fields
1076 .iter()
1077 .map(|f| syn::parse_str(&f.field_type).unwrap())
1078 .collect();
1079
1080 let value_type: TokenStream = if settable_types.len() == 1 {
1081 let t = &settable_types[0];
1082 quote! { #t }
1083 } else {
1084 quote! { (#(#settable_types),*) }
1085 };
1086
1087 let timestamp_names = ["timestamp", "computed_at", "fetched_at"];
1089 let timestamp_field = rec
1090 .fields
1091 .iter()
1092 .find(|f| f.field_type == "u64" && timestamp_names.contains(&f.name.as_str()));
1093
1094 let mut settable_idx = 0usize;
1096 let field_assignments: Vec<TokenStream> = rec
1097 .fields
1098 .iter()
1099 .map(|f| {
1100 let fname = format_ident!("{}", f.name);
1101 if timestamp_field.map(|tf| tf.name == f.name).unwrap_or(false) && !f.settable {
1102 quote! { #fname: timestamp, }
1104 } else if f.settable {
1105 let assignment = if settable_fields.len() == 1 {
1106 quote! { value }
1107 } else {
1108 let idx = syn::Index::from(settable_idx);
1109 quote! { value.#idx }
1110 };
1111 settable_idx += 1;
1112 quote! { #fname: #assignment, }
1113 } else {
1114 quote! { #fname: Default::default(), }
1116 }
1117 })
1118 .collect();
1119
1120 Some(quote! {
1121 impl Settable for #struct_name {
1122 type Value = #value_type;
1123
1124 fn set(value: Self::Value, timestamp: u64) -> Self {
1125 Self {
1126 #(#field_assignments)*
1127 }
1128 }
1129 }
1130 })
1131}
1132
/// Converts `CamelCase`/`PascalCase` to `snake_case`: inserts `_` before
/// every uppercase letter except a leading one, then lowercases everything.
/// Consecutive capitals each get their own underscore (`HTTPServer` →
/// `h_t_t_p_server`).
pub fn to_snake_case(s: &str) -> String {
    let mut out = String::with_capacity(s.len() + 4);
    for (i, ch) in s.char_indices() {
        if i > 0 && ch.is_uppercase() {
            out.push('_');
        }
        // to_lowercase yields an iterator: some chars lowercase to several.
        out.extend(ch.to_lowercase());
    }
    out
}
1156
/// Converts `kebab-case`/`snake_case` to `PascalCase`: splits on `-` and
/// `_`, uppercases the first character of each part, and concatenates.
/// Empty parts (e.g. from `a__b`) contribute nothing.
pub fn to_pascal_case(s: &str) -> String {
    let mut out = String::with_capacity(s.len());
    for part in s.split(['-', '_']) {
        let mut chars = part.chars();
        if let Some(first) = chars.next() {
            // to_uppercase yields an iterator: some chars uppercase to several.
            out.extend(first.to_uppercase());
            out.push_str(chars.as_str());
        }
    }
    out
}
1181
1182fn hub_task_names(state: &ArchitectureState) -> Vec<String> {
1196 use std::collections::HashSet;
1198 let external_producers: HashSet<&str> = state
1199 .records
1200 .iter()
1201 .filter(|r| {
1202 r.connectors
1203 .iter()
1204 .any(|c| matches!(c.direction, ConnectorDirection::Inbound))
1205 })
1206 .flat_map(|r| r.producers.iter().map(|p| p.as_str()))
1207 .collect();
1208
1209 let mut seen = HashSet::new();
1211 let mut tasks: Vec<String> = state
1212 .records
1213 .iter()
1214 .filter(|r| {
1215 !r.connectors
1216 .iter()
1217 .any(|c| matches!(c.direction, ConnectorDirection::Inbound))
1218 })
1219 .flat_map(|r| r.producers.iter().cloned())
1220 .filter(|p| !external_producers.contains(p.as_str()))
1221 .filter(|p| seen.insert(p.clone()))
1222 .collect();
1223
1224 for rec in &state.records {
1226 for consumer in &rec.consumers {
1227 let consumer = consumer.clone();
1228 if !external_producers.contains(consumer.as_str()) && seen.insert(consumer.clone()) {
1229 tasks.push(consumer);
1230 }
1231 }
1232 }
1233
1234 tasks
1235}
1236
1237pub fn generate_hub_schema_rs(state: &ArchitectureState) -> String {
1242 let project = state
1243 .project
1244 .as_ref()
1245 .expect("generate_hub_schema_rs requires [project] block in state.toml");
1246 let common_crate = format_ident!("{}", project.name.replace('-', "_") + "_common");
1247
1248 let configure_fn = emit_configure_schema(state);
1249
1250 let file_tokens = quote! {
1251 use aimdb_core::buffer::BufferCfg;
1252 use aimdb_core::builder::AimDbBuilder;
1253 use aimdb_executor::Spawn;
1254 use #common_crate::*;
1255
1256 #configure_fn
1257 };
1258
1259 let header = "// @generated — do not edit manually.\n\
1260// Source: .aimdb/state.toml — regenerate with `aimdb generate --hub`.\n\n";
1261
1262 let syntax_tree = syn::parse2(file_tokens).expect("generated tokens should be valid Rust");
1263 format!("{header}{}", prettyplease::unparse(&syntax_tree))
1264}
1265
1266pub fn generate_hub_cargo_toml(state: &ArchitectureState) -> String {
1268 let project = state
1269 .project
1270 .as_ref()
1271 .expect("generate_hub_cargo_toml requires [project] block in state.toml");
1272 let hub_crate = format!("{}-hub", project.name);
1273 let common_crate_name = format!("{}-common", project.name);
1274 let edition = project.edition.as_deref().unwrap_or("2024");
1275
1276 let has_mqtt = state
1277 .records
1278 .iter()
1279 .any(|r| r.connectors.iter().any(|c| c.protocol == "mqtt"));
1280 let has_knx = state
1281 .records
1282 .iter()
1283 .any(|r| r.connectors.iter().any(|c| c.protocol == "knx"));
1284 let has_ws = state
1285 .records
1286 .iter()
1287 .any(|r| r.connectors.iter().any(|c| c.protocol == "ws"));
1288
1289 let mut connector_deps = String::new();
1290 if has_mqtt {
1291 connector_deps.push_str(
1292 "aimdb-mqtt-connector = { version = \"0.5\", features = [\"tokio-runtime\"] }\n",
1293 );
1294 }
1295 if has_knx {
1296 connector_deps.push_str(
1297 "aimdb-knx-connector = { version = \"0.5\", features = [\"tokio-runtime\"] }\n",
1298 );
1299 }
1300 if has_ws {
1301 connector_deps.push_str(
1302 "aimdb-websocket-connector = { version = \"0.5\", features = [\"tokio-runtime\"] }\n",
1303 );
1304 }
1305
1306 format!(
1307 "# @generated — do not edit manually.\n\
1308# Source: .aimdb/state.toml — regenerate with `aimdb generate --hub`\n\
1309[package]\n\
1310name = \"{hub_crate}\"\n\
1311version = \"0.1.0\"\n\
1312edition = \"{edition}\"\n\
1313description = \"Hub binary for {project_name}\"\n\
1314publish = false\n\
1315\n\
1316[[bin]]\n\
1317name = \"{hub_crate}\"\n\
1318path = \"src/main.rs\"\n\
1319\n\
1320[dependencies]\n\
1321{common_crate_name} = {{ path = \"../{common_crate_name}\" }}\n\
1322aimdb-core = {{ version = \"0.5\" }}\n\
1323aimdb-data-contracts = {{ version = \"0.5\", features = [\"linkable\"] }}\n\
1324aimdb-tokio-adapter = {{ version = \"0.5\", features = [\"tokio-runtime\"] }}\n\
1325{connector_deps}\
1326tokio = {{ version = \"1\", features = [\"full\"] }}\n\
1327tracing = \"0.1\"\n\
1328tracing-subscriber = {{ version = \"0.3\", features = [\"env-filter\"] }}\n",
1329 project_name = project.name
1330 )
1331}
1332
1333pub fn generate_hub_main_rs(state: &ArchitectureState) -> String {
1338 let project = state
1339 .project
1340 .as_ref()
1341 .expect("generate_hub_main_rs requires [project] block in state.toml");
1342 let common_crate = format_ident!("{}", project.name.replace('-', "_") + "_common");
1343
1344 let has_mqtt = state
1346 .records
1347 .iter()
1348 .any(|r| r.connectors.iter().any(|c| c.protocol == "mqtt"));
1349 let has_knx = state
1350 .records
1351 .iter()
1352 .any(|r| r.connectors.iter().any(|c| c.protocol == "knx"));
1353 let has_ws = state
1354 .records
1355 .iter()
1356 .any(|r| r.connectors.iter().any(|c| c.protocol == "ws"));
1357
1358 let connector_use_stmts: Vec<TokenStream> = {
1360 let mut v = vec![];
1361 if has_mqtt {
1362 v.push(quote! { use aimdb_mqtt_connector::MqttConnector; });
1363 }
1364 if has_knx {
1365 v.push(quote! { use aimdb_knx_connector::KnxConnector; });
1366 }
1367 if has_ws {
1368 v.push(quote! { use aimdb_websocket_connector::WebSocketConnector; });
1369 }
1370 v
1371 };
1372
1373 let connector_let_stmts: TokenStream = {
1375 let mut ts = TokenStream::new();
1376 if has_mqtt {
1377 ts.extend(quote! {
1378 let mqtt_broker =
1379 std::env::var("MQTT_BROKER").unwrap_or_else(|_| "localhost".to_string());
1380 let mqtt_url = format!("mqtt://{}", mqtt_broker);
1381 });
1382 }
1383 if has_knx {
1384 ts.extend(quote! {
1385 let knx_gateway = std::env::var("KNX_GATEWAY")
1386 .unwrap_or_else(|_| "224.0.23.12:3671".to_string());
1387 });
1388 }
1389 if has_ws {
1390 ts.extend(quote! {
1391 let ws_bind: std::net::SocketAddr = std::env::var("WS_BIND")
1392 .unwrap_or_else(|_| "0.0.0.0:8080".to_string())
1393 .parse()
1394 .expect("invalid WS_BIND address");
1395 });
1396 }
1397 ts
1398 };
1399
1400 let with_connector_calls: Vec<TokenStream> = {
1402 let mut v = vec![];
1403 if has_mqtt {
1404 v.push(quote! { .with_connector(MqttConnector::new(&mqtt_url)) });
1405 }
1406 if has_knx {
1407 v.push(quote! { .with_connector(KnxConnector::new(&knx_gateway)) });
1408 }
1409 if has_ws {
1410 v.push(quote! { .with_connector(WebSocketConnector::new().bind(ws_bind).path("/ws")) });
1411 }
1412 v
1413 };
1414
1415 let record_blocks: Vec<TokenStream> = state
1417 .records
1418 .iter()
1419 .map(|r| emit_hub_record_configure_block(r, state))
1420 .collect();
1421
1422 let log_filter = format!(
1424 "{}_hub=info,aimdb_core=info",
1425 project.name.replace('-', "_")
1426 );
1427 let startup_msg = format!("Starting {} hub", project.name);
1428
1429 let file_tokens = quote! {
1431 use aimdb_core::{buffer::BufferCfg, AimDbBuilder, DbResult, RecordKey};
1432 use aimdb_data_contracts::Linkable;
1433 use aimdb_tokio_adapter::{TokioAdapter, TokioRecordRegistrarExt};
1434 #(#connector_use_stmts)*
1435 use std::sync::Arc;
1436 use #common_crate::*;
1437
1438 mod tasks;
1439 use tasks::*;
1440
1441 #[tokio::main]
1442 async fn main() -> DbResult<()> {
1443 tracing_subscriber::fmt()
1444 .with_env_filter(
1445 tracing_subscriber::EnvFilter::try_from_default_env()
1446 .unwrap_or_else(|_| #log_filter.into()),
1447 )
1448 .init();
1449
1450 tracing::info!(#startup_msg);
1451
1452 #connector_let_stmts
1453
1454 let runtime = Arc::new(TokioAdapter::new()?);
1455
1456 let mut builder = AimDbBuilder::new()
1457 .runtime(runtime)
1458 #(#with_connector_calls)*
1459 ;
1460
1461 #(#record_blocks)*
1462
1463 builder.run().await
1464 }
1465 };
1466
1467 let header = "// @generated — do not edit manually.\n\
1468// Source: .aimdb/state.toml\n\
1469// Regenerate: `aimdb generate --hub`\n\n";
1470
1471 let syntax_tree =
1472 syn::parse2(file_tokens).expect("generate_hub_main_rs: tokens should be valid Rust");
1473 format!("{header}{}", prettyplease::unparse(&syntax_tree))
1474}
1475
1476fn emit_hub_record_configure_block(rec: &RecordDef, state: &ArchitectureState) -> TokenStream {
1483 if rec.key_variants.is_empty() {
1484 let msg = format!("TODO: {}: no key variants defined yet", rec.name);
1485 return quote! { let _ = (#msg,); };
1486 }
1487
1488 let producing_task = state
1490 .tasks
1491 .iter()
1492 .find(|t| t.outputs.iter().any(|o| o.record == rec.name));
1493
1494 match producing_task {
1495 Some(task) => emit_transform_configure_block(rec, task),
1496 None => emit_record_configure_block(rec),
1497 }
1498}
1499
/// Emits per-key-variant `builder.configure` blocks for a record produced by
/// `task`: each variant gets the record's buffer, the task's transform chain
/// (via `build_transform_call`), and — when the record declares an outbound
/// connector — a `link_to`/`with_serializer`/`finish` chain guarded by the
/// variant's `link_address()` at runtime.
fn emit_transform_configure_block(rec: &RecordDef, task: &TaskDef) -> TokenStream {
    let value_type = format_ident!("{}Value", rec.name);
    let key_type = format_ident!("{}Key", rec.name);
    let buffer_tokens = rec.buffer.to_tokens(rec.capacity);

    let has_outbound = rec
        .connectors
        .iter()
        .any(|c| matches!(c.direction, ConnectorDirection::Outbound));
    // Shared outbound fragment; references an `addr` binding that the
    // per-variant block below introduces via `if let Some(addr) = ...`.
    let outbound_chain = if has_outbound {
        quote! {
            .link_to(addr)
            .with_serializer(|v: &#value_type| {
                v.to_bytes()
                    .map_err(|_| aimdb_core::connector::SerializeError::InvalidData)
            })
            .finish()
        }
    } else {
        quote! {}
    };

    let variant_idents: Vec<syn::Ident> = rec
        .key_variants
        .iter()
        .map(|v| format_ident!("{}", to_pascal_case(v)))
        .collect();

    let per_variant: Vec<TokenStream> = variant_idents
        .iter()
        .map(|variant_ident| {
            let transform_call = build_transform_call(task, variant_ident);

            if has_outbound {
                // Clone because the chain is spliced once per variant.
                let outbound = outbound_chain.clone();
                quote! {
                    {
                        let link_addr = #key_type::#variant_ident
                            .link_address()
                            .map(|s| s.to_string());
                        builder.configure::<#value_type>(#key_type::#variant_ident, |reg| {
                            if let Some(addr) = link_addr.as_deref() {
                                reg.buffer(#buffer_tokens)
                                    #transform_call
                                    #outbound;
                            } else {
                                // No link address for this variant: still
                                // register buffer + transform, skip linking.
                                reg.buffer(#buffer_tokens)
                                    #transform_call;
                            }
                        });
                    }
                }
            } else {
                quote! {
                    builder.configure::<#value_type>(#key_type::#variant_ident, |reg| {
                        reg.buffer(#buffer_tokens)
                            #transform_call;
                    });
                }
            }
        })
        .collect();

    quote! { #(#per_variant)* }
}
1571
1572fn build_transform_call(task: &TaskDef, variant_ident: &syn::Ident) -> TokenStream {
1577 if task.inputs.len() != 1 {
1578 let handler_ident = format_ident!("{}_handler", task.name);
1580 let input_calls: Vec<TokenStream> = task
1581 .inputs
1582 .iter()
1583 .map(|inp| {
1584 let in_val = format_ident!("{}Value", inp.record);
1585 let in_key = format_ident!("{}Key", inp.record);
1586 quote! { .input::<#in_val>(#in_key::#variant_ident) }
1587 })
1588 .collect();
1589 quote! {
1590 .transform_join(|j| {
1591 j #(#input_calls)*
1592 .with_state(())
1593 .on_trigger(#handler_ident)
1594 })
1595 }
1596 } else {
1597 let handler_ident = format_ident!("{}_transform", task.name);
1599 let inp = &task.inputs[0];
1600 let in_val = format_ident!("{}Value", inp.record);
1601 let in_key = format_ident!("{}Key", inp.record);
1602 quote! {
1603 .transform::<#in_val, _>(#in_key::#variant_ident, |b| b.map(#handler_ident))
1604 }
1605 }
1606}
1607
/// Scaffolds the hub's `src/tasks.rs` with one stub per task.
///
/// Stub shape is chosen from the task's input/output arity in `[[tasks]]`:
/// join handler (many-in), map transform (one-in), producer (zero-in), or
/// consumer (zero-out). Task names discovered via `hub_task_names` that have
/// no `[[tasks]]` entry get an untyped async stub. Per the emitted header,
/// this file is scaffolded once and not overwritten.
///
/// # Panics
/// Panics if `state` has no `[project]` block (needed for the common-crate
/// import line).
pub fn generate_hub_tasks_rs(state: &ArchitectureState) -> String {
    let project = state
        .project
        .as_ref()
        .expect("generate_hub_tasks_rs requires [project] block in state.toml");
    let common_crate = format!("{}_common", project.name.replace('-', "_"));

    let mut fns = String::new();
    // Task names that already received a typed stub from a [[tasks]] entry.
    let mut handled: std::collections::HashSet<String> = std::collections::HashSet::new();

    for task in &state.tasks {
        handled.insert(task.name.clone());
        let n_in = task.inputs.len();
        let n_out = task.outputs.len();

        // First output/input record decides the stub's value types;
        // "()" when the task has none on that side.
        let out_t = task
            .outputs
            .first()
            .map(|o| format!("{}Value", o.record))
            .unwrap_or_else(|| "()".to_string());
        let in_t = task
            .inputs
            .first()
            .map(|i| format!("{}Value", i.record))
            .unwrap_or_else(|| "()".to_string());

        // Task description becomes the stub's leading doc comment.
        if !task.description.is_empty() {
            fns.push_str(&format!("/// {}\n", task.description));
        }

        if n_in > 1 && n_out >= 1 {
            // Fan-in: join handler fired whenever any input updates.
            let handler = format!("{}_handler", task.name);
            let inputs_doc = task
                .inputs
                .iter()
                .enumerate()
                .map(|(i, inp)| format!(" index {i} = {}", inp.record))
                .collect::<Vec<_>>()
                .join(", ");
            fns.push_str(&format!(
                "/// Join handler — match `trigger.index()` to identify which input fired:\n\
/// {inputs_doc}\n\
pub fn {handler}(\n\
    _trigger: aimdb_core::transform::JoinTrigger,\n\
    _state: &mut (),\n\
    _producer: &aimdb_core::Producer<{out_t}, TokioAdapter>,\n\
) -> std::pin::Pin<Box<dyn std::future::Future<Output = ()> + Send + 'static>> {{\n\
    Box::pin(async move {{ todo!(\"implement {handler}\") }})\n\
}}\n\n"
            ));
        } else if n_in == 1 && n_out >= 1 {
            // One-to-one: synchronous map transform, Option return to skip.
            let handler = format!("{}_transform", task.name);
            let input_rec = &task.inputs[0].record;
            let output_rec = task
                .outputs
                .first()
                .map(|o| o.record.as_str())
                .unwrap_or("?");
            fns.push_str(&format!(
                "/// Transform: {input_rec} → {output_rec}\n\
/// Return `Some(value)` to emit, `None` to skip this input.\n\
pub fn {handler}(input: &{in_t}) -> Option<{out_t}> {{\n\
    let _ = input;\n\
    todo!(\"implement {handler}\")\n\
}}\n\n"
            ));
        } else if n_in == 0 && n_out >= 1 {
            // Source task: async producer loop stub.
            fns.push_str(&format!(
                "pub async fn {}(\n\
    _ctx: aimdb_core::RuntimeContext<TokioAdapter>,\n\
    _producer: aimdb_core::Producer<{out_t}, TokioAdapter>,\n\
) {{\n\
    todo!(\"implement {}\")\n\
}}\n\n",
                task.name, task.name
            ));
        } else if n_in >= 1 && n_out == 0 {
            // Sink task: async consumer loop stub.
            fns.push_str(&format!(
                "pub async fn {}(\n\
    _ctx: aimdb_core::RuntimeContext<TokioAdapter>,\n\
    _consumer: aimdb_core::Consumer<{in_t}, TokioAdapter>,\n\
) {{\n\
    todo!(\"implement {}\")\n\
}}\n\n",
                task.name, task.name
            ));
        }
        // NOTE(review): a task with no inputs AND no outputs is marked
        // handled above yet emits no stub at all — confirm that is intended.
    }

    // Untyped fallback stubs for hub tasks without a [[tasks]] entry.
    for task_name in hub_task_names(state) {
        if handled.contains(&task_name) {
            continue;
        }
        fns.push_str(&format!(
            "/// Hub task: add a `[[tasks]]` entry in state.toml for a typed stub.\n\
pub async fn {task_name}() {{\n\
    todo!(\"implement {task_name}\")\n\
}}\n\n"
        ));
    }

    format!(
        "// Implement task bodies; signatures are derived from state.toml [[tasks]].\n\
// This file is scaffolded once — it will not be overwritten on subsequent runs.\n\
// Regenerate signatures: delete this file, then run `aimdb generate --hub`.\n\
\n\
use aimdb_tokio_adapter::TokioAdapter;\n\
use {common_crate}::*;\n\
\n\
{fns}"
    )
}
1738
#[cfg(test)]
mod tests {
    use super::*;
    use crate::state::ArchitectureState;

    // Fixture: two records — TemperatureReading (SpmcRing, outbound MQTT,
    // simple variants) and OtaCommand (Mailbox, inbound MQTT, kebab-case
    // variants) — exercising both connector directions and name casing.
    const SAMPLE_TOML: &str = r#"
[meta]
aimdb_version = "0.5.0"
created_at = "2026-02-22T14:00:00Z"
last_modified = "2026-02-22T14:33:00Z"

[[records]]
name = "TemperatureReading"
buffer = "SpmcRing"
capacity = 256
key_prefix = "sensors.temp."
key_variants = ["indoor", "outdoor", "garage"]
producers = ["sensor_task"]
consumers = ["dashboard", "anomaly_detector"]

[[records.fields]]
name = "celsius"
type = "f64"
description = "Temperature in degrees Celsius"

[[records.fields]]
name = "humidity_percent"
type = "f64"
description = "Relative humidity 0-100"

[[records.fields]]
name = "timestamp"
type = "u64"
description = "Unix timestamp in milliseconds"

[[records.connectors]]
protocol = "mqtt"
direction = "outbound"
url = "mqtt://sensors/temp/{variant}"

[[records]]
name = "OtaCommand"
buffer = "Mailbox"
key_prefix = "device.ota."
key_variants = ["gateway-01", "sensor-hub-01"]
producers = ["cloud_ota"]
consumers = ["updater"]

[[records.fields]]
name = "action"
type = "String"
description = "Command: update, rollback, reboot"

[[records.fields]]
name = "target_version"
type = "String"
description = "Target firmware version"

[[records.connectors]]
protocol = "mqtt"
direction = "inbound"
url = "mqtt://ota/cmd/{variant}"
"#;

    // Test-only helper: parse the fixture, panicking on malformed TOML.
    fn state() -> ArchitectureState {
        ArchitectureState::from_toml(SAMPLE_TOML).unwrap()
    }

    // Full generated output for the SAMPLE_TOML fixture.
    fn generated() -> String {
        generate_rust(&state())
    }

    #[test]
    fn has_generated_header() {
        let out = generated();
        assert!(
            out.contains("@generated"),
            "Missing @generated header:\n{out}"
        );
    }

    #[test]
    fn has_imports() {
        let out = generated();
        assert!(
            out.contains("use aimdb_core::buffer::BufferCfg;"),
            "Missing BufferCfg import:\n{out}"
        );
        assert!(
            out.contains("use aimdb_core::builder::AimDbBuilder;"),
            "Missing AimDbBuilder import:\n{out}"
        );
        assert!(
            out.contains("use aimdb_core::RecordKey;"),
            "Missing RecordKey import:\n{out}"
        );
        assert!(
            out.contains("use aimdb_executor::Spawn;"),
            "Missing Spawn import:\n{out}"
        );
        assert!(
            out.contains("use serde::{Deserialize, Serialize};"),
            "Missing serde import:\n{out}"
        );
    }

    #[test]
    fn value_struct_generated() {
        let out = generated();
        assert!(
            out.contains("pub struct TemperatureReadingValue"),
            "Missing TemperatureReadingValue struct:\n{out}"
        );
        assert!(
            out.contains("pub celsius: f64,"),
            "Missing celsius field:\n{out}"
        );
        assert!(
            out.contains("pub humidity_percent: f64,"),
            "Missing humidity_percent field:\n{out}"
        );
        assert!(
            out.contains("pub timestamp: u64,"),
            "Missing timestamp field:\n{out}"
        );
        assert!(
            out.contains("#[derive(Debug, Clone, Serialize, Deserialize)]"),
            "Missing derives:\n{out}"
        );
    }

    #[test]
    fn key_enum_generated() {
        let out = generated();
        assert!(
            out.contains("pub enum TemperatureReadingKey"),
            "Missing key enum:\n{out}"
        );
        assert!(
            out.contains("#[derive(Debug, RecordKey, Clone, Copy, PartialEq, Eq)]"),
            "Missing RecordKey derive:\n{out}"
        );
        assert!(
            out.contains("#[key_prefix = \"sensors.temp.\"]"),
            "Missing key_prefix:\n{out}"
        );
        assert!(
            out.contains("#[key = \"indoor\"]"),
            "Missing indoor key attr:\n{out}"
        );
        assert!(
            out.contains("#[key = \"outdoor\"]"),
            "Missing outdoor key attr:\n{out}"
        );
        assert!(
            out.contains("#[key = \"garage\"]"),
            "Missing garage key attr:\n{out}"
        );
        assert!(out.contains("Indoor,"), "Missing Indoor variant:\n{out}");
        assert!(out.contains("Outdoor,"), "Missing Outdoor variant:\n{out}");
        assert!(out.contains("Garage,"), "Missing Garage variant:\n{out}");
    }

    // The {variant} placeholder in the connector URL must be expanded once
    // per key variant in the emitted #[link_address] attributes.
    #[test]
    fn link_address_substituted_per_variant() {
        let out = generated();
        assert!(
            out.contains("#[link_address = \"mqtt://sensors/temp/indoor\"]"),
            "link_address not substituted for indoor:\n{out}"
        );
        assert!(
            out.contains("#[link_address = \"mqtt://sensors/temp/outdoor\"]"),
            "link_address not substituted for outdoor:\n{out}"
        );
        assert!(
            out.contains("#[link_address = \"mqtt://sensors/temp/garage\"]"),
            "link_address not substituted for garage:\n{out}"
        );
    }

    #[test]
    fn kebab_variants_to_pascal_case() {
        let out = generated();
        assert!(
            out.contains("pub enum OtaCommandKey"),
            "Missing OtaCommandKey enum:\n{out}"
        );
        assert!(
            out.contains("Gateway01,"),
            "gateway-01 should become Gateway01:\n{out}"
        );
        assert!(
            out.contains("SensorHub01,"),
            "sensor-hub-01 should become SensorHub01:\n{out}"
        );
    }

    #[test]
    fn configure_schema_function_present() {
        let out = generated();
        assert!(
            out.contains(
                "pub fn configure_schema<R: Spawn + 'static>(builder: &mut AimDbBuilder<R>)"
            ),
            "Missing configure_schema function:\n{out}"
        );
    }

    #[test]
    fn configure_schema_spmc_buffer() {
        let out = generated();
        assert!(
            out.contains("BufferCfg::SpmcRing"),
            "Missing SpmcRing buffer call:\n{out}"
        );
        assert!(
            out.contains("capacity: 256"),
            "Missing capacity value:\n{out}"
        );
    }

    #[test]
    fn configure_schema_mailbox_buffer() {
        let out = generated();
        assert!(
            out.contains("BufferCfg::Mailbox"),
            "Missing Mailbox buffer call:\n{out}"
        );
    }

    #[test]
    fn configure_schema_outbound_link_to_with_serializer() {
        let out = generated();
        assert!(
            out.contains("link_to(addr_0)"),
            "Missing link_to call:\n{out}"
        );
        assert!(
            out.contains("with_serializer"),
            "Missing with_serializer call:\n{out}"
        );
        assert!(out.contains(".finish()"), "Missing .finish() call:\n{out}");
    }

    #[test]
    fn configure_schema_inbound_link_from_with_deserializer() {
        let out = generated();
        assert!(
            out.contains("link_from(addr_0)"),
            "Missing link_from call:\n{out}"
        );
        assert!(
            out.contains("with_deserializer(OtaCommandValue::from_bytes)"),
            "Missing with_deserializer call:\n{out}"
        );
    }

    #[test]
    fn configure_schema_key_variants_iterated() {
        let out = generated();
        assert!(
            out.contains("TemperatureReadingKey::Indoor"),
            "Missing Indoor in configure_schema:\n{out}"
        );
        assert!(
            out.contains("TemperatureReadingKey::Outdoor"),
            "Missing Outdoor in configure_schema:\n{out}"
        );
        assert!(
            out.contains("TemperatureReadingKey::Garage"),
            "Missing Garage in configure_schema:\n{out}"
        );
        assert!(
            out.contains("OtaCommandKey::Gateway01"),
            "Missing Gateway01 in configure_schema:\n{out}"
        );
    }

    // Unit tests for the casing helpers used by key-enum generation.
    #[test]
    fn pascal_case_simple() {
        assert_eq!(to_pascal_case("indoor"), "Indoor");
        assert_eq!(to_pascal_case("outdoor"), "Outdoor");
    }

    #[test]
    fn pascal_case_kebab() {
        assert_eq!(to_pascal_case("gateway-01"), "Gateway01");
        assert_eq!(to_pascal_case("sensor-hub-01"), "SensorHub01");
    }

    #[test]
    fn pascal_case_snake() {
        assert_eq!(to_pascal_case("sensor_hub_01"), "SensorHub01");
    }

    #[test]
    fn pascal_case_already_capitalized() {
        assert_eq!(to_pascal_case("Indoor"), "Indoor");
    }

    // Smoke test only: asserts generation succeeds and yields output.
    #[test]
    fn snapshot_full_output() {
        let out = generated();
        assert!(!out.is_empty());
    }

    #[test]
    fn snake_case_basic() {
        assert_eq!(to_snake_case("WeatherObservation"), "weather_observation");
        assert_eq!(to_snake_case("Temperature"), "temperature");
    }

    #[test]
    fn snake_case_acronym() {
        assert_eq!(to_snake_case("OtaCommand"), "ota_command");
    }

    // Fixture with a [project] block, schema version, JSON serialization,
    // observable/settable fields, and an inbound MQTT connector — covers
    // the trait-impl and crate-generation paths.
    const EXTENDED_TOML: &str = r#"
[project]
name = "weather-sentinel"

[meta]
aimdb_version = "0.5.0"
created_at = "2026-02-24T21:39:15Z"
last_modified = "2026-02-25T10:00:00Z"

[[records]]
name = "WeatherObservation"
buffer = "SpmcRing"
capacity = 256
key_prefix = "weather.observation."
key_variants = ["Vienna", "Munich"]
schema_version = 2
serialization = "json"

[records.observable]
signal_field = "temperature_celsius"
icon = "🌡️"
unit = "°C"

[[records.fields]]
name = "timestamp"
type = "u64"
description = "Unix timestamp in milliseconds"

[[records.fields]]
name = "temperature_celsius"
type = "f32"
description = "Air temperature"
settable = true

[[records.fields]]
name = "humidity_percent"
type = "f32"
description = "Relative humidity"
settable = true

[[records.connectors]]
protocol = "mqtt"
direction = "inbound"
url = "sensors/{variant}/observation"
"#;

    fn extended_state() -> ArchitectureState {
        ArchitectureState::from_toml(EXTENDED_TOML).unwrap()
    }

    fn extended_generated() -> String {
        generate_rust(&extended_state())
    }

    #[test]
    fn schema_type_impl_generated() {
        let out = extended_generated();
        assert!(
            out.contains("impl SchemaType for WeatherObservationValue"),
            "Missing SchemaType impl:\n{out}"
        );
        assert!(
            out.contains("\"weather_observation\""),
            "Missing schema name:\n{out}"
        );
        assert!(
            out.contains("VERSION: u32 = 2"),
            "Missing schema version:\n{out}"
        );
    }

    #[test]
    fn linkable_impl_json_generated() {
        let out = extended_generated();
        assert!(
            out.contains("impl Linkable for WeatherObservationValue"),
            "Missing Linkable impl:\n{out}"
        );
        assert!(
            out.contains("serde_json::to_vec"),
            "Missing serde_json::to_vec call:\n{out}"
        );
        assert!(
            out.contains("serde_json::from_slice"),
            "Missing serde_json::from_slice call:\n{out}"
        );
    }

    #[test]
    fn observable_impl_generated() {
        let out = extended_generated();
        assert!(
            out.contains("impl Observable for WeatherObservationValue"),
            "Missing Observable impl:\n{out}"
        );
        assert!(
            out.contains("self.temperature_celsius"),
            "Missing signal field access:\n{out}"
        );
        assert!(out.contains("\"°C\""), "Missing unit:\n{out}");
    }

    #[test]
    fn settable_impl_generated() {
        let out = extended_generated();
        assert!(
            out.contains("impl Settable for WeatherObservationValue"),
            "Missing Settable impl:\n{out}"
        );
        assert!(
            out.contains("(f32, f32)"),
            "Missing tuple value type:\n{out}"
        );
    }

    #[test]
    fn configure_schema_with_real_deserializer() {
        let out = extended_generated();
        assert!(
            out.contains("with_deserializer(WeatherObservationValue::from_bytes)"),
            "Missing with_deserializer for inbound connector:\n{out}"
        );
    }

    #[test]
    fn data_contracts_import_present() {
        let out = extended_generated();
        assert!(
            out.contains("use aimdb_data_contracts"),
            "Missing aimdb_data_contracts import:\n{out}"
        );
        assert!(
            out.contains("SchemaType"),
            "Missing SchemaType in import:\n{out}"
        );
    }

    #[test]
    fn generate_cargo_toml_output() {
        let state = extended_state();
        let toml = generate_cargo_toml(&state);
        assert!(
            toml.contains("weather-sentinel-common"),
            "Missing crate name:\n{toml}"
        );
        assert!(
            toml.contains("serde_json"),
            "Missing serde_json dep:\n{toml}"
        );
        assert!(
            toml.contains("linkable"),
            "Missing linkable feature:\n{toml}"
        );
    }

    #[test]
    fn generate_lib_rs_output() {
        let lib = generate_lib_rs();
        assert!(lib.contains("no_std"), "Missing no_std attribute:\n{lib}");
        assert!(
            lib.contains("extern crate alloc"),
            "Missing alloc extern:\n{lib}"
        );
        assert!(lib.contains("mod schema"), "Missing schema module:\n{lib}");
        assert!(
            lib.contains("pub use schema::*"),
            "Missing re-export:\n{lib}"
        );
    }

    // schema.rs is embedded in a file that carries its own header, so the
    // generator must not add a second @generated marker.
    #[test]
    fn schema_rs_has_no_generated_header() {
        let out = generate_schema_rs(&extended_state());
        assert!(
            !out.contains("@generated"),
            "schema.rs should not have @generated header:\n{out}"
        );
    }
}