1use alembic_core::{FieldType, JsonMap, Key, Schema, TypeName, Uid};
4use alembic_engine::{
5 apply_non_delete_with_retries, build_key_from_schema, resolve_value_for_type, Adapter,
6 AdapterApplyError, AppliedOp, ApplyReport, BackendId, ObservedObject, ObservedState, Op,
7 ProvisionReport, RetryApplyDriver, StateStore,
8};
9use anyhow::{anyhow, Context, Result};
10use async_trait::async_trait;
11use graphql_parser::schema::{parse_schema, Definition, Type as GqlType, TypeDefinition};
12use infrahub::{Client, ClientConfig};
13use reqwest::header::{HeaderMap, HeaderValue};
14use serde::{Deserialize, Serialize};
15use serde_json::{json, Map, Value};
16use serde_yaml::{Mapping as YamlMapping, Value as YamlValue};
17use std::collections::{BTreeMap, BTreeSet};
18use std::fs;
19use std::path::{Path, PathBuf};
20use std::time::Duration;
21use tokio::process::Command;
22use tokio::time::sleep;
23
/// How schema changes are pushed to Infrahub when provisioning is enabled.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum SchemaApplyMode {
    /// Load the schema file via the `infrahubctl schema load` CLI.
    Infrahubctl,
    /// Write the schema into a Git repository checkout and trigger an
    /// Infrahub repository process to import it.
    Repository,
}
30
/// Configuration for pushing a generated schema document to Infrahub.
#[derive(Debug, Clone)]
pub struct SchemaPushConfig {
    /// Where the generated schema YAML is written before being pushed.
    pub schema_path: PathBuf,
    /// Push mechanism (CLI vs. repository import).
    pub mode: SchemaApplyMode,
    /// Repository to process (repository mode); takes precedence over name.
    pub repository_id: Option<String>,
    /// Repository name to resolve to an id when `repository_id` is unset.
    pub repository_name: Option<String>,
    /// Local checkout root (repository mode); must contain `schema_path`.
    pub repository_root: Option<PathBuf>,
    /// Branch passed to `infrahubctl schema load --branch`, if any.
    pub branch: Option<String>,
    /// Explicit path to the `infrahubctl` binary; defaults to `infrahubctl`
    /// on `$PATH`.
    pub infrahubctl_path: Option<PathBuf>,
}
42
43impl SchemaPushConfig {
44 pub fn infrahubctl(schema_path: PathBuf) -> Self {
45 Self {
46 schema_path,
47 mode: SchemaApplyMode::Infrahubctl,
48 repository_id: None,
49 repository_name: None,
50 repository_root: None,
51 branch: None,
52 infrahubctl_path: None,
53 }
54 }
55
56 pub fn repository(
57 schema_path: PathBuf,
58 repository_id: String,
59 repository_root: PathBuf,
60 ) -> Self {
61 Self {
62 schema_path,
63 mode: SchemaApplyMode::Repository,
64 repository_id: Some(repository_id),
65 repository_name: None,
66 repository_root: Some(repository_root),
67 branch: None,
68 infrahubctl_path: None,
69 }
70 }
71}
72
/// Adapter that reads/writes objects in an Infrahub instance.
pub struct InfrahubAdapter {
    // GraphQL SDK client (queries and mutations).
    client: Client,
    // Raw HTTP client for REST endpoints (e.g. `/api/schema`), pre-loaded
    // with the auth header.
    api: reqwest::Client,
    // Instance base URL; trailing slashes are trimmed when building URLs.
    base_url: String,
    // API token, also exported to `infrahubctl` subprocesses.
    token: String,
    // Optional schema-provisioning config; `ensure_schema` errors on a
    // mismatch when this is `None`.
    schema_push: Option<SchemaPushConfig>,
}
81
impl InfrahubAdapter {
    /// Build an adapter for the Infrahub instance at `url`.
    ///
    /// Constructs both the GraphQL SDK client and a raw `reqwest` client
    /// (used for REST endpoints); both bypass any system proxy. `branch`
    /// optionally pins SDK calls to a non-default branch.
    pub fn new(url: &str, token: &str, branch: Option<&str>) -> Result<Self> {
        let mut config = ClientConfig::new(url, token);
        config = config.with_http_client_builder(|builder| builder.no_proxy());
        if let Some(branch) = branch {
            config = config.with_default_branch(branch);
        }
        let client = Client::new(config)?;
        let mut headers = HeaderMap::new();
        // The REST API authenticates via this header (not a bearer token).
        headers.insert(
            "X-INFRAHUB-KEY",
            HeaderValue::from_str(token)
                .map_err(|err| anyhow!("invalid infrahub token header: {err}"))?,
        );
        let api = reqwest::Client::builder()
            .default_headers(headers)
            .timeout(Duration::from_secs(30))
            .no_proxy()
            .build()
            .context("build infrahub http client")?;
        Ok(Self {
            client,
            api,
            base_url: url.to_string(),
            token: token.to_string(),
            schema_push: None,
        })
    }

    /// Enable schema provisioning (used by `ensure_schema`).
    pub fn with_schema_push(mut self, schema_push: SchemaPushConfig) -> Self {
        self.schema_push = Some(schema_push);
        self
    }

    /// Fetch the live GraphQL SDL from Infrahub and parse it into a
    /// `SchemaInfo` lookup.
    async fn load_schema_info(&self) -> Result<SchemaInfo> {
        let raw = self
            .client
            .fetch_schema(None)
            .await
            .context("fetch infrahub schema")?;
        SchemaInfo::parse(&raw)
    }

    /// Fetch node-level schema metadata from the REST `/api/schema` endpoint.
    async fn load_schema_snapshot(&self) -> Result<SchemaSnapshot> {
        let base = self.base_url.trim_end_matches('/');
        let url = format!("{base}/api/schema");
        let response = self
            .api
            .get(url)
            .send()
            .await
            .context("fetch infrahub schema snapshot")?;
        let status = response.status();
        // Read the body before the status check so a body-read failure
        // surfaces with its own context.
        let text = response
            .text()
            .await
            .context("read infrahub schema snapshot")?;
        if !status.is_success() {
            return Err(anyhow!("infrahub schema snapshot http error: {}", status));
        }
        serde_json::from_str(&text).context("parse infrahub schema snapshot")
    }

    /// Page through every object of `type_name` via GraphQL, converting each
    /// node into an `ObservedObject` keyed per the alembic type schema.
    ///
    /// Pagination uses a fixed page size of 200 and stops when a page comes
    /// back empty or the reported `count` has been reached.
    async fn read_type_objects(
        &self,
        schema_info: &SchemaInfo,
        type_name: &TypeName,
        type_schema: &alembic_core::TypeSchema,
        mappings: &StateMappings,
    ) -> Result<Vec<ObservedObject>> {
        let gql_type = gql_type_name(type_name);
        let fields = field_names_for_schema(type_schema);
        let field_kinds = schema_info.field_kinds(&gql_type, type_schema, &fields)?;
        let selection = build_selection(&field_kinds);

        // NOTE(review): `hfid` is selected but never read by extract_attrs —
        // presumably kept for debugging; confirm before removing.
        let query = format!(
            "query($offset: Int, $limit: Int) {{ {type_name}(offset: $offset, limit: $limit) {{ count edges {{ node {{ id hfid {selection} }} }} }} }}",
            type_name = gql_type,
            selection = selection
        );

        let mut observed = Vec::new();
        let mut offset = 0usize;
        let limit = 200usize;

        loop {
            let vars = json!({
                "offset": offset,
                "limit": limit,
            });
            let response = self
                .client
                .execute_raw(&query, Some(vars), None)
                .await
                .context("execute infrahub query")?;
            let data = response
                .data
                .ok_or_else(|| anyhow!("missing data in infrahub response"))?;
            let connection = data
                .get(&gql_type)
                .ok_or_else(|| anyhow!("missing {} in infrahub response", gql_type))?;
            let edges = connection
                .get("edges")
                .and_then(Value::as_array)
                .cloned()
                .unwrap_or_default();

            // An empty page means we ran past the end, regardless of `count`.
            if edges.is_empty() {
                break;
            }

            for edge in edges {
                let node = edge
                    .get("node")
                    .ok_or_else(|| anyhow!("missing node in infrahub response"))?;
                let backend_id = node
                    .get("id")
                    .and_then(Value::as_str)
                    .ok_or_else(|| anyhow!("missing id in infrahub response"))?
                    .to_string();

                // Raw attrs hold backend ids for relations; normalize them
                // back to reference form before deriving the key.
                let attrs = extract_attrs(node, &field_kinds)?;
                let attrs = normalize_attrs_refs(&attrs, type_schema, mappings);
                let key = build_key_from_schema(type_schema, &attrs)?;

                observed.push(ObservedObject {
                    type_name: type_name.clone(),
                    key,
                    attrs,
                    backend_id: Some(BackendId::String(backend_id)),
                });
            }

            // Missing/zero `count` falls back to the empty-page stop above.
            let count = connection.get("count").and_then(Value::as_u64).unwrap_or(0) as usize;
            offset += limit;
            if count > 0 && offset >= count {
                break;
            }
        }

        Ok(observed)
    }

    /// Apply an `Op::Create`: run the `<Type>Create` mutation and record the
    /// new backend id in `resolved` for later ops that reference this uid.
    ///
    /// Returns an error for any other op variant.
    async fn apply_create(
        &self,
        op: &Op,
        schema: &Schema,
        resolved: &mut BTreeMap<Uid, BackendId>,
    ) -> Result<AppliedOp> {
        let (uid, type_name, desired) = match op {
            Op::Create {
                uid,
                type_name,
                desired,
            } => (*uid, type_name, desired),
            _ => return Err(anyhow!("expected create op")),
        };
        let type_schema = schema
            .types
            .get(type_name.as_str())
            .ok_or_else(|| anyhow!("missing schema for {}", type_name))?;
        let gql_type = gql_type_name(type_name);

        let data = build_input(&desired.attrs, type_schema, resolved)?;
        let mutation = format!(
            "mutation($data: {type_name}CreateInput!) {{ {type_name}Create(data: $data) {{ ok object {{ id }} }} }}",
            type_name = gql_type
        );

        let response = self
            .client
            .execute_raw(&mutation, Some(json!({ "data": data })), None)
            .await
            .context("execute infrahub create")?;
        let data = response
            .data
            .ok_or_else(|| anyhow!("missing data in infrahub response"))?;
        let root = data
            .get(format!("{}Create", gql_type))
            .ok_or_else(|| anyhow!("missing create response for {}", gql_type))?;
        let backend_id = root
            .get("object")
            .and_then(|obj| obj.get("id"))
            .and_then(Value::as_str)
            .ok_or_else(|| anyhow!("missing id in infrahub create response"))?
            .to_string();

        let backend_id = BackendId::String(backend_id);
        resolved.insert(uid, backend_id.clone());

        Ok(AppliedOp {
            uid,
            type_name: type_name.clone(),
            backend_id: Some(backend_id),
        })
    }

    /// Apply an `Op::Update`: resolve the target backend id (op field, then
    /// `resolved` map, then a key lookup against the live instance) and run
    /// the `<Type>Update` mutation.
    async fn apply_update(
        &self,
        op: &Op,
        schema: &Schema,
        resolved: &BTreeMap<Uid, BackendId>,
    ) -> Result<AppliedOp> {
        let (uid, type_name, desired, backend_id) = match op {
            Op::Update {
                uid,
                type_name,
                desired,
                backend_id,
                ..
            } => (*uid, type_name, desired, backend_id),
            _ => return Err(anyhow!("expected update op")),
        };
        let type_schema = schema
            .types
            .get(type_name.as_str())
            .ok_or_else(|| anyhow!("missing schema for {}", type_name))?;
        let gql_type = gql_type_name(type_name);

        let id = if let Some(BackendId::String(id)) = backend_id {
            id.clone()
        } else if let Some(BackendId::String(id)) = resolved.get(&uid) {
            id.clone()
        } else {
            // Last resort: derive the key from desired attrs and look the
            // object up remotely.
            let key = build_key_from_schema(type_schema, &desired.attrs)?;
            self.lookup_backend_id(type_name, type_schema, &key).await?
        };

        let mut data = build_input(&desired.attrs, type_schema, resolved)?;
        let map = data
            .as_object_mut()
            .ok_or_else(|| anyhow!("expected object for infrahub input"))?;
        // Update mutations carry the target id inside the data payload.
        map.insert("id".to_string(), Value::String(id.clone()));

        let mutation = format!(
            "mutation($data: {type_name}UpdateInput!) {{ {type_name}Update(data: $data) {{ ok object {{ id }} }} }}",
            type_name = gql_type
        );
        self.client
            .execute_raw(&mutation, Some(json!({ "data": data })), None)
            .await
            .context("execute infrahub update")?;

        Ok(AppliedOp {
            uid,
            type_name: type_name.clone(),
            backend_id: Some(BackendId::String(id)),
        })
    }

    /// Apply an `Op::Delete`: resolve the backend id (op field, `resolved`
    /// map, or remote key lookup) and run the `<Type>Delete` mutation.
    ///
    /// The returned `AppliedOp` intentionally has no backend id — the object
    /// no longer exists.
    async fn apply_delete(
        &self,
        op: &Op,
        schema: &Schema,
        resolved: &BTreeMap<Uid, BackendId>,
    ) -> Result<AppliedOp> {
        let (uid, type_name, backend_id, key) = match op {
            Op::Delete {
                uid,
                type_name,
                backend_id,
                key,
            } => (*uid, type_name, backend_id, key),
            _ => return Err(anyhow!("expected delete op")),
        };
        let type_schema = schema
            .types
            .get(type_name.as_str())
            .ok_or_else(|| anyhow!("missing schema for {}", type_name))?;
        let gql_type = gql_type_name(type_name);

        let id = if let Some(BackendId::String(id)) = backend_id {
            id.clone()
        } else if let Some(BackendId::String(id)) = resolved.get(&uid) {
            id.clone()
        } else {
            self.lookup_backend_id(type_name, type_schema, key).await?
        };

        let mutation = format!(
            "mutation($data: DeleteInput!) {{ {type_name}Delete(data: $data) {{ ok }} }}",
            type_name = gql_type
        );
        self.client
            .execute_raw(&mutation, Some(json!({ "data": { "id": id } })), None)
            .await
            .context("execute infrahub delete")?;

        Ok(AppliedOp {
            uid,
            type_name: type_name.clone(),
            backend_id: None,
        })
    }

    /// Find the backend id of the object of `type_name` whose derived key
    /// equals `key`, by scanning all objects of that type.
    ///
    /// NOTE(review): this re-fetches the whole type (and the SDL) per call —
    /// acceptable as a fallback path, but worth caching if it gets hot.
    async fn lookup_backend_id(
        &self,
        type_name: &TypeName,
        type_schema: &alembic_core::TypeSchema,
        key: &Key,
    ) -> Result<String> {
        let schema_info = self.load_schema_info().await?;
        // Empty mappings: keys are compared on raw normalized attrs here.
        let mappings = StateMappings::default();
        let objects = self
            .read_type_objects(&schema_info, type_name, type_schema, &mappings)
            .await?;
        let key_string = serde_json::to_string(key).unwrap_or_default();
        for object in objects {
            if object.key == *key {
                if let Some(BackendId::String(id)) = object.backend_id {
                    return Ok(id);
                }
            }
        }
        Err(anyhow!("missing infrahub object with key {key_string}"))
    }

    /// Push the schema file with `infrahubctl schema load`, forwarding the
    /// instance address/token through environment variables.
    async fn apply_schema_infrahubctl(&self, config: &SchemaPushConfig) -> Result<()> {
        let mut cmd = Command::new(
            config
                .infrahubctl_path
                .as_deref()
                .unwrap_or_else(|| Path::new("infrahubctl")),
        );
        cmd.arg("schema")
            .arg("load")
            .arg(&config.schema_path)
            .env("INFRAHUB_ADDRESS", &self.base_url)
            .env("INFRAHUB_API_TOKEN", &self.token);
        if let Some(branch) = &config.branch {
            cmd.arg("--branch").arg(branch);
        }

        let output = cmd.output().await.context("run infrahubctl schema load")?;
        if !output.status.success() {
            // Include both streams: infrahubctl reports some errors on stdout.
            let stderr = String::from_utf8_lossy(&output.stderr);
            let stdout = String::from_utf8_lossy(&output.stdout);
            return Err(anyhow!(
                "infrahubctl schema load failed: {}\nstdout: {}\nstderr: {}",
                output.status,
                stdout.trim(),
                stderr.trim()
            ));
        }
        Ok(())
    }

    /// Push the schema via a repository: register the schema file in
    /// `.infrahub.yml`, then trigger a repository process for the configured
    /// repository (by id, or by resolving its name).
    async fn apply_schema_repository(&self, config: &SchemaPushConfig) -> Result<()> {
        let repo_root = config
            .repository_root
            .as_ref()
            .ok_or_else(|| anyhow!("infrahub repository mode requires repository_root"))?;
        ensure_repository_config(repo_root, &config.schema_path)?;

        let repo_id = match (&config.repository_id, &config.repository_name) {
            (Some(id), _) => id.clone(),
            (None, Some(name)) => self.resolve_repository_id(name).await?,
            (None, None) => {
                return Err(anyhow!(
                    "infrahub repository mode requires repository_id or repository_name"
                ))
            }
        };

        self.process_repository(&repo_id).await?;
        Ok(())
    }

    /// Resolve a CoreRepository name to its backend id (first match wins).
    async fn resolve_repository_id(&self, name: &str) -> Result<String> {
        let query = "query($name: String) { CoreRepository(name__value: $name, limit: 1) { edges { node { id } } } }";
        let response = self
            .client
            .execute_raw(query, Some(json!({ "name": name })), None)
            .await
            .context("query infrahub repository")?;
        let data = response
            .data
            .ok_or_else(|| anyhow!("missing data in infrahub repository response"))?;
        let edges = data
            .get("CoreRepository")
            .and_then(|value| value.get("edges"))
            .and_then(Value::as_array)
            .cloned()
            .unwrap_or_default();
        for edge in edges {
            if let Some(id) = edge
                .get("node")
                .and_then(|node| node.get("id"))
                .and_then(Value::as_str)
            {
                return Ok(id.to_string());
            }
        }
        Err(anyhow!("infrahub repository not found: {name}"))
    }

    /// Trigger an asynchronous repository import; does not wait for the
    /// resulting task to finish.
    async fn process_repository(&self, repo_id: &str) -> Result<()> {
        let mutation = "mutation($data: IdentifierInput!) { InfrahubRepositoryProcess(data: $data) { ok task { id } } }";
        self.client
            .execute_raw(mutation, Some(json!({ "data": { "id": repo_id } })), None)
            .await
            .context("trigger infrahub repository process")?;
        Ok(())
    }
}
487
#[async_trait]
impl Adapter for InfrahubAdapter {
    /// Read the observed state for the requested `types` (or every schema
    /// type when `types` is empty), after validating that Infrahub's live
    /// GraphQL schema covers the alembic schema.
    async fn read(
        &self,
        schema: &Schema,
        types: &[TypeName],
        state_store: &StateStore,
    ) -> Result<ObservedState> {
        let schema_info = self.load_schema_info().await?;
        validate_schema(schema, &schema_info)?;

        let requested: Vec<TypeName> = if types.is_empty() {
            schema
                .types
                .keys()
                .map(|name| TypeName::new(name.clone()))
                .collect()
        } else {
            types.to_vec()
        };

        let mappings = state_mappings(state_store);
        let mut state = ObservedState::default();
        for type_name in requested {
            let type_schema = schema
                .types
                .get(type_name.as_str())
                .ok_or_else(|| anyhow!("missing schema for {}", type_name))?;
            let objects = self
                .read_type_objects(&schema_info, &type_name, type_schema, &mappings)
                .await?;
            for object in objects {
                state.insert(object);
            }
        }

        Ok(state)
    }

    /// Apply `ops` against Infrahub.
    ///
    /// Creates/updates run first, with retries for ordering-dependent
    /// reference failures (an op referencing a not-yet-created object is
    /// retried after its dependency lands). Deletes run last.
    async fn write(&self, schema: &Schema, ops: &[Op], state: &StateStore) -> Result<ApplyReport> {
        let schema_info = self.load_schema_info().await?;
        validate_schema(schema, &schema_info)?;

        let mut applied = Vec::new();
        // Seed uid -> backend-id mappings from previously persisted state.
        let mut resolved = resolved_from_state(state);
        let mut creates_updates = Vec::new();
        let mut deletes = Vec::new();
        for op in ops {
            match op {
                Op::Delete { .. } => deletes.push(op.clone()),
                _ => creates_updates.push(op.clone()),
            }
        }

        // Bridges the retry helper to this adapter; shares the `resolved`
        // map so each successful create is visible to later attempts.
        struct ApplyDriver<'a> {
            adapter: &'a InfrahubAdapter,
            schema: &'a Schema,
            resolved: &'a mut BTreeMap<Uid, BackendId>,
        }

        #[async_trait]
        impl RetryApplyDriver for ApplyDriver<'_> {
            async fn apply_non_delete(&mut self, op: &Op) -> Result<AppliedOp> {
                match op {
                    Op::Create { .. } => {
                        self.adapter
                            .apply_create(op, self.schema, self.resolved)
                            .await
                    }
                    Op::Update { .. } => {
                        self.adapter
                            .apply_update(op, self.schema, self.resolved)
                            .await
                    }
                    Op::Delete { .. } => Err(anyhow!("delete ops not supported here")),
                }
            }

            // Only missing-reference errors are worth retrying; anything
            // else fails the batch immediately.
            fn is_retryable(&self, err: &anyhow::Error) -> bool {
                is_missing_ref_error(err)
            }
        }

        let mut driver = ApplyDriver {
            adapter: self,
            schema,
            resolved: &mut resolved,
        };
        let retry_result = apply_non_delete_with_retries(&creates_updates, &mut driver).await?;
        if !retry_result.pending.is_empty() {
            // Ops still pending after retries have genuinely unresolvable refs.
            let missing = describe_missing_refs(&retry_result.pending, &resolved);
            return Err(anyhow!("unresolved references: {missing}"));
        }

        for applied_op in retry_result.applied {
            if let Some(backend_id) = &applied_op.backend_id {
                resolved.insert(applied_op.uid, backend_id.clone());
            }
            applied.push(applied_op);
        }

        // Deletes run after creates/updates so `resolved` is fully populated
        // for backend-id lookups.
        for op in deletes {
            applied.push(self.apply_delete(&op, schema, &resolved).await?);
        }

        Ok(ApplyReport {
            applied,
            ..Default::default()
        })
    }

    /// Ensure Infrahub's schema covers the alembic schema, provisioning the
    /// missing pieces when a `SchemaPushConfig` is set.
    ///
    /// After pushing, polls up to 5 times (1s apart) for the live schema to
    /// reflect the change, then does one final strict validation.
    async fn ensure_schema(&self, schema: &Schema) -> Result<ProvisionReport> {
        let schema_info = self.load_schema_info().await?;
        let schema_snapshot = self.load_schema_snapshot().await?;
        // `None` means Infrahub already matches — nothing to provision.
        let Some(plan) = build_provision_plan(schema, &schema_info, &schema_snapshot)? else {
            return Ok(ProvisionReport::default());
        };

        let config = self.schema_push.as_ref().ok_or_else(|| {
            anyhow!(
                "infrahub schema mismatch (configure schema provisioning): {}",
                plan.summary
            )
        })?;

        write_schema_document(&config.schema_path, &plan.document)?;

        match config.mode {
            SchemaApplyMode::Infrahubctl => self.apply_schema_infrahubctl(config).await?,
            SchemaApplyMode::Repository => self.apply_schema_repository(config).await?,
        }

        let mut refreshed = None;
        for _ in 0..5 {
            let schema_info = self.load_schema_info().await?;
            if validate_schema(schema, &schema_info).is_ok() {
                refreshed = Some(schema_info);
                break;
            }
            sleep(Duration::from_secs(1)).await;
        }
        if refreshed.is_none() {
            // Final attempt: propagate the validation error if still failing.
            let schema_info = self.load_schema_info().await?;
            validate_schema(schema, &schema_info)?;
        }

        Ok(plan.report)
    }
}
637
/// A field as declared in Infrahub's GraphQL SDL.
#[derive(Debug, Clone)]
struct GraphField {
    // Base type name with NonNull/List wrappers stripped.
    base_type: String,
    // Whether any list wrapper was present.
    is_list: bool,
}
643
/// Parsed view of Infrahub's live GraphQL SDL.
#[derive(Debug, Default, Clone)]
struct SchemaInfo {
    // Object types implementing `AttributeInterface` (attribute wrappers).
    attribute_types: BTreeSet<String>,
    // Object type name -> (field name -> field shape).
    type_fields: BTreeMap<String, BTreeMap<String, GraphField>>,
}
649
/// Subset of the REST `/api/schema` response this adapter consumes.
#[derive(Debug, Default, Clone, Deserialize)]
struct SchemaSnapshot {
    // Node (object type) definitions currently known to Infrahub.
    #[serde(default)]
    nodes: Vec<SchemaNodeSnapshot>,
}
655
/// One node definition from the `/api/schema` snapshot.
#[derive(Debug, Default, Clone, Deserialize)]
struct SchemaNodeSnapshot {
    // Node name within its namespace (PascalCase in Infrahub convention).
    name: String,
    // Namespace, e.g. "Infra" — combined with `name` to identify the node.
    namespace: String,
    // Generics/interfaces this node inherits from.
    #[serde(default)]
    inherit_from: Vec<String>,
    // Whether Infrahub currently shows this node in the UI menu.
    #[serde(default)]
    include_in_menu: bool,
}
665
666impl SchemaNodeSnapshot {
667 fn key(&self) -> NodeKey {
668 NodeKey::new(self.namespace.clone(), self.name.clone())
669 }
670
671 fn qualified_name(&self) -> String {
672 format!("{}.{}", self.namespace, self.name)
673 }
674}
675
/// How Infrahub exposes a relationship in its GraphQL schema, which dictates
/// both the selection set and how ids are extracted from responses.
#[derive(Debug, Clone)]
enum RelationShape {
    // Plain `{ id kind }` object (or list of them).
    RelatedNode,
    // `{ node { id } }` wrapper (or list of wrappers).
    NestedEdged,
    // `{ edges { node { id } } }` connection.
    NestedPaginated,
}
682
/// Classification of a GraphQL field on an Infrahub object type.
#[derive(Debug, Clone)]
enum FieldKind {
    // Attribute wrapper (`{ value }`).
    Attribute,
    // Single-valued relationship with the given response shape.
    RelationSingle(RelationShape),
    // Multi-valued relationship with the given response shape.
    RelationList(RelationShape),
}
689
impl SchemaInfo {
    /// Parse GraphQL SDL into per-object field maps plus the set of
    /// attribute wrapper type names.
    ///
    /// Only object type definitions are considered; interfaces, scalars,
    /// unions, etc. are skipped.
    fn parse(raw: &str) -> Result<Self> {
        let document = parse_schema::<String>(raw).map_err(|err| anyhow!(err.to_string()))?;
        let mut attribute_types = BTreeSet::new();
        let mut type_fields = BTreeMap::new();

        for def in document.definitions {
            let Definition::TypeDefinition(TypeDefinition::Object(obj)) = def else {
                continue;
            };
            // Types implementing AttributeInterface are Infrahub attribute
            // wrappers (e.g. TextAttribute), not relationships.
            if obj
                .implements_interfaces
                .iter()
                .any(|iface| iface == "AttributeInterface")
            {
                attribute_types.insert(obj.name.clone());
            }
            let mut fields = BTreeMap::new();
            for field in obj.fields {
                let (base_type, is_list) = unwrap_type(&field.field_type);
                fields.insert(field.name.clone(), GraphField { base_type, is_list });
            }
            type_fields.insert(obj.name.clone(), fields);
        }

        Ok(Self {
            attribute_types,
            type_fields,
        })
    }

    /// Classify each requested field of `type_name`, cross-checking every
    /// field against the alembic schema.
    ///
    /// Errors when the type or a field is absent from either schema, or when
    /// the alembic field type is incompatible with the GraphQL shape.
    fn field_kinds(
        &self,
        type_name: &str,
        type_schema: &alembic_core::TypeSchema,
        fields: &[String],
    ) -> Result<BTreeMap<String, FieldKind>> {
        let info = self
            .type_fields
            .get(type_name)
            .ok_or_else(|| anyhow!("infrahub schema missing type {}", type_name))?;
        let mut kinds = BTreeMap::new();
        for field in fields {
            let graph = info
                .get(field)
                .ok_or_else(|| anyhow!("infrahub schema missing field {}.{}", type_name, field))?;
            let kind = self.kind_for_field(graph);
            let field_schema = field_schema_for(type_schema, field)
                .ok_or_else(|| anyhow!("missing alembic schema for {}.{}", type_name, field))?;
            validate_kind(type_name, field, &field_schema.r#type, &kind)?;
            kinds.insert(field.clone(), kind);
        }
        Ok(kinds)
    }

    /// Map a GraphQL field shape to a `FieldKind`.
    ///
    /// Matching is by base-type name: known attribute wrappers first, then
    /// the relation wrapper types by name prefix; anything unrecognized
    /// falls back to `Attribute`.
    fn kind_for_field(&self, graph: &GraphField) -> FieldKind {
        if self.attribute_types.contains(&graph.base_type) {
            return FieldKind::Attribute;
        }
        if graph.base_type == "RelatedNode" {
            return if graph.is_list {
                FieldKind::RelationList(RelationShape::RelatedNode)
            } else {
                FieldKind::RelationSingle(RelationShape::RelatedNode)
            };
        }
        // NestedPaginated* wrappers are connections and thus always lists.
        if graph.base_type.starts_with("NestedPaginated") {
            return FieldKind::RelationList(RelationShape::NestedPaginated);
        }
        if graph.base_type.starts_with("NestedEdged") {
            return FieldKind::RelationSingle(RelationShape::NestedEdged);
        }
        FieldKind::Attribute
    }
}
765
766fn unwrap_type(field_type: &GqlType<String>) -> (String, bool) {
767 match field_type {
768 GqlType::NamedType(name) => (name.clone(), false),
769 GqlType::ListType(inner) => {
770 let (name, _inner_list) = unwrap_type(inner);
771 (name, true)
772 }
773 GqlType::NonNullType(inner) => unwrap_type(inner),
774 }
775}
776
777fn field_names_for_schema(type_schema: &alembic_core::TypeSchema) -> Vec<String> {
778 let mut fields = BTreeSet::new();
779 for field in type_schema.key.keys() {
780 fields.insert(field.clone());
781 }
782 for field in type_schema.fields.keys() {
783 fields.insert(field.clone());
784 }
785 fields.into_iter().collect()
786}
787
788fn build_selection(field_kinds: &BTreeMap<String, FieldKind>) -> String {
789 let mut parts = Vec::new();
790 for (field, kind) in field_kinds {
791 let selection = match kind {
792 FieldKind::Attribute => format!("{field} {{ value }}"),
793 FieldKind::RelationSingle(RelationShape::RelatedNode) => {
794 format!("{field} {{ id kind }}")
795 }
796 FieldKind::RelationSingle(RelationShape::NestedEdged) => {
797 format!("{field} {{ node {{ id }} }}")
798 }
799 FieldKind::RelationSingle(RelationShape::NestedPaginated) => {
800 format!("{field} {{ node {{ id }} }}")
801 }
802 FieldKind::RelationList(RelationShape::RelatedNode) => {
803 format!("{field} {{ id kind }}")
804 }
805 FieldKind::RelationList(RelationShape::NestedPaginated) => {
806 format!("{field} {{ edges {{ node {{ id }} }} }}")
807 }
808 FieldKind::RelationList(RelationShape::NestedEdged) => {
809 format!("{field} {{ node {{ id }} }}")
810 }
811 };
812 parts.push(selection);
813 }
814 parts.join("\n")
815}
816
817fn extract_attrs(node: &Value, field_kinds: &BTreeMap<String, FieldKind>) -> Result<JsonMap> {
818 let mut map = BTreeMap::new();
819 for (field, kind) in field_kinds {
820 let value = extract_field_value(node, field, kind)?;
821 if let Some(value) = value {
822 map.insert(field.clone(), value);
823 }
824 }
825 Ok(JsonMap::from(map))
826}
827
828fn extract_field_value(node: &Value, field: &str, kind: &FieldKind) -> Result<Option<Value>> {
829 let Some(field_val) = node.get(field) else {
830 return Ok(None);
831 };
832 if field_val.is_null() {
833 return Ok(None);
834 }
835 let value = match kind {
836 FieldKind::Attribute => field_val.get("value").cloned().unwrap_or(Value::Null),
837 FieldKind::RelationSingle(RelationShape::RelatedNode) => field_val
838 .get("id")
839 .and_then(Value::as_str)
840 .map(|s| Value::String(s.to_string()))
841 .unwrap_or(Value::Null),
842 FieldKind::RelationSingle(RelationShape::NestedEdged) => field_val
843 .get("node")
844 .and_then(|node| node.get("id"))
845 .and_then(Value::as_str)
846 .map(|s| Value::String(s.to_string()))
847 .unwrap_or(Value::Null),
848 FieldKind::RelationSingle(RelationShape::NestedPaginated) => field_val
849 .get("node")
850 .and_then(|node| node.get("id"))
851 .and_then(Value::as_str)
852 .map(|s| Value::String(s.to_string()))
853 .unwrap_or(Value::Null),
854 FieldKind::RelationList(RelationShape::RelatedNode) => {
855 let items = field_val
856 .as_array()
857 .cloned()
858 .unwrap_or_default()
859 .into_iter()
860 .filter_map(|item| {
861 item.get("id")
862 .and_then(Value::as_str)
863 .map(|s| s.to_string())
864 })
865 .map(Value::String)
866 .collect::<Vec<_>>();
867 Value::Array(items)
868 }
869 FieldKind::RelationList(RelationShape::NestedPaginated) => {
870 let items = field_val
871 .get("edges")
872 .and_then(Value::as_array)
873 .cloned()
874 .unwrap_or_default()
875 .into_iter()
876 .filter_map(|edge| {
877 edge.get("node")
878 .and_then(|node| node.get("id"))
879 .and_then(Value::as_str)
880 .map(|s| s.to_string())
881 })
882 .map(Value::String)
883 .collect::<Vec<_>>();
884 Value::Array(items)
885 }
886 FieldKind::RelationList(RelationShape::NestedEdged) => {
887 let items = field_val
888 .as_array()
889 .cloned()
890 .unwrap_or_default()
891 .into_iter()
892 .filter_map(|edge| {
893 edge.get("node")
894 .and_then(|node| node.get("id"))
895 .and_then(Value::as_str)
896 .map(|s| s.to_string())
897 })
898 .map(Value::String)
899 .collect::<Vec<_>>();
900 Value::Array(items)
901 }
902 };
903 if value.is_null() {
904 Ok(None)
905 } else {
906 Ok(Some(value))
907 }
908}
909
/// Types and fields present in the alembic schema but absent from Infrahub.
#[derive(Debug, Default)]
struct SchemaMissing {
    // Missing type names.
    types: Vec<String>,
    // Missing fields as "Type.field" strings.
    fields: Vec<String>,
}
915
916impl SchemaMissing {
917 fn is_empty(&self) -> bool {
918 self.types.is_empty() && self.fields.is_empty()
919 }
920
921 fn summary(&self) -> String {
922 let mut parts = Vec::new();
923 if !self.types.is_empty() {
924 parts.push(format!("missing types: {}", self.types.join(", ")));
925 }
926 if !self.fields.is_empty() {
927 parts.push(format!("missing fields: {}", self.fields.join(", ")));
928 }
929 parts.join("; ")
930 }
931}
932
933fn schema_missing(schema: &Schema, schema_info: &SchemaInfo) -> SchemaMissing {
934 let mut missing = SchemaMissing::default();
935 for (type_name, type_schema) in &schema.types {
936 let gql_type = gql_type_name_str(type_name);
937 if !schema_info.type_fields.contains_key(&gql_type) {
938 missing.types.push(type_name.clone());
939 continue;
940 }
941 let fields = field_names_for_schema(type_schema);
942 for field in fields {
943 if schema_info
944 .type_fields
945 .get(&gql_type)
946 .and_then(|fields| fields.get(&field))
947 .is_none()
948 {
949 missing.fields.push(format!("{type_name}.{field}"));
950 }
951 }
952 }
953 missing
954}
955
956fn validate_schema(schema: &Schema, schema_info: &SchemaInfo) -> Result<()> {
957 let missing = schema_missing(schema, schema_info);
958 if missing.is_empty() {
959 return Ok(());
960 }
961 Err(anyhow!(
962 "infrahub schema mismatch (define missing types/fields before apply): {}",
963 missing.summary()
964 ))
965}
966
/// Everything needed to reconcile Infrahub's schema with the desired one.
#[derive(Debug)]
struct ProvisionPlan {
    // Schema document to serialize and push.
    document: SchemaDocument,
    // Report of what the plan will create/deprecate.
    report: ProvisionReport,
    // Human-readable mismatch summary, used in error messages.
    summary: String,
}
973
/// Compute the schema document needed to bring Infrahub in line with
/// `schema`.
///
/// Returns `Ok(None)` when nothing needs provisioning. Otherwise the plan
/// contains: new node definitions for unknown types, node extensions adding
/// missing fields to known types, and menu adjustments (desired types are
/// hidden from the menu; snapshot-only menu entries are marked stale).
fn build_provision_plan(
    schema: &Schema,
    schema_info: &SchemaInfo,
    schema_snapshot: &SchemaSnapshot,
) -> Result<Option<ProvisionPlan>> {
    let missing = schema_missing(schema, schema_info);
    let menu_anchors = menu_anchor_map(schema)?;
    let mut nodes = Vec::new();
    let mut extensions = Vec::new();
    let mut created_object_types = Vec::new();
    let mut created_object_fields = Vec::new();
    let mut deprecated_object_types = Vec::new();

    // Identity set of every desired type, for the stale-menu sweep below.
    let mut desired_node_keys = BTreeSet::new();
    for type_name in schema.types.keys() {
        let parts = type_name_parts(type_name)?;
        desired_node_keys.insert(NodeKey::new(parts.namespace, parts.name));
    }

    for (type_name, type_schema) in &schema.types {
        let gql_type = gql_type_name_str(type_name);
        let parts = type_name_parts(type_name)?;
        let menu_placement = menu_placement_for(&menu_anchors, &parts, &gql_type);
        // Minimal node used when the type already exists: only adjusts menu
        // visibility/placement, defining no attributes or relationships.
        let menu_node = NodeDef {
            name: parts.name.clone(),
            namespace: parts.namespace.clone(),
            label: None,
            description: None,
            icon: None,
            include_in_menu: Some(false),
            menu_placement,
            inherit_from: Vec::new(),
            human_friendly_id: Vec::new(),
            display_label: None,
            default_filter: None,
            attributes: Vec::new(),
            relationships: Vec::new(),
        };

        let Some(existing_fields) = schema_info.type_fields.get(&gql_type) else {
            // Unknown type: emit a full node definition with all fields.
            let (attributes, relationships, key_attrs) =
                collect_field_defs(type_name, type_schema, None)?;
            let mut human_friendly_id = Vec::new();
            if !key_attrs.is_empty() {
                // Infrahub HFIDs address attribute values via `__value`.
                human_friendly_id.extend(key_attrs.iter().map(|key| format!("{key}__value")));
            }
            let (display_label, default_filter) = display_label_for_keys(&key_attrs);
            let name = parts.name.clone();
            let namespace = parts.namespace.clone();
            let label = label_from_pascal(&name);
            nodes.push(NodeDef {
                name,
                namespace: namespace.clone(),
                label: Some(label),
                description: None,
                icon: None,
                include_in_menu: Some(false),
                menu_placement: menu_node.menu_placement.clone(),
                inherit_from: Vec::new(),
                human_friendly_id,
                display_label,
                default_filter,
                attributes,
                relationships,
            });
            created_object_types.push(type_name.clone());
            for field in field_names_for_schema(type_schema) {
                created_object_fields.push(format!("{type_name}.{field}"));
            }
            continue;
        };

        // Known type: always push the menu-only node, then extend with any
        // missing fields.
        nodes.push(menu_node);

        let mut missing_fields = BTreeSet::new();
        for field in field_names_for_schema(type_schema) {
            if !existing_fields.contains_key(&field) {
                missing_fields.insert(field.clone());
                created_object_fields.push(format!("{type_name}.{field}"));
            }
        }
        if missing_fields.is_empty() {
            continue;
        }
        let (attributes, relationships, _key_attrs) =
            collect_field_defs(type_name, type_schema, Some(&missing_fields))?;
        if attributes.is_empty() && relationships.is_empty() {
            continue;
        }
        extensions.push(NodeExtension {
            kind: gql_type,
            attributes,
            relationships,
        });
    }

    // Hide snapshot nodes that are menu-visible but no longer desired.
    for node in &schema_snapshot.nodes {
        if !node.include_in_menu {
            continue;
        }
        let key = node.key();
        if desired_node_keys.contains(&key) {
            continue;
        }
        deprecated_object_types.push(node.qualified_name());
        nodes.push(NodeDef {
            name: node.name.clone(),
            namespace: node.namespace.clone(),
            label: None,
            description: None,
            icon: None,
            include_in_menu: Some(false),
            menu_placement: None,
            human_friendly_id: Vec::new(),
            display_label: None,
            default_filter: None,
            attributes: Vec::new(),
            relationships: Vec::new(),
            // Preserve inheritance so the re-declaration stays compatible.
            inherit_from: node.inherit_from.clone(),
        });
    }

    if nodes.is_empty() && extensions.is_empty() {
        return Ok(None);
    }

    let document = SchemaDocument {
        version: "1.0".to_string(),
        nodes,
        extensions: SchemaExtensions { nodes: extensions },
    };

    let mut summary = missing.summary();
    if !deprecated_object_types.is_empty() {
        if !summary.is_empty() {
            summary.push_str("; ");
        }
        summary.push_str(&format!(
            "stale menu types: {}",
            deprecated_object_types.join(", ")
        ));
    }

    let report = ProvisionReport {
        created_fields: Vec::new(),
        created_tags: Vec::new(),
        created_object_types,
        created_object_fields,
        deprecated_object_types,
        deprecated_object_fields: Vec::new(),
        deleted_object_types: Vec::new(),
        deleted_object_fields: Vec::new(),
    };

    Ok(Some(ProvisionPlan {
        document,
        report,
        summary,
    }))
}
1134
1135fn write_schema_document(path: &Path, document: &SchemaDocument) -> Result<()> {
1136 if let Some(parent) = path.parent() {
1137 fs::create_dir_all(parent)
1138 .with_context(|| format!("create schema directory {}", parent.display()))?;
1139 }
1140 let raw = serde_yaml::to_string(document).context("serialize infrahub schema")?;
1141 fs::write(path, raw).with_context(|| format!("write schema {}", path.display()))?;
1142 Ok(())
1143}
1144
1145fn ensure_repository_config(repo_root: &Path, schema_path: &Path) -> Result<()> {
1146 let rel_path = schema_path
1147 .strip_prefix(repo_root)
1148 .with_context(|| {
1149 format!(
1150 "schema path {} must be inside repository root {}",
1151 schema_path.display(),
1152 repo_root.display()
1153 )
1154 })?
1155 .to_string_lossy()
1156 .replace('\\', "/");
1157
1158 let config_path = repo_root.join(".infrahub.yml");
1159 let mut root = if config_path.exists() {
1160 let raw = fs::read_to_string(&config_path)
1161 .with_context(|| format!("read {}", config_path.display()))?;
1162 serde_yaml::from_str::<YamlValue>(&raw)
1163 .with_context(|| format!("parse {}", config_path.display()))?
1164 } else {
1165 YamlValue::Mapping(YamlMapping::new())
1166 };
1167
1168 let mapping = root
1169 .as_mapping_mut()
1170 .ok_or_else(|| anyhow!(".infrahub.yml must be a mapping"))?;
1171 let schemas_key = YamlValue::String("schemas".to_string());
1172 let entry = mapping
1173 .entry(schemas_key)
1174 .or_insert_with(|| YamlValue::Sequence(Vec::new()));
1175 let list = entry
1176 .as_sequence_mut()
1177 .ok_or_else(|| anyhow!("schemas must be a list in .infrahub.yml"))?;
1178 if !list.iter().any(|v| v.as_str() == Some(&rel_path)) {
1179 list.push(YamlValue::String(rel_path));
1180 }
1181
1182 let raw = serde_yaml::to_string(&root).context("serialize .infrahub.yml")?;
1183 fs::write(&config_path, raw).with_context(|| format!("write {}", config_path.display()))?;
1184 Ok(())
1185}
1186
/// Root of a generated infrahub schema YAML file; serialized with
/// `serde_yaml` by `write_schema_document`. Empty sections are omitted.
#[derive(Debug, Serialize)]
struct SchemaDocument {
    version: String,
    /// Brand-new node definitions to create.
    #[serde(skip_serializing_if = "Vec::is_empty")]
    nodes: Vec<NodeDef>,
    /// Extensions adding fields to kinds that already exist.
    #[serde(skip_serializing_if = "SchemaExtensions::is_empty")]
    extensions: SchemaExtensions,
}
1195
/// `extensions:` section of the schema document.
#[derive(Debug, Default, Serialize)]
struct SchemaExtensions {
    #[serde(skip_serializing_if = "Vec::is_empty")]
    nodes: Vec<NodeExtension>,
}
1201
impl SchemaExtensions {
    /// True when there are no node extensions; used by serde to skip the
    /// whole `extensions` key in the YAML output.
    fn is_empty(&self) -> bool {
        self.nodes.is_empty()
    }
}
1207
/// One node definition in the generated schema. Fields are serialized in
/// declaration order; `None`/empty entries are omitted from the YAML.
#[derive(Debug, Serialize)]
struct NodeDef {
    name: String,
    namespace: String,
    #[serde(skip_serializing_if = "Option::is_none")]
    label: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    description: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    icon: Option<String>,
    // Set to Some(false) for stale types that should disappear from the menu.
    #[serde(skip_serializing_if = "Option::is_none")]
    include_in_menu: Option<bool>,
    #[serde(skip_serializing_if = "Option::is_none")]
    menu_placement: Option<String>,
    #[serde(skip_serializing_if = "Vec::is_empty")]
    inherit_from: Vec<String>,
    #[serde(skip_serializing_if = "Vec::is_empty")]
    human_friendly_id: Vec<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    display_label: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    default_filter: Option<String>,
    #[serde(skip_serializing_if = "Vec::is_empty")]
    attributes: Vec<AttributeDef>,
    #[serde(skip_serializing_if = "Vec::is_empty")]
    relationships: Vec<RelationshipDef>,
}
1235
/// Extension entry adding attributes/relationships to an existing kind.
#[derive(Debug, Serialize)]
struct NodeExtension {
    /// Fully-qualified GraphQL kind being extended (e.g. `DcimSite`).
    kind: String,
    #[serde(skip_serializing_if = "Vec::is_empty")]
    attributes: Vec<AttributeDef>,
    #[serde(skip_serializing_if = "Vec::is_empty")]
    relationships: Vec<RelationshipDef>,
}
1244
/// Attribute definition in the generated schema.
#[derive(Debug, Serialize)]
struct AttributeDef {
    name: String,
    /// Infrahub attribute kind (e.g. "Text", "Number", "Dropdown"); see
    /// `attribute_kind_for_field`.
    kind: String,
    #[serde(skip_serializing_if = "Option::is_none")]
    optional: Option<bool>,
    // Some(true) for key fields (see `attribute_def`).
    #[serde(skip_serializing_if = "Option::is_none")]
    unique: Option<bool>,
    #[serde(skip_serializing_if = "Option::is_none")]
    description: Option<String>,
    // Serialized under the `enum` key; `attribute_def` currently leaves this
    // empty and expresses enums via `choices` instead.
    #[serde(skip_serializing_if = "Vec::is_empty", rename = "enum")]
    enum_values: Vec<String>,
    /// Dropdown choices derived from `FieldType::Enum` values.
    #[serde(skip_serializing_if = "Vec::is_empty")]
    choices: Vec<ChoiceDef>,
}
1260
/// One dropdown choice attached to a `Dropdown` attribute.
#[derive(Debug, Serialize)]
struct ChoiceDef {
    name: String,
    #[serde(skip_serializing_if = "Option::is_none")]
    description: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    color: Option<String>,
}
1269
/// Relationship definition in the generated schema.
#[derive(Debug, Serialize)]
struct RelationshipDef {
    name: String,
    /// GraphQL kind of the peer node (e.g. `DcimSite`).
    peer: String,
    // Always "Attribute" as emitted by `relationship_def`.
    kind: String,
    /// "one" or "many" (see `collect_field_defs`).
    cardinality: String,
    #[serde(skip_serializing_if = "Option::is_none")]
    identifier: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    optional: Option<bool>,
    #[serde(skip_serializing_if = "Option::is_none")]
    direction: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    description: Option<String>,
}
1285
/// PascalCase namespace/name pair derived from an alembic type name by
/// `type_name_parts`.
#[derive(Debug, Clone)]
struct TypeNameParts {
    namespace: String,
    name: String,
}
1291
/// (namespace, name) identity of a schema node; the full ordering derives
/// allow it to be used in sorted sets for membership checks.
#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord)]
struct NodeKey {
    namespace: String,
    name: String,
}
1297
impl NodeKey {
    /// Build a key from anything convertible to `String` (e.g. `&str`).
    fn new(namespace: impl Into<String>, name: impl Into<String>) -> Self {
        Self {
            namespace: namespace.into(),
            name: name.into(),
        }
    }
}
1306
1307fn type_name_parts(type_name: &str) -> Result<TypeNameParts> {
1308 if let Some((namespace_raw, name_raw)) = type_name.split_once('.') {
1309 let namespace = to_pascal_case(namespace_raw);
1310 let name = to_pascal_case(name_raw);
1311 return Ok(TypeNameParts { namespace, name });
1312 }
1313
1314 let parts = split_camel_case(type_name);
1315 if parts.len() >= 2 {
1316 let namespace = parts[0].clone();
1317 let name = parts[1..].join("");
1318 return Ok(TypeNameParts { namespace, name });
1319 }
1320
1321 Err(anyhow!(
1322 "infrahub schema provisioning requires namespaced types (e.g. namespace.type)"
1323 ))
1324}
1325
/// GraphQL kind name for an alembic `TypeName`; see `gql_type_name_str`.
fn gql_type_name(type_name: &TypeName) -> String {
    gql_type_name_str(type_name.as_str())
}
1329
1330fn gql_type_name_str(type_name: &str) -> String {
1331 if let Some((namespace_raw, name_raw)) = type_name.split_once('.') {
1332 let namespace = to_pascal_case(namespace_raw);
1333 let name = to_pascal_case(name_raw);
1334 return format!("{namespace}{name}");
1335 }
1336 type_name.to_string()
1337}
1338
/// Split a type's alembic fields into infrahub attribute and relationship
/// definitions.
///
/// Returns `(attributes, relationships, key_attrs)`, where `key_attrs` lists
/// the key fields that became attributes. When `include_fields` is `Some`,
/// only those fields are emitted (used to build extensions for fields missing
/// on an existing node).
fn collect_field_defs(
    type_name: &str,
    type_schema: &alembic_core::TypeSchema,
    include_fields: Option<&BTreeSet<String>>,
) -> Result<(Vec<AttributeDef>, Vec<RelationshipDef>, Vec<String>)> {
    let mut attributes = Vec::new();
    let mut relationships = Vec::new();
    let mut key_attrs = Vec::new();
    // Guards against a field appearing in both `key` and `fields`.
    let mut seen = BTreeSet::new();
    let source_kind = identifier_part(&gql_type_name_str(type_name));

    let mut handle_field =
        |field: &str, schema: &alembic_core::FieldSchema, is_key: bool| -> Result<()> {
            if let Some(include) = include_fields {
                if !include.contains(field) {
                    return Ok(());
                }
            }
            if !seen.insert(field.to_string()) {
                return Ok(());
            }
            match &schema.r#type {
                // Single refs become cardinality-one relationships.
                FieldType::Ref { target } => {
                    relationships.push(relationship_def(
                        field,
                        target,
                        schema,
                        "one",
                        &source_kind,
                    )?);
                }
                // Ref lists become cardinality-many relationships.
                FieldType::ListRef { target } => {
                    relationships.push(relationship_def(
                        field,
                        target,
                        schema,
                        "many",
                        &source_kind,
                    )?);
                }
                // Everything else maps to a plain attribute.
                _ => {
                    attributes.push(attribute_def(field, schema, is_key)?);
                    if is_key {
                        key_attrs.push(field.to_string());
                    }
                }
            }
            Ok(())
        };

    // Key fields first so they win the `seen` de-duplication.
    for (field, schema) in &type_schema.key {
        handle_field(field, schema, true)?;
    }
    for (field, schema) in &type_schema.fields {
        handle_field(field, schema, false)?;
    }

    Ok((attributes, relationships, key_attrs))
}
1398
1399fn attribute_def(
1400 field: &str,
1401 schema: &alembic_core::FieldSchema,
1402 is_key: bool,
1403) -> Result<AttributeDef> {
1404 let kind = attribute_kind_for_field(&schema.r#type);
1405 let optional = Some(!schema.required);
1406 let unique = if is_key { Some(true) } else { None };
1407 let enum_values = Vec::new();
1408 let mut choices = Vec::new();
1409 if let FieldType::Enum { values } = &schema.r#type {
1410 for value in values {
1411 choices.push(ChoiceDef {
1412 name: value.clone(),
1413 description: None,
1414 color: None,
1415 });
1416 }
1417 }
1418
1419 Ok(AttributeDef {
1420 name: field.to_string(),
1421 kind,
1422 optional,
1423 unique,
1424 description: schema.description.clone(),
1425 enum_values,
1426 choices,
1427 })
1428}
1429
1430fn relationship_def(
1431 field: &str,
1432 target: &str,
1433 schema: &alembic_core::FieldSchema,
1434 cardinality: &str,
1435 source_kind: &str,
1436) -> Result<RelationshipDef> {
1437 Ok(RelationshipDef {
1438 name: field.to_string(),
1439 peer: gql_type_name_str(target),
1440 kind: "Attribute".to_string(),
1441 cardinality: cardinality.to_string(),
1442 identifier: Some(relationship_identifier(source_kind, field)),
1443 optional: Some(!schema.required),
1444 direction: Some("outbound".to_string()),
1445 description: schema.description.clone(),
1446 })
1447}
1448
1449fn relationship_identifier(source_kind: &str, field: &str) -> String {
1450 format!(
1451 "{}__{}",
1452 identifier_part(source_kind),
1453 identifier_part(field)
1454 )
1455}
1456
/// Sanitize a string for use inside a relationship identifier: ASCII
/// alphanumerics are lowercased, every other character becomes `_`.
fn identifier_part(raw: &str) -> String {
    let mut out = String::with_capacity(raw.len());
    for ch in raw.chars() {
        if ch.is_ascii_alphanumeric() {
            out.push(ch.to_ascii_lowercase());
        } else {
            out.push('_');
        }
    }
    out
}
1468
1469fn attribute_kind_for_field(field_type: &FieldType) -> String {
1470 match field_type {
1471 FieldType::String | FieldType::Text | FieldType::Uuid | FieldType::Slug => {
1472 "Text".to_string()
1473 }
1474 FieldType::Enum { .. } => "Dropdown".to_string(),
1475 FieldType::Int | FieldType::Float => "Number".to_string(),
1476 FieldType::Bool => "Boolean".to_string(),
1477 FieldType::Date | FieldType::Datetime | FieldType::Time => "DateTime".to_string(),
1478 FieldType::Json | FieldType::Map { .. } => "JSON".to_string(),
1479 FieldType::List { .. } => "List".to_string(),
1480 FieldType::IpAddress => "IPHost".to_string(),
1481 FieldType::Cidr | FieldType::Prefix => "IPNetwork".to_string(),
1482 FieldType::Mac => "MacAddress".to_string(),
1483 FieldType::Ref { .. } | FieldType::ListRef { .. } => "Text".to_string(),
1484 }
1485}
1486
/// Derive `(display_label, default_filter)` templates from a type's key
/// fields, using the first key as the primary. Returns `(None, None)` for an
/// empty key list.
fn display_label_for_keys(keys: &[String]) -> (Option<String>, Option<String>) {
    match keys.first() {
        Some(primary) => {
            let filter = format!("{primary}__value");
            // Literal `{{ ... }}` template around the filter expression.
            (Some(format!("{{{{ {filter} }}}}")), Some(filter))
        }
        None => (None, None),
    }
}
1496
/// Turn a PascalCase name into a human label by inserting a space before
/// every uppercase character after the first (e.g. "DcimSite" -> "Dcim Site").
fn label_from_pascal(name: &str) -> String {
    name.chars()
        .enumerate()
        .fold(String::new(), |mut acc, (idx, ch)| {
            if idx > 0 && ch.is_uppercase() {
                acc.push(' ');
            }
            acc.push(ch);
            acc
        })
}
1507
1508fn menu_anchor_map(schema: &Schema) -> Result<BTreeMap<String, String>> {
1509 let mut by_namespace: BTreeMap<String, Vec<(String, String)>> = BTreeMap::new();
1510 for type_name in schema.types.keys() {
1511 let parts = type_name_parts(type_name)?;
1512 by_namespace
1513 .entry(parts.namespace)
1514 .or_default()
1515 .push((parts.name, type_name.clone()));
1516 }
1517
1518 let mut anchors = BTreeMap::new();
1519 for (namespace, entries) in by_namespace {
1520 let anchor = pick_menu_anchor(&entries);
1521 anchors.insert(namespace, anchor);
1522 }
1523 Ok(anchors)
1524}
1525
/// Pick the anchor for a namespace: the lexicographically smallest type name
/// among `(name, type_name)` entries, or an empty string for no entries.
fn pick_menu_anchor(entries: &[(String, String)]) -> String {
    entries
        .iter()
        .min_by(|a, b| a.1.cmp(&b.1))
        .map(|(_, type_name)| type_name.clone())
        .unwrap_or_default()
}
1534
1535fn menu_placement_for(
1536 anchors: &BTreeMap<String, String>,
1537 parts: &TypeNameParts,
1538 gql_type: &str,
1539) -> Option<String> {
1540 let anchor = anchors
1541 .get(&parts.namespace)
1542 .map(|type_name| gql_type_name_str(type_name))?;
1543 if anchor == gql_type {
1544 None
1545 } else {
1546 Some(anchor)
1547 }
1548}
1549
/// Convert a delimited name to PascalCase: split on `_`, `-`, space and `.`,
/// capitalize each part's first character and lowercase the remainder
/// (e.g. "my_site-name" -> "MySiteName").
fn to_pascal_case(raw: &str) -> String {
    let mut out = String::new();
    for part in raw.split(['_', '-', ' ', '.']) {
        let mut chars = part.chars();
        // Empty parts (repeated delimiters) contribute nothing.
        if let Some(first) = chars.next() {
            out.extend(first.to_uppercase());
            out.push_str(&chars.as_str().to_lowercase());
        }
    }
    out
}
1566
/// Split a CamelCase string into its segments; each uppercase character after
/// the first starts a new segment (e.g. "DcimSite" -> ["Dcim", "Site"]).
fn split_camel_case(raw: &str) -> Vec<String> {
    let mut parts = Vec::new();
    let mut current = String::new();
    for ch in raw.chars() {
        if ch.is_uppercase() && !current.is_empty() {
            // Flush the finished segment and start a fresh one.
            parts.push(std::mem::take(&mut current));
        }
        current.push(ch);
    }
    if !current.is_empty() {
        parts.push(current);
    }
    parts
}
1582
1583fn validate_kind(
1584 type_name: &str,
1585 field: &str,
1586 field_type: &FieldType,
1587 kind: &FieldKind,
1588) -> Result<()> {
1589 match field_type {
1590 FieldType::Ref { .. } => match kind {
1591 FieldKind::RelationSingle(_) => Ok(()),
1592 _ => Err(anyhow!(
1593 "expected {}.{} to be a single relation in infrahub",
1594 type_name,
1595 field
1596 )),
1597 },
1598 FieldType::ListRef { .. } => match kind {
1599 FieldKind::RelationList(_) => Ok(()),
1600 _ => Err(anyhow!(
1601 "expected {}.{} to be a list relation in infrahub",
1602 type_name,
1603 field
1604 )),
1605 },
1606 _ => match kind {
1607 FieldKind::Attribute => Ok(()),
1608 _ => Err(anyhow!(
1609 "expected {}.{} to be an attribute in infrahub",
1610 type_name,
1611 field
1612 )),
1613 },
1614 }
1615}
1616
1617fn field_schema_for<'a>(
1618 type_schema: &'a alembic_core::TypeSchema,
1619 field: &str,
1620) -> Option<&'a alembic_core::FieldSchema> {
1621 type_schema
1622 .fields
1623 .get(field)
1624 .or_else(|| type_schema.key.get(field))
1625}
1626
/// Build the GraphQL mutation input object for one node from the desired
/// alembic attribute map.
///
/// Scalar attributes are wrapped as `{ "value": ... }`; refs and list-refs
/// are translated from alembic uids to backend ids via `resolved` and emitted
/// as `{ "id": ... }` objects.
///
/// Errors when a field has no schema entry, fails type validation, or
/// (inside `resolve_value_for_type`) references an unresolved uid.
fn build_input(
    attrs: &JsonMap,
    type_schema: &alembic_core::TypeSchema,
    resolved: &BTreeMap<Uid, BackendId>,
) -> Result<Value> {
    let mut map = Map::new();
    for (field, value) in attrs.iter() {
        let field_schema = field_schema_for(type_schema, field)
            .ok_or_else(|| anyhow!("missing schema for field {field}"))?;
        // Nulls pass straight through so callers can clear fields.
        if value.is_null() {
            map.insert(field.clone(), Value::Null);
            continue;
        }
        validate_value(field, &field_schema.r#type, value)?;
        let resolved_value = resolve_value_for_type(
            &field_schema.r#type,
            value.clone(),
            resolved,
            |id| match id {
                BackendId::Int(n) => json!({ "id": n.to_string() }),
                BackendId::String(s) => json!({ "id": s }),
            },
        )?;
        match field_schema.r#type {
            FieldType::Ref { .. } | FieldType::ListRef { .. } => {
                map.insert(field.clone(), resolved_value);
            }
            _ => {
                // Infrahub scalar attributes take a wrapped `value` object.
                map.insert(field.clone(), json!({ "value": resolved_value }));
            }
        }
    }
    Ok(Value::Object(map))
}
1661
1662fn validate_value(field: &str, field_type: &FieldType, value: &Value) -> Result<()> {
1663 if value.is_null() {
1664 return Ok(());
1665 }
1666 match field_type {
1667 FieldType::String
1668 | FieldType::Text
1669 | FieldType::Uuid
1670 | FieldType::Date
1671 | FieldType::Datetime
1672 | FieldType::Time
1673 | FieldType::IpAddress
1674 | FieldType::Cidr
1675 | FieldType::Prefix
1676 | FieldType::Mac
1677 | FieldType::Slug
1678 | FieldType::Enum { .. } => {
1679 if !value.is_string() {
1680 return Err(anyhow!("field {field} expects a string"));
1681 }
1682 }
1683 FieldType::Int => {
1684 if value.as_i64().is_none() && value.as_u64().is_none() {
1685 return Err(anyhow!("field {field} expects an integer"));
1686 }
1687 }
1688 FieldType::Float => {
1689 if !value.is_number() {
1690 return Err(anyhow!("field {field} expects a number"));
1691 }
1692 }
1693 FieldType::Bool => {
1694 if !value.is_boolean() {
1695 return Err(anyhow!("field {field} expects a boolean"));
1696 }
1697 }
1698 FieldType::List { .. } => {
1699 if !value.is_array() {
1700 return Err(anyhow!("field {field} expects a list"));
1701 }
1702 }
1703 FieldType::Map { .. } => {
1704 if !value.is_object() {
1705 return Err(anyhow!("field {field} expects a map"));
1706 }
1707 }
1708 FieldType::Json => {}
1709 FieldType::Ref { .. } => {
1710 if !value.is_string() {
1711 return Err(anyhow!("field {field} expects a ref uid string"));
1712 }
1713 }
1714 FieldType::ListRef { .. } => {
1715 if !value.is_array() {
1716 return Err(anyhow!("field {field} expects a list of ref uids"));
1717 }
1718 }
1719 }
1720 Ok(())
1721}
1722
/// Reverse lookup built from the state store: type name -> (backend id ->
/// alembic uid). See `state_mappings`.
#[derive(Debug, Default, Clone)]
struct StateMappings {
    by_type: BTreeMap<String, BTreeMap<BackendId, Uid>>,
}
1727
1728impl StateMappings {
1729 fn uid_for(&self, type_name: &str, backend_id: &BackendId) -> Option<Uid> {
1730 self.by_type
1731 .get(type_name)
1732 .and_then(|mapping| mapping.get(backend_id).copied())
1733 }
1734}
1735
1736fn state_mappings(state: &StateStore) -> StateMappings {
1737 let mut by_type = BTreeMap::new();
1738 for (type_name, mapping) in state.all_mappings() {
1739 let mut id_to_uid = BTreeMap::new();
1740 for (uid, backend_id) in mapping {
1741 id_to_uid.insert(backend_id.clone(), *uid);
1742 }
1743 by_type.insert(type_name.as_str().to_string(), id_to_uid);
1744 }
1745 StateMappings { by_type }
1746}
1747
1748fn resolved_from_state(state: &StateStore) -> BTreeMap<Uid, BackendId> {
1749 let mut resolved = BTreeMap::new();
1750 for mapping in state.all_mappings().values() {
1751 for (uid, backend_id) in mapping {
1752 resolved.insert(*uid, backend_id.clone());
1753 }
1754 }
1755 resolved
1756}
1757
1758fn normalize_attrs_refs(
1759 attrs: &JsonMap,
1760 type_schema: &alembic_core::TypeSchema,
1761 mappings: &StateMappings,
1762) -> JsonMap {
1763 let mut normalized = attrs.clone();
1764 for (field, schema) in &type_schema.fields {
1765 match &schema.r#type {
1766 FieldType::Ref { target } => {
1767 if let Some(value) = attrs.get(field) {
1768 normalized.insert(
1769 field.clone(),
1770 normalize_ref_value(value.clone(), target, mappings),
1771 );
1772 }
1773 }
1774 FieldType::ListRef { target } => {
1775 if let Some(value) = attrs.get(field) {
1776 let updated = if let Value::Array(items) = value {
1777 let mapped = items
1778 .iter()
1779 .cloned()
1780 .map(|item| normalize_ref_value(item, target, mappings))
1781 .collect::<Vec<_>>();
1782 Value::Array(mapped)
1783 } else {
1784 value.clone()
1785 };
1786 normalized.insert(field.clone(), updated);
1787 }
1788 }
1789 _ => {}
1790 }
1791 }
1792 normalized
1793}
1794
1795fn normalize_ref_value(value: Value, target: &str, mappings: &StateMappings) -> Value {
1796 if value.is_null() {
1797 return value;
1798 }
1799 let backend_id = match backend_id_from_value(&value) {
1800 Some(id) => id,
1801 None => return value,
1802 };
1803 mappings
1804 .uid_for(target, &backend_id)
1805 .map(|uid| Value::String(uid.to_string()))
1806 .unwrap_or(value)
1807}
1808
1809fn backend_id_from_value(value: &Value) -> Option<BackendId> {
1810 match value {
1811 Value::Number(n) => n.as_u64().map(BackendId::Int).or_else(|| {
1812 n.as_i64()
1813 .and_then(|v| u64::try_from(v).ok())
1814 .map(BackendId::Int)
1815 }),
1816 Value::String(s) => Some(BackendId::String(s.clone())),
1817 Value::Object(map) => map.get("id").and_then(backend_id_from_value),
1818 _ => None,
1819 }
1820}
1821
1822fn is_missing_ref_error(err: &anyhow::Error) -> bool {
1823 err.downcast_ref::<AdapterApplyError>()
1824 .is_some_and(|e| matches!(e, AdapterApplyError::MissingRef { .. }))
1825}
1826
1827fn describe_missing_refs(ops: &[Op], resolved: &BTreeMap<Uid, BackendId>) -> String {
1828 let mut missing = BTreeSet::new();
1829 for op in ops {
1830 if let Op::Create { desired, .. } | Op::Update { desired, .. } = op {
1831 for value in desired.attrs.values() {
1832 if let Some(uid) = extract_ref_uid(value) {
1833 if !resolved.contains_key(&uid) {
1834 missing.insert(uid);
1835 }
1836 }
1837 }
1838 }
1839 }
1840 missing
1841 .into_iter()
1842 .map(|uid| uid.to_string())
1843 .collect::<Vec<_>>()
1844 .join(", ")
1845}
1846
1847fn extract_ref_uid(value: &Value) -> Option<Uid> {
1848 match value {
1849 Value::String(raw) => Uid::parse_str(raw).ok(),
1850 Value::Array(items) => items.iter().find_map(extract_ref_uid),
1851 _ => None,
1852 }
1853}
1854
1855#[cfg(test)]
1856mod tests {
1857 use super::*;
1858 use alembic_core::{
1859 key_string, FieldSchema, FieldType, JsonMap, Key, Object, Schema, TypeName, TypeSchema,
1860 };
1861 use alembic_engine::{AdapterApplyError, BackendId, Op, StateData, StateStore};
1862 use httpmock::prelude::*;
1863 use serde_json::json;
1864 use std::collections::{BTreeMap, BTreeSet};
1865 use std::fs;
1866 use std::path::{Path, PathBuf};
1867 use std::time::{SystemTime, UNIX_EPOCH};
1868
    // Minimal infrahub-style GraphQL schema for SchemaInfo parsing tests:
    // covers an attribute wrapper type, plain related nodes, and the nested
    // edged / paginated relationship shapes.
    const GRAPHQL_SCHEMA: &str = r#"
interface AttributeInterface { value: String }
type TextAttribute implements AttributeInterface { value: String }
type RelatedNode { id: String kind: String }
type Owner { id: String }
type Peer { id: String }
type NestedEdgedOwner { node: Owner }
type NestedPaginatedPeerEdge { node: Peer }
type NestedPaginatedPeerConnection { edges: [NestedPaginatedPeerEdge] }
type DcimSite {
    id: ID
    hfid: String
    name: TextAttribute
    parent: RelatedNode
    children: [RelatedNode]
    owner: NestedEdgedOwner
    peers: NestedPaginatedPeerConnection
}
type DcimSiteEdge { node: DcimSite }
type DcimSiteConnection { count: Int edges: [DcimSiteEdge] }
type Query { DcimSite(offset: Int, limit: Int): DcimSiteConnection }
schema { query: Query }
"#;
1892
    // Shorthand: a FieldSchema with the given type/required flag and no
    // format, pattern, or description metadata.
    fn field_schema(field_type: FieldType, required: bool) -> FieldSchema {
        FieldSchema {
            r#type: field_type,
            required,
            nullable: false,
            format: None,
            pattern: None,
            description: None,
        }
    }
1903
1904 fn type_schema(
1905 key_fields: Vec<(&str, FieldSchema)>,
1906 fields: Vec<(&str, FieldSchema)>,
1907 ) -> TypeSchema {
1908 let mut key = BTreeMap::new();
1909 for (name, schema) in key_fields {
1910 key.insert(name.to_string(), schema);
1911 }
1912 let mut field_map = BTreeMap::new();
1913 for (name, schema) in fields {
1914 field_map.insert(name.to_string(), schema);
1915 }
1916 TypeSchema {
1917 key,
1918 fields: field_map,
1919 }
1920 }
1921
1922 fn schema_with(types: Vec<(&str, TypeSchema)>) -> Schema {
1923 let mut map = BTreeMap::new();
1924 for (name, schema) in types {
1925 map.insert(name.to_string(), schema);
1926 }
1927 Schema { types: map }
1928 }
1929
1930 fn temp_dir(prefix: &str) -> PathBuf {
1931 let mut dir = std::env::temp_dir();
1932 let now = SystemTime::now()
1933 .duration_since(UNIX_EPOCH)
1934 .unwrap()
1935 .as_nanos();
1936 dir.push(format!("alembic-{prefix}-{now}-{}", std::process::id()));
1937 fs::create_dir_all(&dir).unwrap();
1938 dir
1939 }
1940
    // chmod 0o755 so a freshly written file can be executed (used by
    // `write_executable`).
    #[cfg(unix)]
    fn make_executable(path: &Path) {
        use std::os::unix::fs::PermissionsExt;
        let mut perms = fs::metadata(path).unwrap().permissions();
        perms.set_mode(0o755);
        fs::set_permissions(path, perms).unwrap();
    }
1948
    // Write `contents` to `path` and, on unix, mark the file executable.
    fn write_executable(path: &Path, contents: &str) {
        fs::write(path, contents).unwrap();
        #[cfg(unix)]
        make_executable(path);
    }
1954
    // SchemaInfo should classify plain attributes and each relation shape
    // (related-node, nested-edged, nested-paginated) from the GraphQL schema,
    // and validate_kind should reject a mismatched classification.
    #[test]
    fn schema_info_parse_and_field_kinds() {
        let schema_info = SchemaInfo::parse(GRAPHQL_SCHEMA).unwrap();
        assert!(schema_info.attribute_types.contains("TextAttribute"));

        let type_schema = type_schema(
            vec![("name", field_schema(FieldType::String, true))],
            vec![
                (
                    "parent",
                    field_schema(
                        FieldType::Ref {
                            target: "dcim.site".to_string(),
                        },
                        false,
                    ),
                ),
                (
                    "children",
                    field_schema(
                        FieldType::ListRef {
                            target: "dcim.site".to_string(),
                        },
                        false,
                    ),
                ),
                (
                    "owner",
                    field_schema(
                        FieldType::Ref {
                            target: "dcim.owner".to_string(),
                        },
                        false,
                    ),
                ),
                (
                    "peers",
                    field_schema(
                        FieldType::ListRef {
                            target: "dcim.peer".to_string(),
                        },
                        false,
                    ),
                ),
            ],
        );

        let fields = field_names_for_schema(&type_schema);
        let kinds = schema_info
            .field_kinds("DcimSite", &type_schema, &fields)
            .unwrap();

        assert!(matches!(kinds.get("name"), Some(FieldKind::Attribute)));
        assert!(matches!(
            kinds.get("parent"),
            Some(FieldKind::RelationSingle(RelationShape::RelatedNode))
        ));
        assert!(matches!(
            kinds.get("children"),
            Some(FieldKind::RelationList(RelationShape::RelatedNode))
        ));
        assert!(matches!(
            kinds.get("owner"),
            Some(FieldKind::RelationSingle(RelationShape::NestedEdged))
        ));
        assert!(matches!(
            kinds.get("peers"),
            Some(FieldKind::RelationList(RelationShape::NestedPaginated))
        ));

        // An attribute kind where a single relation is required must error.
        let err = validate_kind(
            "DcimSite",
            "name",
            &FieldType::Ref {
                target: "dcim.site".to_string(),
            },
            &FieldKind::Attribute,
        )
        .unwrap_err();
        assert!(err.to_string().contains("expected DcimSite.name"));
    }
2036
    // build_selection must emit the right GraphQL sub-selection per field
    // shape, and extract_attrs must flatten the response back to plain ids.
    #[test]
    fn build_selection_and_extract_attrs() {
        let mut kinds = BTreeMap::new();
        kinds.insert("attr".to_string(), FieldKind::Attribute);
        kinds.insert(
            "rel_one".to_string(),
            FieldKind::RelationSingle(RelationShape::RelatedNode),
        );
        kinds.insert(
            "rel_edge".to_string(),
            FieldKind::RelationSingle(RelationShape::NestedEdged),
        );
        kinds.insert(
            "rel_page".to_string(),
            FieldKind::RelationSingle(RelationShape::NestedPaginated),
        );
        kinds.insert(
            "rel_many".to_string(),
            FieldKind::RelationList(RelationShape::RelatedNode),
        );
        kinds.insert(
            "rel_many_page".to_string(),
            FieldKind::RelationList(RelationShape::NestedPaginated),
        );
        kinds.insert(
            "rel_many_edge".to_string(),
            FieldKind::RelationList(RelationShape::NestedEdged),
        );

        let selection = build_selection(&kinds);
        assert!(selection.contains("attr { value }"));
        assert!(selection.contains("rel_one { id kind }"));
        assert!(selection.contains("rel_edge { node { id } }"));
        assert!(selection.contains("rel_page { node { id } }"));
        assert!(selection.contains("rel_many { id kind }"));
        assert!(selection.contains("rel_many_page { edges { node { id } } }"));
        assert!(selection.contains("rel_many_edge { node { id } }"));

        // Simulated response node exercising every shape plus a null field.
        let node = json!({
            "attr": {"value": "alpha"},
            "rel_one": {"id": "r1", "kind": "DcimSite"},
            "rel_edge": {"node": {"id": "r2"}},
            "rel_page": {"node": {"id": "r3"}},
            "rel_many": [{"id": "m1", "kind": "DcimSite"}, {"id": "m2", "kind": "DcimSite"}],
            "rel_many_page": {"edges": [{"node": {"id": "p1"}}, {"node": {"id": "p2"}}]},
            "rel_many_edge": [{"node": {"id": "e1"}}, {"node": {"id": "e2"}}],
            "missing": null
        });

        let attrs = extract_attrs(&node, &kinds).unwrap();
        assert_eq!(attrs.get("attr"), Some(&json!("alpha")));
        assert_eq!(attrs.get("rel_one"), Some(&json!("r1")));
        assert_eq!(attrs.get("rel_edge"), Some(&json!("r2")));
        assert_eq!(attrs.get("rel_page"), Some(&json!("r3")));
        assert_eq!(attrs.get("rel_many"), Some(&json!(["m1", "m2"])));
        assert_eq!(attrs.get("rel_many_page"), Some(&json!(["p1", "p2"])));
        assert_eq!(attrs.get("rel_many_edge"), Some(&json!(["e1", "e2"])));
        assert!(!attrs.contains_key("missing"));
    }
2096
    // A field present in alembic but absent from the GraphQL schema must be
    // reported by schema_missing and rejected by validate_schema.
    #[test]
    fn schema_missing_and_validate_schema() {
        let type_schema = type_schema(
            vec![("name", field_schema(FieldType::String, true))],
            vec![(
                "region",
                field_schema(
                    FieldType::Ref {
                        target: "dcim.region".to_string(),
                    },
                    false,
                ),
            )],
        );
        let schema = schema_with(vec![("dcim.site", type_schema)]);

        // GraphQL side only knows about DcimSite.name — region is missing.
        let mut fields = BTreeMap::new();
        fields.insert(
            "name".to_string(),
            GraphField {
                base_type: "TextAttribute".to_string(),
                is_list: false,
            },
        );
        let mut type_fields = BTreeMap::new();
        type_fields.insert("DcimSite".to_string(), fields);
        let schema_info = SchemaInfo {
            attribute_types: BTreeSet::new(),
            type_fields,
        };

        let missing = schema_missing(&schema, &schema_info);
        assert!(missing
            .fields
            .iter()
            .any(|field| field == "dcim.site.region"));

        let err = validate_schema(&schema, &schema_info).unwrap_err();
        assert!(err.to_string().contains("infrahub schema mismatch"));
    }
2137
    // Unknown types should become new node definitions and known types with
    // missing fields should become node extensions in the provision plan.
    #[test]
    fn build_provision_plan_creates_nodes_and_extensions() {
        let site_schema = type_schema(
            vec![("name", field_schema(FieldType::String, true))],
            vec![(
                "region",
                field_schema(
                    FieldType::Ref {
                        target: "dcim.region".to_string(),
                    },
                    false,
                ),
            )],
        );
        let prefix_schema = type_schema(
            vec![("prefix", field_schema(FieldType::Cidr, true))],
            vec![(
                "site",
                field_schema(
                    FieldType::Ref {
                        target: "dcim.site".to_string(),
                    },
                    false,
                ),
            )],
        );
        let schema = schema_with(vec![
            ("dcim.site", site_schema),
            ("ipam.prefix", prefix_schema),
        ]);

        // GraphQL side knows DcimSite.name only; ipam.prefix is absent.
        let mut fields = BTreeMap::new();
        fields.insert(
            "name".to_string(),
            GraphField {
                base_type: "TextAttribute".to_string(),
                is_list: false,
            },
        );
        let mut type_fields = BTreeMap::new();
        type_fields.insert("DcimSite".to_string(), fields);
        let schema_info = SchemaInfo {
            attribute_types: BTreeSet::new(),
            type_fields,
        };

        let snapshot = SchemaSnapshot::default();
        let plan = build_provision_plan(&schema, &schema_info, &snapshot)
            .unwrap()
            .unwrap();
        assert!(plan
            .report
            .created_object_types
            .contains(&"ipam.prefix".to_string()));
        assert!(plan
            .report
            .created_object_fields
            .contains(&"dcim.site.region".to_string()));
        assert_eq!(plan.document.nodes.len(), 2);
        assert_eq!(plan.document.extensions.nodes.len(), 1);
        let mut names = plan
            .document
            .nodes
            .iter()
            .map(|node| format!("{}.{}", node.namespace, node.name))
            .collect::<Vec<_>>();
        names.sort();
        assert!(names.contains(&"Dcim.Site".to_string()));
        assert!(names.contains(&"Ipam.Prefix".to_string()));
    }
2208
    // Schema documents round-trip to disk; .infrahub.yml registration is
    // idempotent and rejects paths outside the repository root.
    #[test]
    fn write_schema_document_and_repository_config() {
        let doc = SchemaDocument {
            version: "1.0".to_string(),
            nodes: vec![NodeDef {
                name: "Site".to_string(),
                namespace: "Dcim".to_string(),
                label: Some("Site".to_string()),
                description: None,
                icon: None,
                include_in_menu: None,
                menu_placement: None,
                inherit_from: Vec::new(),
                human_friendly_id: vec!["name__value".to_string()],
                display_label: Some("{{ name__value }}".to_string()),
                default_filter: Some("name__value".to_string()),
                attributes: Vec::new(),
                relationships: Vec::new(),
            }],
            extensions: SchemaExtensions::default(),
        };

        let dir = temp_dir("schema-doc");
        let schema_path = dir.join("schema/schema.yaml");
        write_schema_document(&schema_path, &doc).unwrap();
        let raw = fs::read_to_string(&schema_path).unwrap();
        assert!(raw.contains("version"));

        let repo_root = temp_dir("repo");
        let nested = repo_root.join("schemas/site.yaml");
        fs::create_dir_all(nested.parent().unwrap()).unwrap();
        fs::write(&nested, "version: 1.0").unwrap();
        // Second registration must not duplicate the entry.
        ensure_repository_config(&repo_root, &nested).unwrap();
        ensure_repository_config(&repo_root, &nested).unwrap();
        let config_path = repo_root.join(".infrahub.yml");
        let config = fs::read_to_string(&config_path).unwrap();
        assert!(config.contains("schemas"));
        assert!(config.contains("schemas/site.yaml"));
        assert_eq!(config.matches("schemas/site.yaml").count(), 1);

        // Paths outside the repository root are rejected.
        let outside = temp_dir("outside").join("schema.yaml");
        fs::write(&outside, "version: 1.0").unwrap();
        let err = ensure_repository_config(&repo_root, &outside).unwrap_err();
        assert!(err.to_string().contains("must be inside repository root"));
    }
2254
2255 #[test]
2256 fn build_input_and_validate_value() {
2257 let type_schema = type_schema(
2258 Vec::new(),
2259 vec![
2260 ("name", field_schema(FieldType::String, true)),
2261 ("count", field_schema(FieldType::Int, false)),
2262 (
2263 "parent",
2264 field_schema(
2265 FieldType::Ref {
2266 target: "dcim.site".to_string(),
2267 },
2268 false,
2269 ),
2270 ),
2271 (
2272 "tags",
2273 field_schema(
2274 FieldType::ListRef {
2275 target: "dcim.tag".to_string(),
2276 },
2277 false,
2278 ),
2279 ),
2280 ],
2281 );
2282
2283 let uid_parent = Uid::parse_str("00000000-0000-0000-0000-000000000001").unwrap();
2284 let uid_tag_a = Uid::parse_str("00000000-0000-0000-0000-000000000002").unwrap();
2285 let uid_tag_b = Uid::parse_str("00000000-0000-0000-0000-000000000003").unwrap();
2286
2287 let attrs = JsonMap::from(BTreeMap::from([
2288 ("name".to_string(), json!("Site-1")),
2289 ("count".to_string(), json!(5)),
2290 ("parent".to_string(), json!(uid_parent.to_string())),
2291 (
2292 "tags".to_string(),
2293 json!([uid_tag_a.to_string(), uid_tag_b.to_string()]),
2294 ),
2295 ]));
2296
2297 let mut resolved = BTreeMap::new();
2298 resolved.insert(uid_parent, BackendId::String("p1".to_string()));
2299 resolved.insert(uid_tag_a, BackendId::String("t1".to_string()));
2300 resolved.insert(uid_tag_b, BackendId::String("t2".to_string()));
2301
2302 let input = build_input(&attrs, &type_schema, &resolved).unwrap();
2303 assert_eq!(
2304 input,
2305 json!({
2306 "name": {"value": "Site-1"},
2307 "count": {"value": 5},
2308 "parent": {"id": "p1"},
2309 "tags": [{"id": "t1"}, {"id": "t2"}],
2310 })
2311 );
2312
2313 let err = validate_value("count", &FieldType::Int, &json!("oops")).unwrap_err();
2314 assert!(err.to_string().contains("expects an integer"));
2315 }
2316
2317 #[test]
2318 fn normalize_refs_and_backend_id() {
2319 let uid = Uid::parse_str("00000000-0000-0000-0000-000000000010").unwrap();
2320 let mut mappings = StateMappings::default();
2321 mappings.by_type.insert(
2322 "dcim.site".to_string(),
2323 BTreeMap::from([(BackendId::String("site-1".to_string()), uid)]),
2324 );
2325
2326 let type_schema = type_schema(
2327 Vec::new(),
2328 vec![
2329 (
2330 "parent",
2331 field_schema(
2332 FieldType::Ref {
2333 target: "dcim.site".to_string(),
2334 },
2335 false,
2336 ),
2337 ),
2338 (
2339 "children",
2340 field_schema(
2341 FieldType::ListRef {
2342 target: "dcim.site".to_string(),
2343 },
2344 false,
2345 ),
2346 ),
2347 ],
2348 );
2349
2350 let attrs = JsonMap::from(BTreeMap::from([
2351 ("parent".to_string(), json!("site-1")),
2352 ("children".to_string(), json!(["site-1", "site-2"])),
2353 ]));
2354
2355 let normalized = normalize_attrs_refs(&attrs, &type_schema, &mappings);
2356 assert_eq!(normalized.get("parent"), Some(&json!(uid.to_string())));
2357 assert_eq!(
2358 normalized.get("children"),
2359 Some(&json!([uid.to_string(), "site-2"]))
2360 );
2361
2362 assert_eq!(
2363 backend_id_from_value(&json!({"id": "abc"})),
2364 Some(BackendId::String("abc".to_string()))
2365 );
2366 assert_eq!(backend_id_from_value(&json!(42)), Some(BackendId::Int(42)));
2367 assert_eq!(backend_id_from_value(&json!(-1)), None);
2368 }
2369
2370 #[test]
2371 fn describe_missing_refs_and_extract_ref_uid() {
2372 let uid_present = Uid::parse_str("00000000-0000-0000-0000-000000000020").unwrap();
2373 let uid_missing = Uid::parse_str("00000000-0000-0000-0000-000000000021").unwrap();
2374
2375 let attrs = JsonMap::from(BTreeMap::from([
2376 ("ref".to_string(), json!(uid_missing.to_string())),
2377 ("refs".to_string(), json!([uid_present.to_string()])),
2378 ]));
2379
2380 let key = Key::from(BTreeMap::from([("name".to_string(), json!("site"))]));
2381 let obj = Object {
2382 uid: uid_present,
2383 type_name: TypeName::new("dcim.site"),
2384 key,
2385 attrs,
2386 source: None,
2387 };
2388
2389 let op = Op::Create {
2390 uid: uid_present,
2391 type_name: TypeName::new("dcim.site"),
2392 desired: obj,
2393 };
2394
2395 let mut resolved = BTreeMap::new();
2396 resolved.insert(uid_present, BackendId::String("ok".to_string()));
2397
2398 let missing = describe_missing_refs(&[op], &resolved);
2399 assert!(missing.contains(&uid_missing.to_string()));
2400
2401 let err = anyhow::Error::new(AdapterApplyError::MissingRef { uid: uid_missing });
2402 assert!(is_missing_ref_error(&err));
2403
2404 let extracted = extract_ref_uid(&json!([uid_present.to_string(), uid_missing.to_string()]));
2405 assert_eq!(extracted, Some(uid_present));
2406 }
2407
2408 #[test]
2409 fn attribute_kind_for_field_variants() {
2410 let cases = vec![
2411 (FieldType::String, "Text"),
2412 (FieldType::Text, "Text"),
2413 (FieldType::Uuid, "Text"),
2414 (FieldType::Slug, "Text"),
2415 (
2416 FieldType::Enum {
2417 values: vec!["a".to_string()],
2418 },
2419 "Dropdown",
2420 ),
2421 (FieldType::Int, "Number"),
2422 (FieldType::Float, "Number"),
2423 (FieldType::Bool, "Boolean"),
2424 (FieldType::Date, "DateTime"),
2425 (FieldType::Datetime, "DateTime"),
2426 (FieldType::Time, "DateTime"),
2427 (FieldType::Json, "JSON"),
2428 (
2429 FieldType::Map {
2430 value: Box::new(FieldType::String),
2431 },
2432 "JSON",
2433 ),
2434 (
2435 FieldType::List {
2436 item: Box::new(FieldType::String),
2437 },
2438 "List",
2439 ),
2440 (FieldType::IpAddress, "IPHost"),
2441 (FieldType::Cidr, "IPNetwork"),
2442 (FieldType::Prefix, "IPNetwork"),
2443 (FieldType::Mac, "MacAddress"),
2444 (
2445 FieldType::Ref {
2446 target: "dcim.site".to_string(),
2447 },
2448 "Text",
2449 ),
2450 (
2451 FieldType::ListRef {
2452 target: "dcim.site".to_string(),
2453 },
2454 "Text",
2455 ),
2456 ];
2457
2458 for (field_type, expected) in cases {
2459 assert_eq!(attribute_kind_for_field(&field_type), expected);
2460 }
2461 }
2462
2463 #[test]
2464 fn string_helpers() {
2465 assert_eq!(to_pascal_case("dcim_site"), "DcimSite");
2466 assert_eq!(to_pascal_case("ipam-prefix"), "IpamPrefix");
2467 assert_eq!(label_from_pascal("DeviceType"), "Device Type");
2468 assert_eq!(
2469 split_camel_case("DcimSite"),
2470 vec!["Dcim".to_string(), "Site".to_string()]
2471 );
2472 assert_eq!(gql_type_name_str("dcim.site"), "DcimSite");
2473 assert_eq!(gql_type_name_str("Device"), "Device");
2474 assert_eq!(display_label_for_keys(&[]), (None, None));
2475 }
2476
    #[tokio::test]
    async fn apply_schema_infrahubctl_executes() {
        // Stand in for the real `infrahubctl` binary with a bash script that
        // records its argv and the connection env vars it was invoked with,
        // each into its own file under the temp dir.
        let dir = temp_dir("infrahubctl");
        let args_path = dir.join("args.txt");
        let addr_path = dir.join("addr.txt");
        let token_path = dir.join("token.txt");
        let script_path = dir.join("infrahubctl");
        let script = format!(
            "#!/usr/bin/env bash\nset -euo pipefail\nprintf '%s' \"$*\" > \"{}\"\nprintf '%s' \"$INFRAHUB_ADDRESS\" > \"{}\"\nprintf '%s' \"$INFRAHUB_API_TOKEN\" > \"{}\"\n",
            args_path.display(),
            addr_path.display(),
            token_path.display()
        );
        write_executable(&script_path, &script);

        // Minimal schema file for the push.
        let schema_path = dir.join("schema.yaml");
        fs::write(&schema_path, "version: 1.0").unwrap();

        // Point the config at the fake binary and request a branch.
        let adapter = InfrahubAdapter::new("http://example.test", "token-123", None).unwrap();
        let mut config = SchemaPushConfig::infrahubctl(schema_path.clone());
        config.infrahubctl_path = Some(script_path);
        config.branch = Some("main".to_string());

        adapter.apply_schema_infrahubctl(&config).await.unwrap();

        // The fake binary must have been invoked as `schema load <path> --branch main`
        // and received the adapter's address and token via the environment.
        let args = fs::read_to_string(&args_path).unwrap();
        assert!(args.contains("schema load"));
        assert!(args.contains(schema_path.to_str().unwrap()));
        assert!(args.contains("--branch main"));
        assert_eq!(
            fs::read_to_string(&addr_path).unwrap(),
            "http://example.test"
        );
        assert_eq!(fs::read_to_string(&token_path).unwrap(), "token-123");
    }
2512
    #[tokio::test]
    async fn apply_schema_repository_flow() {
        let server = MockServer::start();
        // GraphQL lookup of the repository by name resolves to id "repo-1".
        let repo_mock = server.mock(|when, then| {
            when.method(POST)
                .path("/graphql")
                .body_contains("CoreRepository");
            then.status(200).json_body(json!({
                "data": {
                    "CoreRepository": { "edges": [ { "node": { "id": "repo-1" } } ] }
                },
                "errors": []
            }));
        });
        // The flow then triggers a repository (re)process via a second mutation.
        let process_mock = server.mock(|when, then| {
            when.method(POST)
                .path("/graphql")
                .body_contains("InfrahubRepositoryProcess");
            then.status(200).json_body(json!({
                "data": {
                    "InfrahubRepositoryProcess": { "ok": true, "task": { "id": "task-1" } }
                },
                "errors": []
            }));
        });

        // Local repository checkout containing the schema file to register.
        let repo_root = temp_dir("repo");
        let schema_path = repo_root.join("schemas/site.yaml");
        fs::create_dir_all(schema_path.parent().unwrap()).unwrap();
        fs::write(&schema_path, "version: 1.0").unwrap();

        let adapter = InfrahubAdapter::new(&server.base_url(), "token", None).unwrap();
        // No repository_id is given, so the flow must resolve it from
        // repository_name via the first GraphQL call.
        let config = SchemaPushConfig {
            schema_path: schema_path.clone(),
            mode: SchemaApplyMode::Repository,
            repository_id: None,
            repository_name: Some("repo-name".to_string()),
            repository_root: Some(repo_root.clone()),
            branch: None,
            infrahubctl_path: None,
        };

        adapter.apply_schema_repository(&config).await.unwrap();
        // Both GraphQL calls happened exactly as mocked...
        repo_mock.assert();
        process_mock.assert();
        // ...and the schema path was registered in the repo's .infrahub.yml.
        let config_path = repo_root.join(".infrahub.yml");
        let config_raw = fs::read_to_string(&config_path).unwrap();
        assert!(config_raw.contains("schemas/site.yaml"));
    }
2562
    #[tokio::test]
    async fn read_observes_objects() {
        let server = MockServer::start();
        // Serve the GraphQL SDL the adapter fetches for schema introspection.
        server.mock(|when, then| {
            when.method(GET).path("/schema.graphql");
            then.status(200).body(GRAPHQL_SCHEMA);
        });
        // Any DcimSite query returns one node with a single "name" attribute.
        server.mock(|when, then| {
            when.method(POST)
                .path("/graphql")
                .body_contains("DcimSite");
            then.status(200).json_body(json!({
                "data": {
                    "DcimSite": {
                        "count": 1,
                        "edges": [
                            { "node": { "id": "site-1", "hfid": "site-1", "name": { "value": "Site One" } } }
                        ]
                    }
                },
                "errors": []
            }));
        });

        let adapter = InfrahubAdapter::new(&server.base_url(), "token", None).unwrap();
        // Schema with one type whose single key field is "name".
        let schema = schema_with(vec![(
            "dcim.site",
            type_schema(
                vec![("name", field_schema(FieldType::String, true))],
                vec![],
            ),
        )]);
        let state = StateStore::new(None, StateData::default());
        let observed = adapter.read(&schema, &[], &state).await.unwrap();
        // Exactly one observed object, indexed by (type, key) with the
        // attribute value unwrapped from the GraphQL {"value": ..} envelope.
        assert_eq!(observed.by_key.len(), 1);
        let key = Key::from(BTreeMap::from([("name".to_string(), json!("Site One"))]));
        let object = observed
            .by_key
            .get(&(TypeName::new("dcim.site"), key_string(&key)))
            .unwrap();
        assert_eq!(object.attrs.get("name"), Some(&json!("Site One")));
    }
2605
    #[tokio::test]
    async fn write_applies_create_update_delete() {
        let server = MockServer::start();
        // Serve the GraphQL SDL the adapter fetches for schema introspection.
        server.mock(|when, then| {
            when.method(GET).path("/schema.graphql");
            then.status(200).body(GRAPHQL_SCHEMA);
        });
        // One mock per mutation kind, matched on the mutation name substring.
        server.mock(|when, then| {
            when.method(POST).path("/graphql").body_contains("Create");
            then.status(200).json_body(json!({
                "data": { "DcimSiteCreate": { "ok": true, "object": { "id": "site-1" } } },
                "errors": []
            }));
        });
        server.mock(|when, then| {
            when.method(POST).path("/graphql").body_contains("Update");
            then.status(200).json_body(json!({
                "data": { "DcimSiteUpdate": { "ok": true, "object": { "id": "site-2" } } },
                "errors": []
            }));
        });
        server.mock(|when, then| {
            when.method(POST).path("/graphql").body_contains("Delete");
            then.status(200).json_body(json!({
                "data": { "DcimSiteDelete": { "ok": true } },
                "errors": []
            }));
        });

        let adapter = InfrahubAdapter::new(&server.base_url(), "token", None).unwrap();
        let schema = schema_with(vec![(
            "dcim.site",
            type_schema(
                vec![("name", field_schema(FieldType::String, true))],
                vec![],
            ),
        )]);

        // Distinct uids for the three operations.
        let uid_create = Uid::parse_str("00000000-0000-0000-0000-000000000100").unwrap();
        let uid_update = Uid::parse_str("00000000-0000-0000-0000-000000000101").unwrap();
        let uid_delete = Uid::parse_str("00000000-0000-0000-0000-000000000102").unwrap();

        let key = Key::from(BTreeMap::from([("name".to_string(), json!("Site A"))]));
        let create_obj = Object {
            uid: uid_create,
            type_name: TypeName::new("dcim.site"),
            key: key.clone(),
            attrs: JsonMap::from(BTreeMap::from([("name".to_string(), json!("Site A"))])),
            source: None,
        };
        let update_obj = Object {
            uid: uid_update,
            type_name: TypeName::new("dcim.site"),
            key: key.clone(),
            attrs: JsonMap::from(BTreeMap::from([("name".to_string(), json!("Site A"))])),
            source: None,
        };

        // Update and Delete carry pre-resolved backend ids so no lookup is needed.
        let ops = vec![
            Op::Create {
                uid: uid_create,
                type_name: TypeName::new("dcim.site"),
                desired: create_obj,
            },
            Op::Update {
                uid: uid_update,
                type_name: TypeName::new("dcim.site"),
                desired: update_obj,
                changes: Vec::new(),
                backend_id: Some(BackendId::String("site-2".to_string())),
            },
            Op::Delete {
                uid: uid_delete,
                type_name: TypeName::new("dcim.site"),
                key,
                backend_id: Some(BackendId::String("site-3".to_string())),
            },
        ];

        let state = StateStore::new(None, StateData::default());
        let report = adapter.write(&schema, &ops, &state).await.unwrap();
        // All three ops must be reported as applied.
        assert_eq!(report.applied.len(), 3);
    }
2689
2690 #[tokio::test]
2691 async fn lookup_backend_id_resolves() {
2692 let server = MockServer::start();
2693 server.mock(|when, then| {
2694 when.method(GET).path("/schema.graphql");
2695 then.status(200).body(GRAPHQL_SCHEMA);
2696 });
2697 server.mock(|when, then| {
2698 when.method(POST)
2699 .path("/graphql")
2700 .body_contains("DcimSite");
2701 then.status(200).json_body(json!({
2702 "data": {
2703 "DcimSite": {
2704 "count": 1,
2705 "edges": [
2706 { "node": { "id": "site-42", "hfid": "site-42", "name": { "value": "Site Z" } } }
2707 ]
2708 }
2709 },
2710 "errors": []
2711 }));
2712 });
2713
2714 let adapter = InfrahubAdapter::new(&server.base_url(), "token", None).unwrap();
2715 let type_schema = type_schema(
2716 vec![("name", field_schema(FieldType::String, true))],
2717 vec![],
2718 );
2719 let key = Key::from(BTreeMap::from([("name".to_string(), json!("Site Z"))]));
2720 let id = adapter
2721 .lookup_backend_id(&TypeName::new("dcim.site"), &type_schema, &key)
2722 .await
2723 .unwrap();
2724 assert_eq!(id, "site-42");
2725 }
2726}