use std::collections::{BTreeMap, HashMap};
use std::env;
use std::fs;
use std::path::{Path, PathBuf};
use std::sync::Arc;

use anyhow::{Context, Result, anyhow, bail};
use greentic_flow::flow_bundle::{blake3_hex, canonicalize_json, load_and_validate_bundle};
use greentic_pack::PackKind;
use greentic_pack::builder::{
    ComponentArtifact, ComponentDescriptor, ComponentPin as PackComponentPin, DistributionSection,
    FlowBundle as PackFlowBundle, ImportRef, NodeRef as PackNodeRef, PACK_VERSION, PackBuilder,
    PackMeta, Provenance, Signing,
};
use greentic_pack::events::EventsSection;
use greentic_pack::messaging::MessagingSection;
use greentic_pack::repo::{InterfaceBinding, RepoPackSection};
use semver::Version;
use semver::VersionReq;
use serde::Deserialize;
use serde_json::{Value as JsonValue, json};
use time::OffsetDateTime;
use time::format_description::well_known::Rfc3339;

use crate::component_resolver::{
    ComponentResolver, NodeSchemaError, ResolvedComponent, ResolvedNode,
};
use crate::path_safety::normalize_under_root;
29
/// CLI-level signing choice; converted into the pack builder's [`Signing`]
/// mode via the `From` impl below.
#[derive(Debug, Clone, Copy)]
pub enum PackSigning {
    /// Development signing (maps to `Signing::Dev`).
    Dev,
    /// Build an unsigned pack (maps to `Signing::None`).
    None,
}
35
36impl From<PackSigning> for Signing {
37 fn from(value: PackSigning) -> Self {
38 match value {
39 PackSigning::Dev => Signing::Dev,
40 PackSigning::None => Signing::None,
41 }
42 }
43}
44
/// Entry point for `pack build`: normalizes all user-supplied paths under the
/// current workspace root, builds the pack once, and — when strict mode is
/// enabled via `LOCAL_CHECK_STRICT` — rebuilds it in a temp dir to verify
/// byte-identical output.
///
/// # Errors
/// Fails when the workspace root cannot be resolved, a supplied path escapes
/// the workspace, the build itself fails, or the determinism check detects
/// differing output.
pub fn run(
    flow_path: &Path,
    output_path: &Path,
    signing: PackSigning,
    meta_path: Option<&Path>,
    component_dir: Option<&Path>,
) -> Result<()> {
    // All inputs are re-rooted under the canonicalized cwd so relative or
    // crafted paths cannot traverse outside the workspace.
    let workspace_root = env::current_dir()
        .context("failed to resolve workspace root")?
        .canonicalize()
        .context("failed to canonicalize workspace root")?;
    let safe_flow = normalize_under_root(&workspace_root, flow_path)?;
    let safe_meta = meta_path
        .map(|path| normalize_under_root(&workspace_root, path))
        .transpose()?;
    let safe_component_dir = component_dir
        .map(|dir| normalize_under_root(&workspace_root, dir))
        .transpose()?;

    build_once(
        &safe_flow,
        output_path,
        signing,
        safe_meta.as_deref(),
        safe_component_dir.as_deref(),
    )?;
    // Optional second build: compares the two pack files byte-for-byte to
    // catch non-deterministic output.
    if strict_mode_enabled() {
        verify_determinism(
            &safe_flow,
            output_path,
            signing,
            safe_meta.as_deref(),
            safe_component_dir.as_deref(),
        )?;
    }
    Ok(())
}
82
/// Perform a single pack build: parse and validate the flow, resolve every
/// non-builtin component, validate node configs against component schemas,
/// and emit the signed pack at `output_path`.
fn build_once(
    flow_path: &Path,
    output_path: &Path,
    signing: PackSigning,
    meta_path: Option<&Path>,
    component_dir: Option<&Path>,
) -> Result<()> {
    let flow_source = fs::read_to_string(flow_path)
        .with_context(|| format!("failed to read {}", flow_path.display()))?;
    // The YAML is parsed twice on purpose: once into generic JSON (mutable,
    // for node resolution and the operation backfill below) and once through
    // the flow validator.
    let mut flow_doc_json: JsonValue =
        serde_yaml_bw::from_str(&flow_source).with_context(|| {
            format!(
                "failed to parse {} for node resolution",
                flow_path.display()
            )
        })?;
    let bundle = load_and_validate_bundle(&flow_source, Some(flow_path))
        .with_context(|| format!("flow validation failed for {}", flow_path.display()))?;

    let mut resolver = ComponentResolver::new(component_dir.map(PathBuf::from));
    let mut resolved_nodes = Vec::new();
    let mut schema_errors = Vec::new();

    for node in &bundle.nodes {
        // Builtins are handled by the runtime and not packaged — except
        // `component.exec`, whose referenced component must still be
        // resolved and bundled.
        if is_builtin_component(&node.component.name) {
            if node.component.name == "component.exec"
                && let Some(exec_node) =
                    resolve_component_exec_node(&mut resolver, node, &flow_doc_json)?
            {
                schema_errors.extend(resolver.validate_node(&exec_node)?);
                resolved_nodes.push(exec_node);
            }
            continue;
        }
        let resolved = resolver.resolve_node(node, &flow_doc_json)?;
        schema_errors.extend(resolver.validate_node(&resolved)?);
        resolved_nodes.push(resolved);
    }

    if !schema_errors.is_empty() {
        // Aggregates every failure into a single error (always bails).
        report_schema_errors(&schema_errors)?;
    }

    // Backfill missing `operation`/`op` fields from component manifests
    // before the flow JSON is canonicalized into the pack.
    ensure_node_operations(&mut flow_doc_json, &resolved_nodes)?;

    write_resolved_configs(&resolved_nodes)?;

    let meta = load_pack_meta(meta_path, &bundle)?;
    let mut builder = PackBuilder::new(meta)
        .with_flow(to_pack_flow_bundle(&bundle, &flow_doc_json, &flow_source))
        .with_signing(signing.into())
        .with_provenance(build_provenance());

    for artifact in collect_component_artifacts(&resolved_nodes) {
        builder = builder.with_component(artifact);
    }

    // Ensure the output directory exists; an empty parent means the path is
    // relative to the cwd and needs no creation.
    if let Some(parent) = output_path.parent()
        && !parent.as_os_str().is_empty()
    {
        fs::create_dir_all(parent)
            .with_context(|| format!("failed to create {}", parent.display()))?;
    }

    let build_result = builder
        .build(output_path)
        .context("pack build failed (sign/build stage)")?;
    println!(
        "✓ Pack built at {} (manifest hash {})",
        build_result.out_path.display(),
        build_result.manifest_hash_blake3
    );

    Ok(())
}
160
/// Returns true when the `LOCAL_CHECK_STRICT` environment variable is set to
/// a truthy value: `1` or any capitalization of `true` (surrounding
/// whitespace ignored).
///
/// Previously only the exact strings `1`, `true`, and `TRUE` were accepted,
/// so common spellings like `True` silently left strict mode off; the
/// comparison is now case-insensitive and trimmed (a strict superset of the
/// old behavior).
fn strict_mode_enabled() -> bool {
    let value = std::env::var("LOCAL_CHECK_STRICT").unwrap_or_default();
    let value = value.trim();
    value == "1" || value.eq_ignore_ascii_case("true")
}
169
/// Rebuild the pack into a temp directory and compare it byte-for-byte with
/// the primary output; bails if the two builds differ.
///
/// Only invoked when `LOCAL_CHECK_STRICT` is enabled.
fn verify_determinism(
    flow_path: &Path,
    output_path: &Path,
    signing: PackSigning,
    meta_path: Option<&Path>,
    component_dir: Option<&Path>,
) -> Result<()> {
    let temp_dir = tempfile::tempdir().context("failed to create tempdir for determinism check")?;
    let temp_pack = temp_dir.path().join("deterministic.gtpack");
    build_once(flow_path, &temp_pack, signing, meta_path, component_dir)
        .context("determinism build failed")?;
    // Re-validate the primary output path against the workspace root before
    // reading it back.
    let workspace_root = env::current_dir()
        .context("failed to resolve workspace root")?
        .canonicalize()
        .context("failed to canonicalize workspace root")?;
    let safe_output = normalize_under_root(&workspace_root, output_path)?;
    let expected = fs::read(&safe_output).context("failed to read primary pack for determinism")?;
    let actual = fs::read(&temp_pack).context("failed to read temp pack for determinism")?;
    if expected != actual {
        bail!("LOCAL_CHECK_STRICT detected non-deterministic pack output");
    }
    println!("LOCAL_CHECK_STRICT verified deterministic pack output");
    Ok(())
}
194
195fn to_pack_flow_bundle(
196 bundle: &greentic_flow::flow_bundle::FlowBundle,
197 flow_doc_json: &JsonValue,
198 flow_yaml: &str,
199) -> PackFlowBundle {
200 let canonical_json = canonicalize_json(flow_doc_json);
201
202 PackFlowBundle {
203 id: bundle.id.clone(),
204 kind: bundle.kind.clone(),
205 entry: bundle.entry.clone(),
206 yaml: flow_yaml.to_string(),
207 json: canonical_json.clone(),
208 hash_blake3: blake3_hex(
209 serde_json::to_vec(&canonical_json).expect("canonical flow JSON serialization"),
210 ),
211 nodes: bundle
212 .nodes
213 .iter()
214 .map(|node| PackNodeRef {
215 node_id: node.node_id.clone(),
216 component: PackComponentPin {
217 name: node.component.name.clone(),
218 version_req: node.component.version_req.clone(),
219 },
220 schema_id: node.schema_id.clone(),
221 })
222 .collect(),
223 }
224}
225
/// Ensure every resolved node's config carries an operation name: when both
/// `operation` and `op` are missing or blank, fill them with the component's
/// default operation (the first one listed in its manifest).
///
/// Mutates `flow_doc_json` in place. Nodes absent from the document, or
/// whose config is not a JSON object, are skipped silently.
fn ensure_node_operations(flow_doc_json: &mut JsonValue, nodes: &[ResolvedNode]) -> Result<()> {
    // No `nodes` map means there is nothing to patch.
    let Some(nodes_map) = flow_doc_json
        .get_mut("nodes")
        .and_then(|v| v.as_object_mut())
    else {
        return Ok(());
    };

    for node in nodes {
        let Some(entry) = nodes_map
            .get_mut(&node.node_id)
            .and_then(|v| v.as_object_mut())
        else {
            continue;
        };
        // The node's config object lives under a key named after its component.
        let Some(config) = entry.get_mut(&node.component.name) else {
            continue;
        };
        let Some(cfg_map) = config.as_object_mut() else {
            continue;
        };

        // Either spelling counts as "present", but only if non-blank.
        let has_op = cfg_map
            .get("operation")
            .and_then(|v| v.as_str())
            .map(|s| !s.trim().is_empty())
            .unwrap_or(false)
            || cfg_map
                .get("op")
                .and_then(|v| v.as_str())
                .map(|s| !s.trim().is_empty())
                .unwrap_or(false);

        if has_op {
            continue;
        }

        // Write both spellings so consumers of either key see the same value;
        // `entry().or_insert` never clobbers an existing (non-string) value.
        if let Some(op) = default_operation(&node.component)? {
            cfg_map
                .entry("operation")
                .or_insert(JsonValue::String(op.clone()));
            cfg_map.entry("op").or_insert(JsonValue::String(op));
        }
    }

    Ok(())
}
273
274fn default_operation(component: &ResolvedComponent) -> Result<Option<String>> {
275 let manifest_json = component.manifest_json.as_deref().unwrap_or_default();
276 let manifest: JsonValue =
277 serde_json::from_str(manifest_json).context("invalid manifest JSON")?;
278 let op_name = manifest
279 .get("operations")
280 .and_then(|ops| ops.as_array())
281 .and_then(|ops| ops.first())
282 .and_then(|op| op.get("name"))
283 .and_then(|v| v.as_str())
284 .map(|s| s.to_string());
285 Ok(op_name)
286}
287
288fn write_resolved_configs(nodes: &[ResolvedNode]) -> Result<()> {
289 let root = Path::new(".greentic").join("resolved_config");
290 fs::create_dir_all(&root).context("failed to create .greentic/resolved_config")?;
291 for node in nodes {
292 let path = root.join(format!("{}.json", node.node_id));
293 let contents = serde_json::to_string_pretty(&json!({
294 "node_id": node.node_id,
295 "component": node.component.name,
296 "version": node.component.version.to_string(),
297 "config": node.config,
298 }))?;
299 fs::write(&path, contents)
300 .with_context(|| format!("failed to write {}", path.display()))?;
301 }
302 Ok(())
303}
304
305fn collect_component_artifacts(nodes: &[ResolvedNode]) -> Vec<ComponentArtifact> {
306 let mut map: HashMap<String, ComponentArtifact> = HashMap::new();
307 for node in nodes {
308 let component = &node.component;
309 let key = format!("{}@{}", component.name, component.version);
310 map.entry(key).or_insert_with(|| to_artifact(component));
311 }
312 map.into_values().collect()
313}
314
/// True for node components handled by the runtime itself rather than a
/// packaged component: `component.exec`, `flow.call`, `session.wait`, and
/// anything whose name starts with `emit`.
fn is_builtin_component(name: &str) -> bool {
    matches!(name, "component.exec" | "flow.call" | "session.wait") || name.starts_with("emit")
}
321
/// Resolve the component referenced by a `component.exec` node.
///
/// Reads the node's `component` field (a `name` or `name@version_req`
/// string) out of the raw flow document and resolves it like a regular
/// component so it gets bundled into the pack. The full `component.exec`
/// payload becomes the node's config.
///
/// # Errors
/// Fails when the flow document has no `nodes` map, the node or its
/// `component.exec` payload is missing, the `component` field is absent,
/// or component resolution fails.
fn resolve_component_exec_node(
    resolver: &mut ComponentResolver,
    node: &greentic_flow::flow_bundle::NodeRef,
    flow_doc_json: &JsonValue,
) -> Result<Option<ResolvedNode>> {
    let nodes = flow_doc_json
        .get("nodes")
        .and_then(|value| value.as_object())
        .ok_or_else(|| anyhow!("flow document missing nodes map"))?;
    let Some(node_value) = nodes.get(&node.node_id) else {
        bail!("node {} missing from flow document", node.node_id);
    };
    let payload = node_value
        .get("component.exec")
        .ok_or_else(|| anyhow!("component.exec payload missing for node {}", node.node_id))?;
    let component_ref = payload
        .get("component")
        .and_then(|value| value.as_str())
        .ok_or_else(|| {
            anyhow!(
                "component.exec requires `component` for node {}",
                node.node_id
            )
        })?;
    let (name, version_req) = parse_component_ref(component_ref)?;
    let resolved_component = resolver.resolve_component(&name, &version_req)?;
    Ok(Some(ResolvedNode {
        node_id: node.node_id.clone(),
        component: resolved_component,
        // JSON Pointer to the node within the flow document.
        pointer: format!("/nodes/{}", node.node_id),
        config: payload.clone(),
    }))
}
355
356fn parse_component_ref(raw: &str) -> Result<(String, VersionReq)> {
357 if let Some((name, ver)) = raw.split_once('@') {
358 let vr = VersionReq::parse(ver.trim())
359 .with_context(|| format!("invalid version requirement `{ver}`"))?;
360 Ok((name.trim().to_string(), vr))
361 } else {
362 Ok((raw.trim().to_string(), VersionReq::default()))
363 }
364}
365
366fn to_artifact(component: &Arc<ResolvedComponent>) -> ComponentArtifact {
367 let hash = component
368 .wasm_hash
369 .strip_prefix("blake3:")
370 .unwrap_or(&component.wasm_hash)
371 .to_string();
372 ComponentArtifact {
373 name: component.name.clone(),
374 version: component.version.clone(),
375 wasm_path: component.wasm_path.clone(),
376 schema_json: component.schema_json.clone(),
377 manifest_json: component.manifest_json.clone(),
378 capabilities: component.capabilities_json.clone(),
379 world: Some(component.world.clone()),
380 hash_blake3: Some(hash),
381 }
382}
383
384fn report_schema_errors(errors: &[NodeSchemaError]) -> Result<()> {
385 let mut message = String::new();
386 for err in errors {
387 message.push_str(&format!(
388 "- node `{}` ({}) {}: {}\n",
389 err.node_id, err.component, err.pointer, err.message
390 ));
391 }
392 bail!("component schema validation failed:\n{message}");
393}
394
/// Assemble the final `PackMeta`, merging the optional TOML metadata file
/// with defaults derived from the flow bundle.
///
/// # Errors
/// Fails when the metadata file cannot be read or parsed, or when its
/// `version` field is not valid semver.
fn load_pack_meta(
    meta_path: Option<&Path>,
    bundle: &greentic_flow::flow_bundle::FlowBundle,
) -> Result<PackMeta> {
    let config = if let Some(path) = meta_path {
        let raw = fs::read_to_string(path)
            .with_context(|| format!("failed to read {}", path.display()))?;
        toml::from_str::<PackMetaToml>(&raw)
            .with_context(|| format!("invalid pack metadata {}", path.display()))?
    } else {
        // No metadata file: every field falls back to the defaults below.
        PackMetaToml::default()
    };

    let pack_id = config
        .pack_id
        .unwrap_or_else(|| format!("dev.local.{}", bundle.id));
    let version = config
        .version
        .as_deref()
        .unwrap_or("0.1.0")
        .parse::<Version>()
        .context("invalid pack version in metadata")?;
    let pack_version = config.pack_version.unwrap_or(PACK_VERSION);
    let name = config.name.unwrap_or_else(|| bundle.id.clone());
    let description = config.description;
    let authors = config.authors.unwrap_or_default();
    let license = config.license;
    let homepage = config.homepage;
    let support = config.support;
    let vendor = config.vendor;
    let kind = config.kind;
    let events = config.events;
    let repo = config.repo;
    let messaging = config.messaging;
    let interfaces = config.interfaces.unwrap_or_default();
    let imports = config
        .imports
        .unwrap_or_default()
        .into_iter()
        .map(|imp| ImportRef {
            pack_id: imp.pack_id,
            version_req: imp.version_req,
        })
        .collect();
    // Default entry point is the bundled flow itself.
    let entry_flows = config
        .entry_flows
        .unwrap_or_else(|| vec![bundle.id.clone()]);
    // NOTE(review): defaulting to wall-clock time makes back-to-back builds
    // differ unless the builder normalizes this field — relevant to the
    // LOCAL_CHECK_STRICT byte-identity check; verify against PackBuilder.
    let created_at_utc = config.created_at_utc.unwrap_or_else(|| {
        OffsetDateTime::now_utc()
            .format(&Rfc3339)
            .unwrap_or_default()
    });
    let annotations = config.annotations.map(toml_to_json_map).unwrap_or_default();
    let distribution = config.distribution;
    let components = config.components.unwrap_or_default();

    Ok(PackMeta {
        pack_version,
        pack_id,
        version,
        name,
        description,
        authors,
        license,
        homepage,
        support,
        vendor,
        imports,
        kind,
        entry_flows,
        created_at_utc,
        events,
        repo,
        messaging,
        interfaces,
        annotations,
        distribution,
        components,
    })
}
475
476fn toml_to_json_map(table: toml::value::Table) -> serde_json::Map<String, JsonValue> {
477 table
478 .into_iter()
479 .map(|(key, value)| {
480 let json_value: JsonValue = value.try_into().unwrap_or(JsonValue::Null);
481 (key, json_value)
482 })
483 .collect()
484}
485
/// Capture build-environment provenance (builder version, best-effort git
/// state, build time, host) to embed in the pack.
fn build_provenance() -> Provenance {
    Provenance {
        // Compile-time version of this greentic-dev binary.
        builder: format!("greentic-dev {}", env!("CARGO_PKG_VERSION")),
        // Git lookups are best-effort: None when git is absent or this is
        // not a repository.
        git_commit: git_rev().ok(),
        git_repo: git_remote().ok(),
        toolchain: None,
        // NOTE(review): a wall-clock timestamp here differs between two
        // builds — presumably PackBuilder excludes provenance from the
        // byte-compared output, or the strict determinism check would always
        // fail; verify.
        built_at_utc: OffsetDateTime::now_utc()
            .format(&Rfc3339)
            .unwrap_or_else(|_| "unknown".into()),
        // NOTE(review): HOSTNAME is shell-specific and often unset for
        // non-interactive processes, so this is frequently None.
        host: std::env::var("HOSTNAME").ok(),
        notes: Some("Built via greentic-dev pack build".into()),
    }
}
499
500fn git_rev() -> Result<String> {
501 let output = std::process::Command::new("git")
502 .args(["rev-parse", "HEAD"])
503 .output()?;
504 if !output.status.success() {
505 bail!("git rev-parse failed");
506 }
507 Ok(String::from_utf8_lossy(&output.stdout).trim().to_string())
508}
509
510fn git_remote() -> Result<String> {
511 let output = std::process::Command::new("git")
512 .args(["config", "--get", "remote.origin.url"])
513 .output()?;
514 if !output.status.success() {
515 bail!("git remote lookup failed");
516 }
517 Ok(String::from_utf8_lossy(&output.stdout).trim().to_string())
518}
519
/// Deserialized shape of the optional pack metadata TOML file.
///
/// Every field is optional; `load_pack_meta` supplies defaults (often derived
/// from the flow bundle) for anything omitted.
#[derive(Debug, Deserialize, Default)]
struct PackMetaToml {
    // Pack format version; falls back to PACK_VERSION.
    pack_version: Option<u32>,
    // Pack identifier; falls back to `dev.local.<flow id>`.
    pack_id: Option<String>,
    // Semver string; falls back to "0.1.0".
    version: Option<String>,
    // Human-readable name; falls back to the flow bundle id.
    name: Option<String>,
    kind: Option<PackKind>,
    description: Option<String>,
    authors: Option<Vec<String>>,
    license: Option<String>,
    homepage: Option<String>,
    support: Option<String>,
    vendor: Option<String>,
    // Flow ids exposed as entry points; defaults to just the bundled flow.
    entry_flows: Option<Vec<String>>,
    events: Option<EventsSection>,
    repo: Option<RepoPackSection>,
    messaging: Option<MessagingSection>,
    interfaces: Option<Vec<InterfaceBinding>>,
    // Dependencies on other packs.
    imports: Option<Vec<ImportToml>>,
    // Free-form key/value annotations; converted to a JSON map on load.
    annotations: Option<toml::value::Table>,
    // RFC 3339 timestamp; defaults to "now" at build time.
    created_at_utc: Option<String>,
    distribution: Option<DistributionSection>,
    components: Option<Vec<ComponentDescriptor>>,
}
544
/// A dependency on another pack as written in the metadata TOML: a pack id
/// plus a semver requirement string (converted to `ImportRef` on load).
#[derive(Debug, Deserialize)]
struct ImportToml {
    pack_id: String,
    version_req: String,
}