use anyhow::{anyhow, Context, Result};
use regex::{Captures, Regex};
use std::collections::HashSet;
use std::path::{Path, PathBuf};
use petgraph::visit::EdgeRef;
use crate::core::graph::{Action, Node, NodeType, SwitchCase, SwitchRoute, WorkflowGraph};
use crate::core::parser::GraphParser;
#[cfg(not(target_arch = "wasm32"))]
use crate::registry::cache::find_entry_in_dir;
#[cfg(not(target_arch = "wasm32"))]
use crate::registry::package::{is_registry_import, parse_registry_import};
lazy_static::lazy_static! {
    // Matches `$name` or `$name.dotted.path`: group 1 is the leading
    // identifier, optional group 2 is the dotted tail (including its dot).
    static ref VAR_REF_RE: Regex = Regex::new(r"\$([a-zA-Z_][a-zA-Z0-9_]*)(\.[a-zA-Z0-9_.]+)?").unwrap();
    // Matches bare `name.dotted.path` references; unlike VAR_REF_RE the
    // dotted tail (group 2) is mandatory, so a lone identifier never matches.
    static ref BARE_REF_RE: Regex = Regex::new(r"\b([a-zA-Z_][a-zA-Z0-9_]*)(\.[a-zA-Z0-9_.]+)").unwrap();
}
/// Expand a leading `@/` in `pattern` against `at_base`.
///
/// `@/rest` becomes `<at_base>/rest` with backslashes normalized to forward
/// slashes; any other pattern — or any pattern when no base is configured —
/// is returned unchanged.
pub fn expand_at_prefix(pattern: &str, at_base: Option<&Path>) -> String {
    match (at_base, pattern.strip_prefix("@/")) {
        (Some(base), Some(rest)) => base.join(rest).to_string_lossy().replace('\\', "/"),
        _ => pattern.to_string(),
    }
}

/// Apply [`expand_at_prefix`] to each pattern in order.
pub fn expand_at_prefixes(patterns: &[String], at_base: Option<&Path>) -> Vec<String> {
    let mut expanded = Vec::with_capacity(patterns.len());
    for pattern in patterns {
        expanded.push(expand_at_prefix(pattern, at_base));
    }
    expanded
}
#[cfg(not(target_arch = "wasm32"))]
/// Resolve every `libs:` import declared on `workflow`, loading stdlib,
/// registry, and local `.jg` libraries and merging their definitions in.
///
/// Resolution order per import:
///   1. built-in stdlib (a `std/...` path, or a registry-style name that
///      shadows a bundled stdlib module),
///   2. registry packages (installed on demand under `jg_modules/`),
///   3. local paths (relative to `base_dir`, with `@/` expanded to `at_base`).
///
/// `import_stack` holds the canonicalized files currently being resolved so
/// circular imports are reported instead of recursing forever.
pub fn resolve_lib_imports(
    workflow: &mut WorkflowGraph,
    base_dir: &Path,
    import_stack: &mut Vec<PathBuf>,
    at_base: Option<&Path>,
) -> Result<()> {
    if workflow.lib_imports.is_empty() {
        return Ok(());
    }
    let imports: Vec<(String, String)> = workflow.lib_imports.clone().into_iter().collect();
    let auto_namespaces = workflow.lib_auto_namespaces.clone();
    for (parser_namespace, rel_path) in imports {
        // --- 1) Built-in stdlib ---
        let stdlib_name = rel_path.strip_prefix("std/").unwrap_or(&rel_path);
        if is_registry_import(stdlib_name) || rel_path.starts_with("std/") {
            let clean_name = stdlib_name.trim_end_matches(".jg");
            if let Some(content) = crate::core::stdlib::get(clean_name) {
                let lib_graph = GraphParser::parse_lib(content)
                    .with_context(|| format!("Stdlib parse error: '{}'", clean_name))?;
                // Explicit aliases win; auto-namespaced imports fall back to
                // the library's own name.
                let namespace = if !auto_namespaces.contains(&parser_namespace) {
                    parser_namespace.clone()
                } else {
                    clean_name.to_string()
                };
                merge_lib_definitions(workflow, lib_graph, &namespace);
                continue;
            }
        }
        // --- 2) Registry package ---
        if is_registry_import(&rel_path) {
            let (pkg_name, version_req) = parse_registry_import(&rel_path)?;
            let jg_modules_path = find_jg_modules_dir(base_dir).map(|d| d.join(&pkg_name));
            // Use the installed copy when present, otherwise install on demand.
            let entry_path = if let Some(ref pkg_dir) = jg_modules_path {
                if pkg_dir.exists() {
                    find_entry_in_dir(pkg_dir)?
                } else {
                    auto_install_package(&pkg_name, version_req.as_deref(), base_dir)?
                }
            } else {
                auto_install_package(&pkg_name, version_req.as_deref(), base_dir)?
            };
            let canonical = entry_path.canonicalize().with_context(|| {
                format!(
                    "Lib import error: Cannot resolve registry package '{}' entry at {:?}",
                    pkg_name, entry_path
                )
            })?;
            let namespace = if !auto_namespaces.contains(&parser_namespace) {
                parser_namespace.clone()
            } else {
                pkg_name.clone()
            };
            import_lib_file(
                workflow,
                canonical,
                &namespace,
                &parser_namespace,
                import_stack,
                at_base,
            )?;
            continue;
        }
        // --- 3) Local path ---
        let expanded = expand_at_prefix(&rel_path, at_base);
        let abs_path = if Path::new(&expanded).is_absolute() {
            PathBuf::from(&expanded)
        } else {
            base_dir.join(&expanded)
        };
        let canonical = abs_path.canonicalize().with_context(|| {
            format!(
                "Lib import error: Cannot resolve path '{}' (base: {:?})",
                rel_path, base_dir
            )
        })?;
        import_lib_file(
            workflow,
            canonical,
            &parser_namespace,
            &parser_namespace,
            import_stack,
            at_base,
        )?;
    }
    Ok(())
}

/// Load, parse, and recursively resolve the library file at `canonical`, then
/// merge its definitions into `workflow` under `namespace`.
///
/// `display_name` is the alias shown in circular-import diagnostics. The file
/// is pushed onto `import_stack` for the duration of the (recursive)
/// resolution and popped on success.
#[cfg(not(target_arch = "wasm32"))]
fn import_lib_file(
    workflow: &mut WorkflowGraph,
    canonical: PathBuf,
    namespace: &str,
    display_name: &str,
    import_stack: &mut Vec<PathBuf>,
    at_base: Option<&Path>,
) -> Result<()> {
    if import_stack.contains(&canonical) {
        return Err(anyhow!(
            "Circular lib import detected: '{}' ({:?})\nImport chain: {:?}",
            display_name,
            canonical,
            import_stack
        ));
    }
    import_stack.push(canonical.clone());
    let content = std::fs::read_to_string(&canonical)
        .with_context(|| format!("Lib import error: Cannot read '{:?}'", canonical))?;
    let mut lib_graph = GraphParser::parse_lib(&content)
        .with_context(|| format!("Lib import error: Failed to parse '{:?}'", canonical))?;
    // A library may import further libraries, relative to its own directory.
    let lib_base_dir = canonical.parent().unwrap_or(Path::new("."));
    resolve_lib_imports(&mut lib_graph, lib_base_dir, import_stack, at_base)?;
    merge_lib_definitions(workflow, lib_graph, namespace);
    import_stack.pop();
    Ok(())
}

/// Move the functions, classes, and pending methods of `lib_graph` into
/// `workflow`, qualifying each name as `<namespace>.<name>`.
#[cfg(not(target_arch = "wasm32"))]
fn merge_lib_definitions(workflow: &mut WorkflowGraph, lib_graph: WorkflowGraph, namespace: &str) {
    for (func_name, func_def) in lib_graph.functions {
        workflow
            .functions
            .insert(format!("{}.{}", namespace, func_name), func_def);
    }
    for (class_name, class_def) in lib_graph.classes {
        workflow
            .classes
            .insert(format!("{}.{}", namespace, class_name), class_def);
    }
    for (type_name, method_name, func_def) in lib_graph.pending_methods {
        workflow
            .pending_methods
            .push((format!("{}.{}", namespace, type_name), method_name, func_def));
    }
}
#[cfg(not(target_arch = "wasm32"))]
/// Resolve every `flows:` import declared on `workflow`, merging each imported
/// workflow in as a prefixed subgraph, then commit deferred edges and expand
/// wildcard edges.
///
/// Imports are resolved relative to `base_dir` (after `@/` expansion against
/// `at_base`); `import_stack` detects circular flow imports.
pub fn resolve_flow_imports(
    workflow: &mut WorkflowGraph,
    base_dir: &Path,
    import_stack: &mut Vec<PathBuf>,
    at_base: Option<&Path>,
) -> Result<()> {
    if !workflow.flow_imports.is_empty() {
        let declared: Vec<(String, String)> =
            workflow.flow_imports.clone().into_iter().collect();
        for (alias, rel_path) in declared {
            // Resolve the import to a canonical on-disk file.
            let expanded = expand_at_prefix(&rel_path, at_base);
            let absolute = if Path::new(&expanded).is_absolute() {
                PathBuf::from(&expanded)
            } else {
                base_dir.join(&expanded)
            };
            let canonical = absolute.canonicalize().with_context(|| {
                format!(
                    "Flow import error: Cannot resolve path '{}' (base: {:?})",
                    rel_path, base_dir
                )
            })?;
            if import_stack.contains(&canonical) {
                return Err(anyhow!(
                    "Circular flow import detected: '{}' ({:?})\nImport chain: {:?}",
                    alias,
                    canonical,
                    import_stack
                ));
            }
            import_stack.push(canonical.clone());
            let source = std::fs::read_to_string(&canonical)
                .with_context(|| format!("Flow import error: Cannot read '{:?}'", canonical))?;
            let mut child = GraphParser::parse(&source)
                .with_context(|| format!("Flow import error: Failed to parse '{:?}'", canonical))?;
            // The child may itself import flows, relative to its own directory.
            let child_dir = canonical.parent().unwrap_or(Path::new("."));
            resolve_flow_imports(&mut child, child_dir, import_stack, at_base)?;
            merge_subgraph(workflow, &child, &alias, child_dir, at_base)?;
            import_stack.pop();
        }
    }
    // Whether or not anything was imported, flush deferred edges now that all
    // nodes exist, then materialize wildcard edge patterns.
    commit_pending_edges(workflow)?;
    expand_wildcard_edges(workflow)?;
    Ok(())
}
/// Splice `child`'s nodes, edges, switch routes, pending edges, and asset
/// patterns into `parent`, qualifying every child identifier with `prefix`
/// (the flow alias): node id `x` becomes `prefix.x`.
///
/// Relative prompt/tool patterns are re-rooted at `child_base_dir` so they
/// still resolve when loaded from the parent workflow's location.
fn merge_subgraph(
    parent: &mut WorkflowGraph,
    child: &WorkflowGraph,
    prefix: &str,
    child_base_dir: &Path,
    at_base: Option<&Path>,
) -> Result<()> {
    // Ids defined by the child — only references to these are rewritten by
    // prefix_variables below; everything else (e.g. `input.*`) is untouched.
    let child_node_ids: HashSet<String> = child
        .graph
        .node_indices()
        .map(|idx| child.graph[idx].id.clone())
        .collect();
    // 1) Copy every child node under its prefixed id, prefixing references
    //    embedded in the node payload as well.
    for idx in child.graph.node_indices() {
        let child_node = &child.graph[idx];
        let prefixed_id = format!("{}.{}", prefix, child_node.id);
        let prefixed_node_type = prefix_node_type(&child_node.node_type, prefix, &child_node_ids);
        let new_node = Node {
            id: prefixed_id.clone(),
            node_type: prefixed_node_type,
        };
        let new_idx = parent.graph.add_node(new_node);
        parent.node_map.insert(prefixed_id, new_idx);
    }
    // 2) Re-create the child's edges between the freshly inserted nodes,
    //    rewriting node references inside edge conditions.
    for edge_ref in child.graph.edge_references() {
        let from_id = format!("{}.{}", prefix, child.graph[edge_ref.source()].id);
        let to_id = format!("{}.{}", prefix, child.graph[edge_ref.target()].id);
        let mut edge = edge_ref.weight().clone();
        if let Some(ref cond) = edge.condition {
            edge.condition = Some(prefix_variables(cond, prefix, &child_node_ids));
        }
        let f_idx = *parent.node_map.get(&from_id).ok_or_else(|| {
            anyhow!(
                "Merge error: source node '{}' not found after merge",
                from_id
            )
        })?;
        let t_idx = *parent
            .node_map
            .get(&to_id)
            .ok_or_else(|| anyhow!("Merge error: target node '{}' not found after merge", to_id))?;
        parent.graph.add_edge(f_idx, t_idx, edge);
    }
    // 3) Switch routes: prefix the route key, the subject expression, and
    //    every case's target node id.
    for (key, route) in &child.switch_routes {
        let prefixed_key = format!("{}.{}", prefix, key);
        let prefixed_route = SwitchRoute {
            subject: prefix_variables(&route.subject, prefix, &child_node_ids),
            cases: route
                .cases
                .iter()
                .map(|c| SwitchCase {
                    value: c.value.clone(),
                    target: format!("{}.{}", prefix, c.target),
                    is_ok: c.is_ok,
                    is_err: c.is_err,
                    err_kind: c.err_kind.clone(),
                })
                .collect(),
        };
        parent.switch_routes.insert(prefixed_key, prefixed_route);
    }
    // 4) Carry over edges the child had not committed yet; they are resolved
    //    later by commit_pending_edges once all imports are merged.
    for (f_id, t_id, mut edge) in child.pending_edges.clone() {
        let prefixed_f = format!("{}.{}", prefix, f_id);
        let prefixed_t = format!("{}.{}", prefix, t_id);
        if let Some(ref cond) = edge.condition {
            edge.condition = Some(prefix_variables(cond, prefix, &child_node_ids));
        }
        parent.pending_edges.push((prefixed_f, prefixed_t, edge));
    }
    // 5) Prompt patterns: expand `@/` first; patterns that are still relative
    //    are re-rooted at the child's own directory.
    for pattern in &child.prompt_patterns {
        let expanded = expand_at_prefix(pattern, at_base);
        if Path::new(&expanded).is_absolute() {
            parent.prompt_patterns.push(expanded);
        } else {
            parent
                .prompt_patterns
                .push(child_base_dir.join(&expanded).to_string_lossy().to_string());
        }
    }
    // Tool patterns get the same treatment as prompt patterns.
    for pattern in &child.tool_patterns {
        let expanded = expand_at_prefix(pattern, at_base);
        if Path::new(&expanded).is_absolute() {
            parent.tool_patterns.push(expanded);
        } else {
            parent
                .tool_patterns
                .push(child_base_dir.join(&expanded).to_string_lossy().to_string());
        }
    }
    // 6) Python imports are deduplicated, not prefixed.
    for import in &child.python_imports {
        if !parent.python_imports.contains(import) {
            parent.python_imports.push(import.clone());
        }
    }
    Ok(())
}
/// Materialize all deferred edges now that every imported node has been
/// merged: resolve each endpoint id through the node map and insert the edge.
///
/// Fails if an endpoint id was never defined by the main workflow or any
/// imported flow.
fn commit_pending_edges(workflow: &mut WorkflowGraph) -> Result<()> {
    let deferred = std::mem::take(&mut workflow.pending_edges);
    for (source_id, target_id, edge) in deferred {
        let source_idx = workflow.node_map.get(&source_id).copied().ok_or_else(|| {
            anyhow!(
                "Graph Error: Pending edge references undefined node '{}'. \
                Did you declare it in 'flows:' and define it in the imported workflow?",
                source_id
            )
        })?;
        let target_idx = workflow.node_map.get(&target_id).copied().ok_or_else(|| {
            anyhow!(
                "Graph Error: Pending edge references undefined node '{}'. \
                Did you declare it in 'flows:' and define it in the imported workflow?",
                target_id
            )
        })?;
        workflow.graph.add_edge(source_idx, target_idx, edge);
    }
    Ok(())
}
/// Expand deferred wildcard (`*`) edge declarations into concrete edges.
///
/// Each side's pattern is matched against the node map and the cartesian
/// product of the matches is connected, skipping self-loops. A pattern that
/// matches no node at all is an error.
fn expand_wildcard_edges(workflow: &mut WorkflowGraph) -> Result<()> {
    let deferred = std::mem::take(&mut workflow.pending_wildcard_edges);
    if deferred.is_empty() {
        return Ok(());
    }
    for (from_pattern, to_pattern, edge) in deferred {
        let sources = expand_glob(&from_pattern, &workflow.node_map);
        if sources.is_empty() {
            return Err(anyhow!(
                "Wildcard edge: pattern '{}' matched no nodes",
                from_pattern
            ));
        }
        let targets = expand_glob(&to_pattern, &workflow.node_map);
        if targets.is_empty() {
            return Err(anyhow!(
                "Wildcard edge: pattern '{}' matched no nodes",
                to_pattern
            ));
        }
        for source in &sources {
            for target in &targets {
                // Never connect a node to itself via a wildcard.
                if source == target {
                    continue;
                }
                // Safe: both ids came from node_map keys via expand_glob.
                let s_idx = workflow.node_map[source.as_str()];
                let t_idx = workflow.node_map[target.as_str()];
                workflow.graph.add_edge(s_idx, t_idx, edge.clone());
            }
        }
    }
    Ok(())
}
/// Return the node ids matching `pattern`, where `*` matches any run of
/// characters and everything else is literal.
///
/// A pattern without `*` is an exact-key lookup. An unparsable translated
/// regex yields no matches.
fn expand_glob(
    pattern: &str,
    node_map: &std::collections::HashMap<String, petgraph::graph::NodeIndex>,
) -> Vec<String> {
    if !pattern.contains('*') {
        return if node_map.contains_key(pattern) {
            vec![pattern.to_string()]
        } else {
            Vec::new()
        };
    }
    // Translate the glob into an anchored regex: escape the literal segments
    // and let every '*' match greedily.
    let regex_src = format!(
        "^{}$",
        pattern
            .split('*')
            .map(regex::escape)
            .collect::<Vec<_>>()
            .join(".*")
    );
    let Ok(matcher) = Regex::new(&regex_src) else {
        return Vec::new();
    };
    node_map
        .keys()
        .filter(|id| matcher.is_match(id))
        .cloned()
        .collect()
}
/// Qualify node references inside an expression with `prefix`.
///
/// Two passes: `$id[.path]` references first, then bare `id.path` references
/// (a bare id with no dotted tail is never rewritten). Only heads that name a
/// node in `child_node_ids` are touched. Note that the `$` sigil is dropped
/// when a `$`-reference is rewritten — the replacement is `prefix.id.path`.
fn prefix_variables(text: &str, prefix: &str, child_node_ids: &HashSet<String>) -> String {
    // Shared rewrite rule for both regex passes.
    let qualify = |caps: &Captures| -> String {
        let head = &caps[1];
        if !child_node_ids.contains(head) {
            // Not a child node: keep the full original match.
            return caps[0].to_string();
        }
        let tail = caps.get(2).map_or("", |m| m.as_str());
        format!("{}.{}{}", prefix, head, tail)
    };
    let dollar_pass = VAR_REF_RE.replace_all(text, &qualify).to_string();
    BARE_REF_RE.replace_all(&dollar_pass, &qualify).to_string()
}
/// Apply `prefix_variables` to every value of a `params`/`args`/`kwargs`
/// style map, leaving keys untouched.
fn prefix_param_map(
    params: &std::collections::HashMap<String, String>,
    prefix: &str,
    child_node_ids: &HashSet<String>,
) -> std::collections::HashMap<String, String> {
    params
        .iter()
        .map(|(k, v)| (k.clone(), prefix_variables(v, prefix, child_node_ids)))
        .collect()
}

/// Return a copy of `node_type` with every embedded expression or parameter
/// rewritten so references to `child_node_ids` are qualified with `prefix`.
///
/// Nested subgraphs (`Loop`, `Foreach` bodies) are rewritten recursively via
/// `prefix_subgraph_body`. `Literal` and `ReturnErr` payloads are copied
/// verbatim. NOTE(review): `ReturnErr` values are not run through
/// `prefix_variables` — confirm error payloads can never reference sibling
/// node outputs.
fn prefix_node_type(
    node_type: &NodeType,
    prefix: &str,
    child_node_ids: &HashSet<String>,
) -> NodeType {
    match node_type {
        NodeType::Task(action) => NodeType::Task(Action {
            name: action.name.clone(),
            params: prefix_param_map(&action.params, prefix, child_node_ids),
        }),
        NodeType::Loop { condition, body } => NodeType::Loop {
            condition: prefix_variables(condition, prefix, child_node_ids),
            body: Box::new(prefix_subgraph_body(body, prefix, child_node_ids)),
        },
        NodeType::Foreach {
            item,
            list,
            body,
            parallel,
        } => NodeType::Foreach {
            // The loop variable itself is local to the foreach, not a node id.
            item: item.clone(),
            list: prefix_variables(list, prefix, child_node_ids),
            body: Box::new(prefix_subgraph_body(body, prefix, child_node_ids)),
            parallel: *parallel,
        },
        NodeType::Literal(val) => NodeType::Literal(val.clone()),
        NodeType::_ExternalCall {
            call_path,
            args,
            kwargs,
        } => NodeType::_ExternalCall {
            call_path: call_path.clone(),
            args: args
                .iter()
                .map(|a| prefix_variables(a, prefix, child_node_ids))
                .collect(),
            kwargs: prefix_param_map(kwargs, prefix, child_node_ids),
        },
        NodeType::NewInstance { class_name, args } => NodeType::NewInstance {
            class_name: class_name.clone(),
            args: prefix_param_map(args, prefix, child_node_ids),
        },
        NodeType::MethodCall {
            instance_path,
            method_name,
            args,
        } => NodeType::MethodCall {
            instance_path: instance_path.clone(),
            method_name: method_name.clone(),
            args: prefix_param_map(args, prefix, child_node_ids),
        },
        NodeType::Assert(expr_str) => {
            NodeType::Assert(prefix_variables(expr_str, prefix, child_node_ids))
        }
        NodeType::AssignCall { var, action } => NodeType::AssignCall {
            var: var.clone(),
            action: Action {
                name: action.name.clone(),
                params: prefix_param_map(&action.params, prefix, child_node_ids),
            },
        },
        NodeType::ReturnErr(val) => NodeType::ReturnErr(val.clone()),
        NodeType::Yield(expr) => NodeType::Yield(prefix_variables(expr, prefix, child_node_ids)),
    }
}
/// Clone `body` and rewrite every node payload and edge condition inside it so
/// references to `child_node_ids` are qualified with `prefix`.
fn prefix_subgraph_body(
    body: &WorkflowGraph,
    prefix: &str,
    child_node_ids: &HashSet<String>,
) -> WorkflowGraph {
    let mut rewritten = body.clone();
    // Node payloads (params, conditions, nested bodies).
    let node_indices: Vec<_> = rewritten.graph.node_indices().collect();
    for idx in node_indices {
        let prefixed = prefix_node_type(&rewritten.graph[idx].node_type, prefix, child_node_ids);
        rewritten.graph[idx].node_type = prefixed;
    }
    // Edge conditions.
    let edge_indices: Vec<_> = rewritten.graph.edge_indices().collect();
    for idx in edge_indices {
        if let Some(cond) = rewritten.graph[idx].condition.clone() {
            rewritten.graph[idx].condition = Some(prefix_variables(&cond, prefix, child_node_ids));
        }
    }
    rewritten
}
#[cfg(not(target_arch = "wasm32"))]
/// Walk upward from `start` looking for the package directory.
///
/// Returns the first existing `jg_modules` directory found in `start` or any
/// ancestor. A directory containing `jgpackage.toml` is treated as the project
/// root: its `jg_modules` path is returned even if it does not exist yet.
/// Falls back to `<start>/jg_modules` when no marker is found anywhere.
fn find_jg_modules_dir(start: &Path) -> Option<PathBuf> {
    let mut current = start.to_path_buf();
    loop {
        let modules = current.join("jg_modules");
        if modules.is_dir() {
            return Some(modules);
        }
        // Project root marker wins even when jg_modules is absent, so a fresh
        // install lands next to jgpackage.toml.
        if current.join("jgpackage.toml").exists() {
            return Some(modules);
        }
        if !current.pop() {
            // Reached the filesystem root without finding anything.
            return Some(start.join("jg_modules"));
        }
    }
}
#[cfg(not(target_arch = "wasm32"))]
/// Download and install registry package `pkg_name` (optionally pinned by
/// `version_req`) into `project_dir`, returning the path of the installed
/// package's entry file.
///
/// Blocks on the current tokio runtime; errors with a hint to run
/// `juglans add` when no runtime is available.
fn auto_install_package(
    pkg_name: &str,
    version_req: Option<&str>,
    project_dir: &Path,
) -> Result<PathBuf> {
    tracing::info!("Auto-installing registry package '{}' ...", pkg_name);
    // Registry URL comes from user config when present, else the default host.
    let registry_url = crate::services::config::JuglansConfig::load()
        .ok()
        .and_then(|c| c.registry.map(|r| r.url))
        .unwrap_or_else(|| "https://jgr.juglans.ai".to_string());
    let installer = crate::registry::installer::PackageInstaller::with_defaults(&registry_url)
        .with_context(|| "Failed to create package installer")?;
    let handle = tokio::runtime::Handle::try_current().with_context(|| {
        format!(
            "Cannot auto-install package '{}': no async runtime available. \
            Run 'juglans add {}' first, or ensure the workflow is run with 'juglans'.",
            pkg_name, pkg_name
        )
    })?;
    // Owned copies so the async block below is self-contained.
    let name = pkg_name.to_string();
    let ver = version_req.map(|s| s.to_string());
    let proj = project_dir.to_path_buf();
    // NOTE(review): Handle::block_on panics when called from within an async
    // worker thread of the same runtime — confirm this function is only ever
    // reached from blocking/synchronous contexts.
    let installed = handle
        .block_on(async move { installer.install(&name, ver.as_deref(), &proj).await })
        .with_context(|| format!("Failed to auto-install package '{}'", pkg_name))?;
    Ok(installed.entry_path)
}
#[cfg(test)]
mod tests {
    use super::*;

    // Bare references are qualified only when the head segment names a child
    // node; the dotted tail is preserved as-is.
    #[test]
    fn test_prefix_variables_basic() {
        let mut node_ids = HashSet::new();
        node_ids.insert("verify".to_string());
        node_ids.insert("extract".to_string());
        assert_eq!(
            prefix_variables("verify.output", "auth", &node_ids),
            "auth.verify.output"
        );
        assert_eq!(
            prefix_variables("extract.output.intent", "auth", &node_ids),
            "auth.extract.output.intent"
        );
        // "input" is not a child node, so it stays untouched.
        assert_eq!(
            prefix_variables("input.message", "auth", &node_ids),
            "input.message"
        );
        // A bare id with no dotted tail never matches the bare-ref pattern.
        assert_eq!(prefix_variables("output", "auth", &node_ids), "output");
    }

    // Only the matching reference inside a larger expression is rewritten;
    // string literals and unrelated identifiers are untouched.
    #[test]
    fn test_prefix_variables_mixed() {
        let mut node_ids = HashSet::new();
        node_ids.insert("classify".to_string());
        let input = r#"classify.output.intent == "trade" && ready"#;
        let result = prefix_variables(input, "trading", &node_ids);
        assert_eq!(
            result,
            r#"trading.classify.output.intent == "trade" && ready"#
        );
    }

    // "@/" expands against the provided base; all other shapes pass through.
    #[test]
    fn test_expand_at_prefix() {
        let base = Path::new("/project/src");
        assert_eq!(
            expand_at_prefix("@/prompts/foo.jgx", Some(base)),
            "/project/src/prompts/foo.jgx"
        );
        assert_eq!(expand_at_prefix("./local/file", Some(base)), "./local/file");
        assert_eq!(
            expand_at_prefix("relative/path", Some(base)),
            "relative/path"
        );
        // "@" without a following slash is not the expansion marker.
        assert_eq!(expand_at_prefix("@noslash", Some(base)), "@noslash");
        // Without a base there is nothing to expand against.
        assert_eq!(
            expand_at_prefix("@/prompts/foo.jgx", None),
            "@/prompts/foo.jgx"
        );
    }

    // Batch expansion preserves order and leaves non-"@/" entries alone.
    #[test]
    fn test_expand_at_prefixes_batch() {
        let base = Path::new("/project");
        let patterns = vec![
            "@/prompts/*.jgx".to_string(),
            "./local/file.jgx".to_string(),
            "@/tools/my-tools.json".to_string(),
        ];
        let result = expand_at_prefixes(&patterns, Some(base));
        assert_eq!(result[0], "/project/prompts/*.jgx");
        assert_eq!(result[1], "./local/file.jgx");
        assert_eq!(result[2], "/project/tools/my-tools.json");
    }

    // With no known child ids, `$`-references pass through unchanged.
    #[test]
    fn test_prefix_variables_no_match() {
        let node_ids = HashSet::new();
        assert_eq!(
            prefix_variables("$output + $ctx.x", "ns", &node_ids),
            "$output + $ctx.x"
        );
    }

    // An explicit `libs: { db: ... }` alias namespaces imported functions
    // under "db", not under the library's file stem.
    #[test]
    fn test_resolve_lib_imports_explicit_namespace() {
        use std::io::Write;
        let dir = std::env::temp_dir().join("juglans_test_lib_explicit");
        let _ = std::fs::create_dir_all(&dir);
        let lib_path = dir.join("sqlite.jg");
        let mut f = std::fs::File::create(&lib_path).unwrap();
        writeln!(
            f,
            r#"
[read(table)]: bash(command="sqlite3 db.sqlite 'SELECT * FROM " + table + "'")
[write(table, data)]: bash(command="echo " + data)
"#
        )
        .unwrap();
        let main_content = format!(
            r#"
libs: {{ db: "{}" }}
[step1]: db.read(table="users")
"#,
            lib_path.to_string_lossy()
        );
        let mut graph = GraphParser::parse(&main_content).unwrap();
        assert_eq!(
            graph.lib_imports.get("db").unwrap(),
            lib_path.to_str().unwrap()
        );
        // Explicit alias, so it must not be recorded as auto-namespaced.
        assert!(!graph.lib_auto_namespaces.contains("db"));
        let mut import_stack = vec![];
        resolve_lib_imports(&mut graph, &dir, &mut import_stack, None).unwrap();
        assert!(
            graph.functions.contains_key("db.read"),
            "functions: {:?}",
            graph.functions.keys().collect::<Vec<_>>()
        );
        assert!(graph.functions.contains_key("db.write"));
        // The file stem must not leak in as a namespace.
        assert!(!graph.functions.contains_key("sqlite3.read"));
    }

    // A bare `libs: [path]` import is auto-namespaced by the file stem.
    #[test]
    fn test_resolve_lib_imports_auto_namespace_with_stem() {
        use std::io::Write;
        let dir = std::env::temp_dir().join("juglans_test_lib_slug");
        let _ = std::fs::create_dir_all(&dir);
        let lib_path = dir.join("my_sqlite_lib.jg");
        let mut f = std::fs::File::create(&lib_path).unwrap();
        writeln!(
            f,
            r#"
[query(sql)]: bash(command="sqlite3 db.sqlite '" + sql + "'")
"#
        )
        .unwrap();
        let main_content = format!(
            r#"
libs: ["{}"]
[step1]: my_sqlite_lib.query(sql="SELECT 1")
"#,
            lib_path.to_string_lossy()
        );
        let mut graph = GraphParser::parse(&main_content).unwrap();
        assert!(graph.lib_imports.contains_key("my_sqlite_lib"));
        assert!(graph.lib_auto_namespaces.contains("my_sqlite_lib"));
        let mut import_stack = vec![];
        resolve_lib_imports(&mut graph, &dir, &mut import_stack, None).unwrap();
        assert!(
            graph.functions.contains_key("my_sqlite_lib.query"),
            "functions: {:?}",
            graph.functions.keys().collect::<Vec<_>>()
        );
    }

    // Same as above with a plain single-word stem.
    #[test]
    fn test_resolve_lib_imports_auto_namespace_no_slug() {
        use std::io::Write;
        let dir = std::env::temp_dir().join("juglans_test_lib_stem");
        let _ = std::fs::create_dir_all(&dir);
        let lib_path = dir.join("utils.jg");
        let mut f = std::fs::File::create(&lib_path).unwrap();
        writeln!(
            f,
            r#"
[helper(x)]: bash(command="echo " + x)
"#
        )
        .unwrap();
        let main_content = format!(
            r#"
libs: ["{}"]
[step1]: utils.helper(x="test")
"#,
            lib_path.to_string_lossy()
        );
        let mut graph = GraphParser::parse(&main_content).unwrap();
        let mut import_stack = vec![];
        resolve_lib_imports(&mut graph, &dir, &mut import_stack, None).unwrap();
        assert!(
            graph.functions.contains_key("utils.helper"),
            "functions: {:?}",
            graph.functions.keys().collect::<Vec<_>>()
        );
    }
}