// helix_core/lib.rs

1mod lexer;
2mod parser;
3mod ast;
4mod semantic;
5mod codegen;
6mod types;
7pub mod json;
8pub mod error;
9pub mod hlx;
10pub mod server;
11#[cfg(test)]
12mod tests;
13#[cfg(test)]
14mod benches;
15#[cfg(test)]
16mod integration;
17#[cfg(test)]
18#[path = "tests/integration_tests.rs"]
19mod integration_tests;
20#[cfg(feature = "compiler")]
21pub mod compiler;
22pub use types::{
23    HelixConfig, ProjectConfig, AgentConfig, WorkflowConfig, MemoryConfig, ContextConfig,
24    CrewConfig, PipelineConfig, RetryConfig, TriggerConfig, StepConfig, Value,
25    load_default_config,
26};
27pub use ast::{
28    HelixAst, Declaration, Expression, Statement, AgentDecl, WorkflowDecl, MemoryDecl,
29    ContextDecl, CrewDecl, PipelineDecl,
30};
31pub use lexer::{Token, SourceLocation};
32pub use parser::{Parser, ParseError};
33pub use semantic::{SemanticAnalyzer, SemanticError};
34pub use codegen::{CodeGenerator, HelixIR};
35#[cfg(feature = "compiler")]
36pub use compiler::optimizer::OptimizationLevel;
37pub use types::HelixLoader;
38use std::path::Path;
39pub fn parse(source: &str) -> Result<HelixAst, ParseError> {
40    parse_with_locations(source).or_else(|_| parse_legacy(source))
41}
42pub fn parse_with_locations(source: &str) -> Result<HelixAst, ParseError> {
43    use lexer::{tokenize_with_locations, SourceMap};
44    let tokens_with_loc = tokenize_with_locations(source)
45        .map_err(|e| ParseError {
46            message: format!("Lexer error: {}", e),
47            location: None,
48            token_index: 0,
49            expected: None,
50            found: String::new(),
51            context: String::new(),
52        })?;
53    let source_map = SourceMap {
54        tokens: tokens_with_loc.clone(),
55        source: source.to_string(),
56    };
57    let mut parser = Parser::new_with_source_map(source_map);
58    parser
59        .parse()
60        .map_err(|msg| ParseError {
61            message: msg,
62            location: None,
63            token_index: 0,
64            expected: None,
65            found: String::new(),
66            context: String::new(),
67        })
68}
69fn parse_legacy(source: &str) -> Result<HelixAst, ParseError> {
70    let tokens = lexer::tokenize(source)
71        .map_err(|e| ParseError {
72            message: format!("Lexer error: {}", e),
73            location: None,
74            token_index: 0,
75            expected: None,
76            found: String::new(),
77            context: String::new(),
78        })?;
79    let mut parser = Parser::new(tokens);
80    parser
81        .parse()
82        .map_err(|msg| ParseError {
83            message: msg,
84            location: None,
85            token_index: 0,
86            expected: None,
87            found: String::new(),
88            context: String::new(),
89        })
90}
91pub fn parse_and_validate(source: &str) -> Result<HelixConfig, String> {
92    let ast = parse(source).map_err(|e| e.to_string())?;
93    validate(&ast)?;
94    ast_to_config(ast)
95}
96pub fn validate(ast: &HelixAst) -> Result<(), String> {
97    let mut analyzer = SemanticAnalyzer::new();
98    analyzer
99        .analyze(ast)
100        .map_err(|errors| {
101            errors.iter().map(|e| format!("{:?}", e)).collect::<Vec<_>>().join("\n")
102        })
103}
104pub fn ast_to_config(ast: HelixAst) -> Result<HelixConfig, String> {
105    let loader = types::HelixLoader::new();
106    loader.ast_to_config(ast).map_err(|e| e.to_string())
107}
108pub fn load_file<P: AsRef<Path>>(path: P) -> Result<HelixConfig, String> {
109    let content = std::fs::read_to_string(path)
110        .map_err(|e| format!("Failed to read file: {}", e))?;
111    parse_and_validate(&content)
112}
113pub fn load_directory<P: AsRef<Path>>(path: P) -> Result<Vec<HelixConfig>, String> {
114    let mut configs = Vec::new();
115    let entries = std::fs::read_dir(path)
116        .map_err(|e| format!("Failed to read directory: {}", e))?;
117    for entry in entries {
118        let entry = entry.map_err(|e| format!("Failed to read entry: {}", e))?;
119        let path = entry.path();
120        if path.extension().and_then(|s| s.to_str()) == Some("HELIX") {
121            let config = load_file(&path)?;
122            configs.push(config);
123        }
124    }
125    Ok(configs)
126}
127pub fn pretty_print(ast: &HelixAst) -> String {
128    let mut printer = ast::AstPrettyPrinter::new();
129    printer.print(ast)
130}
131#[cfg(feature = "compiler")]
132pub use compiler::tools::migrate::Migrator;
133#[cfg(feature = "compiler")]
134pub use compiler::{
135    ModuleSystem, DependencyBundler, ModuleResolver, HelixVM, VMExecutor, VMConfig,
136};
137#[cfg(feature = "cli")]
138pub use compiler::workflow::watch::{HelixWatcher, CompileWatcher, HotReloadManager};
139pub use hlx::{HlxDatasetProcessor, HlxBridge, DatasetConfig, ValidationResult, CacheStats};