helix_core/lib.rs

mod lexer;
mod parser;
mod ast;
mod semantic;
mod codegen;
mod types;
pub mod error;
pub mod server;
#[cfg(test)]
mod tests;
#[cfg(test)]
mod benches;
#[cfg(test)]
mod integration;
#[cfg(test)]
#[path = "tests/integration_tests.rs"]
mod integration_tests;
#[cfg(feature = "compiler")]
pub mod compiler;
pub use types::{
    HelixConfig, ProjectConfig, AgentConfig, WorkflowConfig, MemoryConfig, ContextConfig,
    CrewConfig, PipelineConfig, RetryConfig, TriggerConfig, StepConfig, Value,
    load_default_config,
};
pub use ast::{
    HelixAst, Declaration, Expression, Statement, AgentDecl, WorkflowDecl, MemoryDecl,
    ContextDecl, CrewDecl, PipelineDecl,
};
pub use lexer::{Token, SourceLocation};
pub use parser::{Parser, ParseError};
pub use semantic::{SemanticAnalyzer, SemanticError};
pub use codegen::{CodeGenerator, HelixIR};
#[cfg(feature = "compiler")]
pub use compiler::optimizer::OptimizationLevel;
pub use types::HelixLoader;
use std::path::Path;
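/// Parse HELIX source text into an AST. The location-aware parser is tried
/// first; on failure the plain legacy token path is used as a fallback.
///
/// A minimal usage sketch (the file name here is illustrative):
///
/// ```ignore
/// let source = std::fs::read_to_string("agents.HELIX").expect("read failed");
/// let ast = helix_core::parse(&source).expect("parse failed");
/// println!("{}", helix_core::pretty_print(&ast));
/// ```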
pub fn parse(source: &str) -> Result<HelixAst, ParseError> {
    parse_with_locations(source).or_else(|_| parse_legacy(source))
}
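/// Parse HELIX source with the location-tracking lexer so that parse errors
/// can reference positions in the original source.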
pub fn parse_with_locations(source: &str) -> Result<HelixAst, ParseError> {
    use lexer::{tokenize_with_locations, SourceMap};
    let tokens_with_loc = tokenize_with_locations(source)
        .map_err(|e| ParseError {
            message: format!("Lexer error: {}", e),
            location: None,
            token_index: 0,
            expected: None,
            found: String::new(),
            context: String::new(),
        })?;
    let source_map = SourceMap {
        tokens: tokens_with_loc.clone(),
        source: source.to_string(),
    };
    let mut parser = Parser::new_with_source_map(source_map);
    parser
        .parse()
        .map_err(|msg| ParseError {
            message: msg,
            location: None,
            token_index: 0,
            expected: None,
            found: String::new(),
            context: String::new(),
        })
}
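/// Legacy parse path: plain tokenization without source locations, used as a
/// fallback by `parse`.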
fn parse_legacy(source: &str) -> Result<HelixAst, ParseError> {
    let tokens = lexer::tokenize(source)
        .map_err(|e| ParseError {
            message: format!("Lexer error: {}", e),
            location: None,
            token_index: 0,
            expected: None,
            found: String::new(),
            context: String::new(),
        })?;
    let mut parser = Parser::new(tokens);
    parser
        .parse()
        .map_err(|msg| ParseError {
            message: msg,
            location: None,
            token_index: 0,
            expected: None,
            found: String::new(),
            context: String::new(),
        })
}
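/// Parse source text, run semantic validation, and convert the result into a
/// typed `HelixConfig`.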
pub fn parse_and_validate(source: &str) -> Result<HelixConfig, String> {
    let ast = parse(source).map_err(|e| e.to_string())?;
    validate(&ast)?;
    ast_to_config(ast)
}
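/// Run semantic analysis over an AST, joining any reported errors into a
/// single newline-separated message.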
pub fn validate(ast: &HelixAst) -> Result<(), String> {
    let mut analyzer = SemanticAnalyzer::new();
    analyzer
        .analyze(ast)
        .map_err(|errors| {
            errors.iter().map(|e| format!("{:?}", e)).collect::<Vec<_>>().join("\n")
        })
}
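/// Convert a parsed AST into a `HelixConfig` via a fresh `HelixLoader`.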
pub fn ast_to_config(ast: HelixAst) -> Result<HelixConfig, String> {
    let loader = types::HelixLoader::new();
    loader.ast_to_config(ast).map_err(|e| e.to_string())
}
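/// Read a configuration file from disk and run the full parse-and-validate
/// pipeline on its contents.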
pub fn load_file<P: AsRef<Path>>(path: P) -> Result<HelixConfig, String> {
    let content = std::fs::read_to_string(path)
        .map_err(|e| format!("Failed to read file: {}", e))?;
    parse_and_validate(&content)
}
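/// Load every `.HELIX` file directly inside a directory (non-recursive),
/// returning one `HelixConfig` per file.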
pub fn load_directory<P: AsRef<Path>>(path: P) -> Result<Vec<HelixConfig>, String> {
    let mut configs = Vec::new();
    let entries = std::fs::read_dir(path)
        .map_err(|e| format!("Failed to read directory: {}", e))?;
    for entry in entries {
        let entry = entry.map_err(|e| format!("Failed to read entry: {}", e))?;
        let path = entry.path();
        // Extension match is case-sensitive: only files ending in ".HELIX" are loaded.
        if path.extension().and_then(|s| s.to_str()) == Some("HELIX") {
            let config = load_file(&path)?;
            configs.push(config);
        }
    }
    Ok(configs)
}
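/// Render an AST back into formatted HELIX source text.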
pub fn pretty_print(ast: &HelixAst) -> String {
    let mut printer = ast::AstPrettyPrinter::new();
    printer.print(ast)
}
#[cfg(feature = "compiler")]
pub use compiler::tools::migrate::Migrator;
#[cfg(feature = "compiler")]
pub use compiler::{
    ModuleSystem, DependencyBundler, ModuleResolver, HelixVM, VMExecutor, VMConfig,
};
#[cfg(feature = "cli")]
pub use compiler::workflow::watch::{HelixWatcher, CompileWatcher, HotReloadManager};