1use serde::{Deserialize, Serialize};
6use std::collections::HashMap;
7use std::path::{Path, PathBuf};
8use std::time::Duration;
9use tree_sitter::{Language, Node, Parser, Tree};
10
/// Canonical file extension for Symbiont DSL source files.
pub const SYMBI_EXTENSION: &str = "symbi";

/// Older extension still accepted for backward compatibility.
pub const LEGACY_DSL_EXTENSION: &str = "dsl";

/// Returns `true` when `path` carries a recognized DSL extension —
/// either the canonical `.symbi` or the legacy `.dsl`.
///
/// Matching is case-sensitive, so `foo.SYMBI` is not recognized.
pub fn is_symbi_file(path: &Path) -> bool {
    match path.extension().and_then(|e| e.to_str()) {
        Some(ext) => ext == SYMBI_EXTENSION || ext == LEGACY_DSL_EXTENSION,
        None => false,
    }
}
26
/// Strips a trailing `.symbi` or `.dsl` extension from `name`, returning
/// the stem as a borrowed slice, or `None` when neither suffix matches.
/// Matching is case-sensitive, mirroring `is_symbi_file`.
pub fn strip_symbi_extension(name: &str) -> Option<&str> {
    for suffix in [".symbi", ".dsl"] {
        if let Some(stem) = name.strip_suffix(suffix) {
            return Some(stem);
        }
    }
    None
}
34
/// Hard ceiling on recursion depth for every AST walk in this module;
/// guards against stack overflow on pathologically nested input.
const MAX_AST_DEPTH: usize = 256;
41
/// Sandbox isolation tier that a DSL `with sandbox = "..."` attribute can
/// request. Parsed by `WithBlock::parse_sandbox_tier`; rendered back to its
/// lowercase token by the `Display` impl below.
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
pub enum SandboxTier {
    Docker,
    GVisor,
    Firecracker,
    E2B,
}
55
56impl std::fmt::Display for SandboxTier {
57 fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
58 match self {
59 SandboxTier::Docker => write!(f, "docker"),
60 SandboxTier::GVisor => write!(f, "gvisor"),
61 SandboxTier::Firecracker => write!(f, "firecracker"),
62 SandboxTier::E2B => write!(f, "e2b"),
63 }
64 }
65}
66
extern "C" {
    // Grammar entry point generated by the tree-sitter CLI and linked in
    // from the compiled Symbiont grammar; calling it is `unsafe` only
    // because it crosses the FFI boundary.
    fn tree_sitter_symbiont() -> Language;
}
72
73pub fn parse_dsl(source_code: &str) -> Result<Tree, Box<dyn std::error::Error>> {
103 let language = unsafe { tree_sitter_symbiont() };
108
109 let mut parser = Parser::new();
110 parser.set_language(language)?;
111
112 let tree = parser
113 .parse(source_code, None)
114 .ok_or("Failed to parse DSL code")?;
115
116 Ok(tree)
117}
118
119pub fn print_ast(node: Node, source: &str, depth: usize) {
121 let indent = " ".repeat(depth);
122 let node_text = if node.child_count() == 0 {
123 let start = node.start_byte();
124 let end = node.end_byte();
125 format!(" \"{}\"", &source[start..end].replace('\n', "\\n"))
126 } else {
127 String::new()
128 };
129
130 println!(
131 "{}{}: {}{}",
132 indent,
133 node.kind(),
134 node_text,
135 if node.is_error() { " [ERROR]" } else { "" }
136 );
137
138 for i in 0..node.child_count() {
139 if let Some(child) = node.child(i) {
140 print_ast(child, source, depth + 1);
141 }
142 }
143}
144
/// A single `name = value` attribute inside a `with` block, kept verbatim
/// from source (the value retains any surrounding quotes).
#[derive(Debug, Clone, PartialEq)]
pub struct WithAttribute {
    pub name: String,
    pub value: String,
}
151
/// Parsed contents of a DSL `with` block: all raw attributes, plus the two
/// attributes this module interprets (`sandbox` and `timeout`).
#[derive(Debug, Clone, PartialEq)]
pub struct WithBlock {
    // Every attribute in source order, uninterpreted.
    pub attributes: Vec<WithAttribute>,
    // Parsed `sandbox = "..."` attribute, when present.
    pub sandbox_tier: Option<SandboxTier>,
    // Parsed `timeout = ...` attribute, in whole seconds.
    pub timeout: Option<u64>,
}
159
160impl WithBlock {
161 pub fn new() -> Self {
162 Self {
163 attributes: Vec::new(),
164 sandbox_tier: None,
165 timeout: None,
166 }
167 }
168
169 pub fn parse_sandbox_tier(value: &str) -> Result<SandboxTier, String> {
171 let cleaned_value = value.trim_matches('"');
173 match cleaned_value.to_lowercase().as_str() {
174 "docker" => Ok(SandboxTier::Docker),
175 "gvisor" => Ok(SandboxTier::GVisor),
176 "firecracker" => Ok(SandboxTier::Firecracker),
177 "e2b" => Ok(SandboxTier::E2B),
178 _ => Err(format!(
179 "Invalid sandbox tier: {}. Valid options are: docker, gvisor, firecracker, e2b",
180 value
181 )),
182 }
183 }
184}
185
impl Default for WithBlock {
    // Delegates to `new()` so `Default` and `new` can never diverge.
    fn default() -> Self {
        Self::new()
    }
}
191
192pub fn extract_metadata(tree: &Tree, source: &str) -> HashMap<String, String> {
194 let mut metadata = HashMap::new();
195 let root_node = tree.root_node();
196
197 let _cursor = root_node.walk();
199
200 fn traverse_for_metadata(
201 node: Node,
202 source: &str,
203 metadata: &mut HashMap<String, String>,
204 depth: usize,
205 ) {
206 if depth > MAX_AST_DEPTH {
207 tracing::warn!(
208 "DSL metadata traversal aborted: depth {} exceeds MAX_AST_DEPTH {}",
209 depth,
210 MAX_AST_DEPTH
211 );
212 return;
213 }
214 if node.kind() == "metadata_block" {
215 for i in 0..node.child_count() {
217 if let Some(child) = node.child(i) {
218 if child.kind() == "metadata_pair" {
219 if let (Some(key_node), Some(value_node)) = (child.child(0), child.child(2))
220 {
221 let key =
222 source[key_node.start_byte()..key_node.end_byte()].to_string();
223 let value =
224 source[value_node.start_byte()..value_node.end_byte()].to_string();
225 metadata.insert(key, value);
226 }
227 }
228 }
229 }
230 }
231
232 for i in 0..node.child_count() {
234 if let Some(child) = node.child(i) {
235 traverse_for_metadata(child, source, metadata, depth + 1);
236 }
237 }
238 }
239
240 traverse_for_metadata(root_node, source, &mut metadata, 0);
241 metadata
242}
243
/// Extracts every `with_block` in the tree into a `WithBlock`, recording all
/// attributes raw and additionally interpreting `sandbox` and `timeout`.
///
/// # Errors
/// Fails on an invalid sandbox tier, an unparseable timeout value, or a tree
/// deeper than `MAX_AST_DEPTH`.
pub fn extract_with_blocks(tree: &Tree, source: &str) -> Result<Vec<WithBlock>, String> {
    let mut with_blocks = Vec::new();
    let root_node = tree.root_node();

    fn traverse_for_with_blocks(
        node: Node,
        source: &str,
        with_blocks: &mut Vec<WithBlock>,
        depth: usize,
    ) -> Result<(), String> {
        if depth > MAX_AST_DEPTH {
            return Err(format!(
                "DSL AST traversal depth exceeded MAX_AST_DEPTH ({})",
                MAX_AST_DEPTH
            ));
        }
        if node.kind() == "with_block" {
            let mut with_block = WithBlock::new();

            for i in 0..node.child_count() {
                if let Some(child) = node.child(i) {
                    if child.kind() == "with_attribute" {
                        // Children 0 and 2 are name and value; child 1 is
                        // presumably the '=' token — grammar-dependent.
                        if let (Some(name_node), Some(value_node)) =
                            (child.child(0), child.child(2))
                        {
                            let name =
                                source[name_node.start_byte()..name_node.end_byte()].to_string();
                            let value =
                                source[value_node.start_byte()..value_node.end_byte()].to_string();

                            // Every attribute is recorded verbatim, even the
                            // ones interpreted below.
                            let attribute = WithAttribute {
                                name: name.clone(),
                                value: value.clone(),
                            };
                            with_block.attributes.push(attribute);

                            match name.as_str() {
                                "sandbox" => match WithBlock::parse_sandbox_tier(&value) {
                                    Ok(tier) => with_block.sandbox_tier = Some(tier),
                                    Err(e) => return Err(e),
                                },
                                "timeout" => {
                                    let timeout_str = value.trim_matches('"');
                                    // Rewrite DSL-style suffixes (60.seconds,
                                    // 30.minutes, 2.hours) into humantime's
                                    // "60s" / "30m" / "2h" forms.
                                    let normalized = if let Some(n) =
                                        timeout_str.strip_suffix(".seconds")
                                    {
                                        format!("{}s", n.trim())
                                    } else if let Some(n) = timeout_str.strip_suffix(".minutes") {
                                        format!("{}m", n.trim())
                                    } else if let Some(n) = timeout_str.strip_suffix(".hours") {
                                        format!("{}h", n.trim())
                                    } else {
                                        timeout_str.to_string()
                                    };

                                    // Accept humantime syntax first, then a
                                    // bare integer meaning whole seconds.
                                    if let Ok(duration) = humantime::parse_duration(&normalized) {
                                        with_block.timeout = Some(duration.as_secs());
                                    } else if let Ok(seconds) = normalized.parse::<u64>() {
                                        with_block.timeout = Some(seconds);
                                    } else {
                                        return Err(format!("Invalid timeout value: {}", value));
                                    }
                                }
                                // Unknown attribute names stay raw-only.
                                _ => {}
                            }
                        }
                    }
                }
            }

            with_blocks.push(with_block);
        }

        for i in 0..node.child_count() {
            if let Some(child) = node.child(i) {
                traverse_for_with_blocks(child, source, with_blocks, depth + 1)?;
            }
        }

        Ok(())
    }

    traverse_for_with_blocks(root_node, source, &mut with_blocks, 0)?;
    Ok(with_blocks)
}
335
/// A parsed `schedule` definition from the DSL.
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
pub struct ScheduleDefinition {
    pub name: String,
    // Recurring cron expression; mutually exclusive with `at`
    // (enforced by `extract_schedule_definitions`).
    pub cron: Option<String>,
    // One-off timestamp string; mutually exclusive with `cron`.
    pub at: Option<String>,
    // Timezone name; defaults to "UTC" when the property is absent.
    pub timezone: String,
    pub agent: Option<String>,
    pub policy: Option<String>,
    pub audit: Option<String>,
    // Set from the `one_shot: true` property; defaults to false.
    pub one_shot: bool,
    // Delivery target string, e.g. "slack://compliance-alerts".
    pub deliver: Option<String>,
}
358
impl ScheduleDefinition {
    // Fresh definition with only the name set; timezone defaults to UTC,
    // everything else is empty until properties are parsed in.
    fn new(name: String) -> Self {
        Self {
            name,
            cron: None,
            at: None,
            timezone: "UTC".to_string(),
            agent: None,
            policy: None,
            audit: None,
            one_shot: false,
            deliver: None,
        }
    }
}
374
/// Extracts every `schedule_definition` node in the tree.
///
/// # Errors
/// Fails when a schedule is missing its name, specifies neither `cron` nor
/// `at`, specifies both, or the tree exceeds `MAX_AST_DEPTH`.
pub fn extract_schedule_definitions(
    tree: &Tree,
    source: &str,
) -> Result<Vec<ScheduleDefinition>, String> {
    let mut schedules = Vec::new();
    let root_node = tree.root_node();

    fn traverse_for_schedules(
        node: Node,
        source: &str,
        schedules: &mut Vec<ScheduleDefinition>,
        depth: usize,
    ) -> Result<(), String> {
        if depth > MAX_AST_DEPTH {
            return Err(format!(
                "DSL AST traversal depth exceeded MAX_AST_DEPTH ({})",
                MAX_AST_DEPTH
            ));
        }
        if node.kind() == "schedule_definition" {
            // child(0) is presumably the `schedule` keyword; child(1) the name.
            let name_node = node
                .child(1)
                .ok_or_else(|| "schedule_definition missing name".to_string())?;
            let name = source[name_node.start_byte()..name_node.end_byte()].to_string();
            let mut sched = ScheduleDefinition::new(name);

            for i in 0..node.child_count() {
                if let Some(child) = node.child(i) {
                    if child.kind() == "schedule_property" {
                        // key at 0, value at 2; child 1 presumably ':'.
                        if let (Some(key_node), Some(val_node)) = (child.child(0), child.child(2)) {
                            let key =
                                source[key_node.start_byte()..key_node.end_byte()].to_string();
                            let raw_value =
                                source[val_node.start_byte()..val_node.end_byte()].to_string();
                            let value = raw_value.trim_matches('"').to_string();

                            match key.as_str() {
                                "cron" => sched.cron = Some(value),
                                "at" => sched.at = Some(value),
                                "timezone" => sched.timezone = value,
                                "agent" => sched.agent = Some(value),
                                "policy" => sched.policy = Some(value),
                                "audit" => sched.audit = Some(value),
                                "one_shot" => sched.one_shot = value == "true",
                                "deliver" => sched.deliver = Some(value),
                                _ => {
                                    // Unknown properties are ignored.
                                }
                            }
                        }
                    }
                }
            }

            // Exactly one of cron/at must be present.
            match (&sched.cron, &sched.at) {
                (None, None) => {
                    return Err(format!(
                        "schedule '{}': must specify either 'cron' or 'at'",
                        sched.name
                    ));
                }
                (Some(_), Some(_)) => {
                    return Err(format!(
                        "schedule '{}': cannot specify both 'cron' and 'at'",
                        sched.name
                    ));
                }
                _ => {}
            }

            schedules.push(sched);
        }

        for i in 0..node.child_count() {
            if let Some(child) = node.child(i) {
                traverse_for_schedules(child, source, schedules, depth + 1)?;
            }
        }

        Ok(())
    }

    traverse_for_schedules(root_node, source, &mut schedules, 0)?;
    Ok(schedules)
}
470
/// Backing store type for an agent memory; only Markdown is supported so far.
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
pub enum MemoryStoreType {
    Markdown,
}
477
/// Hybrid-search weighting for a memory's `search { ... }` block.
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
pub struct MemorySearchConfig {
    // Weight for vector similarity; defaults to 0.7.
    pub vector_weight: f64,
    // Weight for keyword matching; defaults to 0.3.
    pub keyword_weight: f64,
}
486
impl Default for MemorySearchConfig {
    // 70/30 vector-vs-keyword split unless the DSL overrides it.
    fn default() -> Self {
        Self {
            vector_weight: 0.7,
            keyword_weight: 0.3,
        }
    }
}
495
/// A parsed `memory` definition from the DSL.
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
pub struct MemoryDefinition {
    pub name: String,
    // Backing store; defaults to Markdown.
    pub store: MemoryStoreType,
    // Storage location; defaults to "data/agents".
    pub path: PathBuf,
    // Retention window; defaults to 90 days.
    pub retention: Duration,
    // Optional hybrid-search weighting from a nested search block.
    pub search: Option<MemorySearchConfig>,
}
510
impl MemoryDefinition {
    // Defaults: Markdown store, "data/agents" path, 90-day retention
    // (90 * 86400 seconds), no search configuration.
    fn new(name: String) -> Self {
        Self {
            name,
            store: MemoryStoreType::Markdown,
            path: PathBuf::from("data/agents"),
            retention: Duration::from_secs(90 * 86400),
            search: None,
        }
    }
}
522
/// Extracts every `memory_definition` node in the tree.
///
/// # Errors
/// Fails when a definition is missing its name, names an unknown store
/// type, has an unparseable retention or search weight, or the tree
/// exceeds `MAX_AST_DEPTH`.
pub fn extract_memory_definitions(
    tree: &Tree,
    source: &str,
) -> Result<Vec<MemoryDefinition>, String> {
    let mut memories = Vec::new();
    let root_node = tree.root_node();

    fn traverse_for_memories(
        node: Node,
        source: &str,
        memories: &mut Vec<MemoryDefinition>,
        depth: usize,
    ) -> Result<(), String> {
        if depth > MAX_AST_DEPTH {
            return Err(format!(
                "DSL AST traversal depth exceeded MAX_AST_DEPTH ({})",
                MAX_AST_DEPTH
            ));
        }
        if node.kind() == "memory_definition" {
            // child(0) is presumably the `memory` keyword; child(1) the name.
            let name_node = node
                .child(1)
                .ok_or_else(|| "memory_definition missing name".to_string())?;
            let name = source[name_node.start_byte()..name_node.end_byte()].to_string();
            let mut mem = MemoryDefinition::new(name);

            for i in 0..node.child_count() {
                if let Some(child) = node.child(i) {
                    match child.kind() {
                        "memory_property" => {
                            // NOTE(review): key at 0 and value at 1 here —
                            // unlike schedule_property which uses 0 and 2;
                            // presumably this grammar rule has no separate
                            // separator token. Confirm against the grammar.
                            if let (Some(key_node), Some(val_node)) =
                                (child.child(0), child.child(1))
                            {
                                let key =
                                    source[key_node.start_byte()..key_node.end_byte()].to_string();
                                let raw_value =
                                    source[val_node.start_byte()..val_node.end_byte()].to_string();
                                let value = raw_value.trim_matches('"').to_string();

                                match key.as_str() {
                                    "store" => match value.to_lowercase().as_str() {
                                        "markdown" => mem.store = MemoryStoreType::Markdown,
                                        _ => {
                                            return Err(format!(
                                                "memory '{}': unknown store type '{}'",
                                                mem.name, value
                                            ));
                                        }
                                    },
                                    "path" => mem.path = PathBuf::from(value),
                                    "retention" => {
                                        // humantime syntax, e.g. "90d".
                                        mem.retention =
                                            humantime::parse_duration(&value).map_err(|e| {
                                                format!(
                                                    "memory '{}': invalid retention '{}': {}",
                                                    mem.name, value, e
                                                )
                                            })?;
                                    }
                                    _ => {
                                        // Unknown properties are ignored.
                                    }
                                }
                            }
                        }
                        "memory_search_block" => {
                            // Start from defaults; override whichever weights
                            // the block specifies.
                            let mut search = MemorySearchConfig::default();
                            for j in 0..child.child_count() {
                                if let Some(prop_node) = child.child(j) {
                                    if prop_node.kind() == "memory_search_property" {
                                        if let (Some(key_node), Some(val_node)) =
                                            (prop_node.child(0), prop_node.child(1))
                                        {
                                            let key = source
                                                [key_node.start_byte()..key_node.end_byte()]
                                                .to_string();
                                            let raw_value = source
                                                [val_node.start_byte()..val_node.end_byte()]
                                                .to_string();

                                            match key.as_str() {
                                                "vector_weight" => {
                                                    search.vector_weight = raw_value
                                                        .parse::<f64>()
                                                        .map_err(|e| {
                                                            format!(
                                                                "memory: invalid vector_weight '{}': {}",
                                                                raw_value, e
                                                            )
                                                        })?;
                                                }
                                                "keyword_weight" => {
                                                    search.keyword_weight = raw_value
                                                        .parse::<f64>()
                                                        .map_err(|e| {
                                                            format!(
                                                                "memory: invalid keyword_weight '{}': {}",
                                                                raw_value, e
                                                            )
                                                        })?;
                                                }
                                                _ => {
                                                    // Unknown weights ignored.
                                                }
                                            }
                                        }
                                    }
                                }
                            }
                            mem.search = Some(search);
                        }
                        _ => {}
                    }
                }
            }

            memories.push(mem);
        }

        for i in 0..node.child_count() {
            if let Some(child) = node.child(i) {
                traverse_for_memories(child, source, memories, depth + 1)?;
            }
        }

        Ok(())
    }

    traverse_for_memories(root_node, source, &mut memories, 0)?;
    Ok(memories)
}
665
/// Webhook source provider. Anything not explicitly recognized parses to
/// `Custom` (see the `FromStr` impl below).
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
pub enum WebhookProvider {
    GitHub,
    Stripe,
    Slack,
    Custom,
}
678
679impl std::str::FromStr for WebhookProvider {
680 type Err = std::convert::Infallible;
681
682 fn from_str(s: &str) -> Result<Self, Self::Err> {
684 Ok(match s.to_lowercase().as_str() {
685 "github" => WebhookProvider::GitHub,
686 "stripe" => WebhookProvider::Stripe,
687 "slack" => WebhookProvider::Slack,
688 _ => WebhookProvider::Custom,
689 })
690 }
691}
692
impl WebhookProvider {
    /// Convenience wrapper over the `FromStr` impl. The `unwrap()` cannot
    /// panic: the error type is `Infallible`.
    pub fn parse(s: &str) -> Self {
        s.parse().unwrap()
    }
}
699
/// Payload filter for a webhook: a JSON path plus at most one of the two
/// match conditions parsed from the filter block.
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
pub struct WebhookFilter {
    pub json_path: String,
    pub equals: Option<String>,
    pub contains: Option<String>,
}
710
/// A parsed `webhook` definition from the DSL.
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
pub struct WebhookDefinition {
    pub name: String,
    // Required; extraction fails if no `path` property was seen.
    pub path: String,
    // Defaults to `Custom` when no provider property is given.
    pub provider: WebhookProvider,
    // Empty string when no secret property is given.
    pub secret: String,
    pub agent: Option<String>,
    pub filter: Option<WebhookFilter>,
}
727
impl WebhookDefinition {
    // Named but otherwise empty definition: Custom provider, empty
    // path/secret, no agent or filter.
    fn new(name: String) -> Self {
        Self {
            name,
            path: String::new(),
            provider: WebhookProvider::Custom,
            secret: String::new(),
            agent: None,
            filter: None,
        }
    }
}
740
/// Extracts every `webhook_definition` node in the tree, including a
/// best-effort recovery pass over tree-sitter ERROR nodes so partially
/// malformed webhook bodies still yield their recognizable properties.
///
/// # Errors
/// Fails when a definition is missing its name, has no `path` property,
/// or the tree exceeds `MAX_AST_DEPTH`.
pub fn extract_webhook_definitions(
    tree: &Tree,
    source: &str,
) -> Result<Vec<WebhookDefinition>, String> {
    let mut webhooks = Vec::new();
    let root_node = tree.root_node();

    // Applies one key/value pair to the definition; tracks whether a
    // `path` was ever seen so the caller can enforce it.
    fn apply_webhook_property(
        key: &str,
        value: &str,
        webhook: &mut WebhookDefinition,
        has_path: &mut bool,
    ) {
        match key {
            "path" => {
                webhook.path = value.to_string();
                *has_path = true;
            }
            "provider" => {
                webhook.provider = WebhookProvider::parse(value);
            }
            "secret" => webhook.secret = value.to_string(),
            "agent" => webhook.agent = Some(value.to_string()),
            _ => {
                // Unknown properties are ignored.
            }
        }
    }

    // Pulls properties out of a child node; handles both the well-formed
    // `webhook_property` shape and ERROR nodes, where adjacent identifier
    // pairs are heuristically treated as key/value.
    fn extract_webhook_props_from_node(
        node: Node,
        source: &str,
        webhook: &mut WebhookDefinition,
        has_path: &mut bool,
    ) {
        match node.kind() {
            "webhook_property" => {
                // NOTE(review): key at 0, value at 1 — no separator child;
                // confirm against the grammar.
                if let (Some(key_node), Some(val_node)) = (node.child(0), node.child(1)) {
                    let key = source[key_node.start_byte()..key_node.end_byte()].to_string();
                    let raw_value = source[val_node.start_byte()..val_node.end_byte()].to_string();
                    let value = raw_value.trim_matches('"').to_string();
                    apply_webhook_property(&key, &value, webhook, has_path);
                }
            }
            "ERROR" => {
                // Manual index walk so consumed pairs can skip two children.
                let mut i = 0;
                while i < node.child_count() {
                    if let Some(child) = node.child(i) {
                        if child.kind() == "identifier" {
                            if let Some(next) = node.child(i + 1) {
                                if next.kind() == "identifier" {
                                    let key =
                                        source[child.start_byte()..child.end_byte()].to_string();
                                    let value =
                                        source[next.start_byte()..next.end_byte()].to_string();
                                    apply_webhook_property(&key, &value, webhook, has_path);
                                    i += 2;
                                    continue;
                                }
                            }
                        } else if child.kind() == "webhook_property" {
                            extract_webhook_props_from_node(child, source, webhook, has_path);
                        }
                    }
                    i += 1;
                }
            }
            _ => {}
        }
    }

    fn traverse_for_webhooks(
        node: Node,
        source: &str,
        webhooks: &mut Vec<WebhookDefinition>,
        depth: usize,
    ) -> Result<(), String> {
        if depth > MAX_AST_DEPTH {
            return Err(format!(
                "DSL AST traversal depth exceeded MAX_AST_DEPTH ({})",
                MAX_AST_DEPTH
            ));
        }
        if node.kind() == "webhook_definition" {
            // child(0) is presumably the `webhook` keyword; child(1) the name.
            let name_node = node
                .child(1)
                .ok_or_else(|| "webhook_definition missing name".to_string())?;
            let name = source[name_node.start_byte()..name_node.end_byte()].to_string();
            let mut webhook = WebhookDefinition::new(name);
            let mut has_path = false;

            for i in 0..node.child_count() {
                if let Some(child) = node.child(i) {
                    if child.kind() == "webhook_filter_block" {
                        let mut json_path = String::new();
                        let mut equals = None;
                        let mut contains = None;

                        for j in 0..child.child_count() {
                            if let Some(prop_node) = child.child(j) {
                                if prop_node.kind() == "webhook_filter_property" {
                                    if let (Some(key_node), Some(val_node)) =
                                        (prop_node.child(0), prop_node.child(1))
                                    {
                                        let key = source
                                            [key_node.start_byte()..key_node.end_byte()]
                                            .to_string();
                                        let raw_value = source
                                            [val_node.start_byte()..val_node.end_byte()]
                                            .to_string();
                                        let value = raw_value.trim_matches('"').to_string();

                                        match key.as_str() {
                                            "json_path" => json_path = value,
                                            "equals" => equals = Some(value),
                                            "contains" => contains = Some(value),
                                            _ => {
                                                // Unknown filter keys ignored.
                                            }
                                        }
                                    }
                                }
                            }
                        }

                        webhook.filter = Some(WebhookFilter {
                            json_path,
                            equals,
                            contains,
                        });
                    } else {
                        extract_webhook_props_from_node(child, source, &mut webhook, &mut has_path);
                    }
                }
            }

            // `path` is the one mandatory property.
            if !has_path {
                return Err(format!("webhook '{}': must specify 'path'", webhook.name));
            }

            webhooks.push(webhook);
        }

        for i in 0..node.child_count() {
            if let Some(child) = node.child(i) {
                traverse_for_webhooks(child, source, webhooks, depth + 1)?;
            }
        }

        Ok(())
    }

    traverse_for_webhooks(root_node, source, &mut webhooks, 0)?;
    Ok(webhooks)
}
917
/// A parsed `channel` definition from the DSL.
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
pub struct ChannelDefinition {
    pub name: String,
    // Required; extraction fails when absent.
    pub platform: Option<String>,
    pub workspace: Option<String>,
    // From a `channels: [...]` array property.
    pub channels: Vec<String>,
    pub default_agent: Option<String>,
    pub dlp_profile: Option<String>,
    pub audit_level: Option<String>,
    // Set from the `default_deny: true` property; defaults to false.
    pub default_deny: bool,
    // Rules from a nested channel policy block.
    pub policy_rules: Vec<ChannelPolicyRule>,
    // Entries from a nested data-classification block.
    pub data_classification: Vec<DataClassificationEntry>,
}
932
impl ChannelDefinition {
    // Named but otherwise empty definition; every option unset, every
    // collection empty, default_deny off.
    fn new(name: String) -> Self {
        Self {
            name,
            platform: None,
            workspace: None,
            channels: Vec::new(),
            default_agent: None,
            dlp_profile: None,
            audit_level: None,
            default_deny: false,
            policy_rules: Vec::new(),
            data_classification: Vec::new(),
        }
    }
}
949
/// One `action: expression` rule from a channel policy block; both sides
/// are kept verbatim from source.
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
pub struct ChannelPolicyRule {
    pub action: String,
    pub expression: String,
}
958
/// One `category: action` entry from a channel data-classification block.
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
pub struct DataClassificationEntry {
    pub category: String,
    pub action: String,
}
967
/// Extracts every `channel_definition` node in the tree, including nested
/// policy and data-classification blocks and array-valued properties.
///
/// # Errors
/// Fails when a definition is missing its name, has no `platform`
/// property, or the tree exceeds `MAX_AST_DEPTH`.
pub fn extract_channel_definitions(
    tree: &Tree,
    source: &str,
) -> Result<Vec<ChannelDefinition>, String> {
    let mut channels = Vec::new();
    let root_node = tree.root_node();

    // Flattens an array node into its string items, unquoting each; skips
    // bracket/comma punctuation and recurses through any wrapper nodes.
    fn extract_array_strings(node: Node, source: &str) -> Vec<String> {
        let mut items = Vec::new();
        for i in 0..node.child_count() {
            if let Some(child) = node.child(i) {
                if child.kind() == "expression" || child.kind() == "string" {
                    let text = source[child.start_byte()..child.end_byte()].to_string();
                    items.push(text.trim_matches('"').to_string());
                } else if child.kind() != "[" && child.kind() != "]" && child.kind() != "," {
                    let nested = extract_array_strings(child, source);
                    items.extend(nested);
                }
            }
        }
        items
    }

    fn traverse_for_channels(
        node: Node,
        source: &str,
        channels: &mut Vec<ChannelDefinition>,
        depth: usize,
    ) -> Result<(), String> {
        if depth > MAX_AST_DEPTH {
            return Err(format!(
                "DSL AST traversal depth exceeded MAX_AST_DEPTH ({})",
                MAX_AST_DEPTH
            ));
        }
        if node.kind() == "channel_definition" {
            // child(0) is presumably the `channel` keyword; child(1) the name.
            let name_node = node
                .child(1)
                .ok_or_else(|| "channel_definition missing name".to_string())?;
            let name = source[name_node.start_byte()..name_node.end_byte()].to_string();
            let mut chan = ChannelDefinition::new(name);

            for i in 0..node.child_count() {
                if let Some(child) = node.child(i) {
                    match child.kind() {
                        "channel_property" => {
                            // key at 0, value at 2; child 1 presumably ':'.
                            if let (Some(key_node), Some(val_node)) =
                                (child.child(0), child.child(2))
                            {
                                let key =
                                    source[key_node.start_byte()..key_node.end_byte()].to_string();

                                if val_node.kind() == "array" {
                                    // Only `channels` takes an array value.
                                    let items = extract_array_strings(val_node, source);
                                    if key == "channels" {
                                        chan.channels = items;
                                    }
                                } else {
                                    let raw_value = source
                                        [val_node.start_byte()..val_node.end_byte()]
                                        .to_string();
                                    let value = raw_value.trim_matches('"').to_string();

                                    match key.as_str() {
                                        "platform" => chan.platform = Some(value),
                                        "workspace" => chan.workspace = Some(value),
                                        "default_agent" => chan.default_agent = Some(value),
                                        "dlp_profile" => chan.dlp_profile = Some(value),
                                        "audit_level" => chan.audit_level = Some(value),
                                        "default_deny" => chan.default_deny = value == "true",
                                        _ => {
                                            // Unknown properties are ignored.
                                        }
                                    }
                                }
                            }
                        }
                        "channel_policy_block" => {
                            for j in 0..child.child_count() {
                                if let Some(rule_node) = child.child(j) {
                                    if rule_node.kind() == "policy_rule" {
                                        // action at 0, expression at 2.
                                        if let (Some(action_node), Some(expr_node)) =
                                            (rule_node.child(0), rule_node.child(2))
                                        {
                                            let action = source
                                                [action_node.start_byte()..action_node.end_byte()]
                                                .to_string();
                                            let expression = source
                                                [expr_node.start_byte()..expr_node.end_byte()]
                                                .to_string();
                                            chan.policy_rules
                                                .push(ChannelPolicyRule { action, expression });
                                        }
                                    }
                                }
                            }
                        }
                        "channel_data_classification_block" => {
                            for j in 0..child.child_count() {
                                if let Some(rule_node) = child.child(j) {
                                    if rule_node.kind() == "data_classification_rule" {
                                        // category at 0, action at 2.
                                        if let (Some(cat_node), Some(act_node)) =
                                            (rule_node.child(0), rule_node.child(2))
                                        {
                                            let category = source
                                                [cat_node.start_byte()..cat_node.end_byte()]
                                                .to_string();
                                            let action = source
                                                [act_node.start_byte()..act_node.end_byte()]
                                                .to_string();
                                            chan.data_classification
                                                .push(DataClassificationEntry { category, action });
                                        }
                                    }
                                }
                            }
                        }
                        _ => {}
                    }
                }
            }

            // `platform` is the one mandatory property.
            if chan.platform.is_none() {
                return Err(format!("channel '{}': must specify 'platform'", chan.name));
            }

            channels.push(chan);
        }

        for i in 0..node.child_count() {
            if let Some(child) = node.child(i) {
                traverse_for_channels(child, source, channels, depth + 1)?;
            }
        }

        Ok(())
    }

    traverse_for_channels(root_node, source, &mut channels, 0)?;
    Ok(channels)
}
1123
/// A diagnostic for one tree-sitter ERROR node. Line and column numbers
/// are 1-based (converted from tree-sitter's 0-based positions).
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
pub struct DslDiagnostic {
    pub start_line: usize,
    pub start_col: usize,
    pub end_line: usize,
    pub end_col: usize,
    // Verbatim source text covered by the ERROR node.
    pub snippet: String,
    // AST depth at which the ERROR node was found.
    pub depth: usize,
}
1140
1141impl std::fmt::Display for DslDiagnostic {
1142 fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
1143 write!(
1144 f,
1145 "ERROR at {}:{}-{}:{}: '{}'",
1146 self.start_line, self.start_col, self.end_line, self.end_col, self.snippet
1147 )
1148 }
1149}
1150
1151pub fn find_errors(node: Node, source: &str, depth: usize) -> Vec<DslDiagnostic> {
1153 let mut diagnostics = Vec::new();
1154 collect_errors(node, source, depth, &mut diagnostics);
1155 diagnostics
1156}
1157
1158fn collect_errors(node: Node, source: &str, depth: usize, diagnostics: &mut Vec<DslDiagnostic>) {
1159 if node.kind() == "ERROR" {
1160 let start = node.start_position();
1161 let end = node.end_position();
1162 let text = &source[node.start_byte()..node.end_byte()];
1163 diagnostics.push(DslDiagnostic {
1164 start_line: start.row + 1,
1165 start_col: start.column + 1,
1166 end_line: end.row + 1,
1167 end_col: end.column + 1,
1168 snippet: text.to_string(),
1169 depth,
1170 });
1171 }
1172
1173 for i in 0..node.child_count() {
1174 if let Some(child) = node.child(i) {
1175 collect_errors(child, source, depth + 1, diagnostics);
1176 }
1177 }
1178}
1179
#[cfg(test)]
mod tests {
    //! Unit tests for extension helpers, parsing, and each extract_* pass.
    //! Tests that depend on the linked tree-sitter grammar use
    //! `if let Ok(tree)` so they degrade gracefully if parsing fails.
    use super::*;

    // Extension recognition is case-sensitive and suffix-based only.
    #[test]
    fn test_extension_helpers() {
        assert!(is_symbi_file(Path::new("foo.symbi")));
        assert!(is_symbi_file(Path::new("foo.dsl")));
        assert!(is_symbi_file(Path::new("agents/bar.symbi")));
        assert!(!is_symbi_file(Path::new("foo.txt")));
        assert!(!is_symbi_file(Path::new("foo")));
        assert!(!is_symbi_file(Path::new("foo.SYMBI")));

        assert_eq!(strip_symbi_extension("agent.symbi"), Some("agent"));
        assert_eq!(strip_symbi_extension("agent.dsl"), Some("agent"));
        assert_eq!(strip_symbi_extension("agent"), None);
        assert_eq!(strip_symbi_extension("agent.txt"), None);
    }

    #[test]
    fn test_basic_parsing() {
        let simple_dsl = r#"
            agent TestAgent {
                capabilities: [test]
            }
        "#;

        let result = parse_dsl(simple_dsl);
        assert!(result.is_ok(), "Basic DSL parsing should succeed");
    }

    #[test]
    fn test_metadata_extraction() {
        let dsl_with_metadata = r#"
            metadata {
                version: "1.0",
                author: "Test"
            }
        "#;

        if let Ok(tree) = parse_dsl(dsl_with_metadata) {
            let metadata = extract_metadata(&tree, dsl_with_metadata);
            assert!(!metadata.is_empty(), "Should extract metadata");
        }
    }

    // Sandbox tier and timeout are both interpreted from the with block.
    #[test]
    fn test_with_block_parsing() {
        let agent_with_sandbox = r#"
            agent code_runner(script: String) -> Output {
                with sandbox = "e2b", timeout = 60.seconds {
                    return execute(script);
                }
            }
        "#;

        if let Ok(tree) = parse_dsl(agent_with_sandbox) {
            let with_blocks = extract_with_blocks(&tree, agent_with_sandbox).unwrap();
            assert_eq!(with_blocks.len(), 1, "Should extract one with block");

            let with_block = &with_blocks[0];
            assert_eq!(with_block.sandbox_tier, Some(SandboxTier::E2B));
            assert_eq!(with_block.timeout, Some(60));
        }
    }

    #[test]
    fn test_sandbox_tier_validation() {
        assert_eq!(
            WithBlock::parse_sandbox_tier("docker"),
            Ok(SandboxTier::Docker)
        );
        assert_eq!(
            WithBlock::parse_sandbox_tier("gvisor"),
            Ok(SandboxTier::GVisor)
        );
        assert_eq!(
            WithBlock::parse_sandbox_tier("firecracker"),
            Ok(SandboxTier::Firecracker)
        );
        assert_eq!(WithBlock::parse_sandbox_tier("e2b"), Ok(SandboxTier::E2B));

        // Quoted values are accepted too.
        assert_eq!(
            WithBlock::parse_sandbox_tier("\"docker\""),
            Ok(SandboxTier::Docker)
        );

        assert!(WithBlock::parse_sandbox_tier("invalid").is_err());
    }

    #[test]
    fn test_schedule_definition_parsing() {
        let dsl = r#"
            schedule morning_report {
                cron: "0 7 * * 1-5",
                timezone: "America/New_York",
                agent: "compliance_reporter",
                policy: "hipaa_guard",
                audit: "all_operations"
            }
        "#;

        let tree = parse_dsl(dsl).expect("should parse");
        let schedules = extract_schedule_definitions(&tree, dsl).unwrap();
        assert_eq!(schedules.len(), 1);

        let s = &schedules[0];
        assert_eq!(s.name, "morning_report");
        assert_eq!(s.cron.as_deref(), Some("0 7 * * 1-5"));
        assert_eq!(s.timezone, "America/New_York");
        assert_eq!(s.agent.as_deref(), Some("compliance_reporter"));
        assert_eq!(s.policy.as_deref(), Some("hipaa_guard"));
        assert_eq!(s.audit.as_deref(), Some("all_operations"));
        assert!(!s.one_shot);
    }

    #[test]
    fn test_schedule_one_shot() {
        let dsl = r#"
            schedule quarterly_check {
                at: "2026-03-31T23:59:00",
                timezone: "UTC",
                agent: "sox_auditor",
                one_shot: true
            }
        "#;

        let tree = parse_dsl(dsl).expect("should parse");
        let schedules = extract_schedule_definitions(&tree, dsl).unwrap();
        assert_eq!(schedules.len(), 1);

        let s = &schedules[0];
        assert_eq!(s.name, "quarterly_check");
        assert!(s.cron.is_none());
        assert_eq!(s.at.as_deref(), Some("2026-03-31T23:59:00"));
        assert!(s.one_shot);
    }

    // A schedule must carry exactly one of cron/at — neither is an error.
    #[test]
    fn test_schedule_missing_cron_and_at() {
        let dsl = r#"
            schedule bad_schedule {
                timezone: "UTC",
                agent: "some_agent"
            }
        "#;

        let tree = parse_dsl(dsl).expect("should parse");
        let result = extract_schedule_definitions(&tree, dsl);
        assert!(result.is_err());
        assert!(result.unwrap_err().contains("must specify either"));
    }

    // ... and both at once is also an error.
    #[test]
    fn test_schedule_both_cron_and_at_rejected() {
        let dsl = r#"
            schedule conflicting {
                cron: "0 * * * *",
                at: "2026-01-01T00:00:00",
                agent: "some_agent"
            }
        "#;

        let tree = parse_dsl(dsl).expect("should parse");
        let result = extract_schedule_definitions(&tree, dsl);
        assert!(result.is_err());
        assert!(result.unwrap_err().contains("cannot specify both"));
    }

    #[test]
    fn test_schedule_with_delivery() {
        let dsl = r#"
            schedule alerter {
                cron: "*/30 * * * *",
                agent: "compliance_agent",
                deliver: "slack://compliance-alerts"
            }
        "#;

        let tree = parse_dsl(dsl).expect("should parse");
        let schedules = extract_schedule_definitions(&tree, dsl).unwrap();
        assert_eq!(schedules.len(), 1);
        assert_eq!(
            schedules[0].deliver.as_deref(),
            Some("slack://compliance-alerts")
        );
    }

    #[test]
    fn test_multiple_schedules() {
        let dsl = r#"
            schedule job_a {
                cron: "0 * * * *",
                agent: "agent_a"
            }
            schedule job_b {
                at: "2026-06-01T12:00:00",
                agent: "agent_b",
                one_shot: true
            }
        "#;

        let tree = parse_dsl(dsl).expect("should parse");
        let schedules = extract_schedule_definitions(&tree, dsl).unwrap();
        assert_eq!(schedules.len(), 2);
        assert_eq!(schedules[0].name, "job_a");
        assert_eq!(schedules[1].name, "job_b");
    }

    // All attributes are recorded raw even when also interpreted.
    #[test]
    fn test_with_block_attributes() {
        let agent_with_multiple_attrs = r#"
            agent test_agent {
                with sandbox = "docker", timeout = 30.seconds {
                    return success();
                }
            }
        "#;

        if let Ok(tree) = parse_dsl(agent_with_multiple_attrs) {
            let with_blocks = extract_with_blocks(&tree, agent_with_multiple_attrs).unwrap();
            assert_eq!(with_blocks.len(), 1);

            let with_block = &with_blocks[0];
            assert_eq!(with_block.attributes.len(), 2);
            assert_eq!(with_block.sandbox_tier, Some(SandboxTier::Docker));
            assert_eq!(with_block.timeout, Some(30));
        }
    }

    // The four accepted timeout spellings: .seconds/.minutes/.hours/bare int.
    #[test]
    fn test_timeout_seconds_suffix() {
        let dsl = r#"
            agent runner(s: String) -> Output {
                with timeout = 45.seconds {
                    return execute(s);
                }
            }
        "#;
        let tree = parse_dsl(dsl).unwrap();
        let blocks = extract_with_blocks(&tree, dsl).unwrap();
        assert_eq!(blocks.len(), 1);
        assert_eq!(blocks[0].timeout, Some(45));
    }

    #[test]
    fn test_timeout_minutes_suffix() {
        let dsl = r#"
            agent runner(s: String) -> Output {
                with timeout = 30.minutes {
                    return execute(s);
                }
            }
        "#;
        let tree = parse_dsl(dsl).unwrap();
        let blocks = extract_with_blocks(&tree, dsl).unwrap();
        assert_eq!(blocks.len(), 1);
        assert_eq!(blocks[0].timeout, Some(30 * 60));
    }

    #[test]
    fn test_timeout_hours_suffix() {
        let dsl = r#"
            agent runner(s: String) -> Output {
                with timeout = 2.hours {
                    return execute(s);
                }
            }
        "#;
        let tree = parse_dsl(dsl).unwrap();
        let blocks = extract_with_blocks(&tree, dsl).unwrap();
        assert_eq!(blocks.len(), 1);
        assert_eq!(blocks[0].timeout, Some(2 * 3600));
    }

    #[test]
    fn test_timeout_bare_number() {
        let dsl = r#"
            agent runner(s: String) -> Output {
                with timeout = 120 {
                    return execute(s);
                }
            }
        "#;
        let tree = parse_dsl(dsl).unwrap();
        let blocks = extract_with_blocks(&tree, dsl).unwrap();
        assert_eq!(blocks.len(), 1);
        assert_eq!(blocks[0].timeout, Some(120));
    }
}