1use std::collections::HashMap;
2use std::io::{Write, Read};
3use serde::{Serialize, Deserialize};
4use crate::dna::atp::ast::*;
5use crate::dna::atp::types::TimeUnit;
6use crate::dna::atp::types::Duration;
7pub use crate::dna::atp::types::SecretRef;
8
/// Compiled intermediate representation of a Helix program.
///
/// Symbols and instructions reference text by index into `string_pool`,
/// keeping the structure compact and serde-serializable.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct HelixIR {
    /// IR format version; validated by `VersionChecker` before use.
    pub version: u32,
    /// Build provenance (source file, compile time, compiler version, checksum).
    pub metadata: Metadata,
    /// Declared agents/workflows/contexts/crews, keyed by numeric id.
    pub symbol_table: SymbolTable,
    /// Linear instruction stream emitted by the code generator.
    pub instructions: Vec<Instruction>,
    /// Interned strings referenced by the various `*_idx` fields.
    pub string_pool: StringPool,
    /// Pooled constant values.
    pub constants: ConstantPool,
}
/// Provenance information attached to a compiled IR module.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Metadata {
    /// Path of the compiled source file, when known.
    pub source_file: Option<String>,
    /// Compile timestamp as seconds since the Unix epoch.
    pub compile_time: u64,
    /// Version string of the compiler that produced this IR.
    pub compiler_version: String,
    /// Integrity checksum stamped by `CodeGenerator::generate`;
    /// `None` until generation completes.
    pub checksum: Option<u64>,
}
25impl Default for Metadata {
26 fn default() -> Self {
27 Self {
28 source_file: None,
29 compile_time: std::time::SystemTime::now()
30 .duration_since(std::time::UNIX_EPOCH)
31 .unwrap()
32 .as_secs(),
33 compiler_version: "1.0.0".to_string(),
34 checksum: None,
35 }
36 }
37}
/// Symbol tables for every declarable entity, keyed by the numeric ids
/// handed out by `CodeGenerator::next_id`.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct SymbolTable {
    pub agents: HashMap<u32, AgentSymbol>,
    pub workflows: HashMap<u32, WorkflowSymbol>,
    pub contexts: HashMap<u32, ContextSymbol>,
    pub crews: HashMap<u32, CrewSymbol>,
    /// Next free symbol id; ids start at 1.
    pub next_id: u32,
}
46impl Default for SymbolTable {
47 fn default() -> Self {
48 Self {
49 agents: HashMap::new(),
50 workflows: HashMap::new(),
51 contexts: HashMap::new(),
52 crews: HashMap::new(),
53 next_id: 1,
54 }
55 }
56}
/// Lowered form of an agent declaration; `*_idx` fields index the string pool.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct AgentSymbol {
    pub id: u32,
    pub name_idx: u32,
    /// Model name; the generator falls back to "gpt-4" when omitted.
    pub model_idx: u32,
    /// Role string; the generator falls back to "Assistant" when omitted.
    pub role_idx: u32,
    pub temperature: Option<f32>,
    pub max_tokens: Option<u32>,
    /// Interned capability names.
    pub capabilities: Vec<u32>,
    /// Backstory text (declaration lines joined with '\n'), when present.
    pub backstory_idx: Option<u32>,
}
/// Lowered form of a workflow declaration.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct WorkflowSymbol {
    pub id: u32,
    pub name_idx: u32,
    /// How the workflow is started; `Manual` when no trigger was declared.
    pub trigger_type: TriggerType,
    /// Step ids in declaration order.
    pub steps: Vec<u32>,
    /// Optional explicit ordering of step ids taken from the pipeline block.
    pub pipeline: Option<Vec<u32>>,
}
/// Lowered form of a context declaration.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ContextSymbol {
    pub id: u32,
    pub name_idx: u32,
    /// Interned environment name.
    pub environment_idx: u32,
    /// Debug flag from the "debug" property (defaults to false).
    pub debug: bool,
    pub max_tokens: Option<u64>,
    /// Secret sources keyed by interned secret name.
    pub secrets: HashMap<u32, SecretType>,
}
/// Lowered form of a crew declaration.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct CrewSymbol {
    pub id: u32,
    pub name_idx: u32,
    /// Ids of member agents that resolved against the symbol table.
    pub agent_ids: Vec<u32>,
    /// Execution strategy (defaults to `Sequential`).
    pub process_type: ProcessTypeIR,
    /// Resolved id of the "manager" agent, when one is named.
    pub manager_id: Option<u32>,
}
/// How a workflow is started. Payloads are string-pool indices holding the
/// trigger argument (cron spec, webhook URL, event name, or file pattern).
#[derive(Debug, Clone, Serialize, Deserialize)]
pub enum TriggerType {
    Manual,
    Schedule(u32),
    Webhook(u32),
    Event(u32),
    FileWatch(u32),
}
/// Crew execution strategy, parsed from the declaration's process string
/// ("sequential" / "hierarchical" / "parallel" / "consensus").
#[derive(Debug, Clone, Serialize, Deserialize)]
pub enum ProcessTypeIR {
    Sequential,
    Hierarchical,
    Parallel,
    Consensus,
}
/// Where a context secret is sourced from; payloads index the string pool
/// (environment variable name, vault path, or file path).
#[derive(Debug, Clone, Serialize, Deserialize)]
pub enum SecretType {
    Environment(u32),
    Vault(u32),
    File(u32),
}
/// One operation in the linear IR stream. `Declare*` payloads are symbol-table
/// ids; key/index operands are string-pool indices.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub enum Instruction {
    DeclareAgent(u32),
    DeclareWorkflow(u32),
    DeclareContext(u32),
    DeclareCrew(u32),
    /// Set a generic property on a declared entity.
    SetProperty { target: u32, key: u32, value: ConstantValue },
    /// Attach an interned capability name to an agent.
    SetCapability { agent: u32, capability: u32 },
    /// Register a secret source under `key` for a context.
    SetSecret { context: u32, key: u32, secret: SecretType },
    /// Add a fully lowered step to a workflow.
    DefineStep { workflow: u32, step: StepDefinition },
    /// Attach an explicit pipeline graph to a workflow.
    DefinePipeline { workflow: u32, nodes: Vec<PipelineNodeIR> },
    /// Resolve an external reference (environment/memory/variable).
    ResolveReference { ref_type: ReferenceType, index: u32 },
    /// Record a metadata key/value pair (both interned).
    SetMetadata { key: u32, value: u32 },
}
/// Fully lowered workflow step.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct StepDefinition {
    pub id: u32,
    pub name_idx: u32,
    /// Resolved agent symbol id, when the step names an already-declared agent.
    pub agent_id: Option<u32>,
    /// Present when the step declares a crew. NOTE: the generator currently
    /// fills this with an empty Vec rather than resolved crew ids.
    pub crew_ids: Option<Vec<u32>>,
    /// Interned task text, when present.
    pub task_idx: Option<u32>,
    pub timeout: Option<DurationIR>,
    /// From the "parallel" property; defaults to false.
    pub parallel: bool,
    /// Ids of prerequisite steps. NOTE: currently lowered as placeholder
    /// zeros by `CodeGenerator::generate_step`.
    pub depends_on: Vec<u32>,
    pub retry: Option<RetryPolicy>,
}
/// Retry configuration for a step, parsed from its "retry" object property.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct RetryPolicy {
    pub max_attempts: u32,
    /// Base delay between attempts.
    pub delay: DurationIR,
    /// How the delay grows between attempts (defaults to `Fixed`).
    pub backoff: BackoffStrategyIR,
}
/// Delay-growth strategy for retries, parsed from the "backoff" string
/// ("fixed" / "linear" / "exponential").
#[derive(Debug, Clone, Serialize, Deserialize)]
pub enum BackoffStrategyIR {
    Fixed,
    Linear,
    Exponential,
}
/// Serializable duration: a magnitude plus a coarse time unit.
/// Mirrors the AST's `Duration` (see `expression_to_duration`).
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
pub struct DurationIR {
    pub value: u64,
    pub unit: TimeUnitIR,
}
/// IR counterpart of the AST `TimeUnit` (1:1 variant mapping).
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
pub enum TimeUnitIR {
    Seconds,
    Minutes,
    Hours,
    Days,
}
/// Node in a workflow's pipeline graph.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub enum PipelineNodeIR {
    /// A single step, referenced by step id.
    Step(u32),
    /// Child nodes executed in parallel.
    Parallel(Vec<PipelineNodeIR>),
    /// Two-way branch on an interned condition expression.
    Conditional {
        condition: u32,
        then_branch: Box<PipelineNodeIR>,
        else_branch: Option<Box<PipelineNodeIR>>,
    },
}
/// Kind of external reference resolved by `Instruction::ResolveReference`.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub enum ReferenceType {
    Environment,
    Memory,
    Variable,
}
/// A pooled constant. `String` holds a string-pool index; the other
/// variants store their value inline.
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
pub enum ConstantValue {
    String(u32),
    Number(f64),
    Bool(bool),
    Duration(DurationIR),
    Null,
}
/// Interned string storage: `strings` holds each unique string once, and
/// `index` maps a string back to its position for O(1) re-interning.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct StringPool {
    pub strings: Vec<String>,
    pub index: HashMap<String, u32>,
}
193impl Default for StringPool {
194 fn default() -> Self {
195 Self::new()
196 }
197}
198impl StringPool {
199 pub fn new() -> Self {
200 StringPool {
201 strings: Vec::new(),
202 index: HashMap::new(),
203 }
204 }
205 pub fn intern(&mut self, s: &str) -> u32 {
206 if let Some(&idx) = self.index.get(s) {
207 idx
208 } else {
209 let idx = self.strings.len() as u32;
210 self.strings.push(s.to_string());
211 self.index.insert(s.to_string(), idx);
212 idx
213 }
214 }
215 pub fn get(&self, idx: u32) -> Option<&String> {
216 self.strings.get(idx as usize)
217 }
218}
/// Append-only pool of constants, referenced by insertion index.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ConstantPool {
    constants: Vec<ConstantValue>,
}
223impl Default for ConstantPool {
224 fn default() -> Self {
225 Self::new()
226 }
227}
228impl ConstantPool {
229 pub fn new() -> Self {
230 ConstantPool {
231 constants: Vec::new(),
232 }
233 }
234 pub fn add(&mut self, value: ConstantValue) -> u32 {
235 let idx = self.constants.len() as u32;
236 self.constants.push(value);
237 idx
238 }
239 pub fn get(&self, idx: u32) -> Option<&ConstantValue> {
240 self.constants.get(idx as usize)
241 }
242}
/// Lowers a parsed `HelixAst` into a `HelixIR` module.
pub struct CodeGenerator {
    // The IR being built; a clone is handed out by `generate`.
    ir: HelixIR,
    // Monotonically increasing symbol id source (starts at 1).
    current_id: u32,
}
247impl CodeGenerator {
248 pub fn new() -> Self {
249 CodeGenerator {
250 ir: HelixIR {
251 version: 1,
252 metadata: Metadata {
253 source_file: None,
254 compile_time: std::time::SystemTime::now()
255 .duration_since(std::time::UNIX_EPOCH)
256 .unwrap()
257 .as_secs(),
258 compiler_version: "1.0.0".to_string(),
259 checksum: None,
260 },
261 symbol_table: SymbolTable {
262 agents: HashMap::new(),
263 workflows: HashMap::new(),
264 contexts: HashMap::new(),
265 crews: HashMap::new(),
266 next_id: 1,
267 },
268 instructions: Vec::new(),
269 string_pool: StringPool::new(),
270 constants: ConstantPool::new(),
271 },
272 current_id: 1,
273 }
274 }
275 fn next_id(&mut self) -> u32 {
276 let id = self.current_id;
277 self.current_id += 1;
278 id
279 }
280 pub fn generate(&mut self, ast: &HelixAst) -> HelixIR {
281 for decl in &ast.declarations {
282 self.generate_declaration(decl);
283 }
284 self.optimize();
285 self.ir.metadata.checksum = Some(self.calculate_checksum());
286 self.ir.clone()
287 }
288 fn generate_declaration(&mut self, decl: &Declaration) {
289 match decl {
290 Declaration::Agent(agent) => self.generate_agent(agent),
291 Declaration::Workflow(workflow) => self.generate_workflow(workflow),
292 Declaration::Context(context) => self.generate_context(context),
293 Declaration::Crew(crew) => self.generate_crew(crew),
294 _ => {}
295 }
296 }
297 fn generate_agent(&mut self, agent: &AgentDecl) {
298 let id = self.next_id();
299 let name_idx = self.ir.string_pool.intern(&agent.name);
300 let model_idx = agent
301 .properties
302 .get("model")
303 .and_then(|e| e.as_string())
304 .map(|s| self.ir.string_pool.intern(&s))
305 .unwrap_or_else(|| self.ir.string_pool.intern("gpt-4"));
306 let role_idx = agent
307 .properties
308 .get("role")
309 .and_then(|e| e.as_string())
310 .map(|s| self.ir.string_pool.intern(&s))
311 .unwrap_or_else(|| self.ir.string_pool.intern("Assistant"));
312 let temperature = agent
313 .properties
314 .get("temperature")
315 .and_then(|e| e.as_number())
316 .map(|n| n as f32);
317 let max_tokens = agent
318 .properties
319 .get("max_tokens")
320 .and_then(|e| e.as_number())
321 .map(|n| n as u32);
322 let capabilities = agent
323 .capabilities
324 .as_ref()
325 .map(|caps| caps.iter().map(|c| self.ir.string_pool.intern(c)).collect())
326 .unwrap_or_default();
327 let backstory_idx = agent
328 .backstory
329 .as_ref()
330 .map(|b| {
331 let backstory_text = b.lines.join("\n");
332 self.ir.string_pool.intern(&backstory_text)
333 });
334 let symbol = AgentSymbol {
335 id,
336 name_idx,
337 model_idx,
338 role_idx,
339 temperature,
340 max_tokens,
341 capabilities,
342 backstory_idx,
343 };
344 self.ir.symbol_table.agents.insert(id, symbol);
345 self.ir.instructions.push(Instruction::DeclareAgent(id));
346 }
347 fn generate_workflow(&mut self, workflow: &WorkflowDecl) {
348 let id = self.next_id();
349 let name_idx = self.ir.string_pool.intern(&workflow.name);
350 let trigger_type = if let Some(trigger) = &workflow.trigger {
351 self.parse_trigger(trigger)
352 } else {
353 TriggerType::Manual
354 };
355 let mut step_ids = Vec::new();
356 for step in &workflow.steps {
357 let step_def = self.generate_step(step, id);
358 step_ids.push(step_def.id);
359 self.ir
360 .instructions
361 .push(Instruction::DefineStep {
362 workflow: id,
363 step: step_def,
364 });
365 }
366 let pipeline = workflow
367 .pipeline
368 .as_ref()
369 .map(|p| {
370 p.flow
371 .iter()
372 .filter_map(|node| {
373 if let PipelineNode::Step(name) = node {
374 workflow
375 .steps
376 .iter()
377 .find(|s| s.name == *name)
378 .and_then(|_s| step_ids.iter().find(|&&_sid| { true }))
379 .copied()
380 } else {
381 None
382 }
383 })
384 .collect()
385 });
386 let symbol = WorkflowSymbol {
387 id,
388 name_idx,
389 trigger_type,
390 steps: step_ids,
391 pipeline,
392 };
393 self.ir.symbol_table.workflows.insert(id, symbol);
394 self.ir.instructions.push(Instruction::DeclareWorkflow(id));
395 }
396 fn generate_step(&mut self, step: &StepDecl, _workflow_id: u32) -> StepDefinition {
397 let id = self.next_id();
398 let name_idx = self.ir.string_pool.intern(&step.name);
399 let agent_id = step
400 .agent
401 .as_ref()
402 .and_then(|name| {
403 self.ir
404 .symbol_table
405 .agents
406 .values()
407 .find(|a| self.ir.string_pool.get(a.name_idx) == Some(name))
408 .map(|a| a.id)
409 });
410 let task_idx = step.task.as_ref().map(|t| self.ir.string_pool.intern(t));
411 let timeout = step
412 .properties
413 .get("timeout")
414 .and_then(|e| self.expression_to_duration(e));
415 let parallel = step
416 .properties
417 .get("parallel")
418 .and_then(|e| e.as_bool())
419 .unwrap_or(false);
420 let depends_on = step
421 .properties
422 .get("depends_on")
423 .and_then(|e| e.as_array())
424 .map(|deps| {
425 deps.iter()
426 .filter_map(|d| { d.as_string().and_then(|_name| { Some(0) }) })
427 .collect()
428 })
429 .unwrap_or_default();
430 let retry = step
431 .properties
432 .get("retry")
433 .and_then(|e| e.as_object())
434 .and_then(|obj| self.parse_retry_config(obj));
435 StepDefinition {
436 id,
437 name_idx,
438 agent_id,
439 crew_ids: step.crew.as_ref().map(|_| Vec::new()),
440 task_idx,
441 timeout,
442 parallel,
443 depends_on,
444 retry,
445 }
446 }
447 fn generate_context(&mut self, context: &ContextDecl) {
448 let id = self.next_id();
449 let name_idx = self.ir.string_pool.intern(&context.name);
450 let environment_idx = self.ir.string_pool.intern(&context.environment);
451 let debug = context
452 .properties
453 .get("debug")
454 .and_then(|e| e.as_bool())
455 .unwrap_or(false);
456 let max_tokens = context
457 .properties
458 .get("max_tokens")
459 .and_then(|e| e.as_number())
460 .map(|n| n as u64);
461 let secrets = context
462 .secrets
463 .as_ref()
464 .map(|s| {
465 s.iter()
466 .map(|(key, secret_ref)| {
467 let key_idx = self.ir.string_pool.intern(key);
468 let secret_type = match secret_ref {
469 SecretRef::Environment(var) => {
470 SecretType::Environment(self.ir.string_pool.intern(var))
471 }
472 SecretRef::Vault(path) => {
473 SecretType::Vault(self.ir.string_pool.intern(path))
474 }
475 SecretRef::File(path) => {
476 SecretType::File(self.ir.string_pool.intern(path))
477 }
478 };
479 (key_idx, secret_type)
480 })
481 .collect()
482 })
483 .unwrap_or_default();
484 let symbol = ContextSymbol {
485 id,
486 name_idx,
487 environment_idx,
488 debug,
489 max_tokens,
490 secrets,
491 };
492 self.ir.symbol_table.contexts.insert(id, symbol);
493 self.ir.instructions.push(Instruction::DeclareContext(id));
494 }
495 fn generate_crew(&mut self, crew: &CrewDecl) {
496 let id = self.next_id();
497 let name_idx = self.ir.string_pool.intern(&crew.name);
498 let agent_ids = crew
499 .agents
500 .iter()
501 .filter_map(|name| {
502 self.ir
503 .symbol_table
504 .agents
505 .values()
506 .find(|a| self.ir.string_pool.get(a.name_idx) == Some(name))
507 .map(|a| a.id)
508 })
509 .collect();
510 let process_type = crew
511 .process_type
512 .as_ref()
513 .and_then(|p| match p.as_str() {
514 "sequential" => Some(ProcessTypeIR::Sequential),
515 "hierarchical" => Some(ProcessTypeIR::Hierarchical),
516 "parallel" => Some(ProcessTypeIR::Parallel),
517 "consensus" => Some(ProcessTypeIR::Consensus),
518 _ => None,
519 })
520 .unwrap_or(ProcessTypeIR::Sequential);
521 let manager_id = crew
522 .properties
523 .get("manager")
524 .and_then(|e| e.as_string())
525 .and_then(|name| {
526 self.ir
527 .symbol_table
528 .agents
529 .values()
530 .find(|a| self.ir.string_pool.get(a.name_idx) == Some(&name))
531 .map(|a| a.id)
532 });
533 let symbol = CrewSymbol {
534 id,
535 name_idx,
536 agent_ids,
537 process_type,
538 manager_id,
539 };
540 self.ir.symbol_table.crews.insert(id, symbol);
541 self.ir.instructions.push(Instruction::DeclareCrew(id));
542 }
543 fn parse_trigger(&mut self, trigger: &Expression) -> TriggerType {
544 match trigger {
545 Expression::String(s) | Expression::Identifier(s) => {
546 if s == "manual" {
547 TriggerType::Manual
548 } else if s.starts_with("schedule:") {
549 let cron = s.trim_start_matches("schedule:");
550 TriggerType::Schedule(self.ir.string_pool.intern(cron))
551 } else if s.starts_with("webhook:") {
552 let url = s.trim_start_matches("webhook:");
553 TriggerType::Webhook(self.ir.string_pool.intern(url))
554 } else if s.starts_with("event:") {
555 let event = s.trim_start_matches("event:");
556 TriggerType::Event(self.ir.string_pool.intern(event))
557 } else if s.starts_with("file:") {
558 let pattern = s.trim_start_matches("file:");
559 TriggerType::FileWatch(self.ir.string_pool.intern(pattern))
560 } else {
561 TriggerType::Manual
562 }
563 }
564 Expression::Object(map) => {
565 if let Some(type_expr) = map.get("type") {
566 self.parse_trigger(type_expr)
567 } else {
568 TriggerType::Manual
569 }
570 }
571 _ => TriggerType::Manual,
572 }
573 }
574 fn expression_to_duration(&mut self, expr: &Expression) -> Option<DurationIR> {
575 match expr {
576 Expression::Duration(d) => {
577 Some(DurationIR {
578 value: d.value,
579 unit: match d.unit {
580 TimeUnit::Seconds => TimeUnitIR::Seconds,
581 TimeUnit::Minutes => TimeUnitIR::Minutes,
582 TimeUnit::Hours => TimeUnitIR::Hours,
583 TimeUnit::Days => TimeUnitIR::Days,
584 },
585 })
586 }
587 _ => None,
588 }
589 }
590 fn parse_retry_config(
591 &mut self,
592 obj: &HashMap<String, Expression>,
593 ) -> Option<RetryPolicy> {
594 let max_attempts = obj
595 .get("max_attempts")
596 .and_then(|e| e.as_number())
597 .map(|n| n as u32)?;
598 let delay = obj.get("delay").and_then(|e| self.expression_to_duration(e))?;
599 let backoff = obj
600 .get("backoff")
601 .and_then(|e| e.as_string())
602 .and_then(|s| match s.as_str() {
603 "fixed" => Some(BackoffStrategyIR::Fixed),
604 "linear" => Some(BackoffStrategyIR::Linear),
605 "exponential" => Some(BackoffStrategyIR::Exponential),
606 _ => None,
607 })
608 .unwrap_or(BackoffStrategyIR::Fixed);
609 Some(RetryPolicy {
610 max_attempts,
611 delay,
612 backoff,
613 })
614 }
615 fn optimize(&mut self) {
616 self.constant_folding();
617 self.dead_code_elimination();
618 self.string_deduplication();
619 }
620 fn constant_folding(&mut self) {}
621 fn dead_code_elimination(&mut self) {}
622 fn string_deduplication(&mut self) {}
623 fn calculate_checksum(&self) -> u64 {
624 use std::collections::hash_map::DefaultHasher;
625 use std::hash::{Hash, Hasher};
626 let mut hasher = DefaultHasher::new();
627 self.ir.version.hash(&mut hasher);
628 self.ir.instructions.len().hash(&mut hasher);
629 hasher.finish()
630 }
631}
/// Serializes `HelixIR` to/from a binary format via bincode.
/// All methods require the "compiler" cargo feature; without it they return
/// `ErrorKind::Unsupported`.
#[allow(dead_code)]
pub struct BinarySerializer;
#[allow(dead_code)]
impl BinarySerializer {
    /// Encodes the IR with bincode.
    ///
    /// # Errors
    /// Wraps any bincode failure as `ErrorKind::Other`; without the
    /// "compiler" feature, always returns `ErrorKind::Unsupported`.
    pub fn serialize(ir: &HelixIR) -> Result<Vec<u8>, std::io::Error> {
        #[cfg(feature = "compiler")]
        {
            bincode::serialize(ir)
                .map_err(|e| { std::io::Error::new(std::io::ErrorKind::Other, e) })
        }
        #[cfg(not(feature = "compiler"))]
        {
            Err(
                std::io::Error::new(
                    std::io::ErrorKind::Unsupported,
                    "Binary serialization requires the 'compiler' feature",
                ),
            )
        }
    }
    /// Decodes an IR previously produced by [`Self::serialize`].
    ///
    /// # Errors
    /// Wraps any bincode failure as `ErrorKind::Other`; without the
    /// "compiler" feature, always returns `ErrorKind::Unsupported`.
    pub fn deserialize(data: &[u8]) -> Result<HelixIR, std::io::Error> {
        #[cfg(feature = "compiler")]
        {
            bincode::deserialize(data)
                .map_err(|e| { std::io::Error::new(std::io::ErrorKind::Other, e) })
        }
        #[cfg(not(feature = "compiler"))]
        {
            Err(
                std::io::Error::new(
                    std::io::ErrorKind::Unsupported,
                    "Binary deserialization requires the 'compiler' feature",
                ),
            )
        }
    }
    /// Serializes `ir` and writes the bytes to `path` (file is created or
    /// truncated).
    pub fn write_to_file(ir: &HelixIR, path: &str) -> Result<(), std::io::Error> {
        let data = Self::serialize(ir)?;
        let mut file = std::fs::File::create(path)?;
        file.write_all(&data)?;
        Ok(())
    }
    /// Reads `path` fully into memory and deserializes it into a `HelixIR`.
    pub fn read_from_file(path: &str) -> Result<HelixIR, std::io::Error> {
        let mut file = std::fs::File::open(path)?;
        let mut data = Vec::new();
        file.read_to_end(&mut data)?;
        Self::deserialize(&data)
    }
}
/// Validates and migrates the `version` field of loaded `HelixIR` modules.
#[allow(dead_code)]
pub struct VersionChecker;
683#[allow(dead_code)]
684impl VersionChecker {
685 const CURRENT_VERSION: u32 = 1;
686 const MIN_COMPATIBLE_VERSION: u32 = 1;
687 pub fn is_compatible(ir: &HelixIR) -> bool {
688 ir.version >= Self::MIN_COMPATIBLE_VERSION && ir.version <= Self::CURRENT_VERSION
689 }
690 pub fn migrate(ir: &mut HelixIR) -> Result<(), String> {
691 if ir.version < Self::MIN_COMPATIBLE_VERSION {
692 return Err(
693 format!(
694 "IR version {} is too old. Minimum supported version is {}", ir
695 .version, Self::MIN_COMPATIBLE_VERSION
696 ),
697 );
698 }
699 if ir.version > Self::CURRENT_VERSION {
700 return Err(
701 format!(
702 "IR version {} is newer than current version {}", ir.version,
703 Self::CURRENT_VERSION
704 ),
705 );
706 }
707 while ir.version < Self::CURRENT_VERSION {
708 match ir.version {
709 _ => ir.version += 1,
710 }
711 }
712 Ok(())
713 }
714}
#[cfg(test)]
mod tests {
    use super::*;

    /// Interning must deduplicate: the same string yields the same index.
    #[test]
    fn test_string_pool() {
        let mut pool = StringPool::new();
        let hello = pool.intern("hello");
        let world = pool.intern("world");
        assert_eq!(pool.intern("hello"), hello);
        assert_ne!(hello, world);
        assert_eq!(pool.get(hello), Some(&"hello".to_string()));
        assert_eq!(pool.get(world), Some(&"world".to_string()));
    }

    /// Constants are retrievable by the index `add` returns.
    #[test]
    fn test_constant_pool() {
        let mut pool = ConstantPool::new();
        let num = pool.add(ConstantValue::Number(42.0));
        let flag = pool.add(ConstantValue::Bool(true));
        assert_eq!(pool.get(num), Some(&ConstantValue::Number(42.0)));
        assert_eq!(pool.get(flag), Some(&ConstantValue::Bool(true)));
    }

    /// A freshly built version-1 IR is compatible and migrates cleanly.
    #[test]
    fn test_version_compatibility() {
        let mut ir = HelixIR {
            version: 1,
            metadata: Metadata {
                source_file: None,
                compile_time: 0,
                compiler_version: "1.0.0".to_string(),
                checksum: None,
            },
            symbol_table: SymbolTable::default(),
            instructions: Vec::new(),
            string_pool: StringPool::new(),
            constants: ConstantPool::new(),
        };
        assert!(VersionChecker::is_compatible(&ir));
        assert!(VersionChecker::migrate(&mut ir).is_ok());
    }
}