use std::collections::HashMap;
use std::io::{Read, Write};
use serde::{Deserialize, Serialize};

use super::ast::*;
use crate::types::TimeUnit;
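
/// Complete intermediate representation of a compiled HELIX source file:
/// a version stamp, build metadata, the symbol table, a flat instruction
/// stream, and the interned string and constant pools.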
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct HelixIR {
    pub version: u32,
    pub metadata: Metadata,
    pub symbol_table: SymbolTable,
    pub instructions: Vec<Instruction>,
    pub string_pool: StringPool,
    pub constants: ConstantPool,
}
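
/// Build metadata recorded when the IR is produced.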
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Metadata {
    pub source_file: Option<String>,
    pub compile_time: u64,
    pub compiler_version: String,
    pub checksum: Option<u64>,
}

impl Default for Metadata {
    fn default() -> Self {
        Self {
            source_file: None,
            // A system clock set before the Unix epoch yields 0 instead of
            // panicking.
            compile_time: std::time::SystemTime::now()
                .duration_since(std::time::UNIX_EPOCH)
                .map(|d| d.as_secs())
                .unwrap_or(0),
            compiler_version: "1.0.0".to_string(),
            checksum: None,
        }
    }
}
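
/// All declared entities, keyed by the numeric IDs handed out during
/// code generation (IDs start at 1).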
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct SymbolTable {
    pub agents: HashMap<u32, AgentSymbol>,
    pub workflows: HashMap<u32, WorkflowSymbol>,
    pub contexts: HashMap<u32, ContextSymbol>,
    pub crews: HashMap<u32, CrewSymbol>,
    pub next_id: u32,
}

impl Default for SymbolTable {
    fn default() -> Self {
        Self {
            agents: HashMap::new(),
            workflows: HashMap::new(),
            contexts: HashMap::new(),
            crews: HashMap::new(),
            next_id: 1,
        }
    }
}
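
/// Compiled form of an `agent` declaration; `*_idx` fields are indices
/// into the string pool.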
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct AgentSymbol {
    pub id: u32,
    pub name_idx: u32,
    pub model_idx: u32,
    pub role_idx: u32,
    pub temperature: Option<f32>,
    pub max_tokens: Option<u32>,
    pub capabilities: Vec<u32>,
    pub backstory_idx: Option<u32>,
}

/// Compiled form of a `workflow` declaration.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct WorkflowSymbol {
    pub id: u32,
    pub name_idx: u32,
    pub trigger_type: TriggerType,
    pub steps: Vec<u32>,
    pub pipeline: Option<Vec<u32>>,
}

/// Compiled form of a `context` declaration.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ContextSymbol {
    pub id: u32,
    pub name_idx: u32,
    pub environment_idx: u32,
    pub debug: bool,
    pub max_tokens: Option<u64>,
    pub secrets: HashMap<u32, SecretType>,
}

/// Compiled form of a `crew` declaration.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct CrewSymbol {
    pub id: u32,
    pub name_idx: u32,
    pub agent_ids: Vec<u32>,
    pub process_type: ProcessTypeIR,
    pub manager_id: Option<u32>,
}
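
/// How a workflow is started; payload-carrying variants hold string-pool
/// indices.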
#[derive(Debug, Clone, Serialize, Deserialize)]
pub enum TriggerType {
    Manual,
    /// String-pool index of a cron expression.
    Schedule(u32),
    /// String-pool index of a webhook URL.
    Webhook(u32),
    /// String-pool index of an event name.
    Event(u32),
    /// String-pool index of a file pattern to watch.
    FileWatch(u32),
}

/// Execution strategy for a crew.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub enum ProcessTypeIR {
    Sequential,
    Hierarchical,
    Parallel,
    Consensus,
}

/// Where a secret's value is sourced from; each variant holds a
/// string-pool index (variable name or path).
#[derive(Debug, Clone, Serialize, Deserialize)]
pub enum SecretType {
    Environment(u32),
    Vault(u32),
    File(u32),
}
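
/// A single operation in the flat IR instruction stream. `u32` operands
/// are symbol IDs or string-pool indices, depending on the variant.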
#[derive(Debug, Clone, Serialize, Deserialize)]
pub enum Instruction {
    DeclareAgent(u32),
    DeclareWorkflow(u32),
    DeclareContext(u32),
    DeclareCrew(u32),
    SetProperty { target: u32, key: u32, value: ConstantValue },
    SetCapability { agent: u32, capability: u32 },
    SetSecret { context: u32, key: u32, secret: SecretType },
    DefineStep { workflow: u32, step: StepDefinition },
    DefinePipeline { workflow: u32, nodes: Vec<PipelineNodeIR> },
    ResolveReference { ref_type: ReferenceType, index: u32 },
    SetMetadata { key: u32, value: u32 },
}

#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct StepDefinition {
    pub id: u32,
    pub name_idx: u32,
    pub agent_id: Option<u32>,
    pub crew_ids: Option<Vec<u32>>,
    pub task_idx: Option<u32>,
    pub timeout: Option<DurationIR>,
    pub parallel: bool,
    pub depends_on: Vec<u32>,
    pub retry: Option<RetryPolicy>,
}

#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct RetryPolicy {
    pub max_attempts: u32,
    pub delay: DurationIR,
    pub backoff: BackoffStrategyIR,
}

#[derive(Debug, Clone, Serialize, Deserialize)]
pub enum BackoffStrategyIR {
    Fixed,
    Linear,
    Exponential,
}

#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
pub struct DurationIR {
    pub value: u64,
    pub unit: TimeUnitIR,
}

#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
pub enum TimeUnitIR {
    Seconds,
    Minutes,
    Hours,
    Days,
}

#[derive(Debug, Clone, Serialize, Deserialize)]
pub enum PipelineNodeIR {
    Step(u32),
    Parallel(Vec<PipelineNodeIR>),
    Conditional {
        condition: u32,
        then_branch: Box<PipelineNodeIR>,
        else_branch: Option<Box<PipelineNodeIR>>,
    },
}

#[derive(Debug, Clone, Serialize, Deserialize)]
pub enum ReferenceType {
    Environment,
    Memory,
    Variable,
}

#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
pub enum ConstantValue {
    /// Index into the string pool.
    String(u32),
    Number(f64),
    Bool(bool),
    Duration(DurationIR),
    Null,
}
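
/// Deduplicating string table: `intern` stores each unique string once and
/// returns a stable index, and `get` resolves an index back to the string.
///
/// ```ignore
/// let mut pool = StringPool::new();
/// let a = pool.intern("gpt-4");
/// assert_eq!(pool.intern("gpt-4"), a); // interning is idempotent
/// ```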
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct StringPool {
    pub strings: Vec<String>,
    pub index: HashMap<String, u32>,
}

impl Default for StringPool {
    fn default() -> Self {
        Self::new()
    }
}

impl StringPool {
    pub fn new() -> Self {
        StringPool {
            strings: Vec::new(),
            index: HashMap::new(),
        }
    }

    pub fn intern(&mut self, s: &str) -> u32 {
        if let Some(&idx) = self.index.get(s) {
            idx
        } else {
            let idx = self.strings.len() as u32;
            self.strings.push(s.to_string());
            self.index.insert(s.to_string(), idx);
            idx
        }
    }

    pub fn get(&self, idx: u32) -> Option<&String> {
        self.strings.get(idx as usize)
    }
}
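
/// Append-only pool of literal values referenced by instructions; `add`
/// returns the index of the stored constant.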
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ConstantPool {
    constants: Vec<ConstantValue>,
}

impl Default for ConstantPool {
    fn default() -> Self {
        Self::new()
    }
}

impl ConstantPool {
    pub fn new() -> Self {
        ConstantPool {
            constants: Vec::new(),
        }
    }

    pub fn add(&mut self, value: ConstantValue) -> u32 {
        let idx = self.constants.len() as u32;
        self.constants.push(value);
        idx
    }

    pub fn get(&self, idx: u32) -> Option<&ConstantValue> {
        self.constants.get(idx as usize)
    }
}
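
/// Lowers a parsed `HelixAst` into `HelixIR`, assigning sequential IDs to
/// symbols and interning all strings into the shared pool.
///
/// Minimal usage sketch (assumes `ast` was produced by the parser):
///
/// ```ignore
/// let mut generator = CodeGenerator::new();
/// let ir: HelixIR = generator.generate(&ast);
/// ```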
pub struct CodeGenerator {
    ir: HelixIR,
    current_id: u32,
}

impl CodeGenerator {
    pub fn new() -> Self {
        CodeGenerator {
            ir: HelixIR {
                version: 1,
                metadata: Metadata::default(),
                symbol_table: SymbolTable::default(),
                instructions: Vec::new(),
                string_pool: StringPool::new(),
                constants: ConstantPool::new(),
            },
            current_id: 1,
        }
    }

    fn next_id(&mut self) -> u32 {
        let id = self.current_id;
        self.current_id += 1;
        id
    }
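
    /// Lower the whole AST: emit declarations in order, run the (currently
    /// no-op) optimization passes, and stamp a checksum into the metadata.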
    pub fn generate(&mut self, ast: &HelixAst) -> HelixIR {
        for decl in &ast.declarations {
            self.generate_declaration(decl);
        }
        self.optimize();
        self.ir.metadata.checksum = Some(self.calculate_checksum());
        self.ir.clone()
    }

    fn generate_declaration(&mut self, decl: &Declaration) {
        match decl {
            Declaration::Agent(agent) => self.generate_agent(agent),
            Declaration::Workflow(workflow) => self.generate_workflow(workflow),
            Declaration::Context(context) => self.generate_context(context),
            Declaration::Crew(crew) => self.generate_crew(crew),
            // Other declaration kinds produce no IR.
            _ => {}
        }
    }

    fn generate_agent(&mut self, agent: &AgentDecl) {
        let id = self.next_id();
        let name_idx = self.ir.string_pool.intern(&agent.name);
        let model_idx = agent
            .properties
            .get("model")
            .and_then(|e| e.as_string())
            .map(|s| self.ir.string_pool.intern(&s))
            .unwrap_or_else(|| self.ir.string_pool.intern("gpt-4"));
        let role_idx = agent
            .properties
            .get("role")
            .and_then(|e| e.as_string())
            .map(|s| self.ir.string_pool.intern(&s))
            .unwrap_or_else(|| self.ir.string_pool.intern("Assistant"));
        let temperature = agent
            .properties
            .get("temperature")
            .and_then(|e| e.as_number())
            .map(|n| n as f32);
        let max_tokens = agent
            .properties
            .get("max_tokens")
            .and_then(|e| e.as_number())
            .map(|n| n as u32);
        let capabilities = agent
            .capabilities
            .as_ref()
            .map(|caps| caps.iter().map(|c| self.ir.string_pool.intern(c)).collect())
            .unwrap_or_default();
        let backstory_idx = agent.backstory.as_ref().map(|b| {
            let backstory_text = b.lines.join("\n");
            self.ir.string_pool.intern(&backstory_text)
        });
        let symbol = AgentSymbol {
            id,
            name_idx,
            model_idx,
            role_idx,
            temperature,
            max_tokens,
            capabilities,
            backstory_idx,
        };
        self.ir.symbol_table.agents.insert(id, symbol);
        self.ir.instructions.push(Instruction::DeclareAgent(id));
    }

    fn generate_workflow(&mut self, workflow: &WorkflowDecl) {
        let id = self.next_id();
        let name_idx = self.ir.string_pool.intern(&workflow.name);
        let trigger_type = if let Some(trigger) = &workflow.trigger {
            self.parse_trigger(trigger)
        } else {
            TriggerType::Manual
        };
        let mut step_ids = Vec::new();
        for step in &workflow.steps {
            let step_def = self.generate_step(step, id);
            step_ids.push(step_def.id);
            self.ir.instructions.push(Instruction::DefineStep {
                workflow: id,
                step: step_def,
            });
        }
        // `step_ids` is parallel to `workflow.steps`, so a pipeline entry is
        // resolved by the position of its named step in declaration order.
        let pipeline = workflow.pipeline.as_ref().map(|p| {
            p.flow
                .iter()
                .filter_map(|node| {
                    if let PipelineNode::Step(name) = node {
                        workflow
                            .steps
                            .iter()
                            .position(|s| s.name == *name)
                            .and_then(|pos| step_ids.get(pos))
                            .copied()
                    } else {
                        None
                    }
                })
                .collect()
        });
        let symbol = WorkflowSymbol {
            id,
            name_idx,
            trigger_type,
            steps: step_ids,
            pipeline,
        };
        self.ir.symbol_table.workflows.insert(id, symbol);
        self.ir.instructions.push(Instruction::DeclareWorkflow(id));
    }

    fn generate_step(&mut self, step: &StepDecl, _workflow_id: u32) -> StepDefinition {
        let id = self.next_id();
        let name_idx = self.ir.string_pool.intern(&step.name);
        let agent_id = step.agent.as_ref().and_then(|name| {
            self.ir
                .symbol_table
                .agents
                .values()
                .find(|a| self.ir.string_pool.get(a.name_idx) == Some(name))
                .map(|a| a.id)
        });
        let task_idx = step.task.as_ref().map(|t| self.ir.string_pool.intern(t));
        let timeout = step
            .properties
            .get("timeout")
            .and_then(|e| self.expression_to_duration(e));
        let parallel = step
            .properties
            .get("parallel")
            .and_then(|e| e.as_bool())
            .unwrap_or(false);
        // Dependencies resolve against steps that were already defined;
        // forward references are silently dropped in this single pass.
        let depends_on = step
            .properties
            .get("depends_on")
            .and_then(|e| e.as_array())
            .map(|deps| {
                deps.iter()
                    .filter_map(|d| {
                        let name = d.as_string()?;
                        self.ir.instructions.iter().find_map(|inst| match inst {
                            Instruction::DefineStep { step: def, .. }
                                if self.ir.string_pool.get(def.name_idx) == Some(&name) =>
                            {
                                Some(def.id)
                            }
                            _ => None,
                        })
                    })
                    .collect()
            })
            .unwrap_or_default();
        let retry = step
            .properties
            .get("retry")
            .and_then(|e| e.as_object())
            .and_then(|obj| self.parse_retry_config(obj));
        StepDefinition {
            id,
            name_idx,
            agent_id,
            // Crew membership is recorded but not yet resolved to crew IDs.
            crew_ids: step.crew.as_ref().map(|_| Vec::new()),
            task_idx,
            timeout,
            parallel,
            depends_on,
            retry,
        }
    }

    fn generate_context(&mut self, context: &ContextDecl) {
        let id = self.next_id();
        let name_idx = self.ir.string_pool.intern(&context.name);
        let environment_idx = self.ir.string_pool.intern(&context.environment);
        let debug = context
            .properties
            .get("debug")
            .and_then(|e| e.as_bool())
            .unwrap_or(false);
        let max_tokens = context
            .properties
            .get("max_tokens")
            .and_then(|e| e.as_number())
            .map(|n| n as u64);
        let secrets = context
            .secrets
            .as_ref()
            .map(|s| {
                s.iter()
                    .map(|(key, secret_ref)| {
                        let key_idx = self.ir.string_pool.intern(key);
                        let secret_type = match secret_ref {
                            SecretRef::Environment(var) => {
                                SecretType::Environment(self.ir.string_pool.intern(var))
                            }
                            SecretRef::Vault(path) => {
                                SecretType::Vault(self.ir.string_pool.intern(path))
                            }
                            SecretRef::File(path) => {
                                SecretType::File(self.ir.string_pool.intern(path))
                            }
                        };
                        (key_idx, secret_type)
                    })
                    .collect()
            })
            .unwrap_or_default();
        let symbol = ContextSymbol {
            id,
            name_idx,
            environment_idx,
            debug,
            max_tokens,
            secrets,
        };
        self.ir.symbol_table.contexts.insert(id, symbol);
        self.ir.instructions.push(Instruction::DeclareContext(id));
    }

    fn generate_crew(&mut self, crew: &CrewDecl) {
        let id = self.next_id();
        let name_idx = self.ir.string_pool.intern(&crew.name);
        let agent_ids = crew
            .agents
            .iter()
            .filter_map(|name| {
                self.ir
                    .symbol_table
                    .agents
                    .values()
                    .find(|a| self.ir.string_pool.get(a.name_idx) == Some(name))
                    .map(|a| a.id)
            })
            .collect();
        let process_type = crew
            .process_type
            .as_ref()
            .and_then(|p| match p.as_str() {
                "sequential" => Some(ProcessTypeIR::Sequential),
                "hierarchical" => Some(ProcessTypeIR::Hierarchical),
                "parallel" => Some(ProcessTypeIR::Parallel),
                "consensus" => Some(ProcessTypeIR::Consensus),
                _ => None,
            })
            .unwrap_or(ProcessTypeIR::Sequential);
        let manager_id = crew
            .properties
            .get("manager")
            .and_then(|e| e.as_string())
            .and_then(|name| {
                self.ir
                    .symbol_table
                    .agents
                    .values()
                    .find(|a| self.ir.string_pool.get(a.name_idx) == Some(&name))
                    .map(|a| a.id)
            });
        let symbol = CrewSymbol {
            id,
            name_idx,
            agent_ids,
            process_type,
            manager_id,
        };
        self.ir.symbol_table.crews.insert(id, symbol);
        self.ir.instructions.push(Instruction::DeclareCrew(id));
    }

    fn parse_trigger(&mut self, trigger: &Expression) -> TriggerType {
        match trigger {
            // String triggers use a "<kind>:<payload>" convention.
            Expression::String(s) | Expression::Identifier(s) => {
                if s == "manual" {
                    TriggerType::Manual
                } else if let Some(cron) = s.strip_prefix("schedule:") {
                    TriggerType::Schedule(self.ir.string_pool.intern(cron))
                } else if let Some(url) = s.strip_prefix("webhook:") {
                    TriggerType::Webhook(self.ir.string_pool.intern(url))
                } else if let Some(event) = s.strip_prefix("event:") {
                    TriggerType::Event(self.ir.string_pool.intern(event))
                } else if let Some(pattern) = s.strip_prefix("file:") {
                    TriggerType::FileWatch(self.ir.string_pool.intern(pattern))
                } else {
                    TriggerType::Manual
                }
            }
            Expression::Object(map) => {
                if let Some(type_expr) = map.get("type") {
                    self.parse_trigger(type_expr)
                } else {
                    TriggerType::Manual
                }
            }
            _ => TriggerType::Manual,
        }
    }

    fn expression_to_duration(&mut self, expr: &Expression) -> Option<DurationIR> {
        match expr {
            Expression::Duration(d) => Some(DurationIR {
                value: d.value,
                unit: match d.unit {
                    TimeUnit::Seconds => TimeUnitIR::Seconds,
                    TimeUnit::Minutes => TimeUnitIR::Minutes,
                    TimeUnit::Hours => TimeUnitIR::Hours,
                    TimeUnit::Days => TimeUnitIR::Days,
                },
            }),
            _ => None,
        }
    }

    fn parse_retry_config(&mut self, obj: &HashMap<String, Expression>) -> Option<RetryPolicy> {
        let max_attempts = obj
            .get("max_attempts")
            .and_then(|e| e.as_number())
            .map(|n| n as u32)?;
        let delay = obj.get("delay").and_then(|e| self.expression_to_duration(e))?;
        let backoff = obj
            .get("backoff")
            .and_then(|e| e.as_string())
            .and_then(|s| match s.as_str() {
                "fixed" => Some(BackoffStrategyIR::Fixed),
                "linear" => Some(BackoffStrategyIR::Linear),
                "exponential" => Some(BackoffStrategyIR::Exponential),
                _ => None,
            })
            .unwrap_or(BackoffStrategyIR::Fixed);
        Some(RetryPolicy {
            max_attempts,
            delay,
            backoff,
        })
    }

    fn optimize(&mut self) {
        self.constant_folding();
        self.dead_code_elimination();
        self.string_deduplication();
    }

    // The passes below are placeholders: they currently perform no
    // transformations.
    fn constant_folding(&mut self) {}

    fn dead_code_elimination(&mut self) {}

    fn string_deduplication(&mut self) {}

    /// Cheap structural checksum over the IR version and instruction count.
    /// `DefaultHasher` output is not guaranteed to be stable across Rust
    /// releases, so compare checksums only between identical builds.
    fn calculate_checksum(&self) -> u64 {
        use std::collections::hash_map::DefaultHasher;
        use std::hash::{Hash, Hasher};
        let mut hasher = DefaultHasher::new();
        self.ir.version.hash(&mut hasher);
        self.ir.instructions.len().hash(&mut hasher);
        hasher.finish()
    }
}
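
/// Serializes `HelixIR` to and from a compact binary form using `bincode`.
/// Both directions require the `compiler` feature; without it the calls
/// return `ErrorKind::Unsupported`.
///
/// ```ignore
/// // Sketch: round-trip an IR value (requires the `compiler` feature).
/// let bytes = BinarySerializer::serialize(&ir)?;
/// let restored = BinarySerializer::deserialize(&bytes)?;
/// ```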
#[allow(dead_code)]
pub struct BinarySerializer;

#[allow(dead_code)]
impl BinarySerializer {
    pub fn serialize(ir: &HelixIR) -> Result<Vec<u8>, std::io::Error> {
        #[cfg(feature = "compiler")]
        {
            bincode::serialize(ir)
                .map_err(|e| std::io::Error::new(std::io::ErrorKind::Other, e))
        }
        #[cfg(not(feature = "compiler"))]
        {
            Err(std::io::Error::new(
                std::io::ErrorKind::Unsupported,
                "Binary serialization requires the 'compiler' feature",
            ))
        }
    }

    pub fn deserialize(data: &[u8]) -> Result<HelixIR, std::io::Error> {
        #[cfg(feature = "compiler")]
        {
            bincode::deserialize(data)
                .map_err(|e| std::io::Error::new(std::io::ErrorKind::Other, e))
        }
        #[cfg(not(feature = "compiler"))]
        {
            Err(std::io::Error::new(
                std::io::ErrorKind::Unsupported,
                "Binary deserialization requires the 'compiler' feature",
            ))
        }
    }

    pub fn write_to_file(ir: &HelixIR, path: &str) -> Result<(), std::io::Error> {
        let data = Self::serialize(ir)?;
        let mut file = std::fs::File::create(path)?;
        file.write_all(&data)?;
        Ok(())
    }

    pub fn read_from_file(path: &str) -> Result<HelixIR, std::io::Error> {
        let mut file = std::fs::File::open(path)?;
        let mut data = Vec::new();
        file.read_to_end(&mut data)?;
        Self::deserialize(&data)
    }
}
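
/// Checks whether a loaded IR's version is supported and migrates old
/// (but still compatible) versions up to `CURRENT_VERSION`.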
#[allow(dead_code)]
pub struct VersionChecker;

#[allow(dead_code)]
impl VersionChecker {
    const CURRENT_VERSION: u32 = 1;
    const MIN_COMPATIBLE_VERSION: u32 = 1;

    pub fn is_compatible(ir: &HelixIR) -> bool {
        ir.version >= Self::MIN_COMPATIBLE_VERSION && ir.version <= Self::CURRENT_VERSION
    }

    pub fn migrate(ir: &mut HelixIR) -> Result<(), String> {
        if ir.version < Self::MIN_COMPATIBLE_VERSION {
            return Err(format!(
                "IR version {} is too old. Minimum supported version is {}",
                ir.version,
                Self::MIN_COMPATIBLE_VERSION
            ));
        }
        if ir.version > Self::CURRENT_VERSION {
            return Err(format!(
                "IR version {} is newer than current version {}",
                ir.version,
                Self::CURRENT_VERSION
            ));
        }
        // No step-by-step migrations are defined yet; just bump the version.
        while ir.version < Self::CURRENT_VERSION {
            ir.version += 1;
        }
        Ok(())
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_string_pool() {
        let mut pool = StringPool::new();
        let idx1 = pool.intern("hello");
        let idx2 = pool.intern("world");
        let idx3 = pool.intern("hello");
        assert_eq!(idx1, idx3);
        assert_ne!(idx1, idx2);
        assert_eq!(pool.get(idx1), Some(&"hello".to_string()));
        assert_eq!(pool.get(idx2), Some(&"world".to_string()));
    }

    #[test]
    fn test_constant_pool() {
        let mut pool = ConstantPool::new();
        let idx1 = pool.add(ConstantValue::Number(42.0));
        let idx2 = pool.add(ConstantValue::Bool(true));
        assert_eq!(pool.get(idx1), Some(&ConstantValue::Number(42.0)));
        assert_eq!(pool.get(idx2), Some(&ConstantValue::Bool(true)));
    }

    #[test]
    fn test_version_compatibility() {
        let mut ir = HelixIR {
            version: 1,
            metadata: Metadata {
                source_file: None,
                compile_time: 0,
                compiler_version: "1.0.0".to_string(),
                checksum: None,
            },
            symbol_table: SymbolTable::default(),
            instructions: Vec::new(),
            string_pool: StringPool::new(),
            constants: ConstantPool::new(),
        };
        assert!(VersionChecker::is_compatible(&ir));
        assert!(VersionChecker::migrate(&mut ir).is_ok());
    }
}
758}