1use ::prelude::{BTreeSet, VecDeque};
2use module::{Module, Type};
3use opcode::Memarg;
4use std::ops::Range;
5
/// A function body in SSA form — the output of [`FlowGraph::from_cfg`].
#[derive(Serialize, Deserialize, Debug, Clone)]
pub struct FlowGraph {
    /// Basic blocks, indexed by `BlockId`.
    pub blocks: Vec<BasicBlock>
}
10
/// One straight-line block of SSA instructions plus its terminator.
#[derive(Default, Serialize, Deserialize, Debug, Clone)]
pub struct BasicBlock {
    /// Instructions in order; each may define a fresh SSA value.
    pub ops: Vec<(Option<ValueId>, Opcode)>,
    /// Terminator; `None` only while the block is still under construction.
    pub br: Option<Branch>
}
16
/// Identifier of an SSA value (an index drawn from `Analyzer::value_id_feed`).
#[derive(Serialize, Deserialize, Debug, Copy, Clone, Eq, PartialEq, Ord, PartialOrd)]
pub struct ValueId(pub usize);
19
/// Index of a basic block within a `FlowGraph` / `CFGraph`.
#[derive(Serialize, Deserialize, Debug, Copy, Clone, Eq, PartialEq, Ord, PartialOrd)]
pub struct BlockId(pub usize);
22
/// Block terminator in the SSA graph.
#[derive(Serialize, Deserialize, Debug, Clone, Eq, PartialEq)]
pub enum Branch {
    /// Unconditional jump.
    Br(BlockId),
    /// Two-way conditional branch on the value, choosing one of the two
    /// targets. (Which target corresponds to non-zero is defined by the
    /// source `JmpEither` semantics — not visible here.)
    BrEither(ValueId, BlockId, BlockId),
    /// Jump-table branch: the value selects a target from the list, with a
    /// default target as the last field.
    BrTable(ValueId, Vec<BlockId>, BlockId),
    /// Function return, optionally carrying a result value.
    Return(Option<ValueId>)
}
30
/// SSA instruction set. Most variants mirror a WebAssembly-style stack
/// opcode, with implicit stack operands replaced by explicit `ValueId`
/// arguments. Loads take `(Memarg, index)`; stores take
/// `(Memarg, index, value)`.
#[derive(Serialize, Deserialize, Debug, Clone, Eq, PartialEq)]
pub enum Opcode {
    /// Merge point: one incoming value per (non-backedge) predecessor.
    Phi(Vec<ValueId>),

    /// `Select(condition, val1, val2)` — picks one of the two values
    /// depending on the condition.
    Select(ValueId, ValueId, ValueId),
    // Local / global accessors.
    GetLocal(u32),
    SetLocal(u32, ValueId),
    GetGlobal(u32),
    SetGlobal(u32, ValueId),

    // Linear-memory management.
    CurrentMemory,
    GrowMemory(ValueId),

    Unreachable,

    // Direct call (function index, args) and indirect call
    // (type index, function-table index, args).
    Call(u32, Vec<ValueId>),
    CallIndirect(u32, ValueId, Vec<ValueId>),

    I32Const(i32),

    // i32 unary ops.
    I32Clz(ValueId),
    I32Ctz(ValueId),
    I32Popcnt(ValueId),

    // i32 binary arithmetic / bitwise ops.
    I32Add(ValueId, ValueId),
    I32Sub(ValueId, ValueId),
    I32Mul(ValueId, ValueId),
    I32DivU(ValueId, ValueId),
    I32DivS(ValueId, ValueId),
    I32RemU(ValueId, ValueId),
    I32RemS(ValueId, ValueId),
    I32And(ValueId, ValueId),
    I32Or(ValueId, ValueId),
    I32Xor(ValueId, ValueId),
    I32Shl(ValueId, ValueId),
    I32ShrU(ValueId, ValueId),
    I32ShrS(ValueId, ValueId),
    I32Rotl(ValueId, ValueId),
    I32Rotr(ValueId, ValueId),

    I32Eqz(ValueId),

    // i32 comparisons.
    I32Eq(ValueId, ValueId),
    I32Ne(ValueId, ValueId),
    I32LtU(ValueId, ValueId),
    I32LtS(ValueId, ValueId),
    I32LeU(ValueId, ValueId),
    I32LeS(ValueId, ValueId),
    I32GtU(ValueId, ValueId),
    I32GtS(ValueId, ValueId),
    I32GeU(ValueId, ValueId),
    I32GeS(ValueId, ValueId),

    I32WrapI64(ValueId),

    // i32 memory accesses.
    I32Load(Memarg, ValueId),
    I32Store(Memarg, ValueId, ValueId),
    I32Load8U(Memarg, ValueId),
    I32Load8S(Memarg, ValueId),
    I32Load16U(Memarg, ValueId),
    I32Load16S(Memarg, ValueId),
    I32Store8(Memarg, ValueId, ValueId),
    I32Store16(Memarg, ValueId, ValueId),

    I64Const(i64),

    // i64 unary ops.
    I64Clz(ValueId),
    I64Ctz(ValueId),
    I64Popcnt(ValueId),

    // i64 binary arithmetic / bitwise ops.
    I64Add(ValueId, ValueId),
    I64Sub(ValueId, ValueId),
    I64Mul(ValueId, ValueId),
    I64DivU(ValueId, ValueId),
    I64DivS(ValueId, ValueId),
    I64RemU(ValueId, ValueId),
    I64RemS(ValueId, ValueId),
    I64And(ValueId, ValueId),
    I64Or(ValueId, ValueId),
    I64Xor(ValueId, ValueId),
    I64Shl(ValueId, ValueId),
    I64ShrU(ValueId, ValueId),
    I64ShrS(ValueId, ValueId),
    I64Rotl(ValueId, ValueId),
    I64Rotr(ValueId, ValueId),

    I64Eqz(ValueId),

    // i64 comparisons.
    I64Eq(ValueId, ValueId),
    I64Ne(ValueId, ValueId),
    I64LtU(ValueId, ValueId),
    I64LtS(ValueId, ValueId),
    I64LeU(ValueId, ValueId),
    I64LeS(ValueId, ValueId),
    I64GtU(ValueId, ValueId),
    I64GtS(ValueId, ValueId),
    I64GeU(ValueId, ValueId),
    I64GeS(ValueId, ValueId),

    I64ExtendI32U(ValueId),
    I64ExtendI32S(ValueId),

    // i64 memory accesses.
    I64Load(Memarg, ValueId),
    I64Store(Memarg, ValueId, ValueId),
    I64Load8U(Memarg, ValueId),
    I64Load8S(Memarg, ValueId),
    I64Load16U(Memarg, ValueId),
    I64Load16S(Memarg, ValueId),
    I64Load32U(Memarg, ValueId),
    I64Load32S(Memarg, ValueId),
    I64Store8(Memarg, ValueId, ValueId),
    I64Store16(Memarg, ValueId, ValueId),
    I64Store32(Memarg, ValueId, ValueId),

    // Float constants are carried as raw bit patterns (u32 / u64).
    F32Const(u32),
    F64Const(u64),
    // Reinterpretations and conversions between int and float types.
    F32ReinterpretI32(ValueId),
    F64ReinterpretI64(ValueId),
    I32ReinterpretF32(ValueId),
    I64ReinterpretF64(ValueId),
    I32TruncSF32(ValueId),
    I32TruncUF32(ValueId),
    I32TruncSF64(ValueId),
    I32TruncUF64(ValueId),
    I64TruncSF32(ValueId),
    I64TruncUF32(ValueId),
    I64TruncSF64(ValueId),
    I64TruncUF64(ValueId),
    F32ConvertSI32(ValueId),
    F32ConvertUI32(ValueId),
    F32ConvertSI64(ValueId),
    F32ConvertUI64(ValueId),
    F64ConvertSI32(ValueId),
    F64ConvertUI32(ValueId),
    F64ConvertSI64(ValueId),
    F64ConvertUI64(ValueId),
    F32DemoteF64(ValueId),
    F64PromoteF32(ValueId),
    // f32 unary ops.
    F32Abs(ValueId),
    F32Neg(ValueId),
    F32Ceil(ValueId),
    F32Floor(ValueId),
    F32Trunc(ValueId),
    F32Nearest(ValueId),
    F32Sqrt(ValueId),

    // f32 binary ops and comparisons.
    F32Add(ValueId, ValueId),
    F32Sub(ValueId, ValueId),
    F32Mul(ValueId, ValueId),
    F32Div(ValueId, ValueId),
    F32Min(ValueId, ValueId),
    F32Max(ValueId, ValueId),
    F32Copysign(ValueId, ValueId),
    F32Eq(ValueId, ValueId),
    F32Ne(ValueId, ValueId),
    F32Lt(ValueId, ValueId),
    F32Gt(ValueId, ValueId),
    F32Le(ValueId, ValueId),
    F32Ge(ValueId, ValueId),

    // f64 unary ops.
    F64Abs(ValueId),
    F64Neg(ValueId),
    F64Ceil(ValueId),
    F64Floor(ValueId),
    F64Trunc(ValueId),
    F64Nearest(ValueId),
    F64Sqrt(ValueId),

    // f64 binary ops and comparisons.
    F64Add(ValueId, ValueId),
    F64Sub(ValueId, ValueId),
    F64Mul(ValueId, ValueId),
    F64Div(ValueId, ValueId),
    F64Min(ValueId, ValueId),
    F64Max(ValueId, ValueId),
    F64Copysign(ValueId, ValueId),
    F64Eq(ValueId, ValueId),
    F64Ne(ValueId, ValueId),
    F64Lt(ValueId, ValueId),
    F64Gt(ValueId, ValueId),
    F64Le(ValueId, ValueId),
    F64Ge(ValueId, ValueId),

    /// Call into a host-native function (native index, args).
    NativeInvoke(u32, Vec<ValueId>),
    /// `Memcpy(dest, src, n_bytes)`.
    Memcpy(ValueId, ValueId, ValueId)
}
217
/// Per-block analysis state used while converting a CFG to SSA form.
#[derive(Default, Clone, Debug)]
struct BlockInfo {
    /// Predecessor blocks (filled by `collect_graph_info`).
    pre: BTreeSet<BlockId>,
    /// Successor blocks (filled by `collect_graph_info`).
    succ: BTreeSet<BlockId>,

    /// Cycle headers this block has a back-edge into
    /// (recorded by `Analyzer::analyze_cycles`).
    backedges: BTreeSet<BlockId>,

    /// Simulated value stack at the end of this block.
    outgoing_values: Vec<ValueId>,

    /// True if this block was found to lie on a cycle.
    cycle: bool,

    /// True once `analyze_and_generate` has processed this block.
    scan_completed: bool
}
232
/// A FIFO work queue that yields each item at most once.
///
/// Items come out in breadth-first (insertion) order; re-pushing an item
/// that has already been queued — or already yielded — is a no-op.
///
/// Note: `Ord` already implies `PartialOrd`, so the previous
/// `Ord + PartialOrd` bound carried a redundant constraint.
struct DedupBfs<T: Ord + Copy> {
    /// Every value ever pushed; used to suppress duplicates.
    scanned: BTreeSet<T>,
    /// Pending values; front = most recently pushed, back = next out.
    queue: VecDeque<T>
}

impl<T: Ord + Copy> DedupBfs<T> {
    /// Creates an empty queue.
    fn new() -> DedupBfs<T> {
        DedupBfs {
            scanned: BTreeSet::new(),
            queue: VecDeque::new()
        }
    }

    /// Removes and returns the oldest pending value, or `None` when empty.
    fn next(&mut self) -> Option<T> {
        self.queue.pop_back()
    }

    /// Enqueues `val` unless it has ever been pushed before.
    fn push(&mut self, val: T) {
        // `insert` returns false for duplicates, so the former
        // `contains` + `insert` pair collapses into a single set operation.
        if self.scanned.insert(val) {
            self.queue.push_front(val);
        }
    }

    /// Reports whether `val` has ever been pushed (pending or yielded).
    fn is_scanned(&self, val: &T) -> bool {
        self.scanned.contains(val)
    }
}
263
/// Emits a binary SSA op: pops two operands off the simulated stack
/// (`b` first, then `a`), allocates a fresh value id for the result,
/// pushes it back and records `Opcode::$name(a, b)`.
macro_rules! impl_generic_binop {
    ($name:ident, $outgoing:expr, $value_id_feed:expr, $out:expr) => {
        {
            // Operands come off the stack in reverse order.
            let b = $outgoing.pop().unwrap();
            let a = $outgoing.pop().unwrap();
            let val_id = ValueId($value_id_feed.next().unwrap());
            $outgoing.push(val_id);
            $out.ops.push((Some(val_id), Opcode::$name(a, b)));
        }
    }
}
275
/// Emits a unary SSA op: pops one operand, allocates a fresh value id for
/// the result, pushes it back and records `Opcode::$name(v)`.
macro_rules! impl_generic_unop {
    ($name:ident, $outgoing:expr, $value_id_feed:expr, $out:expr) => {
        {
            let v = $outgoing.pop().unwrap();
            let val_id = ValueId($value_id_feed.next().unwrap());
            $outgoing.push(val_id);
            $out.ops.push((Some(val_id), Opcode::$name(v)));
        }
    }
}
286
/// Emits a memory load: pops the address operand, allocates a fresh value
/// id for the loaded value and records `Opcode::$name(memarg, index)`.
macro_rules! impl_mem_load {
    ($name:ident, $mem_arg:expr, $outgoing:expr, $value_id_feed:expr, $out:expr) => {
        {
            let index = $outgoing.pop().unwrap();
            let val_id = ValueId($value_id_feed.next().unwrap());
            $outgoing.push(val_id);
            $out.ops.push((Some(val_id), Opcode::$name($mem_arg, index)));
        }
    }
}
297
/// Emits a memory store: pops value then address and records
/// `Opcode::$name(memarg, index, val)`. Stores produce no SSA value.
/// NOTE(review): `$value_id_feed` is accepted but unused — presumably kept
/// so all four emit macros share a uniform call shape.
macro_rules! impl_mem_store {
    ($name:ident, $mem_arg:expr, $outgoing:expr, $value_id_feed:expr, $out:expr) => {
        {
            let val = $outgoing.pop().unwrap();
            let index = $outgoing.pop().unwrap();
            $out.ops.push((None, Opcode::$name($mem_arg, index, val)));
        }
    }
}
307
/// Driver for the CFG → SSA conversion; owns all intermediate state.
struct Analyzer<'a> {
    /// Input control-flow graph.
    cfg: &'a ::cfgraph::CFGraph,
    /// Module providing function/native signatures for call translation.
    m: &'a Module,
    /// Monotonic source of fresh SSA value ids.
    value_id_feed: Range<usize>,
    /// Per-block analysis state, parallel to `cfg.blocks`.
    block_info: Vec<BlockInfo>,
    /// Output blocks being built, parallel to `cfg.blocks`.
    basic_blocks: Vec<BasicBlock>
}
315
impl<'a> Analyzer<'a> {
    /// Creates an analyzer for `cfg`, pre-computing predecessor/successor
    /// sets for every block via `collect_graph_info`.
    fn new(cfg: &'a ::cfgraph::CFGraph, m: &'a Module) -> Analyzer<'a> {
        let mut analyzer = Analyzer {
            cfg: cfg,
            m: m,
            // Effectively unbounded feed of fresh SSA value ids.
            value_id_feed: (0..0xffffffffusize),
            block_info: vec! [ BlockInfo::default(); cfg.blocks.len() ],
            basic_blocks: vec! [ BasicBlock::default(); cfg.blocks.len() ]
        };

        collect_graph_info(cfg, &mut analyzer.block_info);

        analyzer
    }

    /// Detects cycles among blocks reachable from the entry block.
    ///
    /// Outer DFS visits each reachable block `id`; an inner dedup-BFS over
    /// successors then checks whether `id` can reach itself. When it can,
    /// `id` is marked `cycle` and the block `that` whose successor edge
    /// closes the cycle records `id` in its `backedges` set.
    /// NOTE(review): blocks already marked `cycle` are skipped by the inner
    /// scan — presumably so each cycle is attributed once; confirm against
    /// callers of `backedges`.
    fn analyze_cycles(&mut self) {
        if self.block_info.len() == 0 {
            return;
        }

        let mut dfs_stack: Vec<BlockId> = Vec::new();
        let mut dfs_reached: Vec<bool> = vec! [ false; self.block_info.len() ];

        dfs_stack.push(BlockId(0));
        dfs_reached[0] = true;

        while let Some(id) = dfs_stack.pop() {
            let mut inner_bfs: DedupBfs<BlockId> = DedupBfs::new();
            inner_bfs.push(id);

            let mut is_cycle = false;

            while let Some(that) = inner_bfs.next() {
                // Skip blocks already attributed to a cycle.
                if self.block_info[that.0].cycle {
                    continue;
                }
                for t in &self.block_info[that.0].succ {
                    if *t == id {
                        // `that` branches back to `id`: cycle found.
                        is_cycle = true;
                        break;
                    }
                    inner_bfs.push(*t);
                }

                if is_cycle {
                    // Record the closing edge `that -> id` as a back-edge.
                    self.block_info[that.0].backedges.insert(id);
                    break;
                }
            }

            if is_cycle {
                self.block_info[id.0].cycle = true;
            }

            // Continue the outer DFS over unvisited successors.
            for t in &self.block_info[id.0].succ {
                if !dfs_reached[t.0] {
                    dfs_stack.push(*t);
                    dfs_reached[t.0] = true;
                }
            }
        }
    }

    /// Computes the longest stack prefix shared by every incoming state.
    ///
    /// Returns `(prefix, None)` when all incoming stacks are identical, or
    /// `(prefix, Some(last_elements))` when exactly the topmost element
    /// differs — those elements become the operands of a phi node.
    ///
    /// # Panics
    /// If the incoming stacks have different depths, or differ anywhere
    /// other than the topmost element.
    fn longest_common_stack_sequence(incoming: &[&[ValueId]]) -> (Vec<ValueId>, Option<Vec<ValueId>>) {
        if incoming.len() == 0 {
            return (vec! [], None);
        }

        let common_len = incoming[0].len();
        for s in incoming {
            if s.len() != common_len {
                panic!("Invalid stack state: All incoming states must have the same depth");
            }
        }

        if common_len == 0 {
            return (vec! [], None);
        }

        let mut seq: Vec<ValueId> = Vec::new();

        // Extend the common prefix one slot at a time; stop at the first
        // position where any pair of adjacent states disagrees.
        for i in 0..common_len {
            let mut valid = true;
            for (a, b) in incoming.iter().zip(incoming.iter().skip(1)) {
                if a[i] != b[i] {
                    valid = false;
                    break;
                }
            }
            if valid {
                seq.push(incoming[0][i]);
            } else {
                break;
            }
        }

        if seq.len() == common_len {
            (seq, None)
        } else if seq.len() == common_len - 1 {
            // Only the top-of-stack differs: collect one value per
            // incoming state for a phi.
            let last_elements: Vec<ValueId> = incoming.iter().map(|v| *v.last().unwrap()).collect();
            (seq, Some(last_elements))
        } else {
            panic!("Invalid stack state: Got more than one different elements");
        }
    }

    /// Runs the full pipeline: cycle analysis, then SSA generation for
    /// every block in index order.
    fn generate_all(&mut self) {
        self.analyze_cycles();

        let n_blocks = self.basic_blocks.len();
        for i in 0..n_blocks {
            self.analyze_and_generate(BlockId(i));
        }
    }

    /// Generates SSA for `blk_id`, recursively ensuring all forward
    /// (non-backedge) predecessors are generated first so their outgoing
    /// stack states are available for merging.
    fn analyze_and_generate(&mut self, blk_id: BlockId) {
        if self.block_info[blk_id.0].scan_completed {
            return;
        }

        // Mark before recursing so cycles cannot cause infinite recursion.
        self.block_info[blk_id.0].scan_completed = true;

        // Predecessors reached through back-edges are excluded: their
        // stack state is not yet known (and would create a cycle).
        let pres: Vec<BlockId> = self.block_info[blk_id.0].pre
            .iter()
            .filter(|v| !self.block_info[v.0].backedges.contains(&blk_id))
            .map(|v| *v)
            .collect();

        for pre in &pres {
            self.analyze_and_generate(*pre);
        }

        let pre_stack_seqs: Vec<&[ValueId]> = pres
            .iter()
            .map(|b| self.block_info[b.0].outgoing_values.as_slice())
            .collect();

        // Merge the predecessors' stack states; a divergent top-of-stack
        // becomes a phi node at the head of this block.
        let (lcss, incoming) = Self::longest_common_stack_sequence(&pre_stack_seqs);

        self.block_info[blk_id.0].outgoing_values = lcss;

        if let Some(values) = incoming {
            let new_value = ValueId(self.value_id_feed.next().unwrap());
            self.basic_blocks[blk_id.0].ops.push((Some(new_value), Opcode::Phi(values)));
            self.block_info[blk_id.0].outgoing_values.push(new_value);
        }

        Self::gen_opcodes(
            self.m,
            &mut self.value_id_feed,
            &self.cfg.blocks[blk_id.0],
            &mut self.basic_blocks[blk_id.0],
            &mut self.block_info[blk_id.0]
        );
    }

    /// Translates the raw stack-machine opcodes of `blk` into SSA ops,
    /// simulating the value stack in `blk_info.outgoing_values`, and
    /// converts the block's terminator into a `Branch`.
    fn gen_opcodes(
        m: &Module,
        value_id_feed: &mut Range<usize>,
        blk: &::cfgraph::BasicBlock,
        out: &mut BasicBlock,
        blk_info: &mut BlockInfo
    ) {
        use ::opcode::Opcode as RawOp;

        for op in &blk.opcodes {
            // The simulated operand stack for this block.
            let outgoing = &mut blk_info.outgoing_values;
            // Set by trap-like opcodes to stop translating the block body.
            let mut terminate: bool = false;

            match *op {
                RawOp::Drop => {
                    outgoing.pop().unwrap();
                },
                RawOp::Select => {
                    // Stack order: condition on top, then val2, then val1.
                    let c = outgoing.pop().unwrap();
                    let val2 = outgoing.pop().unwrap();
                    let val1 = outgoing.pop().unwrap();

                    let val_id = ValueId(value_id_feed.next().unwrap());
                    outgoing.push(val_id);

                    out.ops.push((Some(val_id), Opcode::Select(c, val1, val2)));
                },

                RawOp::GetLocal(id) => {
                    let val_id = ValueId(value_id_feed.next().unwrap());
                    outgoing.push(val_id);

                    out.ops.push((Some(val_id), Opcode::GetLocal(id)));
                },
                RawOp::SetLocal(id) => {
                    let val = outgoing.pop().unwrap();
                    out.ops.push((None, Opcode::SetLocal(id, val)));
                },
                RawOp::TeeLocal(id) => {
                    // Tee keeps the value on the stack: peek, don't pop.
                    let val = *outgoing.last().unwrap();
                    out.ops.push((None, Opcode::SetLocal(id, val)));
                },
                RawOp::GetGlobal(id) => {
                    let val_id = ValueId(value_id_feed.next().unwrap());
                    outgoing.push(val_id);

                    out.ops.push((Some(val_id), Opcode::GetGlobal(id)));
                },
                RawOp::SetGlobal(id) => {
                    let val = outgoing.pop().unwrap();
                    out.ops.push((None, Opcode::SetGlobal(id, val)));
                },
                RawOp::CurrentMemory => {
                    let val_id = ValueId(value_id_feed.next().unwrap());
                    outgoing.push(val_id);

                    out.ops.push((Some(val_id), Opcode::CurrentMemory));
                },
                RawOp::GrowMemory => {
                    let val = outgoing.pop().unwrap();

                    let val_id = ValueId(value_id_feed.next().unwrap());
                    outgoing.push(val_id);

                    out.ops.push((Some(val_id), Opcode::GrowMemory(val)));
                },
                RawOp::Nop => {},
                RawOp::Unreachable => {
                    out.ops.push((None, Opcode::Unreachable));
                    // Nothing after a trap executes; stop translating.
                    terminate = true;
                },
                RawOp::Call(funcidx) => {
                    // Look up the callee's signature to know how many
                    // arguments to pop and whether a result is produced.
                    let f = &m.functions[funcidx as usize];
                    let Type::Func(ref ty_args, ref ty_ret) = &m.types[f.typeidx as usize];

                    let mut args: Vec<ValueId> = Vec::with_capacity(ty_args.len());
                    for _ in 0..ty_args.len() {
                        args.push(outgoing.pop().unwrap());
                    }
                    // Popped in reverse; restore declaration order.
                    args.reverse();

                    out.ops.push((
                        match ty_ret.len() {
                            0 => None,
                            _ => {
                                let val_id = ValueId(value_id_feed.next().unwrap());
                                outgoing.push(val_id);
                                Some(val_id)
                            }
                        },
                        Opcode::Call(funcidx, args)
                    ));
                },
                RawOp::CallIndirect(typeidx) => {
                    let Type::Func(ref ty_args, ref ty_ret) = &m.types[typeidx as usize];

                    // The table index sits on top of the arguments.
                    let fn_index = outgoing.pop().unwrap();

                    let mut args: Vec<ValueId> = Vec::with_capacity(ty_args.len());
                    for _ in 0..ty_args.len() {
                        args.push(outgoing.pop().unwrap());
                    }
                    args.reverse();

                    out.ops.push((
                        match ty_ret.len() {
                            0 => None,
                            _ => {
                                let val_id = ValueId(value_id_feed.next().unwrap());
                                outgoing.push(val_id);
                                Some(val_id)
                            }
                        },
                        Opcode::CallIndirect(typeidx, fn_index, args)
                    ));
                },
                RawOp::I32Const(v) => {
                    let val_id = ValueId(value_id_feed.next().unwrap());
                    outgoing.push(val_id);

                    out.ops.push((Some(val_id), Opcode::I32Const(v)));
                },

                RawOp::I32Clz => impl_generic_unop!(I32Clz, outgoing, value_id_feed, out),
                RawOp::I32Ctz => impl_generic_unop!(I32Ctz, outgoing, value_id_feed, out),
                RawOp::I32Popcnt => impl_generic_unop!(I32Popcnt, outgoing, value_id_feed, out),

                RawOp::I32Add => impl_generic_binop!(I32Add, outgoing, value_id_feed, out),
                RawOp::I32Sub => impl_generic_binop!(I32Sub, outgoing, value_id_feed, out),
                RawOp::I32Mul => impl_generic_binop!(I32Mul, outgoing, value_id_feed, out),
                RawOp::I32DivU => impl_generic_binop!(I32DivU, outgoing, value_id_feed, out),
                RawOp::I32DivS => impl_generic_binop!(I32DivS, outgoing, value_id_feed, out),
                RawOp::I32RemU => impl_generic_binop!(I32RemU, outgoing, value_id_feed, out),
                RawOp::I32RemS => impl_generic_binop!(I32RemS, outgoing, value_id_feed, out),
                RawOp::I32And => impl_generic_binop!(I32And, outgoing, value_id_feed, out),
                RawOp::I32Or => impl_generic_binop!(I32Or, outgoing, value_id_feed, out),
                RawOp::I32Xor => impl_generic_binop!(I32Xor, outgoing, value_id_feed, out),
                RawOp::I32Shl => impl_generic_binop!(I32Shl, outgoing, value_id_feed, out),
                RawOp::I32ShrU => impl_generic_binop!(I32ShrU, outgoing, value_id_feed, out),
                RawOp::I32ShrS => impl_generic_binop!(I32ShrS, outgoing, value_id_feed, out),
                RawOp::I32Rotl => impl_generic_binop!(I32Rotl, outgoing, value_id_feed, out),
                RawOp::I32Rotr => impl_generic_binop!(I32Rotr, outgoing, value_id_feed, out),

                RawOp::I32Eqz => impl_generic_unop!(I32Eqz, outgoing, value_id_feed, out),

                RawOp::I32Eq => impl_generic_binop!(I32Eq, outgoing, value_id_feed, out),
                RawOp::I32Ne => impl_generic_binop!(I32Ne, outgoing, value_id_feed, out),
                RawOp::I32LtU => impl_generic_binop!(I32LtU, outgoing, value_id_feed, out),
                RawOp::I32LtS => impl_generic_binop!(I32LtS, outgoing, value_id_feed, out),
                RawOp::I32LeU => impl_generic_binop!(I32LeU, outgoing, value_id_feed, out),
                RawOp::I32LeS => impl_generic_binop!(I32LeS, outgoing, value_id_feed, out),
                RawOp::I32GtU => impl_generic_binop!(I32GtU, outgoing, value_id_feed, out),
                RawOp::I32GtS => impl_generic_binop!(I32GtS, outgoing, value_id_feed, out),
                RawOp::I32GeU => impl_generic_binop!(I32GeU, outgoing, value_id_feed, out),
                RawOp::I32GeS => impl_generic_binop!(I32GeS, outgoing, value_id_feed, out),

                RawOp::I32WrapI64 => impl_generic_unop!(I32WrapI64, outgoing, value_id_feed, out),

                RawOp::I32Load(m) => impl_mem_load!(I32Load, m, outgoing, value_id_feed, out),
                RawOp::I32Store(m) => impl_mem_store!(I32Store, m, outgoing, value_id_feed, out),
                RawOp::I32Load8U(m) => impl_mem_load!(I32Load8U, m, outgoing, value_id_feed, out),
                RawOp::I32Load8S(m) => impl_mem_load!(I32Load8S, m, outgoing, value_id_feed, out),
                RawOp::I32Load16U(m) => impl_mem_load!(I32Load16U, m, outgoing, value_id_feed, out),
                RawOp::I32Load16S(m) => impl_mem_load!(I32Load16S, m, outgoing, value_id_feed, out),
                RawOp::I32Store8(m) => impl_mem_store!(I32Store8, m, outgoing, value_id_feed, out),
                RawOp::I32Store16(m) => impl_mem_store!(I32Store16, m, outgoing, value_id_feed, out),

                RawOp::I64Const(v) => {
                    let val_id = ValueId(value_id_feed.next().unwrap());
                    outgoing.push(val_id);

                    out.ops.push((Some(val_id), Opcode::I64Const(v)));
                },

                RawOp::I64Clz => impl_generic_unop!(I64Clz, outgoing, value_id_feed, out),
                RawOp::I64Ctz => impl_generic_unop!(I64Ctz, outgoing, value_id_feed, out),
                RawOp::I64Popcnt => impl_generic_unop!(I64Popcnt, outgoing, value_id_feed, out),

                RawOp::I64Add => impl_generic_binop!(I64Add, outgoing, value_id_feed, out),
                RawOp::I64Sub => impl_generic_binop!(I64Sub, outgoing, value_id_feed, out),
                RawOp::I64Mul => impl_generic_binop!(I64Mul, outgoing, value_id_feed, out),
                RawOp::I64DivU => impl_generic_binop!(I64DivU, outgoing, value_id_feed, out),
                RawOp::I64DivS => impl_generic_binop!(I64DivS, outgoing, value_id_feed, out),
                RawOp::I64RemU => impl_generic_binop!(I64RemU, outgoing, value_id_feed, out),
                RawOp::I64RemS => impl_generic_binop!(I64RemS, outgoing, value_id_feed, out),
                RawOp::I64And => impl_generic_binop!(I64And, outgoing, value_id_feed, out),
                RawOp::I64Or => impl_generic_binop!(I64Or, outgoing, value_id_feed, out),
                RawOp::I64Xor => impl_generic_binop!(I64Xor, outgoing, value_id_feed, out),
                RawOp::I64Shl => impl_generic_binop!(I64Shl, outgoing, value_id_feed, out),
                RawOp::I64ShrU => impl_generic_binop!(I64ShrU, outgoing, value_id_feed, out),
                RawOp::I64ShrS => impl_generic_binop!(I64ShrS, outgoing, value_id_feed, out),
                RawOp::I64Rotl => impl_generic_binop!(I64Rotl, outgoing, value_id_feed, out),
                RawOp::I64Rotr => impl_generic_binop!(I64Rotr, outgoing, value_id_feed, out),

                RawOp::I64Eqz => impl_generic_unop!(I64Eqz, outgoing, value_id_feed, out),

                RawOp::I64Eq => impl_generic_binop!(I64Eq, outgoing, value_id_feed, out),
                RawOp::I64Ne => impl_generic_binop!(I64Ne, outgoing, value_id_feed, out),
                RawOp::I64LtU => impl_generic_binop!(I64LtU, outgoing, value_id_feed, out),
                RawOp::I64LtS => impl_generic_binop!(I64LtS, outgoing, value_id_feed, out),
                RawOp::I64LeU => impl_generic_binop!(I64LeU, outgoing, value_id_feed, out),
                RawOp::I64LeS => impl_generic_binop!(I64LeS, outgoing, value_id_feed, out),
                RawOp::I64GtU => impl_generic_binop!(I64GtU, outgoing, value_id_feed, out),
                RawOp::I64GtS => impl_generic_binop!(I64GtS, outgoing, value_id_feed, out),
                RawOp::I64GeU => impl_generic_binop!(I64GeU, outgoing, value_id_feed, out),
                RawOp::I64GeS => impl_generic_binop!(I64GeS, outgoing, value_id_feed, out),

                RawOp::I64ExtendI32U => impl_generic_unop!(I64ExtendI32U, outgoing, value_id_feed, out),
                RawOp::I64ExtendI32S => impl_generic_unop!(I64ExtendI32S, outgoing, value_id_feed, out),

                RawOp::I64Load(m) => impl_mem_load!(I64Load, m, outgoing, value_id_feed, out),
                RawOp::I64Store(m) => impl_mem_store!(I64Store, m, outgoing, value_id_feed, out),
                RawOp::I64Load8U(m) => impl_mem_load!(I64Load8U, m, outgoing, value_id_feed, out),
                RawOp::I64Load8S(m) => impl_mem_load!(I64Load8S, m, outgoing, value_id_feed, out),
                RawOp::I64Load16U(m) => impl_mem_load!(I64Load16U, m, outgoing, value_id_feed, out),
                RawOp::I64Load16S(m) => impl_mem_load!(I64Load16S, m, outgoing, value_id_feed, out),
                RawOp::I64Load32U(m) => impl_mem_load!(I64Load32U, m, outgoing, value_id_feed, out),
                RawOp::I64Load32S(m) => impl_mem_load!(I64Load32S, m, outgoing, value_id_feed, out),
                RawOp::I64Store8(m) => impl_mem_store!(I64Store8, m, outgoing, value_id_feed, out),
                RawOp::I64Store16(m) => impl_mem_store!(I64Store16, m, outgoing, value_id_feed, out),
                RawOp::I64Store32(m) => impl_mem_store!(I64Store32, m, outgoing, value_id_feed, out),

                RawOp::F32Const(v) => {
                    let val_id = ValueId(value_id_feed.next().unwrap());
                    outgoing.push(val_id);

                    out.ops.push((Some(val_id), Opcode::F32Const(v)));
                },
                RawOp::F64Const(v) => {
                    let val_id = ValueId(value_id_feed.next().unwrap());
                    outgoing.push(val_id);

                    out.ops.push((Some(val_id), Opcode::F64Const(v)));
                },

                RawOp::F32ReinterpretI32 => impl_generic_unop!(F32ReinterpretI32, outgoing, value_id_feed, out),
                RawOp::F64ReinterpretI64 => impl_generic_unop!(F64ReinterpretI64, outgoing, value_id_feed, out),
                RawOp::I32ReinterpretF32 => impl_generic_unop!(I32ReinterpretF32, outgoing, value_id_feed, out),
                RawOp::I64ReinterpretF64 => impl_generic_unop!(I64ReinterpretF64, outgoing, value_id_feed, out),
                RawOp::I32TruncSF32 => impl_generic_unop!(I32TruncSF32, outgoing, value_id_feed, out),
                RawOp::I32TruncUF32 => impl_generic_unop!(I32TruncUF32, outgoing, value_id_feed, out),
                RawOp::I32TruncSF64 => impl_generic_unop!(I32TruncSF64, outgoing, value_id_feed, out),
                RawOp::I32TruncUF64 => impl_generic_unop!(I32TruncUF64, outgoing, value_id_feed, out),
                RawOp::I64TruncSF32 => impl_generic_unop!(I64TruncSF32, outgoing, value_id_feed, out),
                RawOp::I64TruncUF32 => impl_generic_unop!(I64TruncUF32, outgoing, value_id_feed, out),
                RawOp::I64TruncSF64 => impl_generic_unop!(I64TruncSF64, outgoing, value_id_feed, out),
                RawOp::I64TruncUF64 => impl_generic_unop!(I64TruncUF64, outgoing, value_id_feed, out),
                RawOp::F32ConvertSI32 => impl_generic_unop!(F32ConvertSI32, outgoing, value_id_feed, out),
                RawOp::F32ConvertUI32 => impl_generic_unop!(F32ConvertUI32, outgoing, value_id_feed, out),
                RawOp::F32ConvertSI64 => impl_generic_unop!(F32ConvertSI64, outgoing, value_id_feed, out),
                RawOp::F32ConvertUI64 => impl_generic_unop!(F32ConvertUI64, outgoing, value_id_feed, out),
                RawOp::F64ConvertSI32 => impl_generic_unop!(F64ConvertSI32, outgoing, value_id_feed, out),
                RawOp::F64ConvertUI32 => impl_generic_unop!(F64ConvertUI32, outgoing, value_id_feed, out),
                RawOp::F64ConvertSI64 => impl_generic_unop!(F64ConvertSI64, outgoing, value_id_feed, out),
                RawOp::F64ConvertUI64 => impl_generic_unop!(F64ConvertUI64, outgoing, value_id_feed, out),
                RawOp::F32DemoteF64 => impl_generic_unop!(F32DemoteF64, outgoing, value_id_feed, out),
                RawOp::F64PromoteF32 => impl_generic_unop!(F64PromoteF32, outgoing, value_id_feed, out),
                RawOp::F32Abs => impl_generic_unop!(F32Abs, outgoing, value_id_feed, out),
                RawOp::F32Neg => impl_generic_unop!(F32Neg, outgoing, value_id_feed, out),
                RawOp::F32Ceil => impl_generic_unop!(F32Ceil, outgoing, value_id_feed, out),
                RawOp::F32Floor => impl_generic_unop!(F32Floor, outgoing, value_id_feed, out),
                RawOp::F32Trunc => impl_generic_unop!(F32Trunc, outgoing, value_id_feed, out),
                RawOp::F32Nearest => impl_generic_unop!(F32Nearest, outgoing, value_id_feed, out),
                RawOp::F32Sqrt => impl_generic_unop!(F32Sqrt, outgoing, value_id_feed, out),

                RawOp::F32Add => impl_generic_binop!(F32Add, outgoing, value_id_feed, out),
                RawOp::F32Sub => impl_generic_binop!(F32Sub, outgoing, value_id_feed, out),
                RawOp::F32Mul => impl_generic_binop!(F32Mul, outgoing, value_id_feed, out),
                RawOp::F32Div => impl_generic_binop!(F32Div, outgoing, value_id_feed, out),
                RawOp::F32Min => impl_generic_binop!(F32Min, outgoing, value_id_feed, out),
                RawOp::F32Max => impl_generic_binop!(F32Max, outgoing, value_id_feed, out),
                RawOp::F32Copysign => impl_generic_binop!(F32Copysign, outgoing, value_id_feed, out),
                RawOp::F32Eq => impl_generic_binop!(F32Eq, outgoing, value_id_feed, out),
                RawOp::F32Ne => impl_generic_binop!(F32Ne, outgoing, value_id_feed, out),
                RawOp::F32Lt => impl_generic_binop!(F32Lt, outgoing, value_id_feed, out),
                RawOp::F32Gt => impl_generic_binop!(F32Gt, outgoing, value_id_feed, out),
                RawOp::F32Le => impl_generic_binop!(F32Le, outgoing, value_id_feed, out),
                RawOp::F32Ge => impl_generic_binop!(F32Ge, outgoing, value_id_feed, out),

                RawOp::F64Abs => impl_generic_unop!(F64Abs, outgoing, value_id_feed, out),
                RawOp::F64Neg => impl_generic_unop!(F64Neg, outgoing, value_id_feed, out),
                RawOp::F64Ceil => impl_generic_unop!(F64Ceil, outgoing, value_id_feed, out),
                RawOp::F64Floor => impl_generic_unop!(F64Floor, outgoing, value_id_feed, out),
                RawOp::F64Trunc => impl_generic_unop!(F64Trunc, outgoing, value_id_feed, out),
                RawOp::F64Nearest => impl_generic_unop!(F64Nearest, outgoing, value_id_feed, out),
                RawOp::F64Sqrt => impl_generic_unop!(F64Sqrt, outgoing, value_id_feed, out),

                RawOp::F64Add => impl_generic_binop!(F64Add, outgoing, value_id_feed, out),
                RawOp::F64Sub => impl_generic_binop!(F64Sub, outgoing, value_id_feed, out),
                RawOp::F64Mul => impl_generic_binop!(F64Mul, outgoing, value_id_feed, out),
                RawOp::F64Div => impl_generic_binop!(F64Div, outgoing, value_id_feed, out),
                RawOp::F64Min => impl_generic_binop!(F64Min, outgoing, value_id_feed, out),
                RawOp::F64Max => impl_generic_binop!(F64Max, outgoing, value_id_feed, out),
                RawOp::F64Copysign => impl_generic_binop!(F64Copysign, outgoing, value_id_feed, out),
                RawOp::F64Eq => impl_generic_binop!(F64Eq, outgoing, value_id_feed, out),
                RawOp::F64Ne => impl_generic_binop!(F64Ne, outgoing, value_id_feed, out),
                RawOp::F64Lt => impl_generic_binop!(F64Lt, outgoing, value_id_feed, out),
                RawOp::F64Gt => impl_generic_binop!(F64Gt, outgoing, value_id_feed, out),
                RawOp::F64Le => impl_generic_binop!(F64Le, outgoing, value_id_feed, out),
                RawOp::F64Ge => impl_generic_binop!(F64Ge, outgoing, value_id_feed, out),

                // Control-flow opcodes are already lifted into `blk.br` by
                // the CFG construction and must never appear in a body.
                RawOp::Jmp(_) | RawOp::JmpIf(_)
                    | RawOp::JmpEither(_, _) | RawOp::JmpTable(_, _)
                    | RawOp::Return => unreachable!(),

                RawOp::NativeInvoke(native_idx) => {
                    let f = &m.natives[native_idx as usize];
                    let Type::Func(ref ty_args, ref ty_ret) = &m.types[f.typeidx as usize];

                    let mut args: Vec<ValueId> = Vec::with_capacity(ty_args.len());
                    for _ in 0..ty_args.len() {
                        args.push(outgoing.pop().unwrap());
                    }
                    args.reverse();

                    out.ops.push((
                        match ty_ret.len() {
                            0 => None,
                            _ => {
                                let val_id = ValueId(value_id_feed.next().unwrap());
                                outgoing.push(val_id);
                                Some(val_id)
                            }
                        },
                        Opcode::NativeInvoke(native_idx, args)
                    ));
                },

                RawOp::Memcpy => {
                    // Stack order (top down): byte count, source, destination.
                    let n_bytes = outgoing.pop().unwrap();
                    let src = outgoing.pop().unwrap();
                    let dest = outgoing.pop().unwrap();
                    out.ops.push((None, Opcode::Memcpy(dest, src, n_bytes)));
                },

                // Unsupported opcodes become traps. NOTE(review): the
                // bound `s` (presumably a description) is unused here.
                RawOp::NotImplemented(ref s) => {
                    out.ops.push((None, Opcode::Unreachable));
                    terminate = true;
                }
            }

            if terminate {
                break;
            }
        }

        // Translate the CFG terminator, consuming condition / selector
        // values from the simulated stack where required.
        out.br = Some(match *blk.br.as_ref().unwrap() {
            ::cfgraph::Branch::Jmp(::cfgraph::BlockId(id)) => {
                Branch::Br(BlockId(id))
            },
            ::cfgraph::Branch::JmpEither(
                ::cfgraph::BlockId(a),
                ::cfgraph::BlockId(b)
            ) => {
                Branch::BrEither(
                    blk_info.outgoing_values.pop().unwrap(),
                    BlockId(a),
                    BlockId(b)
                )
            },
            ::cfgraph::Branch::JmpTable(
                ref targets,
                otherwise
            ) => {
                let mut out_targets: Vec<BlockId> = Vec::with_capacity(targets.len());

                for t in targets {
                    out_targets.push(BlockId(t.0));
                }

                Branch::BrTable(
                    blk_info.outgoing_values.pop().unwrap(),
                    out_targets,
                    BlockId(otherwise.0)
                )
            },
            ::cfgraph::Branch::Return => {
                // A non-empty stack top becomes the return value.
                Branch::Return(blk_info.outgoing_values.pop())
            }
        });
    }
}
858
impl FlowGraph {
    /// Converts a control-flow graph into SSA form.
    ///
    /// Runs the full pipeline — cycle detection, phi insertion and opcode
    /// translation — and returns the resulting basic blocks.
    pub fn from_cfg(cfg: &::cfgraph::CFGraph, m: &Module) -> FlowGraph {
        let mut analyzer = Analyzer::new(cfg, m);
        analyzer.generate_all();

        FlowGraph {
            blocks: analyzer.basic_blocks
        }
    }
}
869
870fn collect_graph_info(cfg: &::cfgraph::CFGraph, out: &mut [BlockInfo]) {
871 for (i, blk) in cfg.blocks.iter().enumerate() {
872 use cfgraph::Branch;
873 let mut br_unreachable: bool = false;
874 for op in &blk.opcodes {
875 match *op {
876 ::opcode::Opcode::Unreachable => {
877 br_unreachable = true;
878 break;
879 },
880 _ => {}
881 }
882 }
883
884 if br_unreachable {
886 continue;
887 }
888
889 match *blk.br.as_ref().unwrap() {
890 Branch::Jmp(::cfgraph::BlockId(id)) => {
891 out[id].pre.insert(BlockId(i));
892 out[i].succ.insert(BlockId(id));
893 },
894 Branch::JmpEither(::cfgraph::BlockId(a), ::cfgraph::BlockId(b)) => {
895 out[a].pre.insert(BlockId(i));
896 out[b].pre.insert(BlockId(i));
897 out[i].succ.insert(BlockId(a));
898 out[i].succ.insert(BlockId(b));
899 },
900 Branch::JmpTable(ref targets, ::cfgraph::BlockId(otherwise)) => {
901 for t in targets {
902 out[t.0].pre.insert(BlockId(i));
903 out[i].succ.insert(BlockId(t.0));
904 }
905 out[otherwise].pre.insert(BlockId(i));
906 out[i].succ.insert(BlockId(otherwise));
907 }
908 Branch::Return => {}
909 }
910 }
911}
912
#[cfg(test)]
mod tests {
    use super::*;

    /// Smoke-tests cycle analysis on a CFG containing a loop; results are
    /// only printed, not asserted.
    #[test]
    fn test_cycle_analysis() {
        use ::opcode::Opcode as RawOp;
        let opcodes = &[
            RawOp::I32Const(42),
            RawOp::Jmp(2),
            RawOp::I32Const(0),
            RawOp::JmpEither(4, 7),
            RawOp::Jmp(5),
            RawOp::I32Const(1),
            RawOp::Jmp(9),
            RawOp::I32Const(2),
            RawOp::Jmp(9),
            RawOp::Jmp(10),
            RawOp::JmpEither(2, 11),
            RawOp::Return
        ];

        let cfg = ::cfgraph::CFGraph::from_function(opcodes).unwrap();
        let m = Module::default();

        let mut analyzer = Analyzer::new(&cfg, &m);
        analyzer.generate_all();

        println!("{:?}", analyzer.block_info);
        println!("{:?}", analyzer.basic_blocks);

    }

    /// A diamond CFG: both arms push one constant, so the join block must
    /// start with a phi merging the two values.
    #[test]
    fn test_basic_ssa_transform() {
        use ::opcode::Opcode as RawOp;
        let opcodes = &[
            RawOp::I32Const(42),
            RawOp::JmpIf(4),
            RawOp::I32Const(1),
            RawOp::Jmp(5),
            RawOp::I32Const(2),
            RawOp::Return
        ];

        let cfg = ::cfgraph::CFGraph::from_function(opcodes).unwrap();
        let ssa = FlowGraph::from_cfg(&cfg, &Module::default());
        println!("{:?}", ssa);
        assert_eq!(ssa.blocks.len(), 4);
        assert_eq!(ssa.blocks[3].ops[0], (
            Some(ValueId(3)),
            Opcode::Phi(vec! [ ValueId(1), ValueId(2) ])
        ));
    }

    /// A CFG whose blocks jump back to the entry: branch targets must be
    /// translated even in the presence of cycles.
    #[test]
    fn test_circular_transform() {
        use ::opcode::Opcode as RawOp;
        let opcodes = &[
            RawOp::I32Const(42),
            RawOp::JmpIf(3),
            RawOp::Jmp(0),
            RawOp::Jmp(0),
            RawOp::Return
        ];

        let cfg = ::cfgraph::CFGraph::from_function(opcodes).unwrap();
        let ssa = FlowGraph::from_cfg(&cfg, &Module::default());
        println!("{:?}", ssa);
        assert_eq!(ssa.blocks.len(), 4);
        assert_eq!(ssa.blocks[0].br, Some(Branch::BrEither(
            ValueId(0),
            BlockId(2),
            BlockId(1)
        )));
        assert_eq!(ssa.blocks[1].br, Some(Branch::Br(BlockId(0))));
        assert_eq!(ssa.blocks[2].br, Some(Branch::Br(BlockId(0))));
    }

    /// Opcodes after `Unreachable` (here: stack-underflowing `Drop`s) must
    /// not be translated — the transform should not panic.
    #[test]
    fn test_unreachable() {
        use ::opcode::Opcode as RawOp;
        let opcodes = &[
            RawOp::I32Const(42),
            RawOp::JmpIf(12),
            RawOp::Unreachable,
            RawOp::Drop,
            RawOp::Drop,
            RawOp::Drop,
            RawOp::I32Const(100),
            RawOp::I32Const(100),
            RawOp::I32Const(100),
            RawOp::I32Const(100),
            RawOp::I32Const(100),
            RawOp::I32Const(100),
            RawOp::Return
        ];

        let cfg = ::cfgraph::CFGraph::from_function(opcodes).unwrap();
        let ssa = FlowGraph::from_cfg(&cfg, &Module::default());
    }

    /// A call must consume its arguments from the stack and push a result
    /// value according to the callee's signature.
    #[test]
    fn test_call() {
        use ::module::{Function, FunctionBody, ValType};
        use ::opcode::Opcode as RawOp;

        let opcodes = &[
            RawOp::I32Const(42),
            RawOp::Call(0),
            RawOp::Return
        ];

        let cfg = ::cfgraph::CFGraph::from_function(opcodes).unwrap();

        let mut m = Module::default();
        m.types.push(Type::Func(vec! [ ValType::I32 ], vec! [ ValType::I32 ]));
        m.functions.push(Function {
            name: None,
            typeidx: 0,
            locals: vec! [],
            body: FunctionBody { opcodes: vec! [] }
        });
        let ssa = FlowGraph::from_cfg(&cfg, &m);
        println!("{:?}", ssa);

        assert_eq!(ssa.blocks.len(), 1);
        assert_eq!(
            ssa.blocks[0].ops[0],
            (Some(ValueId(0)), Opcode::I32Const(42))
        );
        assert_eq!(
            ssa.blocks[0].ops[1],
            (Some(ValueId(1)), Opcode::Call(0, vec! [ ValueId(0) ]))
        );
        assert_eq!(
            ssa.blocks[0].br,
            Some(Branch::Return(Some(ValueId(1))))
        );
    }
}
1071}