use alloc::sync::Arc;
use core::ops::ControlFlow;
use crate::{
BreakReason, Host, Stopper,
continuation_stack::{Continuation, ContinuationStack},
execution::{
ExecutionState, InternalBreakReason, execute_op, finalize_clock_cycle_with_continuation,
finalize_clock_cycle_with_continuation_and_op_helpers,
},
mast::{BasicBlockNode, MastForest, MastNodeId},
operation::Operation,
processor::Processor,
tracer::Tracer,
};
/// Executes a basic block node starting from its very first operation.
///
/// Follows the clock-cycle/continuation protocol used throughout this file:
/// 1. start a clock cycle for entering the node (`Continuation::StartNode`),
/// 2. run the node's `before_enter` decorators,
/// 3. finalize the cycle, recording (batch 0, op 0) as the resume point in
///    case execution stops here,
/// 4. execute the first op batch (if the block has any batches at all),
/// 5. hand the remaining batches — and block finalization — off to
///    [`execute_basic_block_node_from_batch`].
///
/// Any `Break` raised by the decorators, the stopper, or an operation is
/// propagated to the caller as an `InternalBreakReason`.
#[inline(always)]
pub(super) fn execute_basic_block_node_from_start<P, H, S, T>(
    state: &mut ExecutionState<'_, P, H, S, T>,
    basic_block_node: &BasicBlockNode,
    node_id: MastNodeId,
    current_forest: &Arc<MastForest>,
) -> ControlFlow<InternalBreakReason>
where
    P: Processor,
    H: Host,
    S: Stopper<Processor = P>,
    T: Tracer<Processor = P>,
{
    state.tracer.start_clock_cycle(
        state.processor,
        Continuation::StartNode(node_id),
        state.continuation_stack,
        current_forest,
    );

    state
        .processor
        .execute_before_enter_decorators(node_id, current_forest, state.host)
        .map_break(InternalBreakReason::from)?;

    // If execution stops during finalization, resume at the first operation of
    // the first batch.
    finalize_clock_cycle_with_continuation(
        state.processor,
        state.tracer,
        state.stopper,
        state.continuation_stack,
        || {
            Some(Continuation::ResumeBasicBlock {
                node_id,
                batch_index: 0,
                op_idx_in_batch: 0,
            })
        },
        current_forest,
    )
    .map_break(InternalBreakReason::from)?;

    // Execute the first batch here; `execute_basic_block_node_from_batch`
    // emits a RESPAN cycle before each batch it runs, which must not happen
    // for the block's first batch.
    if !basic_block_node.op_batches().is_empty() {
        execute_op_batch(state, basic_block_node, 0, 0, 0, current_forest)?;
    }

    execute_basic_block_node_from_batch(state, basic_block_node, node_id, 1, current_forest)
}
/// Resumes execution of a basic block node from a specific operation within a
/// specific batch (i.e. from a previously recorded
/// `Continuation::ResumeBasicBlock` resume point).
///
/// Finishes the partially-executed batch starting at `start_op_idx_in_batch`,
/// then continues with the remaining batches (and block finalization) via
/// [`execute_basic_block_node_from_batch`].
#[inline(always)]
pub(super) fn execute_basic_block_node_from_op_idx<P, H, S, T>(
    state: &mut ExecutionState<'_, P, H, S, T>,
    basic_block_node: &BasicBlockNode,
    node_id: MastNodeId,
    start_batch_index: usize,
    start_op_idx_in_batch: usize,
    current_forest: &Arc<MastForest>,
) -> ControlFlow<InternalBreakReason>
where
    P: Processor,
    H: Host,
    S: Stopper<Processor = P>,
    T: Tracer<Processor = P>,
{
    // Total number of operations in all batches preceding the one we resume
    // in; used to translate batch-relative op indices into block-relative ones.
    let batch_offset_in_block = basic_block_node
        .op_batches()
        .iter()
        .take(start_batch_index)
        .map(|batch| batch.ops().len())
        .sum();

    // Finish the batch we stopped in...
    execute_op_batch(
        state,
        basic_block_node,
        start_batch_index,
        start_op_idx_in_batch,
        batch_offset_in_block,
        current_forest,
    )?;

    // ... then run all batches after it.
    execute_basic_block_node_from_batch(
        state,
        basic_block_node,
        node_id,
        start_batch_index + 1,
        current_forest,
    )
}
/// Executes the batches of `basic_block_node` starting at `start_batch_index`,
/// then finalizes the block via [`finish_basic_block`].
///
/// Every batch handled here is preceded by a `Respan` clock cycle. Both
/// callers in this file pass an index one past an already-executed batch, so
/// the block's first batch never goes through the RESPAN path.
#[inline(always)]
pub(super) fn execute_basic_block_node_from_batch<P, H, S, T>(
    state: &mut ExecutionState<'_, P, H, S, T>,
    basic_block_node: &BasicBlockNode,
    node_id: MastNodeId,
    start_batch_index: usize,
    current_forest: &Arc<MastForest>,
) -> ControlFlow<InternalBreakReason>
where
    P: Processor,
    H: Host,
    S: Stopper<Processor = P>,
    T: Tracer<Processor = P>,
{
    // Number of operations in all batches before `start_batch_index`; used to
    // convert batch-relative op indices into block-relative ones. Updated as
    // each batch completes.
    let mut batch_offset_in_block = basic_block_node
        .op_batches()
        .iter()
        .take(start_batch_index)
        .map(|batch| batch.ops().len())
        .sum();

    for (batch_index, op_batch) in
        basic_block_node.op_batches().iter().enumerate().skip(start_batch_index)
    {
        // Emit the RESPAN clock cycle that precedes this batch.
        {
            state.tracer.start_clock_cycle(
                state.processor,
                Continuation::Respan { node_id, batch_index },
                state.continuation_stack,
                current_forest,
            );

            // If execution stops during finalization, resume at the first
            // operation of this batch.
            finalize_clock_cycle_with_continuation(
                state.processor,
                state.tracer,
                state.stopper,
                state.continuation_stack,
                || {
                    Some(Continuation::ResumeBasicBlock {
                        node_id,
                        batch_index,
                        op_idx_in_batch: 0,
                    })
                },
                current_forest,
            )
            .map_break(InternalBreakReason::from)?;
        }

        execute_op_batch(
            state,
            basic_block_node,
            batch_index,
            0,
            batch_offset_in_block,
            current_forest,
        )?;

        batch_offset_in_block += op_batch.ops().len();
    }

    finish_basic_block(state, basic_block_node, node_id, current_forest)
        .map_break(InternalBreakReason::from)
}
/// Emits the final clock cycle of a basic block, then runs the block's
/// end-of-block decorators followed by its `after_exit` decorators.
///
/// Note that, unlike the other helpers in this file, this returns a plain
/// `BreakReason`; callers convert it into an `InternalBreakReason` as needed.
#[inline(always)]
pub(super) fn finish_basic_block<P, H, S, T>(
    state: &mut ExecutionState<'_, P, H, S, T>,
    basic_block_node: &BasicBlockNode,
    node_id: MastNodeId,
    current_forest: &Arc<MastForest>,
) -> ControlFlow<BreakReason>
where
    P: Processor,
    H: Host,
    S: Stopper<Processor = P>,
    T: Tracer<Processor = P>,
{
    state.tracer.start_clock_cycle(
        state.processor,
        Continuation::FinishBasicBlock(node_id),
        state.continuation_stack,
        current_forest,
    );

    // If execution stops during finalization, resume with the `after_exit`
    // decorators (the end-of-block decorators run before them, below).
    finalize_clock_cycle_with_continuation(
        state.processor,
        state.tracer,
        state.stopper,
        state.continuation_stack,
        || Some(Continuation::AfterExitDecoratorsBasicBlock(node_id)),
        current_forest,
    )?;

    state.processor.execute_end_of_block_decorators(
        basic_block_node,
        node_id,
        current_forest,
        state.host,
    )?;

    state
        .processor
        .execute_after_exit_decorators(node_id, current_forest, state.host)
}
/// Executes the operations of batch `batch_index` of `basic_block`, starting
/// at `start_op_idx` within that batch.
///
/// For each operation:
/// 1. a clock cycle is started with a `ResumeBasicBlock` continuation pointing
///    at this operation,
/// 2. the operation's decorators are executed,
/// 3. the operation itself is executed, and
/// 4. the cycle is finalized, recording where to resume if execution stops
///    here (next op, next batch's RESPAN, or block finish — see
///    `get_continuation_after_executing_operation`).
///
/// `Operation::Emit` is special-cased: instead of being executed here, it
/// breaks out with `InternalBreakReason::Emit` so the caller can process the
/// event; the break carries the continuation at which to resume afterwards,
/// and the operation's clock cycle is finalized later in
/// `finish_emit_op_execution`.
///
/// `batch_offset_in_block` is the number of operations in all preceding
/// batches; it translates batch-relative op indices into block-relative ones.
#[inline(always)]
fn execute_op_batch<P, H, S, T>(
    state: &mut ExecutionState<'_, P, H, S, T>,
    basic_block: &BasicBlockNode,
    batch_index: usize,
    start_op_idx: usize,
    batch_offset_in_block: usize,
    current_forest: &Arc<MastForest>,
) -> ControlFlow<InternalBreakReason>
where
    P: Processor,
    H: Host,
    S: Stopper<Processor = P>,
    T: Tracer<Processor = P>,
{
    let batch = &basic_block.op_batches()[batch_index];
    let node_id = basic_block
        .linked_id()
        .expect("basic block node should be linked when executing operations");

    for (op_idx_in_batch, op) in batch.ops().iter().enumerate().skip(start_op_idx) {
        // Index of this operation relative to the whole basic block.
        let op_idx_in_block = batch_offset_in_block + op_idx_in_batch;

        state.tracer.start_clock_cycle(
            state.processor,
            Continuation::ResumeBasicBlock { node_id, batch_index, op_idx_in_batch },
            state.continuation_stack,
            current_forest,
        );

        state
            .processor
            .execute_decorators_for_op(node_id, op_idx_in_block, current_forest, state.host)
            .map_break(InternalBreakReason::from)?;

        let operation_helpers = match op {
            Operation::Emit => {
                // Hand control back to the caller so the emitted event can be
                // processed; tell it where to pick execution back up.
                return ControlFlow::Break(InternalBreakReason::Emit {
                    basic_block_node_id: node_id,
                    op_idx: op_idx_in_block,
                    continuation: get_continuation_after_executing_operation(
                        basic_block,
                        node_id,
                        batch_index,
                        op_idx_in_batch,
                    ),
                });
            },
            _ => {
                // Execution errors are surfaced as a `Break` rather than a
                // `Result` so they flow through the same channel as stops.
                match execute_op(
                    state.processor,
                    op,
                    op_idx_in_block,
                    current_forest,
                    node_id,
                    state.host,
                    state.tracer,
                ) {
                    Ok(operation_helpers) => operation_helpers,
                    Err(err) => {
                        return ControlFlow::Break(BreakReason::Err(err).into());
                    },
                }
            },
        };

        finalize_clock_cycle_with_continuation_and_op_helpers(
            state.processor,
            state.tracer,
            state.stopper,
            state.continuation_stack,
            || {
                Some(get_continuation_after_executing_operation(
                    basic_block,
                    node_id,
                    batch_index,
                    op_idx_in_batch,
                ))
            },
            operation_helpers,
            current_forest,
        )
        .map_break(InternalBreakReason::from)?;
    }

    ControlFlow::Continue(())
}
/// Determines where execution should pick up after the operation at
/// (`batch_index`, `op_idx_in_batch`) of `basic_block_node` has completed:
/// the next operation in the same batch, the RESPAN of the following batch,
/// or — after the last operation of the last batch — the end of the block.
#[inline(always)]
fn get_continuation_after_executing_operation(
    basic_block_node: &BasicBlockNode,
    node_id: MastNodeId,
    batch_index: usize,
    op_idx_in_batch: usize,
) -> Continuation {
    let more_ops_in_batch =
        op_idx_in_batch < basic_block_node.op_batches()[batch_index].ops().len() - 1;
    let more_batches_in_block = batch_index < basic_block_node.num_op_batches() - 1;

    match (more_ops_in_batch, more_batches_in_block) {
        // Still inside the current batch: advance to the next operation.
        (true, _) => Continuation::ResumeBasicBlock {
            node_id,
            batch_index,
            op_idx_in_batch: op_idx_in_batch + 1,
        },
        // Batch exhausted but more batches follow: the next batch starts with
        // its RESPAN cycle.
        (false, true) => Continuation::Respan { node_id, batch_index: batch_index + 1 },
        // Last operation of the last batch: finish the block.
        (false, false) => Continuation::FinishBasicBlock(node_id),
    }
}
/// Completes the clock cycle of a previously-encountered `Emit` operation and
/// schedules execution to resume at `post_emit_continuation`.
pub fn finish_emit_op_execution<P, S, T>(
    post_emit_continuation: Continuation,
    processor: &mut P,
    continuation_stack: &mut ContinuationStack,
    current_forest: &Arc<MastForest>,
    tracer: &mut T,
    stopper: &S,
) -> ControlFlow<BreakReason>
where
    P: Processor,
    S: Stopper<Processor = P>,
    T: Tracer<Processor = P>,
{
    // The continuation is needed twice: once by the finalizer (used if
    // execution stops during finalization) and once to schedule the actual
    // resume below, so hand the finalizer its own copy.
    let resume_point = post_emit_continuation.clone();
    finalize_clock_cycle_with_continuation(
        processor,
        tracer,
        stopper,
        continuation_stack,
        move || Some(resume_point),
        current_forest,
    )?;

    continuation_stack.push_continuation(post_emit_continuation);
    ControlFlow::Continue(())
}