use crate::cook::execution::mapreduce::agent_command_executor::AgentCommandExecutor;
use crate::cook::execution::mapreduce::checkpoint::storage::CheckpointStorage;
use crate::cook::workflow::WorkflowStep;
use crate::worktree::WorktreeManager;
use serde_json::Value;
use std::collections::HashMap;
use std::sync::Arc;
/// Execution environment for the map phase of a MapReduce job.
///
/// Bundles the shared services and per-job settings an agent needs while
/// processing work items. `Clone` is cheap: the service handles are `Arc`s,
/// so cloning bumps refcounts rather than duplicating the services
/// (the `Vec`/`HashMap`/`String` fields are deep-copied).
#[derive(Clone)]
pub struct MapEnv {
    // Manages the git worktrees agents run in — TODO confirm per-agent isolation semantics.
    pub worktree_manager: Arc<WorktreeManager>,
    // Executes the workflow commands on behalf of each agent.
    pub command_executor: Arc<AgentCommandExecutor>,
    // Checkpoint persistence backend (trait object: backend chosen by caller).
    pub storage: Arc<dyn CheckpointStorage>,
    // Workflow steps each map agent runs against its work item.
    pub agent_template: Vec<WorkflowStep>,
    // Identifier of the MapReduce job this environment belongs to.
    pub job_id: String,
    // Upper bound on concurrently executing agents — presumably; verify against scheduler.
    pub max_parallel: usize,
    // Workflow-level environment variables exposed to agents.
    pub workflow_env: HashMap<String, Value>,
    // Additional free-form configuration values.
    pub config: HashMap<String, Value>,
}
/// Execution environment for a non-map phase (e.g. setup or reduce —
/// NOTE(review): exact phases not visible here; confirm against callers).
///
/// A slimmer sibling of `MapEnv`: no worktree manager, agent template, or
/// parallelism settings — just the executor, storage, and variable maps.
#[derive(Clone)]
pub struct PhaseEnv {
    // Executes the phase's workflow commands.
    pub command_executor: Arc<AgentCommandExecutor>,
    // Checkpoint persistence backend (trait object: backend chosen by caller).
    pub storage: Arc<dyn CheckpointStorage>,
    // Phase-scoped variables available during execution.
    pub variables: HashMap<String, Value>,
    // Workflow-level environment variables, shared with the map phase.
    pub workflow_env: HashMap<String, Value>,
}
/// Parameter object for constructing a [`MapEnv`].
///
/// Mirrors `MapEnv` field-for-field; it exists so `MapEnv::new` takes one
/// named-argument-style struct instead of eight positional parameters.
/// Keep the two field lists in sync when either changes.
pub struct MapEnvParams {
    // See the corresponding `MapEnv` fields for the meaning of each entry.
    pub worktree_manager: Arc<WorktreeManager>,
    pub command_executor: Arc<AgentCommandExecutor>,
    pub storage: Arc<dyn CheckpointStorage>,
    pub agent_template: Vec<WorkflowStep>,
    pub job_id: String,
    pub max_parallel: usize,
    pub workflow_env: HashMap<String, Value>,
    pub config: HashMap<String, Value>,
}
impl MapEnv {
pub fn new(params: MapEnvParams) -> Self {
let worktree_manager = params.worktree_manager;
let command_executor = params.command_executor;
let storage = params.storage;
let agent_template = params.agent_template;
let job_id = params.job_id;
let max_parallel = params.max_parallel;
let workflow_env = params.workflow_env;
let config = params.config;
Self {
worktree_manager,
command_executor,
storage,
agent_template,
job_id,
max_parallel,
workflow_env,
config,
}
}
}
impl PhaseEnv {
pub fn new(
command_executor: Arc<AgentCommandExecutor>,
storage: Arc<dyn CheckpointStorage>,
variables: HashMap<String, Value>,
workflow_env: HashMap<String, Value>,
) -> Self {
Self {
command_executor,
storage,
variables,
workflow_env,
}
}
}