Skip to main content

proof_engine/behavior/
planner.rs

//! Goal-Oriented Action Planning (GOAP).
//!
//! # Overview
//!
//! GOAP lets an AI agent automatically figure out *how* to reach a goal by
//! searching for the cheapest sequence of actions that transforms the current
//! world state into the goal state.
//!
//! ## Core types
//!
//! | Type | Role |
//! |------|------|
//! | [`WorldState`]   | Named boolean + float condition map |
//! | [`Action`]       | Preconditions, effects, cost, duration |
//! | [`GoalStack`]    | Priority-ordered goals for one agent |
//! | [`GoapPlanner`]  | A* plan search |
//! | [`PlanExecutor`] | Runs a plan, detects state drift, replans |
//!
//! ## Example
//! ```ignore
//! let mut state = WorldState::new();
//! state.set_bool("has_weapon", false);
//! state.set_bool("enemy_dead", false);
//!
//! let pick_up = Action::new("pick_up_weapon", 1.0)
//!     .require_bool("has_weapon", false)
//!     .effect_bool("has_weapon", true);
//!
//! let attack = Action::new("attack_enemy", 2.0)
//!     .require_bool("has_weapon", true)
//!     .require_bool("enemy_dead", false)
//!     .effect_bool("enemy_dead", true);
//!
//! let mut goal = WorldState::new();
//! goal.set_bool("enemy_dead", true);
//!
//! let plan = GoapPlanner::plan(&state, &goal, &[pick_up, attack], 10);
//! // plan == Some(["pick_up_weapon", "attack_enemy"])
//! ```

41use std::collections::{BinaryHeap, HashMap, HashSet, VecDeque};
42use std::cmp::Ordering;
43
44// ── WorldState ────────────────────────────────────────────────────────────────
45
46/// A set of named conditions describing the current state of the world from
47/// one agent's perspective.
48///
49/// Conditions have two flavours:
50/// - **Bool** — classic GOAP true/false flags.
51/// - **Float** — numeric values (health, ammo count, distance…) used for
52///   richer precondition checking.
53#[derive(Debug, Clone, PartialEq, Default)]
54pub struct WorldState {
55    bools:  HashMap<String, bool>,
56    floats: HashMap<String, f32>,
57}
58
59impl WorldState {
60    pub fn new() -> Self { Self::default() }
61
62    // ── bool conditions ───────────────────────────────────────────────────────
63
64    pub fn set_bool(&mut self, key: &str, value: bool) {
65        self.bools.insert(key.to_string(), value);
66    }
67
68    pub fn get_bool(&self, key: &str) -> bool {
69        *self.bools.get(key).unwrap_or(&false)
70    }
71
72    pub fn has_bool(&self, key: &str) -> bool {
73        self.bools.contains_key(key)
74    }
75
76    // ── float conditions ──────────────────────────────────────────────────────
77
78    pub fn set_float(&mut self, key: &str, value: f32) {
79        self.floats.insert(key.to_string(), value);
80    }
81
82    pub fn get_float(&self, key: &str) -> f32 {
83        *self.floats.get(key).unwrap_or(&0.0)
84    }
85
86    pub fn has_float(&self, key: &str) -> bool {
87        self.floats.contains_key(key)
88    }
89
90    // ── satisfaction checking ─────────────────────────────────────────────────
91
92    /// Does `self` satisfy every condition in `goal`?
93    ///
94    /// Bool conditions must match exactly.
95    /// Float conditions in `goal` are treated as *lower bounds* — i.e.
96    /// `self.float >= goal.float`.
97    pub fn satisfies(&self, goal: &WorldState) -> bool {
98        for (k, &v) in &goal.bools {
99            if self.get_bool(k) != v { return false; }
100        }
101        for (k, &v) in &goal.floats {
102            if self.get_float(k) < v { return false; }
103        }
104        true
105    }
106
107    /// Number of unsatisfied conditions from `goal`.  Used as A* heuristic.
108    pub fn distance_to(&self, goal: &WorldState) -> usize {
109        let bool_unsatisfied = goal.bools.iter()
110            .filter(|(k, &v)| self.get_bool(k) != v)
111            .count();
112        let float_unsatisfied = goal.floats.iter()
113            .filter(|(k, &v)| self.get_float(k) < v)
114            .count();
115        bool_unsatisfied + float_unsatisfied
116    }
117
118    /// Apply an action's effects to produce the successor state.
119    pub fn apply(&self, effects: &ActionEffects) -> WorldState {
120        let mut next = self.clone();
121        for (k, &v) in &effects.bools { next.bools.insert(k.clone(), v); }
122        for (k, &v) in &effects.floats_add {
123            let cur = next.get_float(k);
124            next.floats.insert(k.clone(), cur + v);
125        }
126        for (k, &v) in &effects.floats_set {
127            next.floats.insert(k.clone(), v);
128        }
129        next
130    }
131
132    /// Merge another state into self, overwriting on conflict.
133    pub fn merge_from(&mut self, other: &WorldState) {
134        for (k, &v) in &other.bools  { self.bools.insert(k.clone(), v); }
135        for (k, &v) in &other.floats { self.floats.insert(k.clone(), v); }
136    }
137
138    /// True if this state has no conditions at all.
139    pub fn is_empty(&self) -> bool {
140        self.bools.is_empty() && self.floats.is_empty()
141    }
142
143    /// Create a snapshot key for closed-set deduplication.
144    fn snapshot_key(&self) -> StateKey {
145        let mut bool_pairs: Vec<(String, bool)> = self.bools.iter()
146            .map(|(k, &v)| (k.clone(), v)).collect();
147        bool_pairs.sort_by(|a, b| a.0.cmp(&b.0));
148
149        let mut float_pairs: Vec<(String, u32)> = self.floats.iter()
150            .map(|(k, &v)| (k.clone(), v.to_bits())).collect();
151        float_pairs.sort_by(|a, b| a.0.cmp(&b.0));
152
153        StateKey { bools: bool_pairs, floats: float_pairs }
154    }
155}
156
/// Canonical, hashable snapshot of a [`WorldState`], used as the key for the
/// planner's closed/visited sets.  Condition pairs are sorted by name and
/// floats are stored as raw IEEE-754 bits so the type can derive `Eq + Hash`.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
struct StateKey {
    bools:  Vec<(String, bool)>,
    floats: Vec<(String, u32)>,
}
162
163// ── ActionEffects ─────────────────────────────────────────────────────────────
164
/// The effects portion of an [`Action`], separated out so that `WorldState`
/// can apply them without borrowing the whole action.
#[derive(Debug, Clone, Default)]
pub struct ActionEffects {
    /// Set named bool conditions.
    pub bools:      HashMap<String, bool>,
    /// Add a delta to named float conditions.
    pub floats_add: HashMap<String, f32>,
    /// Set named float conditions to an absolute value.
    pub floats_set: HashMap<String, f32>,
}

impl ActionEffects {
    pub fn new() -> Self { Self::default() }

    /// Builder: record a bool condition to set on completion.
    pub fn set_bool(mut self, key: &str, value: bool) -> Self {
        self.bools.insert(key.to_owned(), value);
        self
    }

    /// Builder: record a delta to add to a float condition on completion.
    pub fn add_float(mut self, key: &str, delta: f32) -> Self {
        self.floats_add.insert(key.to_owned(), delta);
        self
    }

    /// Builder: record an absolute float value to set on completion.
    pub fn set_float(mut self, key: &str, value: f32) -> Self {
        self.floats_set.insert(key.to_owned(), value);
        self
    }
}
195
196// ── Preconditions ─────────────────────────────────────────────────────────────
197
198/// The preconditions an action requires to be applicable.
199#[derive(Debug, Clone, Default)]
200pub struct Preconditions {
201    pub bools:       HashMap<String, bool>,
202    /// Key must be >= threshold.
203    pub floats_gte:  HashMap<String, f32>,
204    /// Key must be <= threshold.
205    pub floats_lte:  HashMap<String, f32>,
206    /// Key must be > threshold.
207    pub floats_gt:   HashMap<String, f32>,
208    /// Key must be < threshold.
209    pub floats_lt:   HashMap<String, f32>,
210}
211
212impl Preconditions {
213    pub fn new() -> Self { Self::default() }
214
215    pub fn require_bool(mut self, key: &str, value: bool) -> Self {
216        self.bools.insert(key.to_string(), value);
217        self
218    }
219
220    pub fn require_float_gte(mut self, key: &str, min: f32) -> Self {
221        self.floats_gte.insert(key.to_string(), min);
222        self
223    }
224
225    pub fn require_float_lte(mut self, key: &str, max: f32) -> Self {
226        self.floats_lte.insert(key.to_string(), max);
227        self
228    }
229
230    pub fn require_float_gt(mut self, key: &str, min: f32) -> Self {
231        self.floats_gt.insert(key.to_string(), min);
232        self
233    }
234
235    pub fn require_float_lt(mut self, key: &str, max: f32) -> Self {
236        self.floats_lt.insert(key.to_string(), max);
237        self
238    }
239
240    /// Returns true if `state` satisfies all preconditions.
241    pub fn satisfied_by(&self, state: &WorldState) -> bool {
242        for (k, &v) in &self.bools {
243            if state.get_bool(k) != v { return false; }
244        }
245        for (k, &t) in &self.floats_gte { if state.get_float(k) <  t { return false; } }
246        for (k, &t) in &self.floats_lte { if state.get_float(k) >  t { return false; } }
247        for (k, &t) in &self.floats_gt  { if state.get_float(k) <= t { return false; } }
248        for (k, &t) in &self.floats_lt  { if state.get_float(k) >= t { return false; } }
249        true
250    }
251}
252
253// ── Action ────────────────────────────────────────────────────────────────────
254
255/// A GOAP action that an agent can execute.
256///
257/// Each action has:
258/// - A **cost** (lower = preferred by the planner).
259/// - A set of **preconditions** that must hold before it can run.
260/// - A set of **effects** that it applies to the world state.
261/// - An optional **duration** in simulated seconds.
262/// - An optional **interruption priority** (higher = harder to interrupt).
263#[derive(Debug, Clone)]
264pub struct Action {
265    /// Unique name identifying this action.
266    pub name:          String,
267    /// Base cost (used by A* to prefer cheaper plans).
268    pub cost:          f32,
269    /// Preconditions that must hold.
270    pub preconditions: Preconditions,
271    /// Effects applied to world state on completion.
272    pub effects:       ActionEffects,
273    /// Estimated duration in seconds (used by the executor).
274    pub duration_secs: f32,
275    /// Priority when being interrupted by a higher-priority action.
276    pub interrupt_priority: u32,
277    /// If true, the planner will not use this action (temporarily disabled).
278    pub disabled:      bool,
279    /// User data tag for categorizing actions.
280    pub tags:          Vec<String>,
281}
282
283impl Action {
284    pub fn new(name: &str, cost: f32) -> Self {
285        Self {
286            name:              name.to_string(),
287            cost,
288            preconditions:     Preconditions::new(),
289            effects:           ActionEffects::new(),
290            duration_secs:     0.0,
291            interrupt_priority: 0,
292            disabled:          false,
293            tags:              Vec::new(),
294        }
295    }
296
297    // ── Fluent precondition builders ──────────────────────────────────────────
298
299    pub fn require_bool(mut self, key: &str, value: bool) -> Self {
300        self.preconditions = self.preconditions.require_bool(key, value);
301        self
302    }
303
304    pub fn require_float_gte(mut self, key: &str, min: f32) -> Self {
305        self.preconditions = self.preconditions.require_float_gte(key, min);
306        self
307    }
308
309    pub fn require_float_lte(mut self, key: &str, max: f32) -> Self {
310        self.preconditions = self.preconditions.require_float_lte(key, max);
311        self
312    }
313
314    pub fn require_float_gt(mut self, key: &str, val: f32) -> Self {
315        self.preconditions = self.preconditions.require_float_gt(key, val);
316        self
317    }
318
319    pub fn require_float_lt(mut self, key: &str, val: f32) -> Self {
320        self.preconditions = self.preconditions.require_float_lt(key, val);
321        self
322    }
323
324    // ── Fluent effect builders ────────────────────────────────────────────────
325
326    pub fn effect_bool(mut self, key: &str, value: bool) -> Self {
327        self.effects = self.effects.set_bool(key, value);
328        self
329    }
330
331    pub fn effect_add_float(mut self, key: &str, delta: f32) -> Self {
332        self.effects = self.effects.add_float(key, delta);
333        self
334    }
335
336    pub fn effect_set_float(mut self, key: &str, value: f32) -> Self {
337        self.effects = self.effects.set_float(key, value);
338        self
339    }
340
341    // ── Other builders ────────────────────────────────────────────────────────
342
343    pub fn with_duration(mut self, secs: f32) -> Self {
344        self.duration_secs = secs;
345        self
346    }
347
348    pub fn with_interrupt_priority(mut self, p: u32) -> Self {
349        self.interrupt_priority = p;
350        self
351    }
352
353    pub fn with_tag(mut self, tag: &str) -> Self {
354        self.tags.push(tag.to_string());
355        self
356    }
357
358    pub fn disabled(mut self) -> Self {
359        self.disabled = true;
360        self
361    }
362
363    // ── Applicability ─────────────────────────────────────────────────────────
364
365    pub fn is_applicable(&self, state: &WorldState) -> bool {
366        !self.disabled && self.preconditions.satisfied_by(state)
367    }
368
369    /// Compute the successor state by applying this action's effects.
370    pub fn apply_effects(&self, state: &WorldState) -> WorldState {
371        state.apply(&self.effects)
372    }
373}
374
375// ── Goal ─────────────────────────────────────────────────────────────────────
376
377/// A named goal with a desired `WorldState` and a priority.
378///
379/// Higher priority goals pre-empt lower priority ones.
380#[derive(Debug, Clone)]
381pub struct Goal {
382    pub name:     String,
383    pub state:    WorldState,
384    pub priority: u32,
385    /// If set, this goal expires after `ttl_secs` simulation seconds.
386    pub ttl_secs: Option<f32>,
387    created_at:   f32,
388}
389
390impl Goal {
391    pub fn new(name: &str, state: WorldState, priority: u32) -> Self {
392        Self { name: name.to_string(), state, priority, ttl_secs: None, created_at: 0.0 }
393    }
394
395    pub fn with_ttl(mut self, ttl_secs: f32) -> Self {
396        self.ttl_secs = Some(ttl_secs);
397        self
398    }
399
400    pub fn is_expired(&self, sim_time: f32) -> bool {
401        self.ttl_secs.map_or(false, |ttl| sim_time - self.created_at > ttl)
402    }
403}
404
405// ── GoalStack ─────────────────────────────────────────────────────────────────
406
407/// A priority-ordered collection of goals for one agent.
408///
409/// The active goal is always the one with the highest `priority`.  If two
410/// goals share the same priority the one added first wins (stable sort).
411#[derive(Debug, Default)]
412pub struct GoalStack {
413    goals:    Vec<Goal>,
414    sim_time: f32,
415}
416
417impl GoalStack {
418    pub fn new() -> Self { Self::default() }
419
420    /// Push a new goal.  The stack is re-sorted automatically.
421    pub fn push(&mut self, mut goal: Goal) {
422        goal.created_at = self.sim_time;
423        self.goals.push(goal);
424        // Stable descending sort by priority.
425        self.goals.sort_by(|a, b| b.priority.cmp(&a.priority));
426    }
427
428    /// Remove the goal with `name`.
429    pub fn remove(&mut self, name: &str) {
430        self.goals.retain(|g| g.name != name);
431    }
432
433    /// Return the highest-priority active goal, if any.
434    pub fn active(&self) -> Option<&Goal> {
435        self.goals.iter().find(|g| !g.is_expired(self.sim_time))
436    }
437
438    /// Advance simulation time and prune expired goals.
439    pub fn tick(&mut self, dt: f32) {
440        self.sim_time += dt;
441        let t = self.sim_time;
442        self.goals.retain(|g| !g.is_expired(t));
443    }
444
445    pub fn is_empty(&self) -> bool { self.goals.is_empty() }
446    pub fn len(&self)     -> usize { self.goals.len() }
447
448    /// Iterate all goals (highest priority first).
449    pub fn iter(&self) -> impl Iterator<Item = &Goal> {
450        self.goals.iter()
451    }
452
453    /// True if any goal with `name` exists and is not expired.
454    pub fn has_goal(&self, name: &str) -> bool {
455        self.goals.iter().any(|g| g.name == name && !g.is_expired(self.sim_time))
456    }
457
458    pub fn sim_time(&self) -> f32 { self.sim_time }
459}
460
461// ── A* search node ────────────────────────────────────────────────────────────
462
463#[derive(Clone)]
464struct SearchNode {
465    state:     WorldState,
466    /// Action names taken to reach this state.
467    path:      Vec<String>,
468    /// Accumulated cost g(n).
469    cost:      f32,
470    /// Heuristic estimate h(n).
471    heuristic: usize,
472}
473
474impl SearchNode {
475    fn f(&self)     -> f32 { self.cost + self.heuristic as f32 }
476    fn f_ord(&self) -> u64 { (self.f() * 1_000_000.0) as u64 }
477}
478
479impl PartialEq for SearchNode {
480    fn eq(&self, other: &Self) -> bool { self.f_ord() == other.f_ord() }
481}
482impl Eq for SearchNode {}
483
484impl PartialOrd for SearchNode {
485    fn partial_cmp(&self, other: &Self) -> Option<Ordering> { Some(self.cmp(other)) }
486}
487
488impl Ord for SearchNode {
489    fn cmp(&self, other: &Self) -> Ordering {
490        // Min-heap: reverse order so that smaller f() has higher priority.
491        other.f_ord().cmp(&self.f_ord())
492    }
493}
494
495// ── GoapPlanner ───────────────────────────────────────────────────────────────
496
497/// Stateless A* GOAP planner.
498pub struct GoapPlanner;
499
500impl GoapPlanner {
501    /// Find the cheapest sequence of action names that transforms `start` into
502    /// a state satisfying `goal`, or `None` if no plan is reachable within
503    /// `max_depth` actions.
504    pub fn plan(
505        start:     &WorldState,
506        goal:      &WorldState,
507        actions:   &[Action],
508        max_depth: usize,
509    ) -> Option<Vec<String>> {
510        if start.satisfies(goal) {
511            return Some(Vec::new()); // already satisfied
512        }
513
514        let mut open:   BinaryHeap<SearchNode> = BinaryHeap::new();
515        let mut closed: HashSet<StateKey>       = HashSet::new();
516
517        open.push(SearchNode {
518            state:     start.clone(),
519            path:      Vec::new(),
520            cost:      0.0,
521            heuristic: start.distance_to(goal),
522        });
523
524        while let Some(node) = open.pop() {
525            if node.state.satisfies(goal) {
526                return Some(node.path);
527            }
528
529            if node.path.len() >= max_depth { continue; }
530
531            let key = node.state.snapshot_key();
532            if closed.contains(&key) { continue; }
533            closed.insert(key);
534
535            for action in actions {
536                if !action.is_applicable(&node.state) { continue; }
537
538                let next_state = action.apply_effects(&node.state);
539                let next_key   = next_state.snapshot_key();
540                if closed.contains(&next_key) { continue; }
541
542                let next_cost = node.cost + action.cost;
543                let mut next_path = node.path.clone();
544                next_path.push(action.name.clone());
545
546                open.push(SearchNode {
547                    state:     next_state.clone(),
548                    path:      next_path,
549                    cost:      next_cost,
550                    heuristic: next_state.distance_to(goal),
551                });
552            }
553        }
554
555        None // no plan found
556    }
557
558    /// Plan and return full detail: path of action names + total estimated cost.
559    pub fn plan_with_cost(
560        start:     &WorldState,
561        goal:      &WorldState,
562        actions:   &[Action],
563        max_depth: usize,
564    ) -> Option<(Vec<String>, f32)> {
565        if start.satisfies(goal) {
566            return Some((Vec::new(), 0.0));
567        }
568
569        let mut open:   BinaryHeap<SearchNode> = BinaryHeap::new();
570        let mut closed: HashSet<StateKey>       = HashSet::new();
571
572        open.push(SearchNode {
573            state:     start.clone(),
574            path:      Vec::new(),
575            cost:      0.0,
576            heuristic: start.distance_to(goal),
577        });
578
579        while let Some(node) = open.pop() {
580            if node.state.satisfies(goal) {
581                let cost = node.cost;
582                return Some((node.path, cost));
583            }
584
585            if node.path.len() >= max_depth { continue; }
586
587            let key = node.state.snapshot_key();
588            if closed.contains(&key) { continue; }
589            closed.insert(key);
590
591            for action in actions {
592                if !action.is_applicable(&node.state) { continue; }
593
594                let next_state = action.apply_effects(&node.state);
595                let next_key   = next_state.snapshot_key();
596                if closed.contains(&next_key) { continue; }
597
598                let next_cost = node.cost + action.cost;
599                let mut next_path = node.path.clone();
600                next_path.push(action.name.clone());
601
602                open.push(SearchNode {
603                    state:     next_state.clone(),
604                    path:      next_path,
605                    cost:      next_cost,
606                    heuristic: next_state.distance_to(goal),
607                });
608            }
609        }
610
611        None
612    }
613
614    /// Return all valid plans up to `max_plans` alternatives, sorted by cost.
615    pub fn plan_alternatives(
616        start:     &WorldState,
617        goal:      &WorldState,
618        actions:   &[Action],
619        max_depth: usize,
620        max_plans: usize,
621    ) -> Vec<(Vec<String>, f32)> {
622        let mut results: Vec<(Vec<String>, f32)> = Vec::new();
623
624        // BFS / bounded DFS collecting all goal-reaching paths.
625        let mut queue: VecDeque<SearchNode> = VecDeque::new();
626        queue.push_back(SearchNode {
627            state:     start.clone(),
628            path:      Vec::new(),
629            cost:      0.0,
630            heuristic: start.distance_to(goal),
631        });
632
633        let mut visited: HashSet<StateKey> = HashSet::new();
634
635        while let Some(node) = queue.pop_front() {
636            if results.len() >= max_plans { break; }
637
638            if node.state.satisfies(goal) {
639                results.push((node.path.clone(), node.cost));
640                continue; // don't expand further from a goal node
641            }
642
643            if node.path.len() >= max_depth { continue; }
644
645            let key = node.state.snapshot_key();
646            if visited.contains(&key) { continue; }
647            visited.insert(key);
648
649            for action in actions {
650                if !action.is_applicable(&node.state) { continue; }
651                let next_state = action.apply_effects(&node.state);
652                let mut next_path = node.path.clone();
653                next_path.push(action.name.clone());
654                queue.push_back(SearchNode {
655                    state:     next_state.clone(),
656                    path:      next_path,
657                    cost:      node.cost + action.cost,
658                    heuristic: next_state.distance_to(goal),
659                });
660            }
661        }
662
663        results.sort_by(|a, b| a.1.partial_cmp(&b.1).unwrap_or(Ordering::Equal));
664        results
665    }
666}
667
668// ── PlanStep ──────────────────────────────────────────────────────────────────
669
/// The status of a single step in a running plan.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum PlanStepStatus {
    NotStarted,
    InProgress,
    Completed,
    Failed,
    Interrupted,
}

/// Runtime state of one action being executed.
#[derive(Debug, Clone)]
pub struct PlanStep {
    pub action_name: String,
    pub status:      PlanStepStatus,
    /// Elapsed time since this step started, in seconds.
    pub elapsed:     f32,
    /// Expected duration (from `Action::duration_secs`).
    pub duration:    f32,
}

impl PlanStep {
    /// Fresh step: `NotStarted`, nothing elapsed yet.
    fn new(action_name: &str, duration: f32) -> Self {
        PlanStep {
            action_name: action_name.to_owned(),
            status: PlanStepStatus::NotStarted,
            elapsed: 0.0,
            duration,
        }
    }
}
701
702// ── PlanExecutorState ─────────────────────────────────────────────────────────
703
/// The overall state of the plan executor.
///
/// Transitions visible in [`PlanExecutor::tick`]: `Replanning` moves back to
/// execution when replanning succeeds or to `Failed` when it errors;
/// `Executing` becomes `Succeeded` once every step completes; and
/// [`PlanExecutor::interrupt`] forces `Interrupted` at any time.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum ExecutorState {
    /// No plan is loaded.
    Idle,
    /// A plan is loaded and currently executing.
    Executing,
    /// The plan finished successfully (goal satisfied).
    Succeeded,
    /// A step failed and replanning is in progress.
    Replanning,
    /// No plan could be found (goal unreachable).
    Failed,
    /// The executor was explicitly interrupted.
    Interrupted,
}
720
721// ── PlanExecutor ──────────────────────────────────────────────────────────────
722
/// Drives execution of a GOAP plan, applying each action's effects to the
/// simulated world state, and replanning when the actual state diverges from
/// the expected state.
///
/// # Replanning policy
///
/// After each action completes, the executor compares the **observed** world
/// state (provided by the game) against the **expected** state (computed by
/// applying planned effects).  If they differ in any condition that the next
/// action cares about, the executor discards the remainder of the plan and
/// calls the planner again.
///
/// # Interruption
///
/// An external caller can call [`PlanExecutor::interrupt`] at any time.  The
/// current step is marked `Interrupted` and the executor transitions to the
/// `Interrupted` state.  The caller may then push a new goal and call
/// [`PlanExecutor::start`] to begin fresh.
#[derive(Debug)]
pub struct PlanExecutor {
    /// The action library used for (re)planning.
    actions:           Vec<Action>,
    /// Current plan steps.
    steps:             Vec<PlanStep>,
    /// Index of the currently executing step.
    current:           usize,
    /// The simulated world state as the planner believes it to be.
    sim_state:         WorldState,
    /// The goal the executor is working toward (`None` until `start` is called).
    goal:              Option<WorldState>,
    /// Current executor state.
    pub state:         ExecutorState,
    /// Maximum plan depth for the A* search.
    pub max_depth:     usize,
    /// Total simulation time elapsed.
    pub sim_time:      f32,
    /// How many times the executor has replanned this goal.
    pub replan_count:  u32,
    /// Maximum replanning attempts before giving up (defaults to 5 in `new`).
    pub max_replans:   u32,
    /// Snapshot of the world state at the start of the current step, used
    /// to detect unexpected changes.
    /// NOTE(review): `tick` currently refreshes this every tick, not once per
    /// step — confirm which semantics `state_has_drifted` expects.
    step_start_state:  WorldState,
    /// Log of completed action names in order.
    pub history:       Vec<String>,
}
769
770impl PlanExecutor {
771    pub fn new(actions: Vec<Action>, max_depth: usize) -> Self {
772        Self {
773            actions,
774            steps:            Vec::new(),
775            current:          0,
776            sim_state:        WorldState::new(),
777            goal:             None,
778            state:            ExecutorState::Idle,
779            max_depth,
780            sim_time:         0.0,
781            replan_count:     0,
782            max_replans:      5,
783            step_start_state: WorldState::new(),
784            history:          Vec::new(),
785        }
786    }
787
    /// Set the initial world state (replaces any prior simulated state wholesale).
    pub fn set_world_state(&mut self, state: WorldState) {
        self.sim_state = state;
    }

    /// Update a single bool condition in the simulated world state.
    pub fn update_bool(&mut self, key: &str, value: bool) {
        self.sim_state.set_bool(key, value);
    }

    /// Update a single float condition in the simulated world state.
    pub fn update_float(&mut self, key: &str, value: f32) {
        self.sim_state.set_float(key, value);
    }
802
803    /// Push a new goal and (re)plan immediately.
804    ///
805    /// Returns `Ok(plan_length)` on success or `Err` if no plan exists.
806    pub fn start(&mut self, goal: WorldState) -> Result<usize, PlanError> {
807        self.goal        = Some(goal.clone());
808        self.replan_count = 0;
809        self.history.clear();
810        self.do_plan(&goal)
811    }
812
813    /// Interrupt the current plan.
814    pub fn interrupt(&mut self) {
815        if let Some(step) = self.steps.get_mut(self.current) {
816            step.status = PlanStepStatus::Interrupted;
817        }
818        self.state = ExecutorState::Interrupted;
819    }
820
    /// Tick the executor by `dt` seconds.
    ///
    /// `observe_state` should return the *current observed world state*,
    /// incorporating any changes that happened outside the planner (e.g.
    /// enemy died unexpectedly, health changed).
    ///
    /// Returns the name of the action that should be executing this tick, or
    /// `None` if idle/finished.  Steps complete on a simple time basis: once
    /// `elapsed >= duration` (zero-duration steps complete immediately).
    pub fn tick(
        &mut self,
        dt:            f32,
        observe_state: impl Fn(&WorldState) -> WorldState,
    ) -> Option<&str> {
        self.sim_time += dt;

        // Terminal / passive states produce no action this tick.
        match self.state {
            ExecutorState::Idle
            | ExecutorState::Succeeded
            | ExecutorState::Failed
            | ExecutorState::Interrupted => return None,

            ExecutorState::Replanning => {
                // Try to replan toward the stored goal before executing.
                if let Some(goal) = self.goal.clone() {
                    match self.do_plan(&goal) {
                        Ok(_)  => {} // state set to Executing inside do_plan
                        Err(_) => {
                            self.state = ExecutorState::Failed;
                            return None;
                        }
                    }
                } else {
                    // Replanning with no goal: nothing to pursue.
                    self.state = ExecutorState::Idle;
                    return None;
                }
            }

            ExecutorState::Executing => {}
        }

        // All steps consumed → the plan is done.
        if self.current >= self.steps.len() {
            self.state = ExecutorState::Succeeded;
            return None;
        }

        // Start the step on its first tick and accumulate elapsed time.
        {
            let step = &mut self.steps[self.current];
            if step.status == PlanStepStatus::NotStarted {
                step.status = PlanStepStatus::InProgress;
            }
            step.elapsed += dt;
        }
        // NOTE(review): this snapshot is refreshed *every* tick, but the
        // field is documented as "state at the start of the current step" —
        // confirm against `state_has_drifted` which semantics are intended.
        self.step_start_state = self.sim_state.clone();

        // Check if the observed state has drifted from the expected state in
        // ways that invalidate the current step's preconditions.
        let observed = observe_state(&self.sim_state);
        if self.state_has_drifted(&observed) {
            // Merge observed changes into sim_state.
            self.sim_state.merge_from(&observed);
            self.steps[self.current].status = PlanStepStatus::Interrupted;

            // Replanning budget exhausted → give up on this goal.
            if self.replan_count >= self.max_replans {
                self.state = ExecutorState::Failed;
                return None;
            }

            self.replan_count += 1;
            self.state = ExecutorState::Replanning;
            // NOTE(review): returns the name of the step that was just
            // interrupted, even though replanning will replace it next tick.
            return Some(&self.steps[self.current].action_name);
        }

        // Merge observed state normally.
        self.sim_state.merge_from(&observed);

        // Check if the step's duration has elapsed (simple time-based
        // completion model — real games would use action callbacks).
        let step = &self.steps[self.current];
        let action_name = step.action_name.clone();
        let elapsed     = step.elapsed;
        let duration    = step.duration;

        if duration > 0.0 && elapsed < duration {
            // Still running: keep reporting the in-flight action.
            return Some(&self.steps[self.current].action_name);
        }

        // Step completed: apply its effects to the simulated state.
        if let Some(action) = self.find_action(&action_name) {
            let effects = action.effects.clone();
            self.sim_state = self.sim_state.apply(&effects);
        }

        self.steps[self.current].status = PlanStepStatus::Completed;
        self.history.push(action_name);
        self.current += 1;

        // Check if we're done.
        if self.current >= self.steps.len() {
            self.state = ExecutorState::Succeeded;
            return None;
        }

        // Verify the next step's preconditions hold in the current sim state.
        let next_name = self.steps[self.current].action_name.clone();
        if let Some(action) = self.find_action(&next_name) {
            if !action.is_applicable(&self.sim_state) {
                if self.replan_count >= self.max_replans {
                    self.state = ExecutorState::Failed;
                    return None;
                }
                self.replan_count += 1;
                self.state = ExecutorState::Replanning;
            }
        }

        // NOTE(review): after a completion this returns the *just-completed*
        // action's name (`current - 1`), not the upcoming one — confirm this
        // matches the documented "action that should be executing this tick".
        Some(&self.steps[self.current.saturating_sub(1)].action_name)
    }
938
939    /// The name of the action currently executing, if any.
940    pub fn current_action(&self) -> Option<&str> {
941        if self.state == ExecutorState::Executing && self.current < self.steps.len() {
942            Some(&self.steps[self.current].action_name)
943        } else {
944            None
945        }
946    }
947
948    /// The full current plan as action names.
949    pub fn plan_names(&self) -> Vec<&str> {
950        self.steps.iter().map(|s| s.action_name.as_str()).collect()
951    }
952
953    /// Progress through the current plan: `(completed_steps, total_steps)`.
954    pub fn progress(&self) -> (usize, usize) {
955        (self.current, self.steps.len())
956    }
957
958    /// True if the executor has successfully completed its goal.
959    pub fn has_succeeded(&self) -> bool { self.state == ExecutorState::Succeeded }
960
961    /// True if the executor has permanently failed.
962    pub fn has_failed(&self) -> bool { self.state == ExecutorState::Failed }
963
964    /// Reset the executor to idle without changing the action library.
965    pub fn reset(&mut self) {
966        self.steps     = Vec::new();
967        self.current   = 0;
968        self.goal      = None;
969        self.state     = ExecutorState::Idle;
970        self.replan_count = 0;
971        self.history.clear();
972    }
973
974    /// Access the current simulated world state.
975    pub fn world_state(&self) -> &WorldState { &self.sim_state }
976
977    // ── Internal helpers ──────────────────────────────────────────────────────
978
979    fn do_plan(&mut self, goal: &WorldState) -> Result<usize, PlanError> {
980        match GoapPlanner::plan(&self.sim_state, goal, &self.actions, self.max_depth) {
981            Some(names) => {
982                self.steps = names.iter().map(|n| {
983                    let dur = self.find_action(n).map(|a| a.duration_secs).unwrap_or(0.0);
984                    PlanStep::new(n, dur)
985                }).collect();
986                self.current = 0;
987                self.state   = ExecutorState::Executing;
988                let len = self.steps.len();
989                Ok(len)
990            }
991            None => {
992                self.state = ExecutorState::Failed;
993                Err(PlanError::NoPlanFound)
994            }
995        }
996    }
997
998    fn find_action(&self, name: &str) -> Option<&Action> {
999        self.actions.iter().find(|a| a.name == name)
1000    }
1001
1002    /// Check if observed world state has drifted from our expected sim state
1003    /// in ways that affect the *next* action's preconditions.
1004    fn state_has_drifted(&self, observed: &WorldState) -> bool {
1005        // Only check conditions relevant to the current step's action.
1006        if self.current >= self.steps.len() { return false; }
1007        let name = &self.steps[self.current].action_name;
1008        if let Some(action) = self.find_action(name) {
1009            // Check bool preconditions.
1010            for (k, &expected) in &action.preconditions.bools {
1011                if observed.has_bool(k) && observed.get_bool(k) != expected {
1012                    return true;
1013                }
1014            }
1015            // Check float preconditions.
1016            for (k, &min) in &action.preconditions.floats_gte {
1017                if observed.has_float(k) && observed.get_float(k) < min {
1018                    return true;
1019                }
1020            }
1021            for (k, &max) in &action.preconditions.floats_lte {
1022                if observed.has_float(k) && observed.get_float(k) > max {
1023                    return true;
1024                }
1025            }
1026        }
1027        false
1028    }
1029}
1030
1031// ── PlanError ─────────────────────────────────────────────────────────────────
1032
/// Reasons a GOAP planning request can fail.
///
/// `Display` renders a short, human-readable message for each variant.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum PlanError {
    /// The planner exhausted the search space with no plan found.
    NoPlanFound,
    /// The action library is empty.
    NoActions,
    /// The goal is already satisfied.
    AlreadySatisfied,
}
1042
1043impl std::fmt::Display for PlanError {
1044    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
1045        match self {
1046            PlanError::NoPlanFound      => write!(f, "GOAP: no plan found"),
1047            PlanError::NoActions        => write!(f, "GOAP: action library is empty"),
1048            PlanError::AlreadySatisfied => write!(f, "GOAP: goal already satisfied"),
1049        }
1050    }
1051}
1052
// Marker impl: the `Debug` derive and `Display` impl above satisfy the
// `std::error::Error` supertraits; no methods need overriding.
impl std::error::Error for PlanError {}
1054
1055// ── ActionLibrary ─────────────────────────────────────────────────────────────
1056
1057/// A named, searchable collection of [`Action`]s.
#[derive(Debug, Default, Clone)]
pub struct ActionLibrary {
    // Flat list; all name lookups in this impl are linear scans.
    actions: Vec<Action>,
}
1062
1063impl ActionLibrary {
1064    pub fn new() -> Self { Self::default() }
1065
1066    pub fn add(&mut self, action: Action) { self.actions.push(action); }
1067
1068    pub fn remove(&mut self, name: &str) {
1069        self.actions.retain(|a| a.name != name);
1070    }
1071
1072    pub fn get(&self, name: &str) -> Option<&Action> {
1073        self.actions.iter().find(|a| a.name == name)
1074    }
1075
1076    pub fn get_mut(&mut self, name: &str) -> Option<&mut Action> {
1077        self.actions.iter_mut().find(|a| a.name == name)
1078    }
1079
1080    pub fn enable(&mut self, name: &str) {
1081        if let Some(a) = self.get_mut(name) { a.disabled = false; }
1082    }
1083
1084    pub fn disable(&mut self, name: &str) {
1085        if let Some(a) = self.get_mut(name) { a.disabled = true; }
1086    }
1087
1088    pub fn all(&self) -> &[Action] { &self.actions }
1089
1090    pub fn by_tag(&self, tag: &str) -> Vec<&Action> {
1091        self.actions.iter().filter(|a| a.tags.iter().any(|t| t == tag)).collect()
1092    }
1093
1094    pub fn applicable(&self, state: &WorldState) -> Vec<&Action> {
1095        self.actions.iter().filter(|a| a.is_applicable(state)).collect()
1096    }
1097
1098    pub fn plan(
1099        &self,
1100        start: &WorldState,
1101        goal:  &WorldState,
1102        max_depth: usize,
1103    ) -> Option<Vec<String>> {
1104        GoapPlanner::plan(start, goal, &self.actions, max_depth)
1105    }
1106}
1107
1108// ── GoapAgent ─────────────────────────────────────────────────────────────────
1109
1110/// A self-contained GOAP-driven agent that combines a goal stack, an action
1111/// library, and a plan executor.
1112///
1113/// Each tick:
1114/// 1. The goal stack is updated (goals may expire).
1115/// 2. If the active goal has changed, a new plan is computed.
1116/// 3. The executor is ticked, driving the current action.
1117/// 4. The name of the current action is returned for the game to execute.
#[derive(Debug)]
pub struct GoapAgent {
    /// Display/debug name for this agent.
    pub name:     String,
    /// Priority-ordered goals; the active goal drives planning.
    pub goals:    GoalStack,
    /// The set of actions available to this agent.
    pub library:  ActionLibrary,
    /// Computes and runs plans toward the active goal.
    pub executor: PlanExecutor,
    /// Name of the goal the executor is currently planning toward, if any.
    active_goal:  Option<String>,
}
1126
1127impl GoapAgent {
1128    pub fn new(name: &str, actions: Vec<Action>, max_depth: usize) -> Self {
1129        let library  = ActionLibrary { actions: actions.clone() };
1130        let executor = PlanExecutor::new(actions, max_depth);
1131        Self {
1132            name: name.to_string(),
1133            goals: GoalStack::new(),
1134            library,
1135            executor,
1136            active_goal: None,
1137        }
1138    }
1139
1140    /// Push a goal onto the agent's goal stack.
1141    pub fn push_goal(&mut self, goal: Goal) {
1142        self.goals.push(goal);
1143    }
1144
1145    /// Set the observed world state (called from game logic each frame with
1146    /// fresh sensor data).
1147    pub fn set_world_state(&mut self, state: WorldState) {
1148        self.executor.set_world_state(state);
1149    }
1150
1151    /// Update a single bool in the world state.
1152    pub fn set_bool(&mut self, key: &str, value: bool) {
1153        self.executor.update_bool(key, value);
1154    }
1155
1156    /// Update a single float in the world state.
1157    pub fn set_float(&mut self, key: &str, value: f32) {
1158        self.executor.update_float(key, value);
1159    }
1160
1161    /// Tick the agent.  Returns the action name the agent wants to perform
1162    /// this frame, or `None` if idle.
1163    pub fn tick(&mut self, dt: f32) -> Option<&str> {
1164        self.goals.tick(dt);
1165
1166        // Check if active goal has changed.
1167        let desired = self.goals.active().map(|g| g.name.clone());
1168        if desired != self.active_goal {
1169            self.active_goal = desired.clone();
1170            if let Some(goal_name) = desired {
1171                if let Some(goal) = self.goals.iter()
1172                    .find(|g| g.name == goal_name)
1173                    .map(|g| g.state.clone())
1174                {
1175                    // Start a new plan toward the new goal.
1176                    let _ = self.executor.start(goal);
1177                }
1178            } else {
1179                self.executor.reset();
1180            }
1181        }
1182
1183        // Tick the executor.
1184        let world = self.executor.sim_state.clone();
1185        self.executor.tick(dt, |_| world.clone())
1186    }
1187
1188    pub fn current_action(&self) -> Option<&str> { self.executor.current_action() }
1189    pub fn is_idle(&self)        -> bool          { self.executor.state == ExecutorState::Idle }
1190    pub fn has_succeeded(&self)  -> bool          { self.executor.has_succeeded() }
1191    pub fn has_failed(&self)     -> bool          { self.executor.has_failed()    }
1192    pub fn plan_names(&self)     -> Vec<&str>     { self.executor.plan_names()    }
1193}
1194
1195// ── Tests ─────────────────────────────────────────────────────────────────────
1196
#[cfg(test)]
mod tests {
    use super::*;

    /// Three-action fixture: grab a weapon, attack with it, or flee.
    fn build_test_actions() -> Vec<Action> {
        let pick_up = Action::new("pick_up_weapon", 1.0)
            .require_bool("has_weapon", false)
            .effect_bool("has_weapon", true);

        let attack = Action::new("attack_enemy", 2.0)
            .require_bool("has_weapon", true)
            .require_bool("enemy_dead", false)
            .effect_bool("enemy_dead", true);

        let flee = Action::new("flee", 0.5)
            .require_bool("enemy_dead", false)
            .effect_bool("safe", true);

        vec![pick_up, attack, flee]
    }

    #[test]
    fn plan_pick_up_then_attack() {
        let mut initial = WorldState::new();
        initial.set_bool("has_weapon", false);
        initial.set_bool("enemy_dead", false);

        let mut target = WorldState::new();
        target.set_bool("enemy_dead", true);

        let plan = GoapPlanner::plan(&initial, &target, &build_test_actions(), 5)
            .expect("a plan should exist");
        assert_eq!(plan, vec!["pick_up_weapon", "attack_enemy"]);
    }

    #[test]
    fn plan_already_satisfied() {
        let mut initial = WorldState::new();
        initial.set_bool("enemy_dead", true);

        let mut target = WorldState::new();
        target.set_bool("enemy_dead", true);

        let plan = GoapPlanner::plan(&initial, &target, &build_test_actions(), 5).unwrap();
        assert!(plan.is_empty(), "Goal already satisfied — plan should be empty");
    }

    #[test]
    fn plan_no_solution() {
        // No action can set "magic_flag", so planning must fail.
        let mut target = WorldState::new();
        target.set_bool("magic_flag", true);

        let plan = GoapPlanner::plan(&WorldState::new(), &target, &build_test_actions(), 5);
        assert!(plan.is_none());
    }

    #[test]
    fn world_state_satisfies() {
        let mut state = WorldState::new();
        state.set_bool("a", true);
        state.set_float("hp", 80.0);

        let mut goal = WorldState::new();
        goal.set_bool("a", true);
        goal.set_float("hp", 50.0); // float goals mean "at least this much"

        assert!(state.satisfies(&goal));

        state.set_float("hp", 30.0); // now below the required minimum
        assert!(!state.satisfies(&goal));
    }

    #[test]
    fn action_effects_applied() {
        let action = Action::new("test", 1.0)
            .effect_bool("door_open", true)
            .effect_set_float("energy", 0.0)
            .effect_add_float("gold", 10.0);

        let mut before = WorldState::new();
        before.set_bool("door_open", false);
        before.set_float("energy", 100.0);
        before.set_float("gold", 5.0);

        let after = action.apply_effects(&before);
        assert!(after.get_bool("door_open"));
        assert_eq!(after.get_float("energy"), 0.0);
        assert_eq!(after.get_float("gold"), 15.0);
    }

    #[test]
    fn goal_stack_priority_order() {
        let mut stack = GoalStack::new();

        let mut low_state = WorldState::new();
        low_state.set_bool("low_priority", true);
        let mut high_state = WorldState::new();
        high_state.set_bool("high_priority", true);

        stack.push(Goal::new("low", low_state, 1));
        stack.push(Goal::new("high", high_state, 10));

        assert_eq!(stack.active().map(|g| g.name.as_str()), Some("high"));
    }

    #[test]
    fn goal_ttl_expiry() {
        let mut stack = GoalStack::new();
        let mut state = WorldState::new();
        state.set_bool("x", true);
        stack.push(Goal::new("temp", state, 1).with_ttl(0.5));

        assert!(stack.active().is_some());
        stack.tick(1.0); // advance well past the 0.5 s TTL
        assert!(stack.active().is_none());
    }

    #[test]
    fn executor_completes_plan() {
        let mut executor = PlanExecutor::new(build_test_actions(), 10);

        let mut world = WorldState::new();
        world.set_bool("has_weapon", false);
        world.set_bool("enemy_dead", false);
        executor.set_world_state(world);

        let mut target = WorldState::new();
        target.set_bool("enemy_dead", true);

        let result = executor.start(target);
        assert!(result.is_ok(), "Expected a plan to be found");
        assert_eq!(result.unwrap(), 2, "Expected 2-step plan");

        assert_eq!(executor.plan_names(), vec!["pick_up_weapon", "attack_enemy"]);
    }

    #[test]
    fn plan_alternatives_returns_multiple() {
        let mut initial = WorldState::new();
        initial.set_bool("has_weapon", false);
        initial.set_bool("enemy_dead", false);

        let mut target = WorldState::new();
        target.set_bool("enemy_dead", true);

        let alternatives =
            GoapPlanner::plan_alternatives(&initial, &target, &build_test_actions(), 5, 3);
        assert!(!alternatives.is_empty());
    }

    #[test]
    fn action_library_applicable() {
        let mut library = ActionLibrary::new();
        for action in build_test_actions() {
            library.add(action);
        }

        let mut world = WorldState::new();
        world.set_bool("has_weapon", false);
        world.set_bool("enemy_dead", false);

        let names: Vec<&str> = library
            .applicable(&world)
            .iter()
            .map(|a| a.name.as_str())
            .collect();
        assert!(names.contains(&"pick_up_weapon"));
        assert!(names.contains(&"flee"));
        assert!(!names.contains(&"attack_enemy")); // needs has_weapon=true
    }

    #[test]
    fn float_precondition_plan() {
        let heal = Action::new("heal", 1.0)
            .require_float_lte("hp", 50.0)
            .effect_set_float("hp", 100.0);

        let mut initial = WorldState::new();
        initial.set_float("hp", 30.0);

        let mut target = WorldState::new();
        target.set_float("hp", 80.0);

        let plan = GoapPlanner::plan(&initial, &target, &[heal], 3);
        assert!(plan.is_some());
        assert_eq!(plan.unwrap(), vec!["heal"]);
    }
}