oxi-agent 0.19.0

Agent runtime with tool-calling loop for AI coding assistants
Documentation
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
496
497
498
499
500
501
502
503
504
505
506
507
508
509
510
511
512
513
514
515
516
517
518
519
520
521
522
523
524
525
526
527
528
529
530
531
532
533
534
535
536
537
538
539
540
541
542
543
544
545
546
547
548
549
550
551
552
553
554
555
556
557
558
559
560
561
562
563
564
565
566
567
568
569
570
571
572
573
574
575
576
577
578
579
580
581
582
583
584
585
586
587
588
589
590
591
592
593
594
595
596
597
598
599
600
601
602
603
604
605
606
607
608
609
610
611
612
613
614
615
616
617
618
619
620
621
622
623
624
625
626
627
628
629
630
631
632
633
634
635
636
637
638
639
640
641
642
643
644
645
646
647
648
649
650
651
652
653
654
655
656
657
658
659
660
661
662
663
664
665
666
667
668
669
670
671
672
673
674
675
676
677
678
679
680
681
682
683
684
685
686
687
688
689
690
691
692
693
694
695
696
697
698
699
700
701
702
703
704
705
706
707
708
709
710
711
712
713
714
715
716
717
718
719
720
721
722
723
724
725
726
727
728
729
730
731
732
733
/// Core agent implementation
use crate::config::AgentConfig;
use crate::config::ShouldStopAfterTurnContext;
use crate::events::AgentEvent;
use crate::state::{AgentState, SharedState};
use crate::tools::{AgentTool, ToolRegistry};
use crate::types::{Response, StopReason};
use anyhow::{Error, Result};
use oxi_ai::{
    transform_for_provider, CompactionManager, CompactionStrategy, LlmCompactor, Model, Provider,
};
use parking_lot::RwLock;
use std::sync::atomic::{AtomicBool, Ordering};
use std::sync::Arc;

// ── ProviderResolver trait ────────────────────────────────────────

/// Trait for resolving providers and models within an Agent.
///
/// This abstracts away global static registries, allowing SDK users
/// to provide isolated provider/model lookups.
///
/// When using the SDK (`oxi-sdk`), the `Oxi` engine implements this trait.
/// When using `Agent::new()` directly, a global fallback is used.
pub trait ProviderResolver: Send + Sync + 'static {
    /// Resolve a provider by name, returning an Arc handle.
    ///
    /// Returns `None` when no provider is registered under `name`.
    fn resolve_provider(&self, name: &str) -> Option<Arc<dyn Provider>>;

    /// Resolve a model ID ("provider/model" or bare "model") to a Model.
    ///
    /// Returns `None` when the ID cannot be resolved.
    fn resolve_model(&self, model_id: &str) -> Option<Model>;
}

/// Global provider resolver — uses `oxi_ai` global functions.
///
/// This is the default resolver when using `Agent::new()`, preserving
/// backward compatibility with existing CLI usage.
pub(crate) struct GlobalProviderResolver;

impl ProviderResolver for GlobalProviderResolver {
    /// Look the provider up in the process-global `oxi_ai` registry.
    fn resolve_provider(&self, name: &str) -> Option<Arc<dyn Provider>> {
        let provider = oxi_ai::get_provider(name)?;
        Some(Arc::from(provider))
    }

    /// Delegate model-ID resolution to the crate-level helper.
    fn resolve_model(&self, model_id: &str) -> Option<Model> {
        crate::model_id::resolve_model_from_id(model_id)
    }
}

// ── AgentInner ────────────────────────────────────────────────────

/// Mutable agent internals protected by a read-write lock.
///
/// Cloning deep-copies `config` and bumps the refcount of `provider`
/// (an `Arc` handle), exactly as the previous hand-written impl did —
/// `#[derive(Clone)]` produces the identical member-wise clone.
#[derive(Clone)]
struct AgentInner {
    /// Agent configuration (model ID, prompts, limits, retry policy, …).
    config: AgentConfig,
    /// Active LLM provider used for completions.
    provider: Arc<dyn Provider>,
}

/// Agent runtime.
///
/// Manages provider, tool registry, state, and compaction, providing an
/// agentic loop for prompt execution, model switching, tool calls, and fallback.
///
/// Supports session continuation via [`continue_with`] and tokio-native
/// event streaming via [`run_tokio_stream`].
///
/// [`continue_with`]: Agent::continue_with
/// [`run_tokio_stream`]: Agent::run_tokio_stream
pub struct Agent {
    /// Config and provider behind one lock so model switches swap both atomically.
    inner: RwLock<AgentInner>,
    /// Tools the agent may invoke during a run.
    tools: Arc<ToolRegistry>,
    /// Shared conversation/run state (messages, stop reason; see `export_state`).
    state: SharedState,
    /// Manages context-window compaction for long conversations.
    compaction_manager: CompactionManager,
    /// Caller-installed lifecycle hooks (steering, follow-up, stop checks).
    hooks: parking_lot::RwLock<crate::config::AgentHooks>,
    /// Guard: true while a run is in progress. Prevents concurrent runs.
    is_running: AtomicBool,
    /// Provider/model resolver. Uses global functions by default,
    /// or a custom resolver when created via `new_with_resolver()`.
    resolver: Arc<dyn ProviderResolver>,
}

impl Agent {
    /// Create a new agent with the given provider, config, and tool registry.
    ///
    /// Uses the global `oxi_ai::get_provider()` / `resolve_model_from_id()`
    /// for model switching. For isolated instances, use [`new_with_resolver`].
    ///
    /// [`new_with_resolver`]: Agent::new_with_resolver
    pub fn new(provider: Arc<dyn Provider>, config: AgentConfig, tools: Arc<ToolRegistry>) -> Self {
        Self::build_inner(provider, config, tools, Arc::new(GlobalProviderResolver))
    }

    /// Create an agent with a custom provider/model resolver.
    ///
    /// This is the preferred constructor for SDK usage where provider
    /// and model registries must be isolated from global state.
    ///
    /// Identical to [`Agent::new`] except the resolver is caller-supplied.
    pub fn new_with_resolver(
        provider: Arc<dyn Provider>,
        config: AgentConfig,
        tools: Arc<ToolRegistry>,
        resolver: Arc<dyn ProviderResolver>,
    ) -> Self {
        Self::build_inner(provider, config, tools, resolver)
    }

    /// Internal constructor shared by `new()` and `new_with_resolver()`.
    ///
    /// Sets up the compaction manager and, when compaction is enabled and
    /// the configured model can be resolved, pre-installs an LLM-backed
    /// compactor. If the model ID fails to resolve, the compactor is simply
    /// not installed (no error).
    fn build_inner(
        provider: Arc<dyn Provider>,
        config: AgentConfig,
        tools: Arc<ToolRegistry>,
        resolver: Arc<dyn ProviderResolver>,
    ) -> Self {
        let mut compaction_manager =
            CompactionManager::new(config.compaction_strategy.clone(), config.context_window);

        // Pre-initialize the LLM compactor if compaction is enabled
        if config.compaction_strategy != CompactionStrategy::Disabled {
            if let Some(model) = resolver.resolve_model(&config.model_id) {
                // `model` is owned and not used again — pass it by value
                // (the previous `model.clone()` was a redundant copy).
                let llm_compactor = Arc::new(LlmCompactor::new(model, Arc::clone(&provider)));
                compaction_manager.set_compactor(llm_compactor);
            }
        }

        Self {
            inner: RwLock::new(AgentInner { config, provider }),
            tools,
            state: SharedState::new(),
            compaction_manager,
            hooks: parking_lot::RwLock::new(crate::config::AgentHooks::default()),
            is_running: AtomicBool::new(false),
            resolver,
        }
    }

    /// Create an agent with an empty tool registry.
    pub fn new_empty(provider: Arc<dyn Provider>, config: AgentConfig) -> Self {
        Self::new(provider, config, Arc::new(ToolRegistry::new()))
    }

    /// Get the agent configuration (read guard).
    ///
    /// The guard must be dropped before calling `inner_mut()`; holding both
    /// from one thread would block on the write lock.
    fn config(&self) -> parking_lot::RwLockReadGuard<'_, AgentInner> {
        self.inner.read()
    }

    /// Get a write guard for the agent inner state
    fn inner_mut(&self) -> parking_lot::RwLockWriteGuard<'_, AgentInner> {
        self.inner.write()
    }

    /// Get the current model ID
    pub fn model_id(&self) -> String {
        // Clone out of the read guard so no lock escapes to the caller.
        self.config().config.model_id.clone()
    }

    /// Switch the model used for future LLM calls.
    ///
    /// If the new model uses a different provider API, the conversation
    /// history is automatically transformed for cross-provider compatibility
    /// (e.g. thinking blocks are converted to `<thinking>` tags).
    ///
    /// # Arguments
    /// * `model_id` - New model ID in `provider/model` format
    ///
    /// # Returns
    /// `Ok(())` on success, or an error if the model/provider is unknown
    pub fn switch_model(&self, model_id: &str) -> Result<()> {
        let new_model = self
            .resolver
            .resolve_model(model_id)
            .ok_or_else(|| Error::msg(format!("Model '{}' not found", model_id)))?;

        // Create the new provider via resolver
        let new_provider = self
            .resolver
            .resolve_provider(&new_model.provider)
            .ok_or_else(|| Error::msg(format!("Provider '{}' not found", new_model.provider)))?;

        // Detect API change and transform messages if needed.
        // Scoped block: the read guard must be dropped before taking the
        // write guard below.
        {
            let inner = self.config();
            let old_model_id = &inner.config.model_id;
            // If the old model ID no longer resolves, fall back to the
            // Anthropic Messages API as the comparison baseline.
            let old_api = self
                .resolver
                .resolve_model(old_model_id)
                .map(|m| m.api)
                .unwrap_or(oxi_ai::Api::AnthropicMessages);

            if old_api != new_model.api {
                // Transform existing messages for the new provider
                let messages = self.state.get_state().messages.clone();
                let transformed = transform_for_provider(&messages, &old_api, &new_model.api);
                self.state.update(|s| {
                    s.replace_messages(transformed);
                });
            }
        }
        // NOTE(review): the read lock is released before the write below, so
        // a concurrent switch could interleave between transform and update —
        // confirm callers never race this method.

        // Update config and provider atomically
        let mut inner = self.inner_mut();
        inner.config.model_id = model_id.to_string();
        inner.provider = new_provider;

        Ok(())
    }

    /// Switch the model using a pre-resolved `Model` object.
    ///
    /// This is useful when the caller has already looked up the model
    /// and optionally created the provider.
    ///
    /// Mirrors `switch_model`, but skips the model-ID lookup since the
    /// caller supplies the `Model` directly.
    pub fn switch_to_model(&self, model: &oxi_ai::Model) -> Result<()> {
        // Reconstruct the canonical "provider/model" ID for storage in config.
        let model_id = format!("{}/{}", model.provider, model.id);
        let new_provider = self
            .resolver
            .resolve_provider(&model.provider)
            .ok_or_else(|| Error::msg(format!("Provider '{}' not found", model.provider)))?;

        // Detect API change and transform messages if needed.
        // Scoped so the read guard is released before the write lock below.
        {
            let inner = self.config();
            // Fall back to the Anthropic Messages API when the old model ID
            // no longer resolves.
            let old_api = self
                .resolver
                .resolve_model(&inner.config.model_id)
                .map(|m| m.api)
                .unwrap_or(oxi_ai::Api::AnthropicMessages);

            if old_api != model.api {
                let messages = self.state.get_state().messages.clone();
                let transformed = transform_for_provider(&messages, &old_api, &model.api);
                self.state.update(|s| {
                    s.replace_messages(transformed);
                });
            }
        }

        let mut inner = self.inner_mut();
        inner.config.model_id = model_id;
        inner.provider = new_provider;

        Ok(())
    }

    /// Get a handle to the tool registry.
    pub fn tools(&self) -> Arc<ToolRegistry> {
        Arc::clone(&self.tools)
    }

    /// Get a snapshot of the current agent state.
    pub fn state(&self) -> AgentState {
        self.state.get_state()
    }

    /// Reset agent state for a new conversation.
    ///
    /// Only the conversation state is cleared; config, tools, and hooks
    /// are left untouched.
    pub fn reset(&self) {
        self.state.reset();
    }

    /// Register a tool that the agent can invoke during a run.
    pub fn add_tool<T: AgentTool + 'static>(&self, tool: T) {
        self.tools.register(tool);
    }

    /// Update the system prompt for future interactions.
    pub fn set_system_prompt(&self, prompt: String) {
        self.inner_mut().config.system_prompt = Some(prompt);
    }

    /// Get the compaction manager
    pub fn compaction_manager(&self) -> &CompactionManager {
        &self.compaction_manager
    }

    /// Run the agent with a prompt, collecting all events into a vector.
    ///
    /// Convenience wrapper around [`run_with_channel`] that gathers every
    /// [`AgentEvent`] produced during the run.
    pub async fn run(&self, prompt: String) -> Result<(Response, Vec<AgentEvent>)> {
        let (tx, rx) = std::sync::mpsc::channel::<AgentEvent>();
        let outcome = self.run_with_channel(prompt, tx).await;
        // By the time the run returns, every sender clone has been dropped,
        // so this drains all buffered events and then terminates.
        let events: Vec<AgentEvent> = rx.iter().collect();
        outcome.map(|response| (response, events))
    }

    /// Run the agent, delivering events through the provided channel.
    ///
    /// Delegates to [`AgentLoop`] which implements the same 2-level agentic
    /// loop matching pi-mono's architecture:
    ///
    /// ```text
    /// AgentLoop.run_messages()
    ///   Outer loop (follow-up messages):
    ///     Inner loop (tool calls + steering):
    ///       1. Inject pending messages (steering)
    ///       2. Compaction check
    ///       3. Stream LLM response (with accumulated partial messages)
    ///       4. Execute tool calls if any
    ///       5. Emit turn_end
    ///       6. Check shouldStopAfterTurn
    ///       7. Poll steering messages
    ///     Check follow-up messages
    ///     Exit
    /// ```
    pub async fn run_with_channel(
        &self,
        prompt: String,
        tx: std::sync::mpsc::Sender<AgentEvent>,
    ) -> Result<Response> {
        // pi-mono: Agent.prompt() throws if activeRun exists. Refuse to start
        // while another run is in flight; concurrent runs would corrupt
        // shared state.
        let acquired = self
            .is_running
            .compare_exchange(false, true, Ordering::SeqCst, Ordering::SeqCst)
            .is_ok();
        if !acquired {
            return Err(Error::msg("Agent is already running"));
        }

        // Drive the run, then clear the guard no matter how it ended.
        let outcome = self.run_with_channel_inner(prompt, tx).await;
        self.is_running.store(false, Ordering::SeqCst);
        outcome
    }

    /// Inner implementation of run_with_channel, called after the running guard is set.
    ///
    /// Snapshots the config under the read lock, drives an [`AgentLoop`] to
    /// completion while forwarding every event to `tx`, and on success syncs
    /// the loop's state back into `self.state` and extracts the final
    /// assistant text as the response.
    async fn run_with_channel_inner(
        &self,
        prompt: String,
        tx: std::sync::mpsc::Sender<AgentEvent>,
    ) -> Result<Response> {
        use crate::agent_loop::AgentLoop;

        let (
            provider,
            max_iterations,
            system_prompt,
            temperature,
            max_tokens,
            compaction_strategy,
            context_window,
            api_key,
            workspace_dir,
        ) = {
            let inner = self.inner.read();
            (
                Arc::clone(&inner.provider) as Arc<dyn Provider>,
                inner.config.max_iterations,
                inner.config.system_prompt.clone(),
                inner.config.temperature,
                inner.config.max_tokens,
                inner.config.compaction_strategy.clone(),
                inner.config.context_window,
                inner.config.api_key.clone(),
                inner.config.workspace_dir.clone(),
            )
        }; // release read lock

        // Build AgentLoopConfig from Agent's config
        let loop_config = crate::agent_loop::config::AgentLoopConfig {
            model_id: self.model_id(),
            system_prompt,
            max_iterations,
            temperature: temperature.unwrap_or(1.0) as f32,
            max_tokens: max_tokens.unwrap_or(4096) as u32,
            tool_execution: crate::config::ToolExecutionMode::Sequential,
            compaction_strategy,
            compaction_instruction: None,
            context_window,
            session_id: None,
            transport: None,
            compact_on_start: false,
            max_retry_delay_ms: None,
            auto_retry_enabled: true,
            auto_retry_max_attempts: 3,
            auto_retry_base_delay_ms: 1000,
            api_key,
            workspace_dir,
        };

        // Give the loop a detached snapshot of the current state and sync it
        // back only on success, so a failed run leaves `self.state` untouched.
        let fresh_state = crate::state::SharedState::new();
        let current = self.state.get_state();
        fresh_state.update(|s| {
            *s = current;
        });

        let agent_loop = AgentLoop::new_with_resolver(
            provider,
            loop_config,
            Arc::clone(&self.tools),
            fresh_state,
            Arc::clone(&self.resolver),
        );

        // Pre-populate steering/follow-up from hooks
        {
            let hooks = self.hooks.read();
            if let Some(ref get_steering) = hooks.get_steering_messages {
                for msg_text in get_steering() {
                    agent_loop.steer(oxi_ai::Message::User(oxi_ai::UserMessage::new(msg_text)));
                }
            }
            if let Some(ref get_follow_up) = hooks.get_follow_up_messages {
                for msg_text in get_follow_up() {
                    agent_loop.follow_up(oxi_ai::Message::User(oxi_ai::UserMessage::new(msg_text)));
                }
            }
        }

        // Wire should_stop_after_turn hook: share AgentLoop's external_stop
        // Arc with the emit callback. When the hook fires (Ctrl+C detected),
        // it sets ext_stop. AgentLoop checks this in should_stop_after_turn().
        //
        // Arc<dyn Fn> can be cloned, so we read it without consuming.
        let maybe_hook = {
            let hooks_r = self.hooks.read();
            hooks_r.should_stop_after_turn.clone()
        };
        let ext_stop = agent_loop.external_stop().clone();

        // Create emit callback that sends through the channel.
        // AgentLoop calls this synchronously. The std mpsc channel is
        // unbounded, so send() never blocks and never drops events.
        // (Previous comment incorrectly referenced UnboundedSender.)
        let tx_emit = tx.clone();

        // Run the agent loop. `prompt` is moved in — it is not used again
        // (the previous `prompt.clone()` was a redundant allocation).
        tracing::info!("[AGENT] Starting agent run with channel");
        let result = agent_loop
            .run(prompt, move |event: AgentEvent| {
                // Forward event to channel (std::sync::mpsc — send from sync context)
                tracing::info!("[AGENT-EMIT] Event: {:?}", std::mem::discriminant(&event));
                if let Err(e) = tx_emit.send(event.clone()) {
                    tracing::error!(
                        "[AGENT-EMIT] Failed to send agent event to channel: {:?}",
                        e
                    );
                } else {
                    tracing::info!("[AGENT-EMIT] Successfully sent event");
                }

                // On TurnEnd, poll the should_stop_after_turn hook to detect Ctrl+C.
                // The hook wraps an AtomicBool (should_stop_flag from AgentSession).
                // We can't pass real context here, but the TUI hook only checks
                // the AtomicBool anyway: |ctx| should_stop_flag.load(SeqCst).
                if let Some(ref hook) = maybe_hook {
                    if let AgentEvent::TurnEnd {
                        ref assistant_message,
                        ref tool_results,
                        ..
                    } = event
                    {
                        // Build real context from actual turn data
                        let asst = match assistant_message {
                            oxi_ai::Message::Assistant(a) => a.clone(),
                            _ => {
                                // Can't extract assistant message, just check the hook with empty ctx
                                let ctx = ShouldStopAfterTurnContext {
                                    message: oxi_ai::AssistantMessage::new(
                                        oxi_ai::Api::OpenAiCompletions,
                                        "agent",
                                        "agent-model",
                                    ),
                                    tool_results: Vec::new(),
                                    iteration: 0,
                                };
                                if hook(&ctx) {
                                    ext_stop.store(true, Ordering::SeqCst);
                                }
                                return;
                            }
                        };
                        let ctx = ShouldStopAfterTurnContext {
                            message: asst,
                            tool_results: tool_results.clone(),
                            iteration: 0,
                        };
                        if hook(&ctx) {
                            ext_stop.store(true, Ordering::SeqCst);
                        }
                    }
                }
            })
            .await;

        match result {
            Ok(_events) => {
                // Sync state back from AgentLoop
                let loop_state = agent_loop.state().get_state();
                self.state.update(|s| {
                    *s = loop_state;
                });

                // Extract final response text from state: the text block of
                // the most recent assistant message, if any.
                let state = self.state.get_state();
                let final_text = state
                    .messages
                    .iter()
                    .rev()
                    .find_map(|m| match m {
                        oxi_ai::Message::Assistant(a) => a.content.iter().find_map(|b| match b {
                            oxi_ai::ContentBlock::Text(t) => Some(t.text.clone()),
                            _ => None,
                        }),
                        _ => None,
                    })
                    .unwrap_or_default();

                let stop_reason = state.stop_reason.unwrap_or(StopReason::Stop);

                Ok(Response {
                    content: final_text,
                    stop_reason,
                })
            }
            Err(e) => Err(e),
        }
    }

    // ── Helper methods for the agentic loop ────────────────────────

    /// Set hooks for the agent loop.
    ///
    /// Replaces the entire hook set; previously installed hooks are dropped.
    pub fn set_hooks(&self, hooks: crate::config::AgentHooks) {
        *self.hooks.write() = hooks;
    }

    /// Run the agent, invoking `on_event` for each [`AgentEvent`] produced.
    ///
    /// Blocking convenience wrapper suitable for callers that prefer a
    /// callback-based API over a channel.
    pub async fn run_streaming<F>(&self, prompt: String, mut on_event: F) -> Result<Response>
    where
        F: FnMut(AgentEvent) + Send,
    {
        let (tx, rx) = std::sync::mpsc::channel::<AgentEvent>();
        let outcome = self.run_with_channel(prompt, tx).await;
        // All senders are gone once the run returns, so iteration terminates
        // after the buffered events are delivered.
        for event in rx.iter() {
            on_event(event);
        }
        outcome
    }

    // ── Session persistence ────────────────────────────────────────

    /// Export the agent state as a JSON value.
    ///
    /// The serialized state includes conversation messages, token counts,
    /// iteration progress, and stop reason. Use [`import_state`] to restore.
    ///
    /// [`import_state`]: Agent::import_state
    pub fn export_state(&self) -> Result<serde_json::Value> {
        let state = self.state.get_state();
        serde_json::to_value(&state).map_err(|e| Error::msg(format!("State export failed: {}", e)))
    }

    /// Import agent state from a JSON value.
    ///
    /// Restores conversation history, token counts, and iteration progress.
    /// Typically used together with [`export_state`] for session persistence.
    ///
    /// [`export_state`]: Agent::export_state
    pub fn import_state(&self, value: serde_json::Value) -> Result<()> {
        let restored: AgentState = serde_json::from_value(value)
            .map_err(|e| Error::msg(format!("State import failed: {}", e)))?;
        self.state.update(|s| *s = restored);
        Ok(())
    }

    // ── Session continuation ───────────────────────────────────────

    /// Continue the current session with a new prompt.
    ///
    /// Preserves the existing conversation state and appends the new prompt,
    /// enabling multi-turn interactions within the same session.
    ///
    /// Currently an alias for [`run`], whose implementation already keeps
    /// accumulated state between invocations.
    ///
    /// [`run`]: Agent::run
    pub async fn continue_with(&self, prompt: String) -> Result<(Response, Vec<AgentEvent>)> {
        // The previous body was a byte-for-byte duplicate of `run()`.
        // Delegate so the two entry points cannot drift apart.
        self.run(prompt).await
    }

    // ── Tokio-native streaming ─────────────────────────────────────

    /// Run the agent with tokio-native event streaming.
    ///
    /// Returns a `tokio::sync::mpsc::Receiver` for events and a
    /// `JoinHandle` for the response. This is the preferred API for
    /// async runtimes (WebSocket/SSE gateways, tokio-based servers).
    ///
    /// NOTE(review): unlike `run_with_channel`, this method sets
    /// `self.is_running` but never clears it — the spawned task only clears
    /// a local `Arc<AtomicBool>` (see below) that nothing reads. As written,
    /// every subsequent run on this Agent fails with "Agent is already
    /// running". Confirm and fix (e.g. move the flag into an `Arc` field the
    /// task can clear).
    ///
    /// # Example
    ///
    /// ```ignore
    /// let (rx, handle) = agent.run_tokio_stream("Explain Rust".into()).await?;
    /// while let Some(event) = rx.recv().await {
    ///     println!("Event: {:?}", event.type_name());
    /// }
    /// let response = handle.await??;
    /// ```
    pub async fn run_tokio_stream(
        &self,
        prompt: String,
    ) -> Result<(
        tokio::sync::mpsc::Receiver<AgentEvent>,
        tokio::task::JoinHandle<Result<Response>>,
    )> {
        // Bounded channel: up to 256 events buffered before try_send fails.
        let (tx, rx) = tokio::sync::mpsc::channel::<AgentEvent>(256);

        if self
            .is_running
            .compare_exchange(false, true, Ordering::SeqCst, Ordering::SeqCst)
            .is_err()
        {
            return Err(Error::msg("Agent is already running"));
        }

        let should_stop_hook = self.hooks.read().should_stop_after_turn.clone();

        // Clone everything the 'static spawned task needs — `self` cannot be
        // captured by the task.
        let state = self.state.clone();
        let inner = self.inner.read().clone();
        let tools = Arc::clone(&self.tools);
        let resolver = Arc::clone(&self.resolver);

        // Build AgentLoopConfig
        let loop_config = crate::agent_loop::config::AgentLoopConfig {
            model_id: inner.config.model_id.clone(),
            system_prompt: inner.config.system_prompt.clone(),
            max_iterations: inner.config.max_iterations,
            temperature: inner.config.temperature.unwrap_or(1.0) as f32,
            max_tokens: inner.config.max_tokens.unwrap_or(4096) as u32,
            tool_execution: crate::config::ToolExecutionMode::Sequential,
            compaction_strategy: inner.config.compaction_strategy.clone(),
            compaction_instruction: None,
            context_window: inner.config.context_window,
            session_id: None,
            transport: None,
            compact_on_start: false,
            max_retry_delay_ms: None,
            auto_retry_enabled: true,
            auto_retry_max_attempts: 3,
            auto_retry_base_delay_ms: 1000,
            api_key: inner.config.api_key.clone(),
            workspace_dir: inner.config.workspace_dir.clone(),
        };

        let provider: Arc<dyn Provider> = Arc::clone(&inner.provider);

        // Create fresh state from current.
        // NOTE(review): this detached copy is never synced back into
        // `self.state`, so `agent.state()` will NOT reflect this run —
        // unlike `run_with_channel`, which syncs back on success.
        let fresh_state = SharedState::new();
        let current = state.get_state();
        fresh_state.update(|s| *s = current);

        let agent_loop = crate::agent_loop::AgentLoop::new_with_resolver(
            provider,
            loop_config,
            tools,
            fresh_state,
            resolver,
        );

        let maybe_hook = should_stop_hook;
        let ext_stop = agent_loop.external_stop().clone();

        // NOTE(review): this local flag is set/cleared but never read
        // anywhere — it does not guard `self.is_running` (see doc comment).
        let is_running = Arc::new(AtomicBool::new(true));
        let is_running_clone = Arc::clone(&is_running);

        let handle = tokio::task::spawn(async move {
            let result = agent_loop
                .run(prompt, move |event: AgentEvent| {
                    // Forward to tokio channel (non-blocking).
                    // NOTE(review): try_send silently DROPS events when the
                    // 256-slot buffer is full or the receiver is gone.
                    let _ = tx.try_send(event.clone());

                    // On TurnEnd, poll the stop hook; a non-Assistant message
                    // means there is no usable context, so skip the check.
                    if let Some(ref hook) = maybe_hook {
                        if let AgentEvent::TurnEnd {
                            ref assistant_message,
                            ref tool_results,
                            ..
                        } = event
                        {
                            let asst = match assistant_message {
                                oxi_ai::Message::Assistant(a) => a.clone(),
                                _ => return,
                            };
                            let ctx = ShouldStopAfterTurnContext {
                                message: asst,
                                tool_results: tool_results.clone(),
                                iteration: 0,
                            };
                            if hook(&ctx) {
                                ext_stop.store(true, Ordering::SeqCst);
                            }
                        }
                    }
                })
                .await;

            // Clear the (unused, local) running flag
            is_running_clone.store(false, Ordering::SeqCst);

            match result {
                Ok(_events) => {
                    // NOTE(review): the loop ran against the detached
                    // `fresh_state` copy above, which is dropped here —
                    // `agent.state()` will not see this run's messages, and
                    // the returned Response carries no content. Confirm this
                    // is intended or sync state back like run_with_channel.
                    Ok(Response {
                        content: String::new(),
                        stop_reason: StopReason::Stop,
                    })
                }
                Err(e) => Err(e),
            }
        });

        Ok((rx, handle))
    }
}