// bamboo_server/session_app/execute.rs
1//! Execute use case: prepare a session for agent execution.
2
3use bamboo_agent_core::Role;
4use bamboo_domain::reasoning::ReasoningEffort;
5use bamboo_domain::Session;
6
7use super::errors::ExecutePreparationError;
8use super::provider_model::{
9    persist_legacy_model_provider, persist_model_ref, session_effective_model_ref,
10};
11use super::repository::SessionAccess;
12use super::types::{
13    ExecuteClientSync, ExecuteInput, ExecutePreparationOutcome, ExecuteSyncReason,
14    ExecutionConfigSnapshot, ServerExecuteSnapshot,
15};
16
/// Prepare an execute: load session, resolve model/reasoning, validate,
/// update metadata, return outcome.
///
/// Resolution order depends on `config.provider_model_ref_enabled`:
/// - ON (new):  session.model_ref → request.model_ref → config.default_model_ref,
///   falling back to the legacy cascade when no structured ref exists anywhere.
/// - OFF (old): session.model → config.default_model → request.model.
///
/// The caller (handler) is responsible for runner reservation and agent spawning
/// based on the returned outcome.
///
/// # Errors
///
/// Returns [`ExecutePreparationError::NotFound`] when no session matches
/// `input.session_id`; repository errors propagate from `load_session`.
pub async fn prepare_execute(
    repo: &dyn SessionAccess,
    config: ExecutionConfigSnapshot,
    input: ExecuteInput,
) -> Result<ExecutePreparationOutcome, ExecutePreparationError> {
    // ---- Load session ----
    let mut session = repo
        .load_session(&input.session_id)
        .await?
        .ok_or_else(|| ExecutePreparationError::NotFound(input.session_id.clone()))?;

    let is_child_session = session.kind == bamboo_agent_core::SessionKind::Child;
    // Snapshot is taken before any mutation below, so sync checks compare
    // against the state the client could actually have observed.
    let server_snapshot = ServerExecuteSnapshot::from_session(&session);

    // ---- Client sync check ----
    if let Some(reason) = evaluate_client_sync(input.client_sync.as_ref(), &server_snapshot) {
        return Ok(ExecutePreparationOutcome::SyncMismatch {
            reason,
            server_snapshot,
        });
    }

    // ---- Resolve model cascade ----
    // Flag ON (new): session.model_ref → request.model_ref → config.default_model_ref
    // Flag OFF (old): session.model → config.default_model → request.model
    let (effective_model_ref, effective_model, model_source) = if config.provider_model_ref_enabled
    {
        resolve_model_ref_cascade(&session, &input, &config)
    } else {
        let (effective_model, model_source) = resolve_model_cascade(&session, &input, &config);
        (None, effective_model, model_source)
    };

    // No usable model anywhere in the cascade: the client must supply one.
    let Some(effective_model) = effective_model else {
        return Ok(ExecutePreparationOutcome::ModelRequired);
    };

    // ---- Resolve reasoning effort cascade: session → request → provider default ----
    let effective_reasoning_effort = session
        .reasoning_effort
        .or(input.request_reasoning_effort)
        .or(config.default_reasoning_effort);

    // ---- Reasoning source ----
    // Mirrors the cascade above so the label always names the winning tier.
    let reasoning_effort_source = if session.reasoning_effort.is_some() {
        "session"
    } else if input.request_reasoning_effort.is_some() {
        "request"
    } else if config.default_reasoning_effort.is_some() {
        "provider_default"
    } else {
        "none"
    };

    // ---- Image fallback validation ----
    if let Err(error) =
        validate_image_fallback_for_session(&session, config.image_fallback.as_ref())
    {
        return Ok(ExecutePreparationOutcome::ImageFallbackError(error));
    }

    // ---- Check for pending user message ----
    if !server_snapshot.has_pending_user_message {
        return Ok(ExecutePreparationOutcome::NoPendingMessage { server_snapshot });
    }

    // ---- Update session metadata ----
    // Only reached once all validations passed, so a rejected execute never
    // mutates the stored session.
    if let Some(model_ref) = effective_model_ref.as_ref() {
        persist_model_ref(&mut session, model_ref);
    } else {
        persist_legacy_model_provider(
            &mut session,
            Some(effective_model.as_str()),
            Some(config.provider_name.as_str()),
        );
    }
    session.reasoning_effort = effective_reasoning_effort;

    session
        .metadata
        .insert("model_source".to_string(), model_source.to_string());

    if effective_reasoning_effort.is_some() {
        session.metadata.insert(
            "reasoning_effort_source".to_string(),
            reasoning_effort_source.to_string(),
        );
        session.metadata.insert(
            "reasoning_effort_compat".to_string(),
            effective_reasoning_effort
                .map(ReasoningEffort::as_str)
                .unwrap_or_default()
                .to_string(),
        );
    } else {
        // No effort anywhere: clear stale markers from earlier executes.
        session.metadata.remove("reasoning_effort_source");
        session.metadata.remove("reasoning_effort_compat");
    }

    // ---- Skill mode ----
    // Absent field = keep current mode; blank string = explicit clear.
    if let Some(skill_mode) = input.request_skill_mode {
        let trimmed = skill_mode.trim();
        if trimmed.is_empty() {
            session.metadata.remove("skill_mode");
        } else {
            session
                .metadata
                .insert("skill_mode".to_string(), trimmed.to_string());
        }
    }

    // ---- Consume pending conclusion with options resume ----
    consume_pending_conclusion_with_options_resume(&mut session);

    Ok(ExecutePreparationOutcome::Ready {
        session: Box::new(session),
        effective_model,
        effective_reasoning_effort,
        model_source,
        reasoning_source: reasoning_effort_source,
        is_child_session,
    })
}
145
146/// Old-path model resolution: session.model → config.default_model → request.model
147pub(crate) fn resolve_model_cascade(
148    session: &Session,
149    input: &ExecuteInput,
150    config: &ExecutionConfigSnapshot,
151) -> (Option<String>, &'static str) {
152    let session_model = normalize_model(Some(session.model.as_str()));
153    let request_model = normalize_model(input.request_model.as_deref());
154    let request_model_used = request_model.is_some();
155    let model_source = if session_model.is_some() {
156        "session"
157    } else if config.default_model.is_some() {
158        "provider_default"
159    } else if request_model_used {
160        "request"
161    } else {
162        "none"
163    };
164    let effective_model = session_model
165        .or_else(|| config.default_model.clone())
166        .or(request_model);
167
168    (effective_model, model_source)
169}
170
171/// New-path model resolution: session.model_ref → request.model_ref → config.default_model_ref.
172pub(crate) fn resolve_model_ref_cascade(
173    session: &Session,
174    input: &ExecuteInput,
175    config: &ExecutionConfigSnapshot,
176) -> (
177    Option<bamboo_domain::ProviderModelRef>,
178    Option<String>,
179    &'static str,
180) {
181    let session_model_ref = session_effective_model_ref(session);
182    let request_model_ref = super::provider_model::derive_model_ref(
183        input.request_model_ref.as_ref(),
184        input.request_provider.as_deref(),
185        input.request_model.as_deref(),
186    );
187    let config_model_ref = config.default_model_ref.clone();
188
189    let (effective_model_ref, model_source) = if let Some(model_ref) = session_model_ref {
190        (Some(model_ref), "session")
191    } else if let Some(model_ref) = request_model_ref {
192        (Some(model_ref), "request")
193    } else if let Some(model_ref) = config_model_ref {
194        (Some(model_ref), "provider_default")
195    } else {
196        (None, "none")
197    };
198
199    if let Some(model_ref) = effective_model_ref {
200        let effective_model = normalize_model(Some(model_ref.model.as_str()));
201        (Some(model_ref), effective_model, model_source)
202    } else {
203        let (effective_model, legacy_source) = resolve_model_cascade(session, input, config);
204        (None, effective_model, legacy_source)
205    }
206}
207
208// ---- Internal helpers ----
209
// Trim the model name and discard blank or sentinel ("unknown") values.
fn normalize_model(model: Option<&str>) -> Option<String> {
    let trimmed = model?.trim();
    if trimmed.is_empty() || trimmed == "unknown" {
        None
    } else {
        Some(trimmed.to_owned())
    }
}
216
217pub fn evaluate_client_sync(
218    client_sync: Option<&ExecuteClientSync>,
219    server_snapshot: &ServerExecuteSnapshot,
220) -> Option<ExecuteSyncReason> {
221    let client_sync = client_sync?;
222
223    let client_pending_question_tool_call_id = client_sync
224        .client_pending_question_tool_call_id
225        .as_deref()
226        .map(str::trim)
227        .filter(|value| !value.is_empty());
228    let server_pending_question_tool_call_id = server_snapshot
229        .pending_question_tool_call_id
230        .as_deref()
231        .map(str::trim)
232        .filter(|value| !value.is_empty());
233
234    if client_sync.client_has_pending_question != server_snapshot.has_pending_question {
235        return Some(ExecuteSyncReason::PendingQuestionMismatch);
236    }
237
238    if client_sync.client_has_pending_question
239        && client_pending_question_tool_call_id.is_some()
240        && client_pending_question_tool_call_id != server_pending_question_tool_call_id
241    {
242        return Some(ExecuteSyncReason::PendingQuestionMismatch);
243    }
244
245    if client_sync.client_message_count != server_snapshot.message_count {
246        return Some(ExecuteSyncReason::MessageCountMismatch);
247    }
248
249    let client_last_message_id = client_sync
250        .client_last_message_id
251        .as_deref()
252        .map(str::trim)
253        .filter(|value| !value.is_empty());
254    let server_last_message_id = server_snapshot
255        .last_message_id
256        .as_deref()
257        .map(str::trim)
258        .filter(|value| !value.is_empty());
259
260    if client_last_message_id != server_last_message_id {
261        return Some(ExecuteSyncReason::LastMessageIdMismatch);
262    }
263
264    None
265}
266
267fn validate_image_fallback_for_session(
268    session: &Session,
269    image_fallback: Option<&bamboo_engine::ImageFallbackConfig>,
270) -> Result<(), String> {
271    use bamboo_engine::ImageFallbackMode;
272
273    if matches!(
274        image_fallback,
275        Some(bamboo_engine::ImageFallbackConfig {
276            mode: ImageFallbackMode::Error,
277            ..
278        })
279    ) {
280        let images_seen = session
281            .messages
282            .iter()
283            .filter_map(|message| message.content_parts.as_ref())
284            .flat_map(|parts| parts.iter())
285            .filter(|part| matches!(part, bamboo_agent_core::MessagePart::ImageUrl { .. }))
286            .count();
287
288        if images_seen > 0 {
289            return Err(format!(
290                "This server does not currently support image inputs (found {images_seen} image part(s)). \
291                 Configure hooks.image_fallback.mode='placeholder' or 'ocr' to degrade gracefully."
292            ));
293        }
294    }
295
296    Ok(())
297}
298
299/// Whether the session has resumable user work (pending tool response, retry, or last message is from user).
300pub fn has_pending_user_message(session: &Session) -> bool {
301    if has_pending_conclusion_with_options_resume(session) || has_pending_retry_resume(session) {
302        return true;
303    }
304    session
305        .messages
306        .last()
307        .map(|message| matches!(message.role, Role::User))
308        .unwrap_or(false)
309}
310
311pub fn consume_pending_conclusion_with_options_resume(session: &mut Session) {
312    session
313        .metadata
314        .remove("conclusion_with_options_resume_pending");
315    session.metadata.remove("retry_resume_pending");
316    session.metadata.remove("retry_resume_reason");
317}
318
319pub fn has_pending_conclusion_with_options_resume(session: &Session) -> bool {
320    session
321        .metadata
322        .get("conclusion_with_options_resume_pending")
323        .is_some_and(|value| value == "true")
324}
325
326pub fn has_pending_retry_resume(session: &Session) -> bool {
327    session
328        .metadata
329        .get("retry_resume_pending")
330        .is_some_and(|value| value == "true")
331}
332
333impl ServerExecuteSnapshot {
334    pub fn from_session(session: &Session) -> Self {
335        Self {
336            message_count: session.messages.len(),
337            last_message_id: session.messages.last().map(|message| message.id.clone()),
338            has_pending_question: session.pending_question.is_some(),
339            pending_question_tool_call_id: session
340                .pending_question
341                .as_ref()
342                .map(|pending| pending.tool_call_id.clone()),
343            has_pending_user_message: has_pending_user_message(session),
344        }
345    }
346}
347
#[cfg(test)]
mod tests {
    use super::*;
    use bamboo_domain::ProviderModelRef;

    // Session with one pending user message, so execute preconditions hold.
    fn make_session(model: &str) -> Session {
        let mut s = Session::new("test-session", model);
        // Add a user message so has_pending_user_message is true
        s.messages.push(bamboo_agent_core::Message::user("hello"));
        s
    }

    // Minimal input: every optional field left unset.
    fn make_input() -> ExecuteInput {
        ExecuteInput {
            session_id: "test-session".to_string(),
            request_model: None,
            request_model_ref: None,
            request_provider: None,
            request_reasoning_effort: None,
            request_skill_mode: None,
            client_sync: None,
        }
    }

    // Config with the provider/model-ref flag disabled (legacy path).
    fn make_config() -> ExecutionConfigSnapshot {
        ExecutionConfigSnapshot {
            provider_model_ref_enabled: false,
            ..Default::default()
        }
    }

    // ---- normalize_model ----

    #[test]
    fn normalize_model_some() {
        assert_eq!(normalize_model(Some("gpt-4")), Some("gpt-4".to_string()));
    }

    #[test]
    fn normalize_model_trims_whitespace() {
        assert_eq!(
            normalize_model(Some("  gpt-4  ")),
            Some("gpt-4".to_string())
        );
    }

    #[test]
    fn normalize_model_none() {
        assert_eq!(normalize_model(None), None);
    }

    #[test]
    fn normalize_model_empty() {
        assert_eq!(normalize_model(Some("")), None);
    }

    #[test]
    fn normalize_model_whitespace_only() {
        assert_eq!(normalize_model(Some("   ")), None);
    }

    // "unknown" is a sentinel treated as no model.
    #[test]
    fn normalize_model_unknown() {
        assert_eq!(normalize_model(Some("unknown")), None);
    }

    // ---- resolve_model_cascade (flag OFF) ----

    #[test]
    fn cascade_old_prefers_session_model() {
        let session = make_session("claude-3");
        let input = make_input();
        let config = make_config();

        let (model, source) = resolve_model_cascade(&session, &input, &config);
        assert_eq!(model, Some("claude-3".to_string()));
        assert_eq!(source, "session");
    }

    #[test]
    fn cascade_old_falls_back_to_config_default() {
        let session = make_session("unknown");
        let input = make_input();
        let mut config = make_config();
        config.default_model = Some("gpt-4o".to_string());

        let (model, source) = resolve_model_cascade(&session, &input, &config);
        assert_eq!(model, Some("gpt-4o".to_string()));
        assert_eq!(source, "provider_default");
    }

    #[test]
    fn cascade_old_falls_back_to_request_model() {
        let session = make_session("unknown");
        let mut input = make_input();
        input.request_model = Some("gpt-4-turbo".to_string());
        let config = make_config();

        let (model, source) = resolve_model_cascade(&session, &input, &config);
        assert_eq!(model, Some("gpt-4-turbo".to_string()));
        assert_eq!(source, "request");
    }

    #[test]
    fn cascade_old_no_model_returns_none() {
        let session = make_session("unknown");
        let input = make_input();
        let config = make_config();

        let (model, source) = resolve_model_cascade(&session, &input, &config);
        assert_eq!(model, None);
        assert_eq!(source, "none");
    }

    #[test]
    fn cascade_old_session_overrides_request() {
        let session = make_session("claude-3");
        let mut input = make_input();
        input.request_model = Some("gpt-4".to_string());
        let config = make_config();

        let (model, source) = resolve_model_cascade(&session, &input, &config);
        assert_eq!(model, Some("claude-3".to_string()));
        assert_eq!(source, "session");
    }

    // ---- resolve_model_ref_cascade (flag ON) ----

    #[test]
    fn cascade_new_prefers_session_model_ref() {
        let mut session = make_session("unknown");
        session.model_ref = Some(ProviderModelRef::new("anthropic", "claude-3"));
        let input = make_input();
        let mut config = make_config();
        config.provider_model_ref_enabled = true;

        let (model_ref, model, source) = resolve_model_ref_cascade(&session, &input, &config);
        assert_eq!(
            model_ref,
            Some(ProviderModelRef::new("anthropic", "claude-3"))
        );
        assert_eq!(model, Some("claude-3".to_string()));
        assert_eq!(source, "session");
    }

    #[test]
    fn cascade_new_falls_back_to_request_model_ref_before_config_default_ref() {
        let session = make_session("unknown");
        let mut input = make_input();
        input.request_model_ref = Some(ProviderModelRef::new("gemini", "gemini-pro"));
        let mut config = make_config();
        config.provider_model_ref_enabled = true;
        config.default_model_ref = Some(ProviderModelRef::new("openai", "gpt-4o"));

        let (model_ref, model, source) = resolve_model_ref_cascade(&session, &input, &config);
        assert_eq!(
            model_ref,
            Some(ProviderModelRef::new("gemini", "gemini-pro"))
        );
        assert_eq!(model, Some("gemini-pro".to_string()));
        assert_eq!(source, "request");
    }

    #[test]
    fn cascade_new_falls_back_to_config_default_ref() {
        let session = make_session("unknown");
        let input = make_input();
        let mut config = make_config();
        config.provider_model_ref_enabled = true;
        config.default_model_ref = Some(ProviderModelRef::new("openai", "gpt-4o"));

        let (model_ref, model, source) = resolve_model_ref_cascade(&session, &input, &config);
        assert_eq!(model_ref, Some(ProviderModelRef::new("openai", "gpt-4o")));
        assert_eq!(model, Some("gpt-4o".to_string()));
        assert_eq!(source, "provider_default");
    }

    // With no structured refs anywhere, the new path defers to the legacy cascade.
    #[test]
    fn cascade_new_falls_back_to_old_cascade_when_no_refs() {
        let mut session = make_session("claude-3");
        session.model_ref = None;
        let input = make_input();
        let mut config = make_config();
        config.provider_model_ref_enabled = true;

        let (model_ref, model, source) = resolve_model_ref_cascade(&session, &input, &config);
        assert_eq!(model_ref, None);
        assert_eq!(model, Some("claude-3".to_string()));
        assert_eq!(source, "session");
    }

    #[test]
    fn cascade_new_session_ref_overrides_request_ref() {
        let mut session = make_session("unknown");
        session.model_ref = Some(ProviderModelRef::new("anthropic", "claude-3"));
        let mut input = make_input();
        input.request_model_ref = Some(ProviderModelRef::new("openai", "gpt-4o"));
        let mut config = make_config();
        config.provider_model_ref_enabled = true;

        let (model_ref, model, source) = resolve_model_ref_cascade(&session, &input, &config);
        assert_eq!(
            model_ref,
            Some(ProviderModelRef::new("anthropic", "claude-3"))
        );
        assert_eq!(model, Some("claude-3".to_string()));
        assert_eq!(source, "session");
    }

    // session_effective_model_ref can derive a ref from legacy metadata.
    #[test]
    fn cascade_new_uses_session_provider_metadata_even_without_structured_ref() {
        let mut session = make_session("gpt-4o");
        session.model_ref = None;
        session
            .metadata
            .insert("provider_name".to_string(), "openai".to_string());
        let input = make_input();
        let mut config = make_config();
        config.provider_model_ref_enabled = true;

        let (model_ref, model, source) = resolve_model_ref_cascade(&session, &input, &config);
        assert_eq!(model_ref, Some(ProviderModelRef::new("openai", "gpt-4o")));
        assert_eq!(model, Some("gpt-4o".to_string()));
        assert_eq!(source, "session");
    }

    #[test]
    fn cascade_new_no_model_anywhere_returns_none() {
        let session = make_session("unknown");
        let input = make_input();
        let mut config = make_config();
        config.provider_model_ref_enabled = true;

        let (model_ref, model, source) = resolve_model_ref_cascade(&session, &input, &config);
        assert_eq!(model_ref, None);
        assert_eq!(model, None);
        assert_eq!(source, "none");
    }

    // ---- evaluate_client_sync ----

    #[test]
    fn sync_none_when_no_client_sync() {
        let snapshot = ServerExecuteSnapshot {
            message_count: 1,
            last_message_id: Some("msg-1".to_string()),
            has_pending_question: false,
            pending_question_tool_call_id: None,
            has_pending_user_message: true,
        };
        assert_eq!(evaluate_client_sync(None, &snapshot), None);
    }

    #[test]
    fn sync_mismatch_pending_question_flag() {
        let client_sync = ExecuteClientSync {
            client_message_count: 1,
            client_last_message_id: Some("msg-1".to_string()),
            client_has_pending_question: true,
            client_pending_question_tool_call_id: None,
        };
        let snapshot = ServerExecuteSnapshot {
            message_count: 1,
            last_message_id: Some("msg-1".to_string()),
            has_pending_question: false,
            pending_question_tool_call_id: None,
            has_pending_user_message: true,
        };
        assert_eq!(
            evaluate_client_sync(Some(&client_sync), &snapshot),
            Some(ExecuteSyncReason::PendingQuestionMismatch)
        );
    }

    #[test]
    fn sync_mismatch_message_count() {
        let client_sync = ExecuteClientSync {
            client_message_count: 2,
            client_last_message_id: Some("msg-2".to_string()),
            client_has_pending_question: false,
            client_pending_question_tool_call_id: None,
        };
        let snapshot = ServerExecuteSnapshot {
            message_count: 1,
            last_message_id: Some("msg-1".to_string()),
            has_pending_question: false,
            pending_question_tool_call_id: None,
            has_pending_user_message: true,
        };
        assert_eq!(
            evaluate_client_sync(Some(&client_sync), &snapshot),
            Some(ExecuteSyncReason::MessageCountMismatch)
        );
    }

    #[test]
    fn sync_mismatch_last_message_id() {
        let client_sync = ExecuteClientSync {
            client_message_count: 1,
            client_last_message_id: Some("msg-old".to_string()),
            client_has_pending_question: false,
            client_pending_question_tool_call_id: None,
        };
        let snapshot = ServerExecuteSnapshot {
            message_count: 1,
            last_message_id: Some("msg-new".to_string()),
            has_pending_question: false,
            pending_question_tool_call_id: None,
            has_pending_user_message: true,
        };
        assert_eq!(
            evaluate_client_sync(Some(&client_sync), &snapshot),
            Some(ExecuteSyncReason::LastMessageIdMismatch)
        );
    }

    #[test]
    fn sync_ok_when_matching() {
        let client_sync = ExecuteClientSync {
            client_message_count: 1,
            client_last_message_id: Some("msg-1".to_string()),
            client_has_pending_question: false,
            client_pending_question_tool_call_id: None,
        };
        let snapshot = ServerExecuteSnapshot {
            message_count: 1,
            last_message_id: Some("msg-1".to_string()),
            has_pending_question: false,
            pending_question_tool_call_id: None,
            has_pending_user_message: true,
        };
        assert_eq!(evaluate_client_sync(Some(&client_sync), &snapshot), None);
    }

    #[test]
    fn sync_ok_with_matching_pending_question_and_tool_call_id() {
        let client_sync = ExecuteClientSync {
            client_message_count: 2,
            client_last_message_id: Some("msg-2".to_string()),
            client_has_pending_question: true,
            client_pending_question_tool_call_id: Some("tc-1".to_string()),
        };
        let snapshot = ServerExecuteSnapshot {
            message_count: 2,
            last_message_id: Some("msg-2".to_string()),
            has_pending_question: true,
            pending_question_tool_call_id: Some("tc-1".to_string()),
            has_pending_user_message: false,
        };
        assert_eq!(evaluate_client_sync(Some(&client_sync), &snapshot), None);
    }

    #[test]
    fn sync_mismatch_pending_question_tool_call_id() {
        let client_sync = ExecuteClientSync {
            client_message_count: 2,
            client_last_message_id: Some("msg-2".to_string()),
            client_has_pending_question: true,
            client_pending_question_tool_call_id: Some("tc-old".to_string()),
        };
        let snapshot = ServerExecuteSnapshot {
            message_count: 2,
            last_message_id: Some("msg-2".to_string()),
            has_pending_question: true,
            pending_question_tool_call_id: Some("tc-new".to_string()),
            has_pending_user_message: false,
        };
        assert_eq!(
            evaluate_client_sync(Some(&client_sync), &snapshot),
            Some(ExecuteSyncReason::PendingQuestionMismatch)
        );
    }

    // ---- has_pending_user_message ----

    #[test]
    fn pending_user_message_true_when_last_is_user() {
        let session = make_session("gpt-4");
        assert!(has_pending_user_message(&session));
    }

    #[test]
    fn pending_user_message_false_when_last_is_assistant() {
        let mut session = make_session("gpt-4");
        session
            .messages
            .push(bamboo_agent_core::Message::assistant("response", None));
        assert!(!has_pending_user_message(&session));
    }

    #[test]
    fn pending_user_message_false_when_empty() {
        let session = Session::new("test", "gpt-4");
        assert!(!has_pending_user_message(&session));
    }

    // ---- has_pending_conclusion_with_options_resume / has_pending_retry_resume ----

    #[test]
    fn conclusion_with_options_resume_true() {
        let mut session = Session::new("test", "gpt-4");
        session.metadata.insert(
            "conclusion_with_options_resume_pending".to_string(),
            "true".to_string(),
        );
        assert!(has_pending_conclusion_with_options_resume(&session));
    }

    #[test]
    fn conclusion_with_options_resume_false_when_missing() {
        let session = Session::new("test", "gpt-4");
        assert!(!has_pending_conclusion_with_options_resume(&session));
    }

    // Only the exact string "true" counts as a set marker.
    #[test]
    fn conclusion_with_options_resume_false_when_not_true() {
        let mut session = Session::new("test", "gpt-4");
        session.metadata.insert(
            "conclusion_with_options_resume_pending".to_string(),
            "false".to_string(),
        );
        assert!(!has_pending_conclusion_with_options_resume(&session));
    }

    #[test]
    fn retry_resume_true() {
        let mut session = Session::new("test", "gpt-4");
        session
            .metadata
            .insert("retry_resume_pending".to_string(), "true".to_string());
        assert!(has_pending_retry_resume(&session));
    }

    #[test]
    fn retry_resume_false_when_missing() {
        let session = Session::new("test", "gpt-4");
        assert!(!has_pending_retry_resume(&session));
    }

    // ---- consume_pending_conclusion_with_options_resume ----

    #[test]
    fn consume_removes_resume_metadata() {
        let mut session = Session::new("test", "gpt-4");
        session.metadata.insert(
            "conclusion_with_options_resume_pending".to_string(),
            "true".to_string(),
        );
        session
            .metadata
            .insert("retry_resume_pending".to_string(), "true".to_string());
        session
            .metadata
            .insert("retry_resume_reason".to_string(), "timeout".to_string());

        consume_pending_conclusion_with_options_resume(&mut session);

        assert!(!session
            .metadata
            .contains_key("conclusion_with_options_resume_pending"));
        assert!(!session.metadata.contains_key("retry_resume_pending"));
        assert!(!session.metadata.contains_key("retry_resume_reason"));
    }

    // ---- ServerExecuteSnapshot::from_session ----

    #[test]
    fn snapshot_from_session_counts_messages() {
        let mut session = Session::new("test", "gpt-4");
        session
            .messages
            .push(bamboo_agent_core::Message::user("hi"));
        session
            .messages
            .push(bamboo_agent_core::Message::assistant("hello", None));
        session.messages.last_mut().unwrap().id = "msg-2".to_string();

        let snapshot = ServerExecuteSnapshot::from_session(&session);
        assert_eq!(snapshot.message_count, 2);
        assert_eq!(snapshot.last_message_id, Some("msg-2".to_string()));
        assert!(!snapshot.has_pending_question);
        assert!(!snapshot.has_pending_user_message);
    }

    #[test]
    fn snapshot_empty_session() {
        let session = Session::new("test", "gpt-4");
        let snapshot = ServerExecuteSnapshot::from_session(&session);

        assert_eq!(snapshot.message_count, 0);
        assert_eq!(snapshot.last_message_id, None);
        assert!(!snapshot.has_pending_question);
        assert!(!snapshot.has_pending_user_message);
    }
}