// bamboo_server/session_app/session_create.rs
//! Session creation use case.
//!
//! Pure business logic for constructing a new session from request
//! parameters and configuration defaults. The handler builds the
//! value types from HTTP request / AppState, then delegates here.
6
7use bamboo_agent_core::{Message, Session};
8use bamboo_domain::reasoning::ReasoningEffort;
9use bamboo_domain::ProviderModelRef;
10
11use super::provider_model::{persist_legacy_model_provider, persist_model_ref};
12
/// Request-level input for session creation.
pub struct CreateSessionInput {
    /// Client-supplied session identifier.
    pub id: String,
    /// Optional display title; trimmed, and ignored when blank.
    pub title: Option<String>,
    /// Optional explicit system prompt; trimmed, and ignored when blank.
    pub system_prompt: Option<String>,
    /// Legacy free-form model name, used only when `model_ref` is absent.
    pub model: Option<String>,
    /// Structured provider/model reference; takes precedence over `model`.
    pub model_ref: Option<ProviderModelRef>,
    /// Requested reasoning effort; overrides the configured default.
    pub reasoning_effort: Option<ReasoningEffort>,
}
22
/// Configuration defaults for session creation.
///
/// Captured from server `Config` as plain values so the crate stays
/// decoupled from `bamboo-infrastructure-config`.
pub struct CreateSessionConfig {
    /// Default model when the request names none; `None` resolves to `"unknown"`.
    pub default_model: Option<String>,
    /// Default reasoning effort when the request specifies none.
    pub default_reasoning_effort: Option<ReasoningEffort>,
    /// Global default system prompt; a blank value falls through to
    /// `builtin_fallback_prompt`.
    pub global_default_prompt: String,
    /// Compile-time fallback prompt used when the global default is blank.
    pub builtin_fallback_prompt: &'static str,
}
33
34/// Build a new session from request input and config defaults.
35pub fn build_new_session(input: &CreateSessionInput, config: &CreateSessionConfig) -> Session {
36    let model = input
37        .model_ref
38        .as_ref()
39        .map(|model_ref| model_ref.model.clone())
40        .unwrap_or_else(|| resolve_model(input.model.as_deref(), config.default_model.as_deref()));
41    let mut session = Session::new(input.id.clone(), model);
42    if let Some(model_ref) = input.model_ref.as_ref() {
43        persist_model_ref(&mut session, model_ref);
44    } else {
45        persist_legacy_model_provider(&mut session, input.model.as_deref(), None);
46    }
47    session.reasoning_effort =
48        resolve_reasoning_effort(input.reasoning_effort, config.default_reasoning_effort);
49
50    if let Some(title) = trimmed_non_empty(input.title.as_deref()) {
51        session.title = title;
52    }
53    let explicit_prompt = trimmed_non_empty(input.system_prompt.as_deref());
54    let has_explicit_prompt = explicit_prompt.is_some();
55    let base_prompt = explicit_prompt.unwrap_or_else(|| {
56        let trimmed = config.global_default_prompt.trim();
57        if trimmed.is_empty() {
58            config.builtin_fallback_prompt.to_string()
59        } else {
60            trimmed.to_string()
61        }
62    });
63    session
64        .metadata
65        .insert("base_system_prompt".to_string(), base_prompt.clone());
66
67    if has_explicit_prompt {
68        session.add_message(Message::system(base_prompt));
69        bamboo_engine::runner::refresh_prompt_snapshot(&mut session);
70    }
71
72    session
73}
74
75/// Resolve the model from request → config → fallback.
76pub fn resolve_model(request_model: Option<&str>, config_model: Option<&str>) -> String {
77    trimmed_non_empty(request_model)
78        .or_else(|| config_model.map(ToString::to_string))
79        .unwrap_or_else(|| "unknown".to_string())
80}
81
82/// Resolve reasoning effort from request → config.
83pub fn resolve_reasoning_effort(
84    request_effort: Option<ReasoningEffort>,
85    config_effort: Option<ReasoningEffort>,
86) -> Option<ReasoningEffort> {
87    request_effort.or(config_effort)
88}
89
/// Trim whitespace and return `None` for empty strings.
///
/// `None` input, an empty string, and a whitespace-only string all yield
/// `None`; anything else yields the trimmed, owned string.
pub fn trimmed_non_empty(value: Option<&str>) -> Option<String> {
    let trimmed = value?.trim();
    if trimmed.is_empty() {
        None
    } else {
        Some(trimmed.to_owned())
    }
}
97
#[cfg(test)]
mod tests {
    use super::*;

    // Stand-in for the server's built-in prompt constant.
    const BUILTIN_FALLBACK: &str = "You are a helpful assistant.";

    // Baseline config: no model/effort defaults, non-blank global prompt.
    fn default_config() -> CreateSessionConfig {
        CreateSessionConfig {
            default_model: None,
            default_reasoning_effort: None,
            global_default_prompt: "Global fallback".to_string(),
            builtin_fallback_prompt: BUILTIN_FALLBACK,
        }
    }

    #[test]
    fn resolve_model_uses_request_model_when_present() {
        // Request value wins and is trimmed.
        assert_eq!(resolve_model(Some("  gpt-5  "), None), "gpt-5");
    }

    #[test]
    fn resolve_model_falls_back_to_config() {
        assert_eq!(resolve_model(None, Some("gpt-4")), "gpt-4");
    }

    #[test]
    fn resolve_model_falls_back_to_unknown() {
        // Neither source set: sentinel model name.
        assert_eq!(resolve_model(None, None), "unknown");
    }

    #[test]
    fn resolve_model_ignores_blank_request() {
        // Whitespace-only request counts as absent.
        assert_eq!(resolve_model(Some("   "), Some("gpt-4")), "gpt-4");
    }

    #[test]
    fn resolve_reasoning_effort_prefers_request() {
        assert_eq!(
            resolve_reasoning_effort(Some(ReasoningEffort::High), Some(ReasoningEffort::Low)),
            Some(ReasoningEffort::High)
        );
    }

    #[test]
    fn resolve_reasoning_effort_falls_back_to_config() {
        assert_eq!(
            resolve_reasoning_effort(None, Some(ReasoningEffort::Medium)),
            Some(ReasoningEffort::Medium)
        );
    }

    #[test]
    fn build_new_session_applies_title_and_system_prompt() {
        let input = CreateSessionInput {
            id: "session-1".to_string(),
            title: Some("  Sprint Session  ".to_string()),
            system_prompt: Some("  You are helpful  ".to_string()),
            model: Some("gpt-5".to_string()),
            model_ref: None,
            reasoning_effort: Some(ReasoningEffort::High),
        };
        let session = build_new_session(&input, &default_config());

        // Title and prompt are trimmed; explicit prompt lands in metadata
        // AND as the first (system) message.
        assert_eq!(session.title, "Sprint Session");
        assert_eq!(
            session
                .metadata
                .get("base_system_prompt")
                .map(String::as_str),
            Some("You are helpful")
        );
        assert_eq!(session.reasoning_effort, Some(ReasoningEffort::High));
        assert_eq!(
            session.messages.first().map(|m| m.content.as_str()),
            Some("You are helpful")
        );
    }

    #[test]
    fn build_new_session_uses_global_default_when_no_explicit_prompt() {
        let input = CreateSessionInput {
            id: "session-2".to_string(),
            title: None,
            system_prompt: None,
            model: Some("gpt-5".to_string()),
            model_ref: None,
            reasoning_effort: None,
        };
        let session = build_new_session(&input, &default_config());

        // Default prompt is metadata-only: no system message is added.
        assert_eq!(
            session
                .metadata
                .get("base_system_prompt")
                .map(String::as_str),
            Some("Global fallback")
        );
        assert!(session.messages.is_empty());
    }

    #[test]
    fn build_new_session_uses_builtin_fallback_when_global_is_empty() {
        // Whitespace-only global prompt falls through to the built-in.
        let config = CreateSessionConfig {
            global_default_prompt: "   ".to_string(),
            ..default_config()
        };
        let input = CreateSessionInput {
            id: "session-3".to_string(),
            title: None,
            system_prompt: None,
            model: Some("gpt-5".to_string()),
            model_ref: None,
            reasoning_effort: None,
        };
        let session = build_new_session(&input, &config);

        assert_eq!(
            session
                .metadata
                .get("base_system_prompt")
                .map(String::as_str),
            Some(BUILTIN_FALLBACK)
        );
    }

    #[test]
    fn build_new_session_with_explicit_prompt_generates_snapshot() {
        let input = CreateSessionInput {
            id: "session-4".to_string(),
            title: None,
            system_prompt: Some("Custom prompt".to_string()),
            model: Some("gpt-5".to_string()),
            model_ref: None,
            reasoning_effort: None,
        };
        let session = build_new_session(&input, &default_config());

        // Explicit prompt must also produce an engine prompt snapshot.
        let snapshot = bamboo_engine::runner::read_prompt_snapshot(&session)
            .expect("prompt snapshot should exist");
        assert_eq!(snapshot.base_system_prompt, "Custom prompt");
        assert_eq!(snapshot.effective_system_prompt, "Custom prompt");
    }

    #[test]
    fn build_new_session_with_model_ref_persists_bare_model_and_provider_metadata() {
        // Structured model_ref overrides the legacy `model` string.
        let input = CreateSessionInput {
            id: "session-5".to_string(),
            title: None,
            system_prompt: None,
            model: Some("ignored-compat-model".to_string()),
            model_ref: Some(ProviderModelRef::new("anthropic", "claude-3-7-sonnet")),
            reasoning_effort: None,
        };
        let session = build_new_session(&input, &default_config());

        assert_eq!(session.model, "claude-3-7-sonnet");
        assert_eq!(
            session.model_ref,
            Some(ProviderModelRef::new("anthropic", "claude-3-7-sonnet"))
        );
        assert_eq!(
            session.metadata.get("provider_name").map(String::as_str),
            Some("anthropic")
        );
    }
}