// stakpak_api/client/provider.rs
1//! AgentProvider trait implementation for AgentClient
2//!
3//! Implements the unified provider interface with:
4//! - Stakpak-first routing when API key is present
5//! - Local fallback when Stakpak is unavailable
6//! - Hook registry integration for lifecycle events
7
8use crate::AgentProvider;
9use crate::models::*;
10use crate::storage::{
11    CreateCheckpointRequest as StorageCreateCheckpointRequest,
12    CreateSessionRequest as StorageCreateSessionRequest,
13    UpdateSessionRequest as StorageUpdateSessionRequest,
14};
15use async_trait::async_trait;
16use futures_util::Stream;
17use reqwest::header::HeaderMap;
18use rmcp::model::Content;
19use stakai::Model;
20use stakpak_shared::hooks::{HookContext, LifecycleEvent};
21use stakpak_shared::models::integrations::openai::{
22    ChatCompletionChoice, ChatCompletionResponse, ChatCompletionStreamChoice,
23    ChatCompletionStreamResponse, ChatMessage, FinishReason, MessageContent, Role, Tool,
24};
25use stakpak_shared::models::llm::{
26    GenerationDelta, LLMInput, LLMMessage, LLMMessageContent, LLMStreamInput,
27};
28use std::pin::Pin;
29use tokio::sync::mpsc;
30use uuid::Uuid;
31
32/// Lightweight session info returned by initialize_session / save_checkpoint
#[derive(Debug, Clone)]
pub(crate) struct SessionInfo {
    // ID of the session this checkpoint chain belongs to.
    session_id: Uuid,
    // ID of the most recent (active) checkpoint for the session.
    checkpoint_id: Uuid,
    // Creation time of that checkpoint; surfaced as `created` in completion responses.
    checkpoint_created_at: chrono::DateTime<chrono::Utc>,
}
39
40use super::AgentClient;
41
42// =============================================================================
43// Internal Message Types
44// =============================================================================
45
/// Internal message passed from the spawned completion task to the
/// response stream in `chat_completion_stream`.
#[derive(Debug)]
pub(crate) enum StreamMessage {
    // A single generation delta (token chunk / usage) from the LLM.
    Delta(GenerationDelta),
    // An updated hook context (boxed — `HookContext<AgentState>` is large),
    // sent after the message list or checkpoint ID changes.
    Ctx(Box<HookContext<AgentState>>),
}
51
52// =============================================================================
53// AgentProvider Implementation
54// =============================================================================
55
#[async_trait]
impl AgentProvider for AgentClient {
    // =========================================================================
    // Account
    // =========================================================================

    /// Fetch the authenticated account via the Stakpak API, or return a
    /// hard-coded "local" stub account when no API key is configured.
    async fn get_my_account(&self) -> Result<GetMyAccountResponse, String> {
        if let Some(api) = &self.stakpak_api {
            api.get_account().await
        } else {
            // Local stub
            Ok(GetMyAccountResponse {
                username: "local".to_string(),
                id: "local".to_string(),
                first_name: "local".to_string(),
                last_name: "local".to_string(),
                email: "local@stakpak.dev".to_string(),
                scope: None,
            })
        }
    }

    /// Billing info is only available through the Stakpak API; local mode errors.
    async fn get_billing_info(
        &self,
        account_username: &str,
    ) -> Result<stakpak_shared::models::billing::BillingResponse, String> {
        if let Some(api) = &self.stakpak_api {
            api.get_billing(account_username).await
        } else {
            Err("Billing info not available without Stakpak API key".to_string())
        }
    }

    // =========================================================================
    // Rulebooks
    // =========================================================================

    /// List rulebooks. With an API key this uses the authenticated client;
    /// without one it attempts an unauthenticated fetch of *public* rulebooks
    /// and degrades to an empty list on any non-success / parse failure.
    async fn list_rulebooks(&self) -> Result<Vec<ListRuleBook>, String> {
        if let Some(api) = &self.stakpak_api {
            api.list_rulebooks().await
        } else {
            // Try to fetch public rulebooks via unauthenticated request
            let client = stakpak_shared::tls_client::create_tls_client(
                stakpak_shared::tls_client::TlsClientConfig::default()
                    .with_timeout(std::time::Duration::from_secs(30)),
            )?;

            let url = format!("{}/v1/rules", self.get_stakpak_api_endpoint());
            let response = client.get(&url).send().await.map_err(|e| e.to_string())?;

            if response.status().is_success() {
                let value: serde_json::Value = response.json().await.map_err(|e| e.to_string())?;
                // Parse failures are swallowed deliberately: public listing is
                // best-effort, so a schema mismatch yields an empty list.
                match serde_json::from_value::<ListRulebooksResponse>(value) {
                    Ok(resp) => Ok(resp.results),
                    Err(_) => Ok(vec![]),
                }
            } else {
                Ok(vec![])
            }
        }
    }

    /// Fetch a single rulebook by URI; unauthenticated fallback percent-encodes
    /// the URI into the path and returns an error when not found.
    async fn get_rulebook_by_uri(&self, uri: &str) -> Result<RuleBook, String> {
        if let Some(api) = &self.stakpak_api {
            api.get_rulebook_by_uri(uri).await
        } else {
            // Try to fetch public rulebook via unauthenticated request
            let client = stakpak_shared::tls_client::create_tls_client(
                stakpak_shared::tls_client::TlsClientConfig::default()
                    .with_timeout(std::time::Duration::from_secs(30)),
            )?;

            let encoded_uri = urlencoding::encode(uri);
            let url = format!(
                "{}/v1/rules/{}",
                self.get_stakpak_api_endpoint(),
                encoded_uri
            );
            let response = client.get(&url).send().await.map_err(|e| e.to_string())?;

            if response.status().is_success() {
                response.json().await.map_err(|e| e.to_string())
            } else {
                Err("Rulebook not found".to_string())
            }
        }
    }

    /// Create a rulebook. Write operations always require an API key.
    async fn create_rulebook(
        &self,
        uri: &str,
        description: &str,
        content: &str,
        tags: Vec<String>,
        visibility: Option<RuleBookVisibility>,
    ) -> Result<CreateRuleBookResponse, String> {
        if let Some(api) = &self.stakpak_api {
            api.create_rulebook(&CreateRuleBookInput {
                uri: uri.to_string(),
                description: description.to_string(),
                content: content.to_string(),
                tags,
                visibility,
            })
            .await
        } else {
            Err("Creating rulebooks requires Stakpak API key".to_string())
        }
    }

    /// Delete a rulebook. Write operations always require an API key.
    async fn delete_rulebook(&self, uri: &str) -> Result<(), String> {
        if let Some(api) = &self.stakpak_api {
            api.delete_rulebook(uri).await
        } else {
            Err("Deleting rulebooks requires Stakpak API key".to_string())
        }
    }

    // =========================================================================
    // Chat Completion
    // =========================================================================

    /// Non-streaming chat completion.
    ///
    /// Lifecycle: BeforeRequest hooks -> initialize/resume session -> run
    /// inference -> save checkpoint -> AfterRequest hooks -> assemble an
    /// OpenAI-shaped response whose `metadata` carries the session/checkpoint
    /// IDs so callers can resume later.
    async fn chat_completion(
        &self,
        model: Model,
        messages: Vec<ChatMessage>,
        tools: Option<Vec<Tool>>,
        session_id: Option<Uuid>,
        metadata: Option<serde_json::Value>,
    ) -> Result<ChatCompletionResponse, String> {
        let mut ctx = HookContext::new(
            session_id,
            AgentState::new(model, messages, tools, metadata),
        );

        // Execute before request hooks
        // NOTE(review): `.ok()?` presumably converts the hook outcome into a
        // `Result`, propagating a hook-requested abort — confirm against
        // `HookContext`'s API.
        self.hook_registry
            .execute_hooks(&mut ctx, &LifecycleEvent::BeforeRequest)
            .await
            .map_err(|e| e.to_string())?
            .ok()?;

        // Initialize or resume session
        let current_session = self.initialize_session(&ctx).await?;
        ctx.set_session_id(current_session.session_id);

        // Run completion
        let new_message = self.run_agent_completion(&mut ctx, None).await?;
        ctx.state.append_new_message(new_message.clone());

        // Save checkpoint
        let result = self
            .save_checkpoint(
                &current_session,
                ctx.state.messages.clone(),
                ctx.state.metadata.clone(),
            )
            .await?;
        let checkpoint_created_at = result.checkpoint_created_at.timestamp() as u64;
        ctx.set_new_checkpoint_id(result.checkpoint_id);

        // Execute after request hooks
        self.hook_registry
            .execute_hooks(&mut ctx, &LifecycleEvent::AfterRequest)
            .await
            .map_err(|e| e.to_string())?
            .ok()?;

        // Surface session/checkpoint IDs (plus any hook-produced state
        // metadata) in the response metadata for resumption by the caller.
        let mut meta = serde_json::Map::new();
        if let Some(session_id) = ctx.session_id {
            meta.insert(
                "session_id".to_string(),
                serde_json::Value::String(session_id.to_string()),
            );
        }
        if let Some(checkpoint_id) = ctx.new_checkpoint_id {
            meta.insert(
                "checkpoint_id".to_string(),
                serde_json::Value::String(checkpoint_id.to_string()),
            );
        }
        if let Some(state_metadata) = &ctx.state.metadata {
            meta.insert("state_metadata".to_string(), state_metadata.clone());
        }

        Ok(ChatCompletionResponse {
            // Set via set_new_checkpoint_id above; the unwrap assumes
            // AfterRequest hooks never clear it — TODO(review): confirm.
            id: ctx.new_checkpoint_id.unwrap().to_string(),
            object: "chat.completion".to_string(),
            created: checkpoint_created_at,
            model: ctx
                .state
                .llm_input
                .as_ref()
                .map(|llm_input| llm_input.model.id.clone())
                .unwrap_or_default(),
            choices: vec![ChatCompletionChoice {
                index: 0,
                // Non-empty: the assistant message was appended above.
                message: ctx.state.messages.last().cloned().unwrap(),
                logprobs: None,
                finish_reason: FinishReason::Stop,
            }],
            usage: ctx
                .state
                .llm_output
                .as_ref()
                .map(|u| u.usage.clone())
                .unwrap_or_default(),
            system_fingerprint: None,
            metadata: if meta.is_empty() {
                None
            } else {
                Some(serde_json::Value::Object(meta))
            },
        })
    }

    /// Streaming chat completion.
    ///
    /// Spawns the actual inference on a background task that feeds an mpsc
    /// channel; this method returns a stream that converts channel messages
    /// into OpenAI-style `chat.completion.chunk` responses. The spawned task
    /// checks `tx.is_closed()` before each expensive step so that dropping the
    /// stream cancels the work gracefully.
    async fn chat_completion_stream(
        &self,
        model: Model,
        messages: Vec<ChatMessage>,
        tools: Option<Vec<Tool>>,
        _headers: Option<HeaderMap>,
        session_id: Option<Uuid>,
        metadata: Option<serde_json::Value>,
    ) -> Result<
        (
            Pin<
                Box<dyn Stream<Item = Result<ChatCompletionStreamResponse, ApiStreamError>> + Send>,
            >,
            Option<String>,
        ),
        String,
    > {
        let mut ctx = HookContext::new(
            session_id,
            AgentState::new(model, messages, tools, metadata),
        );

        // Execute before request hooks
        self.hook_registry
            .execute_hooks(&mut ctx, &LifecycleEvent::BeforeRequest)
            .await
            .map_err(|e| e.to_string())?
            .ok()?;

        // Initialize session
        let current_session = self.initialize_session(&ctx).await?;
        ctx.set_session_id(current_session.session_id);

        let (tx, mut rx) = mpsc::channel::<Result<StreamMessage, String>>(100);

        // Clone what we need for the spawned task
        let client = self.clone();
        let mut ctx_clone = ctx.clone();

        // Spawn the completion task with proper shutdown handling
        // The task checks if the channel is closed before each expensive operation
        // to support graceful shutdown when the stream consumer is dropped
        tokio::spawn(async move {
            // Check if consumer is still listening before starting
            if tx.is_closed() {
                return;
            }

            let result = client
                .run_agent_completion(&mut ctx_clone, Some(tx.clone()))
                .await;

            match result {
                Err(e) => {
                    let _ = tx.send(Err(e)).await;
                }
                Ok(new_message) => {
                    // Check if consumer is still listening before continuing
                    if tx.is_closed() {
                        return;
                    }

                    ctx_clone.state.append_new_message(new_message.clone());
                    // First Ctx message: lets the stream emit session metadata
                    // before the checkpoint is saved.
                    if tx
                        .send(Ok(StreamMessage::Ctx(Box::new(ctx_clone.clone()))))
                        .await
                        .is_err()
                    {
                        // Consumer dropped, exit gracefully
                        return;
                    }

                    // Check again before expensive session update
                    if tx.is_closed() {
                        return;
                    }

                    let result = client
                        .save_checkpoint(
                            &current_session,
                            ctx_clone.state.messages.clone(),
                            ctx_clone.state.metadata.clone(),
                        )
                        .await;

                    match result {
                        Err(e) => {
                            let _ = tx.send(Err(e)).await;
                        }
                        Ok(updated) => {
                            // Second Ctx message carries the new checkpoint ID.
                            ctx_clone.set_new_checkpoint_id(updated.checkpoint_id);
                            let _ = tx.send(Ok(StreamMessage::Ctx(Box::new(ctx_clone)))).await;
                        }
                    }
                }
            }
        });

        let hook_registry = self.hook_registry.clone();
        let stream = async_stream::stream! {
            while let Some(delta_result) = rx.recv().await {
                match delta_result {
                    Ok(delta) => match delta {
                        StreamMessage::Ctx(updated_ctx) => {
                            ctx = *updated_ctx;
                            // Emit session metadata so callers can track session_id
                            if let Some(session_id) = ctx.session_id {
                                let mut meta = serde_json::Map::new();
                                meta.insert("session_id".to_string(), serde_json::Value::String(session_id.to_string()));
                                if let Some(checkpoint_id) = ctx.new_checkpoint_id {
                                    meta.insert("checkpoint_id".to_string(), serde_json::Value::String(checkpoint_id.to_string()));
                                }
                                if let Some(state_metadata) = &ctx.state.metadata {
                                    meta.insert("state_metadata".to_string(), state_metadata.clone());
                                }
                                // Metadata-only chunk: empty choices, no usage.
                                yield Ok(ChatCompletionStreamResponse {
                                    id: ctx.request_id.to_string(),
                                    object: "chat.completion.chunk".to_string(),
                                    created: chrono::Utc::now().timestamp() as u64,
                                    model: String::new(),
                                    choices: vec![],
                                    usage: None,
                                    metadata: Some(serde_json::Value::Object(meta)),
                                });
                            }
                        }
                        StreamMessage::Delta(delta) => {
                            // Extract usage from Usage delta variant
                            let usage = if let GenerationDelta::Usage { usage } = &delta {
                                Some(usage.clone())
                            } else {
                                None
                            };

                            yield Ok(ChatCompletionStreamResponse {
                                id: ctx.request_id.to_string(),
                                object: "chat.completion.chunk".to_string(),
                                created: chrono::Utc::now().timestamp() as u64,
                                model: ctx.state.llm_input.as_ref().map(|llm_input| llm_input.model.clone().to_string()).unwrap_or_default(),
                                choices: vec![ChatCompletionStreamChoice {
                                    index: 0,
                                    delta: delta.into(),
                                    finish_reason: None,
                                }],
                                usage,
                                metadata: None,
                            })
                        }
                    }
                    Err(e) => yield Err(ApiStreamError::Unknown(e)),
                }
            }

            // Execute after request hooks
            // NOTE(review): `?`/`.ok()?` inside `stream!` ends the generator on
            // hook failure rather than yielding an error item — confirm this is
            // the intended termination behavior.
            hook_registry
                .execute_hooks(&mut ctx, &LifecycleEvent::AfterRequest)
                .await
                .map_err(|e| e.to_string())?
                .ok()?;
        };

        Ok((Box::pin(stream), None))
    }

    /// Cancel an in-flight streamed request; a silent no-op in local mode.
    async fn cancel_stream(&self, request_id: String) -> Result<(), String> {
        if let Some(api) = &self.stakpak_api {
            api.cancel_request(&request_id).await
        } else {
            // Local mode doesn't support cancellation yet
            Ok(())
        }
    }

    // =========================================================================
    // Search Docs
    // =========================================================================

    /// Search documentation. With an API key this delegates to Stakpak;
    /// otherwise it starts a local search-service orchestrator and scrapes
    /// results, formatting each as a "URL/Content" text block.
    async fn search_docs(&self, input: &SearchDocsRequest) -> Result<Vec<Content>, String> {
        if let Some(api) = &self.stakpak_api {
            api.search_docs(&crate::stakpak::SearchDocsRequest {
                keywords: input.keywords.clone(),
                exclude_keywords: input.exclude_keywords.clone(),
                limit: input.limit,
            })
            .await
        } else {
            // Fallback to local search service
            use stakpak_shared::models::integrations::search_service::*;

            let config = SearchServicesOrchestrator::start()
                .await
                .map_err(|e| e.to_string())?;

            let api_url = format!("http://localhost:{}", config.api_port);
            let search_client = SearchClient::new(api_url);

            let search_results = search_client
                .search_and_scrape(input.keywords.clone(), None)
                .await
                .map_err(|e| e.to_string())?;

            if search_results.is_empty() {
                return Ok(vec![Content::text("No results found".to_string())]);
            }

            Ok(search_results
                .into_iter()
                .map(|result| {
                    let content = result.content.unwrap_or_default();
                    Content::text(format!("URL: {}\nContent: {}", result.url, content))
                })
                .collect())
        }
    }

    // =========================================================================
    // Memory
    // =========================================================================

    /// Persist a session checkpoint to long-term memory (no-op locally).
    async fn memorize_session(&self, checkpoint_id: Uuid) -> Result<(), String> {
        if let Some(api) = &self.stakpak_api {
            api.memorize_session(checkpoint_id).await
        } else {
            // No-op in local mode
            Ok(())
        }
    }

    /// Search long-term memory; local mode has no memory store, so it
    /// returns an empty result set rather than an error.
    async fn search_memory(&self, input: &SearchMemoryRequest) -> Result<Vec<Content>, String> {
        if let Some(api) = &self.stakpak_api {
            api.search_memory(&crate::stakpak::SearchMemoryRequest {
                keywords: input.keywords.clone(),
                start_time: input.start_time,
                end_time: input.end_time,
            })
            .await
        } else {
            // Empty results in local mode
            Ok(vec![])
        }
    }

    // =========================================================================
    // Slack
    // =========================================================================

    /// Read recent messages from a Slack channel (Stakpak API only).
    async fn slack_read_messages(
        &self,
        input: &SlackReadMessagesRequest,
    ) -> Result<Vec<Content>, String> {
        if let Some(api) = &self.stakpak_api {
            api.slack_read_messages(&crate::stakpak::SlackReadMessagesRequest {
                channel: input.channel.clone(),
                limit: input.limit,
            })
            .await
        } else {
            Err("Slack integration requires Stakpak API key".to_string())
        }
    }

    /// Read a thread's replies identified by channel + thread timestamp.
    async fn slack_read_replies(
        &self,
        input: &SlackReadRepliesRequest,
    ) -> Result<Vec<Content>, String> {
        if let Some(api) = &self.stakpak_api {
            api.slack_read_replies(&crate::stakpak::SlackReadRepliesRequest {
                channel: input.channel.clone(),
                ts: input.ts.clone(),
            })
            .await
        } else {
            Err("Slack integration requires Stakpak API key".to_string())
        }
    }

    /// Send a markdown message to a Slack channel, optionally into a thread.
    async fn slack_send_message(
        &self,
        input: &SlackSendMessageRequest,
    ) -> Result<Vec<Content>, String> {
        if let Some(api) = &self.stakpak_api {
            api.slack_send_message(&crate::stakpak::SlackSendMessageRequest {
                channel: input.channel.clone(),
                markdown_text: input.markdown_text.clone(),
                thread_ts: input.thread_ts.clone(),
            })
            .await
        } else {
            Err("Slack integration requires Stakpak API key".to_string())
        }
    }

    // =========================================================================
    // Models
    // =========================================================================

    /// List models from all configured providers, newest first.
    async fn list_models(&self) -> Vec<stakai::Model> {
        // Use the provider registry which only contains providers with configured API keys.
        // This ensures we only list models for providers the user actually has access to.
        // Aggregate per provider so one failing provider does not hide all others.
        let registry = self.stakai.registry();
        let mut all_models = Vec::new();

        for provider_id in registry.list_providers() {
            // A provider error silently drops that provider's models.
            if let Ok(mut models) = registry.models_for_provider(&provider_id).await {
                all_models.append(&mut models);
            }
        }

        sort_models_by_recency(&mut all_models);
        all_models
    }
}
585
586/// Sort models by release_date descending (newest first)
587fn sort_models_by_recency(models: &mut [stakai::Model]) {
588    models.sort_by(|a, b| {
589        match (&b.release_date, &a.release_date) {
590            (Some(b_date), Some(a_date)) => b_date.cmp(a_date),
591            (Some(_), None) => std::cmp::Ordering::Less,
592            (None, Some(_)) => std::cmp::Ordering::Greater,
593            (None, None) => b.id.cmp(&a.id), // Fallback to ID descending
594        }
595    });
596}
597
598// =============================================================================
599// SessionStorage implementation (delegates to inner session_storage)
600// =============================================================================
601
602#[async_trait]
603impl crate::storage::SessionStorage for super::AgentClient {
604    async fn list_sessions(
605        &self,
606        query: &crate::storage::ListSessionsQuery,
607    ) -> Result<crate::storage::ListSessionsResult, crate::storage::StorageError> {
608        self.session_storage.list_sessions(query).await
609    }
610
611    async fn get_session(
612        &self,
613        session_id: Uuid,
614    ) -> Result<crate::storage::Session, crate::storage::StorageError> {
615        self.session_storage.get_session(session_id).await
616    }
617
618    async fn create_session(
619        &self,
620        request: &crate::storage::CreateSessionRequest,
621    ) -> Result<crate::storage::CreateSessionResult, crate::storage::StorageError> {
622        self.session_storage.create_session(request).await
623    }
624
625    async fn update_session(
626        &self,
627        session_id: Uuid,
628        request: &crate::storage::UpdateSessionRequest,
629    ) -> Result<crate::storage::Session, crate::storage::StorageError> {
630        self.session_storage
631            .update_session(session_id, request)
632            .await
633    }
634
635    async fn delete_session(&self, session_id: Uuid) -> Result<(), crate::storage::StorageError> {
636        self.session_storage.delete_session(session_id).await
637    }
638
639    async fn list_checkpoints(
640        &self,
641        session_id: Uuid,
642        query: &crate::storage::ListCheckpointsQuery,
643    ) -> Result<crate::storage::ListCheckpointsResult, crate::storage::StorageError> {
644        self.session_storage
645            .list_checkpoints(session_id, query)
646            .await
647    }
648
649    async fn get_checkpoint(
650        &self,
651        checkpoint_id: Uuid,
652    ) -> Result<crate::storage::Checkpoint, crate::storage::StorageError> {
653        self.session_storage.get_checkpoint(checkpoint_id).await
654    }
655
656    async fn create_checkpoint(
657        &self,
658        session_id: Uuid,
659        request: &crate::storage::CreateCheckpointRequest,
660    ) -> Result<crate::storage::Checkpoint, crate::storage::StorageError> {
661        self.session_storage
662            .create_checkpoint(session_id, request)
663            .await
664    }
665
666    async fn get_active_checkpoint(
667        &self,
668        session_id: Uuid,
669    ) -> Result<crate::storage::Checkpoint, crate::storage::StorageError> {
670        self.session_storage.get_active_checkpoint(session_id).await
671    }
672
673    async fn get_session_stats(
674        &self,
675        session_id: Uuid,
676    ) -> Result<crate::storage::SessionStats, crate::storage::StorageError> {
677        self.session_storage.get_session_stats(session_id).await
678    }
679}
680
681// =============================================================================
682// Helper Methods
683// =============================================================================
684
685const TITLE_GENERATOR_PROMPT: &str = include_str!("../prompts/session_title_generator.v1.txt");
686
687impl AgentClient {
    /// Initialize or resume a session based on context
    ///
    /// If `ctx.session_id` is set, we resume that session directly.
    /// Otherwise, we create a new session.
    ///
    /// In both paths a nicer title may be generated asynchronously on a
    /// detached task; its failure is ignored (best-effort).
    ///
    /// # Errors
    /// Returns an error if `ctx` carries no messages, if a resumed session
    /// cannot be loaded or lacks an active checkpoint, or if session
    /// creation fails.
    pub(crate) async fn initialize_session(
        &self,
        ctx: &HookContext<AgentState>,
    ) -> Result<SessionInfo, String> {
        let messages = &ctx.state.messages;

        if messages.is_empty() {
            return Err("At least one message is required".to_string());
        }

        // If session_id is set in context, resume that session directly
        if let Some(session_id) = ctx.session_id {
            let session = self
                .session_storage
                .get_session(session_id)
                .await
                .map_err(|e| e.to_string())?;

            let checkpoint = session
                .active_checkpoint
                .ok_or_else(|| format!("Session {} has no active checkpoint", session_id))?;

            // If the session still has the default title, generate a better one in the background.
            if session.title.trim().is_empty() || session.title == "New Session" {
                let client = self.clone();
                let messages_for_title = messages.to_vec();
                let session_id = session.id;
                let existing_title = session.title.clone();
                // Detached task: result is discarded on error, update is best-effort.
                tokio::spawn(async move {
                    if let Ok(title) = client.generate_session_title(&messages_for_title).await {
                        let trimmed = title.trim();
                        if !trimmed.is_empty() && trimmed != existing_title {
                            let request =
                                StorageUpdateSessionRequest::new().with_title(trimmed.to_string());
                            let _ = client
                                .session_storage
                                .update_session(session_id, &request)
                                .await;
                        }
                    }
                });
            }

            return Ok(SessionInfo {
                session_id: session.id,
                checkpoint_id: checkpoint.id,
                checkpoint_created_at: checkpoint.created_at,
            });
        }

        // Create new session with a fast local title.
        let fallback_title = Self::fallback_session_title(messages);

        // Get current working directory
        let cwd = std::env::current_dir()
            .ok()
            .map(|p| p.to_string_lossy().to_string());

        // Create session via storage trait
        let mut session_request =
            StorageCreateSessionRequest::new(fallback_title.clone(), messages.to_vec());
        if let Some(cwd) = cwd {
            session_request = session_request.with_cwd(cwd);
        }

        let result = self
            .session_storage
            .create_session(&session_request)
            .await
            .map_err(|e| e.to_string())?;

        // Generate a better title asynchronously and update the session when ready.
        let client = self.clone();
        let messages_for_title = messages.to_vec();
        let session_id = result.session_id;
        tokio::spawn(async move {
            if let Ok(title) = client.generate_session_title(&messages_for_title).await {
                let trimmed = title.trim();
                // Skip the update when generation produced nothing new.
                if !trimmed.is_empty() && trimmed != fallback_title {
                    let request =
                        StorageUpdateSessionRequest::new().with_title(trimmed.to_string());
                    let _ = client
                        .session_storage
                        .update_session(session_id, &request)
                        .await;
                }
            }
        });

        Ok(SessionInfo {
            session_id: result.session_id,
            checkpoint_id: result.checkpoint.id,
            checkpoint_created_at: result.checkpoint.created_at,
        })
    }
787
788    fn fallback_session_title(messages: &[ChatMessage]) -> String {
789        messages
790            .iter()
791            .find(|m| m.role == Role::User)
792            .and_then(|m| m.content.as_ref())
793            .map(|c| {
794                let text = c.to_string();
795                text.split_whitespace()
796                    .take(5)
797                    .collect::<Vec<_>>()
798                    .join(" ")
799            })
800            .unwrap_or_else(|| "New Session".to_string())
801    }
802
803    /// Save a new checkpoint for the current session
804    pub(crate) async fn save_checkpoint(
805        &self,
806        current: &SessionInfo,
807        messages: Vec<ChatMessage>,
808        metadata: Option<serde_json::Value>,
809    ) -> Result<SessionInfo, String> {
810        let mut checkpoint_request =
811            StorageCreateCheckpointRequest::new(messages).with_parent(current.checkpoint_id);
812
813        if let Some(meta) = metadata {
814            checkpoint_request = checkpoint_request.with_metadata(meta);
815        }
816
817        let checkpoint = self
818            .session_storage
819            .create_checkpoint(current.session_id, &checkpoint_request)
820            .await
821            .map_err(|e| e.to_string())?;
822
823        Ok(SessionInfo {
824            session_id: current.session_id,
825            checkpoint_id: checkpoint.id,
826            checkpoint_created_at: checkpoint.created_at,
827        })
828    }
829
830    /// Run agent completion (inference)
831    pub(crate) async fn run_agent_completion(
832        &self,
833        ctx: &mut HookContext<AgentState>,
834        stream_channel_tx: Option<mpsc::Sender<Result<StreamMessage, String>>>,
835    ) -> Result<ChatMessage, String> {
836        // Execute before inference hooks
837        self.hook_registry
838            .execute_hooks(ctx, &LifecycleEvent::BeforeInference)
839            .await
840            .map_err(|e| e.to_string())?
841            .ok()?;
842
843        let mut input = if let Some(llm_input) = ctx.state.llm_input.clone() {
844            llm_input
845        } else {
846            return Err(
847                "LLM input not found, make sure to register a context hook before inference"
848                    .to_string(),
849            );
850        };
851
852        // Inject session_id header if available
853        if let Some(session_id) = ctx.session_id {
854            let headers = input
855                .headers
856                .get_or_insert_with(std::collections::HashMap::new);
857            headers.insert("X-Session-Id".to_string(), session_id.to_string());
858        }
859
860        let (response_message, usage) = if let Some(tx) = stream_channel_tx {
861            // Streaming mode
862            let (internal_tx, mut internal_rx) = mpsc::channel::<GenerationDelta>(100);
863            let stream_input = LLMStreamInput {
864                model: input.model,
865                messages: input.messages,
866                max_tokens: input.max_tokens,
867                tools: input.tools,
868                stream_channel_tx: internal_tx,
869                provider_options: input.provider_options,
870                headers: input.headers,
871            };
872
873            let stakai = self.stakai.clone();
874            let chat_future = async move {
875                stakai
876                    .chat_stream(stream_input)
877                    .await
878                    .map_err(|e| e.to_string())
879            };
880
881            let receive_future = async move {
882                while let Some(delta) = internal_rx.recv().await {
883                    if tx.send(Ok(StreamMessage::Delta(delta))).await.is_err() {
884                        break;
885                    }
886                }
887            };
888
889            let (chat_result, _) = tokio::join!(chat_future, receive_future);
890            let response = chat_result?;
891            (response.choices[0].message.clone(), response.usage)
892        } else {
893            // Non-streaming mode
894            let response = self.stakai.chat(input).await.map_err(|e| e.to_string())?;
895            (response.choices[0].message.clone(), response.usage)
896        };
897
898        ctx.state.set_llm_output(response_message, usage);
899
900        // Execute after inference hooks
901        self.hook_registry
902            .execute_hooks(ctx, &LifecycleEvent::AfterInference)
903            .await
904            .map_err(|e| e.to_string())?
905            .ok()?;
906
907        let llm_output = ctx
908            .state
909            .llm_output
910            .as_ref()
911            .ok_or_else(|| "LLM output is missing from state".to_string())?;
912
913        Ok(ChatMessage::from(llm_output))
914    }
915
916    /// Generate a title for a new session
917    async fn generate_session_title(&self, messages: &[ChatMessage]) -> Result<String, String> {
918        // Pick a cheap model from the user's configured providers
919        let use_stakpak = self.stakpak.is_some();
920        let providers = self.stakai.registry().list_providers();
921        let cheap_models: &[(&str, &str)] = &[
922            ("stakpak", "claude-haiku-4-5"),
923            ("anthropic", "claude-haiku-4-5"),
924            ("openai", "gpt-4.1-mini"),
925            ("google", "gemini-2.5-flash"),
926        ];
927        let model = cheap_models
928            .iter()
929            .find_map(|(provider, model_id)| {
930                if providers.contains(&provider.to_string()) {
931                    crate::find_model(model_id, use_stakpak)
932                } else {
933                    None
934                }
935            })
936            .ok_or_else(|| "No model available for title generation".to_string())?;
937
938        let llm_messages = vec![
939            LLMMessage {
940                role: Role::System.to_string(),
941                content: LLMMessageContent::String(TITLE_GENERATOR_PROMPT.to_string()),
942            },
943            LLMMessage {
944                role: Role::User.to_string(),
945                content: LLMMessageContent::String(
946                    messages
947                        .iter()
948                        .map(|msg| {
949                            msg.content
950                                .as_ref()
951                                .unwrap_or(&MessageContent::String("".to_string()))
952                                .to_string()
953                        })
954                        .collect(),
955                ),
956            },
957        ];
958
959        let input = LLMInput {
960            model,
961            messages: llm_messages,
962            max_tokens: 100,
963            tools: None,
964            provider_options: None,
965            headers: None,
966        };
967
968        let response = self.stakai.chat(input).await.map_err(|e| e.to_string())?;
969
970        Ok(response.choices[0].message.content.to_string())
971    }
972}