Skip to main content

stakpak_api/local/hooks/inline_scratchpad_context/
mod.rs

1use stakpak_shared::define_hook;
2use stakpak_shared::hooks::{Hook, HookAction, HookContext, HookError, LifecycleEvent};
3use stakpak_shared::models::integrations::openai::Role;
4use stakpak_shared::models::llm::{LLMInput, LLMMessage, LLMMessageContent};
5
6use crate::local::context_managers::ContextManager;
7use crate::local::context_managers::scratchpad_context_manager::{
8    ScratchpadContextManager, ScratchpadContextManagerOptions,
9};
10use crate::local::{ModelOptions, ModelSet};
11use crate::models::AgentState;
12
/// System prompt sent as the first message of every inference request;
/// embedded into the binary at compile time from the adjacent file.
const SYSTEM_PROMPT: &str = include_str!("./system_prompt.txt");
14
/// Hook that assembles the agent's LLM input just before inference:
/// it prepends the bundled system prompt and passes the conversation
/// history through a scratchpad context manager to reduce it.
pub struct InlineScratchpadContextHook {
    /// Used to resolve the concrete model for `ctx.state.agent_model`.
    pub model_set: ModelSet,
    /// Reduces (`reduce_context`) the message history before it is sent to the model.
    pub context_manager: ScratchpadContextManager,
}
/// Configuration for [`InlineScratchpadContextHook::new`].
pub struct InlineScratchpadContextHookOptions {
    /// Converted (`Into`) into the hook's [`ModelSet`].
    pub model_options: ModelOptions,
    /// Size limit for history action messages; defaults to 100 when `None`.
    /// NOTE(review): unit (chars vs. tokens) is defined by `ScratchpadContextManager` — confirm there.
    pub history_action_message_size_limit: Option<usize>,
    /// Number of most-recent action messages to keep; defaults to 1 when `None`.
    pub history_action_message_keep_last_n: Option<usize>,
    /// Number of most-recent action results to keep; defaults to 50 when `None`.
    pub history_action_result_keep_last_n: Option<usize>,
}
25
26impl InlineScratchpadContextHook {
27    pub fn new(options: InlineScratchpadContextHookOptions) -> Self {
28        let model_set: ModelSet = options.model_options.into();
29
30        let context_manager = ScratchpadContextManager::new(ScratchpadContextManagerOptions {
31            history_action_message_size_limit: options
32                .history_action_message_size_limit
33                .unwrap_or(100),
34            history_action_message_keep_last_n: options
35                .history_action_message_keep_last_n
36                .unwrap_or(1),
37            history_action_result_keep_last_n: options
38                .history_action_result_keep_last_n
39                .unwrap_or(50),
40        });
41
42        Self {
43            model_set,
44            context_manager,
45        }
46    }
47}
48
49define_hook!(
50    InlineScratchpadContextHook,
51    "inline_scratchpad_context",
52    async |&self, ctx: &mut HookContext<AgentState>, event: &LifecycleEvent| {
53        if *event != LifecycleEvent::BeforeInference {
54            return Ok(HookAction::Continue);
55        }
56
57        let model = self.model_set.get_model(&ctx.state.agent_model);
58
59        let tools = ctx
60            .state
61            .tools
62            .clone()
63            .map(|t| t.into_iter().map(Into::into).collect());
64
65        let mut messages = Vec::new();
66        messages.push(LLMMessage {
67            role: Role::System.to_string(),
68            content: LLMMessageContent::String(SYSTEM_PROMPT.to_string()),
69        });
70        messages.extend(
71            self.context_manager
72                .reduce_context(ctx.state.messages.clone()),
73        );
74
75        ctx.state.llm_input = Some(LLMInput {
76            model,
77            messages,
78            max_tokens: 16000,
79            tools,
80            provider_options: None,
81            headers: None,
82        });
83
84        Ok(HookAction::Continue)
85    }
86);