// stakpak_api/local/hooks/inline_scratchpad_context/mod.rs
use stakpak_shared::define_hook;
2use stakpak_shared::hooks::{Hook, HookAction, HookContext, HookError, LifecycleEvent};
3use stakpak_shared::models::integrations::openai::Role;
4use stakpak_shared::models::llm::{LLMInput, LLMMessage, LLMMessageContent};
5
6use crate::local::context_managers::ContextManager;
7use crate::local::context_managers::scratchpad_context_manager::{
8 ScratchpadContextManager, ScratchpadContextManagerOptions,
9};
10use crate::models::AgentState;
11
/// System prompt prepended to every inference request by this hook,
/// embedded at compile time from the adjacent `system_prompt.txt`.
const SYSTEM_PROMPT: &str = include_str!("./system_prompt.txt");
13
/// Lifecycle hook that rebuilds the LLM input before each inference,
/// using a scratchpad-style context manager to trim conversation history.
pub struct InlineScratchpadContextHook {
    // Reduces the agent's message history before it is sent to the model.
    pub context_manager: ScratchpadContextManager,
}
/// Configuration for [`InlineScratchpadContextHook`]. Each field is optional;
/// `None` falls back to the default applied in `InlineScratchpadContextHook::new`
/// (100, 1, and 50 respectively).
pub struct InlineScratchpadContextHookOptions {
    // Size limit applied to action messages in history (default: 100).
    pub history_action_message_size_limit: Option<usize>,
    // How many recent action messages to keep untrimmed (default: 1).
    pub history_action_message_keep_last_n: Option<usize>,
    // How many recent action results to keep (default: 50).
    pub history_action_result_keep_last_n: Option<usize>,
}
22
23impl InlineScratchpadContextHook {
24 pub fn new(options: InlineScratchpadContextHookOptions) -> Self {
25 let context_manager = ScratchpadContextManager::new(ScratchpadContextManagerOptions {
26 history_action_message_size_limit: options
27 .history_action_message_size_limit
28 .unwrap_or(100),
29 history_action_message_keep_last_n: options
30 .history_action_message_keep_last_n
31 .unwrap_or(1),
32 history_action_result_keep_last_n: options
33 .history_action_result_keep_last_n
34 .unwrap_or(50),
35 });
36
37 Self { context_manager }
38 }
39}
40
// Registers the hook under the name "inline_scratchpad_context".
// The closure runs on every lifecycle event; only `BeforeInference` does work —
// it assembles a fresh `LLMInput` (system prompt + reduced history) into
// `ctx.state.llm_input` for the upcoming model call.
define_hook!(
    InlineScratchpadContextHook,
    "inline_scratchpad_context",
    async |&self, ctx: &mut HookContext<AgentState>, event: &LifecycleEvent| {
        // All other events pass through untouched.
        if *event != LifecycleEvent::BeforeInference {
            return Ok(HookAction::Continue);
        }

        let model = ctx.state.active_model.clone();

        // Convert the agent's tool definitions into the LLM tool format.
        // `None` stays `None` — tools are optional on the input.
        let tools = ctx
            .state
            .tools
            .clone()
            .map(|t| t.into_iter().map(Into::into).collect());

        // System prompt goes first, followed by the history after the
        // scratchpad manager has trimmed it per the configured limits.
        let mut messages = Vec::new();
        messages.push(LLMMessage {
            role: Role::System.to_string(),
            content: LLMMessageContent::String(SYSTEM_PROMPT.to_string()),
        });
        messages.extend(
            self.context_manager
                .reduce_context(ctx.state.messages.clone()),
        );

        // Overwrites any previously staged input for this inference.
        // NOTE(review): max_tokens is hard-coded to 16000 — confirm this
        // should not come from configuration or the active model.
        ctx.state.llm_input = Some(LLMInput {
            model,
            messages,
            max_tokens: 16000,
            tools,
            provider_options: None,
            headers: None,
        });

        Ok(HookAction::Continue)
    }
);