// stakpak_api/local/hooks/task_board_context/mod.rs
use stakpak_shared::define_hook;
2use stakpak_shared::hooks::{Hook, HookAction, HookContext, HookError, LifecycleEvent};
3use stakpak_shared::models::integrations::openai::Role;
4use stakpak_shared::models::llm::{LLMInput, LLMMessage, LLMMessageContent};
5
6use crate::local::context_managers::ContextManager;
7use crate::local::context_managers::task_board_context_manager::{
8 TaskBoardContextManager, TaskBoardContextManagerOptions,
9};
10use crate::models::AgentState;
11
/// Task-board system prompt, bundled at compile time from the sibling
/// `system_prompt.txt`; injected as the leading system message of every
/// inference request built by this hook.
const SYSTEM_PROMPT: &str = include_str!("./system_prompt.txt");
13
/// Lifecycle hook that, just before each inference, rebuilds the agent's
/// LLM input: a task-board system prompt followed by a context-reduced copy
/// of the agent's message history.
pub struct TaskBoardContextHook {
    /// Reduces the raw agent message history before it is sent to the model.
    pub context_manager: TaskBoardContextManager,
}
17
/// Optional tuning knobs for [`TaskBoardContextHook`]. Any field left as
/// `None` falls back to the default applied in [`TaskBoardContextHook::new`].
pub struct TaskBoardContextHookOptions {
    // NOTE(review): the unit of this limit (chars vs bytes vs tokens) is
    // defined by TaskBoardContextManager — confirm there.
    /// Size limit for historical action messages kept in context (default 100).
    pub history_action_message_size_limit: Option<usize>,
    /// Number of most-recent action messages to keep (default 1).
    pub history_action_message_keep_last_n: Option<usize>,
    /// Number of most-recent action results to keep (default 50).
    pub history_action_result_keep_last_n: Option<usize>,
}
23
24impl TaskBoardContextHook {
25 pub fn new(options: TaskBoardContextHookOptions) -> Self {
26 let context_manager = TaskBoardContextManager::new(TaskBoardContextManagerOptions {
27 history_action_message_size_limit: options
28 .history_action_message_size_limit
29 .unwrap_or(100),
30 history_action_message_keep_last_n: options
31 .history_action_message_keep_last_n
32 .unwrap_or(1),
33 history_action_result_keep_last_n: options
34 .history_action_result_keep_last_n
35 .unwrap_or(50),
36 });
37
38 Self { context_manager }
39 }
40}
41
42define_hook!(
43 TaskBoardContextHook,
44 "task_board_context",
45 async |&self, ctx: &mut HookContext<AgentState>, event: &LifecycleEvent| {
46 if *event != LifecycleEvent::BeforeInference {
47 return Ok(HookAction::Continue);
48 }
49
50 let model = ctx.state.active_model.clone();
51
52 let tools = ctx
53 .state
54 .tools
55 .clone()
56 .map(|t| t.into_iter().map(Into::into).collect());
57
58 let mut messages = Vec::new();
59 messages.push(LLMMessage {
60 role: Role::System.to_string(),
61 content: LLMMessageContent::String(SYSTEM_PROMPT.to_string()),
62 });
63 messages.extend(
64 self.context_manager
65 .reduce_context(ctx.state.messages.clone()),
66 );
67
68 ctx.state.llm_input = Some(LLMInput {
69 model,
70 messages,
71 max_tokens: 16000,
72 tools,
73 provider_options: None,
74 headers: None,
75 });
76
77 Ok(HookAction::Continue)
78 }
79);