stakpak_api/local/hooks/task_board_context/mod.rs

use stakpak_shared::define_hook;
use stakpak_shared::hooks::{Hook, HookAction, HookContext, HookError, LifecycleEvent};
use stakpak_shared::models::integrations::openai::Role;
use stakpak_shared::models::llm::{LLMInput, LLMMessage, LLMMessageContent};

use crate::local::context_managers::ContextManager;
use crate::local::context_managers::task_board_context_manager::{
    TaskBoardContextManager, TaskBoardContextManagerOptions,
};
use crate::local::{ModelOptions, ModelSet};
use crate::models::AgentState;

const SYSTEM_PROMPT: &str = include_str!("./system_prompt.txt");

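/// Hook that rebuilds the agent's LLM input before each inference call, pairing the
/// task-board system prompt with a reduced view of the conversation history.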
pub struct TaskBoardContextHook {
    pub model_set: ModelSet,
    pub context_manager: TaskBoardContextManager,
}

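/// Construction options for [`TaskBoardContextHook`]; unset limits fall back to the
/// defaults applied in [`TaskBoardContextHook::new`].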
pub struct TaskBoardContextHookOptions {
    pub model_options: ModelOptions,
    pub history_action_message_size_limit: Option<usize>,
    pub history_action_message_keep_last_n: Option<usize>,
    pub history_action_result_keep_last_n: Option<usize>,
}

impl TaskBoardContextHook {
    pub fn new(options: TaskBoardContextHookOptions) -> Self {
        let model_set: ModelSet = options.model_options.into();

        let context_manager = TaskBoardContextManager::new(TaskBoardContextManagerOptions {
            history_action_message_size_limit: options
                .history_action_message_size_limit
                .unwrap_or(100),
            history_action_message_keep_last_n: options
                .history_action_message_keep_last_n
                .unwrap_or(1),
            history_action_result_keep_last_n: options
                .history_action_result_keep_last_n
                .unwrap_or(50),
        });

        Self {
            model_set,
            context_manager,
        }
    }
}

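// Registered as "task_board_context". The hook only rewrites context on the
// BeforeInference lifecycle event and lets every other event pass through untouched.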
define_hook!(
    TaskBoardContextHook,
    "task_board_context",
    async |&self, ctx: &mut HookContext<AgentState>, event: &LifecycleEvent| {
        if *event != LifecycleEvent::BeforeInference {
            return Ok(HookAction::Continue);
        }

        let model = self.model_set.get_model(&ctx.state.agent_model);

        let tools = ctx
            .state
            .tools
            .clone()
            .map(|t| t.into_iter().map(Into::into).collect());

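        // Build the prompt: the task-board system prompt first, then the reduced
        // conversation history produced by the context manager.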
        let mut messages = Vec::new();
        messages.push(LLMMessage {
            role: Role::System.to_string(),
            content: LLMMessageContent::String(SYSTEM_PROMPT.to_string()),
        });
        messages.extend(
            self.context_manager
                .reduce_context(ctx.state.messages.clone()),
        );

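        // Override the LLM input the agent will send for this inference turn.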
        ctx.state.llm_input = Some(LLMInput {
            model,
            messages,
            max_tokens: 16000,
            tools,
            provider_options: None,
            headers: None,
        });

        Ok(HookAction::Continue)
    }
);