1use async_trait::async_trait;
2use chrono::{DateTime, Utc};
3use serde::{Deserialize, Serialize};
4use std::collections::HashMap;
5use std::sync::Arc;
6use tokio::sync::RwLock;
7use tracing::{error, info};
8use uuid::Uuid;
9use vex_llm::{LlmProvider, LlmRequest};
10use vex_queue::job::BackoffStrategy;
11use vex_queue::{Job, JobResult};
12
/// Input payload describing one agent execution request.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct AgentJobPayload {
    /// Identifier of the agent that should handle the prompt.
    pub agent_id: String,
    /// Prompt text forwarded verbatim to the LLM (see `execute`).
    pub prompt: String,
    /// Optional conversation/context identifier. Not read anywhere in
    /// this file — presumably consumed by callers; TODO confirm.
    pub context_id: Option<String>,
}
20
/// Outcome of a single agent job attempt, stored in the shared
/// [`JobResultStore`] keyed by `job_id`.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct AgentJobResult {
    /// Queue job id this result belongs to.
    pub job_id: Uuid,
    /// Agent that processed (or failed to process) the prompt.
    pub agent_id: String,
    /// The prompt that was sent to the LLM.
    pub prompt: String,
    /// LLM response text; empty string when the call failed.
    pub response: String,
    /// Token usage reported by the provider, when available.
    pub tokens_used: Option<u32>,
    /// UTC timestamp taken when the attempt finished.
    pub completed_at: DateTime<Utc>,
    /// `true` when the LLM call succeeded.
    pub success: bool,
    /// Error message from a failed LLM call; `None` on success.
    pub error: Option<String>,
}
33
/// Shared, async-safe map from job id to that job's most recent result.
pub type JobResultStore = Arc<RwLock<HashMap<Uuid, AgentJobResult>>>;
36
37pub fn new_result_store() -> JobResultStore {
39 Arc::new(RwLock::new(HashMap::new()))
40}
41
/// A queue job that runs one agent prompt against an LLM provider and
/// publishes its [`AgentJobResult`] into a shared store.
#[derive(Debug)]
pub struct AgentExecutionJob {
    /// Queue-assigned id; also the key under which the outcome is stored.
    pub job_id: Uuid,
    /// What to execute: agent id, prompt, and optional context.
    pub payload: AgentJobPayload,
    /// Provider used to complete the prompt.
    pub llm: Arc<dyn LlmProvider>,
    /// Destination for this job's outcome.
    pub result_store: JobResultStore,
}
49
50impl AgentExecutionJob {
51 pub fn new(
52 job_id: Uuid,
53 payload: AgentJobPayload,
54 llm: Arc<dyn LlmProvider>,
55 result_store: JobResultStore,
56 ) -> Self {
57 Self {
58 job_id,
59 payload,
60 llm,
61 result_store,
62 }
63 }
64}
65
66#[async_trait]
67impl Job for AgentExecutionJob {
68 fn name(&self) -> &str {
69 "agent_execution"
70 }
71
72 async fn execute(&mut self) -> JobResult {
73 info!(job_id = %self.job_id, agent_id = %self.payload.agent_id, "Executing agent job");
74
75 let request = LlmRequest::with_role("You are a helpful VEX agent.", &self.payload.prompt);
76
77 match self.llm.complete(request).await {
78 Ok(response) => {
79 info!(
80 job_id = %self.job_id,
81 agent_id = %self.payload.agent_id,
82 response_len = response.content.len(),
83 "Agent job completed successfully"
84 );
85
86 let result = AgentJobResult {
88 job_id: self.job_id,
89 agent_id: self.payload.agent_id.clone(),
90 prompt: self.payload.prompt.clone(),
91 response: response.content,
92 tokens_used: response.tokens_used,
93 completed_at: Utc::now(),
94 success: true,
95 error: None,
96 };
97
98 self.result_store.write().await.insert(self.job_id, result);
99
100 JobResult::Success
101 }
102 Err(e) => {
103 error!(job_id = %self.job_id, error = %e, "LLM call failed");
104
105 let result = AgentJobResult {
107 job_id: self.job_id,
108 agent_id: self.payload.agent_id.clone(),
109 prompt: self.payload.prompt.clone(),
110 response: String::new(),
111 tokens_used: None,
112 completed_at: Utc::now(),
113 success: false,
114 error: Some(e.to_string()),
115 };
116
117 self.result_store.write().await.insert(self.job_id, result);
118
119 JobResult::Retry(e.to_string())
120 }
121 }
122 }
123
124 fn max_retries(&self) -> u32 {
125 5
126 }
127
128 fn backoff_strategy(&self) -> BackoffStrategy {
129 BackoffStrategy::Exponential {
130 initial_secs: 2,
131 multiplier: 2.0,
132 }
133 }
134}