1use crate::intelligent_behavior::{
7 config::IntelligentBehaviorConfig, llm_client::LlmClient, types::LlmGenerationRequest,
8};
9use crate::Result;
10use serde::{Deserialize, Serialize};
11use std::collections::HashMap;
12
/// Parses natural-language commands describing API requirements into
/// structured [`ParsedCommand`] values by delegating to an LLM.
pub struct VoiceCommandParser {
    /// Client used to send generation requests to the behavior model.
    llm_client: LlmClient,
    // Full behavior configuration; only `behavior_model` is read in this
    // file — presumably retained for other consumers. NOTE(review): confirm.
    config: IntelligentBehaviorConfig,
}
20
21impl VoiceCommandParser {
22 pub fn new(config: IntelligentBehaviorConfig) -> Self {
24 let behavior_model = config.behavior_model.clone();
25 let llm_client = LlmClient::new(behavior_model);
26
27 Self { llm_client, config }
28 }
29
30 pub async fn parse_command(&self, command: &str) -> Result<ParsedCommand> {
39 let system_prompt = r#"You are an expert API designer. Your task is to parse natural language commands
41that describe API requirements and extract structured information.
42
43Extract the following information from the command:
441. API type/category (e.g., e-commerce, social media, blog, todo app)
452. Endpoints with HTTP methods (GET, POST, PUT, DELETE, PATCH)
463. Data models with fields and types
474. Relationships between models
485. Sample data counts (e.g., "20 products")
496. Business flows (e.g., checkout, authentication, user registration)
50
51Return your response as a JSON object with this structure:
52{
53 "api_type": "string (e.g., e-commerce, social-media, blog)",
54 "title": "string (API title)",
55 "description": "string (API description)",
56 "endpoints": [
57 {
58 "path": "string (e.g., /api/products)",
59 "method": "string (GET, POST, PUT, DELETE, PATCH)",
60 "description": "string",
61 "request_body": {
62 "schema": "object schema if applicable",
63 "required": ["array of required fields"]
64 },
65 "response": {
66 "status": 200,
67 "schema": "object schema",
68 "is_array": false,
69 "count": null or number if specified
70 }
71 }
72 ],
73 "models": [
74 {
75 "name": "string (e.g., Product)",
76 "fields": [
77 {
78 "name": "string",
79 "type": "string (string, number, integer, boolean, array, object)",
80 "description": "string",
81 "required": true
82 }
83 ]
84 }
85 ],
86 "relationships": [
87 {
88 "from": "string (model name)",
89 "to": "string (model name)",
90 "type": "string (one-to-many, many-to-many, one-to-one)"
91 }
92 ],
93 "sample_counts": {
94 "model_name": number
95 },
96 "flows": [
97 {
98 "name": "string (e.g., checkout)",
99 "description": "string",
100 "steps": ["array of step descriptions"]
101 }
102 ]
103}
104
105Be specific and extract all details mentioned in the command. If something is not mentioned,
106don't include it in the response."#;
107
108 let user_prompt =
110 format!("Parse this API creation command and extract all requirements:\n\n{}", command);
111
112 let llm_request = LlmGenerationRequest {
114 system_prompt: system_prompt.to_string(),
115 user_prompt,
116 temperature: 0.3, max_tokens: 2000,
118 schema: None,
119 };
120
121 let response = self.llm_client.generate(&llm_request).await?;
123
124 let response_str = serde_json::to_string(&response).unwrap_or_default();
126 let parsed: ParsedCommand = serde_json::from_value(response).map_err(|e| {
127 crate::Error::generic(format!(
128 "Failed to parse LLM response as ParsedCommand: {}. Response: {}",
129 e, response_str
130 ))
131 })?;
132
133 Ok(parsed)
134 }
135
136 pub async fn parse_conversational_command(
141 &self,
142 command: &str,
143 context: &super::conversation::ConversationContext,
144 ) -> Result<ParsedCommand> {
145 let system_prompt = r#"You are an expert API designer helping to build an API through conversation.
147The user is providing incremental commands to modify or extend an existing API specification.
148
149Extract the following information from the command:
1501. What is being added/modified (endpoints, models, flows)
1512. Details about the addition/modification
1523. Any relationships or dependencies
153
154Return your response as a JSON object with the same structure as parse_command, but focus only
155on what is NEW or MODIFIED. If the command is asking to add something, include it. If it's asking
156to modify something, include the modified version.
157
158If the command is asking a question or requesting confirmation, return an empty endpoints array
159and include a "question" or "confirmation" field in the response."#;
160
161 let context_summary = format!(
163 "Current API: {}\nExisting endpoints: {}\nExisting models: {}",
164 context.current_spec.as_ref().map(|s| s.title()).unwrap_or("None"),
165 context
166 .current_spec
167 .as_ref()
168 .map(|s| {
169 s.all_paths_and_operations()
170 .iter()
171 .map(|(path, ops)| {
172 format!(
173 "{} ({})",
174 path,
175 ops.keys().map(|s| s.as_str()).collect::<Vec<_>>().join(", ")
176 )
177 })
178 .collect::<Vec<_>>()
179 .join(", ")
180 })
181 .unwrap_or_else(|| "None".to_string()),
182 context
183 .current_spec
184 .as_ref()
185 .and_then(|s| s.spec.components.as_ref())
186 .map(|c| c.schemas.keys().cloned().collect::<Vec<_>>().join(", "))
187 .unwrap_or_else(|| "None".to_string())
188 );
189
190 let user_prompt = format!("Context:\n{}\n\nNew command:\n{}", context_summary, command);
192
193 let llm_request = LlmGenerationRequest {
195 system_prompt: system_prompt.to_string(),
196 user_prompt,
197 temperature: 0.3,
198 max_tokens: 2000,
199 schema: None,
200 };
201
202 let response = self.llm_client.generate(&llm_request).await?;
204
205 let response_str = serde_json::to_string(&response).unwrap_or_default();
207 let parsed: ParsedCommand = serde_json::from_value(response).map_err(|e| {
208 crate::Error::generic(format!(
209 "Failed to parse conversational LLM response: {}. Response: {}",
210 e, response_str
211 ))
212 })?;
213
214 Ok(parsed)
215 }
216}
217
218#[derive(Debug, Clone, Serialize, Deserialize)]
220pub struct ParsedCommand {
221 pub api_type: String,
223 pub title: String,
225 pub description: String,
227 pub endpoints: Vec<EndpointRequirement>,
229 pub models: Vec<ModelRequirement>,
231 #[serde(default)]
233 pub relationships: Vec<RelationshipRequirement>,
234 #[serde(default)]
236 pub sample_counts: HashMap<String, usize>,
237 #[serde(default)]
239 pub flows: Vec<FlowRequirement>,
240}
241
/// Single endpoint requirement extracted from a command.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct EndpointRequirement {
    /// Endpoint path, e.g. `/api/products`.
    pub path: String,
    /// HTTP method (GET, POST, PUT, DELETE, PATCH) as produced by the LLM.
    pub method: String,
    /// Human-readable description of the endpoint.
    pub description: String,
    /// Request-body requirement; `None` when omitted from the response.
    #[serde(default)]
    pub request_body: Option<RequestBodyRequirement>,
    /// Response requirement; `None` when omitted from the response.
    #[serde(default)]
    pub response: Option<ResponseRequirement>,
}
258
/// Request-body details for an endpoint requirement.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct RequestBodyRequirement {
    /// Free-form JSON schema for the body, when the LLM supplies one.
    #[serde(default)]
    pub schema: Option<serde_json::Value>,
    /// Names of required body fields; empty when omitted.
    #[serde(default)]
    pub required: Vec<String>,
}
269
/// Response details for an endpoint requirement.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ResponseRequirement {
    /// HTTP status code; defaults to 200 when omitted (see `default_status`).
    #[serde(default = "default_status")]
    pub status: u16,
    /// Free-form JSON schema for the response body, when supplied.
    #[serde(default)]
    pub schema: Option<serde_json::Value>,
    /// Whether the response body is an array of items; defaults to `false`.
    #[serde(default)]
    pub is_array: bool,
    /// Requested item count when the command specifies one
    /// (e.g. "20 products"); `None` otherwise.
    #[serde(default)]
    pub count: Option<usize>,
}
286
/// Serde default for [`ResponseRequirement::status`]: HTTP 200 (OK).
fn default_status() -> u16 {
    200
}
290
/// Data-model requirement (e.g. "Product") extracted from a command.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ModelRequirement {
    /// Model name, e.g. "Product".
    pub name: String,
    /// Fields that make up the model.
    pub fields: Vec<FieldRequirement>,
}
299
/// Single field of a [`ModelRequirement`].
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct FieldRequirement {
    /// Field name.
    pub name: String,
    /// Field type: string, number, integer, boolean, array, or object.
    pub r#type: String,
    /// Human-readable description; empty string when omitted.
    #[serde(default)]
    pub description: String,
    /// Whether the field is required; defaults to `true` when omitted
    /// (see `default_true`).
    #[serde(default = "default_true")]
    pub required: bool,
}
314
/// Serde default for [`FieldRequirement::required`]: fields are treated as
/// required unless the response explicitly marks them optional.
fn default_true() -> bool {
    true
}
318
/// Relationship between two models.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct RelationshipRequirement {
    /// Source model name.
    pub from: String,
    /// Target model name.
    pub to: String,
    /// Relationship kind: "one-to-many", "many-to-many", or "one-to-one".
    pub r#type: String,
}
329
/// Business flow (e.g. checkout, registration) described by a command.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct FlowRequirement {
    /// Flow name, e.g. "checkout".
    pub name: String,
    /// Human-readable description of the flow.
    pub description: String,
    /// Ordered step descriptions; empty when omitted.
    #[serde(default)]
    pub steps: Vec<String>,
}
341
342pub type ApiRequirement = ParsedCommand;