//! synth_ai_core/api/inference.rs
use serde_json::Value;

use crate::errors::CoreError;
use crate::http::HttpClient;
use crate::models::normalize_model_identifier;
/// Thin, borrowed client for the inference API endpoints.
///
/// Holds a reference to the shared [`HttpClient`] rather than owning a
/// transport, so it is cheap to construct per call site.
pub struct InferenceClient<'a> {
    // Shared HTTP transport used for every request issued by this client.
    http: &'a HttpClient,
}
13
14impl<'a> InferenceClient<'a> {
15 pub(crate) fn new(http: &'a HttpClient) -> Self {
16 Self { http }
17 }
18
19 pub async fn chat_completion(&self, mut body: Value) -> Result<Value, CoreError> {
21 let obj = body
22 .as_object_mut()
23 .ok_or_else(|| CoreError::Validation("request must be an object".to_string()))?;
24
25 let model = obj
26 .get("model")
27 .and_then(|v| v.as_str())
28 .ok_or_else(|| CoreError::Validation("model is required".to_string()))?;
29 let normalized = normalize_model_identifier(model, false)?;
30 obj.insert("model".to_string(), Value::String(normalized));
31
32 if !obj.contains_key("thinking_budget") {
33 obj.insert("thinking_budget".to_string(), Value::Number(256.into()));
34 }
35
36 self.http
37 .post_json("/api/inference/v1/chat/completions", &body)
38 .await
39 .map_err(CoreError::from)
40 }
41}