// swiftide_integrations/openai/simple_prompt.rs

use async_openai::types::ChatCompletionRequestUserMessageArgs;
use async_trait::async_trait;
#[cfg(feature = "metrics")]
use swiftide_core::metrics::emit_usage;
use swiftide_core::{
    SimplePrompt,
    chat_completion::{Usage, errors::LanguageModelError},
    prompt::Prompt,
    util::debug_long_utf8,
};

use crate::openai::openai_error_to_language_model_error;

use super::GenericOpenAI;
use anyhow::Result;
21#[async_trait]
24impl<
25 C: async_openai::config::Config + std::default::Default + Sync + Send + std::fmt::Debug + Clone,
26> SimplePrompt for GenericOpenAI<C>
27{
28 #[cfg_attr(not(feature = "langfuse"), tracing::instrument(skip_all, err))]
42 #[cfg_attr(
43 feature = "langfuse",
44 tracing::instrument(skip_all, err, fields(langfuse.type = "GENERATION"))
45 )]
46 async fn prompt(&self, prompt: Prompt) -> Result<String, LanguageModelError> {
47 let model = self
49 .default_options
50 .prompt_model
51 .as_ref()
52 .ok_or_else(|| LanguageModelError::PermanentError("Model not set".into()))?;
53
54 let request = self
56 .chat_completion_request_defaults()
57 .model(model)
58 .messages(vec![
59 ChatCompletionRequestUserMessageArgs::default()
60 .content(prompt.render()?)
61 .build()
62 .map_err(LanguageModelError::permanent)?
63 .into(),
64 ])
65 .build()
66 .map_err(LanguageModelError::permanent)?;
67
68 tracing::trace!(
70 model = &model,
71 messages = debug_long_utf8(
72 serde_json::to_string_pretty(&request.messages.last())
73 .map_err(LanguageModelError::permanent)?,
74 100
75 ),
76 "[SimplePrompt] Request to openai"
77 );
78
79 let mut response = self
81 .client
82 .chat()
83 .create(request.clone())
84 .await
85 .map_err(openai_error_to_language_model_error)?;
86
87 if cfg!(feature = "langfuse") {
88 let usage = response.usage.clone().unwrap_or_default();
89 tracing::debug!(
90 langfuse.model = model,
91 langfuse.input = %serde_json::to_string_pretty(&request).unwrap_or_default(),
92 langfuse.output = %serde_json::to_string_pretty(&response).unwrap_or_default(),
93 langfuse.usage = %serde_json::to_string_pretty(&usage).unwrap_or_default(),
94 );
95 }
96
97 let message = response
98 .choices
99 .remove(0)
100 .message
101 .content
102 .take()
103 .ok_or_else(|| {
104 LanguageModelError::PermanentError("Expected content in response".into())
105 })?;
106
107 {
108 if let Some(usage) = response.usage.as_ref() {
109 if let Some(callback) = &self.on_usage {
110 let usage = Usage {
111 prompt_tokens: usage.prompt_tokens,
112 completion_tokens: usage.completion_tokens,
113 total_tokens: usage.total_tokens,
114 };
115 callback(&usage).await?;
116 }
117 #[cfg(feature = "metrics")]
118 emit_usage(
119 model,
120 usage.prompt_tokens.into(),
121 usage.completion_tokens.into(),
122 usage.total_tokens.into(),
123 self.metric_metadata.as_ref(),
124 );
125 } else {
126 tracing::warn!("Metrics enabled but no usage data found in response");
127 }
128 }
129
130 Ok(message)
134 }
135}