use serde::Serialize;
use crate::client::BoxStream;
use crate::error::Result;
use crate::types::audio::{CreateSpeechRequest, CreateTranscriptionRequest, TranscriptionResponse};
use crate::types::image::{CreateImageRequest, ImagesResponse};
use crate::types::moderation::{ModerationRequest, ModerationResponse};
use crate::types::ocr::{OcrRequest, OcrResponse};
use crate::types::rerank::{RerankRequest, RerankResponse};
use crate::types::search::{SearchRequest, SearchResponse};
use crate::types::{
ChatCompletionChunk, ChatCompletionRequest, ChatCompletionResponse, EmbeddingRequest, EmbeddingResponse,
ModelsListResponse, Usage,
};
/// A unified request envelope covering every operation the client can issue.
///
/// Each variant wraps the typed request payload for one endpoint; `Chat` and
/// `ChatStream` share a payload type and differ only in delivery mode
/// (buffered response vs. chunk stream).
#[derive(Debug, Clone, Serialize)]
pub enum LlmRequest {
    /// Buffered chat completion.
    Chat(ChatCompletionRequest),
    /// Streaming chat completion (same payload as `Chat`, streamed delivery).
    ChatStream(ChatCompletionRequest),
    /// Text embedding generation.
    Embed(EmbeddingRequest),
    /// List available models; carries no payload.
    ListModels,
    /// Image generation.
    ImageGenerate(CreateImageRequest),
    /// Text-to-speech synthesis.
    Speech(CreateSpeechRequest),
    /// Audio transcription.
    Transcribe(CreateTranscriptionRequest),
    /// Content moderation.
    Moderate(ModerationRequest),
    /// Document reranking.
    Rerank(RerankRequest),
    /// Search.
    Search(SearchRequest),
    /// Optical character recognition.
    Ocr(OcrRequest),
}
impl LlmRequest {
    /// Name of the logical operation, for metrics/telemetry grouping.
    ///
    /// Streaming and buffered chat are deliberately grouped under the
    /// single name `"chat"`; use [`Self::request_type`] to distinguish them.
    #[must_use]
    pub fn operation_name(&self) -> &'static str {
        match self {
            Self::ListModels => "list_models",
            Self::Chat(_) | Self::ChatStream(_) => "chat",
            Self::Embed(_) => "embeddings",
            Self::ImageGenerate(_) => "image_generate",
            Self::Speech(_) => "speech",
            Self::Transcribe(_) => "transcribe",
            Self::Moderate(_) => "moderate",
            Self::Rerank(_) => "rerank",
            Self::Search(_) => "search",
            Self::Ocr(_) => "ocr",
        }
    }
    /// Exact request kind. Identical to [`Self::operation_name`] for every
    /// variant except `ChatStream`, which reports `"chat_stream"`.
    #[must_use]
    pub fn request_type(&self) -> &'static str {
        if matches!(self, Self::ChatStream(_)) {
            "chat_stream"
        } else {
            self.operation_name()
        }
    }
    /// Model identifier carried by the request, if any.
    ///
    /// `ListModels` has no payload and therefore no model; image generation
    /// and moderation treat the model as optional in their payloads.
    #[must_use]
    pub fn model(&self) -> Option<&str> {
        match self {
            Self::ListModels => None,
            // Optional-model payloads.
            Self::ImageGenerate(req) => req.model.as_deref(),
            Self::Moderate(req) => req.model.as_deref(),
            // Required-model payloads.
            Self::Chat(req) | Self::ChatStream(req) => Some(req.model.as_str()),
            Self::Embed(req) => Some(req.model.as_str()),
            Self::Speech(req) => Some(req.model.as_str()),
            Self::Transcribe(req) => Some(req.model.as_str()),
            Self::Rerank(req) => Some(req.model.as_str()),
            Self::Search(req) => Some(req.model.as_str()),
            Self::Ocr(req) => Some(req.model.as_str()),
        }
    }
}
/// A unified response envelope, mirroring the variants of `LlmRequest`.
///
/// No `#[derive(Debug)]`: the `ChatStream` payload is a stream and is not
/// `Debug`, so a manual `Debug` impl is provided elsewhere in this file.
pub enum LlmResponse {
    /// Buffered chat completion result.
    Chat(ChatCompletionResponse),
    /// Stream of chat completion chunks; each item may be an error.
    ChatStream(BoxStream<'static, Result<ChatCompletionChunk>>),
    /// Embedding result.
    Embed(EmbeddingResponse),
    /// Available-models listing.
    ListModels(ModelsListResponse),
    /// Image generation result.
    ImageGenerate(ImagesResponse),
    /// Binary payload returned by the speech endpoint (presumably encoded audio).
    Speech(bytes::Bytes),
    /// Transcription result.
    Transcribe(TranscriptionResponse),
    /// Moderation result.
    Moderate(ModerationResponse),
    /// Rerank result.
    Rerank(RerankResponse),
    /// Search result.
    Search(SearchResponse),
    /// OCR result.
    Ocr(OcrResponse),
}
impl LlmResponse {
    /// Usage accounting attached to the response, when the response kind
    /// carries one.
    ///
    /// Only chat, embedding, and OCR responses expose a `usage` field; all
    /// other variants (including streams, where usage would arrive chunk by
    /// chunk) yield `None`.
    #[must_use]
    pub fn usage(&self) -> Option<&Usage> {
        match self {
            // Variants without a usage field. Enumerated explicitly (no `_`)
            // so adding a variant forces this match to be revisited.
            Self::ChatStream(_)
            | Self::ListModels(_)
            | Self::ImageGenerate(_)
            | Self::Speech(_)
            | Self::Transcribe(_)
            | Self::Moderate(_)
            | Self::Rerank(_)
            | Self::Search(_) => None,
            // Variants carrying an optional usage field.
            Self::Chat(resp) => resp.usage.as_ref(),
            Self::Embed(resp) => resp.usage.as_ref(),
            Self::Ocr(resp) => resp.usage.as_ref(),
        }
    }
}
// Hand-written `Debug`: the `ChatStream` payload is not `Debug`, and the raw
// `Speech` bytes are summarized by length rather than dumped.
impl std::fmt::Debug for LlmResponse {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        match self {
            // Non-debuggable / binary payloads get placeholder renderings.
            Self::ChatStream(_) => f.write_str("ChatStream(<stream>)"),
            Self::Speech(audio) => {
                let summary = audio.len();
                f.debug_tuple("Speech")
                    .field(&format_args!("<{summary} bytes>"))
                    .finish()
            }
            // Everything else delegates to the payload's own `Debug`.
            Self::Chat(inner) => f.debug_tuple("Chat").field(inner).finish(),
            Self::Embed(inner) => f.debug_tuple("Embed").field(inner).finish(),
            Self::ListModels(inner) => f.debug_tuple("ListModels").field(inner).finish(),
            Self::ImageGenerate(inner) => f.debug_tuple("ImageGenerate").field(inner).finish(),
            Self::Transcribe(inner) => f.debug_tuple("Transcribe").field(inner).finish(),
            Self::Moderate(inner) => f.debug_tuple("Moderate").field(inner).finish(),
            Self::Rerank(inner) => f.debug_tuple("Rerank").field(inner).finish(),
            Self::Search(inner) => f.debug_tuple("Search").field(inner).finish(),
            Self::Ocr(inner) => f.debug_tuple("Ocr").field(inner).finish(),
        }
    }
}