#![doc = include_str!(concat!(env!("OUT_DIR"), "/docs/openai_compatible.md"))]
pub mod generic;
pub mod streaming;
pub mod types;
use async_openai::types::chat::ChatCompletionStreamOptions;
use crate::providers::openai::mappers::map_tools;
use crate::{Context, LlmError};
pub use streaming::{create_custom_stream_generic, process_compatible_stream};
pub use types::{ChatCompletionStreamResponse, CompatibleChatRequest};
/// Assembles a streaming chat-completion request for an OpenAI-compatible endpoint.
///
/// Messages and tools from `context` are translated into the wire types via
/// [`types::map_messages`] and [`map_tools`]. Streaming is always enabled, and
/// `stream_options.include_usage` is set so the final stream chunk carries
/// token-usage data.
///
/// # Errors
///
/// Returns [`LlmError`] if mapping the context's messages or tools fails.
pub fn build_chat_request(model: &str, context: &Context) -> Result<CompatibleChatRequest, LlmError> {
    let mapped_messages = types::map_messages(context.messages())?;

    // An empty tool list is sent as `None` rather than `Some([])`, since some
    // compatible backends reject an empty tools array.
    let context_tools = context.tools();
    let mapped_tools = match context_tools.is_empty() {
        true => None,
        false => Some(map_tools(context_tools)?),
    };

    let stream_options = ChatCompletionStreamOptions {
        include_usage: Some(true),
        include_obfuscation: None,
    };

    Ok(CompatibleChatRequest {
        model: model.to_owned(),
        messages: mapped_messages,
        stream: Some(true),
        tools: mapped_tools,
        stream_options: Some(stream_options),
        reasoning_effort: context.reasoning_effort(),
    })
}