#[cfg(feature = "language-model-request")]
use futures::Stream;
#[cfg(feature = "language-model-request")]
use futures::StreamExt;
use serde::{Deserialize, Serialize};
use serde_json::Value;
#[cfg(feature = "language-model-request")]
use uuid;
#[cfg(feature = "language-model-request")]
use crate::core::LanguageModelStreamChunkType;
/// A chunk of the Vercel AI SDK UI message stream protocol.
///
/// Serialized as an internally tagged enum: the variant name becomes the
/// `"type"` field in kebab-case (e.g. `"text-delta"`), and struct fields are
/// serialized in camelCase (e.g. `providerMetadata`, `toolCallId`,
/// `errorText`) to match the wire format Vercel AI SDK UI clients expect.
///
/// The per-variant `rename_all = "camelCase"` attributes are the fix for the
/// field casing; the previous per-variant `rename = "..."` attributes were
/// redundant with the container-level `rename_all = "kebab-case"` (they
/// produced identical names) and have been removed.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(tag = "type", rename_all = "kebab-case")]
pub enum VercelUIStream {
    /// Opens a text block identified by `id`.
    #[serde(rename_all = "camelCase")]
    TextStart {
        id: String,
        #[serde(skip_serializing_if = "Option::is_none")]
        provider_metadata: Option<Value>,
    },
    /// An incremental piece of text for the block `id`.
    #[serde(rename_all = "camelCase")]
    TextDelta {
        id: String,
        delta: String,
        #[serde(skip_serializing_if = "Option::is_none")]
        provider_metadata: Option<Value>,
    },
    /// Closes the text block `id`.
    #[serde(rename_all = "camelCase")]
    TextEnd {
        id: String,
        #[serde(skip_serializing_if = "Option::is_none")]
        provider_metadata: Option<Value>,
    },
    /// Opens a reasoning block identified by `id`.
    #[serde(rename_all = "camelCase")]
    ReasoningStart {
        id: String,
        #[serde(skip_serializing_if = "Option::is_none")]
        provider_metadata: Option<Value>,
    },
    /// An incremental piece of reasoning text for the block `id`.
    #[serde(rename_all = "camelCase")]
    ReasoningDelta {
        id: String,
        delta: String,
        #[serde(skip_serializing_if = "Option::is_none")]
        provider_metadata: Option<Value>,
    },
    /// Closes the reasoning block `id`.
    #[serde(rename_all = "camelCase")]
    ReasoningEnd {
        id: String,
        #[serde(skip_serializing_if = "Option::is_none")]
        provider_metadata: Option<Value>,
    },
    /// Announces a tool invocation (`tool_name`) with its own call id.
    #[serde(rename_all = "camelCase")]
    ToolCallStart {
        id: String,
        tool_call_id: String,
        tool_name: String,
        #[serde(skip_serializing_if = "Option::is_none")]
        provider_metadata: Option<Value>,
    },
    /// An incremental piece of the tool call's input payload.
    #[serde(rename_all = "camelCase")]
    ToolCallDelta {
        id: String,
        tool_call_id: String,
        delta: String,
        #[serde(skip_serializing_if = "Option::is_none")]
        provider_metadata: Option<Value>,
    },
    /// Completes a tool call, carrying its JSON `result`.
    #[serde(rename_all = "camelCase")]
    ToolCallEnd {
        id: String,
        tool_call_id: String,
        result: Value,
        #[serde(skip_serializing_if = "Option::is_none")]
        provider_metadata: Option<Value>,
    },
    /// A terminal error, surfaced to the client as `errorText`.
    #[serde(rename_all = "camelCase")]
    Error {
        error_text: String,
    },
    /// The upstream model emitted something this adapter cannot represent.
    #[serde(rename_all = "camelCase")]
    NotSupported {
        error_text: String,
    },
}
/// Configuration for converting a model response stream into Vercel UI chunks.
///
/// All flags default to `false` and `generate_message_id` to `None`
/// (see `Default`), meaning: no reasoning, start, or finish chunks are
/// forwarded, and message ids fall back to a generated UUID-based id.
#[derive(Default)]
pub struct VercelUIStreamOptions {
    // Forward reasoning deltas to the client when true.
    pub send_reasoning: bool,
    // Emit a start chunk for the message when true.
    pub send_start: bool,
    // Emit a finish/end chunk for the message when true.
    pub send_finish: bool,
    // Optional factory producing the message id; `None` means "generate one".
    pub generate_message_id: Option<Box<VercelUIStreamIdGenerator>>,
}
/// Factory signature for user-supplied message-id generators; must be
/// thread-safe because the stream may be polled from any thread.
pub type VercelUIStreamIdGenerator = dyn Fn() -> String + Send + Sync;
/// Generic fluent builder pairing caller-supplied `context` with
/// [`VercelUIStreamOptions`] and a deferred construction function.
///
/// `C` is the context type handed through untouched; `T` is whatever the
/// `build_fn` produces when [`build`](VercelUIStreamBuilder::build) runs.
pub struct VercelUIStreamBuilder<C, T> {
    // Arbitrary caller state passed to `build_fn` on build.
    pub context: C,
    // Options accumulated by the fluent setter methods.
    pub options: VercelUIStreamOptions,
    // Deferred constructor invoked exactly once by `build`.
    build_fn: Box<dyn Fn(C, VercelUIStreamOptions) -> T + Send + Sync>,
}
impl<C, T> VercelUIStreamBuilder<C, T> {
pub fn new<B>(context: C, build_fn: B) -> Self
where
B: Fn(C, VercelUIStreamOptions) -> T + Send + Sync + 'static,
{
Self {
context,
options: VercelUIStreamOptions::default(),
build_fn: Box::new(build_fn),
}
}
pub fn send_reasoning(mut self) -> Self {
self.options.send_reasoning = true;
self
}
pub fn send_start(mut self) -> Self {
self.options.send_start = true;
self
}
pub fn send_finish(mut self) -> Self {
self.options.send_finish = true;
self
}
pub fn with_id_generator<G>(mut self, generator: G) -> Self
where
G: Fn() -> String + Send + Sync + 'static,
{
self.options.generate_message_id = Some(Box::new(generator));
self
}
pub fn build(self) -> T {
(self.build_fn)(self.context, self.options)
}
}
#[cfg(feature = "language-model-request")]
impl crate::core::StreamTextResponse {
    /// Adapts this response's chunk stream into Vercel UI protocol chunks.
    ///
    /// One message id is chosen up front — from `options.generate_message_id`
    /// if set, otherwise `msg_<uuid>` — and shared by every emitted chunk.
    /// Chunks the options suppress (start/finish/reasoning when the matching
    /// flag is off) are dropped from the output stream rather than errored.
    pub fn into_vercel_ui_stream(
        self,
        options: VercelUIStreamOptions,
    ) -> impl Stream<Item = crate::Result<VercelUIStream>> {
        let message_id = options
            .generate_message_id
            .as_ref()
            .map(|f| f())
            .unwrap_or_else(|| format!("msg_{}", uuid::Uuid::new_v4().simple()));
        self.stream.filter_map(move |chunk| {
            let ui_chunk = match chunk {
                LanguageModelStreamChunkType::Start if options.send_start => {
                    Some(VercelUIStream::TextStart {
                        id: message_id.clone(),
                        provider_metadata: None,
                    })
                }
                LanguageModelStreamChunkType::Text(delta) => Some(VercelUIStream::TextDelta {
                    id: message_id.clone(),
                    delta,
                    provider_metadata: None,
                }),
                LanguageModelStreamChunkType::Reasoning(delta) if options.send_reasoning => {
                    Some(VercelUIStream::ReasoningDelta {
                        id: message_id.clone(),
                        delta,
                        provider_metadata: None,
                    })
                }
                LanguageModelStreamChunkType::ToolCall(_json_str) => {
                    // TODO(review): the tool-call payload is discarded and
                    // placeholder ids are emitted; parse `_json_str` for the
                    // real tool_call_id / tool_name once its schema is pinned.
                    Some(VercelUIStream::ToolCallStart {
                        id: message_id.clone(),
                        tool_call_id: "unknown".to_string(),
                        tool_name: "unknown".to_string(),
                        provider_metadata: None,
                    })
                }
                LanguageModelStreamChunkType::End(_) if options.send_finish => {
                    Some(VercelUIStream::TextEnd {
                        id: message_id.clone(),
                        provider_metadata: None,
                    })
                }
                LanguageModelStreamChunkType::Failed(error)
                | LanguageModelStreamChunkType::Incomplete(error) => {
                    Some(VercelUIStream::Error { error_text: error })
                }
                // Fix: previously `NotSupported` was matched only to return
                // `None` (redundant with the catch-all below), silently
                // swallowing the message even though the
                // `VercelUIStream::NotSupported` variant exists for exactly
                // this case — forward it like the error arms above.
                LanguageModelStreamChunkType::NotSupported(error) => {
                    Some(VercelUIStream::NotSupported { error_text: error })
                }
                // Unhandled/suppressed chunk kinds are filtered out.
                _ => None,
            };
            futures::future::ready(ui_chunk.map(Ok))
        })
    }
}
/// One part of a Vercel UI message; only `"text"` parts carry content this
/// crate consumes (see `Message::from_vercel_ui_message`).
#[derive(Deserialize, Debug)]
pub struct VercelUIMessagePart {
    pub text: String,
    // The part discriminator from the wire, e.g. "text".
    #[serde(rename = "type")]
    pub part_type: String,
}
/// A single chat message as sent by a Vercel UI client: a role
/// ("system"/"user"/"assistant" are the ones handled here) plus ordered parts.
#[derive(Deserialize, Debug)]
pub struct VercelUIMessage {
    pub id: String,
    pub role: String,
    pub parts: Vec<VercelUIMessagePart>,
}
/// The top-level request body posted by a Vercel UI client.
// NOTE(review): `trigger`'s value set isn't visible from this file —
// presumably something like "submit-message"; confirm against the client.
#[derive(Deserialize, Debug)]
pub struct VercelUIRequest {
    pub id: String,
    pub messages: Vec<VercelUIMessage>,
    pub trigger: String,
}
impl crate::core::Message {
    /// Translates Vercel UI messages into the crate's own message list.
    ///
    /// Per message: every part whose `type` is `"text"` is concatenated in
    /// order into the content; the role selects the message constructor.
    /// Messages with a role other than `system`/`user`/`assistant` are
    /// dropped (a message with no text parts yields empty content).
    pub fn from_vercel_ui_message(
        ui_messages: &[VercelUIMessage],
    ) -> crate::core::messages::Messages {
        ui_messages
            .iter()
            .filter_map(|ui_msg| {
                // Collect all text-part contents into one string.
                let content: String = ui_msg
                    .parts
                    .iter()
                    .filter(|part| part.part_type == "text")
                    .map(|part| part.text.as_str())
                    .collect();
                match ui_msg.role.as_str() {
                    "system" => Some(crate::core::messages::Message::System(content.into())),
                    "user" => Some(crate::core::messages::Message::User(content.into())),
                    "assistant" => {
                        Some(crate::core::messages::Message::Assistant(content.into()))
                    }
                    _ => None,
                }
            })
            .collect()
    }
}
/// Converts a full Vercel UI request into core messages by translating its
/// message list; the request `id` and `trigger` are ignored.
impl From<VercelUIRequest> for Vec<crate::core::messages::Message> {
    fn from(request: VercelUIRequest) -> Self {
        // NOTE(review): `from_vercel_ui_message` returns
        // `crate::core::messages::Messages`; this compiles only if `Messages`
        // is (or converts to) `Vec<Message>` — confirm in `core::messages`.
        crate::core::messages::Message::from_vercel_ui_message(&request.messages)
    }
}