use anyllm::ProviderIdentity;
use anyllm::prelude::*;
/// Minimal in-memory demo provider: accepts any model whose name starts
/// with `demo-` and answers with a canned greeting (see the `ChatProvider`
/// impl below). Holds no state, so a unit struct suffices.
struct StaticProvider;
/// Identity metadata for [`StaticProvider`].
impl ProviderIdentity for StaticProvider {
    /// Stable provider name; also embedded in this provider's greeting
    /// and error messages.
    fn provider_name(&self) -> &'static str { "static-demo" }
}
impl ChatProvider for StaticProvider {
type Stream = SingleResponseStream;
async fn chat(&self, request: &ChatRequest) -> anyllm::Result<ChatResponse> {
if !request.model.starts_with("demo-") {
return Err(anyllm::Error::ModelNotFound(format!(
"static-demo does not know model '{}'",
request.model
)));
}
Ok(ChatResponse::new(vec![ContentBlock::Text {
text: format!("hello from {}", self.provider_name()),
}])
.finish_reason(FinishReason::Stop)
.model(request.model.clone()))
}
async fn chat_stream(&self, request: &ChatRequest) -> anyllm::Result<Self::Stream> {
Ok(self.chat(request).await?.into_stream())
}
fn chat_capability(&self, _model: &str, capability: ChatCapability) -> CapabilitySupport {
match capability {
ChatCapability::Streaming => CapabilitySupport::Supported,
ChatCapability::NativeStreaming => CapabilitySupport::Unsupported,
_ => CapabilitySupport::Unknown,
}
}
}
/// Demo driver: exercises the happy path, capability queries, emulated
/// streaming, and the error mapping for an unknown model.
#[tokio::main]
async fn main() -> anyllm::Result<()> {
    let provider = StaticProvider;

    // Happy path: a known "demo-" model produces the canned reply.
    let known = ChatRequest::new("demo-model")
        .system("You are concise.")
        .user("Say hello");
    let reply = provider.chat(&known).await?;
    println!("chat text: {}", reply.text_or_empty());

    // Capability queries for the same model.
    let streaming = provider.chat_capability(&known.model, ChatCapability::Streaming);
    println!("streaming support: {:?}", streaming);
    let native = provider.chat_capability(&known.model, ChatCapability::NativeStreaming);
    println!("native streaming: {:?}", native);

    // Emulated streaming collapses back into a full response.
    let collected = provider.chat_stream(&known).await?.collect_response().await?;
    println!("stream text: {}", collected.text_or_empty());

    // Error path: an unrecognized model must map to ModelNotFound.
    let unknown = ChatRequest::new("unknown-model").user("Say hello");
    match provider.chat(&unknown).await {
        Ok(_) => unreachable!("unknown-model should error"),
        Err(anyllm::Error::ModelNotFound(message)) => {
            println!("error mapping: ModelNotFound: {message}");
        }
        Err(other) => println!("unexpected error variant: {other:?}"),
    }
    Ok(())
}