// crabtalk_core/model/mod.rs
use anyhow::Result;
7use futures_core::Stream;
8pub use limits::default_context_limit;
9pub use message::{Message, MessageBuilder, Role, estimate_tokens};
10pub use request::Request;
11pub use response::{
12 Choice, CompletionMeta, CompletionTokensDetails, Delta, FinishReason, Response, Usage,
13};
14pub use stream::StreamChunk;
15pub use tool::{FunctionCall, Tool, ToolCall, ToolChoice};
16
17mod limits;
18mod message;
19mod request;
20mod response;
21mod stream;
22mod tool;
23
24#[cfg(any(test, feature = "test-utils"))]
25pub mod test_model;
26
/// Abstraction over an LLM backend that can answer chat requests.
///
/// Implementors provide both a one-shot [`send`](Model::send) and an
/// incremental [`stream`](Model::stream) interface. The `Sized + Clone`
/// bound lets callers hand cheap copies of a model handle around.
///
/// NOTE(review): the `impl Future` return type relies on `Future` being
/// in scope (prelude as of edition 2024) — confirm the crate edition.
pub trait Model: Sized + Clone {
    /// Sends `request` and resolves to the complete [`Response`].
    fn send(&self, request: &Request) -> impl Future<Output = Result<Response>> + Send;

    /// Sends `request` and yields the response incrementally as
    /// [`StreamChunk`] items; each item may individually fail.
    fn stream(&self, request: Request) -> impl Stream<Item = Result<StreamChunk>> + Send;

    /// Context-window size for the model named `model`.
    ///
    /// Defaults to [`default_context_limit`]; backends with exact
    /// knowledge of their models may override this.
    fn context_limit(&self, model: &str) -> usize {
        default_context_limit(model)
    }

    /// Identifier of the model this handle is currently configured to use.
    fn active_model(&self) -> String;
}
52
/// No-op `Model` implementation for `()`: a placeholder where a `Model`
/// value is required by the type system but no real backend must ever be
/// reached. Both request paths (`send`, `stream`) panic if exercised.
impl Model for () {
    /// Panics when the returned future is first polled — this stub is
    /// never meant to perform a real LLM call.
    async fn send(&self, _request: &Request) -> Result<Response> {
        panic!("NoopModel::send called — not intended for real LLM calls");
    }

    /// Panics eagerly at call time (before any stream is constructed).
    /// The trailing `stream!` block is never executed; it exists only to
    /// give the compiler a concrete type for the opaque `impl Stream`
    /// return — hence the `unreachable_code` allow.
    #[allow(unreachable_code)]
    fn stream(&self, _request: Request) -> impl Stream<Item = Result<StreamChunk>> + Send {
        panic!("NoopModel::stream called — not intended for real LLM calls");
        async_stream::stream! {
            yield Err(anyhow::anyhow!("not implemented"));
        }
    }

    /// Reports a zero-sized context window: the noop model accepts nothing.
    fn context_limit(&self, _model: &str) -> usize {
        0
    }

    /// Returns the empty string — no model is ever active for the stub.
    fn active_model(&self) -> String {
        String::new()
    }
}