// bamboo_infrastructure/llm/protocol/mod.rs
mod anthropic;
15mod errors;
16pub mod gemini;
17mod openai;
18
19pub use anthropic::AnthropicProtocol;
20pub use errors::{ProtocolError, ProtocolResult};
21pub use gemini::GeminiProtocol;
22pub use openai::OpenAIProtocol;
23
24use bamboo_domain::Message;
25
/// Fallible conversion from a provider-specific wire type `T` into the
/// implementing domain type.
///
/// Mirror of [`ToProvider`]; batch variants live in [`FromProviderBatch`].
pub trait FromProvider<T>: Sized {
    /// Converts `value` into `Self`.
    ///
    /// # Errors
    /// Returns a [`ProtocolError`] when the provider payload cannot be
    /// represented in the domain model.
    fn from_provider(value: T) -> ProtocolResult<Self>;
}
33
/// Fallible conversion from the implementing domain type into a
/// provider-specific wire type `T`.
///
/// Mirror of [`FromProvider`]; batch variants live in [`ToProviderBatch`].
pub trait ToProvider<T>: Sized {
    /// Converts `self` into the provider representation `T`.
    ///
    /// # Errors
    /// Returns a [`ProtocolError`] when the value cannot be expressed in the
    /// provider's wire format.
    fn to_provider(&self) -> ProtocolResult<T>;
}
41
/// Batch counterpart of [`FromProvider`]: converts a whole `Vec` of
/// provider values into domain values in one call.
pub trait FromProviderBatch<T>: Sized {
    /// Converts every element of `values` into `Self`.
    ///
    /// # Errors
    /// Returns the first [`ProtocolError`] encountered; implementations are
    /// expected to fail fast rather than return a partial batch.
    fn from_provider_batch(values: Vec<T>) -> ProtocolResult<Vec<Self>>;
}
46
/// Batch counterpart of [`ToProvider`]: converts the implementing
/// collection into a `Vec` of provider values in one call.
pub trait ToProviderBatch<T>: Sized {
    /// Converts `self` into a batch of provider representations `T`.
    ///
    /// # Errors
    /// Returns the first [`ProtocolError`] encountered; implementations are
    /// expected to fail fast rather than return a partial batch.
    fn to_provider_batch(&self) -> ProtocolResult<Vec<T>>;
}
51
52impl FromProviderBatch<crate::llm::api::models::ChatMessage> for Message {
55 fn from_provider_batch(
56 values: Vec<crate::llm::api::models::ChatMessage>,
57 ) -> ProtocolResult<Vec<Self>> {
58 values.into_iter().map(Self::from_provider).collect()
59 }
60}
61
62impl ToProviderBatch<crate::llm::api::models::ChatMessage> for Vec<Message> {
63 fn to_provider_batch(&self) -> ProtocolResult<Vec<crate::llm::api::models::ChatMessage>> {
64 self.iter().map(|msg| msg.to_provider()).collect()
65 }
66}
67
#[cfg(test)]
mod tests {
    use super::*;

    /// Compile-time check that `Message` participates in both conversion
    /// directions for the internal API `ChatMessage` type. The helper
    /// functions have empty bodies: successfully instantiating them with
    /// these type arguments *is* the assertion.
    #[test]
    fn test_trait_bounds() {
        fn require_from<T: FromProvider<U>, U>() {}
        fn require_to<T: ToProvider<U>, U>() {}

        require_from::<Message, crate::llm::api::models::ChatMessage>();
        require_to::<Message, crate::llm::api::models::ChatMessage>();
    }
}