1mod builder;
6
7use async_trait::async_trait;
8
9use crate::llm::{
10 BaseChatModel, ChatCompletion, ChatStream, LlmError, Message, ToolChoice, ToolDefinition,
11};
12
13pub use builder::ChatGroqBuilder;
14
/// Chat model client for the Groq API.
///
/// Groq serves an OpenAI-compatible HTTP API, so this type is a thin
/// wrapper that delegates all requests to the shared
/// `ChatOpenAICompatible` client, preconfigured for Groq by
/// [`ChatGroqBuilder`].
pub struct ChatGroq {
    // The underlying OpenAI-compatible client that performs all actual
    // request/response work; visible to the sibling builder module only.
    pub(super) inner: crate::llm::openai_compatible::ChatOpenAICompatible,
}
28
29impl ChatGroq {
30 pub fn new(model: impl Into<String>) -> Result<Self, LlmError> {
32 Self::builder().model(model).build()
33 }
34
35 pub fn builder() -> ChatGroqBuilder {
37 ChatGroqBuilder::default()
38 }
39}
40
#[async_trait]
impl BaseChatModel for ChatGroq {
    /// Model identifier, as configured on the underlying client.
    fn model(&self) -> &str {
        self.inner.model()
    }

    /// Static provider name for this backend.
    fn provider(&self) -> &str {
        "groq"
    }

    /// Maximum context window, in tokens.
    ///
    /// NOTE(review): hard-coded to 128_000 for every Groq model; some
    /// Groq-hosted models may have smaller windows — confirm whether this
    /// should vary by `self.model()`.
    fn context_window(&self) -> Option<u64> {
        Some(128_000)
    }

    /// Sends `messages` — with optional tool definitions and tool-choice
    /// policy — and returns the complete chat completion. Delegates directly
    /// to the inner OpenAI-compatible client.
    async fn invoke(
        &self,
        messages: Vec<Message>,
        tools: Option<Vec<ToolDefinition>>,
        tool_choice: Option<ToolChoice>,
    ) -> Result<ChatCompletion, LlmError> {
        self.inner.invoke(messages, tools, tool_choice).await
    }

    /// Streaming variant of [`BaseChatModel::invoke`]: returns a
    /// [`ChatStream`] of incremental chunks instead of a single completed
    /// response. Delegates directly to the inner client.
    async fn invoke_stream(
        &self,
        messages: Vec<Message>,
        tools: Option<Vec<ToolDefinition>>,
        tool_choice: Option<ToolChoice>,
    ) -> Result<ChatStream, LlmError> {
        self.inner.invoke_stream(messages, tools, tool_choice).await
    }
}