llm/providers/local/
llama_cpp.rs1#![doc = include_str!(concat!(env!("OUT_DIR"), "/docs/llamacpp.md"))]
2
3use super::util::get_local_config;
4use crate::providers::openai::OpenAiChatProvider;
5use crate::{ProviderFactory, Result};
6use async_openai::{Client, config::OpenAIConfig};
7
/// Chat provider backed by a locally running llama.cpp server, reached
/// through its OpenAI-compatible HTTP API.
pub struct LlamaCppProvider {
    // OpenAI-style client pointed at the llama.cpp server's base URL.
    client: Client<OpenAIConfig>,
}
11
12impl LlamaCppProvider {
13 pub fn new(base_url: &str) -> Self {
14 Self { client: Client::with_config(get_local_config(base_url)) }
15 }
16}
17
18impl Default for LlamaCppProvider {
19 fn default() -> Self {
20 Self { client: Client::with_config(get_local_config("http://localhost:8080/v1")) }
21 }
22}
23
24impl ProviderFactory for LlamaCppProvider {
25 async fn from_env() -> Result<Self> {
26 Ok(Self::default())
27 }
28
29 fn with_model(self, _model: &str) -> Self {
30 self
32 }
33}
34
impl OpenAiChatProvider for LlamaCppProvider {
    type Config = OpenAIConfig;

    /// Borrow of the underlying OpenAI-compatible client.
    fn client(&self) -> &Client<Self::Config> {
        &self.client
    }

    /// Model identifier sent with each request — intentionally empty.
    // NOTE(review): presumably the llama.cpp server serves a single model
    // chosen at startup and disregards this field; confirm against the
    // llama.cpp OpenAI-compat server documentation.
    fn model(&self) -> &'static str {
        "" }

    /// Human-readable provider name used in logs/errors.
    fn provider_name(&self) -> &'static str {
        "LlamaCpp"
    }
}