pub struct OllamaClient { /* private fields */ }
Expand description
Client for interacting with the Ollama API
This struct provides methods for making requests to the Ollama API, including text generation and model management.
§Examples
use projets_indexer::ollama::{OllamaClient, ClientConfig};
let client = OllamaClient::new(ClientConfig::default())?;
let response = client.generate(request).await?;
Implementations§
Source§impl OllamaClient
impl OllamaClient
Sourcepub fn new(config: ClientConfig) -> Result<Self>
pub fn new(config: ClientConfig) -> Result<Self>
Create a new Ollama client
This function initializes a new OllamaClient
with the provided
configuration. It sets up the HTTP client with the specified timeout
and other settings.
§Arguments
config
- Configuration for the HTTP client
§Returns
A Result
containing the initialized OllamaClient
or an error
if initialization fails.
§Examples
use projets_indexer::ollama::{OllamaClient, ClientConfig};
use std::time::Duration;
let config = ClientConfig {
timeout: Duration::from_secs(30),
};
let client = OllamaClient::new(config)?;
Sourcepub async fn check_availability(&self) -> Result<bool>
pub async fn check_availability(&self) -> Result<bool>
Check if the Ollama service is available
This function sends a simple request to the Ollama API to verify that the service is running and accessible.
§Returns
A Result<bool>
indicating whether the service is available.
§Examples
use projets_indexer::ollama::{OllamaClient, ClientConfig};
let client = OllamaClient::new(ClientConfig::default())?;
if client.check_availability().await? {
println!("Ollama service is available");
}
Sourcepub async fn generate(
&self,
request: GenerateRequest,
) -> Result<GenerateResponse>
pub async fn generate( &self, request: GenerateRequest, ) -> Result<GenerateResponse>
Generate text using the Ollama API
This function sends a text generation request to the Ollama API and returns the generated response.
§Arguments
request
- The generation request parameters
§Returns
A Result
containing the generated response or an error if the
request fails.
§Examples
use projets_indexer::ollama::{OllamaClient, ClientConfig, GenerateRequest};
let client = OllamaClient::new(ClientConfig::default())?;
let request = GenerateRequest {
model: "gemma3:1b".to_string(),
prompt: "Generate a tag for this project".to_string(),
system: None,
template: None,
context: None,
options: None,
stream: false,
format: None,
};
let response = client.generate(request).await?;
Trait Implementations§
Source§impl Clone for OllamaClient
impl Clone for OllamaClient
Source§fn clone(&self) -> OllamaClient
fn clone(&self) -> OllamaClient
1.0.0 · Source§fn clone_from(&mut self, source: &Self)
fn clone_from(&mut self, source: &Self)
Performs copy-assignment from `source`. Read more