//! Ollama local LLM provider implementation for ADK.
//!
//! This module provides support for running local LLMs via Ollama.
//!
//! # Example
//!
//! ```rust,ignore
//! use adk_model::ollama::{OllamaModel, OllamaConfig};
//!
//! let model = OllamaModel::new(OllamaConfig {
//!     model: "llama3.2".into(),
//!     ..Default::default()
//! })?;
//! ```
pub use OllamaModel;
pub use OllamaConfig;