//! tiycore 0.2.3
//!
//! Unified LLM API and stateful Agent runtime in Rust.
//!
//! Google provider — uses Google Generative AI / Vertex protocol.

use crate::protocol::google::GoogleProtocol;
use crate::protocol::LLMProtocol;
use crate::stream::AssistantMessageEventStream;
use crate::types::*;
use async_trait::async_trait;

/// Google provider.
///
/// Internally uses the Google Generative AI protocol, with dual-mode
/// support for both Generative AI and Vertex AI based on `model.api`.
pub struct GoogleProvider {
    // Protocol implementation that every trait method delegates to.
    inner: GoogleProtocol,
}

impl GoogleProvider {
    /// Create a new Google provider backed by a freshly initialized
    /// [`GoogleProtocol`].
    pub fn new() -> Self {
        let inner = GoogleProtocol::new();
        Self { inner }
    }

    /// Create a provider configured with a default API key, used when a
    /// request does not carry its own credentials.
    pub fn with_api_key(api_key: impl Into<String>) -> Self {
        let inner = GoogleProtocol::with_api_key(api_key);
        Self { inner }
    }
}

impl Default for GoogleProvider {
    fn default() -> Self {
        Self::new()
    }
}

#[async_trait]
impl LLMProtocol for GoogleProvider {
    /// Identifies this provider as [`Provider::Google`].
    fn provider_type(&self) -> Provider {
        Provider::Google
    }

    /// Streams a response by delegating to the wrapped [`GoogleProtocol`].
    fn stream(
        &self,
        model: &Model,
        context: &Context,
        options: StreamOptions,
    ) -> AssistantMessageEventStream {
        let protocol = &self.inner;
        protocol.stream(model, context, options)
    }

    /// Streams a response with simplified options, delegating to the
    /// wrapped [`GoogleProtocol`].
    fn stream_simple(
        &self,
        model: &Model,
        context: &Context,
        options: SimpleStreamOptions,
    ) -> AssistantMessageEventStream {
        let protocol = &self.inner;
        protocol.stream_simple(model, context, options)
    }
}