ambi 0.1.1

A flexible, multi-backend, customizable AI agent framework, written entirely in Rust.
Documentation
use crate::llm::engine::openai_api::openai_api_bridging::OpenAIEngine;
use crate::llm::handler::LLMRequest;
use crate::LLMEngineTrait;
use anyhow::anyhow;
use async_trait::async_trait;
use log::error;
use tokio::sync::mpsc::Sender;

pub mod openai_api_bridging;
pub mod openai_api_config;

#[cfg(feature = "openai-api")]
#[async_trait]
impl LLMEngineTrait for OpenAIEngine {
    /// One-shot (non-streaming) chat completion.
    ///
    /// Delegates to the engine's `generate_response_sync`; on failure the
    /// underlying error is logged and re-wrapped as an `anyhow` error for
    /// the caller.
    async fn chat(&mut self, request: LLMRequest) -> anyhow::Result<String> {
        match self.generate_response_sync(request).await {
            Ok(reply) => Ok(reply),
            Err(e) => {
                error!("OpenAI model generation error: {}", e);
                Err(anyhow!("OpenAI error: {}", e))
            }
        }
    }

    /// Streaming chat completion: response chunks are pushed through `tx`.
    ///
    /// Stream failures are logged and otherwise swallowed — the trait method
    /// returns nothing, so the consumer observes an error only as the sender
    /// side of the channel closing early.
    async fn chat_stream(&mut self, request: LLMRequest, tx: Sender<String>) {
        match self.generate_response_stream(request, tx).await {
            Ok(_) => {}
            Err(e) => error!("OpenAI stream generation error: {}", e),
        }
    }

    /// Clears the engine's accumulated conversation context.
    fn reset_context(&mut self) {
        // NOTE(review): this resolves to the *inherent*
        // `OpenAIEngine::reset_context` (inherent methods take precedence
        // over trait methods in Rust method resolution). If no inherent
        // method by that name exists, this call recurses infinitely —
        // verify against `openai_api_bridging`.
        self.reset_context();
    }
}