//! # asla 0.1.5
//!
//! An absurdly simple LLM API client for Rust.
//!
//! See the crate documentation for usage details.
pub mod error;
pub mod llm;

pub use error::Error;
pub use llm::{Chat, LlmClient, Message, MessageRole, GenerationConfig};

#[cfg(test)]
mod tests {
    // `super::*` already re-exports `Chat`, `LlmClient`, and `Message`
    // (see the `pub use` lines at the crate root), so no second `use llm::…`
    // is needed — the previous duplicate import only shadowed the glob.
    use super::*;

    /// End-to-end flow test for the two generation entry points.
    ///
    /// Precondition: an OpenAI-compatible mock server listening on
    /// `127.0.0.1:8001` that echoes the last user message back as the
    /// assistant response. Run explicitly with `cargo test -- --ignored`.
    #[tokio::test]
    #[ignore = "requires a local mock server on 127.0.0.1:8001"]
    async fn test_flow() {
        // Build a client pointed at the local mock endpoint.
        let client = LlmClient::builder_with_default_client()
            .unwrap()
            .api_url("http://127.0.0.1:8001/v1/chat/completions")
            .build();

        // Seed the chat with a single user message.
        let mut chat = Chat::new();
        let message = Message::user("Hello, how are you?");
        chat.add_message(message);

        // `generate_response` returns the raw reply without mutating the chat;
        // the mock server echoes the serialized user message back verbatim.
        let response = client.generate_response().chat(&chat).call().await.unwrap();
        assert_eq!(
            response,
            "{\"content\":\"Hello, how are you?\",\"role\":\"user\"}"
        );

        // `generate_response_and_add_to_chat` additionally appends the
        // assistant reply to `chat` in place.
        let _response = client
            .generate_response_and_add_to_chat()
            .chat(&mut chat)
            .call()
            .await
            .unwrap();

        // The chat should now hold the original user message followed by the
        // assistant message whose content is the echoed (re-escaped) JSON.
        assert_eq!(
            chat.to_json().iter().map(|message| message.to_string()).collect::<Vec<String>>().join("\n"),
            "{\"content\":\"Hello, how are you?\",\"role\":\"user\"}\n{\"content\":\"{\\\"content\\\":\\\"Hello, how are you?\\\",\\\"role\\\":\\\"user\\\"}\",\"role\":\"assistant\"}"
        );
    }
}