llmclient 0.3.2

A Rust client library for LLM providers: Gemini, OpenAI, Claude, Mistral, DeepSeek, and Groq.
Documentation
use llmclient::gemini::GeminiResponse;

/// Example: deserialize a batched (streamed) Gemini API response.
///
/// The payload is a JSON array of response chunks: the first chunk carries a
/// `functionCall` part plus `safetyRatings`; the second carries the final
/// (empty) text part, the `finishReason`, and `usageMetadata`.
fn main() {
    let res = r#"
[{
  "candidates": [
    {
      "content": {
        "role": "model",
        "parts": [
          {
            "functionCall": {
              "name": "arithmetic",
              "args": {
                "expr": "(60 * 24) * 365.25"
              }
            }
          }
        ]
      },
      "safetyRatings": [
        {
          "category": "HARM_CATEGORY_HATE_SPEECH",
          "probability": "NEGLIGIBLE",
          "probabilityScore": 0.12560065,
          "severity": "HARM_SEVERITY_NEGLIGIBLE",
          "severityScore": 0.08299415
        },
        {
          "category": "HARM_CATEGORY_DANGEROUS_CONTENT",
          "probability": "NEGLIGIBLE",
          "probabilityScore": 0.21866937,
          "severity": "HARM_SEVERITY_NEGLIGIBLE",
          "severityScore": 0.11858909
        },
        {
          "category": "HARM_CATEGORY_HARASSMENT",
          "probability": "NEGLIGIBLE",
          "probabilityScore": 0.1538032,
          "severity": "HARM_SEVERITY_NEGLIGIBLE",
          "severityScore": 0.10230471
        },
        {
          "category": "HARM_CATEGORY_SEXUALLY_EXPLICIT",
          "probability": "NEGLIGIBLE",
          "probabilityScore": 0.08314291,
          "severity": "HARM_SEVERITY_NEGLIGIBLE",
          "severityScore": 0.06632687
        }
      ]
    }
  ]
}
,
{
  "candidates": [
    {
      "content": {
        "role": "model",
        "parts": [
          {
            "text": ""
          }
        ]
      },
      "finishReason": "STOP"
    }
  ],
  "usageMetadata": {
    "promptTokenCount": 47,
    "candidatesTokenCount": 18,
    "totalTokenCount": 65
  }
}
]"#;

    // `from_str` takes `&str` and `res` already is one — no extra borrow needed.
    // `expect` (not bare `unwrap`): the literal above is known-valid JSON, so a
    // failure here would be a bug in the `GeminiResponse` derive, and the
    // message says so.
    let responses: Vec<GeminiResponse> =
        serde_json::from_str(res).expect("embedded Gemini JSON should deserialize");

    // Use the parsed value so the example compiles without a dead-code warning
    // and demonstrates that the whole batch was read.
    println!("parsed {} response chunk(s)", responses.len());
}