agentix 0.22.2

Multi-provider LLM client for Rust — streaming and non-streaming responses, tool calls, and MCP support across DeepSeek, OpenAI, Anthropic, Gemini, and Mimo
Documentation
{
  "id": "resp_2",
  "object": "response",
  "created_at": 1700000000,
  "status": "completed",
  "model": "gpt-5",
  "output": [
    {
      "id": "rs_1",
      "type": "reasoning",
      "summary": [],
      "encrypted_content": "ENC_OPAQUE_1"
    },
    {
      "id": "fc_1",
      "type": "function_call",
      "call_id": "call_abc123",
      "name": "get_weather",
      "arguments": "{\"city\":\"Tokyo\",\"units\":\"celsius\"}",
      "status": "completed"
    }
  ],
  "usage": {
    "input_tokens": 20,
    "output_tokens": 15,
    "total_tokens": 35,
    "output_tokens_details": {"reasoning_tokens": 3}
  }
}