# responses-proxy 0.1.1
#
# OpenAI Responses API proxy over any Chat Completions provider. Supports
# HTTP SSE and WebSocket streaming, reasoning/thinking content, tool calling,
# and can serve as a drop-in Codex CLI backend via DeepSeek or other
# Chat API-compatible models.
---
# Global server settings for the proxy process.
server:
  # Log level for tracing output (trace, debug, info, warn, error)
  # Can be overridden by the RUST_LOG environment variable.
  # Default: info
  log_level: info

  # Address and port the proxy binds to. 0.0.0.0 listens on all interfaces.
  listen_addr: "0.0.0.0:3000"
  # Timeout for requests — presumably seconds; confirm against the server docs.
  request_timeout: 30

  # Auth: set enabled: true to require API key authentication
  auth:
    enabled: false
    # API keys accepted when auth is enabled. Replace the placeholder and
    # keep real keys out of version control (inject via env/secret store).
    keys:
      - sk-your-secret-key-here

  # Tool type allowlist: only these tool types pass through to the downstream
  # Default: ["function"]
  tool_type_allowlist:
    - function

# Model routing table: each entry maps an exposed model name to a downstream
# Chat Completions provider and model.
models:
  # Exposed model name clients request from this proxy.
  - model: gpt-5.5
    provider:
      # NOTE(review): $DEEPSEEK_API_KEY looks like an env-var reference —
      # expansion is presumably done by the application, not YAML; confirm.
      api_key: $DEEPSEEK_API_KEY
      # Base URL of the downstream Chat Completions-compatible API.
      base_url: https://api.deepseek.com
    # Actual model name sent to the downstream provider.
    downstream_model: deepseek-v4-pro

  # Exposed model name clients request from this proxy.
  - model: codex-auto-review
    provider:
      # NOTE(review): env-var reference as above — expansion done by the app.
      api_key: $DEEPSEEK_API_KEY
      base_url: https://api.deepseek.com
    # Actual model name sent to the downstream provider.
    downstream_model: deepseek-v4-flash