responses-proxy 0.1.2

An OpenAI Responses API proxy over any Chat Completions provider. It supports HTTP SSE and WebSocket streaming, reasoning/thinking content, and tool calling, and can serve as a drop-in Codex CLI backend for DeepSeek or any other Chat Completions-compatible model.
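
For example, a client can send an OpenAI Responses API request to the proxy, which translates it into a Chat Completions call against the configured upstream. Below is a minimal Rust sketch; the listen address (127.0.0.1:8080), the /v1/responses path, the DEEPSEEK_API_KEY pass-through, and the model name are illustrative assumptions, and it expects reqwest (with the "json" feature), tokio, and serde_json as dependencies.

// Hypothetical usage sketch, not shipped with the crate.
use serde_json::json;

#[tokio::main]
async fn main() -> Result<(), Box<dyn std::error::Error>> {
    let client = reqwest::Client::new();
    let resp = client
        // Assumed bind address; use whatever your proxy instance listens on.
        .post("http://127.0.0.1:8080/v1/responses")
        // Assumed auth pass-through; some deployments read the key from config instead.
        .bearer_auth(std::env::var("DEEPSEEK_API_KEY")?)
        .json(&json!({
            "model": "deepseek-chat",              // any Chat Completions-compatible model
            "input": "Reply with one word: ping",  // Responses API takes `input`, not `messages`
            "stream": false                        // set true for SSE streaming
        }))
        .send()
        .await?;
    println!("{}", resp.text().await?);
    Ok(())
}

The CI workflow below builds and tests on x86_64 and aarch64 Linux, macOS, and Windows, verifies the 1.85 MSRV, and enforces rustfmt and clippy: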
name: CI

on:
  push:
    branches: [main]
  pull_request:
  workflow_dispatch:

env:
  CARGO_TERM_COLOR: always
  RUSTFLAGS: -D warnings

jobs:
  test:
    name: test ${{ matrix.os }}
    runs-on: ${{ matrix.os }}
    strategy:
      fail-fast: false
      matrix:
        os:
          - ubuntu-latest      # x86_64-unknown-linux-gnu
          - ubuntu-24.04-arm   # aarch64-unknown-linux-gnu
          - macos-latest       # aarch64-apple-darwin
          - windows-latest     # x86_64-pc-windows-msvc
    steps:
      - uses: actions/checkout@v4
      - name: Show toolchain
        run: rustc -V && cargo -V
      - uses: Swatinem/rust-cache@v2
      - run: cargo build --all-targets
      - run: cargo test --all-features

  msrv:
    name: msrv (1.85)
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      # A pinned rust-toolchain.toml would override the MSRV toolchain
      # installed below, so drop it first.
      - name: Remove rust-toolchain.toml
        run: rm -f rust-toolchain.toml
      - uses: dtolnay/rust-toolchain@master
        with:
          toolchain: "1.85"
      - uses: Swatinem/rust-cache@v2
      - run: cargo build

  lints:
    name: clippy + rustfmt
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      - uses: Swatinem/rust-cache@v2
      - run: cargo fmt --all -- --check
      # RUSTFLAGS=-D warnings (set in env above) turns clippy warnings into errors
      - run: cargo clippy --all-targets --all-features