responses-proxy 0.1.3

OpenAI Responses API proxy over any Chat Completions provider. Supports HTTP SSE and WebSocket streaming, reasoning/thinking content, tool calling, and can serve as a drop-in Codex CLI backend via DeepSeek or other Chat API-compatible models.
//! Symmetric encryption for compact summary content.
use aes_gcm::{
    Aes256Gcm, Nonce,
    aead::{Aead, KeyInit},
};
use rand::Rng;

/// Encrypt plaintext with AES-256-GCM. Returns hex-encoded ciphertext (nonce + data).
/// Encrypt plaintext with AES-256-GCM. Returns hex-encoded ciphertext (nonce + data).
///
/// Output layout is `nonce (12 bytes) || ciphertext + tag`, hex-encoded — exactly
/// what [`decrypt`] expects. A fresh random nonce is drawn per call, so encrypting
/// the same plaintext twice yields different outputs.
///
/// Returns `None` if cipher construction or encryption fails.
pub fn encrypt(key: &[u8; 32], plaintext: &str) -> Option<String> {
    let cipher = Aes256Gcm::new_from_slice(key).ok()?;
    // 96-bit random nonce; must never repeat for the same key.
    let mut nonce_bytes = [0u8; 12];
    rand::thread_rng().fill(&mut nonce_bytes);
    let nonce = Nonce::from_slice(&nonce_bytes);
    let ciphertext = cipher.encrypt(nonce, plaintext.as_bytes()).ok()?;
    // Preallocate the exact final size (nonce + ciphertext) to avoid a
    // mid-extend reallocation when appending the ciphertext.
    let mut result = Vec::with_capacity(nonce_bytes.len() + ciphertext.len());
    result.extend_from_slice(&nonce_bytes);
    result.extend_from_slice(&ciphertext);
    Some(hex::encode(&result))
}

/// Decrypt hex-encoded ciphertext produced by `encrypt`. Returns the plaintext.
/// Decrypt hex-encoded ciphertext produced by `encrypt`. Returns the plaintext.
///
/// Expects the layout written by `encrypt`: a 12-byte nonce followed by the
/// AES-256-GCM ciphertext and tag, hex-encoded. Yields `None` on malformed hex,
/// short input, authentication failure, or non-UTF-8 plaintext.
pub fn decrypt(key: &[u8; 32], hex_ciphertext: &str) -> Option<String> {
    let raw = hex::decode(hex_ciphertext).ok()?;
    // Guard: the payload must at least contain the 12-byte nonce prefix.
    let (prefix, body) = match raw.len() {
        n if n >= 12 => raw.split_at(12),
        _ => return None,
    };
    let cipher = Aes256Gcm::new_from_slice(key).ok()?;
    let decrypted = cipher.decrypt(Nonce::from_slice(prefix), body).ok()?;
    String::from_utf8(decrypted).ok()
}