//! rs-zero 0.2.8 — log redaction utilities.
//!
//! Part of a Rust-first microservice framework inspired by go-zero
//! engineering practices. See the project documentation for usage details.
/// Redaction settings for logs.
///
/// Controls which key fragments are treated as sensitive and what text is
/// written in place of their values.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct RedactionConfig {
    /// Case-insensitive key fragments treated as sensitive.
    /// Fragments are matched as substrings, except the literal fragment
    /// `"key"`, which matches only the exact word `key` or a `_key`/`-key`
    /// suffix (so e.g. `monkey` is not flagged).
    pub sensitive_keys: Vec<String>,
    /// Replacement text written in place of a redacted value.
    pub replacement: String,
}

impl Default for RedactionConfig {
    /// Built-in deny-list covering common credential-bearing keys, with
    /// `[REDACTED]` as the replacement text.
    fn default() -> Self {
        let sensitive_keys = [
            "authorization",
            "cookie",
            "token",
            "password",
            "secret",
            "api_key",
            "apikey",
            "key",
        ]
        .into_iter()
        .map(String::from)
        .collect();
        Self {
            sensitive_keys,
            replacement: String::from("[REDACTED]"),
        }
    }
}

/// Redacts common sensitive patterns from free-form text.
/// Redacts common sensitive patterns from free-form text.
///
/// Three shapes of sensitive data are recognized:
/// 1. `Authorization[:] [Bearer] <credential>` headers,
/// 2. `key=value` pairs, including quoted values that span several
///    whitespace-separated tokens (`password="a b"`),
/// 3. `key value` / `key: value` pairs separated by whitespace.
///
/// The input is tokenized with `split_inclusive(char::is_whitespace)`, so
/// each token keeps its trailing whitespace character and the surrounding
/// layout is preserved in the output. Note that runs of whitespace produce
/// whitespace-only tokens, which the branches below must skip over.
pub fn redact_text(input: &str, config: &RedactionConfig) -> String {
    let tokens = input
        .split_inclusive(char::is_whitespace)
        .collect::<Vec<_>>();
    let mut output = Vec::with_capacity(tokens.len());
    let mut index = 0;
    while index < tokens.len() {
        let token = tokens[index];
        let word = token.trim();
        let lower = word.to_ascii_lowercase();
        // Case 1: `Authorization[:] [Bearer] <credential>` header.
        if lower.trim_end_matches(':') == "authorization" && index + 1 < tokens.len() {
            output.push(format!(
                "{}{}",
                token_prefix(token, word),
                format_authorization_key(word)
            ));
            output.push(format!(
                "{}{}",
                config.replacement,
                token_suffix(token, word)
            ));
            index += skip_authorization_tokens(&tokens[index + 1..]);
            continue;
        }
        // Case 2: `key=value` pair; the helper decides how many tokens the
        // (possibly quoted, multi-token) value occupies.
        if let Some((key, value)) = word.split_once('=')
            && is_sensitive_key(key, config)
        {
            output.push(format!(
                "{}{key}={}{}",
                token_prefix(token, word),
                config.replacement,
                token_suffix(token, word)
            ));
            index += skip_sensitive_value_tokens(value, &tokens[index + 1..]);
            continue;
        }
        // Case 3: `key value` / `key: value` separated by whitespace.
        // Trim a trailing ':' before matching so `key:`-style labels hit
        // exact-match patterns (e.g. the "key" pattern), consistent with
        // the Authorization branch above.
        if is_sensitive_key(lower.trim_end_matches(':'), config) {
            // Locate the value token, skipping whitespace-only tokens;
            // otherwise a blank token would be "redacted" in its place and
            // the real value would leak through unchanged.
            let mut value_index = index + 1;
            while value_index < tokens.len() && tokens[value_index].trim().is_empty() {
                value_index += 1;
            }
            if value_index < tokens.len() {
                output.push(token.to_string());
                // Preserve the whitespace between the key and its value.
                output.extend(tokens[index + 1..value_index].iter().map(|t| t.to_string()));
                let value_token = tokens[value_index];
                output.push(format!(
                    "{}{}",
                    config.replacement,
                    token_suffix(value_token, value_token.trim())
                ));
                index = value_index + 1;
                continue;
            }
        }
        // Not sensitive (or a key with no following value): pass through.
        output.push(token.to_string());
        index += 1;
    }
    output.concat()
}

/// Returns how many tokens to advance past an `Authorization` header: the
/// `Authorization:` token itself plus the credential tokens that follow it.
///
/// `tokens` holds the tokens AFTER the `Authorization:` token. A `Bearer`
/// scheme occupies two credential tokens (scheme + value); any other
/// credential is treated as a single opaque token. Whitespace-only tokens
/// (produced by runs of whitespace in the input) are skipped, so the actual
/// credential is consumed rather than a blank token — otherwise the
/// credential would leak through to the output.
fn skip_authorization_tokens(tokens: &[&str]) -> usize {
    let mut cursor = 0;
    // Skip blank tokens between `Authorization:` and the credential.
    while cursor < tokens.len() && tokens[cursor].trim().is_empty() {
        cursor += 1;
    }
    if cursor < tokens.len() && tokens[cursor].trim().eq_ignore_ascii_case("bearer") {
        cursor += 1;
        // Skip blank tokens between the `Bearer` scheme and its value.
        while cursor < tokens.len() && tokens[cursor].trim().is_empty() {
            cursor += 1;
        }
    }
    // `cursor` indexes the final credential token; consume it (+1) plus the
    // `Authorization:` token itself (+1).
    cursor + 2
}

/// Returns how many tokens a `key=value` pair occupies: the `key=value`
/// token itself plus any continuation tokens of a quoted value.
///
/// `first_value` is the text after `=` in the first token. Unquoted values
/// and fully quoted single-token values (`"abc"`) occupy one token. A value
/// that opens a quote without closing it (`"a b"` split across whitespace)
/// extends through the first later token ending with `"`; if the quote is
/// never closed, the rest of the input is consumed.
fn skip_sensitive_value_tokens(first_value: &str, tokens: &[&str]) -> usize {
    let value = first_value.trim();
    // A lone `"` only OPENS a quote (e.g. `password=" a b"` tokenizes the
    // first value as just `"`), so a leading+trailing quote counts as a
    // complete quoted value only when the token has at least two chars.
    let complete_in_one_token =
        !value.starts_with('"') || (value.len() > 1 && value.ends_with('"'));
    if complete_in_one_token {
        return 1;
    }
    for (index, token) in tokens.iter().enumerate() {
        if token.trim().ends_with('"') {
            // +1 to convert index to a count, +1 for the `key=value` token.
            return index + 2;
        }
    }
    // Unterminated quote: consume everything that remains.
    tokens.len() + 1
}

/// Reports whether `key` matches any configured sensitive-key pattern,
/// ignoring ASCII case.
pub(crate) fn is_sensitive_key(key: &str, config: &RedactionConfig) -> bool {
    let normalized = key.to_ascii_lowercase();
    for pattern in &config.sensitive_keys {
        if matches_sensitive_key(&normalized, pattern) {
            return true;
        }
    }
    false
}

/// Tests one (already lowercased) key against one pattern.
///
/// The `"key"` pattern is special-cased to avoid matching substrings such
/// as `monkey`: it matches only the exact word `key` or a `_key`/`-key`
/// suffix. Every other pattern matches as a plain substring.
fn matches_sensitive_key(key: &str, pattern: &str) -> bool {
    match pattern {
        "key" => key == "key" || ["_key", "-key"].iter().any(|suffix| key.ends_with(suffix)),
        _ => key.contains(pattern),
    }
}

/// Normalizes the `Authorization` key for output: a word that already
/// carries a trailing colon is kept verbatim; otherwise the canonical
/// `Authorization:` label is substituted.
fn format_authorization_key(word: &str) -> &str {
    if !word.ends_with(':') {
        return "Authorization:";
    }
    word
}

/// Returns the part of `token` that precedes the first occurrence of
/// `word`, or `""` when `word` does not occur in `token`.
fn token_prefix<'a>(token: &'a str, word: &str) -> &'a str {
    match token.find(word) {
        Some(start) => &token[..start],
        None => "",
    }
}

/// Returns the part of `token` that follows the first occurrence of
/// `word`, or `""` when `word` does not occur in `token`.
fn token_suffix<'a>(token: &'a str, word: &str) -> &'a str {
    match token.find(word) {
        Some(start) => &token[start + word.len()..],
        None => "",
    }
}