use crate::json_validator::{MAX_JSON_DEPTH, validate_json_depth};
use crate::{VeracodeClient, VeracodeError, VeracodeRegion};
use async_stream::try_stream;
use chrono::{DateTime, NaiveDateTime, TimeZone, Utc};
use chrono_tz::America::New_York;
use chrono_tz::Europe::Berlin;
use futures_core::stream::Stream;
use serde::{Deserialize, Serialize};
use urlencoding;
/// Request body for generating an audit report via
/// `POST /appsec/v1/analytics/report`.
///
/// All optional filter fields are omitted from the serialized JSON when
/// unset, so the API only sees filters the caller explicitly provided.
#[derive(Debug, Clone, Serialize)]
pub struct AuditReportRequest {
    /// Report type; [`AuditReportRequest::new`] always sets this to `"AUDIT"`.
    pub report_type: String,
    /// Start of the reporting window (e.g. `"2025-01-01"`).
    pub start_date: String,
    /// Optional end of the reporting window; `None` means "up to now".
    #[serde(skip_serializing_if = "Option::is_none")]
    pub end_date: Option<String>,
    /// Optional filter on audit actions (e.g. `"Delete"`, `"Create"`).
    #[serde(skip_serializing_if = "Option::is_none")]
    pub audit_action: Option<Vec<String>>,
    /// Optional filter on action types (e.g. `"Admin"`).
    #[serde(skip_serializing_if = "Option::is_none")]
    pub action_type: Option<Vec<String>>,
    /// Optional filter on user ids — presumably the users acted upon;
    /// confirm against the Veracode reporting API docs.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub target_user_id: Option<Vec<String>>,
    /// Optional filter on user ids — presumably the users who performed the
    /// actions; confirm against the Veracode reporting API docs.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub modifier_user_id: Option<Vec<String>>,
}
impl AuditReportRequest {
    /// Builds an `AUDIT` report request covering `start_date` through the
    /// optional `end_date`; every filter field starts out unset.
    #[must_use]
    pub fn new(start_date: impl Into<String>, end_date: Option<String>) -> Self {
        Self {
            report_type: String::from("AUDIT"),
            start_date: start_date.into(),
            end_date,
            audit_action: None,
            action_type: None,
            target_user_id: None,
            modifier_user_id: None,
        }
    }

    /// Restricts the report to the given audit actions.
    #[must_use]
    pub fn with_audit_actions(self, actions: Vec<String>) -> Self {
        Self {
            audit_action: Some(actions),
            ..self
        }
    }

    /// Restricts the report to the given action types.
    #[must_use]
    pub fn with_action_types(self, types: Vec<String>) -> Self {
        Self {
            action_type: Some(types),
            ..self
        }
    }

    /// Restricts the report to the given target user ids.
    #[must_use]
    pub fn with_target_users(self, user_ids: Vec<String>) -> Self {
        Self {
            target_user_id: Some(user_ids),
            ..self
        }
    }

    /// Restricts the report to the given modifier user ids.
    #[must_use]
    pub fn with_modifier_users(self, user_ids: Vec<String>) -> Self {
        Self {
            modifier_user_id: Some(user_ids),
            ..self
        }
    }
}
/// `_embedded` payload of the generate-report response.
#[derive(Debug, Clone, Deserialize)]
pub struct GenerateReportData {
    /// Server-assigned id of the newly created report.
    pub id: String,
}
/// Top-level response returned when a report generation request is accepted.
#[derive(Debug, Clone, Deserialize)]
pub struct GenerateReportResponse {
    /// The `_embedded` object carrying the new report id.
    #[serde(rename = "_embedded")]
    pub embedded: GenerateReportData,
}
/// Lifecycle state of a requested report.
///
/// On the wire the API sends these in UPPERCASE (`"QUEUED"`, `"COMPLETED"`,
/// …) — handled by `rename_all`; the `Display` impl uses capitalized names.
#[derive(Debug, Clone, PartialEq, Deserialize)]
#[serde(rename_all = "UPPERCASE")]
pub enum ReportStatus {
    Queued,
    Submitted,
    Processing,
    Completed,
    Failed,
}
impl std::fmt::Display for ReportStatus {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match self {
ReportStatus::Queued => write!(f, "Queued"),
ReportStatus::Submitted => write!(f, "Submitted"),
ReportStatus::Processing => write!(f, "Processing"),
ReportStatus::Completed => write!(f, "Completed"),
ReportStatus::Failed => write!(f, "Failed"),
}
}
}
/// Pagination links (`_links`) attached to a report page.
#[derive(Debug, Clone, Deserialize)]
pub struct ReportLinks {
    pub first: Option<LinkHref>,
    pub prev: Option<LinkHref>,
    /// The JSON field is `"self"`, renamed because `self` is a Rust keyword.
    #[serde(rename = "self")]
    pub self_link: Option<LinkHref>,
    pub next: Option<LinkHref>,
    pub last: Option<LinkHref>,
}
/// A single hyperlink object containing only its `href` target.
#[derive(Debug, Clone, Deserialize)]
pub struct LinkHref {
    pub href: String,
}
/// Pagination metadata for a report's audit-log pages.
#[derive(Debug, Clone, Deserialize)]
pub struct PageMetadata {
    /// Current page number (pages are requested zero-based elsewhere in this
    /// module).
    pub number: u32,
    /// Page size — presumably entries per page; confirm against API docs.
    pub size: u32,
    /// Total audit-log entries across all pages.
    pub total_elements: u32,
    /// Total number of pages to fetch.
    pub total_pages: u32,
}
/// One processed audit-log entry as produced by this module.
#[derive(Debug, Clone, Serialize)]
pub struct AuditLogEntry {
    /// The original entry serialized back to a JSON string, untouched.
    pub raw_log: String,
    /// The entry's `timestamp` converted from regional time to UTC, when the
    /// field was present and convertible; omitted from output otherwise.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub timestamp_utc: Option<String>,
    /// 32-char lowercase hex XXH3-128 digest of `raw_log` (see
    /// `generate_log_hash`), usable for dedup/integrity checks.
    pub log_hash: String,
}
/// Minimal deserialization target used to pull only the optional `timestamp`
/// field out of an arbitrary audit-log JSON object.
#[derive(Debug, Deserialize)]
struct TimestampExtractor {
    timestamp: Option<String>,
}
/// `_embedded` payload of a report-retrieval response.
#[derive(Debug, Clone, Deserialize)]
pub struct ReportData {
    pub id: String,
    pub report_type: String,
    /// Current lifecycle state; pages are only meaningful once `Completed`.
    pub status: ReportStatus,
    pub requested_by_user: String,
    pub requested_by_account: u64,
    pub date_report_requested: String,
    pub date_report_completed: Option<String>,
    pub report_expiration_date: Option<String>,
    /// Raw log payload kept as an untyped JSON value (expected to be an
    /// array; consumers call `.as_array()` on it).
    pub audit_logs: serde_json::Value,
    #[serde(rename = "_links")]
    pub links: Option<ReportLinks>,
    pub page_metadata: Option<PageMetadata>,
}
/// Top-level response returned when retrieving a report (or a page of it).
#[derive(Debug, Clone, Deserialize)]
pub struct ReportResponse {
    #[serde(rename = "_embedded")]
    pub embedded: ReportData,
}
fn convert_regional_timestamp_to_utc(
timestamp_str: &str,
region: &VeracodeRegion,
) -> Option<String> {
let has_millis = timestamp_str.contains('.');
let naive_dt = if has_millis {
NaiveDateTime::parse_from_str(timestamp_str, "%Y-%m-%d %H:%M:%S%.f").ok()?
} else {
NaiveDateTime::parse_from_str(timestamp_str, "%Y-%m-%d %H:%M:%S").ok()?
};
let utc_time = match region {
VeracodeRegion::European => {
let regional_time: DateTime<_> = Berlin.from_local_datetime(&naive_dt).earliest()?;
regional_time.with_timezone(&Utc)
}
VeracodeRegion::Commercial | VeracodeRegion::Federal => {
let regional_time: DateTime<_> = New_York.from_local_datetime(&naive_dt).earliest()?;
regional_time.with_timezone(&Utc)
}
};
if has_millis {
let formatted = utc_time.format("%Y-%m-%d %H:%M:%S%.f").to_string();
Some(formatted)
} else {
Some(utc_time.format("%Y-%m-%d %H:%M:%S").to_string())
}
}
/// Renders the 128-bit XXH3 digest of `raw_json` as 32 lowercase hex chars.
fn generate_log_hash(raw_json: &str) -> String {
    use xxhash_rust::xxh3::xxh3_128;
    // Zero-padded to a fixed 32-character width so all hashes line up.
    let digest: u128 = xxh3_128(raw_json.as_bytes());
    format!("{digest:032x}")
}
fn sort_log_values_by_timestamp(logs: &mut [serde_json::Value]) {
logs.sort_by(|a, b| {
let ts_a = a.get("timestamp_utc").and_then(|v| v.as_str());
let ts_b = b.get("timestamp_utc").and_then(|v| v.as_str());
match (ts_a, ts_b) {
(Some(ta), Some(tb)) => ta.cmp(tb),
(Some(_), None) => std::cmp::Ordering::Less,
(None, Some(_)) => std::cmp::Ordering::Greater,
(None, None) => std::cmp::Ordering::Equal,
}
});
}
/// High-level client for the Veracode analytics/reporting endpoints.
///
/// Cheap to clone (needed by the streaming API, which moves a clone into the
/// generated stream).
#[derive(Clone)]
pub struct ReportingApi {
    client: VeracodeClient,
    /// Cached from the client config; selects the source timezone for
    /// per-entry timestamp conversion.
    region: VeracodeRegion,
}
impl ReportingApi {
#[must_use]
pub fn new(client: VeracodeClient) -> Self {
let region = client.config().region;
Self { client, region }
}
pub async fn generate_audit_report(
&self,
request: &AuditReportRequest,
) -> Result<String, VeracodeError> {
let response = self
.client
.post("/appsec/v1/analytics/report", Some(request))
.await?;
let response_text = response.text().await?;
log::debug!("Generate report API response: {}", response_text);
validate_json_depth(&response_text, MAX_JSON_DEPTH).map_err(|e| {
VeracodeError::InvalidResponse(format!("JSON validation failed: {}", e))
})?;
let generate_response: GenerateReportResponse = serde_json::from_str(&response_text)?;
Ok(generate_response.embedded.id)
}
pub async fn get_audit_report(
&self,
report_id: &str,
page: Option<u32>,
) -> Result<ReportResponse, VeracodeError> {
let encoded_report_id = urlencoding::encode(report_id);
let endpoint = if let Some(page_num) = page {
format!("/appsec/v1/analytics/report/{encoded_report_id}?page={page_num}")
} else {
format!("/appsec/v1/analytics/report/{encoded_report_id}")
};
let response = self.client.get(&endpoint, None).await?;
let response_text = response.text().await?;
log::debug!("Get audit report API response: {}", response_text);
validate_json_depth(&response_text, MAX_JSON_DEPTH).map_err(|e| {
VeracodeError::InvalidResponse(format!("JSON validation failed: {}", e))
})?;
let report_response: ReportResponse = serde_json::from_str(&response_text)?;
Ok(report_response)
}
pub async fn poll_report_status(
&self,
report_id: &str,
max_attempts: Option<u32>,
initial_delay_secs: Option<u64>,
) -> Result<ReportResponse, VeracodeError> {
let max_attempts = max_attempts.unwrap_or(30);
let initial_delay = initial_delay_secs.unwrap_or(2);
let mut attempts: u32 = 0;
let mut delay_secs = initial_delay;
loop {
attempts = attempts.saturating_add(1);
let report = self.get_audit_report(report_id, None).await?;
let status = &report.embedded.status;
log::debug!(
"Report {} status: {} (attempt {}/{})",
report_id,
status,
attempts,
max_attempts
);
match status {
ReportStatus::Completed => {
log::info!("Report {} completed successfully", report_id);
return Ok(report);
}
ReportStatus::Failed => {
return Err(VeracodeError::InvalidResponse(format!(
"Report generation failed for report ID: {}",
report_id
)));
}
ReportStatus::Queued | ReportStatus::Submitted | ReportStatus::Processing => {
if attempts >= max_attempts {
return Err(VeracodeError::InvalidResponse(format!(
"Report polling timeout after {} attempts. Status: {}",
attempts, status
)));
}
log::debug!("Report still processing, waiting {} seconds...", delay_secs);
tokio::time::sleep(tokio::time::Duration::from_secs(delay_secs)).await;
delay_secs = std::cmp::min(delay_secs.saturating_mul(2), 30);
}
}
}
}
pub async fn get_all_audit_log_pages(
&self,
report_id: &str,
) -> Result<Vec<AuditLogEntry>, VeracodeError> {
let mut all_logs = Vec::new();
let initial_report = self.get_audit_report(report_id, None).await?;
if initial_report.embedded.status != ReportStatus::Completed {
return Err(VeracodeError::InvalidResponse(format!(
"Report is not completed. Status: {}",
initial_report.embedded.status
)));
}
let page_metadata = match initial_report.embedded.page_metadata {
Some(metadata) if metadata.total_elements > 0 => metadata,
Some(metadata) => {
log::info!(
"Report completed but contains no audit log entries (0 total elements, {} total pages)",
metadata.total_pages
);
return Ok(all_logs); }
None => {
log::info!("Report completed but contains no audit log entries (no page metadata)");
return Ok(all_logs); }
};
let mut all_pages_raw = Vec::new();
let first_page = self.get_audit_report(report_id, Some(0)).await?;
all_pages_raw.push(first_page.embedded.audit_logs.clone());
log::info!(
"Retrieved page 1/{} ({} total)",
page_metadata.total_pages,
page_metadata.total_elements
);
if page_metadata.total_pages > 1 {
for page_num in 1..page_metadata.total_pages {
log::debug!(
"Retrieving page {}/{}",
page_num.saturating_add(1),
page_metadata.total_pages
);
let page_response = self.get_audit_report(report_id, Some(page_num)).await?;
all_pages_raw.push(page_response.embedded.audit_logs.clone());
log::info!(
"Retrieved page {}/{}",
page_num.saturating_add(1),
page_metadata.total_pages
);
}
}
let mut conversion_stats: (u32, u32) = (0, 0); let mut serialization_stats: (u32, u32) = (0, 0); let mut total_entries: u32 = 0;
for page_value in all_pages_raw {
if let Some(logs_array) = page_value.as_array() {
for log_value in logs_array {
total_entries = total_entries.saturating_add(1);
let raw_log = match serde_json::to_string(log_value) {
Ok(json_str) => {
serialization_stats.0 = serialization_stats.0.saturating_add(1);
json_str
}
Err(e) => {
log::error!(
"Failed to serialize audit log entry {}: {}. Entry will be replaced with empty object.",
total_entries,
e
);
serialization_stats.1 = serialization_stats.1.saturating_add(1);
"{}".to_string()
}
};
let log_hash = generate_log_hash(&raw_log);
let timestamp_utc = if let Ok(extractor) =
serde_json::from_value::<TimestampExtractor>(log_value.clone())
{
if let Some(timestamp) = extractor.timestamp {
match convert_regional_timestamp_to_utc(×tamp, &self.region) {
Some(utc) => {
conversion_stats.0 = conversion_stats.0.saturating_add(1);
Some(utc)
}
None => {
log::warn!("Failed to convert timestamp to UTC: {}", timestamp);
conversion_stats.1 = conversion_stats.1.saturating_add(1);
None
}
}
} else {
None
}
} else {
None
};
all_logs.push(AuditLogEntry {
raw_log,
timestamp_utc,
log_hash,
});
}
}
}
log::info!(
"Successfully processed {} audit log entries across {} pages",
total_entries,
page_metadata.total_pages
);
let (region_name, source_timezone) = match self.region {
VeracodeRegion::Commercial => (
"Commercial (api.veracode.com)",
"America/New_York (EST/EDT, UTC-5/-4)",
),
VeracodeRegion::European => (
"European (api.veracode.eu)",
"Europe/Berlin (CET/CEST, UTC+1/+2)",
),
VeracodeRegion::Federal => (
"Federal (api.veracode.us)",
"America/New_York (EST/EDT, UTC-5/-4)",
),
};
log::info!(
"Converted {} timestamps from {} to UTC - Region: {} ({} failures)",
conversion_stats.0,
source_timezone,
region_name,
conversion_stats.1
);
log::info!(
"Generated xxHash hashes for {} log entries (optimized: 10-50x faster than SHA256, zero cloning)",
total_entries
);
if serialization_stats.1 > 0 {
log::warn!(
"Serialization statistics: {} successful, {} failed (replaced with empty objects)",
serialization_stats.0,
serialization_stats.1
);
} else {
log::info!(
"Serialization statistics: {} successful, 0 failed",
serialization_stats.0
);
}
Ok(all_logs)
}
pub async fn get_audit_logs(
&self,
request: &AuditReportRequest,
) -> Result<serde_json::Value, VeracodeError> {
log::info!(
"Generating audit report for date range: {} to {}",
request.start_date,
request.end_date.as_deref().unwrap_or("now")
);
let report_id = self.generate_audit_report(request).await?;
log::info!("Report generated with ID: {}", report_id);
log::info!("Polling for report completion...");
let completed_report = self.poll_report_status(&report_id, None, None).await?;
log::info!(
"Report completed at: {}",
completed_report
.embedded
.date_report_completed
.as_deref()
.unwrap_or("unknown")
);
log::info!("Retrieving all audit log pages...");
let mut all_logs = self.get_all_audit_log_pages(&report_id).await?;
log::info!(
"Sorting {} audit logs by timestamp (oldest to newest)...",
all_logs.len()
);
all_logs.sort_by(|a, b| {
match (&a.timestamp_utc, &b.timestamp_utc) {
(Some(ts_a), Some(ts_b)) => {
let parsed_a = NaiveDateTime::parse_from_str(ts_a, "%Y-%m-%d %H:%M:%S%.f")
.or_else(|_| NaiveDateTime::parse_from_str(ts_a, "%Y-%m-%d %H:%M:%S"));
let parsed_b = NaiveDateTime::parse_from_str(ts_b, "%Y-%m-%d %H:%M:%S%.f")
.or_else(|_| NaiveDateTime::parse_from_str(ts_b, "%Y-%m-%d %H:%M:%S"));
match (parsed_a, parsed_b) {
(Ok(dt_a), Ok(dt_b)) => dt_a.cmp(&dt_b), (Ok(_), Err(_)) => std::cmp::Ordering::Less, (Err(_), Ok(_)) => std::cmp::Ordering::Greater, (Err(_), Err(_)) => std::cmp::Ordering::Equal, }
}
(Some(_), None) => std::cmp::Ordering::Less,
(None, Some(_)) => std::cmp::Ordering::Greater,
(None, None) => std::cmp::Ordering::Equal,
}
});
log::info!("Logs sorted successfully (oldest to newest)");
let json_logs = serde_json::to_value(&all_logs)?;
log::info!(
"Successfully retrieved {} total audit log entries",
all_logs.len()
);
Ok(json_logs)
}
pub fn get_audit_logs_stream(
&self,
request: AuditReportRequest,
flush_threshold_bytes: usize,
) -> impl Stream<Item = Result<Vec<serde_json::Value>, VeracodeError>> {
let api = self.clone();
try_stream! {
log::info!(
"Generating audit report for date range: {} to {}",
request.start_date,
request.end_date.as_deref().unwrap_or("now")
);
let report_id = api.generate_audit_report(&request).await?;
log::info!("Report generated with ID: {}", report_id);
log::info!("Polling for report completion...");
let completed_report = api.poll_report_status(&report_id, None, None).await?;
log::info!(
"Report completed at: {}",
completed_report.embedded.date_report_completed.as_deref().unwrap_or("unknown")
);
let initial = api.get_audit_report(&report_id, None).await?;
let page_metadata = match initial.embedded.page_metadata {
Some(ref m) if m.total_elements > 0 => m.clone(),
Some(ref m) => {
log::info!(
"Report completed but contains no audit log entries ({} total pages)",
m.total_pages
);
return;
}
None => {
log::info!("Report completed but contains no audit log entries (no page metadata)");
return;
}
};
log::info!(
"Streaming {} entries across {} pages (flush threshold: {} bytes)",
page_metadata.total_elements,
page_metadata.total_pages,
flush_threshold_bytes
);
let mut buffer: Vec<serde_json::Value> = Vec::new();
let mut buffer_bytes: usize = 0;
let mut batch_num: usize = 0;
for page_num in 0..page_metadata.total_pages {
let page = api.get_audit_report(&report_id, Some(page_num)).await?;
log::debug!(
"Processing page {}/{}",
page_num.saturating_add(1),
page_metadata.total_pages
);
if let Some(logs) = page.embedded.audit_logs.as_array() {
for log_value in logs {
let raw_log = match serde_json::to_string(log_value) {
Ok(s) => s,
Err(e) => {
log::error!("Failed to serialize log entry: {}", e);
"{}".to_string()
}
};
let log_hash = generate_log_hash(&raw_log);
let timestamp_utc = serde_json::from_value::<TimestampExtractor>(log_value.clone())
.ok()
.and_then(|e| e.timestamp)
.and_then(|ts| convert_regional_timestamp_to_utc(&ts, &api.region));
let entry = AuditLogEntry { raw_log, timestamp_utc, log_hash };
let entry_value = serde_json::to_value(&entry)?;
let entry_size = entry_value.to_string().len();
buffer_bytes = buffer_bytes.saturating_add(entry_size);
buffer.push(entry_value);
if buffer_bytes >= flush_threshold_bytes {
batch_num = batch_num.saturating_add(1);
sort_log_values_by_timestamp(&mut buffer);
log::info!(
"Flushing batch {} ({} entries, ~{} bytes)",
batch_num,
buffer.len(),
buffer_bytes
);
let batch = std::mem::take(&mut buffer);
buffer_bytes = 0;
yield batch;
}
}
}
}
if !buffer.is_empty() {
batch_num = batch_num.saturating_add(1);
sort_log_values_by_timestamp(&mut buffer);
log::info!(
"Flushing final batch {} ({} entries, ~{} bytes)",
batch_num,
buffer.len(),
buffer_bytes
);
yield buffer;
}
}
}
}
/// Error type for reporting operations.
#[derive(Debug, thiserror::Error)]
#[must_use = "Need to handle all error enum types."]
pub enum ReportingError {
    /// Wraps any error bubbled up from the underlying Veracode client.
    #[error("Veracode API error: {0}")]
    VeracodeApi(#[from] VeracodeError),
    /// A supplied date string could not be understood.
    #[error("Invalid date format: {0}")]
    InvalidDate(String),
    /// The requested start/end window is wider than the API allows.
    #[error("Date range exceeds maximum allowed: {0}")]
    DateRangeExceeded(String),
}
#[cfg(test)]
#[allow(clippy::expect_used)]
mod tests {
use super::*;
#[test]
fn test_audit_report_request_new() {
let request = AuditReportRequest::new("2025-01-01", Some("2025-01-31".to_string()));
assert_eq!(request.report_type, "AUDIT");
assert_eq!(request.start_date, "2025-01-01");
assert_eq!(request.end_date, Some("2025-01-31".to_string()));
assert!(request.audit_action.is_none());
assert!(request.action_type.is_none());
}
#[test]
fn test_audit_report_request_with_filters() {
let request = AuditReportRequest::new("2025-01-01", Some("2025-01-31".to_string()))
.with_audit_actions(vec!["Delete".to_string(), "Create".to_string()])
.with_action_types(vec!["Admin".to_string()]);
assert_eq!(
request.audit_action,
Some(vec!["Delete".to_string(), "Create".to_string()])
);
assert_eq!(request.action_type, Some(vec!["Admin".to_string()]));
}
#[test]
fn test_audit_report_request_serialization() {
let request = AuditReportRequest::new("2025-01-01", Some("2025-01-31".to_string()));
let json = serde_json::to_string(&request).expect("should serialize to json");
assert!(json.contains("\"report_type\":\"AUDIT\""));
assert!(json.contains("\"start_date\":\"2025-01-01\""));
assert!(json.contains("\"end_date\":\"2025-01-31\""));
}
#[test]
fn test_audit_report_request_serialization_without_optional_fields() {
let request = AuditReportRequest::new("2025-01-01", None);
let json = serde_json::to_string(&request).expect("should serialize to json");
assert!(!json.contains("end_date"));
assert!(!json.contains("audit_action"));
assert!(!json.contains("action_type"));
}
#[test]
fn test_convert_european_timezone_winter() {
let result =
convert_regional_timestamp_to_utc("2025-01-15 10:00:00.000", &VeracodeRegion::European);
assert!(result.is_some());
assert_eq!(
result.expect("should convert timestamp"),
"2025-01-15 09:00:00"
);
}
#[test]
fn test_convert_european_timezone_summer() {
let result =
convert_regional_timestamp_to_utc("2025-06-15 10:00:00.000", &VeracodeRegion::European);
assert!(result.is_some());
assert_eq!(
result.expect("should convert timestamp"),
"2025-06-15 08:00:00"
);
}
#[test]
fn test_convert_commercial_timezone_winter() {
let result = convert_regional_timestamp_to_utc(
"2025-01-15 14:30:00.000",
&VeracodeRegion::Commercial,
);
assert!(result.is_some());
assert_eq!(
result.expect("should convert timestamp"),
"2025-01-15 19:30:00"
);
}
#[test]
fn test_convert_commercial_timezone_summer() {
let result = convert_regional_timestamp_to_utc(
"2025-06-15 14:30:00.000",
&VeracodeRegion::Commercial,
);
assert!(result.is_some());
assert_eq!(
result.expect("should convert timestamp"),
"2025-06-15 18:30:00"
);
}
#[test]
fn test_convert_federal_timezone_winter() {
let result =
convert_regional_timestamp_to_utc("2025-12-15 14:30:00.000", &VeracodeRegion::Federal);
assert!(result.is_some());
assert_eq!(
result.expect("should convert timestamp"),
"2025-12-15 19:30:00"
);
}
#[test]
fn test_convert_timezone_without_milliseconds() {
let result =
convert_regional_timestamp_to_utc("2025-01-15 10:00:00", &VeracodeRegion::European);
assert!(result.is_some());
assert_eq!(
result.expect("should convert timestamp"),
"2025-01-15 09:00:00"
);
}
#[test]
fn test_convert_timezone_variable_milliseconds() {
let result =
convert_regional_timestamp_to_utc("2025-01-15 10:00:00.1", &VeracodeRegion::European);
assert!(result.is_some());
let result =
convert_regional_timestamp_to_utc("2025-01-15 10:00:00.12", &VeracodeRegion::European);
assert!(result.is_some());
let result = convert_regional_timestamp_to_utc(
"2025-01-15 10:00:00.123456",
&VeracodeRegion::European,
);
assert!(result.is_some());
}
#[test]
fn test_convert_timezone_invalid_format() {
let result = convert_regional_timestamp_to_utc("invalid", &VeracodeRegion::European);
assert!(result.is_none());
let result =
convert_regional_timestamp_to_utc("2025-13-45 25:99:99", &VeracodeRegion::Commercial);
assert!(result.is_none());
}
#[test]
fn test_convert_timezone_dst_fallback_ambiguous() {
let result =
convert_regional_timestamp_to_utc("2028-11-05 01:00:00", &VeracodeRegion::Commercial);
assert!(
result.is_some(),
"Should handle DST fall-back ambiguous time"
);
let utc = result.expect("result should be Some as asserted above");
assert!(utc.len() >= 19, "UTC timestamp should be well-formed");
assert!(utc.starts_with("2028-11-05"), "Date should be preserved");
}
mod security_tests {
use super::*;
use proptest::prelude::*;
proptest! {
#![proptest_config(ProptestConfig {
cases: if cfg!(miri) { 5 } else { 1000 },
failure_persistence: None,
.. ProptestConfig::default()
})]
#[test]
fn proptest_valid_timestamp_conversion(
year in 2000u32..2100u32,
month in 1u32..=12u32,
day in 1u32..=28u32, hour in prop::sample::select(vec![0u32, 1, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23]), minute in 0u32..=59u32,
second in 0u32..=59u32,
region in prop_oneof![
Just(VeracodeRegion::Commercial),
Just(VeracodeRegion::European),
Just(VeracodeRegion::Federal),
]
) {
let timestamp = format!(
"{:04}-{:02}-{:02} {:02}:{:02}:{:02}",
year, month, day, hour, minute, second
);
let result = convert_regional_timestamp_to_utc(×tamp, ®ion);
prop_assert!(result.is_some(), "Failed to convert valid timestamp: {}", timestamp);
if let Some(utc) = result {
prop_assert!(utc.len() >= 19, "UTC timestamp too short: {}", utc);
prop_assert!(utc.contains('-'), "UTC timestamp missing date separator");
prop_assert!(utc.contains(':'), "UTC timestamp missing time separator");
}
}
#[test]
fn proptest_malformed_timestamp_handling(
input in "\\PC{0,256}", ) {
let _ = convert_regional_timestamp_to_utc(&input, &VeracodeRegion::Commercial);
let _ = convert_regional_timestamp_to_utc(&input, &VeracodeRegion::European);
let _ = convert_regional_timestamp_to_utc(&input, &VeracodeRegion::Federal);
}
#[test]
fn proptest_variable_millisecond_precision(
milliseconds in "[0-9]{1,9}",
) {
let timestamp = format!("2025-06-15 10:30:45.{}", milliseconds);
let result = convert_regional_timestamp_to_utc(×tamp, &VeracodeRegion::Commercial);
if let Some(utc) = result {
prop_assert!(utc.len() >= 19, "UTC timestamp too short");
}
}
#[test]
fn proptest_extreme_dates(
year in 1900u32..2200u32,
month in 0u32..=13u32, day in 0u32..=32u32, ) {
let timestamp = format!(
"{:04}-{:02}-{:02} 12:00:00",
year, month, day
);
let _ = convert_regional_timestamp_to_utc(×tamp, &VeracodeRegion::Commercial);
}
}
proptest! {
#![proptest_config(ProptestConfig {
cases: if cfg!(miri) { 5 } else { 1000 },
failure_persistence: None,
.. ProptestConfig::default()
})]
#[test]
fn proptest_hash_format_consistency(
input in "\\PC{0,1024}", ) {
let hash = generate_log_hash(&input);
prop_assert_eq!(hash.len(), 32, "Hash length should be 32 chars");
prop_assert!(
hash.chars().all(|c| c.is_ascii_hexdigit()),
"Hash should only contain hex chars: {}",
hash
);
}
#[test]
fn proptest_hash_determinism(
input in "\\PC{0,2048}",
) {
let hash1 = generate_log_hash(&input);
let hash2 = generate_log_hash(&input);
prop_assert_eq!(
hash1, hash2,
"Hash function should be deterministic"
);
}
#[test]
fn proptest_hash_collision_resistance(
input1 in "\\PC{1,256}",
input2 in "\\PC{1,256}",
) {
if input1 != input2 {
let hash1 = generate_log_hash(&input1);
let hash2 = generate_log_hash(&input2);
prop_assert_ne!(
hash1, hash2,
"Collision detected for different inputs"
);
}
}
#[test]
fn proptest_hash_avalanche_effect(
base in "[a-zA-Z0-9]{10,100}",
suffix in "[a-z]",
) {
let input1 = base.clone();
let input2 = format!("{}{}", base, suffix);
let hash1 = generate_log_hash(&input1);
let hash2 = generate_log_hash(&input2);
prop_assert_ne!(
&hash1, &hash2,
"Avalanche effect failed: similar inputs produced similar hashes"
);
let diff_count = hash1.chars()
.zip(hash2.chars())
.filter(|(a, b)| a != b)
.count();
prop_assert!(
diff_count >= 12,
"Poor avalanche effect: only {} of 32 chars changed",
diff_count
);
}
}
proptest! {
#![proptest_config(ProptestConfig {
cases: if cfg!(miri) { 5 } else { 1000 },
failure_persistence: None,
.. ProptestConfig::default()
})]
#[test]
fn proptest_url_encoding_escapes_special_chars(
special_chars in prop::sample::select(vec![
"/", "\\", "?", "&", "=", "#", " ", "<", ">",
"\"", "'", "|", ";", "\n", "\r", "\0", "$"
]),
base in "[a-zA-Z0-9]{5,20}",
) {
let malicious_id = format!("{}{}{}", base, special_chars, base);
let encoded = urlencoding::encode(&malicious_id);
prop_assert!(
!encoded.contains(special_chars),
"Special character '{}' not encoded properly",
special_chars
);
if !special_chars.chars().all(|c| c.is_alphanumeric()) {
prop_assert!(
encoded.contains('%') || (special_chars == " " && encoded.contains('+')),
"Expected encoding for '{}'",
special_chars
);
}
}
#[test]
fn proptest_url_encoding_prevents_path_traversal(
traversal in prop_oneof![
Just("../"),
Just("..\\"),
Just("../../"),
Just("..%2f"),
Just("..%5c"),
Just("%2e%2e%2f"),
],
prefix in "[a-z]{1,10}",
suffix in "[a-z]{1,10}",
) {
let malicious_id = format!("{}{}{}", prefix, traversal, suffix);
let encoded = urlencoding::encode(&malicious_id);
prop_assert!(
!encoded.contains("../") && !encoded.contains("..\\"),
"Path traversal not properly encoded: {}",
encoded
);
}
#[test]
fn proptest_url_encoding_prevents_command_injection(
injection_char in prop::sample::select(vec![
";", "|", "&", "$", "`", "$(", ")", "{", "}", "\n", "\r"
]),
base in "[a-zA-Z0-9]{5,15}",
) {
let malicious_id = format!("{}{}rm -rf /", base, injection_char);
let encoded = urlencoding::encode(&malicious_id);
prop_assert!(
!encoded.contains(injection_char),
"Injection character '{}' not encoded",
injection_char
);
}
}
proptest! {
#![proptest_config(ProptestConfig {
cases: if cfg!(miri) { 5 } else { 1000 },
failure_persistence: None,
.. ProptestConfig::default()
})]
#[test]
fn proptest_saturating_add_never_overflows(
a in 0u32..=u32::MAX,
b in 0u32..=1000u32,
) {
let result = a.saturating_add(b);
prop_assert!(result >= a, "Saturating add decreased value");
#[allow(clippy::arithmetic_side_effects)]
{
if a as u64 + b as u64 > u32::MAX as u64 {
prop_assert_eq!(
result,
u32::MAX,
"Expected saturation at MAX for {} + {}",
a, b
);
} else {
prop_assert_eq!(
result,
a + b,
"Expected normal addition for {} + {}",
a, b
);
}
}
}
#[test]
fn proptest_saturating_mul_never_overflows(
a in 0u64..=u64::MAX / 2,
b in 0u64..=100u64,
) {
let result = a.saturating_mul(b);
if let Some(expected) = a.checked_mul(b) {
prop_assert_eq!(result, expected, "Multiplication mismatch");
} else {
prop_assert_eq!(
result,
u64::MAX,
"Expected saturation at MAX for {} * {}",
a, b
);
}
}
#[test]
fn proptest_counter_increment_safety(
start in 0u32..=u32::MAX - 1000,
increments in 1usize..=100,
) {
let mut counter = start;
for _ in 0..increments {
let old_value = counter;
counter = counter.saturating_add(1);
prop_assert!(
counter >= old_value,
"Counter decreased from {} to {}",
old_value, counter
);
if old_value == u32::MAX {
prop_assert_eq!(counter, u32::MAX, "Counter should saturate at MAX");
}
}
}
#[test]
fn proptest_page_iteration_overflow_safety(
total_pages in 1u32..=1000u32,
) {
let mut processed = 0u32;
for page_num in 1..total_pages {
let page_display = page_num.saturating_add(1);
prop_assert!(
page_display >= page_num,
"Page display calculation overflow"
);
processed = processed.saturating_add(1);
}
prop_assert_eq!(
processed,
total_pages.saturating_sub(1),
"Page count mismatch"
);
}
}
proptest! {
#![proptest_config(ProptestConfig {
cases: if cfg!(miri) { 5 } else { 1000 },
failure_persistence: None,
.. ProptestConfig::default()
})]
#[test]
fn proptest_request_builder_handles_arbitrary_input(
start_date in "\\PC{0,256}",
end_date in "\\PC{0,256}",
action in "\\PC{0,100}",
) {
let request = AuditReportRequest::new(
start_date.clone(),
if end_date.is_empty() { None } else { Some(end_date.clone()) }
);
prop_assert_eq!(&request.start_date, &start_date);
if !end_date.is_empty() {
prop_assert_eq!(&request.end_date, &Some(end_date.clone()));
}
let request = request.with_audit_actions(vec![action.clone()]);
prop_assert!(request.audit_action.is_some());
}
#[test]
fn proptest_request_builder_data_integrity(
start in "[0-9]{4}-[0-9]{2}-[0-9]{2}",
actions in prop::collection::vec("[A-Za-z]{5,15}", 0..10),
types in prop::collection::vec("[A-Za-z]{5,15}", 0..10),
user_ids in prop::collection::vec("[0-9]{1,10}", 0..10),
) {
let request = AuditReportRequest::new(start.clone(), None)
.with_audit_actions(actions.clone())
.with_action_types(types.clone())
.with_target_users(user_ids.clone())
.with_modifier_users(user_ids.clone());
prop_assert_eq!(request.start_date, start);
prop_assert_eq!(request.audit_action, Some(actions));
prop_assert_eq!(request.action_type, Some(types));
prop_assert_eq!(request.target_user_id, Some(user_ids.clone()));
prop_assert_eq!(request.modifier_user_id, Some(user_ids));
}
#[test]
fn proptest_request_builder_empty_collections(
start_date in "[0-9]{4}-[0-9]{2}-[0-9]{2}",
) {
let request = AuditReportRequest::new(start_date.clone(), None)
.with_audit_actions(vec![])
.with_action_types(vec![])
.with_target_users(vec![])
.with_modifier_users(vec![]);
prop_assert!(request.audit_action.is_some());
prop_assert!(request.action_type.is_some());
prop_assert!(request.target_user_id.is_some());
prop_assert!(request.modifier_user_id.is_some());
if let Some(ref actions) = request.audit_action {
prop_assert_eq!(actions.len(), 0);
}
}
#[test]
fn proptest_request_builder_large_collections(
start_date in "[0-9]{4}-[0-9]{2}-[0-9]{2}",
collection_size in 1usize..=100,
) {
let large_vec: Vec<String> = (0..collection_size)
.map(|i| format!("item_{}", i))
.collect();
let request = AuditReportRequest::new(start_date, None)
.with_audit_actions(large_vec.clone());
if let Some(ref actions) = request.audit_action {
prop_assert_eq!(
actions.len(),
collection_size,
"Collection size mismatch"
);
}
}
}
proptest! {
#![proptest_config(ProptestConfig {
cases: if cfg!(miri) { 5 } else { 1000 },
failure_persistence: None,
.. ProptestConfig::default()
})]
#[test]
fn proptest_request_serialization_safety(
start_date in "\\PC{0,100}",
actions in prop::collection::vec("\\PC{0,50}", 0..10),
) {
let request = AuditReportRequest::new(start_date, None)
.with_audit_actions(actions);
let result = serde_json::to_string(&request);
prop_assert!(result.is_ok(), "Serialization failed");
if let Ok(json) = result {
prop_assert!(json.contains("\"report_type\""), "Missing report_type");
prop_assert!(json.contains("\"AUDIT\""), "Wrong report_type value");
}
}
#[test]
fn proptest_json_injection_prevention(
injection in prop::sample::select(vec![
r#"","malicious":"value"#,
"\n\r\t",
"\\",
"\"",
"</script>",
]),
base_date in "[0-9]{4}-[0-9]{2}-[0-9]{2}",
) {
let malicious_date = format!("{}{}", base_date, injection);
let request = AuditReportRequest::new(malicious_date, None);
let json = serde_json::to_string(&request)
.expect("Should serialize even with special chars");
let parsed: serde_json::Value = serde_json::from_str(&json)
.expect("Serialized JSON should be parseable");
prop_assert!(parsed.is_object(), "Should be valid JSON object");
}
}
proptest! {
    #![proptest_config(ProptestConfig {
        cases: if cfg!(miri) { 5 } else { 500 }, // Fewer cases for error paths
        failure_persistence: None,
        .. ProptestConfig::default()
    })]

    /// Feeding malformed timestamps through every region must never panic.
    /// Reaching the end of the body *is* the property under test; the former
    /// `is_none() || is_some()` assertions were tautologies that proved nothing.
    #[test]
    fn proptest_timestamp_error_handling_never_panics(
        malformed in prop_oneof![
            Just(""),
            Just(" "),
            Just("\n\t\r"),
            Just("2025/01/01 12:00:00"),
            Just("01-01-2025 12:00:00"),
            Just("2025-01-01T12:00:00Z"),
            Just("2025-13-01 12:00:00"),
            Just("2025-01-32 12:00:00"),
            Just("2025-01-01 25:00:00"),
            Just("2025-01-01 12:60:00"),
            Just("2025-01-01 12:00:60"),
            Just("2025-01-01"),
            Just("2025-01-01 12"),
            Just("2025-01-01 12:00"),
            Just("2025-01-01; DROP TABLE;"),
            Just("../../etc/passwd"),
            Just("<script>alert('xss')</script>"),
            Just("9999-99-99 99:99:99"),
            Just("0000-00-00 00:00:00"),
        ],
    ) {
        // The return values carry no checkable contract for malformed input
        // (Some vs None is an implementation detail here), so discard them;
        // any panic in any region fails the test.
        let _ = convert_regional_timestamp_to_utc(malformed, &VeracodeRegion::Commercial);
        let _ = convert_regional_timestamp_to_utc(malformed, &VeracodeRegion::European);
        let _ = convert_regional_timestamp_to_utc(malformed, &VeracodeRegion::Federal);
    }

    /// Hashing inputs of any size must yield a 32-character hex digest.
    #[test]
    fn proptest_hash_handles_all_input_sizes(
        size in 0usize..=10_000,
    ) {
        let input = "x".repeat(size);
        let hash = generate_log_hash(&input);
        prop_assert_eq!(hash.len(), 32);
        prop_assert!(hash.chars().all(|c| c.is_ascii_hexdigit()));
    }

    /// Embedded NUL bytes must not truncate or corrupt the digest.
    #[test]
    fn proptest_hash_handles_binary_data(
        null_count in 0usize..=100,
    ) {
        let input = format!("data{}\0{}\0end", "x".repeat(null_count), "y".repeat(null_count));
        let hash = generate_log_hash(&input);
        prop_assert_eq!(hash.len(), 32);
        prop_assert!(hash.chars().all(|c| c.is_ascii_hexdigit()));
    }
}
#[test]
fn test_url_encoding_sql_injection_attempt() {
    // Classic tautology payload: quotes and spaces must all be
    // percent-encoded so the raw characters never reach a URL.
    let sql_injection = "1' OR '1'='1";
    let encoded = urlencoding::encode(sql_injection);
    assert!(!encoded.contains('\''), "raw single quote survived encoding");
    // The old combined check `!contains(' ') || contains('+') || contains("%20")`
    // was satisfied whenever no raw space remained, regardless of how spaces
    // were handled. Assert the two properties independently instead.
    assert!(!encoded.contains(' '), "raw space survived encoding");
    assert!(
        encoded.contains("%20") || encoded.contains('+'),
        "spaces were dropped instead of encoded"
    );
}
#[test]
fn test_url_encoding_path_traversal_variants() {
    // Every spelling of a traversal attempt must lose its raw separator
    // sequence once percent-encoded.
    let variants = vec![
        "../../../etc/passwd",
        "..%2f..%2f..%2fetc%2fpasswd",
        "..\\..\\..\\windows\\system32",
    ];
    for attempt in variants {
        let encoded = urlencoding::encode(attempt);
        assert!(!encoded.contains("../"));
        assert!(!encoded.contains("..\\"));
    }
}
#[test]
fn test_hash_known_collision_resistance() {
    // Inputs that differ in a single digit must still produce distinct digests.
    let similar_inputs = [
        r#"{"timestamp":"2025-01-01 12:00:00.000"}"#,
        r#"{"timestamp":"2025-01-01 12:00:00.001"}"#,
        r#"{"timestamp":"2025-01-01 12:00:01.000"}"#,
    ];
    let hashes: Vec<String> = similar_inputs
        .iter()
        .map(|input| generate_log_hash(input))
        .collect();
    // Compare every unordered pair of digests via slice iteration instead of
    // index arithmetic plus checked `get` calls.
    for (i, hash_i) in hashes.iter().enumerate() {
        for (offset, hash_j) in hashes[i + 1..].iter().enumerate() {
            let j = i + 1 + offset;
            assert_ne!(
                hash_i, hash_j,
                "Collision between similar inputs {} and {}",
                i, j
            );
        }
    }
}
#[test]
fn test_saturating_arithmetic_at_boundaries() {
    // Saturating ops must clamp at the numeric limits instead of wrapping.
    let top32 = u32::MAX;
    assert_eq!(top32.saturating_add(1), top32);
    assert_eq!((top32 - 1).saturating_add(2), top32);

    let top64 = u64::MAX;
    assert_eq!(top64.saturating_mul(2), top64);
    assert_eq!((top64 / 2).saturating_mul(3), top64);
}
#[test]
fn test_timestamp_dst_transitions() {
    // 2025-03-09 02:30:00 falls in the US spring-forward gap (that local time
    // does not exist); 2025-11-02 01:30:00 is ambiguous (it occurs twice).
    // Whether the converter maps these to Some or None is an implementation
    // choice not pinned here — the contract exercised is that neither input
    // panics. The previous `is_some() || is_none()` assertions were tautologies.
    let _ = convert_regional_timestamp_to_utc(
        "2025-03-09 02:30:00",
        &VeracodeRegion::Commercial,
    );
    let _ = convert_regional_timestamp_to_utc(
        "2025-11-02 01:30:00",
        &VeracodeRegion::Commercial,
    );
}
#[test]
fn test_leap_year_handling() {
    // Feb 29 exists in 2024 (a leap year) but not in 2025.
    let leap_day =
        convert_regional_timestamp_to_utc("2024-02-29 12:00:00", &VeracodeRegion::European);
    assert!(leap_day.is_some(), "Leap year Feb 29 should be valid");

    let bogus_day =
        convert_regional_timestamp_to_utc("2025-02-29 12:00:00", &VeracodeRegion::European);
    assert!(bogus_day.is_none(), "Non-leap year Feb 29 should be invalid");
}
#[test]
fn test_empty_request_serialization() {
    // With every optional filter left as None, `skip_serializing_if` must
    // drop the optional keys while the mandatory fields remain present.
    let request = AuditReportRequest::new("2025-01-01", None);
    let json = serde_json::to_string(&request).expect("Should serialize");

    let omitted = ["audit_action", "action_type", "target_user_id", "modifier_user_id"];
    for key in &omitted {
        assert!(!json.contains(key));
    }

    let required = ["report_type", "start_date"];
    for key in &required {
        assert!(json.contains(key));
    }
}
}
}