use crate::plugin_manifest::{PluginManifest, ResourceRequirements};
use serde::{Deserialize, Serialize};
use sha2::{Digest, Sha256};
use std::collections::HashMap;
use std::path::Path;
use thiserror::Error;
/// Errors raised while scanning plugin files/manifests or while enforcing
/// a security policy against a scan result.
#[derive(Error, Debug)]
pub enum SecurityError {
    /// The plugin file could not be read from disk.
    #[error("Failed to read file: {0}")]
    IoError(String),
    /// The file's SHA-256 digest did not match the expected value.
    #[error("Hash mismatch: expected {expected}, got {actual}")]
    HashMismatch { expected: String, actual: String },
    /// A suspicious code pattern was detected (not constructed in this
    /// module; presumably used by callers — TODO confirm).
    #[error("Suspicious pattern detected: {0}")]
    SuspiciousPattern(String),
    /// Resource requirements deemed excessive (not constructed in this
    /// module; presumably used by callers — TODO confirm).
    #[error("Excessive resource requirements: {0}")]
    ExcessiveResources(String),
    /// A dangerous permission request (not constructed in this module;
    /// presumably used by callers — TODO confirm).
    #[error("Dangerous permission: {0}")]
    DangerousPermission(String),
    /// Returned by policy checks when a scan result is rejected
    /// (low score, disallowed warnings, or critical issues).
    #[error("Vulnerability detected: {0}")]
    VulnerabilityDetected(String),
}
/// Aggregated outcome of scanning a plugin file or manifest.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct SecurityScanResult {
    /// Score from 0 (worst) to 100 (clean); warnings deduct their
    /// `score_impact` and each critical issue deducts 30 points.
    pub security_score: u8,
    /// Hex-encoded SHA-256 of the scanned file; empty for manifest-only scans.
    pub file_hash: String,
    /// Non-fatal findings that lower the score.
    pub warnings: Vec<SecurityWarning>,
    /// Serious findings; default policies reject results containing any.
    pub critical_issues: Vec<SecurityIssue>,
    /// UTC timestamp of when the scan was performed.
    pub scanned_at: chrono::DateTime<chrono::Utc>,
}
/// A non-fatal security finding attached to a scan result.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct SecurityWarning {
    /// Which aspect of the plugin this warning concerns.
    pub category: SecurityCategory,
    /// Human-readable description of the finding.
    pub message: String,
    /// Points deducted from the security score for this warning.
    pub score_impact: u8,
}
/// A critical security finding; each one deducts a flat 30 points from
/// the security score (see `calculate_security_score`).
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct SecurityIssue {
    /// Which aspect of the plugin this issue concerns.
    pub category: SecurityCategory,
    /// Human-readable description of the finding.
    pub description: String,
    /// Suggested remediation for the reviewer (e.g. raise a limit or
    /// reject the plugin).
    pub recommendation: String,
}
/// Broad classification of security findings.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)]
pub enum SecurityCategory {
    /// File hashing / integrity verification.
    FileIntegrity,
    /// Requested plugin permissions.
    Permissions,
    /// Memory / CPU resource requirements.
    Resources,
    /// Suspicious code patterns found in plugin contents.
    MaliciousCode,
    /// Dependency-related concerns.
    Dependencies,
    /// Network access.
    Network,
    /// Filesystem access.
    Filesystem,
}
/// Scans plugin files and manifests against configurable security limits.
///
/// Construct via [`Default`]/`new` for moderate limits or `strict` for
/// hardened ones, then adjust with the `with_*` builder methods.
pub struct PluginSecurityScanner {
    /// Memory ceiling (MB); manifest requests above this become critical issues.
    max_memory_mb: u64,
    /// CPU-core ceiling; manifest requests above this produce warnings.
    max_cpu_cores: u32,
    /// Whether plugins may request network access.
    allow_network: bool,
    /// Whether plugins may request filesystem access.
    allow_filesystem: bool,
    /// Substrings that, when found in plugin text, become critical issues.
    malicious_patterns: Vec<String>,
}
impl Default for PluginSecurityScanner {
    /// Moderate defaults: 1024 MB memory, 4 CPU cores, no network or
    /// filesystem access, and the base malicious-pattern list.
    fn default() -> Self {
        let base_patterns = [
            "eval(",
            "exec(",
            "subprocess",
            "__import__",
            "dangerous_syscall",
        ];
        Self {
            max_memory_mb: 1024,
            max_cpu_cores: 4,
            allow_network: false,
            allow_filesystem: false,
            malicious_patterns: base_patterns.iter().map(|p| p.to_string()).collect(),
        }
    }
}
impl PluginSecurityScanner {
pub fn new() -> Self {
Self::default()
}
pub fn strict() -> Self {
Self {
max_memory_mb: 256,
max_cpu_cores: 1,
allow_network: false,
allow_filesystem: false,
malicious_patterns: vec![
"eval(".to_string(),
"exec(".to_string(),
"subprocess".to_string(),
"__import__".to_string(),
"dangerous_syscall".to_string(),
"require(".to_string(),
"import(".to_string(),
],
}
}
pub fn with_max_memory(mut self, mb: u64) -> Self {
self.max_memory_mb = mb;
self
}
pub fn with_network_access(mut self, allow: bool) -> Self {
self.allow_network = allow;
self
}
pub fn with_filesystem_access(mut self, allow: bool) -> Self {
self.allow_filesystem = allow;
self
}
pub fn scan_file(&self, path: &Path) -> Result<SecurityScanResult, SecurityError> {
let file_hash = self.calculate_file_hash(path)?;
let mut warnings = Vec::new();
let mut critical_issues = Vec::new();
if let Ok(content) = std::fs::read_to_string(path) {
self.scan_for_malicious_patterns(&content, &mut warnings, &mut critical_issues);
}
let security_score = self.calculate_security_score(&warnings, &critical_issues);
Ok(SecurityScanResult {
security_score,
file_hash,
warnings,
critical_issues,
scanned_at: chrono::Utc::now(),
})
}
pub fn scan_manifest(
&self,
manifest: &PluginManifest,
) -> Result<SecurityScanResult, SecurityError> {
let mut warnings = Vec::new();
let mut critical_issues = Vec::new();
self.check_resource_requirements(
&manifest.capabilities.resource_requirements,
&mut warnings,
&mut critical_issues,
);
self.check_permissions(
&manifest.capabilities.resource_requirements,
&mut warnings,
&mut critical_issues,
);
let security_score = self.calculate_security_score(&warnings, &critical_issues);
Ok(SecurityScanResult {
security_score,
file_hash: String::new(), warnings,
critical_issues,
scanned_at: chrono::Utc::now(),
})
}
pub fn verify_hash(&self, path: &Path, expected_hash: &str) -> Result<(), SecurityError> {
let actual_hash = self.calculate_file_hash(path)?;
if actual_hash != expected_hash {
return Err(SecurityError::HashMismatch {
expected: expected_hash.to_string(),
actual: actual_hash,
});
}
Ok(())
}
fn calculate_file_hash(&self, path: &Path) -> Result<String, SecurityError> {
let bytes = std::fs::read(path).map_err(|e| SecurityError::IoError(e.to_string()))?;
let mut hasher = Sha256::new();
hasher.update(&bytes);
let result = hasher.finalize();
Ok(hex::encode(result))
}
fn scan_for_malicious_patterns(
&self,
content: &str,
_warnings: &mut Vec<SecurityWarning>,
critical_issues: &mut Vec<SecurityIssue>,
) {
for pattern in &self.malicious_patterns {
if content.contains(pattern) {
critical_issues.push(SecurityIssue {
category: SecurityCategory::MaliciousCode,
description: format!("Detected potentially malicious pattern: {}", pattern),
recommendation: "Review the code carefully or reject the plugin".to_string(),
});
}
}
}
fn check_resource_requirements(
&self,
requirements: &ResourceRequirements,
warnings: &mut Vec<SecurityWarning>,
critical_issues: &mut Vec<SecurityIssue>,
) {
if let Some(max_mem) = requirements.max_memory_mb {
if max_mem > self.max_memory_mb {
critical_issues.push(SecurityIssue {
category: SecurityCategory::Resources,
description: format!(
"Plugin requires {}MB memory, exceeds limit of {}MB",
max_mem, self.max_memory_mb
),
recommendation: "Increase limit or reject the plugin".to_string(),
});
} else if max_mem > self.max_memory_mb / 2 {
warnings.push(SecurityWarning {
category: SecurityCategory::Resources,
message: format!("Plugin requires high memory: {}MB", max_mem),
score_impact: 10,
});
}
}
if let Some(cpu_cores) = requirements.cpu_cores {
if cpu_cores > self.max_cpu_cores {
warnings.push(SecurityWarning {
category: SecurityCategory::Resources,
message: format!(
"Plugin requires {} CPU cores, exceeds limit of {}",
cpu_cores, self.max_cpu_cores
),
score_impact: 10,
});
}
}
}
fn check_permissions(
&self,
requirements: &ResourceRequirements,
warnings: &mut Vec<SecurityWarning>,
critical_issues: &mut Vec<SecurityIssue>,
) {
if requirements.requires_network && !self.allow_network {
critical_issues.push(SecurityIssue {
category: SecurityCategory::Network,
description: "Plugin requires network access but it is not allowed".to_string(),
recommendation: "Enable network access or reject the plugin".to_string(),
});
} else if requirements.requires_network {
warnings.push(SecurityWarning {
category: SecurityCategory::Network,
message: "Plugin has network access - potential data exfiltration risk".to_string(),
score_impact: 15,
});
}
if requirements.requires_filesystem && !self.allow_filesystem {
critical_issues.push(SecurityIssue {
category: SecurityCategory::Filesystem,
description: "Plugin requires filesystem access but it is not allowed".to_string(),
recommendation: "Enable filesystem access or reject the plugin".to_string(),
});
} else if requirements.requires_filesystem {
warnings.push(SecurityWarning {
category: SecurityCategory::Filesystem,
message: "Plugin has filesystem access - potential security risk".to_string(),
score_impact: 15,
});
}
}
fn calculate_security_score(
&self,
warnings: &[SecurityWarning],
critical_issues: &[SecurityIssue],
) -> u8 {
let mut score = 100u8;
for warning in warnings {
score = score.saturating_sub(warning.score_impact);
}
let critical_deduction = (critical_issues.len() as u8).saturating_mul(30);
score = score.saturating_sub(critical_deduction);
score
}
}
/// Acceptance criteria applied to a [`SecurityScanResult`] via `check`.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct SecurityPolicy {
    /// Minimum acceptable security score (0-100).
    pub min_security_score: u8,
    /// Whether results containing warnings may pass.
    pub allow_warnings: bool,
    /// Whether results containing critical issues may pass.
    pub allow_critical_issues: bool,
    /// Whether file hashes must be verified. NOTE(review): `check` does
    /// not consult this flag; presumably enforced by callers — confirm.
    pub require_hash_verification: bool,
    /// Known-trusted hashes (presumably keyed by plugin identifier; not
    /// consulted by `check` in this module — TODO confirm usage).
    pub known_good_hashes: HashMap<String, String>,
}
impl Default for SecurityPolicy {
fn default() -> Self {
Self {
min_security_score: 70,
allow_warnings: true,
allow_critical_issues: false,
require_hash_verification: false,
known_good_hashes: HashMap::new(),
}
}
}
impl SecurityPolicy {
    /// A hardened policy: score must be at least 90, no warnings, no
    /// critical issues, and hash verification is mandatory.
    pub fn strict() -> Self {
        Self {
            min_security_score: 90,
            allow_warnings: false,
            allow_critical_issues: false,
            require_hash_verification: true,
            known_good_hashes: HashMap::new(),
        }
    }

    /// Accepts or rejects a scan result according to this policy.
    ///
    /// # Errors
    /// [`SecurityError::VulnerabilityDetected`] when the score is below
    /// the minimum, or when disallowed warnings / critical issues exist.
    pub fn check(&self, result: &SecurityScanResult) -> Result<(), SecurityError> {
        if result.security_score < self.min_security_score {
            let msg = format!(
                "Security score {} below minimum {}",
                result.security_score, self.min_security_score
            );
            return Err(SecurityError::VulnerabilityDetected(msg));
        }
        let warning_count = result.warnings.len();
        if warning_count > 0 && !self.allow_warnings {
            let msg = format!("Plugin has {} warnings, which are not allowed", warning_count);
            return Err(SecurityError::VulnerabilityDetected(msg));
        }
        let critical_count = result.critical_issues.len();
        if critical_count > 0 && !self.allow_critical_issues {
            let msg = format!("Plugin has {} critical issues", critical_count);
            return Err(SecurityError::VulnerabilityDetected(msg));
        }
        Ok(())
    }
}
#[cfg(test)]
mod tests {
    use super::*;

    // Default construction exposes the moderate limits from `Default`.
    #[test]
    fn test_security_scanner_creation() {
        let scanner = PluginSecurityScanner::new();
        assert_eq!(scanner.max_memory_mb, 1024);
        assert_eq!(scanner.max_cpu_cores, 4);
    }

    // Strict construction tightens resource limits and denies all access.
    #[test]
    fn test_strict_scanner() {
        let scanner = PluginSecurityScanner::strict();
        assert_eq!(scanner.max_memory_mb, 256);
        assert_eq!(scanner.max_cpu_cores, 1);
        assert!(!scanner.allow_network);
        assert!(!scanner.allow_filesystem);
    }

    // The `with_*` builder methods override the defaults in place.
    #[test]
    fn test_scanner_configuration() {
        let scanner = PluginSecurityScanner::new()
            .with_max_memory(512)
            .with_network_access(true)
            .with_filesystem_access(true);
        assert_eq!(scanner.max_memory_mb, 512);
        assert!(scanner.allow_network);
        assert!(scanner.allow_filesystem);
    }

    // Warnings deduct their `score_impact` from the base score of 100.
    #[test]
    fn test_security_score_calculation() {
        let scanner = PluginSecurityScanner::new();
        let warnings = vec![
            SecurityWarning {
                category: SecurityCategory::Resources,
                message: "High memory".to_string(),
                score_impact: 10,
            },
            SecurityWarning {
                category: SecurityCategory::Network,
                message: "Network access".to_string(),
                score_impact: 15,
            },
        ];
        let critical_issues = vec![];
        let score = scanner.calculate_security_score(&warnings, &critical_issues);
        // 100 - 10 - 15
        assert_eq!(score, 75);
    }

    // Each critical issue deducts a flat 30 points.
    #[test]
    fn test_security_score_with_critical_issues() {
        let scanner = PluginSecurityScanner::new();
        let warnings = vec![];
        let critical_issues = vec![SecurityIssue {
            category: SecurityCategory::MaliciousCode,
            description: "Malicious pattern".to_string(),
            recommendation: "Reject".to_string(),
        }];
        let score = scanner.calculate_security_score(&warnings, &critical_issues);
        // 100 - 30
        assert_eq!(score, 70);
    }

    #[test]
    fn test_security_policy_default() {
        let policy = SecurityPolicy::default();
        assert_eq!(policy.min_security_score, 70);
        assert!(policy.allow_warnings);
        assert!(!policy.allow_critical_issues);
    }

    #[test]
    fn test_security_policy_strict() {
        let policy = SecurityPolicy::strict();
        assert_eq!(policy.min_security_score, 90);
        assert!(!policy.allow_warnings);
        assert!(!policy.allow_critical_issues);
        assert!(policy.require_hash_verification);
    }

    // A clean result above the minimum score passes the default policy.
    #[test]
    fn test_policy_check_passes() {
        let policy = SecurityPolicy::default();
        let result = SecurityScanResult {
            security_score: 80,
            file_hash: "abc123".to_string(),
            warnings: vec![],
            critical_issues: vec![],
            scanned_at: chrono::Utc::now(),
        };
        assert!(policy.check(&result).is_ok());
    }

    // A score below `min_security_score` is rejected.
    #[test]
    fn test_policy_check_fails_score() {
        let policy = SecurityPolicy::default();
        let result = SecurityScanResult {
            security_score: 50,
            file_hash: "abc123".to_string(),
            warnings: vec![],
            critical_issues: vec![],
            scanned_at: chrono::Utc::now(),
        };
        assert!(policy.check(&result).is_err());
    }

    // Critical issues are rejected by the default policy even when the
    // score itself is acceptable.
    #[test]
    fn test_policy_check_fails_critical_issues() {
        let policy = SecurityPolicy::default();
        let result = SecurityScanResult {
            security_score: 80,
            file_hash: "abc123".to_string(),
            warnings: vec![],
            critical_issues: vec![SecurityIssue {
                category: SecurityCategory::MaliciousCode,
                description: "Malicious".to_string(),
                recommendation: "Reject".to_string(),
            }],
            scanned_at: chrono::Utc::now(),
        };
        assert!(policy.check(&result).is_err());
    }
}