use alloc::{
collections::BTreeMap,
string::{String, ToString},
vec::Vec,
};
use serde::{Deserialize, Serialize};
use super::ObfuscationMap;
/// Tunable knobs that control how aggressive map generation is.
///
/// Produced by [`get_temperature_profile`] / [`get_config_from_temperature`]
/// and consumed by the pool/map generators below.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct TemperatureConfig {
    // Normalized aggressiveness in [0.0, 1.0]; gates which word pools are used
    // and scales the number of random filler strings.
    pub temperature: f64,
    // Minimum substitutions generated per charset character (inclusive).
    pub min_mappings: usize,
    // Maximum substitutions generated per charset character (inclusive).
    pub max_mappings: usize,
    // Minimum length of a substitution string (inclusive).
    pub min_length: usize,
    // Maximum length of a substitution string (inclusive).
    pub max_length: usize,
    // Advertised output-size multiplier for this profile.
    // NOTE(review): descriptive only in this file — nothing here enforces it.
    pub expansion_ratio: f64,
    // Relative cost estimate for this profile (arbitrary units).
    pub compute_score: u32,
}
impl Default for TemperatureConfig {
fn default() -> Self {
Self {
temperature: 0.5,
min_mappings: 2,
max_mappings: 5,
min_length: 2,
max_length: 8,
expansion_ratio: 2.1,
compute_score: 40,
}
}
}
/// Looks up a named generation profile.
///
/// Known names are `"minimal"`, `"low"`, `"medium"`, `"high"` and `"extreme"`;
/// any other name yields `None`.
pub fn get_temperature_profile(name: &str) -> Option<TemperatureConfig> {
    // (temperature, min_mappings, max_mappings, min_length, max_length,
    //  expansion_ratio, compute_score)
    let row = match name {
        "minimal" => (0.0, 1, 2, 2, 3, 1.5, 5),
        "low" => (0.2, 1, 3, 2, 4, 1.5, 15),
        "medium" => (0.5, 2, 5, 2, 8, 2.1, 40),
        "high" => (0.8, 3, 8, 3, 12, 3.7, 75),
        "extreme" => (1.0, 5, 15, 4, 20, 6.2, 100),
        _ => return None,
    };
    let (temperature, min_mappings, max_mappings, min_length, max_length, expansion_ratio, compute_score) = row;
    Some(TemperatureConfig {
        temperature,
        min_mappings,
        max_mappings,
        min_length,
        max_length,
        expansion_ratio,
        compute_score,
    })
}
/// Maps a continuous temperature in [0.0, 1.0] (clamped) onto one of the
/// five named profiles. Bucket upper bounds are inclusive: 0.1, 0.3, 0.6, 0.85.
pub fn get_config_from_temperature(temperature: f64) -> TemperatureConfig {
    let t = temperature.clamp(0.0, 1.0);
    let name = if t <= 0.1 {
        "minimal"
    } else if t <= 0.3 {
        "low"
    } else if t <= 0.6 {
        "medium"
    } else if t <= 0.85 {
        "high"
    } else {
        "extreme"
    };
    // All five names above are built in, so the lookup cannot fail.
    get_temperature_profile(name).expect("built-in profile name must resolve")
}
/// Caller-facing options for [`generate_map`].
#[derive(Debug, Clone)]
pub struct GenerateMapOptions {
    // Aggressiveness in [0.0, 1.0]; values outside are clamped by generate_map.
    pub temperature: f64,
    // Deterministic seed string. When `None`, a seed is derived from the
    // temperature's bit pattern so equal temperatures give equal maps.
    pub seed: Option<String>,
    // Characters to generate mappings for. `None` uses DEFAULT_CHARSET.
    pub charset: Option<String>,
}
impl Default for GenerateMapOptions {
fn default() -> Self {
Self {
temperature: 0.5,
seed: None,
charset: None,
}
}
}
/// Tiny deterministic PRNG: a linear congruential generator with modulus
/// 233280, seeded from a base-31 polynomial hash of a string.
/// Not cryptographically secure — used only for reproducible map generation.
struct SeededRandom {
    // Current LCG state; always in 1..233280 after the first `next()`.
    state: u64,
}

impl SeededRandom {
    /// Seeds the generator from `seed` via sum(byte[i] * 31^i), with
    /// wrapping arithmetic for long inputs.
    fn new(seed: &str) -> Self {
        let mut acc: u64 = 0;
        for (pos, byte) in seed.bytes().enumerate() {
            let weight = 31u64.wrapping_pow(pos as u32);
            acc = acc.wrapping_add((byte as u64).wrapping_mul(weight));
        }
        // A zero state would make the hash degenerate; force at least 1.
        SeededRandom { state: acc.max(1) }
    }

    /// Advances the LCG and returns the new state (always < 233280).
    fn next(&mut self) -> u64 {
        let advanced = self.state.wrapping_mul(9301).wrapping_add(49297);
        self.state = advanced % 233280;
        self.state
    }

    /// Value in [0, max) via modulo reduction; returns 0 when `max` is 0.
    fn next_usize(&mut self, max: usize) -> usize {
        match max {
            0 => 0,
            m => self.next() as usize % m,
        }
    }

    /// Value in the inclusive range [min, max]; degenerates to `min` when
    /// the range is empty or inverted.
    fn next_range(&mut self, min: usize, max: usize) -> usize {
        if max <= min {
            min
        } else {
            min + self.next_usize(max - min + 1)
        }
    }
}
// Two-letter filler tokens ("aa".."az", "ba".."bz").
// NOTE(review): "be" is absent from this list — it appears in SHORT_WORDS
// below; presumably deliberate to limit duplicates, but confirm.
const SIMPLE_WORDS: &[&str] = &[
    "aa", "ab", "ac", "ad", "ae", "af", "ag", "ah", "ai", "aj", "ak", "al", "am",
    "an", "ao", "ap", "aq", "ar", "as", "at", "au", "av", "aw", "ax", "ay", "az",
    "ba", "bb", "bc", "bd", "bf", "bg", "bh", "bi", "bj", "bk", "bl", "bm",
    "bn", "bo", "bp", "bq", "br", "bs", "bt", "bu", "bv", "bw", "bx", "by", "bz",
];
// Real two-letter English words; enabled together with SIMPLE_WORDS.
const SHORT_WORDS: &[&str] = &[
    "ox", "go", "hi", "me", "we", "up", "in", "on", "at", "to",
    "be", "do", "he", "it", "my", "no", "of", "so", "us", "an",
];
// Three/four-letter words; enabled once max_length >= 3.
const MEDIUM_WORDS: &[&str] = &[
    "cat", "dog", "sun", "moon", "star", "tree", "bird", "fish",
    "book", "door", "hand", "eye", "car", "red", "blue", "gold",
    "fire", "water", "earth", "wind",
];
// Five-letter words; enabled once max_length >= 5.
const WORDS: &[&str] = &[
    "apple", "beach", "cloud", "dream", "eagle", "flame", "green",
    "heart", "ivory", "jewel", "knife", "lemon", "magic", "north",
    "ocean", "pearl", "quiet", "river", "storm", "tower",
];
// Underscore-joined phrases; enabled at temperature >= 0.5 and max_length >= 10.
const PHRASES: &[&str] = &[
    "bright_star", "deep_ocean", "wild_forest", "golden_sand",
    "silver_moon", "crystal_clear", "gentle_breeze", "warm_sunshine",
    "cool_shadow", "fresh_water", "ancient_tree", "peaceful_valley",
    "endless_sky", "hidden_treasure", "mystic_fog",
];
// Long compound tokens; enabled at temperature >= 0.8 and max_length >= 15.
const COMPLEX: &[&str] = &[
    "thunderstorm_approaching", "crystalline_formation",
    "electromagnetic_pulse", "quantum_entanglement",
    "bioluminescent_glow", "aerodynamic_structure",
    "photosynthetic_process", "metamorphic_transformation",
    "exponential_growth", "algorithmic_complexity",
];
// Characters mapped when the caller does not supply a charset:
// ASCII letters, digits, whitespace (space/tab/newline) and punctuation.
const DEFAULT_CHARSET: &str =
    "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789 \t\n.,!?;:()[]{}\"'-=+*/%&@#$^~`|\\<>";
/// Produces `count` random strings whose lengths fall in the inclusive range
/// [min_length, max_length], drawn from an ASCII-alphanumeric-plus-`_-`
/// alphabet. Consumes `rng` in a fixed order (length first, then each char),
/// so output is fully determined by the generator state.
fn generate_random_strings(
    count: usize,
    min_length: usize,
    max_length: usize,
    rng: &mut SeededRandom,
) -> Vec<String> {
    const CHARS: &[u8] = b"abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789_-";
    (0..count)
        .map(|_| {
            let length = rng.next_range(min_length, max_length);
            (0..length)
                .map(|_| CHARS[rng.next_usize(CHARS.len())] as char)
                .collect()
        })
        .collect()
}
/// Builds the shuffled pool of candidate replacement strings for one run.
///
/// Steps: (1) collect static word lists gated by `config.max_length` and
/// `config.temperature`; (2) add `temperature * 200` seeded random strings;
/// (3) sort, then filter by length bounds and reject control characters
/// (other than tab/newline) and '\x1F' — presumably reserved as a delimiter
/// elsewhere in the crate, TODO confirm; (4) top up with random strings to a
/// temperature-scaled target size; (5) Fisher-Yates shuffle driven by `rng`.
/// The RNG call order is significant: any reordering changes the output.
fn generate_word_pool(config: &TemperatureConfig, rng: &mut SeededRandom) -> Vec<String> {
    let mut pools: Vec<String> = Vec::new();
    // Static dictionaries, enabled progressively as max_length allows.
    if config.max_length >= 2 {
        pools.extend(SIMPLE_WORDS.iter().map(|s| s.to_string()));
        pools.extend(SHORT_WORDS.iter().map(|s| s.to_string()));
    }
    if config.max_length >= 3 {
        pools.extend(MEDIUM_WORDS.iter().map(|s| s.to_string()));
    }
    if config.max_length >= 5 {
        pools.extend(WORDS.iter().map(|s| s.to_string()));
    }
    // Longer material only at higher temperatures.
    if config.temperature >= 0.5 && config.max_length >= 10 {
        pools.extend(PHRASES.iter().map(|s| s.to_string()));
    }
    if config.temperature >= 0.8 && config.max_length >= 15 {
        pools.extend(COMPLEX.iter().map(|s| s.to_string()));
    }
    // Random filler scaled by temperature (0..=200 strings).
    let random_count = (config.temperature * 200.0) as usize;
    if random_count > 0 {
        pools.extend(generate_random_strings(
            random_count,
            config.min_length,
            config.max_length,
            rng,
        ));
    }
    // Sort before filtering so the later shuffle starts from a
    // deterministic order regardless of pool-assembly order.
    pools.sort();
    // Mappings are never allowed to be shorter than 2 characters.
    let min_mapping_length = config.min_length.max(2);
    let mut filtered: Vec<String> = pools
        .into_iter()
        .filter(|word| {
            word.len() >= min_mapping_length
                && word.len() <= config.max_length
                && !word.chars().any(|c| {
                    c == '\x1F' || (c < ' ' && c != '\t' && c != '\n')
                })
        })
        .collect();
    let min_pool_size = 50;
    let target_pool_size = min_pool_size + ((config.temperature * 150.0) as usize);
    // Top up with unique random strings until the target size is reached.
    // NOTE(review): if the small-period RNG keeps producing duplicates this
    // loop could in principle spin for a long time — confirm acceptable.
    while filtered.len() < target_pool_size {
        let needed = target_pool_size - filtered.len();
        let additional = generate_random_strings(
            needed * 2, min_mapping_length, config.max_length,
            rng,
        );
        for word in additional {
            if word.len() >= min_mapping_length
                && word.len() <= config.max_length
                && !word.chars().any(|c| c == '\x1F' || (c < ' ' && c != '\t' && c != '\n'))
                && !filtered.contains(&word) {
                filtered.push(word);
                if filtered.len() >= target_pool_size {
                    break;
                }
            }
        }
    }
    // NOTE(review): this block looks unreachable — the loop above only exits
    // once filtered.len() >= target_pool_size, and target_pool_size is always
    // >= min_pool_size. Kept as-is; confirm before removing.
    if filtered.len() < min_pool_size {
        let needed = min_pool_size - filtered.len();
        let additional = generate_random_strings(
            needed * 2,
            min_mapping_length, config.max_length,
            rng,
        );
        for word in additional {
            if word.len() >= min_mapping_length
                && word.len() <= config.max_length
                && !word.chars().any(|c| c == '\x1F' || (c < ' ' && c != '\t' && c != '\n'))
                && !filtered.contains(&word) {
                filtered.push(word);
                if filtered.len() >= min_pool_size {
                    break;
                }
            }
        }
    }
    // Re-sort so the shuffle below operates on a canonical order, then
    // perform a seeded Fisher-Yates shuffle.
    filtered.sort();
    for i in (1..filtered.len()).rev() {
        let j = rng.next_usize(i + 1);
        filtered.swap(i, j);
    }
    filtered
}
/// Generates a deterministic obfuscation map: each character of the charset
/// is assigned a seeded, per-character number of unique replacement strings.
///
/// Determinism: every random decision is driven by `SeededRandom` instances
/// keyed off the seed string (or a temperature-derived default seed), so the
/// same options always produce the same map.
pub fn generate_map(options: Option<GenerateMapOptions>) -> ObfuscationMap {
    let opts = options.unwrap_or_default();
    let temperature = opts.temperature.clamp(0.0, 1.0);
    // Default seed is a hash of the temperature's bit pattern so equal
    // temperatures without an explicit seed still yield identical maps.
    let seed = opts.seed.unwrap_or_else(|| {
        let temp_bits = temperature.to_bits();
        let mut hash: u64 = 0;
        for i in 0..8 {
            hash = hash.wrapping_mul(31).wrapping_add(((temp_bits >> (i * 8)) & 0xFF) as u64);
        }
        format!("{:016x}", hash)
    });
    let charset = opts.charset.unwrap_or_else(|| DEFAULT_CHARSET.to_string());
    let config = get_config_from_temperature(temperature);
    // Dedicated RNG stream for pool construction.
    let mut pool_rng = SeededRandom::new(&format!("{}-pool", seed));
    let word_pool = generate_word_pool(&config, &mut pool_rng);
    let mut map: ObfuscationMap = BTreeMap::new();
    // Tracks every assigned word so no two characters share a mapping.
    // NOTE(review): the file imports collections from `alloc`, but this uses
    // `std::collections::HashSet` (and `format!` from the std prelude) —
    // confirm the crate is not meant to be no_std.
    let mut used_mappings: std::collections::HashSet<String> = std::collections::HashSet::new();
    // Sort the charset so iteration order is independent of caller input order.
    let mut charset_chars: Vec<char> = charset.chars().collect();
    charset_chars.sort();
    // Per-character mapping counts come from per-character RNG streams, so
    // adding/removing charset characters does not perturb the others.
    let char_mapping_counts: Vec<(char, usize)> = charset_chars.iter().map(|&ch| {
        let mut count_rng = SeededRandom::new(&format!("{}-count-{}", seed, ch as u32));
        let count = count_rng.next_range(config.min_mappings, config.max_mappings);
        (ch, count)
    }).collect();
    let total_mappings_needed: usize = char_mapping_counts.iter().map(|(_, count)| count).sum();
    let min_extend_length = config.min_length.max(2);
    // NOTE(review): `word_pool` is not used again after this clone — the
    // clone appears unnecessary.
    let mut extended_word_pool = word_pool.clone();
    // If the pool is too small to cover every requested mapping, extend it
    // with filtered, deduplicated random strings from a separate RNG stream.
    if extended_word_pool.len() < total_mappings_needed {
        let needed = total_mappings_needed - extended_word_pool.len();
        let mut extend_rng = SeededRandom::new(&format!("{}-extend", seed));
        let additional = generate_random_strings(
            needed * 3, min_extend_length,
            config.max_length,
            &mut extend_rng,
        );
        let mut filtered_additional: Vec<String> = additional.into_iter()
            .filter(|word| {
                word.len() >= 2 && word.len() >= min_extend_length
                    && word.len() <= config.max_length
                    && !word.chars().any(|c| c == '\x1F' || (c < ' ' && c != '\t' && c != '\n'))
                    && !extended_word_pool.contains(word)
            })
            .collect();
        filtered_additional.sort();
        extended_word_pool.extend(filtered_additional.into_iter().take(total_mappings_needed * 2));
    }
    // NOTE(review): this sort discards the seeded shuffle performed at the end
    // of generate_word_pool, so words are consumed in lexicographic order —
    // confirm that is intentional.
    extended_word_pool.sort();
    let mut word_pool_index = 0;
    for (char, num_mappings) in char_mapping_counts {
        let mut mappings: Vec<String> = Vec::with_capacity(num_mappings);
        // First pass: take unused words from the shared pool in order.
        while mappings.len() < num_mappings && word_pool_index < extended_word_pool.len() {
            let word = &extended_word_pool[word_pool_index];
            word_pool_index += 1;
            if !used_mappings.contains(word) {
                mappings.push(word.clone());
                used_mappings.insert(word.clone());
            }
        }
        let min_fallback_length = config.min_length.max(2);
        // Second pass: if the pool ran dry, generate per-character fallback
        // words; the attempt cap prevents an unbounded loop on collisions.
        let mut fallback_rng = SeededRandom::new(&format!("{}-fallback-{}", seed, char as u32));
        let mut fallback_attempts = 0;
        while mappings.len() < num_mappings && fallback_attempts < num_mappings * 100 {
            fallback_attempts += 1;
            let random_words = generate_random_strings(1, min_fallback_length, config.max_length, &mut fallback_rng);
            if let Some(word) = random_words.into_iter().next() {
                if word.len() >= 2
                    && !word.contains('\x1F')
                    && !word.chars().any(|c| c < ' ' && c != '\t' && c != '\n')
                    && !used_mappings.contains(&word) {
                    used_mappings.insert(word.clone());
                    mappings.push(word);
                }
            }
        }
        // Canonical order within each character's mapping list.
        mappings.sort();
        map.insert(char, mappings);
    }
    map
}
/// Aggregate statistics computed by [`analyze_map`] over a generated map.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct MapAnalysis {
    // Rough reverse-estimate of the temperature that produced the map,
    // clamped to [0.0, 1.0]; not guaranteed to round-trip.
    pub temperature: f64,
    // Total number of replacement strings across all characters.
    pub total_mappings: usize,
    // total_mappings divided by the number of mapped characters.
    pub average_mappings_per_char: f64,
    // Mean byte length of a replacement string.
    pub average_mapping_length: f64,
    // In analyze_map this simply mirrors average_mapping_length.
    pub expansion_ratio: f64,
    // Heuristic cost estimate derived from the averages above.
    pub compute_score: u32,
    // Mean per-character entropy in bits, assuming uniform mapping choice.
    pub entropy: f64,
}
/// Derives aggregate statistics from a generated obfuscation map.
///
/// `temperature` and `compute_score` are heuristic reverse-estimates of the
/// settings that produced the map; they are not guaranteed to round-trip.
/// An empty map yields all-zero statistics.
pub fn analyze_map(map: &ObfuscationMap) -> MapAnalysis {
    let mut total_mappings = 0;
    let mut total_mapping_length = 0;
    let mut entropy = 0.0;
    for mappings in map.values() {
        total_mappings += mappings.len();
        total_mapping_length += mappings.iter().map(|m| m.len()).sum::<usize>();
        if !mappings.is_empty() {
            // Uniform choice among n mappings contributes log2(n) bits:
            // n * (-p * log2 p) with p = 1/n.
            let p = 1.0 / mappings.len() as f64;
            entropy += mappings.len() as f64 * (-p * p.log2());
        }
    }
    let char_count = map.len();
    let average_mappings_per_char = if char_count > 0 {
        total_mappings as f64 / char_count as f64
    } else {
        0.0
    };
    let average_mapping_length = if total_mappings > 0 {
        total_mapping_length as f64 / total_mappings as f64
    } else {
        0.0
    };
    let expansion_ratio = average_mapping_length;
    // Rough inverse of the profile tables: both terms scaled into [0, 1].
    let estimated_temperature = ((average_mappings_per_char - 1.0) / 10.0 + expansion_ratio / 10.0)
        .clamp(0.0, 1.0);
    // log2(0) is -inf for an empty map; the `as u32` cast saturates to 0.
    let compute_score = (average_mappings_per_char.log2() * 10.0 + average_mapping_length * 2.0) as u32;
    MapAnalysis {
        temperature: estimated_temperature,
        total_mappings,
        average_mappings_per_char,
        average_mapping_length,
        expansion_ratio,
        compute_score,
        // BUG FIX: dividing by char_count unconditionally produced NaN
        // (0.0 / 0.0) for an empty map; report 0.0 entropy instead.
        entropy: if char_count > 0 {
            entropy / char_count as f64
        } else {
            0.0
        },
    }
}
/// Estimates the size blow-up of obfuscated output: the sum of each
/// character's average replacement length divided by the total UTF-8 byte
/// length of the mapped characters. Returns 1.0 for an empty map.
pub fn get_expansion_ratio(map: &ObfuscationMap) -> f64 {
    let mut original_bytes = 0usize;
    let mut expanded = 0.0_f64;
    for (ch, substitutes) in map {
        original_bytes += ch.len_utf8();
        let count = substitutes.len();
        if count > 0 {
            let combined: usize = substitutes.iter().map(String::len).sum();
            expanded += combined as f64 / count as f64;
        }
    }
    if original_bytes == 0 {
        1.0
    } else {
        expanded / original_bytes as f64
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    // A default-options map must cover some characters, each with at least
    // one mapping.
    #[test]
    fn test_generate_map_default() {
        let map = generate_map(None);
        assert!(!map.is_empty());
        for mappings in map.values() {
            assert!(!mappings.is_empty());
        }
    }
    // Identical options (same seed/charset/temperature) must reproduce the
    // exact same map.
    #[test]
    fn test_generate_map_deterministic() {
        let opts = GenerateMapOptions {
            temperature: 0.5,
            seed: Some("test-seed-123".to_string()),
            charset: Some("abc".to_string()),
        };
        let map1 = generate_map(Some(opts.clone()));
        let map2 = generate_map(Some(opts));
        assert_eq!(map1, map2);
    }
    // No replacement string may be shared between any two characters.
    #[test]
    fn test_generate_map_unique_mappings() {
        let map = generate_map(Some(GenerateMapOptions {
            temperature: 0.5,
            seed: Some("unique-test".to_string()),
            charset: Some("abcdef".to_string()),
        }));
        let mut all_mappings: Vec<&String> = Vec::new();
        for mappings in map.values() {
            all_mappings.extend(mappings.iter());
        }
        let mut seen: std::collections::HashSet<&String> = std::collections::HashSet::new();
        for mapping in &all_mappings {
            assert!(!seen.contains(mapping), "Duplicate mapping found: {}", mapping);
            seen.insert(mapping);
        }
    }
    // All five built-in profile names resolve; unknown names do not.
    #[test]
    fn test_temperature_profiles() {
        assert!(get_temperature_profile("minimal").is_some());
        assert!(get_temperature_profile("low").is_some());
        assert!(get_temperature_profile("medium").is_some());
        assert!(get_temperature_profile("high").is_some());
        assert!(get_temperature_profile("extreme").is_some());
        assert!(get_temperature_profile("invalid").is_none());
    }
    // Analysis of a non-empty map yields strictly positive statistics.
    #[test]
    fn test_analyze_map() {
        let map = generate_map(Some(GenerateMapOptions {
            temperature: 0.5,
            seed: Some("analyze-test".to_string()),
            charset: Some("abc".to_string()),
        }));
        let analysis = analyze_map(&map);
        assert!(analysis.total_mappings > 0);
        assert!(analysis.average_mappings_per_char > 0.0);
        assert!(analysis.average_mapping_length > 0.0);
    }
    // Expansion ratio of a generated map is positive.
    #[test]
    fn test_expansion_ratio() {
        let map = generate_map(Some(GenerateMapOptions {
            temperature: 0.5,
            seed: Some("ratio-test".to_string()),
            charset: Some("ab".to_string()),
        }));
        let ratio = get_expansion_ratio(&map);
        assert!(ratio > 0.0);
    }
}