ngdp_client/commands/
install.rs

1use crate::{InstallCommands, InstallType as CliInstallType, OutputFormat, wago_api};
2use comfy_table::{Cell, ContentArrangement, Table, presets::UTF8_FULL};
3use indicatif::{ProgressBar, ProgressStyle};
4use ngdp_bpsv::{BpsvBuilder, BpsvFieldType, BpsvValue};
5use ngdp_cache::cached_cdn_client::CachedCdnClient;
6use ngdp_cache::hybrid_version_client::HybridVersionClient;
7use ribbit_client::Region;
8use std::collections::HashMap;
9use std::path::{Path, PathBuf};
10use tact_parser::download::DownloadManifest;
11use tact_parser::encoding::EncodingFile;
12use tact_parser::install::InstallManifest;
13use tracing::{debug, error, info, warn};
14
15/// Unified file entry for both install and download manifests
16#[derive(Debug, Clone)]
17struct FileEntry {
18    path: String,
19    ckey: Vec<u8>, // For install manifest entries, for download manifest this is ekey
20    size: u64,
21    priority: i8,
22}
23
24/// Archive location information for a file
25#[derive(Debug, Clone)]
26struct ArchiveLocation {
27    archive_hash: String,
28    offset: usize,
29    size: usize,
30}
31
32/// Combined archive index mapping EKeys to archive locations
33#[derive(Debug)]
34struct ArchiveIndex {
35    map: HashMap<String, ArchiveLocation>, // Full EKey (uppercase hex) -> (archive, offset, size)
36}
37
38impl ArchiveIndex {
39    /// Create an empty archive index
40    fn new() -> Self {
41        Self {
42            map: HashMap::new(),
43        }
44    }
45
46    /// Look up archive location for an EKey
47    fn lookup(&self, ekey: &[u8]) -> Option<&ArchiveLocation> {
48        // Convert EKey to uppercase hex string for lookup
49        let lookup_key = hex::encode(ekey).to_uppercase();
50
51        let result = self.map.get(&lookup_key);
52        if result.is_none() && !self.map.is_empty() {
53            debug!(
54                "EKey {} not found in {} archive entries",
55                lookup_key,
56                self.map.len()
57            );
58        }
59        result
60    }
61
62    /// Parse a single archive index and add entries to this index
63    /// Using BuildBackup's exact format: 4096-byte blocks with 170 entries each
64    fn parse_and_add_index(
65        &mut self,
66        archive_hash: &str,
67        index_data: &[u8],
68    ) -> Result<usize, Box<dyn std::error::Error>> {
69        use byteorder::{BigEndian, ReadBytesExt};
70        use std::io::{Cursor, Read};
71
72        // BuildBackup format: fixed 4096-byte blocks with 170 entries of 24 bytes each
73        const BLOCK_SIZE: usize = 4096;
74        const ENTRIES_PER_BLOCK: usize = 170;
75        const _ENTRY_SIZE: usize = 24; // 16 bytes hash + 4 bytes size + 4 bytes offset
76        const BLOCK_CHECKSUM_SIZE: usize = 16;
77
78        let num_blocks = index_data.len() / BLOCK_SIZE;
79        let mut cursor = Cursor::new(index_data);
80        let mut entries_added = 0;
81
82        debug!(
83            "Parsing archive index {}: {} blocks ({} bytes total)",
84            archive_hash,
85            num_blocks,
86            index_data.len()
87        );
88
89        for block_idx in 0..num_blocks {
90            // Read 170 entries per block
91            for entry_idx in 0..ENTRIES_PER_BLOCK {
92                // Read 16-byte EKey
93                let mut ekey_bytes = [0u8; 16];
94                if cursor.read_exact(&mut ekey_bytes).is_err() {
95                    debug!("Failed to read entry {} in block {}", entry_idx, block_idx);
96                    break;
97                }
98
99                // Read 4-byte size (big-endian per BuildBackup)
100                let size = cursor.read_u32::<BigEndian>()? as usize;
101
102                // Read 4-byte offset (big-endian per BuildBackup)
103                let offset = cursor.read_u32::<BigEndian>()? as usize;
104
105                // Skip null entries
106                let ekey_hex = hex::encode(ekey_bytes).to_uppercase();
107                if ekey_hex == "00000000000000000000000000000000" || size == 0 {
108                    continue;
109                }
110
111                // Add valid entries (with reasonable size limit)
112                if size > 0 && size < 100_000_000 {
113                    // Max 100MB per file
114                    let location = ArchiveLocation {
115                        archive_hash: archive_hash.to_string(),
116                        offset,
117                        size,
118                    };
119
120                    // Store with uppercase hex key for consistent lookups
121                    self.map.insert(ekey_hex, location);
122                    entries_added += 1;
123                }
124            }
125
126            // Skip the 16-byte block checksum at end of each block
127            let mut checksum = [0u8; BLOCK_CHECKSUM_SIZE];
128            let _ = cursor.read_exact(&mut checksum);
129        }
130
131        debug!(
132            "Parsed archive index {}: {} entries added from {} blocks",
133            archive_hash, entries_added, num_blocks
134        );
135
136        Ok(entries_added)
137    }
138}
139
140/// Download file using archive index or fallback to loose file
141async fn download_file_with_archive(
142    cdn_client: &CachedCdnClient,
143    archive_index: &ArchiveIndex,
144    cdn_host: &str,
145    cdn_path: &str,
146    ekey_hex: &str,
147) -> Result<Vec<u8>, Box<dyn std::error::Error>> {
148    let ekey_bytes = hex::decode(ekey_hex)?;
149
150    debug!(
151        "Looking up EKey {} (len={}) in archive index...",
152        ekey_hex,
153        ekey_bytes.len()
154    );
155
156    // First, try to find the file in archives
157    if let Some(location) = archive_index.lookup(&ekey_bytes) {
158        info!(
159            "✓ Found {} in archive {} at offset {}, size {}",
160            ekey_hex, location.archive_hash, location.offset, location.size
161        );
162
163        info!(
164            "Attempting archive byte-range download from {}",
165            location.archive_hash
166        );
167
168        // Try archive range download - archive data files should exist on CDN
169        info!(
170            "Attempting archive range download from {}",
171            location.archive_hash
172        );
173        match download_archive_range(
174            cdn_client,
175            cdn_path,
176            &location.archive_hash,
177            location.offset,
178            location.size,
179        )
180        .await
181        {
182            Ok(data) => {
183                // Decompress BLTE if needed
184                if data.starts_with(b"BLTE") {
185                    match blte::decompress_blte(data.clone(), None) {
186                        Ok(decompressed) => return Ok(decompressed),
187                        Err(e) => {
188                            warn!("Failed to decompress BLTE from archive: {}", e);
189                            return Ok(data);
190                        }
191                    }
192                } else {
193                    return Ok(data);
194                }
195            }
196            Err(e) => {
197                warn!(
198                    "Failed to download from archive {}: {}",
199                    location.archive_hash, e
200                );
201            }
202        }
203    } else {
204        warn!(
205            "❌ EKey {} NOT found in any archive - falling back to loose file download",
206            ekey_hex
207        );
208    }
209
210    // Fallback to loose file download
211    info!("⬇️ Attempting loose file download for {}", ekey_hex);
212    match cdn_client.download_data(cdn_host, cdn_path, ekey_hex).await {
213        Ok(response) => {
214            let data = response.bytes().await?;
215
216            // Decompress BLTE if needed
217            if data.starts_with(b"BLTE") {
218                match blte::decompress_blte(data.to_vec(), None) {
219                    Ok(decompressed) => Ok(decompressed),
220                    Err(e) => {
221                        warn!("Failed to decompress BLTE: {}", e);
222                        Ok(data.to_vec())
223                    }
224                }
225            } else {
226                Ok(data.to_vec())
227            }
228        }
229        Err(e) => Err(Box::new(e)),
230    }
231}
232
233/// Download byte range from archive file
234async fn download_archive_range(
235    _cdn_client: &CachedCdnClient,
236    cdn_path: &str,
237    archive_hash: &str,
238    offset: usize,
239    size: usize,
240) -> Result<Vec<u8>, Box<dyn std::error::Error>> {
241    // Try to download from different CDN hosts
242    let hosts = vec![
243        "blzddist1-a.akamaihd.net",
244        "level3.blizzard.com",
245        "us.cdn.blizzard.com",
246        "cdn.arctium.tools",
247        "tact.mirror.reliquaryhq.com",
248    ];
249
250    for host in &hosts {
251        let url = format!(
252            "http://{}/{}/data/{}/{}/{}",
253            host,
254            cdn_path,
255            &archive_hash[0..2],
256            &archive_hash[2..4],
257            archive_hash
258        );
259
260        let client = reqwest::Client::new();
261        let range_header = format!("bytes={}-{}", offset, offset + size - 1);
262
263        match client.get(&url).header("Range", range_header).send().await {
264            Ok(response) => {
265                if response.status().is_success() {
266                    match response.bytes().await {
267                        Ok(data) => {
268                            debug!(
269                                "Downloaded {} bytes from archive {} ({})",
270                                data.len(),
271                                archive_hash,
272                                host
273                            );
274                            return Ok(data.to_vec());
275                        }
276                        Err(e) => warn!("Failed to read archive range response: {}", e),
277                    }
278                } else {
279                    warn!(
280                        "Archive range request failed: {} from {}",
281                        response.status(),
282                        host
283                    );
284                }
285            }
286            Err(e) => warn!("Archive range request failed from {}: {}", host, e),
287        }
288    }
289
290    Err("Failed to download archive range from all CDNs".into())
291}
292
293/// Download archive index with .index suffix using direct HTTP
294async fn download_archive_index(
295    _cdn_client: &CachedCdnClient,
296    cdn_path: &str,
297    archive_hash: &str,
298) -> Result<Vec<u8>, Box<dyn std::error::Error>> {
299    // For now, let's create a simple cache file directly to verify caching works
300    use std::path::PathBuf;
301    use tokio::fs;
302
303    // Create cache path following the CDN cache structure
304    let cache_dir = dirs::cache_dir()
305        .unwrap_or_else(|| PathBuf::from(".cache"))
306        .join("ngdp")
307        .join("cdn")
308        .join(cdn_path)
309        .join("data")
310        .join(&archive_hash[0..2])
311        .join(&archive_hash[2..4]);
312
313    let cache_file = cache_dir.join(format!("{}.index", archive_hash));
314
315    // Check if cached
316    if cache_file.exists() {
317        debug!("Loading archive index {} from cache", archive_hash);
318        match fs::read(&cache_file).await {
319            Ok(bytes) => {
320                info!(
321                    "✓ Archive index {} loaded from cache ({} bytes)",
322                    archive_hash,
323                    bytes.len()
324                );
325                return Ok(bytes);
326            }
327            Err(e) => {
328                warn!("Failed to read cached archive index: {}", e);
329            }
330        }
331    }
332
333    // Not cached, download via direct HTTP (bypassing CDN client hash validation)
334    let hosts = vec![
335        "blzddist1-a.akamaihd.net",
336        "level3.blizzard.com",
337        "us.cdn.blizzard.com",
338        "cdn.arctium.tools",
339        "tact.mirror.reliquaryhq.com",
340    ];
341
342    let client = reqwest::Client::new();
343
344    for host in &hosts {
345        // Build URL: http://host/cdn_path/data/{hash[0:2]}/{hash[2:4]}/{hash}.index
346        let url = format!(
347            "http://{}/{}/data/{}/{}/{}.index",
348            host,
349            cdn_path,
350            &archive_hash[0..2],
351            &archive_hash[2..4],
352            archive_hash
353        );
354
355        debug!("Downloading archive index from: {}", url);
356
357        match client.get(&url).send().await {
358            Ok(response) => {
359                if response.status().is_success() {
360                    match response.bytes().await {
361                        Ok(bytes) => {
362                            info!(
363                                "✓ Downloaded archive index {} from {} ({} bytes)",
364                                archive_hash,
365                                host,
366                                bytes.len()
367                            );
368
369                            // Decompress BLTE if needed
370                            let decompressed = if bytes.starts_with(b"BLTE") {
371                                match blte::decompress_blte(bytes.to_vec(), None) {
372                                    Ok(data) => {
373                                        debug!(
374                                            "✓ Decompressed BLTE archive index: {} -> {} bytes",
375                                            bytes.len(),
376                                            data.len()
377                                        );
378                                        data
379                                    }
380                                    Err(e) => {
381                                        warn!("Failed to decompress BLTE archive index: {}", e);
382                                        bytes.to_vec()
383                                    }
384                                }
385                            } else {
386                                bytes.to_vec()
387                            };
388
389                            // Cache the decompressed archive index for future use
390                            if let Err(e) = fs::create_dir_all(&cache_dir).await {
391                                warn!("Failed to create cache directory: {}", e);
392                            } else if let Err(e) = fs::write(&cache_file, &decompressed).await {
393                                warn!("Failed to cache archive index {}: {}", archive_hash, e);
394                            } else {
395                                debug!(
396                                    "✓ Cached archive index {} at {:?}",
397                                    archive_hash, cache_file
398                                );
399                            }
400
401                            return Ok(decompressed);
402                        }
403                        Err(e) => {
404                            warn!("Failed to read response body from {}: {}", host, e);
405                        }
406                    }
407                } else {
408                    debug!(
409                        "HTTP {} from {} for archive index {}",
410                        response.status(),
411                        host,
412                        archive_hash
413                    );
414                }
415            }
416            Err(e) => {
417                debug!("Request failed to {}: {}", host, e);
418            }
419        }
420    }
421
422    Err(format!(
423        "Failed to download archive index {} from all CDNs",
424        archive_hash
425    )
426    .into())
427}
428
429/// Configuration for game installation
430#[derive(Debug, Clone)]
431struct GameInstallConfig {
432    /// Product to install
433    product: String,
434    /// Installation path
435    path: PathBuf,
436    /// Specific build to install (optional)
437    build: Option<String>,
438    /// Region for installation
439    region: Region,
440    /// Type of installation
441    install_type: CliInstallType,
442    /// Whether to verify files
443    verify: bool,
444    /// Whether this is a dry run
445    dry_run: bool,
446    /// Output format
447    format: OutputFormat,
448}
449
450/// Configuration for displaying installation plan
451#[derive(Debug)]
452struct InstallationPlanDisplay {
453    /// Product name
454    product: String,
455    /// Installation path
456    path: PathBuf,
457    /// Installation type
458    install_type: CliInstallType,
459    /// Manifest type
460    manifest_type: String,
461    /// Number of required files
462    required_files: usize,
463    /// Number of optional files
464    optional_files: usize,
465    /// Total size in bytes
466    total_size: u64,
467    /// Output format
468    format: OutputFormat,
469}
470
471/// Configuration for writing build info file
472#[derive(Debug)]
473struct BuildInfoConfig<'a> {
474    /// Installation path
475    install_path: &'a Path,
476    /// Product name
477    product: &'a str,
478    /// Version entry from Ribbit
479    version_entry: &'a ribbit_client::VersionEntry,
480    /// Build config hash
481    build_config_hash: &'a str,
482    /// CDN config hash
483    cdn_config_hash: &'a str,
484    /// Build configuration
485    build_config: &'a tact_parser::config::BuildConfig,
486    /// CDN entry
487    cdn_entry: &'a ribbit_client::CdnEntry,
488    /// Region
489    region: Region,
490}
491
492/// Handle the installation command
493pub async fn handle(
494    cmd: InstallCommands,
495    format: OutputFormat,
496) -> Result<(), Box<dyn std::error::Error>> {
497    match cmd {
498        InstallCommands::Game {
499            product,
500            path,
501            build,
502            region,
503            install_type,
504            resume,
505            verify,
506            dry_run,
507            max_concurrent: _,
508            tags: _,
509        } => {
510            let region = region.parse::<Region>().unwrap_or(Region::US);
511
512            // Check for resume mode
513            if resume {
514                let build_info_path = path.join(".build.info");
515                if build_info_path.exists() {
516                    info!(
517                        "🔄 Resume mode: Continuing existing installation at {:?}",
518                        path
519                    );
520                    return resume_installation(path.as_path(), format).await;
521                } else {
522                    return Err(format!(
523                        "Resume requested but no .build.info found at {}. Start with metadata-only installation first.",
524                        path.display()
525                    ).into());
526                }
527            }
528
529            // Normal installation flow
530            let config = GameInstallConfig {
531                product,
532                path,
533                build,
534                region,
535                install_type,
536                verify,
537                dry_run,
538                format,
539            };
540            handle_game_installation(config).await
541        }
542        InstallCommands::Repair {
543            path,
544            verify_checksums,
545            dry_run,
546            max_concurrent: _,
547        } => handle_repair_installation(path, verify_checksums, dry_run, format).await,
548    }
549}
550
551/// Handle normal game installation
552async fn handle_game_installation(
553    config: GameInstallConfig,
554) -> Result<(), Box<dyn std::error::Error>> {
555    let GameInstallConfig {
556        product,
557        path,
558        build,
559        region,
560        install_type,
561        verify,
562        dry_run,
563        format,
564    } = config;
565    info!("🚀 Starting installation of {} to {:?}", product, path);
566
567    if dry_run {
568        info!("🔍 DRY RUN mode - no files will be downloaded");
569    }
570
571    // Phase 1: Query product version
572    let version_entry = if let Some(build_str) = &build {
573        // For specific builds, try Wago Tools API first (for historical builds)
574        info!("🔍 Searching for build {} in Wago Tools API...", build_str);
575
576        let builds_response = wago_api::fetch_builds().await?;
577        let builds = wago_api::filter_builds_by_product(builds_response, &product);
578
579        if let Some(wago_build) = wago_api::find_build_by_id(&builds, build_str) {
580            info!(
581                "✓ Found build {} in historical data: {}",
582                build_str, wago_build.version
583            );
584
585            // Get current CDN config from the latest version since Wago might not have it
586            let version_client = HybridVersionClient::new(region).await?;
587            let current_versions = version_client.get_product_versions(&product).await?;
588            let current_cdn_config = current_versions
589                .entries
590                .first()
591                .map(|v| v.cdn_config.clone())
592                .unwrap_or_default();
593
594            // Use Wago's cdn_config if available, otherwise use current
595            let cdn_config = wago_build.cdn_config.clone().unwrap_or(current_cdn_config);
596
597            // Create a temporary version entry structure
598            use ribbit_client::VersionEntry;
599            VersionEntry {
600                region: region.to_string(),
601                build_config: wago_build.build_config.clone(),
602                cdn_config,
603                key_ring: None,
604                build_id: wago_api::extract_build_id(&wago_build.version)
605                    .and_then(|s| s.parse().ok())
606                    .unwrap_or(0),
607                versions_name: wago_build.version.clone(),
608                product_config: wago_build.product_config.clone().unwrap_or_default(),
609            }
610        } else {
611            // Fallback to current versions API
612            info!("🔍 Build not found in historical data, checking current versions...");
613            let version_client = HybridVersionClient::new(region).await?;
614            let versions = version_client.get_product_versions(&product).await?;
615
616            versions
617                .entries
618                .iter()
619                .find(|v| v.build_id.to_string() == *build_str || v.versions_name == *build_str)
620                .ok_or_else(|| {
621                    format!(
622                        "Build '{}' not found in current or historical versions",
623                        build_str
624                    )
625                })?
626                .clone()
627        }
628    } else {
629        // For latest build, use current versions API
630        info!("📋 Querying latest product version (HTTPS primary, Ribbit fallback)...");
631        let version_client = HybridVersionClient::new(region).await?;
632        let versions = version_client.get_product_versions(&product).await?;
633
634        versions
635            .entries
636            .first()
637            .ok_or("No versions available for product")?
638            .clone()
639    };
640
641    info!(
642        "📦 Selected build: {} ({})",
643        version_entry.versions_name, version_entry.build_id
644    );
645
646    let build_config_hash = &version_entry.build_config;
647    let cdn_config_hash = &version_entry.cdn_config;
648
649    // Phase 2: Download configurations
650    info!("📥 Downloading configurations...");
651
652    // Get CDN servers (need a fresh client since it might not exist if we used Wago)
653    let version_client = HybridVersionClient::new(region).await?;
654    let cdns = version_client.get_product_cdns(&product).await?;
655    let cdn_entry = cdns.entries.first().ok_or("No CDN servers available")?;
656
657    // Use the first host from the CDN entry (they're bare hostnames like "blzddist1-a.akamaihd.net")
658    let cdn_host = cdn_entry.hosts.first().ok_or("No CDN hosts available")?;
659
660    // Use the CDN path as announced by the server
661    let cdn_path = &cdn_entry.path;
662
663    debug!("Using CDN host: {} with path: {}", cdn_host, cdn_path);
664
665    // Create cached CDN client with automatic fallback support
666    let cdn_client = CachedCdnClient::new().await?;
667    // Add Blizzard CDN hosts from the product configuration
668    cdn_client.add_primary_hosts(cdn_entry.hosts.iter().cloned());
669    // Add community CDNs for fallback
670    cdn_client.add_fallback_host("cdn.arctium.tools");
671    cdn_client.add_fallback_host("tact.mirror.reliquaryhq.com");
672
673    // Download build config
674    let build_config_data = cdn_client
675        .download_build_config(&cdn_entry.hosts[0], cdn_path, build_config_hash)
676        .await?
677        .bytes()
678        .await?;
679    let build_config =
680        tact_parser::config::BuildConfig::parse(std::str::from_utf8(&build_config_data)?)?;
681    info!("✓ Build configuration loaded");
682
683    // Download CDN config
684    let cdn_config_data = cdn_client
685        .download_cdn_config(&cdn_entry.hosts[0], cdn_path, cdn_config_hash)
686        .await?
687        .bytes()
688        .await?;
689    let _cdn_config =
690        tact_parser::config::ConfigFile::parse(std::str::from_utf8(&cdn_config_data)?)?;
691    info!("✓ CDN configuration loaded");
692
693    // Phase 3: Download system files
694    info!("📥 Downloading system files...");
695
696    // Download encoding file
697    // The encoding field in build config contains two values:
698    // 1. Content key (first hash) - not used for direct download
699    // 2. Encoding key (second hash) - used to download from CDN
700    let encoding_value = build_config
701        .config
702        .get_value("encoding")
703        .ok_or("Missing encoding field")?;
704    let encoding_parts: Vec<&str> = encoding_value.split_whitespace().collect();
705
706    // Use the second hash (encoding key) if available, otherwise fall back to first
707    let encoding_ekey = if encoding_parts.len() >= 2 {
708        encoding_parts[1]
709    } else {
710        encoding_parts[0]
711    };
712
713    debug!("Downloading encoding file with ekey: {}", encoding_ekey);
714
715    let encoding_data = cdn_client
716        .download_data(&cdn_entry.hosts[0], cdn_path, encoding_ekey)
717        .await?
718        .bytes()
719        .await?;
720
721    // Decompress with BLTE if needed
722    let encoding_data = if encoding_data.starts_with(b"BLTE") {
723        blte::decompress_blte(encoding_data.to_vec(), None)?
724    } else {
725        encoding_data.to_vec()
726    };
727
728    let encoding_file = EncodingFile::parse(&encoding_data)?;
729    info!(
730        "✓ Encoding file loaded: {} CKey entries, {} EKey mappings",
731        encoding_file.ckey_count(),
732        encoding_file.ekey_count()
733    );
734
735    // Download ALL archive indices in parallel for complete coverage
736    info!("📦 Downloading ALL archive indices in parallel for complete coverage!");
737    let mut archive_index = ArchiveIndex::new();
738    let cdn_config_parsed =
739        tact_parser::config::CdnConfig::parse(std::str::from_utf8(&cdn_config_data)?)?;
740    let all_archives = cdn_config_parsed.archives();
741
742    info!("Found {} total archives available", all_archives.len());
743    info!(
744        "🚀 Downloading ALL {} archive indices in parallel (10 concurrent)...",
745        all_archives.len()
746    );
747
748    use futures::stream::{self, StreamExt};
749
750    // Load archive indices sequentially (they're cached, so should be fast)
751    info!(
752        "📥 Loading {} cached archive indices sequentially...",
753        all_archives.len()
754    );
755    let mut results = Vec::new();
756
757    for (i, archive_hash) in all_archives.iter().enumerate() {
758        let result = download_archive_index(&cdn_client, cdn_path, archive_hash).await;
759        results.push((i, archive_hash.to_string(), result));
760
761        // Show progress every 100 archives
762        if (i + 1) % 100 == 0 || i + 1 == all_archives.len() {
763            info!("📦 Loaded {}/{} archive indices", i + 1, all_archives.len());
764        }
765    }
766
767    let mut successful_archives = 0;
768    for (i, archive_hash, result) in results {
769        match result {
770            Ok(index_data) => match archive_index.parse_and_add_index(&archive_hash, &index_data) {
771                Ok(entries) => {
772                    debug!(
773                        "✓ [{}/{}] Indexed archive {} with {} entries",
774                        i + 1,
775                        all_archives.len(),
776                        archive_hash,
777                        entries
778                    );
779                    successful_archives += 1;
780                }
781                Err(e) => {
782                    warn!("Failed to parse archive index {}: {}", archive_hash, e);
783                }
784            },
785            Err(e) => {
786                warn!("Failed to download archive index {}: {}", archive_hash, e);
787            }
788        }
789    }
790
791    info!(
792        "✓ Archive indices loaded: {}/{} archives indexed, {} total entries",
793        successful_archives,
794        all_archives.len(),
795        archive_index.map.len()
796    );
797
798    // Debug: Show build config info for version verification
799    info!("Build Config Info:");
800    info!("  - Build Config Hash: {}", build_config_hash);
801    info!("  - CDN Config Hash: {}", cdn_config_hash);
802    if let Some(build_id) = build_config.config.get_value("build-id") {
803        info!("  - Build ID from config: {}", build_id);
804    }
805    if let Some(encoding_value) = build_config.config.get_value("encoding") {
806        info!("  - Encoding value: {}", encoding_value);
807    }
808    if let Some(install_value) = build_config.config.get_value("install") {
809        info!("  - Install value: {}", install_value);
810    }
811
812    info!(
813        "✓ Archive indices loaded, total entries: {}",
814        archive_index.map.len()
815    );
816
817    info!("DEBUG: About to get sample CKeys from encoding file...");
818    // Debug: Show a few sample content keys from encoding file
819    info!("Sample content keys from encoding file:");
820    for (i, ckey) in encoding_file.get_sample_ckeys(5).iter().enumerate() {
821        info!("  CKey[{}]: {}", i, ckey);
822    }
823    info!("DEBUG: Finished getting sample CKeys, moving to manifest processing...");
824
825    info!(
826        "🔄 Starting manifest download based on installation type: {:?}",
827        install_type
828    );
829    // Download manifests based on installation type
830    let (file_entries, manifest_type) = match install_type {
831        CliInstallType::Minimal => {
832            info!("📥 Processing minimal installation - using download manifest");
833            // TEMPORARY FIX: For minimal install, use download manifest and filter it
834            // The install manifest CKeys don't exist in encoding file for this build
835            let download_value = build_config
836                .config
837                .get_value("download")
838                .ok_or("Missing download field")?;
839            let download_parts: Vec<&str> = download_value.split_whitespace().collect();
840
841            let download_ekey = if download_parts.len() >= 2 {
842                download_parts[1].to_string()
843            } else {
844                let ckey = download_parts[0];
845                let ekey_bytes = encoding_file
846                    .lookup_by_ckey(&hex::decode(ckey)?)
847                    .and_then(|e| e.encoding_keys.first())
848                    .ok_or("Download file encoding key not found in encoding table")?;
849                hex::encode(ekey_bytes)
850            };
851
852            info!(
853                "📥 Downloading download manifest with ekey: {}",
854                download_ekey
855            );
856
857            let download_data = cdn_client
858                .download_data(&cdn_entry.hosts[0], cdn_path, &download_ekey)
859                .await?
860                .bytes()
861                .await?;
862
863            let download_data = if download_data.starts_with(b"BLTE") {
864                blte::decompress_blte(download_data.to_vec(), None)?
865            } else {
866                download_data.to_vec()
867            };
868
869            let download_manifest = DownloadManifest::parse(&download_data)?;
870            info!(
871                "✓ Download manifest loaded: {} files (filtering for minimal install)",
872                download_manifest.entries.len()
873            );
874
875            // Debug: Show a few sample EKeys from download manifest
876            info!("Sample EKeys from download manifest:");
877            for (i, (ekey, entry)) in download_manifest.entries.iter().enumerate() {
878                if i < 5 {
879                    info!(
880                        "  Download[{}]: {} (size: {} bytes)",
881                        i,
882                        hex::encode(ekey),
883                        entry.compressed_size
884                    );
885                } else {
886                    break;
887                }
888            }
889
890            // Test: Check if download manifest EKeys exist in archives (they should)
891            info!("Testing first few download manifest EKeys in archive indices:");
892            for (i, (ekey, entry)) in download_manifest.entries.iter().take(5).enumerate() {
893                let test_ekey = hex::encode(ekey);
894                match archive_index.lookup(ekey) {
895                    Some(location) => {
896                        info!(
897                            "  ✓ Download[{}]: {} FOUND in archive {} at offset {} (size: {})",
898                            i, test_ekey, location.archive_hash, location.offset, location.size
899                        );
900                    }
901                    None => {
902                        info!(
903                            "  ✗ Download[{}]: {} NOT FOUND in archives (size: {})",
904                            i, test_ekey, entry.compressed_size
905                        );
906                    }
907                }
908            }
909
910            // Convert download entries to common format (select first 10 for minimal)
911            let entries: Vec<FileEntry> = download_manifest
912                .entries
913                .iter()
914                .take(10)
915                .map(|(ekey, entry)| FileEntry {
916                    path: format!("file_{}", hex::encode(&ekey[..4])), // Generate path from EKey
917                    ckey: ekey.clone(), // For download manifest, we use EKey directly
918                    size: entry.compressed_size,
919                    priority: 0,
920                })
921                .collect();
922
923            info!(
924                "Selected {} files for minimal download install",
925                entries.len()
926            );
927            (entries, "download")
928        }
929        CliInstallType::Full | CliInstallType::Custom => {
930            info!("📥 Processing FULL/CUSTOM installation - using download manifest for all files");
931            // For full install, use download manifest (complete game files)
932            let download_value = build_config
933                .config
934                .get_value("download")
935                .ok_or("Missing download field")?;
936            let download_parts: Vec<&str> = download_value.split_whitespace().collect();
937
938            let download_ekey = if download_parts.len() >= 2 {
939                download_parts[1].to_string()
940            } else {
941                let ckey = download_parts[0];
942                let ekey_bytes = encoding_file
943                    .lookup_by_ckey(&hex::decode(ckey)?)
944                    .and_then(|e| e.encoding_keys.first())
945                    .ok_or("Download file encoding key not found in encoding table")?;
946                hex::encode(ekey_bytes)
947            };
948
949            debug!("Downloading download manifest with ekey: {}", download_ekey);
950
951            let download_data = cdn_client
952                .download_data(&cdn_entry.hosts[0], cdn_path, &download_ekey)
953                .await?
954                .bytes()
955                .await?;
956
957            let download_data = if download_data.starts_with(b"BLTE") {
958                blte::decompress_blte(download_data.to_vec(), None)?
959            } else {
960                download_data.to_vec()
961            };
962
963            let download_manifest = DownloadManifest::parse(&download_data)?;
964            info!(
965                "✓ Download manifest loaded: {} files (complete game)",
966                download_manifest.entries.len()
967            );
968
969            // Convert download entries to common format (no paths, just ekeys)
970            // NOTE: Use download manifest compressed_size but filter out unreasonable values
971            let mut total_entries = 0;
972            let mut skipped_not_in_encoding = 0;
973            let skipped_bad_size = 0;
974
975            let entries: Vec<FileEntry> = download_manifest
976                .entries
977                .iter()
978                .enumerate()
979                .filter_map(|(i, (_ekey, e))| {
980                    total_entries += 1;
981                    // Look up the CKey from the encoding file using the EKey
982                    if let Some(ckey) = encoding_file.lookup_by_ekey(&e.ekey) {
983                        // Get actual file size from encoding file (more reliable than download manifest)
984                        let file_size = encoding_file
985                            .get_file_size(ckey)
986                            .unwrap_or(e.compressed_size);
987
988                        Some(FileEntry {
989                            path: format!("data/{:08x}", i), // Generate placeholder path without .blte extension
990                            ckey: ckey.clone(),              // Use CKey from encoding file
991                            size: file_size, // Use size from encoding file if available
992                            priority: e.priority,
993                        })
994                    } else {
995                        skipped_not_in_encoding += 1;
996                        if skipped_not_in_encoding <= 5 {
997                            debug!("EKey {} not found in encoding file", hex::encode(&e.ekey));
998                        }
999                        None // Skip entries not found in encoding
1000                    }
1001                })
1002                .collect();
1003
1004            info!(
1005                "Download manifest processing: {} total entries, {} included, {} not in encoding, {} bad size",
1006                total_entries,
1007                entries.len(),
1008                skipped_not_in_encoding,
1009                skipped_bad_size
1010            );
1011
1012            (entries, "download")
1013        }
1014        CliInstallType::MetadataOnly => {
1015            // For metadata-only, we don't need any file entries
1016            (Vec::new(), "metadata-only")
1017        }
1018    };
1019
1020    // Phase 4: Build file list
1021    info!("📋 Building file manifest...");
1022
1023    let mut total_size = 0u64;
1024    let mut required_files = 0;
1025    let mut optional_files = 0;
1026
1027    for entry in &file_entries {
1028        // Check if file should be installed based on type
1029        let is_required = match install_type {
1030            CliInstallType::Minimal => is_required_file(&entry.path),
1031            CliInstallType::Full => true,
1032            CliInstallType::Custom => {
1033                // TODO: Implement tag filtering based on priority
1034                entry.priority <= 0 // High priority files only for now
1035            }
1036            CliInstallType::MetadataOnly => false, // No files are required for metadata-only
1037        };
1038
1039        if is_required {
1040            required_files += 1;
1041        } else {
1042            optional_files += 1;
1043        }
1044
1045        total_size += entry.size;
1046    }
1047
1048    // Display installation plan
1049    let plan = InstallationPlanDisplay {
1050        product: product.clone(),
1051        path: path.clone(),
1052        install_type,
1053        manifest_type: manifest_type.to_string(),
1054        required_files,
1055        optional_files,
1056        total_size,
1057        format,
1058    };
1059    display_installation_plan(&plan)?;
1060
1061    // Phase 5: Create directory structure
1062    info!("🗄️ Creating directory structure...");
1063    tokio::fs::create_dir_all(&path).await?;
1064    tokio::fs::create_dir_all(path.join("Data")).await?;
1065    tokio::fs::create_dir_all(path.join("Data/data")).await?;
1066    tokio::fs::create_dir_all(path.join("Data/config")).await?;
1067    info!("✓ Directory structure created");
1068
1069    // Phase 6: Write .build.info file for client functionality (even in dry-run mode)
1070    info!("📄 Writing .build.info file...");
1071    let build_info_config = BuildInfoConfig {
1072        install_path: path.as_path(),
1073        product: &product,
1074        version_entry: &version_entry,
1075        build_config_hash,
1076        cdn_config_hash,
1077        build_config: &build_config,
1078        cdn_entry,
1079        region,
1080    };
1081    write_build_info_file(build_info_config).await?;
1082    info!("✓ .build.info file written");
1083
1084    if dry_run {
1085        info!("✅ Dry run complete - no files were downloaded");
1086        return Ok(());
1087    }
1088
1089    // Write configuration files to Data/config/ for all installation types
1090    info!("📄 Writing configuration files to Data/config/...");
1091
1092    // Write build configuration using CDN-style subdirectory structure
1093    let build_config_subdir = format!("{}/{}", &build_config_hash[0..2], &build_config_hash[2..4]);
1094    let build_config_dir = path.join("Data/config").join(&build_config_subdir);
1095    tokio::fs::create_dir_all(&build_config_dir).await?;
1096    let build_config_path = build_config_dir.join(build_config_hash);
1097    tokio::fs::write(&build_config_path, &build_config_data).await?;
1098    info!(
1099        "✓ Saved build config: {}/{}",
1100        build_config_subdir, build_config_hash
1101    );
1102
1103    // Write CDN configuration using CDN-style subdirectory structure
1104    let cdn_config_subdir = format!("{}/{}", &cdn_config_hash[0..2], &cdn_config_hash[2..4]);
1105    let cdn_config_dir = path.join("Data/config").join(&cdn_config_subdir);
1106    tokio::fs::create_dir_all(&cdn_config_dir).await?;
1107    let cdn_config_path = cdn_config_dir.join(cdn_config_hash);
1108    tokio::fs::write(&cdn_config_path, &cdn_config_data).await?;
1109    info!(
1110        "✓ Saved CDN config: {}/{}",
1111        cdn_config_subdir, cdn_config_hash
1112    );
1113
1114    // Write encoding file info (just metadata, not the full file)
1115    let encoding_info_path = path.join("Data/config").join("encoding.info");
1116    let encoding_info = format!(
1117        "# Encoding file information\n\
1118        # Generated by cascette-rs\n\
1119        Encoding-Hash: {}\n\
1120        CKey-Count: {}\n\
1121        EKey-Count: {}\n\
1122        Build: {}\n\
1123        Product: {}\n\
1124        Region: {}\n",
1125        build_config
1126            .config
1127            .get_value("encoding")
1128            .unwrap_or("unknown")
1129            .split_whitespace()
1130            .next()
1131            .unwrap_or("unknown"),
1132        encoding_file.ckey_count(),
1133        encoding_file.ekey_count(),
1134        version_entry.build_id,
1135        product,
1136        region
1137    );
1138    tokio::fs::write(&encoding_info_path, encoding_info).await?;
1139    info!("✓ Saved encoding info: encoding.info");
1140
1141    // For metadata-only installations, we're done
1142    if install_type == CliInstallType::MetadataOnly {
1143        info!("✅ Metadata-only installation complete");
1144        info!("📋 Created: .build.info and Data/config/ with CDN-style structure");
1145        info!("💡 Use this for quick client comparison or as base for full installation");
1146        return Ok(());
1147    }
1148
1149    // Phase 7: Download files
1150    info!("📥 Downloading files...");
1151
1152    let pb = ProgressBar::new(total_size);
1153    pb.set_style(
1154        ProgressStyle::default_bar()
1155            .template("{spinner:.green} [{elapsed_precise}] [{bar:40.cyan/blue}] {bytes}/{total_bytes} ({eta})")?
1156            .progress_chars("#>-"),
1157    );
1158
1159    // Filter files to download
1160    let files_to_download: Vec<_> = file_entries
1161        .iter()
1162        .filter(|entry| {
1163            match install_type {
1164                CliInstallType::Minimal => {
1165                    // For minimal installs using download manifest, we already selected 10 files
1166                    // No need for additional filtering since we don't have real file paths
1167                    let include = manifest_type == "download" || is_required_file(&entry.path);
1168                    if !include {
1169                        debug!("Skipping file for minimal install: {}", entry.path);
1170                    } else {
1171                        debug!("Including file for minimal install: {}", entry.path);
1172                    }
1173                    include
1174                }
1175                CliInstallType::Full => true,
1176                CliInstallType::Custom => entry.priority <= 0, // High priority only for now
1177                CliInstallType::MetadataOnly => false, // Never download files for metadata-only
1178            }
1179        })
1180        .collect();
1181
1182    info!(
1183        "Files selected for download: {} out of {} total files",
1184        files_to_download.len(),
1185        file_entries.len()
1186    );
1187
1188    if files_to_download.is_empty() {
1189        error!("❌ No files selected for download! Check filtering logic.");
1190        return Ok(());
1191    }
1192
1193    info!("DEBUG: Passed file selection check, continuing to download setup...");
1194
1195    // Show first few files that will be downloaded
1196    for (i, entry) in files_to_download.iter().take(3).enumerate() {
1197        info!(
1198            "File {}: {} (ckey: {})",
1199            i + 1,
1200            entry.path,
1201            hex::encode(&entry.ckey)
1202        );
1203    }
1204
1205    info!(
1206        "Downloading {} files with parallel processing (max 10 concurrent)",
1207        files_to_download.len()
1208    );
1209
1210    // Use futures stream for parallel downloads with controlled concurrency
1211    use std::sync::Arc;
1212    use std::sync::atomic::{AtomicUsize, Ordering};
1213
1214    let downloaded_count = Arc::new(AtomicUsize::new(0));
1215    let error_count = Arc::new(AtomicUsize::new(0));
1216    let pb = Arc::new(pb);
1217    let cdn_client = Arc::new(cdn_client);
1218    let archive_index = Arc::new(archive_index);
1219    let encoding_file = Arc::new(encoding_file);
1220    let path = Arc::new(path);
1221
1222    info!("Starting download of {} files...", files_to_download.len());
1223
1224    // Simple test to verify async works
1225    info!("DEBUG: Testing async runtime...");
1226    tokio::time::sleep(tokio::time::Duration::from_millis(10)).await;
1227    info!("DEBUG: Async runtime works!");
1228
1229    info!("DEBUG: Creating stream iterator...");
1230    info!("Starting download of {} files", files_to_download.len());
1231
1232    // Check if we have any files to download
1233    if files_to_download.is_empty() {
1234        warn!("No files selected for download!");
1235        return Ok(());
1236    }
1237
1238    // Debug: Show actual files we're about to download
1239    info!("Files to download: {}", files_to_download.len());
1240    for (i, entry) in files_to_download.iter().take(5).enumerate() {
1241        info!(
1242            "  File {}: {} (size: {} bytes)",
1243            i + 1,
1244            entry.path,
1245            entry.size
1246        );
1247    }
1248
1249    // Process downloads with proper concurrency
1250    let total_files = files_to_download.len();
1251    info!("Starting to process {} files concurrently", total_files);
1252    let download_futures = stream::iter(files_to_download)
1253        .map(|entry| {
1254            let cdn_client = cdn_client.clone();
1255            let archive_index = archive_index.clone();
1256            let encoding_file = encoding_file.clone();
1257            let path = path.clone();
1258            let pb = pb.clone();
1259            let downloaded_count = downloaded_count.clone();
1260            let error_count = error_count.clone();
1261            let manifest_type = manifest_type.to_string();
1262            let entry = entry.clone(); // Clone the entry for the async block
1263
1264            async move {
1265                info!("DEBUG: Entered async closure for file: {}", entry.path);
1266                info!(
1267                    "Processing file: {} (ckey: {})",
1268                    entry.path,
1269                    hex::encode(&entry.ckey)
1270                );
1271
1272                // Create parent directory for the file
1273                let file_dir = path.join("Data/data");
1274                if let Err(e) = tokio::fs::create_dir_all(&file_dir).await {
1275                    warn!("Failed to create directory {}: {}", file_dir.display(), e);
1276                    error_count.fetch_add(1, Ordering::Relaxed);
1277                    return;
1278                }
1279
1280                // For install manifest entries, we need to look up the encoding key
1281                // For download manifest entries, we already have the encoding key
1282                let download_key = if manifest_type == "install" {
1283                    // Look up encoding key for content key
1284                    debug!(
1285                        "Looking up ckey: {} (path: {})",
1286                        hex::encode(&entry.ckey),
1287                        entry.path
1288                    );
1289                    if let Some(encoding_entry) = encoding_file.lookup_by_ckey(&entry.ckey) {
1290                        // Validate file size (catch corruption like 121TB files)
1291                        if encoding_entry.size > 10_000_000_000 {
1292                            // 10GB limit
1293                            debug!(
1294                                "Skipping file with suspicious size: {} bytes ({}GB) for path: {}",
1295                                encoding_entry.size,
1296                                encoding_entry.size / 1_000_000_000,
1297                                entry.path
1298                            );
1299                            return;
1300                        }
1301
1302                        if let Some(ekey) = encoding_entry.encoding_keys.first() {
1303                            debug!(
1304                                "Found ekey: {} for ckey: {}",
1305                                hex::encode(ekey),
1306                                hex::encode(&entry.ckey)
1307                            );
1308                            hex::encode(ekey)
1309                        } else {
1310                            warn!(
1311                                "No encoding key found for content key: {} (path: {}) - skipping",
1312                                hex::encode(&entry.ckey),
1313                                entry.path
1314                            );
1315                            return;
1316                        }
1317                    } else {
1318                        // Content key not found in encoding file, skip it
1319                        warn!(
1320                            "Content key not found in encoding file: {} (path: {}) - skipping",
1321                            hex::encode(&entry.ckey),
1322                            entry.path
1323                        );
1324                        return; // Skip files without encoding entries
1325                    }
1326                } else {
1327                    // Download manifest already has encoding keys
1328                    hex::encode(&entry.ckey)
1329                };
1330
1331                // Download file using archive-aware method
1332                info!(
1333                    "Attempting to download file: {} with key: {}",
1334                    entry.path, download_key
1335                );
1336                info!("DEBUG: About to call download_file_with_archive...");
1337                info!("Archive index has {} entries", archive_index.map.len());
1338                match download_file_with_archive(
1339                    &cdn_client,
1340                    &archive_index,
1341                    &cdn_entry.hosts[0],
1342                    cdn_path,
1343                    &download_key,
1344                )
1345                .await
1346                {
1347                    Ok(data) => {
1348                        // Store files in subdirectories based on first 2 bytes of hash (like CASC)
1349                        // e.g., ab/cd/abcdef...
1350                        let subdir1 = &download_key[0..2];
1351                        let subdir2 = &download_key[2..4];
1352                        let file_dir = path.join("Data/data").join(subdir1).join(subdir2);
1353
1354                        // Create subdirectory structure
1355                        if let Err(e) = tokio::fs::create_dir_all(&file_dir).await {
1356                            warn!("Failed to create directory {}: {}", file_dir.display(), e);
1357                            error_count.fetch_add(1, Ordering::Relaxed);
1358                            return;
1359                        }
1360
1361                        let file_path = file_dir.join(&download_key);
1362                        info!(
1363                            "Writing {} bytes to path: {}",
1364                            data.len(),
1365                            file_path.display()
1366                        );
1367                        if let Err(e) = tokio::fs::write(&file_path, &data).await {
1368                            warn!("Failed to write {}: {}", entry.path, e);
1369                            error_count.fetch_add(1, Ordering::Relaxed);
1370                        } else {
1371                            downloaded_count.fetch_add(1, Ordering::Relaxed);
1372                            pb.inc(entry.size);
1373                            info!(
1374                                "✓ Downloaded and wrote {} ({} bytes to {})",
1375                                entry.path,
1376                                data.len(),
1377                                file_path.display()
1378                            );
1379                        }
1380                    }
1381                    Err(e) => {
1382                        warn!("Failed to download {}: {}", entry.path, e);
1383                        error_count.fetch_add(1, Ordering::Relaxed);
1384                    }
1385                }
1386            }
1387        })
1388        .buffer_unordered(50) // Process up to 50 downloads concurrently
1389        .collect::<Vec<_>>();
1390
1391    info!("DEBUG: Awaiting all download futures...");
1392
1393    // Actually execute the futures and collect results
1394    let results: Vec<_> = download_futures.await;
1395    info!(
1396        "Download futures completed - processed {} results",
1397        results.len()
1398    );
1399
1400    info!("DEBUG: Stream processing completed");
1401    info!("Completed processing all file download tasks");
1402
1403    pb.finish_with_message("Download complete!");
1404
1405    let final_downloaded = downloaded_count.load(Ordering::Relaxed);
1406    let final_errors = error_count.load(Ordering::Relaxed);
1407
1408    info!(
1409        "✅ Installation completed: {} files downloaded, {} errors",
1410        final_downloaded, final_errors
1411    );
1412
1413    if verify {
1414        info!("🔍 Verifying installation...");
1415        // TODO: Implement verification
1416        info!("✓ Verification complete");
1417    }
1418
1419    Ok(())
1420}
1421
1422/// Check if a file is required for basic functionality
1423fn is_required_file(path: &str) -> bool {
1424    // Core executables and libraries
1425    if path.ends_with(".exe") || path.ends_with(".dll") || path.ends_with(".so") {
1426        return true;
1427    }
1428
1429    // Configuration files
1430    if path.contains("config") || path.ends_with(".ini") || path.ends_with(".xml") {
1431        return true;
1432    }
1433
1434    // Core data files - be more inclusive for WoW Classic Era
1435    if path.starts_with("Data/") {
1436        // Include DBC files which are critical for WoW
1437        if path.ends_with(".dbc") || path.ends_with(".db2") {
1438            return true;
1439        }
1440
1441        // Include patch and locale data
1442        if path.contains("patch") || path.contains("locale") || path.contains("enUS") {
1443            return true;
1444        }
1445
1446        // Include common WoW data directories
1447        if path.contains("base") || path.contains("core") || path.contains("common") {
1448            return true;
1449        }
1450    }
1451
1452    // For minimal installs, include some essential executables
1453    if path.ends_with("Wow.exe") || path.ends_with("WowClassic.exe") {
1454        return true;
1455    }
1456
1457    false
1458}
1459
1460/// Display installation plan to user
1461fn display_installation_plan(
1462    plan: &InstallationPlanDisplay,
1463) -> Result<(), Box<dyn std::error::Error>> {
1464    let InstallationPlanDisplay {
1465        product,
1466        path,
1467        install_type,
1468        manifest_type,
1469        required_files,
1470        optional_files,
1471        total_size,
1472        format,
1473    } = plan;
1474    match format {
1475        OutputFormat::Json | OutputFormat::JsonPretty => {
1476            let plan = serde_json::json!({
1477                "product": product,
1478                "path": path,
1479                "install_type": format!("{:?}", install_type),
1480                "manifest_type": manifest_type,
1481                "required_files": required_files,
1482                "optional_files": optional_files,
1483                "total_files": required_files + optional_files,
1484                "total_size": total_size,
1485                "total_size_human": format_bytes(*total_size),
1486            });
1487
1488            if matches!(format, OutputFormat::JsonPretty) {
1489                println!("{}", serde_json::to_string_pretty(&plan)?);
1490            } else {
1491                println!("{}", serde_json::to_string(&plan)?);
1492            }
1493        }
1494        OutputFormat::Text => {
1495            let mut table = Table::new();
1496            table
1497                .load_preset(UTF8_FULL)
1498                .set_content_arrangement(ContentArrangement::Dynamic)
1499                .set_header(vec!["Installation Plan", "Value"]);
1500
1501            table.add_row(vec![Cell::new("Product"), Cell::new(product)]);
1502            table.add_row(vec![
1503                Cell::new("Installation Path"),
1504                Cell::new(path.display()),
1505            ]);
1506            table.add_row(vec![
1507                Cell::new("Installation Type"),
1508                Cell::new(format!("{install_type:?}")),
1509            ]);
1510            table.add_row(vec![Cell::new("Manifest Type"), Cell::new(manifest_type)]);
1511            table.add_row(vec![Cell::new("Required Files"), Cell::new(required_files)]);
1512            table.add_row(vec![Cell::new("Optional Files"), Cell::new(optional_files)]);
1513            table.add_row(vec![
1514                Cell::new("Total Files"),
1515                Cell::new(required_files + optional_files),
1516            ]);
1517            table.add_row(vec![
1518                Cell::new("Total Size"),
1519                Cell::new(if *install_type == CliInstallType::MetadataOnly {
1520                    "Metadata only".to_string()
1521                } else {
1522                    format_bytes(*total_size)
1523                }),
1524            ]);
1525
1526            println!("{table}");
1527        }
1528        OutputFormat::Bpsv => {
1529            // Not applicable for installation plan
1530            return Err("BPSV format not supported for installation plan".into());
1531        }
1532    }
1533
1534    Ok(())
1535}
1536
1537/// Format bytes to human-readable string
1538fn format_bytes(bytes: u64) -> String {
1539    const UNITS: &[&str] = &["B", "KB", "MB", "GB", "TB"];
1540    let mut size = bytes as f64;
1541    let mut unit_index = 0;
1542
1543    while size >= 1024.0 && unit_index < UNITS.len() - 1 {
1544        size /= 1024.0;
1545        unit_index += 1;
1546    }
1547
1548    format!("{:.2} {}", size, UNITS[unit_index])
1549}
1550
1551/// Write .build.info file for client functionality
1552///
1553/// Creates a BPSV-formatted file containing build metadata required by the game client.
1554/// This file allows the client to identify its build version and connect to appropriate CDN servers.
1555async fn write_build_info_file(
1556    config: BuildInfoConfig<'_>,
1557) -> Result<(), Box<dyn std::error::Error>> {
1558    let BuildInfoConfig {
1559        install_path,
1560        product,
1561        version_entry,
1562        build_config_hash,
1563        cdn_config_hash,
1564        build_config,
1565        cdn_entry,
1566        region,
1567    } = config;
1568    // Extract install key from build config
1569    let install_value = build_config.config.get_value("install").unwrap_or("");
1570    let install_parts: Vec<&str> = install_value.split_whitespace().collect();
1571    let install_key = if install_parts.len() >= 2 {
1572        install_parts[1] // Use encoding key if available
1573    } else {
1574        install_parts.first().copied().unwrap_or("") // Fallback to content key
1575    };
1576
1577    // Create CDN hosts string (space-separated)
1578    let cdn_hosts = cdn_entry.hosts.join(" ");
1579
1580    // Create CDN servers string (space-separated with parameters)
1581    let cdn_servers = if cdn_entry.servers.is_empty() {
1582        // Generate default server URLs from hosts if servers list is empty
1583        cdn_entry
1584            .hosts
1585            .iter()
1586            .flat_map(|host| {
1587                vec![
1588                    format!("http://{}/?maxhosts=4", host),
1589                    format!("https://{}/?maxhosts=4&fallback=1", host),
1590                ]
1591            })
1592            .collect::<Vec<_>>()
1593            .join(" ")
1594    } else {
1595        cdn_entry.servers.join(" ")
1596    };
1597
1598    // Generate basic tags (platform/architecture)
1599    let tags = format!(
1600        "Windows x86_64 {}? acct-{}?",
1601        region.as_str().to_uppercase(),
1602        region.as_str().to_uppercase()
1603    );
1604
1605    // Build .build.info using BPSV builder
1606    let mut builder = BpsvBuilder::new();
1607
1608    // Add fields according to .build.info schema
1609    builder.add_field("Branch", BpsvFieldType::String(0))?;
1610    builder.add_field("Active", BpsvFieldType::Decimal(1))?;
1611    builder.add_field("Build Key", BpsvFieldType::Hex(16))?;
1612    builder.add_field("CDN Key", BpsvFieldType::Hex(16))?;
1613    builder.add_field("Install Key", BpsvFieldType::Hex(16))?;
1614    builder.add_field("IM Size", BpsvFieldType::Decimal(4))?;
1615    builder.add_field("CDN Path", BpsvFieldType::String(0))?;
1616    builder.add_field("CDN Hosts", BpsvFieldType::String(0))?;
1617    builder.add_field("CDN Servers", BpsvFieldType::String(0))?;
1618    builder.add_field("Tags", BpsvFieldType::String(0))?;
1619    builder.add_field("Armadillo", BpsvFieldType::String(0))?;
1620    builder.add_field("Last Activated", BpsvFieldType::String(0))?;
1621    builder.add_field("Version", BpsvFieldType::String(0))?;
1622    builder.add_field("KeyRing", BpsvFieldType::Hex(16))?;
1623    builder.add_field("Product", BpsvFieldType::String(0))?;
1624
1625    // Add the data row
1626    builder.add_row(vec![
1627        BpsvValue::String(region.as_str().to_string()), // Branch
1628        BpsvValue::Decimal(1),                          // Active (always 1)
1629        BpsvValue::Hex(build_config_hash.to_string()),  // Build Key
1630        BpsvValue::Hex(cdn_config_hash.to_string()),    // CDN Key
1631        BpsvValue::Hex(install_key.to_string()),        // Install Key
1632        BpsvValue::Decimal(0),                          // IM Size (empty)
1633        BpsvValue::String(cdn_entry.path.clone()),      // CDN Path
1634        BpsvValue::String(cdn_hosts),                   // CDN Hosts
1635        BpsvValue::String(cdn_servers),                 // CDN Servers
1636        BpsvValue::String(tags),                        // Tags
1637        BpsvValue::String(String::new()),               // Armadillo (empty)
1638        BpsvValue::String(String::new()),               // Last Activated (empty)
1639        BpsvValue::String(version_entry.versions_name.clone()), // Version
1640        BpsvValue::Hex(version_entry.key_ring.as_deref().unwrap_or("").to_string()), // KeyRing
1641        BpsvValue::String(product.to_string()),         // Product
1642    ])?;
1643
1644    // Build the BPSV content
1645    let build_info_content = builder.build_string()?;
1646
1647    // Write .build.info file to installation root directory
1648    let build_info_path = install_path.join(".build.info");
1649    tokio::fs::write(&build_info_path, build_info_content).await?;
1650
1651    debug!("Written .build.info to: {}", build_info_path.display());
1652    Ok(())
1653}
1654
1655/// Resume an existing installation by detecting missing files
1656async fn resume_installation(
1657    install_path: &Path,
1658    _format: OutputFormat,
1659) -> Result<(), Box<dyn std::error::Error>> {
1660    info!("📋 Reading installation metadata from .build.info...");
1661
1662    // Read and parse .build.info file
1663    let build_info_path = install_path.join(".build.info");
1664    let build_info_content = tokio::fs::read_to_string(&build_info_path).await?;
1665
1666    // Parse BPSV format to extract product, version, and CDN information
1667    let build_info = ngdp_bpsv::BpsvDocument::parse(&build_info_content)?;
1668
1669    // Extract key information from .build.info
1670    let rows = build_info.rows();
1671    if rows.is_empty() {
1672        return Err("No entries found in .build.info file".into());
1673    }
1674
1675    let schema = build_info.schema();
1676    let row = &rows[0]; // Use first entry
1677    let product = row
1678        .get_raw_by_name("Product", schema)
1679        .ok_or("Product not found in .build.info")?;
1680    let version = row
1681        .get_raw_by_name("Version", schema)
1682        .ok_or("Version not found in .build.info")?;
1683    let branch = row
1684        .get_raw_by_name("Branch", schema)
1685        .ok_or("Branch not found in .build.info")?;
1686    let build_key = row
1687        .get_raw_by_name("Build Key", schema)
1688        .ok_or("Build Key not found in .build.info")?;
1689    let cdn_path = row
1690        .get_raw_by_name("CDN Path", schema)
1691        .ok_or("CDN Path not found in .build.info")?;
1692    let cdn_hosts_str = row
1693        .get_raw_by_name("CDN Hosts", schema)
1694        .ok_or("CDN Hosts not found in .build.info")?;
1695
1696    // Parse CDN hosts (space-separated)
1697    let cdn_hosts: Vec<&str> = cdn_hosts_str.split_whitespace().collect();
1698    let cdn_host = cdn_hosts.first().ok_or("No CDN hosts available")?;
1699
1700    info!("🔄 Resuming installation:");
1701    info!("  • Product: {}", product);
1702    info!("  • Version: {}", version);
1703    info!("  • Branch: {}", branch);
1704    info!("  • Build Key: {}", build_key);
1705    info!("  • CDN Host: {}", cdn_host);
1706
1707    // Read build configuration from Data/config/ structure
1708    let build_config_subdir = format!("{}/{}", &build_key[0..2], &build_key[2..4]);
1709    let build_config_path = install_path
1710        .join("Data/config")
1711        .join(&build_config_subdir)
1712        .join(build_key);
1713
1714    if !build_config_path.exists() {
1715        return Err(format!(
1716            "Build configuration not found at: {}. The installation appears corrupted.",
1717            build_config_path.display()
1718        )
1719        .into());
1720    }
1721
1722    let build_config_data = tokio::fs::read_to_string(&build_config_path).await?;
1723    let build_config = tact_parser::config::BuildConfig::parse(&build_config_data)?;
1724
1725    info!("✓ Loaded build configuration from local cache");
1726
1727    // Get encoding file from config
1728    let encoding_value = build_config
1729        .config
1730        .get_value("encoding")
1731        .ok_or("Missing encoding field in build config")?;
1732    let encoding_parts: Vec<&str> = encoding_value.split_whitespace().collect();
1733    let encoding_ekey = if encoding_parts.len() >= 2 {
1734        encoding_parts[1]
1735    } else {
1736        encoding_parts[0]
1737    };
1738
1739    // Download and parse encoding file
1740    info!("📥 Downloading encoding file...");
1741    let cdn_client = CachedCdnClient::new().await?;
1742    cdn_client.add_primary_hosts(cdn_hosts.iter().map(|h| h.to_string()));
1743    // Add community CDNs for fallback
1744    cdn_client.add_fallback_host("cdn.arctium.tools");
1745    cdn_client.add_fallback_host("tact.mirror.reliquaryhq.com");
1746    let encoding_data = cdn_client
1747        .download_data(cdn_hosts[0], cdn_path, encoding_ekey)
1748        .await?
1749        .bytes()
1750        .await?;
1751
1752    let encoding_data = if encoding_data.starts_with(b"BLTE") {
1753        blte::decompress_blte(encoding_data.to_vec(), None)?
1754    } else {
1755        encoding_data.to_vec()
1756    };
1757
1758    let encoding_file = EncodingFile::parse(&encoding_data)?;
1759    info!("✓ Encoding file loaded");
1760
1761    // For resume, we'll create an empty archive index since we don't have CDN config readily available
1762    // This means we'll fall back to loose file downloads, which should work for resume scenarios
1763    let archive_index = ArchiveIndex::new();
1764    info!("📦 Using empty archive index for resume (loose file fallback)");
1765
1766    // Get install manifest information
1767    let install_value = build_config
1768        .config
1769        .get_value("install")
1770        .ok_or("Missing install field in build config")?;
1771    let install_parts: Vec<&str> = install_value.split_whitespace().collect();
1772
1773    let install_ekey = if install_parts.len() >= 2 {
1774        install_parts[1].to_string()
1775    } else {
1776        // Look up content key in encoding file
1777        let ckey = install_parts[0];
1778        let ekey_bytes = encoding_file
1779            .lookup_by_ckey(&hex::decode(ckey)?)
1780            .and_then(|e| e.encoding_keys.first())
1781            .ok_or("Install manifest encoding key not found")?;
1782        hex::encode(ekey_bytes)
1783    };
1784
1785    // Download and parse install manifest
1786    info!("📥 Downloading install manifest...");
1787    let install_data = cdn_client
1788        .download_data(cdn_hosts[0], cdn_path, &install_ekey)
1789        .await?
1790        .bytes()
1791        .await?;
1792
1793    let install_data = if install_data.starts_with(b"BLTE") {
1794        blte::decompress_blte(install_data.to_vec(), None)?
1795    } else {
1796        install_data.to_vec()
1797    };
1798
1799    let install_manifest = InstallManifest::parse(&install_data)?;
1800    info!(
1801        "📋 Install manifest loaded: {} files",
1802        install_manifest.entries.len()
1803    );
1804
1805    // Check which files are missing
1806    let data_dir = install_path.join("Data/data");
1807    tokio::fs::create_dir_all(&data_dir).await?;
1808
1809    let mut missing_files = Vec::new();
1810    let mut total_missing_size = 0u64;
1811
1812    info!("🔍 Checking for missing files...");
1813    for entry in &install_manifest.entries {
1814        // Look up encoding key for this content key
1815        if let Some(encoding_entry) = encoding_file.lookup_by_ckey(&entry.ckey) {
1816            if let Some(ekey) = encoding_entry.encoding_keys.first() {
1817                let ekey_hex = hex::encode(ekey);
1818                let expected_path = data_dir.join(&ekey_hex);
1819
1820                if !expected_path.exists() {
1821                    missing_files.push((entry, ekey_hex));
1822                    total_missing_size += entry.size as u64;
1823                }
1824            }
1825        }
1826    }
1827
1828    if missing_files.is_empty() {
1829        info!("✅ No missing files found - installation is complete!");
1830        return Ok(());
1831    }
1832
1833    info!(
1834        "📊 Found {} missing files ({} total)",
1835        missing_files.len(),
1836        format_bytes(total_missing_size)
1837    );
1838
1839    info!("📥 Downloading missing files...");
1840    let mut downloaded_count = 0;
1841    let mut error_count = 0;
1842
1843    for (entry, ekey_hex) in &missing_files {
1844        match download_file_with_archive(
1845            &cdn_client,
1846            &archive_index,
1847            cdn_hosts[0],
1848            cdn_path,
1849            ekey_hex,
1850        )
1851        .await
1852        {
1853            Ok(data) => {
1854                // Write file to disk
1855                let file_path = data_dir.join(ekey_hex);
1856                if let Err(e) = tokio::fs::write(&file_path, &data).await {
1857                    warn!("Failed to write {}: {}", entry.path, e);
1858                    error_count += 1;
1859                } else {
1860                    downloaded_count += 1;
1861                    if downloaded_count % 10 == 0 {
1862                        info!(
1863                            "📥 Downloaded {}/{} files...",
1864                            downloaded_count,
1865                            missing_files.len()
1866                        );
1867                    }
1868                }
1869            }
1870            Err(e) => {
1871                warn!("Failed to fetch {}: {}", entry.path, e);
1872                error_count += 1;
1873            }
1874        }
1875    }
1876
1877    info!(
1878        "✅ Resume completed: {} files downloaded, {} errors",
1879        downloaded_count, error_count
1880    );
1881
1882    Ok(())
1883}
1884
1885/// Handle repair of an existing installation
1886async fn handle_repair_installation(
1887    install_path: PathBuf,
1888    verify_checksums: bool,
1889    dry_run: bool,
1890    _format: OutputFormat,
1891) -> Result<(), Box<dyn std::error::Error>> {
1892    info!("🔧 Starting repair of installation at {:?}", install_path);
1893
1894    if dry_run {
1895        info!("🔍 DRY RUN mode - no files will be modified");
1896    }
1897
1898    let build_info_path = install_path.join(".build.info");
1899    if !build_info_path.exists() {
1900        return Err(format!(
1901            "No .build.info found at {}. This does not appear to be a valid installation.",
1902            install_path.display()
1903        )
1904        .into());
1905    }
1906
1907    if verify_checksums {
1908        info!("🔍 Verifying file checksums...");
1909        // TODO: Implement checksum verification
1910        info!("🚧 Checksum verification not yet implemented");
1911    }
1912
1913    // For now, repair is similar to resume - detect missing files
1914    info!("🔍 Checking for missing or corrupted files...");
1915
1916    if dry_run {
1917        info!("✅ Dry run completed - repair functionality in development");
1918    } else {
1919        info!("🚧 Repair functionality implementation in progress");
1920        info!(
1921            "💡 Use 'ngdp install game <product> --path {} --resume' for now",
1922            install_path.display()
1923        );
1924    }
1925
1926    Ok(())
1927}