rs_web/
build.rs

1//! Build orchestrator for static site generation
2
3use anyhow::{Context, Result};
4use log::{debug, info, trace};
5use rayon::prelude::*;
6use std::fs;
7use std::path::{Path, PathBuf};
8use std::sync::Arc;
9
10use crate::config::{Config, PageDef};
11use crate::markdown::{Pipeline, TransformContext};
12use crate::templates::Templates;
13use crate::tracker::{
14    AssetRef, BuildTracker, CachedDeps, SharedTracker, extract_html_asset_refs,
15    extract_markdown_asset_refs, resolve_url_to_source,
16};
17
/// Cache file name, relative to the project directory.
/// Holds the serialized dependency info (`CachedDeps`) from the previous
/// build, used to drive incremental rebuild decisions.
const CACHE_FILE: &str = ".rs-web-cache/deps.bin";
20
21/// Main build orchestrator
22pub struct Builder {
23    config: Config,
24    output_dir: PathBuf,
25    project_dir: PathBuf,
26    /// Build dependency tracker
27    tracker: SharedTracker,
28    /// Cached dependency info from previous build
29    cached_deps: Option<CachedDeps>,
30    /// Cached global data from last build
31    cached_global_data: Option<serde_json::Value>,
32    /// Cached page definitions from last build
33    cached_pages: Option<Vec<PageDef>>,
34}
35
36impl Builder {
37    pub fn new(config: Config, output_dir: PathBuf, project_dir: PathBuf) -> Self {
38        // Load cached deps from previous build
39        let cache_path = project_dir.join(CACHE_FILE);
40        let cached_deps = CachedDeps::load(&cache_path);
41        if cached_deps.is_some() {
42            debug!("Loaded cached dependency info from {:?}", cache_path);
43        }
44
45        // Get the tracker from config (it was created during config loading)
46        let tracker = config.tracker().clone();
47
48        Self {
49            config,
50            output_dir,
51            project_dir,
52            tracker,
53            cached_deps,
54            cached_global_data: None,
55            cached_pages: None,
56        }
57    }
58
59    /// Create a new builder with a fresh tracker (for full rebuilds)
60    pub fn new_with_tracker(project_dir: PathBuf, output_dir: PathBuf) -> Result<Self> {
61        let tracker = Arc::new(BuildTracker::new());
62        let config = Config::load_with_tracker(&project_dir, tracker.clone())?;
63
64        // Load cached deps from previous build
65        let cache_path = project_dir.join(CACHE_FILE);
66        let cached_deps = CachedDeps::load(&cache_path);
67        if cached_deps.is_some() {
68            debug!("Loaded cached dependency info from {:?}", cache_path);
69        }
70
71        Ok(Self {
72            config,
73            output_dir,
74            project_dir,
75            tracker,
76            cached_deps,
77            cached_global_data: None,
78            cached_pages: None,
79        })
80    }
81
82    /// Resolve a path relative to the project directory
83    fn resolve_path(&self, path: &str) -> PathBuf {
84        let p = Path::new(path);
85        if p.is_absolute() {
86            p.to_path_buf()
87        } else {
88            self.project_dir.join(path)
89        }
90    }
91
92    pub fn build(&mut self) -> Result<()> {
93        info!("Starting build");
94        debug!("Output directory: {:?}", self.output_dir);
95        debug!("Project directory: {:?}", self.project_dir);
96
97        // Stage 1: Clean output directory
98        trace!("Stage 1: Cleaning output directory");
99        self.clean()?;
100
101        // Run before_build hook (after clean, so it can write to output_dir)
102        trace!("Running before_build hook");
103        self.config.call_before_build()?;
104
105        // Stage 2: Call data() to get global data
106        trace!("Stage 2: Calling data() function");
107        let global_data = self.config.call_data()?;
108        debug!("Global data loaded");
109
110        // Stage 3: Call pages(global) to get page definitions
111        trace!("Stage 3: Calling pages() function");
112        let pages = self.config.call_pages(&global_data)?;
113        debug!("Found {} pages to generate", pages.len());
114
115        // Cache for incremental builds
116        self.cached_global_data = Some(global_data.clone());
117        self.cached_pages = Some(pages.clone());
118
119        // Stage 4: Load templates
120        trace!("Stage 5: Loading templates");
121        let templates = Templates::new(
122            &self.resolve_path(&self.config.paths.templates),
123            Some(self.tracker.clone()),
124        )?;
125
126        // Stage 6: Render all pages in parallel
127        trace!("Stage 6: Rendering {} pages", pages.len());
128        let pipeline = Pipeline::from_config(&self.config);
129        self.render_pages(&pages, &global_data, &templates, &pipeline)?;
130
131        info!("Build complete: {} pages generated", pages.len());
132        rs_print!("Generated {} pages", pages.len());
133
134        // Run after_build hook
135        trace!("Running after_build hook");
136        self.config.call_after_build()?;
137
138        // Merge all thread-local tracking data and save
139        self.tracker.merge_all_threads();
140        self.save_cached_deps()?;
141
142        Ok(())
143    }
144
145    /// Save tracked dependencies to cache file
146    fn save_cached_deps(&self) -> Result<()> {
147        let cache_path = self.project_dir.join(CACHE_FILE);
148        let deps = CachedDeps::from_tracker(&self.tracker);
149        deps.save(&cache_path)
150            .with_context(|| format!("Failed to save dependency cache to {:?}", cache_path))?;
151        debug!(
152            "Saved dependency cache: {} reads, {} writes",
153            deps.reads.len(),
154            deps.writes.len()
155        );
156        Ok(())
157    }
158
159    /// Get files that have changed since last build
160    pub fn get_changed_files(&self) -> Vec<PathBuf> {
161        match &self.cached_deps {
162            Some(cached) => self.tracker.get_changed_files(cached),
163            None => Vec::new(), // No cache means full rebuild needed
164        }
165    }
166
    /// Check if a full rebuild is needed (no cache or config changed)
    ///
    /// Note: this method itself only detects the missing-cache case; config
    /// changes arrive via `ChangeSet::full_rebuild` in `incremental_build`.
    pub fn needs_full_rebuild(&self) -> bool {
        self.cached_deps.is_none()
    }
171
172    /// Check if a file was tracked as a dependency in the last build
173    /// This includes both explicit reads (via Lua API) and implicit refs (via HTML/markdown)
174    pub fn is_tracked_file(&self, path: &Path) -> bool {
175        if let Some(ref cached) = self.cached_deps {
176            // Canonicalize path for comparison
177            let path = path.canonicalize().unwrap_or_else(|_| path.to_path_buf());
178            // Check if it was read via Lua API (copy_file, read_file, etc.)
179            if cached.reads.contains_key(&path) {
180                return true;
181            }
182            // Check if it's referenced in any page's HTML/markdown
183            if cached.asset_to_pages.contains_key(&path) {
184                return true;
185            }
186            false
187        } else {
188            // No cache, assume all files are relevant
189            true
190        }
191    }
192
193    /// Check if any tracked files have changed since last build
194    pub fn has_tracked_changes(&self) -> bool {
195        if let Some(ref cached) = self.cached_deps {
196            !self.tracker.get_changed_files(cached).is_empty()
197        } else {
198            true // No cache means we need to build
199        }
200    }
201
202    fn clean(&self) -> Result<()> {
203        if self.output_dir.exists() {
204            debug!("Removing existing output directory: {:?}", self.output_dir);
205            fs::remove_dir_all(&self.output_dir).with_context(|| {
206                format!("Failed to clean output directory: {:?}", self.output_dir)
207            })?;
208        }
209        trace!("Creating output directories");
210        fs::create_dir_all(&self.output_dir)?;
211        fs::create_dir_all(self.output_dir.join("static"))?;
212        Ok(())
213    }
214
215    /// Remove pages that existed in the old build but not in the new one
216    fn remove_stale_pages(&self, old_pages: &[PageDef], new_pages: &[PageDef]) -> Result<()> {
217        use std::collections::HashSet;
218
219        // Collect new page paths
220        let new_paths: HashSet<&str> = new_pages.iter().map(|p| p.path.as_str()).collect();
221
222        // Find and remove stale pages
223        for old_page in old_pages {
224            if !new_paths.contains(old_page.path.as_str()) {
225                let relative_path = old_page.path.trim_matches('/');
226
227                // Check if path has a file extension
228                let has_extension = relative_path.contains('.')
229                    && !relative_path.ends_with('/')
230                    && relative_path
231                        .rsplit('/')
232                        .next()
233                        .map(|s| s.contains('.'))
234                        .unwrap_or(false);
235
236                let file_path = if has_extension {
237                    self.output_dir.join(relative_path)
238                } else if relative_path.is_empty() {
239                    self.output_dir.join("index.html")
240                } else {
241                    self.output_dir.join(relative_path).join("index.html")
242                };
243
244                if file_path.exists() {
245                    rs_print!("  Removed: {}", old_page.path);
246                    fs::remove_file(&file_path)?;
247
248                    // Try to remove empty parent directory
249                    if let Some(parent) = file_path.parent()
250                        && parent != self.output_dir
251                        && parent.read_dir()?.next().is_none()
252                    {
253                        let _ = fs::remove_dir(parent);
254                    }
255                }
256            }
257        }
258
259        Ok(())
260    }
261
262    fn render_pages(
263        &self,
264        pages: &[PageDef],
265        global_data: &serde_json::Value,
266        templates: &Templates,
267        pipeline: &Pipeline,
268    ) -> Result<()> {
269        // Render all pages in parallel
270        pages
271            .par_iter()
272            .try_for_each(|page| self.render_single_page(page, global_data, templates, pipeline))?;
273
274        Ok(())
275    }
276
277    fn render_single_page(
278        &self,
279        page: &PageDef,
280        global_data: &serde_json::Value,
281        templates: &Templates,
282        pipeline: &Pipeline,
283    ) -> Result<()> {
284        trace!("Rendering page: {}", page.path);
285
286        // Process content through markdown pipeline if provided
287        let html_content = if let Some(ref markdown) = page.content {
288            let ctx = TransformContext {
289                config: &self.config,
290                current_path: &self.project_dir,
291                base_url: &self.config.site.base_url,
292            };
293            Some(pipeline.process(markdown, &ctx))
294        } else {
295            page.html.clone()
296        };
297
298        // If no template, output html directly (for raw text/xml files)
299        let html = if page.template.is_none() {
300            html_content.unwrap_or_default()
301        } else {
302            templates.render_page(&self.config, page, global_data, html_content.as_deref())?
303        };
304
305        // Extract asset references from the generated HTML and markdown content
306        self.extract_and_record_asset_refs(page, &html);
307
308        // Minify HTML if enabled (default: true)
309        let html = if page.minify {
310            minify_html(&html)
311        } else {
312            html
313        };
314
315        // Write output file
316        let relative_path = page.path.trim_matches('/');
317
318        // Check if path has a file extension (e.g., feed.xml, sitemap.json)
319        let has_extension = relative_path.contains('.')
320            && !relative_path.ends_with('/')
321            && relative_path
322                .rsplit('/')
323                .next()
324                .map(|s| s.contains('.'))
325                .unwrap_or(false);
326
327        if has_extension {
328            // Write directly to file path (e.g., /feed.xml -> dist/feed.xml)
329            let file_path = self.output_dir.join(relative_path);
330            if let Some(parent) = file_path.parent() {
331                fs::create_dir_all(parent)?;
332            }
333            fs::write(file_path, html)?;
334        } else {
335            // Write to directory with index.html (e.g., /about/ -> dist/about/index.html)
336            let page_dir = if relative_path.is_empty() {
337                self.output_dir.clone()
338            } else {
339                self.output_dir.join(relative_path)
340            };
341            fs::create_dir_all(&page_dir)?;
342            fs::write(page_dir.join("index.html"), html)?;
343        }
344
345        Ok(())
346    }
347
348    /// Extract asset references from HTML and markdown, record them in tracker
349    fn extract_and_record_asset_refs(&self, page: &PageDef, html: &str) {
350        // Extract from HTML
351        let mut url_paths: Vec<String> = extract_html_asset_refs(html);
352
353        // Also extract from markdown content if present
354        if let Some(ref markdown) = page.content {
355            let md_refs = extract_markdown_asset_refs(markdown);
356            url_paths.extend(md_refs);
357        }
358
359        if url_paths.is_empty() {
360            return;
361        }
362
363        // Get writes to resolve URL paths to source files
364        let writes = self.tracker.get_writes();
365
366        // Convert URL paths to AssetRefs with source resolution
367        let asset_refs: Vec<AssetRef> = url_paths
368            .into_iter()
369            .map(|url_path| {
370                let source_path =
371                    resolve_url_to_source(&url_path, &self.output_dir, &writes, &self.project_dir);
372                AssetRef {
373                    url_path,
374                    source_path,
375                }
376            })
377            .collect();
378
379        // Record in tracker
380        let page_path = PathBuf::from(&page.path);
381        self.tracker.record_html_refs(page_path, asset_refs);
382    }
383
384    /// Perform an incremental build based on what changed
385    /// Uses tracker data to filter changes to only files that were actually used
386    pub fn incremental_build(&mut self, changes: &crate::watch::ChangeSet) -> Result<()> {
387        debug!("Starting incremental build");
388        trace!("Change set: {:?}", changes);
389
390        // Config changed - full rebuild needed (Lua functions may have changed)
391        if changes.full_rebuild {
392            return self.build();
393        }
394
395        // Filter content changes to only files that were tracked as dependencies
396        let relevant_content: Vec<PathBuf> = changes
397            .content_files
398            .iter()
399            .filter(|p| {
400                let full_path = self.project_dir.join(p);
401                let is_tracked = self.is_tracked_file(&full_path);
402                if !is_tracked {
403                    trace!("Skipping untracked content file: {:?}", p);
404                }
405                is_tracked
406            })
407            .map(|p| self.project_dir.join(p))
408            .collect();
409
410        // Filter asset changes to only files that were tracked as dependencies
411        let relevant_assets: Vec<PathBuf> = changes
412            .asset_files
413            .iter()
414            .filter(|p| {
415                let full_path = self.project_dir.join(p);
416                let is_tracked = self.is_tracked_file(&full_path);
417                if !is_tracked {
418                    trace!("Skipping untracked asset file: {:?}", p);
419                }
420                is_tracked
421            })
422            .map(|p| self.project_dir.join(p))
423            .collect();
424
425        // Log skipped files
426        if !changes.content_files.is_empty() && relevant_content.is_empty() {
427            debug!(
428                "All {} content files were untracked, skipping",
429                changes.content_files.len()
430            );
431        }
432        if !changes.asset_files.is_empty() && relevant_assets.is_empty() {
433            debug!(
434                "All {} asset files were untracked, skipping",
435                changes.asset_files.len()
436            );
437        }
438
439        // Handle asset changes first (before_build runs copy_file, etc.)
440        if !relevant_assets.is_empty() {
441            debug!(
442                "{} tracked assets changed (out of {} total)",
443                relevant_assets.len(),
444                changes.asset_files.len()
445            );
446            self.rebuild_assets_only(&relevant_assets)?;
447        }
448
449        // Handle CSS changes
450        if changes.rebuild_css {
451            self.rebuild_css_only()?;
452        }
453
454        // Content files changed - try incremental update
455        if !relevant_content.is_empty() {
456            debug!(
457                "{} tracked content files changed (out of {} total)",
458                relevant_content.len(),
459                changes.content_files.len()
460            );
461            return self.rebuild_content_only(&relevant_content);
462        }
463
464        // Template changes - re-render affected pages with cached data (skip Lua calls)
465        if changes.has_template_changes() {
466            for path in &changes.template_files {
467                rs_print!("  Changed: {}", path.display());
468            }
469            return self.rebuild_templates_only(&changes.template_files);
470        }
471
472        Ok(())
473    }
474
475    /// Rebuild content - use incremental update if available, otherwise full data reload
476    fn rebuild_content_only(&mut self, changed_paths: &[PathBuf]) -> Result<()> {
477        debug!(
478            "Content-only rebuild for {} changed files",
479            changed_paths.len()
480        );
481
482        // Print changed files
483        for path in changed_paths {
484            if let Ok(rel) = path.strip_prefix(&self.project_dir) {
485                rs_print!("  Changed: {}", rel.display());
486            } else {
487                rs_print!("  Changed: {}", path.display());
488            }
489        }
490
491        // Try incremental update if update_data function exists and we have cached data
492        let global_data = if self.config.has_update_data() && self.cached_global_data.is_some() {
493            debug!("Using incremental update_data()");
494            let cached = self.cached_global_data.as_ref().unwrap();
495            // Convert absolute paths to relative paths for Lua
496            let relative_paths: Vec<PathBuf> = changed_paths
497                .iter()
498                .filter_map(|p| {
499                    p.strip_prefix(&self.project_dir)
500                        .ok()
501                        .map(|r| r.to_path_buf())
502                })
503                .collect();
504            self.config.call_update_data(cached, &relative_paths)?
505        } else {
506            debug!("Using full data() reload");
507            self.config.call_data()?
508        };
509
510        let pages = self.config.call_pages(&global_data)?;
511
512        // Remove stale pages that no longer exist in the new page list
513        if let Some(ref old_pages) = self.cached_pages {
514            self.remove_stale_pages(old_pages, &pages)?;
515        }
516
517        // Update cache
518        self.cached_global_data = Some(global_data.clone());
519        self.cached_pages = Some(pages.clone());
520
521        // Reload templates and re-render
522        let templates = Templates::new(
523            &self.resolve_path(&self.config.paths.templates),
524            Some(self.tracker.clone()),
525        )?;
526        let pipeline = Pipeline::from_config(&self.config);
527        self.render_pages(&pages, &global_data, &templates, &pipeline)?;
528
529        // Merge thread-local tracking data and save
530        self.tracker.merge_all_threads();
531        self.save_cached_deps()?;
532
533        rs_print!("Re-rendered {} pages (content changed)", pages.len());
534        Ok(())
535    }
536
537    /// Rebuild only by re-rendering templates with cached data
538    fn rebuild_templates_only(
539        &mut self,
540        changed_template_files: &std::collections::HashSet<PathBuf>,
541    ) -> Result<()> {
542        let (global_data, all_pages) = match (&self.cached_global_data, &self.cached_pages) {
543            (Some(data), Some(pages)) => (data.clone(), pages.clone()),
544            _ => {
545                // No cache available, do a full build to populate it
546                log::info!("No cached data available, performing full build");
547                return self.build();
548            }
549        };
550
551        // Reload templates and get dependency graph
552        let template_dir = self.resolve_path(&self.config.paths.templates);
553        let templates = Templates::new(&template_dir, Some(self.tracker.clone()))?;
554        let deps = templates.deps();
555
556        // Find all affected templates (transitively)
557        let mut affected_templates = std::collections::HashSet::new();
558        for changed_path in changed_template_files {
559            // Find template name from path
560            if let Some(template_name) = deps.find_template_by_path(changed_path) {
561                let transitive = deps.get_affected_templates(template_name);
562                affected_templates.extend(transitive);
563            } else if let Ok(rel_path) = changed_path.strip_prefix(&template_dir) {
564                // Try relative path as template name
565                let template_name = rel_path.to_string_lossy().to_string();
566                let transitive = deps.get_affected_templates(&template_name);
567                affected_templates.extend(transitive);
568            }
569        }
570
571        debug!("Affected templates: {:?}", affected_templates);
572
573        // Filter pages to only those using affected templates
574        let pages_to_rebuild: Vec<_> = all_pages
575            .iter()
576            .filter(|page| {
577                if let Some(ref template) = page.template {
578                    affected_templates.contains(template)
579                } else {
580                    false
581                }
582            })
583            .cloned()
584            .collect();
585
586        if pages_to_rebuild.is_empty() {
587            rs_print!("No pages affected by template changes");
588            return Ok(());
589        }
590
591        debug!(
592            "Template rebuild: {} of {} pages affected",
593            pages_to_rebuild.len(),
594            all_pages.len()
595        );
596
597        let pipeline = Pipeline::from_config(&self.config);
598
599        // Re-render only affected pages with cached data
600        self.render_pages(&pages_to_rebuild, &global_data, &templates, &pipeline)?;
601
602        rs_print!(
603            "Re-rendered {} of {} pages (templates changed)",
604            pages_to_rebuild.len(),
605            all_pages.len()
606        );
607        Ok(())
608    }
609
    /// Rebuild CSS by calling before_build hook (CSS is now handled via Lua)
    fn rebuild_css_only(&self) -> Result<()> {
        rs_print!("  Changed: styles");
        // The Lua before_build hook is responsible for regenerating CSS output.
        self.config.call_before_build()?;
        rs_print!("Rebuilt CSS");
        Ok(())
    }
617
618    /// Rebuild assets by calling before_build hook (re-copies static files)
619    fn rebuild_assets_only(&self, changed_paths: &[PathBuf]) -> Result<()> {
620        for path in changed_paths {
621            if let Ok(rel) = path.strip_prefix(&self.project_dir) {
622                rs_print!("  Changed: {}", rel.display());
623            } else {
624                rs_print!("  Changed: {}", path.display());
625            }
626        }
627        self.config.call_before_build()?;
628        rs_print!("Rebuilt {} assets", changed_paths.len());
629        Ok(())
630    }
631
    /// Reload config from disk
    ///
    /// Also discards the cached global data and page definitions, since the
    /// reloaded Lua functions may produce different output.
    pub fn reload_config(&mut self) -> Result<()> {
        debug!("Reloading config from {:?}", self.project_dir);
        // NOTE(review): this uses Config::load, not load_with_tracker as in
        // new_with_tracker — confirm the reloaded config is still wired to
        // the shared tracker as intended.
        self.config = crate::config::Config::load(&self.project_dir)?;
        // Clear cache since Lua functions might produce different output
        self.cached_global_data = None;
        self.cached_pages = None;
        info!("Config reloaded successfully");
        Ok(())
    }
642
    /// Get a reference to the current config
    /// (read-only; use `reload_config` to replace it).
    pub fn config(&self) -> &Config {
        &self.config
    }
647}
648
649/// Minify HTML content with OXC-based inline JS minification
650fn minify_html(html: &str) -> String {
651    // First, minify inline JS with OXC (minify-js has bugs)
652    let html = minify_inline_js(html);
653
654    let cfg = minify_html::Cfg {
655        minify_js: false,
656        minify_css: true,
657        ..Default::default()
658    };
659    let minified = minify_html::minify(html.as_bytes(), &cfg);
660    String::from_utf8(minified).unwrap_or_else(|_| html.to_string())
661}
662
663/// Minify inline <script> tags using OXC
664fn minify_inline_js(html: &str) -> String {
665    use oxc_allocator::Allocator;
666    use oxc_codegen::{Codegen, CodegenOptions};
667    use oxc_minifier::{CompressOptions, MangleOptions, Minifier, MinifierOptions};
668    use oxc_parser::Parser;
669    use oxc_span::SourceType;
670    use regex::Regex;
671
672    let re = Regex::new(r"(?s)(<script(?:\s[^>]*)?>)(.*?)(</script>)").unwrap();
673
674    re.replace_all(html, |caps: &regex::Captures| {
675        let open_tag = &caps[1];
676        let content = &caps[2];
677        let close_tag = &caps[3];
678
679        // Skip external scripts (src=) or empty scripts
680        if open_tag.contains("src=") || content.trim().is_empty() {
681            return format!("{}{}{}", open_tag, content, close_tag);
682        }
683
684        // Try to minify with OXC
685        let allocator = Allocator::default();
686        let source_type = SourceType::mjs();
687        let ret = Parser::new(&allocator, content, source_type).parse();
688
689        if !ret.errors.is_empty() {
690            // Parse error - return original
691            return format!("{}{}{}", open_tag, content, close_tag);
692        }
693
694        let mut program = ret.program;
695        let options = MinifierOptions {
696            mangle: Some(MangleOptions::default()),
697            compress: Some(CompressOptions::default()),
698        };
699
700        Minifier::new(options).minify(&allocator, &mut program);
701        let minified = Codegen::new()
702            .with_options(CodegenOptions::minify())
703            .build(&program)
704            .code;
705
706        format!("{}{}{}", open_tag, minified, close_tag)
707    })
708    .to_string()
709}
710
#[cfg(test)]
mod tests {
    // Unit tests for the minification helpers. They exercise minify_html /
    // minify_inline_js against small inline fixtures; assertions are kept
    // loose (length/substring checks) because exact minifier output depends
    // on the minify-html and OXC crate versions.
    use super::*;

    #[test]
    fn test_minify_html_basic() {
        let input = "<html>  <body>   <p>Hello</p>  </body>  </html>";
        let result = minify_html(input);
        // Minification must not grow the document and must keep the text.
        assert!(result.len() <= input.len());
        assert!(result.contains("Hello"));
    }

    #[test]
    fn test_minify_html_preserves_pre() {
        let input = "<pre>  code  with  spaces  </pre>";
        let result = minify_html(input);
        // Pre tags should preserve whitespace
        assert!(result.contains("code  with  spaces"));
    }

    #[test]
    fn test_minify_inline_js_basic() {
        let input = r#"<script>
            function hello() {
                console.log("hi");
            }
        </script>"#;
        let result = minify_inline_js(input);
        // The minified script should have fewer (or no) newlines,
        // while the surrounding tags survive intact.
        assert!(
            !result.contains('\n') || result.matches('\n').count() < input.matches('\n').count()
        );
        assert!(result.contains("<script>"));
        assert!(result.contains("</script>"));
    }

    #[test]
    fn test_minify_inline_js_skips_external() {
        // Scripts with a src= attribute must pass through untouched.
        let input = r#"<script src="/js/app.js"></script>"#;
        let result = minify_inline_js(input);
        assert_eq!(result, input);
    }

    #[test]
    fn test_minify_inline_js_skips_empty() {
        // Empty script bodies must pass through untouched.
        let input = "<script></script>";
        let result = minify_inline_js(input);
        assert_eq!(result, input);
    }

    #[test]
    fn test_minify_inline_js_multiple_scripts() {
        // Use console.log to prevent DCE
        let input = r#"<script>console.log(1);</script><script>console.log(2);</script>"#;
        let result = minify_inline_js(input);
        // Both script bodies must survive minification independently.
        assert!(
            result.contains("console.log(1)") && result.contains("console.log(2)"),
            "Result: {}",
            result
        );
    }

    #[test]
    fn test_minify_inline_js_preserves_on_parse_error() {
        let input = "<script>function { broken</script>";
        let result = minify_inline_js(input);
        // Should preserve original on parse error
        assert!(result.contains("function { broken"));
    }

    #[test]
    fn test_minify_inline_js_with_attributes() {
        // Use console.log to prevent DCE
        let input = r#"<script type="text/javascript">console.log(1);</script>"#;
        let result = minify_inline_js(input);
        // Attributes on the opening tag must be preserved verbatim.
        assert!(result.contains(r#"type="text/javascript""#));
    }

    #[test]
    fn test_minify_html_with_inline_js() {
        // Use console.log to prevent DCE
        let input = r#"<html><head><script>console.log(true);</script></head></html>"#;
        let result = minify_html(input);
        // Should minify JS (true -> !0)
        assert!(
            result.contains("!0") || result.contains("true"),
            "Result: {}",
            result
        );
    }

    #[test]
    fn test_minify_html_css_minification() {
        // Inline <style> should shrink because minify_css is enabled.
        let input = r#"<style>  body  {  color:  red;  }  </style>"#;
        let result = minify_html(input);
        assert!(result.len() < input.len());
    }
}