sampo_core/
release.rs

1use crate::types::{Bump, CrateInfo, DependencyUpdate, ReleaseOutput, ReleasedPackage, Workspace};
2use crate::{
3    changeset::ChangesetInfo, config::Config, detect_changesets_dir,
4    detect_github_repo_slug_with_config, discover_workspace, enrich_changeset_message,
5    get_commit_hash_for_path, load_changesets,
6};
7use rustc_hash::FxHashSet;
8use std::collections::{BTreeMap, BTreeSet};
9use std::fs;
10use std::io;
11use std::path::{Component, Path};
12
13/// Format dependency updates for changelog display
14///
15/// Creates a message in the style of Changesets for dependency updates,
16/// e.g., "Updated dependencies [hash]: pkg1@1.2.0, pkg2@2.0.0"
17pub fn format_dependency_updates_message(updates: &[DependencyUpdate]) -> Option<String> {
18    if updates.is_empty() {
19        return None;
20    }
21
22    let dep_list = updates
23        .iter()
24        .map(|dep| format!("{}@{}", dep.name, dep.new_version))
25        .collect::<Vec<_>>()
26        .join(", ");
27
28    Some(format!("Updated dependencies: {}", dep_list))
29}
30
31/// Convert a list of (name, version) tuples into DependencyUpdate structs
32pub fn build_dependency_updates(updates: &[(String, String)]) -> Vec<DependencyUpdate> {
33    updates
34        .iter()
35        .map(|(name, version)| DependencyUpdate {
36            name: name.clone(),
37            new_version: version.clone(),
38        })
39        .collect()
40}
41
42/// Create a changelog entry for dependency updates
43///
44/// Returns a tuple of (message, bump_type) suitable for adding to changelog messages
45pub fn create_dependency_update_entry(updates: &[DependencyUpdate]) -> Option<(String, Bump)> {
46    format_dependency_updates_message(updates).map(|msg| (msg, Bump::Patch))
47}
48
49/// Create a changelog entry for fixed dependency group policy
50///
51/// Returns a tuple of (message, bump_type) suitable for adding to changelog messages
52pub fn create_fixed_dependency_policy_entry(bump: Bump) -> (String, Bump) {
53    (
54        "Bumped due to fixed dependency group policy".to_string(),
55        bump,
56    )
57}
58
59/// Infer bump type from version changes
60///
61/// This helper function determines the semantic version bump type based on
62/// the difference between old and new version strings.
63pub fn infer_bump_from_versions(old_ver: &str, new_ver: &str) -> Bump {
64    let old_parts: Vec<u32> = old_ver.split('.').filter_map(|s| s.parse().ok()).collect();
65    let new_parts: Vec<u32> = new_ver.split('.').filter_map(|s| s.parse().ok()).collect();
66
67    if old_parts.len() >= 3 && new_parts.len() >= 3 {
68        if new_parts[0] > old_parts[0] {
69            Bump::Major
70        } else if new_parts[1] > old_parts[1] {
71            Bump::Minor
72        } else {
73            Bump::Patch
74        }
75    } else {
76        Bump::Patch
77    }
78}
79
80/// Detect all dependency-related explanations for package releases
81///
82/// This function is the unified entry point for detecting all types of automatic
83/// dependency-related changelog entries. It identifies:
84/// - Packages bumped due to internal dependency updates ("Updated dependencies: ...")
85/// - Packages bumped due to fixed dependency group policy ("Bumped due to fixed dependency group policy")
86///
87/// # Arguments
88/// * `changesets` - The changesets being processed
89/// * `workspace` - The workspace containing all packages
90/// * `config` - The configuration with dependency policies
91/// * `releases` - Map of package name to (old_version, new_version) for all planned releases
92///
93/// # Returns
94/// A map of package name to list of (message, bump_type) explanations to add to changelogs
95pub fn detect_all_dependency_explanations(
96    changesets: &[ChangesetInfo],
97    workspace: &Workspace,
98    config: &Config,
99    releases: &BTreeMap<String, (String, String)>,
100) -> BTreeMap<String, Vec<(String, Bump)>> {
101    let mut messages_by_pkg: BTreeMap<String, Vec<(String, Bump)>> = BTreeMap::new();
102
103    // 1. Detect packages bumped due to fixed dependency group policy
104    let bumped_packages: BTreeSet<String> = releases.keys().cloned().collect();
105    let policy_packages =
106        detect_fixed_dependency_policy_packages(changesets, workspace, config, &bumped_packages);
107
108    for (pkg_name, policy_bump) in policy_packages {
109        // For accurate bump detection, infer from actual version changes
110        let actual_bump = if let Some((old_ver, new_ver)) = releases.get(&pkg_name) {
111            infer_bump_from_versions(old_ver, new_ver)
112        } else {
113            policy_bump
114        };
115
116        let (msg, bump_type) = create_fixed_dependency_policy_entry(actual_bump);
117        messages_by_pkg
118            .entry(pkg_name)
119            .or_default()
120            .push((msg, bump_type));
121    }
122
123    // 2. Detect packages bumped due to internal dependency updates
124    // Note: Even packages with explicit changesets can have dependency updates
125
126    // Build new version lookup from releases
127    let new_version_by_name: BTreeMap<String, String> = releases
128        .iter()
129        .map(|(name, (_old, new_ver))| (name.clone(), new_ver.clone()))
130        .collect();
131
132    // Build map of crate name -> CrateInfo for quick lookup
133    let by_name: BTreeMap<String, &CrateInfo> = workspace
134        .members
135        .iter()
136        .map(|c| (c.name.clone(), c))
137        .collect();
138
139    // For each released crate, check if it has internal dependencies that were updated
140    for crate_name in releases.keys() {
141        if let Some(crate_info) = by_name.get(crate_name) {
142            // Find which internal dependencies were updated
143            let mut updated_deps = Vec::new();
144            for dep_name in &crate_info.internal_deps {
145                if let Some(new_version) = new_version_by_name.get(dep_name as &str) {
146                    // This internal dependency was updated
147                    updated_deps.push((dep_name.clone(), new_version.clone()));
148                }
149            }
150
151            if !updated_deps.is_empty() {
152                // Create dependency update entry
153                let updates = build_dependency_updates(&updated_deps);
154                if let Some((msg, bump)) = create_dependency_update_entry(&updates) {
155                    messages_by_pkg
156                        .entry(crate_name.clone())
157                        .or_default()
158                        .push((msg, bump));
159                }
160            }
161        }
162    }
163
164    messages_by_pkg
165}
166
167/// Detect packages that need fixed dependency group policy messages
168///
169/// This function identifies packages that were bumped solely due to fixed dependency
170/// group policies (not due to direct changesets or normal dependency cascades).
171/// Returns a map of package name to the bump level they received.
172pub fn detect_fixed_dependency_policy_packages(
173    changesets: &[ChangesetInfo],
174    workspace: &Workspace,
175    config: &Config,
176    bumped_packages: &BTreeSet<String>,
177) -> BTreeMap<String, Bump> {
178    // Build set of packages with direct changesets
179    let packages_with_changesets: BTreeSet<String> = changesets
180        .iter()
181        .flat_map(|cs| cs.packages.iter().cloned())
182        .collect();
183
184    // Build dependency graph (dependent -> set of dependencies)
185    let mut dependents: BTreeMap<String, BTreeSet<String>> = BTreeMap::new();
186    for crate_info in &workspace.members {
187        for dep_name in &crate_info.internal_deps {
188            dependents
189                .entry(dep_name.clone())
190                .or_default()
191                .insert(crate_info.name.clone());
192        }
193    }
194
195    // Find packages affected by normal dependency cascade
196    let mut packages_affected_by_cascade = BTreeSet::new();
197    for pkg_with_changeset in &packages_with_changesets {
198        let mut queue = vec![pkg_with_changeset.clone()];
199        let mut visited = BTreeSet::new();
200
201        while let Some(pkg) = queue.pop() {
202            if visited.contains(&pkg) {
203                continue;
204            }
205            visited.insert(pkg.clone());
206
207            if let Some(deps) = dependents.get(&pkg) {
208                for dep in deps {
209                    packages_affected_by_cascade.insert(dep.clone());
210                    queue.push(dep.clone());
211                }
212            }
213        }
214    }
215
216    // Find packages that need fixed dependency policy messages
217    let mut result = BTreeMap::new();
218
219    for pkg_name in bumped_packages {
220        // Skip if package has direct changeset
221        if packages_with_changesets.contains(pkg_name) {
222            continue;
223        }
224
225        // Skip if package is affected by normal dependency cascade
226        if packages_affected_by_cascade.contains(pkg_name) {
227            continue;
228        }
229
230        // Check if this package is in a fixed dependency group with an affected package
231        for group in &config.fixed_dependencies {
232            if group.contains(&pkg_name.to_string()) {
233                // Check if any other package in this group has changes
234                let has_affected_group_member = group.iter().any(|group_member| {
235                    group_member != pkg_name
236                        && (packages_with_changesets.contains(group_member)
237                            || packages_affected_by_cascade.contains(group_member))
238                });
239
240                if has_affected_group_member {
241                    // Find the highest bump level in the group to determine the policy bump
242                    let group_bump = group
243                        .iter()
244                        .filter_map(|member| {
245                            if packages_with_changesets.contains(member) {
246                                // Find the highest bump from changesets affecting this member
247                                changesets
248                                    .iter()
249                                    .filter(|cs| cs.packages.contains(member))
250                                    .map(|cs| cs.bump)
251                                    .max()
252                            } else {
253                                None
254                            }
255                        })
256                        .max()
257                        .unwrap_or(Bump::Patch);
258
259                    result.insert(pkg_name.clone(), group_bump);
260                    break;
261                }
262            }
263        }
264    }
265
266    result
267}
268
/// Type alias for initial bumps computation result
///
/// Bundles everything `compute_initial_bumps` gathers in one pass over the
/// changesets.
type InitialBumpsResult = (
    BTreeMap<String, Bump>,                // bump_by_pkg: highest bump requested per package
    BTreeMap<String, Vec<(String, Bump)>>, // messages_by_pkg: changelog entries per package
    BTreeSet<std::path::PathBuf>,          // used_paths: changeset files consumed by this run
);

/// Type alias for release plan
type ReleasePlan = Vec<(String, String, String)>; // (name, old_version, new_version)
278
/// Main release function that can be called from CLI or other interfaces
///
/// End-to-end pipeline:
/// 1. discover the workspace and load/validate its configuration,
/// 2. load pending changesets (early-out with an empty result if none),
/// 3. compute per-package bumps, then propagate them through the dependency
///    graph and the fixed/linked dependency groups,
/// 4. print the plan; when `dry_run` is set, stop before touching any file,
/// 5. otherwise rewrite manifests and changelogs, then delete the consumed
///    changeset files.
///
/// # Errors
/// Workspace discovery, config loading/validation, changeset loading, and all
/// filesystem work surface failures as `io::Error`.
pub fn run_release(root: &std::path::Path, dry_run: bool) -> io::Result<ReleaseOutput> {
    let ws = discover_workspace(root).map_err(io::Error::other)?;
    let cfg = Config::load(&ws.root).map_err(io::Error::other)?;

    // Validate fixed dependencies configuration
    validate_fixed_dependencies(&cfg, &ws).map_err(io::Error::other)?;

    let changesets_dir = detect_changesets_dir(&ws.root);
    let changesets = load_changesets(&changesets_dir)?;
    if changesets.is_empty() {
        // NOTE(review): this message hard-codes `.sampo/changesets`, while the
        // changesets were actually loaded from `detect_changesets_dir` — the two
        // could disagree; confirm the intended directory is the one displayed.
        println!(
            "No changesets found in {}",
            ws.root.join(".sampo").join("changesets").display()
        );
        return Ok(ReleaseOutput {
            released_packages: vec![],
            dry_run,
        });
    }

    // Compute initial bumps from changesets
    let (mut bump_by_pkg, mut messages_by_pkg, used_paths) =
        compute_initial_bumps(&changesets, &ws, &cfg)?;

    if bump_by_pkg.is_empty() {
        println!("No applicable packages found in changesets.");
        return Ok(ReleaseOutput {
            released_packages: vec![],
            dry_run,
        });
    }

    // Build dependency graph and apply cascading logic
    let dependents = build_dependency_graph(&ws);
    apply_dependency_cascade(&mut bump_by_pkg, &dependents, &cfg);
    apply_linked_dependencies(&mut bump_by_pkg, &cfg);

    // Prepare and validate release plan
    let releases = prepare_release_plan(&bump_by_pkg, &ws)?;
    if releases.is_empty() {
        println!("No matching workspace crates to release.");
        return Ok(ReleaseOutput {
            released_packages: vec![],
            dry_run,
        });
    }

    print_release_plan(&releases);

    // Convert releases to ReleasedPackage structs
    let released_packages: Vec<ReleasedPackage> = releases
        .iter()
        .map(|(name, old_version, new_version)| {
            // Plan entries originate from bump_by_pkg; Patch is a defensive fallback.
            let bump = bump_by_pkg.get(name).copied().unwrap_or(Bump::Patch);
            ReleasedPackage {
                name: name.clone(),
                old_version: old_version.clone(),
                new_version: new_version.clone(),
                bump,
            }
        })
        .collect();

    if dry_run {
        println!("Dry-run: no files modified, no tags created.");
        return Ok(ReleaseOutput {
            released_packages,
            dry_run: true,
        });
    }

    // Apply changes
    apply_releases(&releases, &ws, &mut messages_by_pkg, &changesets, &cfg)?;

    // Clean up
    cleanup_consumed_changesets(used_paths)?;

    Ok(ReleaseOutput {
        released_packages,
        dry_run: false,
    })
}
362
363/// Compute initial bumps from changesets and collect messages
364fn compute_initial_bumps(
365    changesets: &[ChangesetInfo],
366    ws: &Workspace,
367    cfg: &Config,
368) -> io::Result<InitialBumpsResult> {
369    let mut bump_by_pkg: BTreeMap<String, Bump> = BTreeMap::new();
370    let mut messages_by_pkg: BTreeMap<String, Vec<(String, Bump)>> = BTreeMap::new();
371    let mut used_paths: BTreeSet<std::path::PathBuf> = BTreeSet::new();
372
373    // Resolve GitHub repo slug once if available (config, env or origin remote)
374    let repo_slug = detect_github_repo_slug_with_config(&ws.root, cfg.github_repository.as_deref());
375    let github_token = std::env::var("GITHUB_TOKEN")
376        .ok()
377        .or_else(|| std::env::var("GH_TOKEN").ok());
378
379    for cs in changesets {
380        for pkg in &cs.packages {
381            used_paths.insert(cs.path.clone());
382            bump_by_pkg
383                .entry(pkg.clone())
384                .and_modify(|b| {
385                    if cs.bump > *b {
386                        *b = cs.bump;
387                    }
388                })
389                .or_insert(cs.bump);
390
391            // Enrich message with commit info and acknowledgments
392            let commit_hash = get_commit_hash_for_path(&ws.root, &cs.path);
393            let enriched = if let Some(hash) = commit_hash {
394                enrich_changeset_message(
395                    &cs.message,
396                    &hash,
397                    &ws.root,
398                    repo_slug.as_deref(),
399                    github_token.as_deref(),
400                    cfg.changelog_show_commit_hash,
401                    cfg.changelog_show_acknowledgments,
402                )
403            } else {
404                cs.message.clone()
405            };
406
407            messages_by_pkg
408                .entry(pkg.clone())
409                .or_default()
410                .push((enriched, cs.bump));
411        }
412    }
413
414    Ok((bump_by_pkg, messages_by_pkg, used_paths))
415}
416
417/// Build reverse dependency graph: dep -> set of dependents
418fn build_dependency_graph(ws: &Workspace) -> BTreeMap<String, BTreeSet<String>> {
419    let mut dependents: BTreeMap<String, BTreeSet<String>> = BTreeMap::new();
420    for c in &ws.members {
421        for dep in &c.internal_deps {
422            dependents
423                .entry(dep.clone())
424                .or_default()
425                .insert(c.name.clone());
426        }
427    }
428    dependents
429}
430
/// Apply dependency cascade logic and fixed dependency groups
///
/// Starting from the packages already present in `bump_by_pkg`, walks the
/// reverse dependency graph and:
/// * gives every dependent of a changed package at least a Patch bump (or the
///   same bump as the dependency, when the dependent belongs to a fixed group),
/// * forces every other member of a changed package's fixed group up to that
///   package's bump level (bidirectional group propagation).
///
/// `bump_by_pkg` is updated in place; existing entries are only ever raised,
/// never lowered.
fn apply_dependency_cascade(
    bump_by_pkg: &mut BTreeMap<String, Bump>,
    dependents: &BTreeMap<String, BTreeSet<String>>,
    cfg: &Config,
) {
    // Helper function to find which fixed group a package belongs to, if any
    let find_fixed_group = |pkg_name: &str| -> Option<usize> {
        cfg.fixed_dependencies
            .iter()
            .position(|group| group.contains(&pkg_name.to_string()))
    };

    // Worklist of packages whose bump still needs propagating; `seen` ensures
    // each package is queued at most once.
    let mut queue: Vec<String> = bump_by_pkg.keys().cloned().collect();
    let mut seen: BTreeSet<String> = queue.iter().cloned().collect();

    // NOTE(review): because `seen` is never cleared, a package whose bump is
    // raised *after* it was already processed is not re-queued, so the higher
    // level may not re-propagate to its own dependents/group — confirm intended.
    while let Some(changed) = queue.pop() {
        let changed_bump = bump_by_pkg.get(&changed).copied().unwrap_or(Bump::Patch);

        // 1. Handle normal dependency relationships (unchanged → dependent)
        if let Some(deps) = dependents.get(&changed) {
            for dep_name in deps {
                // Determine bump level for this dependent
                let dependent_bump = if find_fixed_group(dep_name).is_some() {
                    // Fixed dependencies: same bump level as the dependency
                    changed_bump
                } else {
                    // Normal dependencies: at least patch
                    Bump::Patch
                };

                let entry = bump_by_pkg
                    .entry(dep_name.clone())
                    .or_insert(dependent_bump);
                // If already present, keep the higher bump
                if *entry < dependent_bump {
                    *entry = dependent_bump;
                }
                if !seen.contains(dep_name) {
                    queue.push(dep_name.clone());
                    seen.insert(dep_name.clone());
                }
            }
        }

        // 2. Handle fixed dependency groups (bidirectional)
        if let Some(group_idx) = find_fixed_group(&changed) {
            // All packages in the same fixed group should bump together
            for group_member in &cfg.fixed_dependencies[group_idx] {
                if group_member != &changed {
                    let entry = bump_by_pkg
                        .entry(group_member.clone())
                        .or_insert(changed_bump);
                    // If already present, keep the higher bump
                    if *entry < changed_bump {
                        *entry = changed_bump;
                    }
                    if !seen.contains(group_member) {
                        queue.push(group_member.clone());
                        seen.insert(group_member.clone());
                    }
                }
            }
        }
    }
}
497
498/// Apply linked dependencies logic: highest bump level to affected packages only
499fn apply_linked_dependencies(bump_by_pkg: &mut BTreeMap<String, Bump>, cfg: &Config) {
500    for group in &cfg.linked_dependencies {
501        // Check if any package in this group has been bumped
502        let mut group_has_bumps = false;
503        let mut highest_bump = Bump::Patch;
504
505        // First pass: find the highest bump level in the group among affected packages
506        for group_member in group {
507            if let Some(&member_bump) = bump_by_pkg.get(group_member) {
508                group_has_bumps = true;
509                if member_bump > highest_bump {
510                    highest_bump = member_bump;
511                }
512            }
513        }
514
515        // If any package in the group is being bumped, apply highest bump to affected packages only
516        if group_has_bumps {
517            // Apply the highest bump level to packages that are already being bumped
518            // (either directly affected or through dependency cascade)
519            for group_member in group {
520                if bump_by_pkg.contains_key(group_member) {
521                    // Only update if the current bump is lower than the group's highest bump
522                    let current_bump = bump_by_pkg
523                        .get(group_member)
524                        .copied()
525                        .unwrap_or(Bump::Patch);
526                    if highest_bump > current_bump {
527                        bump_by_pkg.insert(group_member.clone(), highest_bump);
528                    }
529                }
530            }
531        }
532    }
533}
534
535/// Prepare the release plan by matching bumps to workspace members
536fn prepare_release_plan(
537    bump_by_pkg: &BTreeMap<String, Bump>,
538    ws: &Workspace,
539) -> io::Result<ReleasePlan> {
540    // Map crate name -> CrateInfo for quick lookup
541    let mut by_name: BTreeMap<String, &CrateInfo> = BTreeMap::new();
542    for c in &ws.members {
543        by_name.insert(c.name.clone(), c);
544    }
545
546    let mut releases: Vec<(String, String, String)> = Vec::new(); // (name, old_version, new_version)
547    for (name, bump) in bump_by_pkg {
548        if let Some(info) = by_name.get(name) {
549            let old = if info.version.is_empty() {
550                "0.0.0".to_string()
551            } else {
552                info.version.clone()
553            };
554
555            let newv = bump_version(&old, *bump).unwrap_or_else(|_| old.clone());
556
557            releases.push((name.clone(), old, newv));
558        }
559    }
560
561    Ok(releases)
562}
563
564/// Print the planned releases
565fn print_release_plan(releases: &ReleasePlan) {
566    println!("Planned releases:");
567    for (name, old, newv) in releases {
568        println!("  {name}: {old} -> {newv}");
569    }
570}
571
/// Apply all releases: update manifests and changelogs
///
/// For each planned release this rewrites the crate's `Cargo.toml` (package
/// version plus internal dependency version requirements) and writes the
/// collected changelog entries — including automatically generated dependency
/// explanations — via `update_changelog`.
///
/// `messages_by_pkg` is mutated in place: dependency explanations detected
/// here are appended to each package's message list before writing.
///
/// # Errors
/// Propagates any `io::Error` from reading or writing manifests/changelogs.
fn apply_releases(
    releases: &ReleasePlan,
    ws: &Workspace,
    messages_by_pkg: &mut BTreeMap<String, Vec<(String, Bump)>>,
    changesets: &[ChangesetInfo],
    cfg: &Config,
) -> io::Result<()> {
    // Build lookup maps
    let mut by_name: BTreeMap<String, &CrateInfo> = BTreeMap::new();
    for c in &ws.members {
        by_name.insert(c.name.clone(), c);
    }

    let mut new_version_by_name: BTreeMap<String, String> = BTreeMap::new();
    for (name, _old, newv) in releases {
        new_version_by_name.insert(name.clone(), newv.clone());
    }

    // Build releases map for dependency explanations
    let releases_map: BTreeMap<String, (String, String)> = releases
        .iter()
        .map(|(name, old, new)| (name.clone(), (old.clone(), new.clone())))
        .collect();

    // Use unified function to detect all dependency explanations
    let dependency_explanations =
        detect_all_dependency_explanations(changesets, ws, cfg, &releases_map);

    // Merge dependency explanations into existing messages
    for (pkg_name, explanations) in dependency_explanations {
        messages_by_pkg
            .entry(pkg_name)
            .or_default()
            .extend(explanations);
    }

    // Apply updates for each release
    for (name, old, newv) in releases {
        // NOTE(review): unwrap assumes every plan entry names a workspace member;
        // `prepare_release_plan` only emits such names, so this holds today.
        let info = by_name.get(name.as_str()).unwrap();
        let manifest_path = info.path.join("Cargo.toml");
        let text = fs::read_to_string(&manifest_path)?;

        // Update manifest versions
        let (updated, _dep_updates) =
            update_manifest_versions(&text, Some(newv.as_str()), ws, &new_version_by_name)?;
        fs::write(&manifest_path, updated)?;

        let messages = messages_by_pkg.get(name).cloned().unwrap_or_default();
        update_changelog(&info.path, name, old, newv, &messages)?;
    }

    Ok(())
}
626
/// Clean up consumed changeset files
///
/// Removal is best-effort: a file that is already gone (or cannot be removed)
/// is silently skipped, and the function always reports success.
fn cleanup_consumed_changesets(used_paths: BTreeSet<std::path::PathBuf>) -> io::Result<()> {
    used_paths.into_iter().for_each(|path| {
        // Ignore individual removal failures on purpose.
        let _ = fs::remove_file(path);
    });
    println!("Removed consumed changesets.");
    Ok(())
}
635
636/// Bump a semver version string
637pub fn bump_version(old: &str, bump: Bump) -> Result<String, String> {
638    let mut parts = old
639        .split('.')
640        .map(|s| s.parse::<u64>().unwrap_or(0))
641        .collect::<Vec<_>>();
642    while parts.len() < 3 {
643        parts.push(0);
644    }
645    let (maj, min, pat) = (parts[0], parts[1], parts[2]);
646    let (maj, min, pat) = match bump {
647        Bump::Patch => (maj, min, pat + 1),
648        Bump::Minor => (maj, min + 1, 0),
649        Bump::Major => (maj + 1, 0, 0),
650    };
651    Ok(format!("{maj}.{min}.{pat}"))
652}
653
/// Update a crate manifest, setting the crate version (if provided) and retargeting
/// internal dependency version requirements to the latest planned versions.
/// Returns the updated TOML string along with a list of (dep_name, new_version) applied.
///
/// Dependencies declared with `workspace = true` are skipped (their versions
/// are managed at the workspace level). Renamed dependencies (`package = "..."`)
/// and `path` dependencies pointing into the workspace are resolved to their
/// real crate name before the version lookup.
///
/// # Errors
/// Fails with `io::Error` when `input` is not valid TOML or the result cannot
/// be re-serialized.
pub fn update_manifest_versions(
    input: &str,
    new_pkg_version: Option<&str>,
    ws: &Workspace,
    new_version_by_name: &BTreeMap<String, String>,
) -> io::Result<(String, Vec<(String, String)>)> {
    let mut value: toml::Value = input
        .parse()
        .map_err(|e| io::Error::other(format!("invalid Cargo.toml: {e}")))?;

    // Overwrite [package].version when a new version was supplied.
    if let Some(v) = new_pkg_version
        && let Some(pkg) = value.get_mut("package").and_then(toml::Value::as_table_mut)
    {
        pkg.insert("version".into(), toml::Value::String(v.to_string()));
    }

    let workspace_crates: BTreeSet<String> = ws.members.iter().map(|c| c.name.clone()).collect();
    let mut applied: Vec<(String, String)> = Vec::new();

    // helper to try update one dependency entry
    // Returns Some((crate_name, new_version)) when the entry was rewritten.
    fn update_dep_entry(
        key: &str,
        entry: &mut toml::Value,
        workspace_crates: &BTreeSet<String>,
        new_version_by_name: &BTreeMap<String, String>,
        crate_dirs: &BTreeMap<String, std::path::PathBuf>,
        base_dir: &std::path::Path,
    ) -> Option<(String, String)> {
        match entry {
            toml::Value::String(ver) => {
                // If the key itself matches a workspace crate with a new version, update string
                if let Some(newv) = new_version_by_name.get(key)
                    && workspace_crates.contains(key)
                {
                    *ver = newv.clone();
                    return Some((key.to_string(), newv.clone()));
                }
            }
            toml::Value::Table(tbl) => {
                // Determine the real crate name: key or overridden via 'package'
                let mut real_name = key.to_string();
                if let Some(toml::Value::String(pkg_name)) = tbl.get("package") {
                    real_name = pkg_name.clone();
                }

                // If path points to a workspace crate, prefer that crate's name
                if let Some(toml::Value::String(path_str)) = tbl.get("path") {
                    let dep_path = clean_path_like(&base_dir.join(path_str));
                    if let Some(name) = crate_name_by_path(crate_dirs, &dep_path) {
                        real_name = name;
                    }
                }

                // Skip pure workspace deps (managed at workspace level)
                if matches!(tbl.get("workspace"), Some(toml::Value::Boolean(true))) {
                    return None;
                }

                if let Some(newv) = new_version_by_name.get(&real_name)
                    && workspace_crates.contains(&real_name)
                {
                    tbl.insert("version".into(), toml::Value::String(newv.clone()));
                    return Some((real_name, newv.clone()));
                }
            }
            _ => {}
        }
        None
    }

    // Build helper maps for path resolution
    let mut crate_dirs: BTreeMap<String, std::path::PathBuf> = BTreeMap::new();
    for c in &ws.members {
        crate_dirs.insert(c.name.clone(), c.path.clone());
    }

    // Resolve manifest base_dir from package.name
    // NOTE(review): when the manifest's package name is not a workspace member,
    // base_dir falls back to "." and relative `path` deps then resolve against
    // the process's working directory — confirm that is acceptable.
    let current_crate_name = value
        .get("package")
        .and_then(toml::Value::as_table)
        .and_then(|t| t.get("name"))
        .and_then(toml::Value::as_str)
        .unwrap_or("")
        .to_string();
    let base_dir = ws
        .members
        .iter()
        .find(|c| c.name == current_crate_name)
        .map(|c| c.path.as_path().to_path_buf())
        .unwrap_or_else(|| std::path::PathBuf::from("."));

    // Update dependencies across dependency sections
    for section in ["dependencies", "dev-dependencies", "build-dependencies"] {
        if let Some(t) = value.get_mut(section).and_then(toml::Value::as_table_mut) {
            // Clone keys to avoid borrow issues while mutating
            let keys: Vec<String> = t.keys().cloned().collect();
            for dep_key in keys {
                if let Some(entry) = t.get_mut(&dep_key)
                    && let Some((dep_name, ver)) = update_dep_entry(
                        &dep_key,
                        entry,
                        &workspace_crates,
                        new_version_by_name,
                        &crate_dirs,
                        &base_dir,
                    )
                {
                    applied.push((dep_name, ver));
                }
            }
        }
    }

    // Also handle table-style per-dependency sections like [dependencies.foo]
    // toml::Value already represents those as entries in the tables above, so no extra work.

    let out = toml::to_string(&value)
        .map_err(|e| io::Error::other(format!("failed to serialize Cargo.toml: {e}")))?;
    Ok((out, applied))
}
777
778fn crate_name_by_path(
779    crate_dirs: &BTreeMap<String, std::path::PathBuf>,
780    dep_path: &Path,
781) -> Option<String> {
782    let cleaned = clean_path_like(dep_path);
783    for (name, p) in crate_dirs {
784        if clean_path_like(p) == cleaned {
785            return Some(name.clone());
786        }
787    }
788    None
789}
790
/// Normalize a path lexically: drop `.` segments and resolve `..` against the
/// preceding segment. A `..` that would climb past a filesystem root (or a
/// Windows prefix) — or past the start of a relative path — is discarded.
fn clean_path_like(p: &std::path::Path) -> std::path::PathBuf {
    let mut cleaned = std::path::PathBuf::new();
    for part in p.components() {
        match part {
            // "." contributes nothing.
            Component::CurDir => continue,
            Component::ParentDir => {
                let at_root = matches!(
                    cleaned.components().next_back(),
                    Some(Component::RootDir | Component::Prefix(_))
                );
                // pop() on an empty buffer is a no-op, which drops a leading "..".
                if !at_root {
                    cleaned.pop();
                }
            }
            other => cleaned.push(other),
        }
    }
    cleaned
}
809
/// Rewrite `CHANGELOG.md` in `crate_dir` with an aggregated top section for
/// `new_version`.
///
/// `entries` supplies (message, bump) pairs that are grouped under
/// "Major/Minor/Patch changes" subheadings. If the file already starts with a
/// `## ` section whose header text differs from `old_version`, that section is
/// treated as unpublished: its bullets are merged (deduplicated) into the new
/// section and the old section is removed. A top section whose header equals
/// `old_version` is considered published and is left untouched below the new
/// section.
///
/// # Errors
/// Propagates I/O errors from reading or writing `CHANGELOG.md`.
fn update_changelog(
    crate_dir: &Path,
    package: &str,
    old_version: &str,
    new_version: &str,
    entries: &[(String, Bump)],
) -> io::Result<()> {
    let path = crate_dir.join("CHANGELOG.md");
    let existing = if path.exists() {
        fs::read_to_string(&path)?
    } else {
        String::new()
    };
    // Strip a UTF-8 BOM so the header prefix checks below see the real text.
    let mut body = existing.trim_start_matches('\u{feff}').to_string();
    // Remove existing top package header if present; it is re-emitted fresh
    // at the top of the rebuilt file.
    let package_header = format!("# {}", package);
    if body.starts_with(&package_header) {
        if let Some(idx) = body.find('\n') {
            body = body[idx + 1..].to_string();
        } else {
            // Header was the only line in the file.
            body.clear();
        }
    }

    // Parse and merge the current top section only if it's an unpublished section.
    // Heuristic: if the top section header equals the current (old) version, it is published
    // and must be preserved. Otherwise, treat it as in-progress and merge its bullets.
    let mut merged_major: Vec<String> = Vec::new();
    let mut merged_minor: Vec<String> = Vec::new();
    let mut merged_patch: Vec<String> = Vec::new();

    // helper to push without duplicates (preserve append order)
    let push_unique = |list: &mut Vec<String>, msg: &str| {
        if !list.iter().any(|m| m == msg) {
            list.push(msg.to_string());
        }
    };

    // Collect new entries first so they appear before merged-in old bullets.
    for (msg, bump) in entries {
        match bump {
            Bump::Major => push_unique(&mut merged_major, msg),
            Bump::Minor => push_unique(&mut merged_minor, msg),
            Bump::Patch => push_unique(&mut merged_patch, msg),
        }
    }

    // If body starts with a previous top section (## ...), inspect its header.
    // If header == old_version => preserve it (do not merge or strip).
    // Else => parse and merge its bullets, then strip that section.
    let trimmed = body.trim_start();
    if trimmed.starts_with("## ") {
        // Extract first header line text
        let mut lines_iter = trimmed.lines();
        let header_line = lines_iter.next().unwrap_or("").trim();
        let header_text = header_line.trim_start_matches("## ").trim();

        let is_published_top = header_text == old_version;

        if !is_published_top {
            // Determine the extent of the first section in 'trimmed'
            let after_header_offset = header_line.len();
            let rest_after_header = &trimmed[after_header_offset..];
            // Find next section marker starting at a new line
            let next_rel = rest_after_header.find("\n## ");
            let (section_text, remaining) = match next_rel {
                Some(pos) => {
                    let end = after_header_offset + pos + 1; // include leading newline
                    (&trimmed[..end], &trimmed[end..])
                }
                // No later section: the whole remainder belongs to this one.
                None => (trimmed, ""),
            };

            // Walk the unpublished section tracking which bump bucket we are
            // in, and fold its bullets into the merged lists.
            let mut current = None::<&str>;
            for line in section_text.lines() {
                let t = line.trim();
                if t.eq_ignore_ascii_case("### Major changes") {
                    current = Some("major");
                    continue;
                } else if t.eq_ignore_ascii_case("### Minor changes") {
                    current = Some("minor");
                    continue;
                } else if t.eq_ignore_ascii_case("### Patch changes") {
                    current = Some("patch");
                    continue;
                }
                if t.starts_with("- ") {
                    // NOTE(review): trim_start_matches strips *repeated* "- "
                    // prefixes; assumes bullets carry a single marker.
                    let msg = t.trim_start_matches("- ").trim();
                    match current {
                        Some("major") => push_unique(&mut merged_major, msg),
                        Some("minor") => push_unique(&mut merged_minor, msg),
                        Some("patch") => push_unique(&mut merged_patch, msg),
                        // Bullets before any recognized subheading are dropped.
                        _ => {}
                    }
                }
            }

            body = remaining.to_string();
        }
    }

    // Build new aggregated top section
    let mut section = String::new();
    section.push_str(&format!("# {}\n\n", package));
    section.push_str(&format!("## {}\n\n", new_version));

    // Emit subsections in severity order, skipping empty buckets.
    if !merged_major.is_empty() {
        section.push_str("### Major changes\n\n");
        for msg in &merged_major {
            section.push_str("- ");
            section.push_str(msg);
            section.push('\n');
        }
        section.push('\n');
    }
    if !merged_minor.is_empty() {
        section.push_str("### Minor changes\n\n");
        for msg in &merged_minor {
            section.push_str("- ");
            section.push_str(msg);
            section.push('\n');
        }
        section.push('\n');
    }
    if !merged_patch.is_empty() {
        section.push_str("### Patch changes\n\n");
        for msg in &merged_patch {
            section.push_str("- ");
            section.push_str(msg);
            section.push('\n');
        }
        section.push('\n');
    }

    // Prepend the new section to whatever published history remains.
    let combined = if body.trim().is_empty() {
        section
    } else {
        format!("{}{}", section, body)
    };
    fs::write(&path, combined)
}
951
952/// Validate fixed dependencies configuration against the workspace
953fn validate_fixed_dependencies(cfg: &Config, ws: &Workspace) -> Result<(), String> {
954    let workspace_packages: FxHashSet<String> = ws.members.iter().map(|c| c.name.clone()).collect();
955
956    for (group_idx, group) in cfg.fixed_dependencies.iter().enumerate() {
957        for package in group {
958            if !workspace_packages.contains(package) {
959                let available_packages: Vec<String> = workspace_packages.iter().cloned().collect();
960                return Err(format!(
961                    "Package '{}' in fixed dependency group {} does not exist in the workspace. Available packages: [{}]",
962                    package,
963                    group_idx + 1,
964                    available_packages.join(", ")
965                ));
966            }
967        }
968    }
969    Ok(())
970}