1pub mod stats;
2mod writer;
3
4use crate::args::Args;
5use crate::data::DataSource;
6use crate::error::MintError;
7use crate::layout;
8use crate::layout::args::BlockNames;
9use crate::layout::block::Config;
10use crate::layout::error::LayoutError;
11use crate::layout::settings::Endianness;
12use crate::layout::used_values::{NoopValueSink, ValueCollector};
13use crate::output;
14use crate::output::error::OutputError;
15use crate::output::{DataRange, OutputFile};
16use rayon::prelude::*;
17use stats::{BlockStat, BuildStats};
18use std::collections::{HashMap, HashSet};
19use std::time::Instant;
20use writer::write_output;
21
/// A single (layout file, block name) pair to build, produced by
/// `resolve_blocks` after expanding empty-name wildcard arguments.
#[derive(Debug, Clone)]
struct ResolvedBlock {
    // Name of the block inside the layout file.
    name: String,
    // Layout file the block comes from; also the key into the loaded
    // `HashMap<String, Config>` of layouts.
    file: String,
}
27
/// Everything produced by building one block's bytestream.
struct BlockBuildResult {
    // Identity (name + layout file) of the block this result belongs to.
    block_names: BlockNames,
    // Encoded output data (address range, bytes, CRC stream) for the block.
    data_range: DataRange,
    // Per-block statistics aggregated into `BuildStats` later.
    stat: BlockStat,
    // Captured input values for the JSON export; `Some` only when value
    // capturing was requested for this build.
    used_values: Option<serde_json::Value>,
}
34
35fn resolve_blocks(
36 block_args: &[BlockNames],
37) -> Result<(Vec<ResolvedBlock>, HashMap<String, Config>), LayoutError> {
38 let unique_files: HashSet<String> = block_args.iter().map(|b| b.file.clone()).collect();
39
40 let layouts: Result<HashMap<String, Config>, LayoutError> = unique_files
41 .par_iter()
42 .map(|file| layout::load_layout(file).map(|cfg| (file.clone(), cfg)))
43 .collect();
44
45 let layouts = layouts?;
46
47 let mut resolved = Vec::new();
48 for arg in block_args {
49 if arg.name.is_empty() {
50 let layout = &layouts[&arg.file];
51 for block_name in layout.blocks.keys() {
52 resolved.push(ResolvedBlock {
53 name: block_name.clone(),
54 file: arg.file.clone(),
55 });
56 }
57 } else {
58 resolved.push(ResolvedBlock {
59 name: arg.name.clone(),
60 file: arg.file.clone(),
61 });
62 }
63 }
64
65 let mut seen = HashSet::new();
66 let deduplicated: Vec<ResolvedBlock> = resolved
67 .into_iter()
68 .filter(|b| seen.insert((b.file.clone(), b.name.clone())))
69 .collect();
70
71 Ok((deduplicated, layouts))
72}
73
74fn build_bytestreams(
75 blocks: &[ResolvedBlock],
76 layouts: &HashMap<String, Config>,
77 data_source: Option<&dyn DataSource>,
78 strict: bool,
79 capture_values: bool,
80) -> Result<Vec<BlockBuildResult>, MintError> {
81 blocks
82 .par_iter()
83 .map(|resolved| {
84 build_single_bytestream(resolved, layouts, data_source, strict, capture_values)
85 })
86 .collect()
87}
88
/// Build one block's bytestream, convert it to a `DataRange`, and gather
/// its per-block statistics.
///
/// Any error raised inside is wrapped in `MintError::InBlock` so the
/// failing block name and layout file are reported to the caller.
fn build_single_bytestream(
    resolved: &ResolvedBlock,
    layouts: &HashMap<String, Config>,
    data_source: Option<&dyn DataSource>,
    strict: bool,
    capture_values: bool,
) -> Result<BlockBuildResult, MintError> {
    // Run the fallible work in an immediately-invoked closure so a single
    // `map_err` at the end attaches block/file context to every error path.
    let result = (|| {
        // File lookup cannot fail: `resolve_blocks` loaded a layout for
        // every referenced file.
        let layout = &layouts[&resolved.file];
        // NOTE(review): this index panics if an explicitly-named block is
        // missing from the layout — confirm names are validated upstream,
        // otherwise this should return an error instead.
        let block = &layout.blocks[&resolved.name];
        // Pick the value sink: a real collector when JSON export is
        // requested, otherwise a no-op sink that discards values.
        let mut collector = ValueCollector::new();
        let mut noop = NoopValueSink;
        let value_sink = if capture_values {
            &mut collector as &mut dyn crate::layout::used_values::ValueSink
        } else {
            &mut noop as &mut dyn crate::layout::used_values::ValueSink
        };

        let (bytestream, padding_bytes) =
            block.build_bytestream(data_source, &layout.settings, strict, value_sink)?;

        // Attach header/addressing information to the raw bytes.
        let data_range = output::bytestream_to_datarange(
            bytestream,
            &block.header,
            &layout.settings,
            padding_bytes,
        )?;

        // `None` when the CRC bytestream is shorter than 4 bytes.
        let crc_value = extract_crc_value(&data_range.crc_bytestream, &layout.settings.endianness);

        let stat = BlockStat {
            name: resolved.name.clone(),
            start_address: data_range.start_address,
            allocated_size: data_range.allocated_size,
            used_size: data_range.used_size,
            crc_value,
        };

        Ok(BlockBuildResult {
            block_names: BlockNames {
                name: resolved.name.clone(),
                file: resolved.file.clone(),
            },
            data_range,
            stat,
            // Only materialize collected values when capturing was on;
            // otherwise the collector is empty and unused.
            used_values: capture_values.then(|| collector.into_value()),
        })
    })();

    // Wrap any failure with the block/file identity for diagnostics.
    result.map_err(|e| MintError::InBlock {
        block_name: resolved.name.clone(),
        layout_file: resolved.file.clone(),
        source: Box::new(e),
    })
}
144
145fn extract_crc_value(crc_bytestream: &[u8], endianness: &Endianness) -> Option<u32> {
146 if crc_bytestream.len() < 4 {
147 return None;
148 }
149 let bytes: [u8; 4] = crc_bytestream[..4].try_into().ok()?;
150 Some(match endianness {
151 Endianness::Big => u32::from_be_bytes(bytes),
152 Endianness::Little => u32::from_le_bytes(bytes),
153 })
154}
155
156fn output_results(results: Vec<BlockBuildResult>, args: &Args) -> Result<BuildStats, MintError> {
157 let mut stats = BuildStats::new();
158 let named_ranges: Vec<(String, DataRange)> = results
159 .into_iter()
160 .map(|r| {
161 stats.add_block(r.stat);
162 (r.block_names.name, r.data_range)
163 })
164 .collect();
165
166 check_overlaps(&named_ranges)?;
167 let ranges: Vec<DataRange> = named_ranges.into_iter().map(|(_, r)| r).collect();
168 let output_file = OutputFile {
169 ranges,
170 format: args.output.format,
171 record_width: args.output.record_width as usize,
172 };
173
174 write_output(&output_file, &args.output)?;
175 Ok(stats)
176}
177
178fn check_overlaps(named_ranges: &[(String, DataRange)]) -> Result<(), MintError> {
179 for i in 0..named_ranges.len() {
180 for j in (i + 1)..named_ranges.len() {
181 let (ref name_a, ref range_a) = named_ranges[i];
182 let (ref name_b, ref range_b) = named_ranges[j];
183 let a_start = range_a.start_address;
184 let a_end = a_start + range_a.allocated_size;
185 let b_start = range_b.start_address;
186 let b_end = b_start + range_b.allocated_size;
187
188 let overlap_start = a_start.max(b_start);
189 let overlap_end = a_end.min(b_end);
190
191 if overlap_start < overlap_end {
192 let overlap_size = overlap_end - overlap_start;
193 let msg = format!(
194 "Block '{}' (0x{:08X}-0x{:08X}) overlaps with block '{}' (0x{:08X}-0x{:08X}). Overlap: 0x{:08X}-0x{:08X} ({} bytes)",
195 name_a,
196 a_start,
197 a_end - 1,
198 name_b,
199 b_start,
200 b_end - 1,
201 overlap_start,
202 overlap_end - 1,
203 overlap_size
204 );
205 return Err(OutputError::BlockOverlapError(msg).into());
206 }
207 }
208 }
209 Ok(())
210}
211
212pub fn build(args: &Args, data_source: Option<&dyn DataSource>) -> Result<BuildStats, MintError> {
213 let start_time = Instant::now();
214
215 let (resolved_blocks, layouts) = resolve_blocks(&args.layout.blocks)?;
216 let capture_values = args.output.export_json.is_some();
217 let mut results = build_bytestreams(
218 &resolved_blocks,
219 &layouts,
220 data_source,
221 args.layout.strict,
222 capture_values,
223 )?;
224
225 if let Some(path) = args.output.export_json.as_ref() {
226 let report = take_used_values_report(&mut results)?;
227 output::report::write_used_values_json(path, &report)?;
228 }
229
230 let mut stats = output_results(results, args)?;
231
232 stats.total_duration = start_time.elapsed();
233 Ok(stats)
234}
235
236fn take_used_values_report(
237 results: &mut [BlockBuildResult],
238) -> Result<serde_json::Value, MintError> {
239 let mut report = serde_json::Map::new();
240 for result in results {
241 let value = result.used_values.take().ok_or_else(|| {
242 OutputError::FileError(
243 "JSON export requested but values were not captured.".to_string(),
244 )
245 })?;
246 let file_entry = report
247 .entry(result.block_names.file.clone())
248 .or_insert_with(|| serde_json::Value::Object(serde_json::Map::new()));
249 let serde_json::Value::Object(blocks) = file_entry else {
250 return Err(OutputError::FileError(
251 "JSON export contains unexpected non-object entry.".to_string(),
252 )
253 .into());
254 };
255 if blocks.contains_key(&result.block_names.name) {
256 return Err(OutputError::FileError(format!(
257 "Duplicate block '{}' in JSON export for file '{}'.",
258 result.block_names.name, result.block_names.file
259 ))
260 .into());
261 }
262 blocks.insert(result.block_names.name.clone(), value);
263 }
264 Ok(serde_json::Value::Object(report))
265}