1use crate::{
104 artifact_output::Artifacts,
105 artifacts::{Settings, VersionedFilteredSources, VersionedSources},
106 buildinfo::RawBuildInfo,
107 cache::ArtifactsCache,
108 error::Result,
109 filter::SparseOutputFilter,
110 output::AggregatedCompilerOutput,
111 report,
112 resolver::GraphEdges,
113 ArtifactOutput, CompilerInput, Graph, Project, ProjectCompileOutput, ProjectPathsConfig, Solc,
114 Sources,
115};
116use rayon::prelude::*;
117use std::{collections::btree_map::BTreeMap, path::PathBuf, time::Instant};
118use tracing::trace;
119
/// Drives a full project compilation: holds the resolved import graph and the
/// sources to compile, partitioned by solc version and execution mode.
#[derive(Debug)]
pub struct ProjectCompiler<'a, T: ArtifactOutput> {
    /// Relationships between the source files and their imports.
    edges: GraphEdges,
    /// The project to compile; provides paths, settings and the artifact handler.
    project: &'a Project<T>,
    /// The sources to compile, already grouped by solc version together with the
    /// sequential/parallel execution strategy.
    sources: CompilerSources,
    /// Restricts the requested compiler output to a subset of files
    /// (everything by default).
    sparse_output: SparseOutputFilter,
}
130
131impl<'a, T: ArtifactOutput> ProjectCompiler<'a, T> {
132 #[cfg(all(feature = "svm-solc", not(target_arch = "wasm32")))]
144 pub fn new(project: &'a Project<T>) -> Result<Self> {
145 Self::with_sources(project, project.paths.read_input_files()?)
146 }
147
148 #[cfg(all(feature = "svm-solc", not(target_arch = "wasm32")))]
155 pub fn with_sources(project: &'a Project<T>, sources: Sources) -> Result<Self> {
156 let graph = Graph::resolve_sources(&project.paths, sources)?;
157 let (versions, edges) = graph.into_sources_by_version(project.offline)?;
158
159 let sources_by_version = versions.get(project)?;
160
161 let sources = if project.solc_jobs > 1 && sources_by_version.len() > 1 {
162 CompilerSources::Parallel(sources_by_version, project.solc_jobs)
165 } else {
166 CompilerSources::Sequential(sources_by_version)
167 };
168
169 Ok(Self { edges, project, sources, sparse_output: Default::default() })
170 }
171
172 pub fn with_sources_and_solc(
174 project: &'a Project<T>,
175 sources: Sources,
176 solc: Solc,
177 ) -> Result<Self> {
178 let version = solc.version()?;
179 let (sources, edges) = Graph::resolve_sources(&project.paths, sources)?.into_sources();
180
181 let solc = project.configure_solc_with_version(
183 solc,
184 Some(version.clone()),
185 edges.include_paths().clone(),
186 );
187
188 let sources_by_version = BTreeMap::from([(solc, (version, sources))]);
189 let sources = CompilerSources::Sequential(sources_by_version);
190
191 Ok(Self { edges, project, sources, sparse_output: Default::default() })
192 }
193
194 pub fn with_sparse_output(mut self, sparse_output: impl Into<SparseOutputFilter>) -> Self {
197 self.sparse_output = sparse_output.into();
198 self
199 }
200
201 pub fn compile(self) -> Result<ProjectCompileOutput<T>> {
217 let slash_paths = self.project.slash_paths;
218
219 let mut output = self.preprocess()?.compile()?.write_artifacts()?.write_cache()?;
221
222 if slash_paths {
223 output.slash_paths();
225 }
226
227 Ok(output)
228 }
229
230 fn preprocess(self) -> Result<PreprocessedState<'a, T>> {
234 trace!("preprocessing");
235 let Self { edges, project, mut sources, sparse_output } = self;
236
237 sources.slash_paths();
240
241 let mut cache = ArtifactsCache::new(project, edges)?;
242 let sources = sources.filtered(&mut cache);
244
245 Ok(PreprocessedState { sources, cache, sparse_output })
246 }
247}
248
/// Intermediate pipeline state: sources have been filtered against the cache
/// and are ready to be handed to solc.
#[derive(Debug)]
struct PreprocessedState<'a, T: ArtifactOutput> {
    /// Sources that still need compiling, grouped by solc version.
    sources: FilteredCompilerSources,

    /// Tracks the cache entries of clean sources and collects new ones.
    cache: ArtifactsCache<'a, T>,

    /// Filter applied to the compiler's requested output selection.
    sparse_output: SparseOutputFilter,
}
262
263impl<'a, T: ArtifactOutput> PreprocessedState<'a, T> {
264 fn compile(self) -> Result<CompiledState<'a, T>> {
266 trace!("compiling");
267 let PreprocessedState { sources, cache, sparse_output } = self;
268 let project = cache.project();
269 let mut output = sources.compile(
270 &project.solc_config.settings,
271 &project.paths,
272 sparse_output,
273 cache.graph(),
274 project.build_info,
275 )?;
276
277 output.join_all(cache.project().root());
283
284 Ok(CompiledState { output, cache })
285 }
286}
287
/// Intermediate pipeline state: solc has run and produced raw output, but no
/// artifacts have been written yet.
#[derive(Debug)]
struct CompiledState<'a, T: ArtifactOutput> {
    /// Aggregated output of all solc invocations.
    output: AggregatedCompilerOutput,
    /// Cache carried through the pipeline; still holds the clean entries.
    cache: ArtifactsCache<'a, T>,
}
294
295impl<'a, T: ArtifactOutput> CompiledState<'a, T> {
296 #[tracing::instrument(skip_all, name = "write-artifacts")]
301 fn write_artifacts(self) -> Result<ArtifactsState<'a, T>> {
302 let CompiledState { output, cache } = self;
303
304 let project = cache.project();
305 let ctx = cache.output_ctx();
306 let compiled_artifacts = if project.no_artifacts {
309 project.artifacts_handler().output_to_artifacts(
310 &output.contracts,
311 &output.sources,
312 ctx,
313 &project.paths,
314 )
315 } else if output.has_error(&project.ignored_error_codes, &project.compiler_severity_filter)
316 {
317 trace!("skip writing cache file due to solc errors: {:?}", output.errors);
318 project.artifacts_handler().output_to_artifacts(
319 &output.contracts,
320 &output.sources,
321 ctx,
322 &project.paths,
323 )
324 } else {
325 trace!(
326 "handling artifact output for {} contracts and {} sources",
327 output.contracts.len(),
328 output.sources.len()
329 );
330 let artifacts = project.artifacts_handler().on_output(
332 &output.contracts,
333 &output.sources,
334 &project.paths,
335 ctx,
336 )?;
337
338 output.write_build_infos(project.build_info_path())?;
340
341 artifacts
342 };
343
344 Ok(ArtifactsState { output, cache, compiled_artifacts })
345 }
346}
347
/// Final pipeline state: artifacts have been produced (and possibly written to
/// disk); only the cache file remains to be persisted.
#[derive(Debug)]
struct ArtifactsState<'a, T: ArtifactOutput> {
    /// Aggregated output of all solc invocations.
    output: AggregatedCompilerOutput,
    /// Cache to be consumed into the final cache file.
    cache: ArtifactsCache<'a, T>,
    /// Artifacts produced from this run's compiler output.
    compiled_artifacts: Artifacts<T::Artifact>,
}
355
356impl<'a, T: ArtifactOutput> ArtifactsState<'a, T> {
357 fn write_cache(self) -> Result<ProjectCompileOutput<T>> {
361 let ArtifactsState { output, cache, compiled_artifacts } = self;
362 let project = cache.project();
363 let ignored_error_codes = project.ignored_error_codes.clone();
364 let compiler_severity_filter = project.compiler_severity_filter;
365 let has_error = output.has_error(&ignored_error_codes, &compiler_severity_filter);
366 let skip_write_to_disk = project.no_artifacts || has_error;
367 trace!(has_error, project.no_artifacts, skip_write_to_disk, cache_path=?project.cache_path(),"prepare writing cache file");
368
369 let cached_artifacts = cache.consume(&compiled_artifacts, !skip_write_to_disk)?;
370 Ok(ProjectCompileOutput {
371 compiler_output: output,
372 compiled_artifacts,
373 cached_artifacts,
374 ignored_error_codes,
375 compiler_severity_filter,
376 })
377 }
378}
379
/// Determines how the `solc <-> sources` sets are executed.
#[derive(Debug, Clone)]
#[allow(dead_code)]
enum CompilerSources {
    /// Compile all sets one after another.
    Sequential(VersionedSources),
    /// Compile the sets in parallel using up to the given number of jobs.
    Parallel(VersionedSources, usize),
}
389
impl CompilerSources {
    /// Converts all `\\` path separators to `/` in every source path.
    ///
    /// Only does anything on Windows (`cfg(windows)`); on all other targets
    /// this is a no-op.
    fn slash_paths(&mut self) {
        #[cfg(windows)]
        {
            use path_slash::PathBufExt;

            // Rebuild each sources map with slash-normalized path keys.
            fn slash_versioned_sources(v: &mut VersionedSources) {
                for (_, (_, sources)) in v {
                    *sources = std::mem::take(sources)
                        .into_iter()
                        .map(|(path, source)| {
                            (PathBuf::from(path.to_slash_lossy().as_ref()), source)
                        })
                        .collect()
                }
            }

            match self {
                CompilerSources::Sequential(v) => slash_versioned_sources(v),
                CompilerSources::Parallel(v, _) => slash_versioned_sources(v),
            };
        }
    }

    /// Filters out all sources that don't need to be compiled, see
    /// `ArtifactsCache::filter`, keeping the sequential/parallel mode intact.
    fn filtered<T: ArtifactOutput>(self, cache: &mut ArtifactsCache<T>) -> FilteredCompilerSources {
        fn filtered_sources<T: ArtifactOutput>(
            sources: VersionedSources,
            cache: &mut ArtifactsCache<T>,
        ) -> VersionedFilteredSources {
            // Register all content hashes first so the subsequent filtering
            // can compare them against the cache entries.
            sources.iter().for_each(|(_, (_, sources))| {
                cache.fill_content_hashes(sources);
            });

            sources
                .into_iter()
                .map(|(solc, (version, sources))| {
                    trace!("Filtering {} sources for {}", sources.len(), version);
                    // `cache.filter` decides per file whether it is dirty or
                    // can be served from cache.
                    let sources = cache.filter(sources, &version);
                    trace!(
                        "Detected {} dirty sources {:?}",
                        sources.dirty().count(),
                        sources.dirty_files().collect::<Vec<_>>()
                    );
                    (solc, (version, sources))
                })
                .collect()
        }

        match self {
            CompilerSources::Sequential(s) => {
                FilteredCompilerSources::Sequential(filtered_sources(s, cache))
            }
            CompilerSources::Parallel(s, j) => {
                FilteredCompilerSources::Parallel(filtered_sources(s, cache), j)
            }
        }
    }
}
453
/// Cache-filtered counterpart of [`CompilerSources`]: only holds the sources
/// that survived the cache filter, with the same execution strategy.
#[derive(Debug, Clone)]
#[allow(dead_code)]
enum FilteredCompilerSources {
    /// Compile all sets one after another.
    Sequential(VersionedFilteredSources),
    /// Compile the sets in parallel using up to the given number of jobs.
    Parallel(VersionedFilteredSources, usize),
}
463
464impl FilteredCompilerSources {
465 fn compile(
467 self,
468 settings: &Settings,
469 paths: &ProjectPathsConfig,
470 sparse_output: SparseOutputFilter,
471 graph: &GraphEdges,
472 create_build_info: bool,
473 ) -> Result<AggregatedCompilerOutput> {
474 match self {
475 FilteredCompilerSources::Sequential(input) => {
476 compile_sequential(input, settings, paths, sparse_output, graph, create_build_info)
477 }
478 FilteredCompilerSources::Parallel(input, j) => {
479 compile_parallel(input, j, settings, paths, sparse_output, graph, create_build_info)
480 }
481 }
482 }
483
484 #[cfg(test)]
485 #[allow(unused)]
486 fn sources(&self) -> &VersionedFilteredSources {
487 match self {
488 FilteredCompilerSources::Sequential(v) => v,
489 FilteredCompilerSources::Parallel(v, _) => v,
490 }
491 }
492}
493
/// Compiles all version sets one after another and returns the aggregated solc
/// `CompilerOutput`s.
///
/// For each set, the requested output selection may be trimmed via
/// `sparse_output`, and compiler inputs whose sources are all clean are skipped
/// entirely. When `create_build_info` is set, the raw standard-json build info
/// is collected per version.
fn compile_sequential(
    input: VersionedFilteredSources,
    settings: &Settings,
    paths: &ProjectPathsConfig,
    sparse_output: SparseOutputFilter,
    graph: &GraphEdges,
    create_build_info: bool,
) -> Result<AggregatedCompilerOutput> {
    let mut aggregated = AggregatedCompilerOutput::default();
    trace!("compiling {} jobs sequentially", input.len());
    for (solc, (version, filtered_sources)) in input {
        // Nothing to compile for this solc version.
        if filtered_sources.is_empty() {
            trace!("skip solc {} {} for empty sources set", solc.as_ref().display(), version);
            continue
        }
        trace!(
            "compiling {} sources with solc \"{}\" {:?}",
            filtered_sources.len(),
            solc.as_ref().display(),
            solc.args
        );

        // Files that actually need recompiling in this run.
        let dirty_files: Vec<PathBuf> = filtered_sources.dirty_files().cloned().collect();

        // The sparse output filter may trim the requested output selection for
        // clean files; this can mutate `opt_settings`, which is then applied
        // to the compiler input below.
        let mut opt_settings = settings.clone();
        let sources = sparse_output.sparse_sources(filtered_sources, &mut opt_settings, graph);

        for input in CompilerInput::with_sources(sources) {
            // The input can contain clean files (presumably kept for import
            // resolution — confirm against `sparse_sources`); skip the whole
            // input when none of its sources is dirty.
            let actually_dirty = input
                .sources
                .keys()
                .filter(|f| dirty_files.contains(f))
                .cloned()
                .collect::<Vec<_>>();
            if actually_dirty.is_empty() {
                trace!(
                    "skip solc {} {} compilation of {} compiler input due to empty source set",
                    solc.as_ref().display(),
                    version,
                    input.language
                );
                continue
            }
            // Finalize the input: apply settings, remappings and base path,
            // then sanitize for the target solc version.
            let input = input
                .settings(opt_settings.clone())
                .normalize_evm_version(&version)
                .with_remappings(paths.remappings.clone())
                .with_base_path(&paths.root)
                .sanitized(&version);

            trace!(
                "calling solc `{}` with {} sources {:?}",
                version,
                input.sources.len(),
                input.sources.keys()
            );

            let start = Instant::now();
            report::solc_spawn(&solc, &version, &input, &actually_dirty);
            let output = solc.compile(&input)?;
            report::solc_success(&solc, &version, &output, &start.elapsed());
            trace!("compiled input, output has error: {}", output.has_error());
            trace!("received compiler output: {:?}", output.contracts.keys());

            // Collect the raw standard-json build info for this run, if requested.
            if create_build_info {
                let build_info = RawBuildInfo::new(&input, &output, &version)?;
                aggregated.build_infos.insert(version.clone(), build_info);
            }

            aggregated.extend(version.clone(), output);
        }
    }
    Ok(aggregated)
}
575
576fn compile_parallel(
578 input: VersionedFilteredSources,
579 num_jobs: usize,
580 settings: &Settings,
581 paths: &ProjectPathsConfig,
582 sparse_output: SparseOutputFilter,
583 graph: &GraphEdges,
584 create_build_info: bool,
585) -> Result<AggregatedCompilerOutput> {
586 debug_assert!(num_jobs > 1);
587 trace!("compile {} sources in parallel using up to {} solc jobs", input.len(), num_jobs);
588
589 let mut jobs = Vec::with_capacity(input.len());
590 for (solc, (version, filtered_sources)) in input {
591 if filtered_sources.is_empty() {
592 trace!("skip solc {} {} for empty sources set", solc.as_ref().display(), version);
594 continue
595 }
596
597 let dirty_files: Vec<PathBuf> = filtered_sources.dirty_files().cloned().collect();
598
599 let mut opt_settings = settings.clone();
602 let sources = sparse_output.sparse_sources(filtered_sources, &mut opt_settings, graph);
603
604 for input in CompilerInput::with_sources(sources) {
605 let actually_dirty = input
606 .sources
607 .keys()
608 .filter(|f| dirty_files.contains(f))
609 .cloned()
610 .collect::<Vec<_>>();
611 if actually_dirty.is_empty() {
612 trace!(
615 "skip solc {} {} compilation of {} compiler input due to empty source set",
616 solc.as_ref().display(),
617 version,
618 input.language
619 );
620 continue
621 }
622
623 let job = input
624 .settings(settings.clone())
625 .normalize_evm_version(&version)
626 .with_remappings(paths.remappings.clone())
627 .with_base_path(&paths.root)
628 .sanitized(&version);
629
630 jobs.push((solc.clone(), version.clone(), job, actually_dirty))
631 }
632 }
633
634 let scoped_report = report::get_default(|reporter| reporter.clone());
638
639 let pool = rayon::ThreadPoolBuilder::new().num_threads(num_jobs).build().unwrap();
641
642 let outputs = pool.install(move || {
643 jobs.into_par_iter()
644 .map(move |(solc, version, input, actually_dirty)| {
645 let _guard = report::set_scoped(&scoped_report);
647
648 trace!(
649 "calling solc `{}` {:?} with {} sources: {:?}",
650 version,
651 solc.args,
652 input.sources.len(),
653 input.sources.keys()
654 );
655 let start = Instant::now();
656 report::solc_spawn(&solc, &version, &input, &actually_dirty);
657 solc.compile(&input).map(move |output| {
658 report::solc_success(&solc, &version, &output, &start.elapsed());
659 (version, input, output)
660 })
661 })
662 .collect::<Result<Vec<_>>>()
663 })?;
664
665 let mut aggregated = AggregatedCompilerOutput::default();
666 for (version, input, output) in outputs {
667 if create_build_info {
669 let build_info = RawBuildInfo::new(&input, &output, &version)?;
670 aggregated.build_infos.insert(version.clone(), build_info);
671 }
672 aggregated.extend(version, output);
673 }
674
675 Ok(aggregated)
676}
677
#[cfg(test)]
#[cfg(all(feature = "project-util", feature = "svm-solc"))]
mod tests {
    use super::*;
    use crate::{project_util::TempProject, MinimalCombinedArtifacts};

    // Enables tracing output for a test run, honoring `RUST_LOG`.
    #[allow(unused)]
    fn init_tracing() {
        let _ = tracing_subscriber::fmt()
            .with_env_filter(tracing_subscriber::EnvFilter::from_default_env())
            .try_init()
            .ok();
    }

    // A fresh project has no cache, so preprocessing must mark every source
    // file as dirty.
    #[test]
    fn can_preprocess() {
        let root = PathBuf::from(env!("CARGO_MANIFEST_DIR")).join("test-data/dapp-sample");
        let project =
            Project::builder().paths(ProjectPathsConfig::dapptools(root).unwrap()).build().unwrap();

        let compiler = ProjectCompiler::new(&project).unwrap();
        let prep = compiler.preprocess().unwrap();
        let cache = prep.cache.as_cached().unwrap();
        // all 3 sources of the sample project are dirty on a cold cache
        assert_eq!(cache.dirty_source_files.len(), 3);
        assert!(cache.filtered.is_empty());
        assert!(cache.cache.is_empty());

        let compiled = prep.compile().unwrap();
        assert_eq!(compiled.output.contracts.files().count(), 3);
    }

    // After a successful compile, a second preprocessing pass must find no
    // dirty sources (everything is served from the cache).
    #[test]
    fn can_detect_cached_files() {
        let root = PathBuf::from(env!("CARGO_MANIFEST_DIR")).join("test-data/dapp-sample");
        let paths = ProjectPathsConfig::builder().sources(root.join("src")).lib(root.join("lib"));
        let project = TempProject::<MinimalCombinedArtifacts>::new(paths).unwrap();

        let compiled = project.compile().unwrap();
        compiled.assert_success();

        let inner = project.project();
        let compiler = ProjectCompiler::new(inner).unwrap();
        let prep = compiler.preprocess().unwrap();
        assert!(prep.cache.as_cached().unwrap().dirty_source_files.is_empty())
    }

    // Modifying a single file (A.sol) should recompile only that file, with
    // sparse output requested only for the dirty file; the final artifact set
    // is still complete because the rest comes from the cache.
    #[test]
    fn can_recompile_with_optimized_output() {
        let tmp = TempProject::dapptools().unwrap();

        tmp.add_source(
            "A",
            r#"
    pragma solidity ^0.8.10;
    import "./B.sol";
    contract A {}
   "#,
        )
        .unwrap();

        tmp.add_source(
            "B",
            r#"
    pragma solidity ^0.8.10;
    contract B {
        function hello() public {}
    }
    import "./C.sol";
   "#,
        )
        .unwrap();

        tmp.add_source(
            "C",
            r"
    pragma solidity ^0.8.10;
    contract C {
            function hello() public {}
    }
   ",
        )
        .unwrap();
        let compiled = tmp.compile().unwrap();
        compiled.assert_success();

        tmp.artifacts_snapshot().unwrap().assert_artifacts_essentials_present();

        // modify A.sol so only it becomes dirty
        tmp.add_source(
            "A",
            r#"
    pragma solidity ^0.8.10;
    import "./B.sol";
    contract A {
        function testExample() public {}
    }
   "#,
        )
        .unwrap();

        let compiler = ProjectCompiler::new(tmp.project()).unwrap();
        let state = compiler.preprocess().unwrap();
        let sources = state.sources.sources();

        // single solc version for the whole project
        assert_eq!(sources.len(), 1);

        let (_, filtered) = sources.values().next().unwrap();

        // all 3 files are in the set, but only A.sol is dirty
        assert_eq!(filtered.0.len(), 3);
        assert_eq!(filtered.dirty().count(), 1);
        assert!(filtered.dirty_files().next().unwrap().ends_with("A.sol"));

        let state = state.compile().unwrap();
        assert_eq!(state.output.sources.len(), 3);
        // sparse output: only the dirty file gets the full (AST) output
        for (f, source) in state.output.sources.sources() {
            if f.ends_with("A.sol") {
                assert!(source.ast.is_some());
            } else {
                assert!(source.ast.is_none());
            }
        }

        // only the recompiled contract shows up in this run's output
        assert_eq!(state.output.contracts.len(), 1);
        let (a, c) = state.output.contracts_iter().next().unwrap();
        assert_eq!(a, "A");
        assert!(c.abi.is_some() && c.evm.is_some());

        let state = state.write_artifacts().unwrap();
        assert_eq!(state.compiled_artifacts.as_ref().len(), 1);

        let out = state.write_cache().unwrap();

        // combined (compiled + cached) artifacts cover all 3 contracts
        let artifacts: Vec<_> = out.into_artifacts().collect();
        assert_eq!(artifacts.len(), 3);
        for (_, artifact) in artifacts {
            let c = artifact.into_contract_bytecode();
            assert!(c.abi.is_some() && c.bytecode.is_some() && c.deployed_bytecode.is_some());
        }

        tmp.artifacts_snapshot().unwrap().assert_artifacts_essentials_present();
    }

    // Smoke test against a real external project; ignored by default because
    // it depends on a checkout outside this repository.
    #[test]
    #[ignore]
    fn can_compile_real_project() {
        init_tracing();
        let paths = ProjectPathsConfig::builder()
            .root("../../foundry-integration-tests/testdata/solmate")
            .build()
            .unwrap();
        let project = Project::builder().paths(paths).build().unwrap();
        let compiler = ProjectCompiler::new(&project).unwrap();
        let _out = compiler.compile().unwrap();
    }
}