use crate::cache::{FileObjectSource, Storage};
use crate::compiler::preprocessor_cache::preprocessor_cache_entry_hash_key;
use crate::compiler::{
Cacheable, ColorMode, Compilation, CompileCommand, Compiler, CompilerArguments, CompilerHasher,
CompilerKind, HashResult, Language,
};
#[cfg(feature = "dist-client")]
use crate::compiler::{DistPackagers, NoopOutputsRewriter};
use crate::config::PreprocessorCacheModeConfig;
use crate::dist;
#[cfg(feature = "dist-client")]
use crate::dist::pkg;
use crate::mock_command::CommandCreatorSync;
use crate::util::{
Digest, HashToDigest, MetadataCtimeExt, TimeMacroFinder, Timestamp, decode_path, encode_path,
hash_all, strip_basedirs,
};
use async_trait::async_trait;
use fs_err as fs;
use std::borrow::Cow;
use std::collections::{HashMap, HashSet};
use std::ffi::{OsStr, OsString};
use std::fmt;
use std::hash::Hash;
use std::io;
use std::ops::ControlFlow;
use std::path::{Path, PathBuf};
use std::process;
use std::sync::{Arc, LazyLock};
use crate::errors::*;
use super::CacheControl;
use super::preprocessor_cache::PreprocessorCacheEntry;
/// A C-family compiler paired with a digest identifying the exact binary,
/// generic over the family-specific implementation `I`.
#[derive(Clone)]
pub struct CCompiler<I>
where
    I: CCompilerImpl,
{
    /// Path to the compiler executable.
    executable: PathBuf,
    /// Digest of the executable's contents, mixed with the reported version
    /// string when one is available (see `CCompiler::new`).
    executable_digest: String,
    /// The family-specific behavior (gcc, clang, msvc, nvcc, ...).
    compiler: I,
}
/// Per-invocation hashing state: the parsed command line plus the compiler
/// identity captured by [`CCompiler`]. Produced by `Compiler::parse_arguments`
/// and consumed by `CompilerHasher::generate_hash_key`.
#[derive(Debug, Clone)]
pub struct CCompilerHasher<I>
where
    I: CCompilerImpl,
{
    /// The command line, already classified by the family-specific parser.
    parsed_args: ParsedArguments,
    /// Path to the compiler executable.
    executable: PathBuf,
    /// Digest identifying the exact compiler binary (and version).
    executable_digest: String,
    /// The family-specific behavior.
    compiler: I,
}
/// One output artifact of a compilation.
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct ArtifactDescriptor {
    /// Where the artifact is written, as given on the command line.
    pub path: PathBuf,
    /// Whether this output may legitimately be absent after compilation.
    pub optional: bool,
}
/// Everything extracted from a compile command line that sccache needs in
/// order to hash the compilation, run the preprocessor, and locate outputs.
#[allow(dead_code)]
#[derive(Debug, PartialEq, Eq, Clone)]
pub struct ParsedArguments {
    /// The input source file; joined with the build cwd when relative.
    pub input: PathBuf,
    // Whether the input appeared after a `--` separator — inferred from the
    // name; TODO confirm against the argument parsers.
    pub double_dash_input: bool,
    /// Source language of `input`.
    pub language: Language,
    // The flag that requested compilation (e.g. `-c`) — inferred from name.
    pub compilation_flag: OsString,
    /// Makefile-style dependency file to produce, when requested.
    pub depfile: Option<PathBuf>,
    /// Output artifacts keyed by kind; `"obj"` is the primary object file
    /// (see `output_pretty`).
    pub outputs: HashMap<&'static str, ArtifactDescriptor>,
    /// Arguments that control dependency generation.
    pub dependency_args: Vec<OsString>,
    /// Arguments that affect preprocessing; part of the preprocessor cache key.
    pub preprocessor_args: Vec<OsString>,
    /// Arguments shared by preprocessing and compilation; hashed into keys.
    pub common_args: Vec<OsString>,
    /// Architecture-selection arguments; hashed alongside `common_args`.
    pub arch_args: Vec<OsString>,
    /// Arguments deliberately excluded from hashing.
    pub unhashed_args: Vec<OsString>,
    /// Extra files to ship with distributed compiles.
    pub extra_dist_files: Vec<PathBuf>,
    /// Extra files whose contents are mixed into the hash key
    /// (e.g. populated from `SCCACHE_EXTRAFILES`).
    pub extra_hash_files: Vec<PathBuf>,
    // MSVC `/showIncludes` requested — inferred from name; TODO confirm.
    pub msvc_show_includes: bool,
    // Profile-guided instrumentation enabled — inferred from name.
    pub profile_generate: bool,
    /// How the compiler was asked to colorize diagnostics.
    pub color_mode: ColorMode,
    // NOTE(review): presumably disables the rewrite-includes-only
    // optimization for this invocation; verify in the family parsers.
    pub suppress_rewrite_includes_only: bool,
    /// When `Some`, the offending argument that makes preprocessor cache
    /// mode unsafe for this invocation (logged in `generate_hash_key`).
    pub too_hard_for_preprocessor_cache_mode: Option<OsString>,
}
impl ParsedArguments {
    /// Returns a printable name for the primary ("obj") output file, or a
    /// placeholder when no object output was recorded.
    pub fn output_pretty(&self) -> Cow<'_, str> {
        match self.outputs.get("obj").and_then(|obj| obj.path.file_name()) {
            Some(file_name) => file_name.to_string_lossy(),
            None => Cow::Borrowed("Unknown filename"),
        }
    }
}
/// Output of hashing: everything needed to actually run (or distribute) the
/// compilation afterwards.
struct CCompilation<I: CCompilerImpl> {
    parsed_args: ParsedArguments,
    /// `false` when a preprocessor-cache hit skipped preprocessing entirely.
    is_locally_preprocessed: bool,
    /// The preprocessed source, shipped as the input for dist compiles.
    #[cfg(feature = "dist-client")]
    preprocessed_input: Vec<u8>,
    executable: PathBuf,
    compiler: I,
    cwd: PathBuf,
    env_vars: Vec<(OsString, OsString)>,
}
/// The supported C-family compiler implementations.
#[derive(Debug, PartialEq, Eq, Clone)]
pub enum CCompilerKind {
    /// GCC
    Gcc,
    /// Clang
    Clang,
    /// Wind River Diab
    Diab,
    /// Microsoft Visual C++
    Msvc,
    /// NVIDIA nvcc driver
    Nvcc,
    /// NVIDIA CUDA front end (cudafe++)
    CudaFE,
    /// NVIDIA cicc stage
    Cicc,
    /// NVIDIA PTX assembler
    Ptxas,
    /// NVIDIA HPC SDK compilers
    Nvhpc,
    /// TASKING VX toolset
    TaskingVX,
}
/// Interface each C-family compiler implements to supply family-specific
/// behavior (argument parsing, preprocessing, compile-command generation).
#[async_trait]
pub trait CCompilerImpl: Clone + fmt::Debug + Send + Sync + 'static {
    /// Which compiler family this is.
    fn kind(&self) -> CCompilerKind;
    /// Whether the compiler runs in C++ mode; mixed into hash keys.
    fn plusplus(&self) -> bool;
    /// Version string, when known; mixed into the executable digest so
    /// distinct versions never share cache entries.
    fn version(&self) -> Option<String>;
    /// Classifies a command line: cacheable (with parsed details), not
    /// cacheable (with a reason), or not a compilation at all.
    fn parse_arguments(
        &self,
        arguments: &[OsString],
        cwd: &Path,
        env_vars: &[(OsString, OsString)],
    ) -> CompilerArguments<ParsedArguments>;
    /// Runs the preprocessor for `parsed_args` and returns its raw output.
    #[allow(clippy::too_many_arguments)]
    async fn preprocess<T>(
        &self,
        creator: &T,
        executable: &Path,
        parsed_args: &ParsedArguments,
        cwd: &Path,
        env_vars: &[(OsString, OsString)],
        may_dist: bool,
        rewrite_includes_only: bool,
        preprocessor_cache_mode: bool,
    ) -> Result<process::Output>
    where
        T: CommandCreatorSync;
    /// Builds the local compile command, an optional dist equivalent, and
    /// whether the result may be cached.
    fn generate_compile_commands<T>(
        &self,
        path_transformer: &mut dist::PathTransformer,
        executable: &Path,
        parsed_args: &ParsedArguments,
        cwd: &Path,
        env_vars: &[(OsString, OsString)],
        rewrite_includes_only: bool,
    ) -> Result<(
        Box<dyn CompileCommand<T>>,
        Option<dist::CompileCommand>,
        Cacheable,
    )>
    where
        T: CommandCreatorSync;
}
impl<I> CCompiler<I>
where
    I: CCompilerImpl,
{
    /// Creates a `CCompiler`, hashing the executable's contents (and, when
    /// available, its version string) to form `executable_digest`.
    pub async fn new(
        compiler: I,
        executable: PathBuf,
        pool: &tokio::runtime::Handle,
    ) -> Result<CCompiler<I>> {
        let digest = Digest::file(executable.clone(), pool).await?;
        Ok(CCompiler {
            executable,
            executable_digest: {
                // Mix the version in so byte-identical wrappers that report
                // different versions still hash differently.
                if let Some(version) = compiler.version() {
                    let mut m = Digest::new();
                    m.update(digest.as_bytes());
                    m.update(version.as_bytes());
                    m.finish()
                } else {
                    digest
                }
            },
            compiler,
        })
    }

    /// Extracts the value of a `--flag=value` style argument from the parsed
    /// common arguments, returned as a path.
    fn extract_rocm_arg(args: &ParsedArguments, flag: &str) -> Option<PathBuf> {
        args.common_args.iter().find_map(|arg| match arg.to_str() {
            // BUG FIX: strip the flag prefix with `flag.len()`, not
            // `arg.len()` (the whole argument), which always yielded an
            // empty path for `--rocm-path=`/`--hip-device-lib-path=`.
            Some(sarg) if sarg.starts_with(flag) => {
                Some(PathBuf::from(sarg[flag.len()..].to_string()))
            }
            _ => None,
        })
    }

    /// Looks up environment variable `name` and returns its value as a path.
    fn extract_rocm_env(env_vars: &[(OsString, OsString)], name: &str) -> Option<PathBuf> {
        env_vars.iter().find_map(|(k, v)| match v.to_str() {
            Some(path) if k == name => Some(PathBuf::from(path.to_string())),
            _ => None,
        })
    }

    /// Finds the HIP device bitcode libraries (`*.bc`) that implicitly enter
    /// a HIP compilation, so their contents can be hashed.
    ///
    /// Search-location precedence: `--hip-device-lib-path=` argument, then
    /// `HIP_DEVICE_LIB_PATH` env var, then `--rocm-path=` argument, then
    /// `ROCM_PATH` env var (both with `amdgcn/bitcode` appended), then the
    /// default `/opt/rocm/amdgcn/bitcode`. Returns a sorted list; empty when
    /// the directory cannot be read.
    fn search_hip_device_libs(
        args: &ParsedArguments,
        env_vars: &[(OsString, OsString)],
    ) -> Vec<PathBuf> {
        let rocm_path_arg: Option<PathBuf> = Self::extract_rocm_arg(args, "--rocm-path=");
        let hip_device_lib_path_arg: Option<PathBuf> =
            Self::extract_rocm_arg(args, "--hip-device-lib-path=");
        let rocm_path_env: Option<PathBuf> = Self::extract_rocm_env(env_vars, "ROCM_PATH");
        let hip_device_lib_path_env: Option<PathBuf> =
            Self::extract_rocm_env(env_vars, "HIP_DEVICE_LIB_PATH");
        let hip_device_lib_path: PathBuf = hip_device_lib_path_arg
            .or(hip_device_lib_path_env)
            .or(rocm_path_arg.map(|path| path.join("amdgcn").join("bitcode")))
            .or(rocm_path_env.map(|path| path.join("amdgcn").join("bitcode")))
            .unwrap_or(PathBuf::from("/opt/rocm/amdgcn/bitcode"));
        hip_device_lib_path
            .read_dir()
            .ok()
            .map(|f| {
                let mut device_libs = f
                    .flatten()
                    .filter(|f| f.path().extension().is_some_and(|ext| ext == "bc"))
                    .map(|f| f.path())
                    .collect::<Vec<_>>();
                // Sort for a deterministic hash input order.
                device_libs.sort_unstable();
                device_libs
            })
            .unwrap_or_default()
    }
}
impl<T: CommandCreatorSync, I: CCompilerImpl> Compiler<T> for CCompiler<I> {
    fn kind(&self) -> CompilerKind {
        CompilerKind::C(self.compiler.kind())
    }
    #[cfg(feature = "dist-client")]
    fn get_toolchain_packager(&self) -> Box<dyn pkg::ToolchainPackager> {
        Box::new(CToolchainPackager {
            executable: self.executable.clone(),
            kind: self.compiler.kind(),
        })
    }
    /// Delegates parsing to the family implementation, then augments the
    /// result with extra files that must participate in the hash.
    fn parse_arguments(
        &self,
        arguments: &[OsString],
        cwd: &Path,
        env_vars: &[(OsString, OsString)],
    ) -> CompilerArguments<Box<dyn CompilerHasher<T> + 'static>> {
        match self.compiler.parse_arguments(arguments, cwd, env_vars) {
            CompilerArguments::Ok(mut args) => {
                // SCCACHE_EXTRAFILES is a PATH-style list of files whose
                // contents must be hashed into the key.
                let extra_files = env_vars
                    .iter()
                    .filter(|(k, _)| k.as_os_str() == OsStr::new("SCCACHE_EXTRAFILES"))
                    .flat_map(|(_, v)| std::env::split_paths(v));
                args.extra_hash_files.extend(extra_files);
                // HIP pulls in device bitcode libraries implicitly; hash them.
                if args.language == Language::Hip {
                    args.extra_hash_files
                        .extend(Self::search_hip_device_libs(&args, env_vars));
                }
                let hasher = CCompilerHasher {
                    parsed_args: args,
                    executable: self.executable.clone(),
                    executable_digest: self.executable_digest.clone(),
                    compiler: self.compiler.clone(),
                };
                CompilerArguments::Ok(Box::new(hasher))
            }
            CompilerArguments::CannotCache(why, extra_info) => {
                CompilerArguments::CannotCache(why, extra_info)
            }
            CompilerArguments::NotCompilation => CompilerArguments::NotCompilation,
        }
    }
    fn box_clone(&self) -> Box<dyn Compiler<T>> {
        Box::new(self.clone())
    }
}
#[async_trait]
impl<T, I> CompilerHasher<T> for CCompilerHasher<I>
where
    T: CommandCreatorSync,
    I: CCompilerImpl,
{
    /// Computes the cache key for one compilation.
    ///
    /// Two paths exist:
    /// 1. Preprocessor cache mode ("direct mode"): look up a previously
    ///    recorded manifest of include-file digests and, on a hit, return the
    ///    stored key without running the preprocessor at all.
    /// 2. Otherwise run the preprocessor and hash its output together with
    ///    the compiler digest, arguments, language, an env-var allowlist, and
    ///    any extra files.
    async fn generate_hash_key(
        &mut self,
        creator: &T,
        cwd: PathBuf,
        env_vars: Vec<(OsString, OsString)>,
        may_dist: bool,
        pool: &tokio::runtime::Handle,
        rewrite_includes_only: bool,
        storage: Arc<dyn Storage>,
        cache_control: CacheControl,
    ) -> Result<HashResult<T>> {
        // Needed later to reject include files modified mid-compilation.
        let start_of_compilation = std::time::SystemTime::now();
        let extra_hashes = hash_all(&self.parsed_args.extra_hash_files, &pool.clone()).await?;
        let mut preprocessor_and_arch_args = self.parsed_args.preprocessor_args.clone();
        preprocessor_and_arch_args.extend(self.parsed_args.arch_args.clone());
        preprocessor_and_arch_args.extend(self.parsed_args.common_args.clone());
        let absolute_input_path: Cow<'_, _> = if self.parsed_args.input.is_absolute() {
            Cow::Borrowed(&self.parsed_args.input)
        } else {
            Cow::Owned(cwd.join(&self.parsed_args.input))
        };
        let preprocessor_cache_mode_config = storage.preprocessor_cache_mode_config();
        // The argument parser may have flagged this invocation as unsafe for
        // preprocessor cache mode; remember why, for the log.
        let too_hard_for_preprocessor_cache_mode = self
            .parsed_args
            .too_hard_for_preprocessor_cache_mode
            .is_some();
        if let Some(arg) = &self.parsed_args.too_hard_for_preprocessor_cache_mode {
            debug!(
                "generate_hash_key: Cannot use preprocessor cache because of {:?}",
                arg
            );
        }
        let needs_preprocessing = self.parsed_args.language.needs_c_preprocessing();
        let use_preprocessor_cache_mode = if needs_preprocessing {
            let can_use_preprocessor_cache_mode = preprocessor_cache_mode_config
                .use_preprocessor_cache_mode
                && !too_hard_for_preprocessor_cache_mode;
            let mut use_preprocessor_cache_mode = can_use_preprocessor_cache_mode;
            // SCCACHE_DIRECT can only further disable the mode; it never
            // forces it on when config or arguments forbid it.
            for (key, val) in env_vars.iter() {
                if key == "SCCACHE_DIRECT" {
                    if let Some(val) = val.to_str() {
                        use_preprocessor_cache_mode = match val.to_lowercase().as_str() {
                            "false" | "off" | "0" => false,
                            _ => can_use_preprocessor_cache_mode,
                        };
                    }
                    break;
                }
            }
            if can_use_preprocessor_cache_mode && !use_preprocessor_cache_mode {
                debug!(
                    "generate_hash_key: Disabling preprocessor cache because SCCACHE_DIRECT=false"
                );
            }
            use_preprocessor_cache_mode
        } else {
            debug!(
                "generate_hash_key: Disabling preprocessor cache because {} language doesn't need C preprocessing",
                self.parsed_args.language.as_str()
            );
            false
        };
        // Key under which the include-file manifest is stored; `mut` because
        // the mode can still be abandoned below.
        let mut preprocessor_key = if use_preprocessor_cache_mode {
            preprocessor_cache_entry_hash_key(
                &self.executable_digest,
                self.parsed_args.language,
                &preprocessor_and_arch_args,
                &extra_hashes,
                &env_vars,
                &absolute_input_path,
                self.compiler.plusplus(),
                preprocessor_cache_mode_config,
                storage.basedirs(),
            )?
        } else {
            None
        };
        let (preprocessor_output, include_files) = if needs_preprocessing {
            if let Some(preprocessor_key) = &preprocessor_key {
                // Skip the lookup entirely when the user forced recaching.
                if cache_control == CacheControl::Default {
                    if let Some(mut seekable) = storage
                        .get_preprocessor_cache_entry(preprocessor_key)
                        .await?
                    {
                        let mut buf = vec![];
                        seekable.read_to_end(&mut buf)?;
                        let mut preprocessor_cache_entry = PreprocessorCacheEntry::read(&buf)?;
                        let mut updated = false;
                        // Re-validate the recorded include digests; `updated`
                        // is set when the entry's bookkeeping changed and
                        // must be written back.
                        let hit = preprocessor_cache_entry
                            .lookup_result_digest(preprocessor_cache_mode_config, &mut updated);
                        let mut update_failed = false;
                        if updated {
                            debug!(
                                "Preprocessor cache updated because of time macros: {preprocessor_key}"
                            );
                            if let Err(e) = storage
                                .put_preprocessor_cache_entry(
                                    preprocessor_key,
                                    preprocessor_cache_entry,
                                )
                                .await
                            {
                                debug!("Failed to update preprocessor cache: {}", e);
                                update_failed = true;
                            }
                        }
                        if !update_failed {
                            if let Some(key) = hit {
                                debug!("Preprocessor cache hit: {preprocessor_key}");
                                let weak_toolchain_key = format!(
                                    "{}-{}",
                                    self.executable.to_string_lossy(),
                                    self.executable_digest
                                );
                                // Direct hit: preprocessing was skipped, so
                                // the dist input is deliberately poisoned —
                                // it must never be compiled remotely.
                                return Ok(HashResult {
                                    key,
                                    compilation: Box::new(CCompilation {
                                        parsed_args: self.parsed_args.clone(),
                                        is_locally_preprocessed: false,
                                        #[cfg(feature = "dist-client")]
                                        preprocessed_input: PREPROCESSING_SKIPPED_COMPILE_POISON
                                            .to_vec(),
                                        executable: self.executable.clone(),
                                        compiler: self.compiler.to_owned(),
                                        cwd: cwd.clone(),
                                        env_vars: env_vars.clone(),
                                    }),
                                    weak_toolchain_key,
                                });
                            } else {
                                debug!("Preprocessor cache miss: {preprocessor_key}");
                            }
                        }
                    }
                }
            }
            // No usable cache entry: actually run the preprocessor.
            let result = self
                .compiler
                .preprocess(
                    creator,
                    &self.executable,
                    &self.parsed_args,
                    &cwd,
                    &env_vars,
                    may_dist,
                    rewrite_includes_only,
                    use_preprocessor_cache_mode,
                )
                .await;
            let out_pretty = self.parsed_args.output_pretty().into_owned();
            let result = result.map_err(|e| {
                debug!("[{}]: preprocessor failed: {:?}", out_pretty, e);
                e
            });
            let outputs = self.parsed_args.outputs.clone();
            let args_cwd = cwd.clone();
            let mut preprocessor_result = result.or_else(move |err| {
                // Best-effort removal of partial outputs so a failed run
                // doesn't leave stale artifacts behind.
                debug!("removing files {:?}", &outputs);
                let v: std::result::Result<(), std::io::Error> =
                    outputs.values().try_for_each(|output| {
                        let mut path = args_cwd.clone();
                        path.push(&output.path);
                        match fs::metadata(&path) {
                            Ok(_) => fs::remove_file(&path),
                            _ => Ok(()),
                        }
                    });
                if v.is_err() {
                    warn!("Could not remove files after preprocessing failed!");
                }
                match err.downcast::<ProcessError>() {
                    Ok(ProcessError(output)) => {
                        debug!(
                            "[{}]: preprocessor returned error status {:?}",
                            out_pretty,
                            output.status.code()
                        );
                        // Drop stdout (it can be huge); presumably the
                        // useful diagnostics live in stderr.
                        bail!(ProcessError(process::Output {
                            stdout: vec!(),
                            ..output
                        }))
                    }
                    Err(err) => Err(err),
                }
            })?;
            let mut include_files = HashMap::new();
            if preprocessor_key.is_some() {
                // Extract the include manifest from the preprocessor output;
                // abandon preprocessor cache mode if it isn't trackable.
                if !process_preprocessed_file(
                    &absolute_input_path,
                    &cwd,
                    &mut preprocessor_result.stdout,
                    &mut include_files,
                    preprocessor_cache_mode_config,
                    start_of_compilation,
                    StandardFsAbstraction,
                )? {
                    debug!("Disabling preprocessor cache mode");
                    preprocessor_key = None;
                }
            }
            trace!(
                "[{}]: Preprocessor output is {} bytes",
                self.parsed_args.output_pretty(),
                preprocessor_result.stdout.len()
            );
            (preprocessor_result.stdout, include_files)
        } else {
            // Languages that skip the C preprocessor hash the raw input file.
            (
                std::fs::read(absolute_input_path.as_path())?,
                HashMap::new(),
            )
        };
        let mut common_and_arch_args = self.parsed_args.common_args.clone();
        common_and_arch_args.extend(self.parsed_args.arch_args.clone());
        let key = HashKeyParams::new(
            &self.executable_digest,
            self.parsed_args.language,
            &common_and_arch_args,
            &preprocessor_output,
        )
        .with_extra_hashes(&extra_hashes)
        .with_env_vars(&env_vars)
        .with_plusplus(self.compiler.plusplus())
        .with_basedirs(storage.basedirs())
        .compute();
        // Record the include manifest so future builds can hit without
        // running the preprocessor at all.
        if let Some(preprocessor_key) = preprocessor_key {
            if !include_files.is_empty() {
                let mut preprocessor_cache_entry = PreprocessorCacheEntry::new();
                let mut files: Vec<_> = include_files
                    .into_iter()
                    .map(|(path, digest)| (digest, path))
                    .collect();
                // Sort by path for a deterministic entry.
                files.sort_unstable_by(|a, b| a.1.cmp(&b.1));
                preprocessor_cache_entry.add_result(start_of_compilation, &key, files);
                if let Err(e) = storage
                    .put_preprocessor_cache_entry(&preprocessor_key, preprocessor_cache_entry)
                    .await
                {
                    debug!("Failed to update preprocessor cache: {}", e);
                }
            }
        }
        let weak_toolchain_key = format!(
            "{}-{}",
            self.executable.to_string_lossy(),
            self.executable_digest
        );
        Ok(HashResult {
            key,
            compilation: Box::new(CCompilation {
                parsed_args: self.parsed_args.clone(),
                is_locally_preprocessed: true,
                #[cfg(feature = "dist-client")]
                preprocessed_input: preprocessor_output,
                executable: self.executable.clone(),
                compiler: self.compiler.clone(),
                cwd,
                env_vars,
            }),
            weak_toolchain_key,
        })
    }
    fn color_mode(&self) -> ColorMode {
        self.parsed_args.color_mode
    }
    fn output_pretty(&self) -> Cow<'_, str> {
        self.parsed_args.output_pretty()
    }
    fn box_clone(&self) -> Box<dyn CompilerHasher<T>> {
        Box::new((*self).clone())
    }
    fn language(&self) -> Language {
        self.parsed_args.language
    }
}
/// Marker appearing in preprocessor output when GCC preprocesses a
/// precompiled header (`#pragma GCC pch_preprocess ...`, minus the `#`).
const PRAGMA_GCC_PCH_PREPROCESS: &[u8] = b"pragma GCC pch_preprocess";
/// Bogus `<command-line>` linemarkers that some preprocessors emit; matched
/// and special-cased in `process_preprocessor_line` so they don't perturb
/// the hash.
const HASH_31_COMMAND_LINE_NEWLINE: &[u8] = b"# 31 \"<command-line>\"\n";
const HASH_32_COMMAND_LINE_2_NEWLINE: &[u8] = b"# 32 \"<command-line>\" 2\n";
/// Assembler directive embedding an arbitrary file at assembly time; that
/// dependency is invisible to the preprocessor, so seeing it disables
/// preprocessor cache mode.
const INCBIN_DIRECTIVE: &[u8] = b".incbin";
/// Scans preprocessor output (`bytes`), hashing the code while extracting the
/// set of included files from `#` linemarkers into `included_files`.
///
/// Returns `Ok(false)` when something in the output makes preprocessor cache
/// mode unsafe (e.g. `.incbin`, an unreadable or too-new header), in which
/// case the caller falls back to hashing the whole preprocessor output.
fn process_preprocessed_file(
    input_file: &Path,
    cwd: &Path,
    bytes: &mut [u8],
    included_files: &mut HashMap<PathBuf, String>,
    config: PreprocessorCacheModeConfig,
    time_of_compilation: std::time::SystemTime,
    fs_impl: impl PreprocessorFSAbstraction,
) -> Result<bool> {
    let mut start = 0;
    // Start of the region not yet fed into `digest`.
    let mut hash_start = 0;
    let total_len = bytes.len();
    let mut digest = Digest::new();
    // Cache of textually-normalized include paths; `None` means the path was
    // already in normal form.
    let mut normalized_include_paths: HashMap<Vec<u8>, Option<Vec<u8>>> = HashMap::new();
    // `- 7` keeps the fixed lookaheads below (up to `slice[1..6]`) in bounds.
    while start < total_len.saturating_sub(7) {
        let mut slice = &bytes[start..];
        if slice[0] == b'#'
            // GCC-style "# 123 ..." linemarker, GCC PCH pragma, or "#line".
            && ((slice[1] == b' ' && slice[2] >= b'0' && slice[2] <= b'9')
                || slice[1..].starts_with(PRAGMA_GCC_PCH_PREPROCESS)
                // BUG FIX: `b"line "` is five bytes, so the slice must be
                // `1..6`; the previous `1..5` compared four bytes against a
                // five-byte literal, which is always false, silently
                // ignoring every `#line` marker.
                || (&slice[1..6] == b"line "))
            && (start == 0 || bytes[start - 1] == b'\n')
        {
            match process_preprocessor_line(
                input_file,
                cwd,
                included_files,
                config,
                time_of_compilation,
                bytes,
                start,
                hash_start,
                &mut digest,
                total_len,
                &mut normalized_include_paths,
                &fs_impl,
            )? {
                ControlFlow::Continue((s, h)) => {
                    start = s;
                    hash_start = h;
                }
                ControlFlow::Break((s, h, continue_preprocessor_cache_mode)) => {
                    if !continue_preprocessor_cache_mode {
                        return Ok(false);
                    }
                    start = s;
                    hash_start = h;
                    continue;
                }
            }
        } else if slice
            .strip_prefix(INCBIN_DIRECTIVE)
            .filter(|slice| {
                slice.starts_with(b"\"") || slice.starts_with(b" \"") || slice.starts_with(b" \\\"")
            })
            .is_some()
        {
            // `.incbin` embeds a file the preprocessor never sees; we can't
            // track that dependency, so give up on preprocessor cache mode.
            debug!("Found potential unsupported .inc bin directive in source code");
            return Ok(false);
        } else if slice.starts_with(b"___________") && (start == 0 || bytes[start - 1] == b'\n') {
            // A run of underscores at line start: hash up to here, then skip
            // the rest of the line. NOTE(review): presumably excludes
            // volatile banner-like expansions from the hash — confirm
            // against the upstream (ccache-derived) rationale.
            digest.update(&bytes[hash_start..start]);
            while start < total_len && slice[0] != b'\n' {
                start += 1;
                if start < total_len {
                    slice = &bytes[start..];
                }
            }
            slice = &bytes[start..];
            if slice[0] == b'\n' {
                start += 1;
            }
            hash_start = start;
            continue;
        } else {
            start += 1;
        }
    }
    // Hash the remaining tail of the output.
    digest.update(&bytes[hash_start..]);
    Ok(true)
}
/// Outcome of [`process_preprocessor_line`]: `Continue((start, hash_start))`
/// to keep scanning, or `Break((start, hash_start, keep_going))`, where
/// `keep_going == false` aborts preprocessor cache mode entirely.
type PreprocessedLineAction = ControlFlow<(usize, usize, bool), (usize, usize)>;

/// Parses a single `#` linemarker line of preprocessor output, hashing the
/// surrounding text and recording the referenced include file.
#[allow(clippy::too_many_arguments)]
fn process_preprocessor_line(
    input_file: &Path,
    cwd: &Path,
    included_files: &mut HashMap<PathBuf, String>,
    config: PreprocessorCacheModeConfig,
    time_of_compilation: std::time::SystemTime,
    bytes: &mut [u8],
    mut start: usize,
    mut hash_start: usize,
    digest: &mut Digest,
    total_len: usize,
    normalized_include_paths: &mut HashMap<Vec<u8>, Option<Vec<u8>>>,
    fs_impl: &impl PreprocessorFSAbstraction,
) -> Result<PreprocessedLineAction> {
    let mut slice = &bytes[start..];
    // Special handling for bogus "# 31"/"# 32 <command-line>" markers.
    if slice.get(2) == Some(&b'3') {
        if slice.starts_with(HASH_31_COMMAND_LINE_NEWLINE) {
            digest.update(&bytes[hash_start..start]);
            // BUG FIX: skip the whole bogus line. The loop previously tested
            // `start < hash_start`, which is never true here (`hash_start`
            // always trails `start`), and it never re-sliced, so the marker
            // line was neither skipped nor kept out of the hash.
            while start < total_len && slice[0] != b'\n' {
                start += 1;
                if start < total_len {
                    slice = &bytes[start..];
                }
            }
            // Step past the newline itself.
            start += 1;
            hash_start = start;
            return Ok(ControlFlow::Break((start, hash_start, true)));
        } else if slice.starts_with(HASH_32_COMMAND_LINE_2_NEWLINE) {
            digest.update(&bytes[hash_start..start]);
            start += 1;
            // Patch the bogus line number in place before it gets hashed.
            bytes[start..=start + 2].copy_from_slice(b"# 1");
            hash_start = start;
            slice = &bytes[start..];
        }
    }
    // Advance to the opening quote of the file name, or to end of line.
    while start < total_len && slice[0] != b'"' && slice[0] != b'\n' {
        start += 1;
        if start < total_len {
            slice = &bytes[start..];
        }
    }
    slice = &bytes[start..];
    if start < total_len && slice[0] == b'\n' {
        // Linemarker without a file name: nothing to record.
        return Ok(ControlFlow::Break((start, hash_start, true)));
    }
    start += 1;
    if start >= total_len {
        bail!("Failed to parse included file path");
    }
    // Hash everything up to and including the opening quote.
    digest.update(&bytes[hash_start..start]);
    hash_start = start;
    slice = &bytes[start..];
    // The bytes between the quotes are the include path.
    while start < total_len && slice[0] != b'"' {
        start += 1;
        if start < total_len {
            slice = &bytes[start..];
        }
    }
    if start == hash_start {
        // Empty file name: ignore.
        return Ok(ControlFlow::Break((start, hash_start, true)));
    }
    // A "3" flag after the file name marks a system header (GCC linemarker
    // flag semantics).
    let mut system = false;
    let mut pointer = start + 1;
    while pointer < total_len && bytes[pointer] != b'\n' {
        if bytes[pointer] == b'3' {
            system = true;
        }
        pointer += 1;
    }
    let include_path = &bytes[hash_start..start];
    // Normalize each distinct path textually at most once, caching results;
    // `None` in the cache means the path was already normal.
    let include_path: &[u8] = if let Some(opt) = normalized_include_paths.get(include_path) {
        match opt {
            Some(normalized) => normalized,
            None => include_path,
        }
    } else {
        let path_buf = decode_path(include_path)?;
        let normalized = normalize_path(&path_buf);
        if normalized == path_buf {
            normalized_include_paths.insert(include_path.to_owned(), None);
            include_path
        } else {
            let mut encoded = Vec::with_capacity(include_path.len());
            encode_path(&mut encoded, &normalized)?;
            normalized_include_paths.insert(include_path.to_owned(), Some(encoded));
            normalized_include_paths
                .get(include_path)
                .unwrap()
                .as_ref()
                .unwrap()
        }
    };
    if !remember_include_file(
        include_path,
        input_file,
        cwd,
        included_files,
        digest,
        system,
        config,
        time_of_compilation,
        fs_impl,
    )? {
        // Untrackable include: abort preprocessor cache mode.
        return Ok(ControlFlow::Break((start, hash_start, false)));
    }
    hash_start = start;
    Ok(ControlFlow::Continue((start, hash_start)))
}
/// Lexically normalizes a path: drops `.` components and resolves `..` by
/// popping the previous component, keeping any prefix/root intact.
///
/// Purely textual — never touches the filesystem and does not resolve
/// symlinks. A leading `..` with nothing to pop is silently dropped.
pub fn normalize_path(path: &Path) -> PathBuf {
    use std::path::Component;
    let mut parts = path.components().peekable();
    // A Windows prefix (drive letter, UNC share, ...) must seed the result so
    // subsequent components attach to it.
    let mut normalized = match parts.peek().copied() {
        Some(prefix @ Component::Prefix(..)) => {
            parts.next();
            PathBuf::from(prefix.as_os_str())
        }
        _ => PathBuf::new(),
    };
    for part in parts {
        match part {
            // The prefix, if any, was consumed above.
            Component::Prefix(..) => unreachable!(),
            Component::RootDir => normalized.push(part.as_os_str()),
            Component::CurDir => {}
            Component::ParentDir => {
                // `..` cancels the previous component (no-op at the top).
                normalized.pop();
            }
            Component::Normal(segment) => normalized.push(segment),
        }
    }
    normalized
}
/// The subset of file metadata consulted when validating include files.
#[derive(Debug, Eq, PartialEq, Clone)]
struct PreprocessorFileMetadata {
    is_dir: bool,
    is_file: bool,
    /// Last-modification time, when the platform reports one.
    modified: Option<Timestamp>,
    /// ctime or creation time, per `MetadataCtimeExt`.
    ctime_or_creation: Option<Timestamp>,
}
impl From<std::fs::Metadata> for PreprocessorFileMetadata {
fn from(meta: std::fs::Metadata) -> Self {
Self {
is_dir: meta.is_dir(),
is_file: meta.is_file(),
modified: meta.modified().ok().map(Into::into),
ctime_or_creation: meta.ctime_or_creation().ok(),
}
}
}
/// Minimal filesystem interface used while parsing preprocessed output,
/// allowing alternative implementations (e.g. in-memory ones for tests).
trait PreprocessorFSAbstraction {
    /// `std::fs::metadata`, reduced to the fields we consume.
    fn metadata(&self, path: impl AsRef<Path>) -> io::Result<PreprocessorFileMetadata> {
        std::fs::metadata(path).map(Into::into)
    }
    /// Opens the file at `path` for reading.
    fn open(&self, path: impl AsRef<Path>) -> io::Result<Box<dyn std::io::Read>> {
        Ok(Box::new(std::fs::File::open(path)?))
    }
}
/// Production [`PreprocessorFSAbstraction`] backed by the real filesystem;
/// relies entirely on the trait's default methods.
struct StandardFsAbstraction;
impl PreprocessorFSAbstraction for StandardFsAbstraction {}
/// Hashes an include path discovered in preprocessor output and, when it is a
/// real, readable header, records its content digest in `included_files`.
///
/// Returns `Ok(false)` when the include makes preprocessor cache mode unsafe
/// for this compilation (unreadable, non-regular, modified too recently, or
/// containing `__TIME__`), `Ok(true)` otherwise.
#[allow(clippy::too_many_arguments)]
fn remember_include_file(
    mut path: &[u8],
    input_file: &Path,
    cwd: &Path,
    included_files: &mut HashMap<PathBuf, String>,
    digest: &mut Digest,
    system: bool,
    config: PreprocessorCacheModeConfig,
    time_of_compilation: std::time::SystemTime,
    fs_impl: &impl PreprocessorFSAbstraction,
) -> Result<bool> {
    // Pseudo-files such as <built-in> / <command-line>: hash the name only.
    if path.len() >= 2 && path[0] == b'<' && path[path.len() - 1] == b'>' {
        digest.update(path);
        return Ok(true);
    }
    // Optionally trust system headers without tracking their contents.
    if system && config.skip_system_headers {
        digest.update(path);
        return Ok(true);
    }
    let original_path = path;
    // Strip a leading "./" (or ".\" on Windows) so paths compare consistently.
    #[cfg(windows)]
    {
        if path.starts_with(br".\") || path.starts_with(b"./") {
            path = &path[2..];
        }
    }
    #[cfg(not(windows))]
    {
        if path.starts_with(b"./") {
            path = &path[2..];
        }
    }
    let mut path = decode_path(path).context("failed to decode path")?;
    if path.is_relative() {
        path = cwd.join(path);
    }
    // Skip hashing the working directory itself unless configured to.
    if path != cwd || config.hash_working_directory {
        digest.update(original_path);
    }
    // Each header only needs to be processed once per compilation.
    if included_files.contains_key(&path) {
        return Ok(true);
    }
    // The input file itself is hashed separately, not as an include.
    if path == input_file {
        return Ok(true);
    }
    let meta = match fs_impl.metadata(&path) {
        Ok(meta) => meta,
        Err(e) => {
            debug!("Failed to stat include file {}: {}", path.display(), e);
            return Ok(false);
        }
    };
    if meta.is_dir {
        // Directories occasionally appear in linemarkers; nothing to hash.
        return Ok(true);
    }
    if !meta.is_file {
        debug!("Non-regular include file {}", path.display());
        return Ok(false);
    }
    // A header touched at/after compilation start could change under us.
    if include_is_too_new(&path, &meta, time_of_compilation) {
        return Ok(false);
    }
    let file = match fs_impl.open(&path) {
        Ok(file) => file,
        Err(e) => {
            debug!("Failed to open header file {}: {}", path.display(), e);
            return Ok(false);
        }
    };
    // Digest the header; unless configured to ignore them, also scan for
    // time macros while reading.
    let (file_digest, finder) = if config.ignore_time_macros {
        match Digest::reader_sync(file) {
            Ok(file_digest) => (file_digest, TimeMacroFinder::new()),
            Err(e) => {
                debug!("Failed to read header file {}: {}", path.display(), e);
                return Ok(false);
            }
        }
    } else {
        match Digest::reader_sync_time_macros(file) {
            Ok((file_digest, finder)) => (file_digest, finder),
            Err(e) => {
                debug!("Failed to read header file {}: {}", path.display(), e);
                return Ok(false);
            }
        }
    };
    if finder.found_time() {
        // __TIME__ expands differently on every run; not cacheable.
        debug!("Found __TIME__ in header file {}", path.display());
        return Ok(false);
    }
    included_files.insert(path, file_digest);
    Ok(true)
}
/// Returns `true` when the include file's mtime or ctime is at or after the
/// start of compilation — caching such a file would race with whoever is
/// still writing it.
fn include_is_too_new(
    path: &Path,
    meta: &PreprocessorFileMetadata,
    time_of_compilation: std::time::SystemTime,
) -> bool {
    // Check both timestamps; either one being current-or-newer disqualifies.
    let candidates = [meta.modified.clone(), meta.ctime_or_creation.clone()];
    for stamp in candidates.into_iter().flatten() {
        if stamp >= time_of_compilation.into() {
            debug!("Include file {} is too new", path.display());
            return true;
        }
    }
    false
}
#[cfg(feature = "dist-client")]
// Deliberately invalid C/C++ used as the "preprocessed input" when a
// preprocessor-cache hit skipped preprocessing: if this ever reaches a dist
// compile, the build fails loudly instead of compiling wrong input.
const PREPROCESSING_SKIPPED_COMPILE_POISON: &[u8] = b"([{SCCACHE -*-* INVALID_C_CPP_CODE([{\"";
impl<T: CommandCreatorSync, I: CCompilerImpl> Compilation<T> for CCompilation<I> {
    /// Delegates compile-command construction to the family-specific impl.
    fn generate_compile_commands(
        &self,
        path_transformer: &mut dist::PathTransformer,
        rewrite_includes_only: bool,
    ) -> Result<(
        Box<dyn CompileCommand<T>>,
        Option<dist::CompileCommand>,
        Cacheable,
    )> {
        self.compiler.generate_compile_commands(
            path_transformer,
            &self.executable,
            &self.parsed_args,
            &self.cwd,
            &self.env_vars,
            rewrite_includes_only,
        )
    }
    /// Builds the packagers that ship inputs and toolchain to a dist server.
    #[cfg(feature = "dist-client")]
    fn into_dist_packagers(
        self: Box<Self>,
        path_transformer: dist::PathTransformer,
    ) -> Result<DistPackagers> {
        let CCompilation {
            parsed_args,
            cwd,
            preprocessed_input,
            executable,
            compiler,
            ..
        } = *self;
        trace!("Dist inputs: {:?}", parsed_args.input);
        let input_path = cwd.join(&parsed_args.input);
        let inputs_packager = Box::new(CInputsPackager {
            input_path,
            preprocessed_input,
            path_transformer,
            extra_dist_files: parsed_args.extra_dist_files,
            extra_hash_files: parsed_args.extra_hash_files,
        });
        let toolchain_packager = Box::new(CToolchainPackager {
            executable,
            kind: compiler.kind(),
        });
        // C outputs need no post-compile path rewriting.
        let outputs_rewriter = Box::new(NoopOutputsRewriter);
        Ok((inputs_packager, toolchain_packager, outputs_rewriter))
    }
    fn is_locally_preprocessed(&self) -> bool {
        self.is_locally_preprocessed
    }
    /// Iterates over the artifacts this compilation produces.
    fn outputs<'a>(&'a self) -> Box<dyn Iterator<Item = FileObjectSource> + 'a> {
        Box::new(
            self.parsed_args
                .outputs
                .iter()
                .map(|(k, output)| FileObjectSource {
                    key: k.to_string(),
                    path: output.path.clone(),
                    optional: output.optional,
                }),
        )
    }
}
/// Packages the inputs of one compilation for a dist server: the
/// preprocessed source plus any extra dist/hash files.
#[cfg(feature = "dist-client")]
struct CInputsPackager {
    /// The original input's path; the preprocessed bytes are archived under
    /// this name.
    input_path: PathBuf,
    path_transformer: dist::PathTransformer,
    preprocessed_input: Vec<u8>,
    extra_dist_files: Vec<PathBuf>,
    extra_hash_files: Vec<PathBuf>,
}
#[cfg(feature = "dist-client")]
impl pkg::InputsPackager for CInputsPackager {
    /// Streams a tar archive of everything a dist server needs as input,
    /// returning the (possibly extended) path transformer.
    fn write_inputs(self: Box<Self>, wtr: &mut dyn io::Write) -> Result<dist::PathTransformer> {
        let CInputsPackager {
            input_path,
            mut path_transformer,
            preprocessed_input,
            extra_dist_files,
            extra_hash_files,
        } = *self;
        let mut builder = tar::Builder::new(wtr);
        {
            let input_path = pkg::simplify_path(&input_path)?;
            let dist_input_path = path_transformer.as_dist(&input_path).with_context(|| {
                format!("unable to transform input path {}", input_path.display())
            })?;
            // The archived "input file" holds the preprocessed text, not the
            // original file contents, so size/checksum come from the buffer.
            let mut file_header = pkg::make_tar_header(&input_path, &dist_input_path)?;
            file_header.set_size(preprocessed_input.len() as u64);
            file_header.set_cksum();
            builder.append(&file_header, preprocessed_input.as_slice())?;
        }
        for input_path in extra_hash_files.iter().chain(extra_dist_files.iter()) {
            let input_path = pkg::simplify_path(input_path)?;
            // Dynamic libraries can only be shipped where the platform can
            // load foreign dylibs (see `CAN_DIST_DYLIBS`).
            if !super::CAN_DIST_DYLIBS
                && input_path
                    .extension()
                    .is_some_and(|ext| ext == std::env::consts::DLL_EXTENSION)
            {
                bail!(
                    "Cannot distribute dylib input {} on this platform",
                    input_path.display()
                )
            }
            let dist_input_path = path_transformer.as_dist(&input_path).with_context(|| {
                format!("unable to transform input path {}", input_path.display())
            })?;
            let mut file = io::BufReader::new(fs::File::open(&input_path)?);
            let mut output = vec![];
            io::copy(&mut file, &mut output)?;
            let mut file_header = pkg::make_tar_header(&input_path, &dist_input_path)?;
            file_header.set_size(output.len() as u64);
            file_header.set_cksum();
            builder.append(&file_header, &*output)?;
        }
        // NOTE(review): errors from finishing the archive are discarded here;
        // confirm the caller validates/flushes `wtr` downstream.
        let _ = builder.into_inner();
        Ok(path_transformer)
    }
}
/// Packages a compiler toolchain for a dist server; `kind` selects which
/// family-specific helper binaries and files to bundle.
#[cfg(feature = "dist-client")]
#[allow(unused)]
struct CToolchainPackager {
    executable: PathBuf,
    kind: CCompilerKind,
}
#[cfg(feature = "dist-client")]
#[cfg(all(target_os = "linux", target_arch = "x86_64"))]
impl pkg::ToolchainPackager for CToolchainPackager {
    /// Writes a compressed tar of the compiler executable plus the helper
    /// programs/files the compiler family needs to run standalone.
    fn write_pkg(self: Box<Self>, f: fs::File) -> Result<()> {
        use std::os::unix::ffi::OsStringExt;
        info!("Generating toolchain {}", self.executable.display());
        let mut package_builder = pkg::ToolchainPackageBuilder::new();
        package_builder.add_common()?;
        package_builder.add_executable_and_deps(self.executable.clone())?;
        // Asks the compiler itself where a named program/file lives via
        // `-print-{prog,file}-name=NAME`; returns an absolute path if found.
        let named_file = |kind: &str, name: &str| -> Option<PathBuf> {
            let mut output = process::Command::new(&self.executable)
                .arg(format!("-print-{}-name={}", kind, name))
                .output()
                .ok()?;
            debug!(
                "find named {} {} output:\n{}\n===\n{}",
                kind,
                name,
                String::from_utf8_lossy(&output.stdout),
                String::from_utf8_lossy(&output.stderr),
            );
            if !output.status.success() {
                debug!("exit failure");
                return None;
            }
            // Trim the trailing newline from the printed path.
            if output.stdout.last() == Some(&b'\n') {
                output.stdout.pop();
            }
            let path: PathBuf = OsString::from_vec(output.stdout).into();
            if path.is_absolute() {
                Some(path)
            } else {
                // GCC prints the bare name back when it doesn't know the
                // file; fall back to a PATH search.
                which::which(path).ok()
            }
        };
        let add_named_prog =
            |builder: &mut pkg::ToolchainPackageBuilder, name: &str| -> Result<()> {
                if let Some(path) = named_file("prog", name) {
                    builder.add_executable_and_deps(path)?;
                }
                Ok(())
            };
        let add_named_file =
            |builder: &mut pkg::ToolchainPackageBuilder, name: &str| -> Result<()> {
                if let Some(path) = named_file("file", name) {
                    builder.add_file(path)?;
                }
                Ok(())
            };
        // Tools used by essentially every driver.
        add_named_prog(&mut package_builder, "as")?;
        add_named_prog(&mut package_builder, "objcopy")?;
        // Dynamic-linker configuration so bundled binaries resolve libraries.
        if Path::new("/etc/ld.so.conf").is_file() {
            package_builder.add_file("/etc/ld.so.conf".into())?;
        }
        let ld_conf_dir = Path::new("/etc/ld.so.conf.d");
        if ld_conf_dir.is_dir() {
            package_builder.add_dir_contents(ld_conf_dir)?;
        }
        // Family-specific extras.
        match self.kind {
            CCompilerKind::Clang => {
                if let Some(limits_h) = named_file("file", "include/limits.h") {
                    info!("limits_h = {}", limits_h.display());
                    package_builder.add_dir_contents(limits_h.parent().unwrap())?;
                }
            }
            CCompilerKind::Gcc => {
                add_named_prog(&mut package_builder, "cc1")?;
                add_named_prog(&mut package_builder, "cc1plus")?;
                add_named_file(&mut package_builder, "specs")?;
                add_named_file(&mut package_builder, "liblto_plugin.so")?;
            }
            CCompilerKind::Cicc
            | CCompilerKind::CudaFE
            | CCompilerKind::Ptxas
            | CCompilerKind::Nvcc => {}
            CCompilerKind::Nvhpc => {
                add_named_file(&mut package_builder, "cpp1")?;
                add_named_file(&mut package_builder, "cpp2")?;
                add_named_file(&mut package_builder, "opt")?;
                add_named_prog(&mut package_builder, "llc")?;
                add_named_prog(&mut package_builder, "acclnk")?;
            }
            // The remaining kinds (Diab, TaskingVX) are assumed never to
            // request dist toolchain packaging.
            _ => unreachable!(),
        }
        package_builder.into_compressed_tar(f)
    }
}
/// Version tag mixed into every hash key; bump whenever the key format
/// changes so stale cache entries can never match.
pub const CACHE_VERSION: &[u8] = b"12";
/// Environment variables that influence compilation output and therefore
/// participate in the hash key (see `HashKeyParams::compute`).
static CACHED_ENV_VARS: LazyLock<HashSet<&'static OsStr>> = LazyLock::new(|| {
    [
        "SCCACHE_C_CUSTOM_CACHE_BUSTER",
        "MACOSX_DEPLOYMENT_TARGET",
        "IPHONEOS_DEPLOYMENT_TARGET",
        "TVOS_DEPLOYMENT_TARGET",
        "WATCHOS_DEPLOYMENT_TARGET",
        "SDKROOT",
        "CCC_OVERRIDE_OPTIONS",
    ]
    .iter()
    .map(OsStr::new)
    .collect()
});
/// Borrowed inputs to the final cache-key computation. Build with
/// [`HashKeyParams::new`] plus the `with_*` setters, then call `compute`.
#[derive(Debug, Clone)]
pub struct HashKeyParams<'a> {
    compiler_digest: &'a str,
    language: Language,
    /// Compile arguments (common + arch) hashed into the key.
    arguments: &'a [OsString],
    /// Digests of extra files (e.g. SCCACHE_EXTRAFILES).
    extra_hashes: &'a [String],
    /// Environment; only the `CACHED_ENV_VARS` allowlist is hashed.
    env_vars: &'a [(OsString, OsString)],
    preprocessor_output: &'a [u8],
    /// Whether the compiler runs in C++ mode.
    plusplus: bool,
    /// Base directories stripped from the preprocessor output before hashing.
    basedirs: &'a [Vec<u8>],
}
impl<'a> HashKeyParams<'a> {
    /// Starts building hash-key parameters from the required inputs; the
    /// optional inputs default to empty/false and are supplied through the
    /// `with_*` methods.
    pub fn new(
        compiler_digest: &'a str,
        language: Language,
        arguments: &'a [OsString],
        preprocessor_output: &'a [u8],
    ) -> Self {
        Self {
            compiler_digest,
            language,
            arguments,
            preprocessor_output,
            extra_hashes: &[],
            env_vars: &[],
            plusplus: false,
            basedirs: &[],
        }
    }
    /// Sets the digests of extra files that must affect the key.
    pub fn with_extra_hashes(mut self, extra_hashes: &'a [String]) -> Self {
        self.extra_hashes = extra_hashes;
        self
    }
    /// Sets the environment (filtered against `CACHED_ENV_VARS` in `compute`).
    pub fn with_env_vars(mut self, env_vars: &'a [(OsString, OsString)]) -> Self {
        self.env_vars = env_vars;
        self
    }
    /// Marks the compilation as C++ mode.
    pub fn with_plusplus(mut self, plusplus: bool) -> Self {
        self.plusplus = plusplus;
        self
    }
    /// Sets base directories to strip from the preprocessor output.
    pub fn with_basedirs(mut self, basedirs: &'a [Vec<u8>]) -> Self {
        self.basedirs = basedirs;
        self
    }
    /// Computes the cache key.
    ///
    /// NOTE: the order in which values are fed into the digest defines the
    /// cache-key format (versioned by `CACHE_VERSION`); do not reorder.
    pub fn compute(&self) -> String {
        let mut m = Digest::new();
        m.update(self.compiler_digest.as_bytes());
        // Distinguish C vs C++ mode even with identical arguments.
        m.update(&[self.plusplus as u8]);
        m.update(CACHE_VERSION);
        m.update(self.language.as_str().as_bytes());
        for arg in self.arguments {
            arg.hash(&mut HashToDigest { digest: &mut m });
        }
        for hash in self.extra_hashes {
            m.update(hash.as_bytes());
        }
        // Only the fixed allowlist of env vars participates in the key.
        for (var, val) in self.env_vars.iter() {
            if CACHED_ENV_VARS.contains(var.as_os_str()) {
                var.hash(&mut HashToDigest { digest: &mut m });
                m.update(&b"="[..]);
                val.hash(&mut HashToDigest { digest: &mut m });
            }
        }
        // Strip configured base dirs first — presumably so builds rooted at
        // different paths can share keys; see `strip_basedirs`.
        let preprocessor_output_to_hash = strip_basedirs(self.preprocessor_output, self.basedirs);
        m.update(&preprocessor_output_to_hash);
        m.finish()
    }
}
#[cfg(test)]
mod test {
use std::{collections::VecDeque, sync::Mutex};
use super::*;
#[test]
fn test_same_content() {
let args = ovec!["a", "b", "c"];
let h1 = HashKeyParams::new("abcd", Language::C, &args, b"hello world").compute();
let h2 = HashKeyParams::new("abcd", Language::C, &args, b"hello world").compute();
assert_eq!(h1, h2);
}
#[test]
fn test_plusplus_differs() {
let args = ovec!["a", "b", "c"];
let h1 = HashKeyParams::new("abcd", Language::C, &args, b"hello world").compute();
let h2 = HashKeyParams::new("abcd", Language::C, &args, b"hello world")
.with_plusplus(true)
.compute();
assert_neq!(h1, h2);
}
#[test]
fn test_header_differs() {
let args = ovec!["a", "b", "c"];
let h1 = HashKeyParams::new("abcd", Language::C, &args, b"hello world").compute();
let h2 = HashKeyParams::new("abcd", Language::CHeader, &args, b"hello world").compute();
assert_neq!(h1, h2);
}
#[test]
fn test_plusplus_header_differs() {
let args = ovec!["a", "b", "c"];
let h1 = HashKeyParams::new("abcd", Language::Cxx, &args, b"hello world")
.with_plusplus(true)
.compute();
let h2 = HashKeyParams::new("abcd", Language::CxxHeader, &args, b"hello world")
.with_plusplus(true)
.compute();
assert_neq!(h1, h2);
}
#[test]
fn test_hash_key_executable_contents_differs() {
let args = ovec!["a", "b", "c"];
let h1 = HashKeyParams::new("abcd", Language::C, &args, b"hello world").compute();
let h2 = HashKeyParams::new("wxyz", Language::C, &args, b"hello world").compute();
assert_neq!(h1, h2);
}
#[test]
fn test_hash_key_args_differs() {
let abc = ovec!["a", "b", "c"];
let xyz = ovec!["x", "y", "z"];
let ab = ovec!["a", "b"];
let a = ovec!["a"];
let h_abc = HashKeyParams::new("abcd", Language::C, &abc, b"hello world").compute();
let h_xyz = HashKeyParams::new("abcd", Language::C, &xyz, b"hello world").compute();
let h_ab = HashKeyParams::new("abcd", Language::C, &ab, b"hello world").compute();
let h_a = HashKeyParams::new("abcd", Language::C, &a, b"hello world").compute();
assert_neq!(h_abc, h_xyz);
assert_neq!(h_abc, h_ab);
assert_neq!(h_abc, h_a);
}
#[test]
fn test_hash_key_preprocessed_content_differs() {
let args = ovec!["a", "b", "c"];
let h1 = HashKeyParams::new("abcd", Language::C, &args, b"hello world").compute();
let h2 = HashKeyParams::new("abcd", Language::C, &args, b"goodbye").compute();
assert_neq!(h1, h2);
}
#[test]
fn test_hash_key_env_var_differs() {
let args = ovec!["a", "b", "c"];
for var in CACHED_ENV_VARS.iter() {
let h1 = HashKeyParams::new("abcd", Language::C, &args, b"hello world").compute();
let vars1 = vec![(OsString::from(var), OsString::from("something"))];
let h2 = HashKeyParams::new("abcd", Language::C, &args, b"hello world")
.with_env_vars(&vars1)
.compute();
let vars2 = vec![(OsString::from(var), OsString::from("something else"))];
let h3 = HashKeyParams::new("abcd", Language::C, &args, b"hello world")
.with_env_vars(&vars2)
.compute();
assert_neq!(h1, h2);
assert_neq!(h2, h3);
}
}
#[test]
fn test_extra_hash_data() {
let args = ovec!["a", "b", "c"];
let extra_data = stringvec!["hello", "world"];
let h1 = HashKeyParams::new("abcd", Language::C, &args, b"hello world")
.with_extra_hashes(&extra_data)
.compute();
let h2 = HashKeyParams::new("abcd", Language::C, &args, b"hello world").compute();
assert_neq!(h1, h2);
}
#[test]
fn test_hash_key_basedirs() {
let args = ovec!["a", "b", "c"];
let preprocessed1 = b"# 1 \"/home/user1/project/src/main.c\"\nint main() { return 0; }";
let preprocessed2 = b"# 1 \"/home/user2/project/src/main.c\"\nint main() { return 0; }";
let basedirs = [
b"/home/user1/project".to_vec(),
b"/home/user2/project".to_vec(),
];
let hash_with_basedirs = |output: &[u8], dirs: &[Vec<u8>]| {
HashKeyParams::new("abcd", Language::C, &args, output)
.with_basedirs(dirs)
.compute()
};
let h1 = hash_with_basedirs(preprocessed1, &basedirs);
let h2 = hash_with_basedirs(preprocessed2, &basedirs);
assert_eq!(h1, h2);
assert_eq!(
hash_with_basedirs(preprocessed1, &basedirs[..1]),
hash_with_basedirs(preprocessed2, &basedirs[1..])
);
assert_neq!(
HashKeyParams::new("abcd", Language::C, &args, preprocessed1).compute(),
HashKeyParams::new("abcd", Language::C, &args, preprocessed2).compute()
);
let preprocessed_cpp1 =
b"# 1 \"/home/user1/project/src/main.cpp\"\nint main() { return 0; }";
let preprocessed_cpp2 =
b"# 1 \"/home/user2/project/src/main.cpp\"\nint main() { return 0; }";
let h_cpp1 = HashKeyParams::new("abcd", Language::Cxx, &args, preprocessed_cpp1)
.with_plusplus(true)
.with_basedirs(&basedirs)
.compute();
let h_cpp2 = HashKeyParams::new("abcd", Language::Cxx, &args, preprocessed_cpp2)
.with_plusplus(true)
.with_basedirs(&basedirs)
.compute();
assert_eq!(h_cpp1, h_cpp2);
let basedir_slash = b"/home/user1/project/".to_vec();
let h_slash = hash_with_basedirs(preprocessed1, std::slice::from_ref(&basedir_slash));
assert_neq!(h1, h_slash);
let multi_basedirs = vec![
b"/home/user1".to_vec(),
b"/home/user1/project".to_vec(), ];
assert_eq!(h1, hash_with_basedirs(preprocessed1, &multi_basedirs));
}
#[test]
fn test_language_from_file_name() {
fn t(extension: &str, expected: Language) {
let path_str = format!("input.{}", extension);
let path = Path::new(&path_str);
let actual = Language::from_file_name(path);
assert_eq!(actual, Some(expected));
}
t("s", Language::Assembler);
t("S", Language::AssemblerToPreprocess);
t("sx", Language::AssemblerToPreprocess);
t("c", Language::C);
t("i", Language::CPreprocessed);
t("C", Language::Cxx);
t("cc", Language::Cxx);
t("cp", Language::Cxx);
t("cpp", Language::Cxx);
t("CPP", Language::Cxx);
t("cxx", Language::Cxx);
t("c++", Language::Cxx);
t("ii", Language::CxxPreprocessed);
t("h", Language::GenericHeader);
t("hh", Language::CxxHeader);
t("H", Language::CxxHeader);
t("hp", Language::CxxHeader);
t("hxx", Language::CxxHeader);
t("hpp", Language::CxxHeader);
t("HPP", Language::CxxHeader);
t("h++", Language::CxxHeader);
t("tcc", Language::CxxHeader);
t("m", Language::ObjectiveC);
t("mi", Language::ObjectiveCPreprocessed);
t("M", Language::ObjectiveCxx);
t("mm", Language::ObjectiveCxx);
t("mii", Language::ObjectiveCxxPreprocessed);
t("cu", Language::Cuda);
t("hip", Language::Hip);
}
#[test]
fn test_language_from_file_name_none() {
fn t(extension: &str) {
let path_str = format!("input.{}", extension);
let path = Path::new(&path_str);
let actual = Language::from_file_name(path);
let expected = None;
assert_eq!(actual, expected);
}
t("Cp");
t("Cpp");
t("Hp");
t("Hpp");
t("Mm");
t("Cu");
}
#[test]
fn test_process_preprocessed_file() {
env_logger::builder()
.is_test(true)
.filter_level(log::LevelFilter::Debug)
.try_init()
.ok();
let input_file = Path::new("tests/test.c");
let path = Path::new(file!())
.parent()
.unwrap()
.parent()
.unwrap()
.parent()
.unwrap();
let path = path.join("tests/test.c.gcc-13.2.0-preproc");
let mut bytes = std::fs::read(path).unwrap();
let original_bytes = bytes.clone();
let mut include_files = HashMap::new();
let config = PreprocessorCacheModeConfig {
use_preprocessor_cache_mode: true,
skip_system_headers: true,
..Default::default()
};
let success = process_preprocessed_file(
input_file,
Path::new(""),
&mut bytes,
&mut include_files,
config,
std::time::SystemTime::now(),
StandardFsAbstraction,
)
.unwrap();
assert_eq!(&bytes, &original_bytes);
assert!(success);
assert_eq!(include_files.len(), 0);
}
struct PanicFs;
impl PreprocessorFSAbstraction for PanicFs {
fn metadata(&self, path: impl AsRef<Path>) -> io::Result<PreprocessorFileMetadata> {
panic!("called metadata at {}", path.as_ref().display());
}
fn open(&self, path: impl AsRef<Path>) -> io::Result<Box<dyn std::io::Read>> {
panic!("called open at {}", path.as_ref().display());
}
}
struct TestFs {
metadata_results: Mutex<VecDeque<(PathBuf, PreprocessorFileMetadata)>>,
open_results: Mutex<VecDeque<(PathBuf, Box<dyn std::io::Read>)>>,
}
impl PreprocessorFSAbstraction for TestFs {
fn metadata(&self, path: impl AsRef<Path>) -> io::Result<PreprocessorFileMetadata> {
let (expected_path, meta) = self
.metadata_results
.lock()
.unwrap()
.pop_front()
.expect("not enough 'metadata' results");
assert_eq!(expected_path, path.as_ref(), "{}", path.as_ref().display());
Ok(meta)
}
fn open(&self, path: impl AsRef<Path>) -> io::Result<Box<dyn std::io::Read>> {
let (expected_path, impls_read) = self
.open_results
.lock()
.unwrap()
.pop_front()
.expect("not enough 'open' results");
assert_eq!(expected_path, path.as_ref(), "{}", path.as_ref().display());
Ok(impls_read)
}
}
fn do_single_preprocessor_line_call(
line: &[u8],
include_files: &mut HashMap<PathBuf, String>,
fs_impl: &impl PreprocessorFSAbstraction,
skip_system_headers: bool,
) -> PreprocessedLineAction {
let input_file = Path::new("tests/test.c");
let config = PreprocessorCacheModeConfig {
use_preprocessor_cache_mode: true,
skip_system_headers,
..Default::default()
};
let mut bytes = line.to_vec();
let total_len = bytes.len();
process_preprocessor_line(
input_file,
Path::new(""),
include_files,
config,
std::time::SystemTime::now(),
&mut bytes,
0,
0,
&mut Digest::new(),
total_len,
&mut HashMap::new(),
fs_impl,
)
.unwrap()
}
#[test]
fn test_process_preprocessor_line_simple() {
env_logger::builder()
.is_test(true)
.filter_level(log::LevelFilter::Debug)
.try_init()
.ok();
let mut include_files = HashMap::new();
assert_eq!(
do_single_preprocessor_line_call(
br#"// # 0 "tests/test.c""#,
&mut include_files,
&PanicFs,
true,
),
ControlFlow::Continue((20, 20)),
);
assert_eq!(include_files.len(), 0);
assert_eq!(
do_single_preprocessor_line_call(
br#"// # 0 "<built-in>""#,
&mut include_files,
&PanicFs,
true,
),
ControlFlow::Continue((18, 18)),
);
assert_eq!(include_files.len(), 0);
assert_eq!(
do_single_preprocessor_line_call(
br#"// # 0 "<command-line>""#,
&mut include_files,
&PanicFs,
true,
),
ControlFlow::Continue((22, 22)),
);
assert_eq!(include_files.len(), 0);
assert_eq!(
do_single_preprocessor_line_call(
br#"// # 0 "<command-line>" 2"#,
&mut include_files,
&PanicFs,
true,
),
ControlFlow::Continue((22, 22)),
);
assert_eq!(include_files.len(), 0);
assert_eq!(
do_single_preprocessor_line_call(
br#"// # 1 "tests/test.c""#,
&mut include_files,
&PanicFs,
true,
),
ControlFlow::Continue((20, 20)),
);
assert_eq!(include_files.len(), 0);
assert_eq!(
do_single_preprocessor_line_call(
br#"// # 1 "/usr/include/stdc-predef.h" 1 3 4"#,
&mut include_files,
&PanicFs,
true,
),
ControlFlow::Continue((34, 34)),
);
assert_eq!(include_files.len(), 0);
}
#[test]
fn test_test_helpers() {
env_logger::builder()
.is_test(true)
.filter_level(log::LevelFilter::Debug)
.try_init()
.ok();
let res = std::panic::catch_unwind(|| {
let mut include_files = HashMap::new();
assert_eq!(
do_single_preprocessor_line_call(
br#"// # 1 "/usr/include/stdc-predef.h" 1 3 4"#,
&mut include_files,
&PanicFs,
false,
),
ControlFlow::Continue((34, 34)),
);
});
assert_eq!(
res.unwrap_err().downcast_ref::<String>().unwrap(),
"called metadata at /usr/include/stdc-predef.h"
);
let res = std::panic::catch_unwind(|| {
let mut include_files = HashMap::new();
let fs_impl = TestFs {
metadata_results: Mutex::new(VecDeque::new()),
open_results: Mutex::new(VecDeque::new()),
};
assert_eq!(
do_single_preprocessor_line_call(
br#"// # 33 "/usr/include/x86_64-linux-gnu/bits/libc-header-start.h" 3 4"#,
&mut include_files,
&fs_impl,
false,
),
ControlFlow::Continue((34, 34)),
);
});
assert_eq!(
res.unwrap_err().downcast_ref::<String>().unwrap(),
"not enough 'metadata' results"
);
}
#[test]
fn test_process_preprocessor_line_fs_access() {
env_logger::builder()
.is_test(true)
.filter_level(log::LevelFilter::Debug)
.try_init()
.ok();
let mut include_files = HashMap::new();
let fs_impl = TestFs {
metadata_results: Mutex::new(
[(
PathBuf::from("/usr/include/x86_64-linux-gnu/bits/libc-header-start.h"),
PreprocessorFileMetadata {
is_dir: false,
is_file: true,
modified: Some(Timestamp::new(i64::MAX - 1, 0)),
ctime_or_creation: None,
},
)]
.into_iter()
.collect(),
),
open_results: Mutex::new(VecDeque::new()),
};
assert_eq!(
do_single_preprocessor_line_call(
br#"// # 33 "/usr/include/x86_64-linux-gnu/bits/libc-header-start.h" 3 4"#,
&mut include_files,
&fs_impl,
false,
),
ControlFlow::Break((63, 9, false)),
);
let mut include_files = HashMap::new();
let fs_impl = TestFs {
metadata_results: Mutex::new(
[(
PathBuf::from("/usr/include/x86_64-linux-gnu/bits/libc-header-start.h"),
PreprocessorFileMetadata {
is_dir: true,
is_file: false,
modified: Some(Timestamp::new(12341234, 0)),
ctime_or_creation: None,
},
)]
.into_iter()
.collect(),
),
open_results: Mutex::new(VecDeque::new()),
};
assert_eq!(
do_single_preprocessor_line_call(
br#"// # 33 "/usr/include/x86_64-linux-gnu/bits/libc-header-start.h" 3 4"#,
&mut include_files,
&fs_impl,
false,
),
ControlFlow::Continue((63, 63)),
);
assert_eq!(include_files.len(), 0);
let mut include_files = HashMap::new();
let fs_impl = TestFs {
metadata_results: Mutex::new(
[(
PathBuf::from("/usr/include/x86_64-linux-gnu/bits/libc-header-start.h"),
PreprocessorFileMetadata {
is_dir: false,
is_file: true,
modified: Some(Timestamp::new(12341234, 0)),
ctime_or_creation: None,
},
)]
.into_iter()
.collect(),
),
open_results: Mutex::new(
[(
PathBuf::from("/usr/include/x86_64-linux-gnu/bits/libc-header-start.h"),
Box::new(&b"contents"[..]) as Box<dyn std::io::Read>,
)]
.into_iter()
.collect(),
),
};
assert_eq!(
do_single_preprocessor_line_call(
br#"// # 33 "/usr/include/x86_64-linux-gnu/bits/libc-header-start.h" 3 4"#,
&mut include_files,
&fs_impl,
false,
),
ControlFlow::Continue((63, 63)),
);
assert_eq!(include_files.len(), 1);
assert_eq!(
include_files
.get(Path::new(
"/usr/include/x86_64-linux-gnu/bits/libc-header-start.h",
))
.unwrap(),
"a93900c371d997927c5bc568ea538bed59ae5c960021dcfe7b0b369da5267528",
);
}
}