use std::cell::{Cell, RefCell};
use std::collections::HashMap;
use std::error::Error as StdError;
use std::fs::File;
use std::io::{BufRead, BufReader, Error as IoError, ErrorKind as IoErrorKind, Lines};
use std::iter::Peekable;
use std::path::Path;
use std::rc::Rc;
use eyre::{bail, eyre, Context, Result};
use lazy_static::lazy_static;
use regex::Regex;
use crate::args::Args;
#[cfg(feature = "full")]
use super::conditional::{Line as ConditionalLine, State as ConditionalState};
#[cfg(feature = "full")]
use super::eval_context::DeferredEvalContext;
use super::parse::{MacroAssignment, MacroAssignmentOutcome};
#[cfg(feature = "full")]
use super::r#macro::ExportConfig;
use super::r#macro::Macro;
use super::target::StaticTargetSet;
use super::{
builtin_targets, CommandLine, InferenceRule, InferenceRuleSet, ItemSource, LookupInternal,
MacroScopeStack, MacroSet, Target, TokenString,
};
/// Coarse classification of a logical makefile line, used by `read_all`
/// to dispatch to the appropriate parser.
enum LineType {
    /// A target rule: `targets: prerequisites`.
    Rule,
    /// A rule-specific macro assignment `targets: NAME = value` (GNU extension).
    #[cfg(feature = "full")]
    RuleMacro,
    /// A macro assignment (`NAME = value`; also `define NAME` with the full feature).
    Macro,
    /// An `include` or `-include` directive.
    Include,
    /// An `export` directive (GNU extension).
    #[cfg(feature = "full")]
    Export,
    /// An `unexport` directive (GNU extension).
    #[cfg(feature = "full")]
    Unexport,
    /// Not recognized; `read_all` may re-classify after macro expansion.
    Unknown,
}
impl LineType {
    /// Classify a tokenized line by its leading keyword and by the relative
    /// positions of the first ':' and the first assignment operator.
    fn of(line_tokens: &TokenString) -> Self {
        // GNU `define NAME ... endef` multi-line macro definitions.
        #[cfg(feature = "full")]
        if line_tokens.starts_with("define ") {
            return Self::Macro;
        }
        if line_tokens.starts_with("include ") || line_tokens.starts_with("-include ") {
            return Self::Include;
        }
        #[cfg(feature = "full")]
        if line_tokens.starts_with("export ") || line_tokens == "export" {
            return Self::Export;
        }
        #[cfg(feature = "full")]
        if line_tokens.starts_with("unexport ") || line_tokens == "unexport" {
            return Self::Unexport;
        }
        let colon_idx = line_tokens.find(":");
        // Without the full feature only plain `=` exists; with it, take the
        // earliest occurrence among all supported assignment operators.
        #[cfg(not(feature = "full"))]
        let equals_idx = line_tokens.find("=");
        #[cfg(feature = "full")]
        let equals_idx = ["=", ":=", "::=", "?=", "+="]
            .iter()
            .filter_map(|p| line_tokens.find(p))
            .min();
        match (colon_idx, equals_idx) {
            (Some(_), None) => {
                // ':' with no '=': an ordinary rule.
                return Self::Rule;
            }
            (Some(c), Some(e)) if c < e => {
                // ':' before '=': a rule-specific macro where supported,
                // otherwise treated as a plain rule.
                #[cfg(feature = "full")]
                return Self::RuleMacro;
                #[cfg(not(feature = "full"))]
                return Self::Rule;
            }
            (None, Some(_)) => {
                return Self::Macro;
            }
            (Some(c), Some(e)) if e <= c => {
                // '=' at or before ':': the ':' belongs to the macro value.
                return Self::Macro;
            }
            // Only (None, None) remains: nothing recognizable.
            _ => {}
        }
        Self::Unknown
    }
}
/// The two suffixes of a suffix (inference) rule target `.s2.s1`;
/// e.g. `.c.o` yields s2 = ".c", s1 = ".o", and a single-suffix rule
/// like `.c` yields an empty s2.
#[derive(Debug)]
struct InferenceMatch<'a> {
    s1: &'a str,
    s2: &'a str,
}
/// Decide whether a rule is a suffix (inference) rule: exactly one target
/// shaped like `.s2.s1` (or just `.s1`), no prerequisites, and not an
/// all-caps special target such as `.PHONY` or `.SUFFIXES`.
///
/// Returns the parsed suffix pair on success, `None` otherwise.
fn inference_match<'a>(
    targets: &[&'a str],
    prerequisites: &[String],
) -> Option<InferenceMatch<'a>> {
    lazy_static! {
        static ref INFERENCE_RULE: Regex = #[allow(clippy::unwrap_used)]
        Regex::new(r"^(?P<s2>(\.[^/.]+)?)(?P<s1>\.[^/.]+)$")
            .unwrap();
        static ref SPECIAL_TARGET: Regex = #[allow(clippy::unwrap_used)]
        Regex::new(r"^\.[A-Z]+$").unwrap();
    }
    // Index first so an empty `targets` slice behaves exactly as before.
    let first_target = targets[0];
    // More than one target or any prerequisite disqualifies a suffix rule.
    if targets.len() != 1 || !prerequisites.is_empty() {
        return None;
    }
    // Special targets (.PHONY, .SUFFIXES, ...) are never inference rules.
    if SPECIAL_TARGET.captures(first_target).is_some() {
        return None;
    }
    #[allow(clippy::unwrap_used)]
    INFERENCE_RULE
        .captures(first_target)
        .map(|caps| InferenceMatch {
            // Both named groups always participate in a successful match.
            s1: caps.name("s1").unwrap().as_str(),
            s2: caps.name("s2").unwrap().as_str(),
        })
}
/// Iterator adapter that pairs each item of a fallible iterator with a
/// 1-based line number; the second field counts items yielded so far.
struct LineNumbers<T, E, Inner>(Inner, usize)
where
    E: StdError + Send + Sync + 'static,
    Inner: Iterator<Item = Result<T, E>>;
impl<T, E, Inner> LineNumbers<T, E, Inner>
where
    E: StdError + Send + Sync + 'static,
    Inner: Iterator<Item = Result<T, E>>,
{
    /// Wrap `inner`, starting the counter at zero so the first item is line 1.
    const fn new(inner: Inner) -> Self {
        Self(inner, 0)
    }
}
impl<T, E, Inner> Iterator for LineNumbers<T, E, Inner>
where
    E: StdError + Send + Sync + 'static,
    Inner: Iterator<Item = Result<T, E>>,
{
    type Item = (usize, Result<T>);

    /// Yield the next inner item tagged with its 1-based line number,
    /// attaching that number as context to any read error.
    fn next(&mut self) -> Option<Self::Item> {
        let item = self.0.next()?;
        // Saturating keeps the counter well-defined even on absurd inputs.
        self.1 = self.1.saturating_add(1);
        let number = self.1;
        let item = item.with_context(|| format!("failed to read line {} of makefile", number));
        Some((number, item))
    }
}
/// Extension trait adding `.line_numbered()` to any iterator of fallible items.
trait IteratorExt<T, E: StdError + Send + Sync + 'static>: Iterator<Item = Result<T, E>> {
    /// Wrap `self` so each item is paired with a 1-based line number.
    fn line_numbered(self) -> LineNumbers<T, E, Self>
    where
        Self: Sized,
    {
        LineNumbers::new(self)
    }
}
// Blanket impl: every suitable iterator gets `line_numbered` for free.
impl<T, E: StdError + Send + Sync + 'static, I: Iterator<Item = Result<T, E>>> IteratorExt<T, E>
    for I
{
}
/// Options controlling how `next_line` assembles one logical line.
#[derive(Clone, Copy)]
struct NextLineSettings {
    /// Text inserted where a backslash-newline joined two physical lines
    /// (" " normally; "\\\n" when collecting rule command lines).
    escaped_newline_replacement: &'static str,
    /// When true, the line is stashed in `pending_line` so the next call
    /// sees it again (peek instead of consume).
    peeking: bool,
    /// Strip `#` comments (respecting `\#` escapes) and trailing whitespace.
    strip_comments: bool,
}
impl Default for NextLineSettings {
fn default() -> Self {
Self {
escaped_newline_replacement: " ",
peeking: false,
strip_comments: true,
}
}
}
/// Streaming parser for a single makefile: rules, macros, includes, and
/// (with the "full" feature) GNU extensions such as conditionals, pattern
/// rules, and export directives. Results accumulate until `finish`.
pub struct MakefileReader<'a, 'parent, R: BufRead> {
    /// Display name of the file being read, used in diagnostics.
    file_name: String,
    /// Suffix and pattern rules defined so far.
    pub inference_rules: InferenceRuleSet,
    /// Inherited macro scopes (consulted but never modified here).
    pub stack: MacroScopeStack<'parent>,
    /// Macros defined by this makefile.
    pub macros: MacroSet,
    /// Targets defined by this makefile.
    pub targets: StaticTargetSet,
    /// Default targets, loaded unless `no_builtin_rules` is set.
    built_in_targets: HashMap<String, Target>,
    /// The default goal: first target whose name doesn't start with '.'.
    pub first_non_special_target: Option<String>,
    /// Included files that were not found (a rule might build them later).
    pub failed_includes: Vec<String>,
    pub args: &'a Args,
    /// Physical lines of the source, tagged with 1-based line numbers.
    lines_iter: Peekable<LineNumbers<String, IoError, Lines<R>>>,
    /// A logical line stored by a peeking `next_line` call, kept as the
    /// pieces split at escaped newlines so the consumer can re-join them.
    pending_line: Option<(usize, Vec<String>)>,
    /// State of the currently open ifeq/ifdef/... blocks.
    #[cfg(feature = "full")]
    conditional_stack: Vec<ConditionalState>,
    /// Shared list of every makefile file name read so far.
    pub file_names: Rc<RefCell<Vec<String>>>,
}
impl<'a, 'parent> MakefileReader<'a, 'parent, BufReader<File>> {
    /// Open `path` and parse it as a makefile.
    ///
    /// With the "full" feature the path is appended to the inherited
    /// `MAKEFILE_LIST` macro (created if absent). The file name is recorded
    /// in the shared `file_names` list either way.
    pub fn read_file(
        args: &'a Args,
        stack: MacroScopeStack<'parent>,
        path: impl AsRef<Path>,
        file_names: Rc<RefCell<Vec<String>>>,
    ) -> Result<Self> {
        let mut macros = MacroSet::new();
        #[cfg(feature = "full")]
        if let Some(old_makefile_list) = stack.get("MAKEFILE_LIST") {
            // Append this file to the inherited list.
            let mut old_makefile_list = old_makefile_list.into_owned();
            old_makefile_list.text.extend(TokenString::text(format!(
                " {}",
                path.as_ref().to_string_lossy()
            )));
            macros.set("MAKEFILE_LIST".to_owned(), old_makefile_list);
        } else {
            // First makefile: start the list with this file.
            macros.set(
                "MAKEFILE_LIST".to_owned(),
                Macro {
                    source: ItemSource::Builtin,
                    text: TokenString::text(path.as_ref().to_string_lossy()),
                    #[cfg(feature = "full")]
                    eagerly_expanded: false,
                },
            );
        }
        let file_name = path.as_ref().to_string_lossy();
        file_names.borrow_mut().push(file_name.to_string());
        let file = File::open(path.as_ref());
        let file = file.context("couldn't open makefile!")?;
        let file_reader = BufReader::new(file);
        Self::read(args, stack, macros, file_reader, file_name, file_names)
    }
}
impl<'a, 'parent, R: BufRead> MakefileReader<'a, 'parent, R> {
/// Parse a makefile from any buffered reader.
///
/// `macros` seeds this reader's macro set (e.g. MAKEFILE_LIST from
/// `read_file`); `stack` supplies inherited scopes. Built-in targets are
/// loaded unless `no_builtin_rules` is set. Errors are wrapped with the
/// makefile's `name` for context.
pub fn read(
    args: &'a Args,
    stack: MacroScopeStack<'parent>,
    macros: MacroSet,
    source: R,
    name: impl Into<String>,
    file_names: Rc<RefCell<Vec<String>>>,
) -> Result<Self> {
    let name = name.into();
    let mut reader = Self {
        file_name: name.clone(),
        inference_rules: InferenceRuleSet::default(),
        stack,
        macros,
        targets: Default::default(),
        built_in_targets: HashMap::new(),
        first_non_special_target: None,
        failed_includes: Vec::new(),
        args,
        lines_iter: source.lines().line_numbered().peekable(),
        pending_line: None,
        #[cfg(feature = "full")]
        conditional_stack: Vec::new(),
        file_names,
    };
    if !args.no_builtin_rules {
        // Built-in targets (e.g. default .SUFFIXES) act as fallbacks for
        // lookups like `special_target_has_prereq`.
        reader.built_in_targets.extend(
            builtin_targets()
                .into_iter()
                .map(|target| (target.name.clone(), target)),
        );
    }
    reader
        .read_all()
        .wrap_err_with(|| format!("while reading {}", name))?;
    Ok(reader)
}
/// Main parse loop: read logical lines, classify each, and dispatch to
/// the matching parser.
///
/// Lines classified `Unknown` are macro-expanded once and re-classified
/// before being rejected (a macro may expand into a rule or assignment).
fn read_all(&mut self) -> Result<()> {
    let topmost = NextLineSettings {
        escaped_newline_replacement: " ",
        ..Default::default()
    };
    while let Some((line_number, line)) = self.next_line(topmost) {
        let line = line?;
        if line.trim().is_empty() {
            continue;
        }
        let line_tokens: TokenString = line
            .parse()
            .with_context(|| format!("failed to parse line {}", line_number))?;
        let line_type = LineType::of(&line_tokens);
        // Second chance for unknown lines: expand macros and re-classify.
        let (line_tokens, line_type) = if matches!(line_type, LineType::Unknown) {
            let line_tokens = TokenString::text(
                self.expand_macros(&line_tokens)
                    .wrap_err_with(|| format!("while parsing line {}", line_number))?
                    .trim(),
            );
            let line_type = LineType::of(&line_tokens);
            (line_tokens, line_type)
        } else {
            (line_tokens, line_type)
        };
        match line_type {
            LineType::Rule => {
                self.read_rule(line_tokens, line_number).wrap_err_with(|| {
                    format!(
                        "while parsing rule definition starting on line {}",
                        line_number
                    )
                })?;
            }
            #[cfg(feature = "full")]
            LineType::RuleMacro => {
                self.read_rule_macro(line_tokens, line_number)
                    .wrap_err_with(|| {
                        format!(
                            "while parsing rule-specific macro definition starting on line {}",
                            line_number
                        )
                    })?;
            }
            LineType::Macro => {
                self.read_macro(line_tokens, line_number)
                    .wrap_err_with(|| {
                        format!(
                            "while parsing macro definition starting on line {}",
                            line_number
                        )
                    })?;
            }
            LineType::Include => {
                self.read_include(line_tokens, line_number)
                    .wrap_err_with(|| {
                        format!("while parsing include starting on line {}", line_number)
                    })?;
            }
            #[cfg(feature = "full")]
            LineType::Export => {
                let mut line_tokens = line_tokens;
                line_tokens.strip_prefix("export");
                if line_tokens.is_empty() {
                    // Bare `export`: export everything.
                    self.macros.exported = ExportConfig::all_but();
                } else {
                    // `export NAME=value` also performs the assignment;
                    // otherwise the remainder is an expanded name list.
                    let exported = if line_tokens.contains_text("=") {
                        self.read_macro(line_tokens, line_number)?
                    } else {
                        self.expand_macros(&line_tokens)?
                    };
                    self.macros.exported.add_all(exported.split_whitespace());
                }
            }
            #[cfg(feature = "full")]
            LineType::Unexport => {
                let mut line_tokens = line_tokens;
                line_tokens.strip_prefix("unexport");
                if line_tokens.is_empty() {
                    // Bare `unexport`: export nothing.
                    self.macros.exported = ExportConfig::only();
                } else {
                    let exported = self.expand_macros(&line_tokens)?;
                    self.macros.exported.remove_all(exported.split_whitespace());
                }
            }
            LineType::Unknown => {
                if !line_tokens.is_empty() {
                    bail!(
                        "error: line {}: unknown line \"{}\"",
                        line_number,
                        line_tokens
                    );
                }
            }
        }
    }
    Ok(())
}
/// Assemble and return the next logical line as `(line_number, line)`.
///
/// Handles, in order: a previously peeked line (`pending_line`), comment
/// stripping (respecting `\#` escapes), backslash-newline continuations,
/// and — with the "full" feature — conditional directives
/// (ifeq/ifdef/else/endif), which are consumed here and never surfaced,
/// along with any lines inside a false conditional branch.
fn next_line(&mut self, settings: NextLineSettings) -> Option<(usize, Result<String>)> {
    lazy_static! {
        static ref COMMENT: Regex = #[allow(clippy::unwrap_used)]
        Regex::new(r"(^|[^\\])#.*$").unwrap();
    }
    let escaped_newline_replacement = settings.escaped_newline_replacement;
    // A line stashed by a previous peeking call takes priority; it is
    // re-joined and re-stripped under *this* call's settings.
    if let Some((line_number, line)) = self.pending_line.take() {
        if settings.peeking {
            // Still peeking: put the pieces back for the next call.
            self.pending_line = Some((line_number, line.clone()));
        }
        let line = line.join(escaped_newline_replacement);
        let line = if settings.strip_comments {
            COMMENT
                .replace(&line, "$1")
                .replace(r"\#", "#")
                .trim_end()
                .to_owned()
        } else {
            line
        };
        return Some((line_number, Ok(line)));
    }
    while let Some((line_number, line)) = self.lines_iter.next() {
        let line = match line {
            Ok(x) => x,
            Err(err) => return Some((line_number, Err(err))),
        };
        // Conditional directives are always detected on the comment-stripped
        // text, regardless of `strip_comments`.
        let line_without_comments = COMMENT.replace(&line, "$1").replace(r"\#", "#");
        let line_without_comments = line_without_comments.trim_end();
        let line = if settings.strip_comments {
            line_without_comments.to_owned()
        } else {
            line
        };
        // Collect backslash-continued physical lines into one logical line.
        let mut line_pieces = vec![line];
        while line_pieces.last().map_or(false, |p| p.ends_with('\\')) {
            // Drop the trailing backslash before appending the next piece.
            if let Some(piece) = line_pieces.last_mut() {
                piece.pop();
            }
            if let Some((n, x)) = self.lines_iter.next() {
                let line = match x {
                    Ok(x) => x,
                    Err(err) => return Some((n, Err(err))),
                };
                let line = if settings.strip_comments {
                    COMMENT
                        .replace(&line, "$1")
                        .replace(r"\#", "#")
                        .trim_end()
                        .to_owned()
                } else {
                    line
                };
                // Leading whitespace of continuation lines is dropped.
                line_pieces.push(line.trim_start().to_owned());
            }
        }
        let line = line_pieces.join(escaped_newline_replacement);
        #[cfg(feature = "full")]
        {
            // NOTE(review): conditionals are detected on the *first* physical
            // line only (before continuations) — presumably intentional.
            let cond_line =
                ConditionalLine::from(line_without_comments, |t| self.expand_macros(t));
            let cond_line = match cond_line {
                Ok(x) => x,
                Err(err) => return Some((line_number, Err(err))),
            };
            if let Some(line) = cond_line {
                let mut deferred_eval_context = DeferredEvalContext::new(self);
                let action = line
                    .action(
                        self.conditional_stack.last(),
                        |name| self.stack.with_scope(&self.macros).is_defined(name),
                        |t| self.expand_macros_deferred_eval(t, &mut deferred_eval_context),
                    )
                    .wrap_err_with(|| {
                        format!("while applying conditional on line {}", line_number)
                    });
                // Fold in makefiles produced by $(eval ...) during the test.
                for child in deferred_eval_context {
                    self.extend(child);
                }
                let action = match action {
                    Ok(x) => x,
                    Err(err) => return Some((line_number, Err(err))),
                };
                action.apply_to(&mut self.conditional_stack);
                continue;
            }
            // Inside a false branch: swallow the line entirely.
            if self
                .conditional_stack
                .iter()
                .any(ConditionalState::skipping)
            {
                continue;
            }
        }
        if settings.peeking {
            // Store the raw pieces so a later consuming call can re-join
            // them under its own settings.
            self.pending_line = Some((line_number, line_pieces));
        }
        return Some((line_number, Ok(line)));
    }
    None
}
/// Peek at the next logical line and consume it only when `predicate`
/// accepts it; otherwise leave it pending and return `None`.
fn next_line_if(
    &mut self,
    settings: NextLineSettings,
    predicate: impl FnOnce(&(usize, Result<String>)) -> bool,
) -> Option<(usize, Result<String>)> {
    // First pass peeks so a rejected line stays available to the caller.
    let mut peek_settings = settings;
    peek_settings.peeking = true;
    let peeked = self.next_line(peek_settings)?;
    if !predicate(&peeked) {
        return None;
    }
    // Accepted: read again with the caller's settings, consuming it.
    self.next_line(settings)
}
/// Check whether special target `target` (e.g. ".SUFFIXES") lists `name`
/// among its prerequisites, consulting makefile-defined targets first and
/// built-in targets second. When `empty_counts` is set, an empty
/// prerequisite list also counts as a match.
fn special_target_has_prereq(&self, target: &str, name: &str, empty_counts: bool) -> bool {
    let found = self
        .targets
        .get(target)
        .or_else(|| self.built_in_targets.get(target));
    match found {
        None => false,
        Some(t) => {
            let prereqs = &t.prerequisites;
            (empty_counts && prereqs.is_empty()) || prereqs.iter().any(|p| p == name)
        }
    }
}
/// Handle an `include` / `-include` line: expand the field list and parse
/// each named makefile, merging its results into this reader.
///
/// With plain `include`, a missing file is logged and recorded in
/// `failed_includes` (a rule may build it later) while other errors abort;
/// with `-include`, every error is silently ignored.
fn read_include(&mut self, mut line_tokens: TokenString, line_number: usize) -> Result<()> {
    let suppress_errors = line_tokens.starts_with("-");
    line_tokens.strip_prefix("-");
    line_tokens.strip_prefix("include ");
    line_tokens.trim_start();
    let line = self.expand_macros(&line_tokens)?;
    let fields = line.split_whitespace();
    for field in fields {
        log::trace!("{}:{}: including {}", &self.file_name, line_number, field);
        // The included file sees our macros as an enclosing scope.
        let child_stack = self.stack.with_scope(&self.macros);
        let child = MakefileReader::read_file(
            self.args,
            child_stack,
            field,
            Rc::clone(&self.file_names),
        )
        .with_context(|| format!("while including {}", field));
        match child {
            Ok(child) => {
                let child = child.finish();
                self.extend(child);
            }
            Err(err) => {
                if !suppress_errors {
                    // Only NotFound is recoverable; anything else is fatal.
                    match err.downcast_ref::<IoError>() {
                        Some(err) if err.kind() == IoErrorKind::NotFound => {
                            log::error!(
                                "{}:{}: included makefile {} not found",
                                &self.file_name,
                                line_number,
                                field,
                            );
                            self.failed_includes.push(field.to_owned());
                        }
                        _ => {
                            return Err(err);
                        }
                    }
                }
            }
        }
    }
    Ok(())
}
/// Parse a rule line (`targets: prerequisites [; command]`) plus the
/// tab-indented command lines that follow it.
///
/// Depending on its shape, this registers ordinary targets, a suffix
/// (inference) rule gated by `.SUFFIXES`, or — with the "full" feature —
/// a pattern rule (`%.o: %.c`) or static pattern rule
/// (`targets: pattern: prereqs`).
fn read_rule(&mut self, line_tokens: TokenString, line_number: usize) -> Result<()> {
    let (targets, not_targets) = line_tokens
        .split_once(":")
        .ok_or_else(|| eyre!("read_rule couldn't find a ':' on line {}", line_number))?;
    // A second ':' marks a GNU static pattern rule.
    #[cfg(feature = "full")]
    let (static_targets, targets, not_targets) = if not_targets.contains_text(":") {
        let (pattern, not_targets) = not_targets
            .split_once(":")
            // Unreachable: contains_text(":") just succeeded above.
            .ok_or_else(|| eyre!("read_rule: ':' vanished between contains_text and split_once"))?;
        (Some(targets), pattern, not_targets)
    } else {
        (None, targets, not_targets)
    };
    let targets = self.expand_macros(&targets)?;
    let targets = targets.split_whitespace().collect::<Vec<_>>();
    // A ';' on the rule line starts the first command.
    let (prerequisites, mut commands) = match not_targets.split_once(";") {
        Some((prerequisites, command)) => {
            (prerequisites, vec![command])
        }
        None => (not_targets, vec![]),
    };
    if prerequisites.contains_text("=") {
        // LineType::of should have routed this to read_rule_macro instead.
        bail!("handling rule-specific macro as rule");
    }
    #[cfg(feature = "full")]
    let mut deferred_eval_context = DeferredEvalContext::new(self);
    // Expand prerequisites with partial internal macros ($@ etc.) in scope.
    let prerequisites = self
        .stack
        .with_scope(&self.macros)
        .with_scope(&LookupInternal::new_partial(&targets))
        .expand(
            &prerequisites,
            #[cfg(feature = "full")]
            Some(&mut deferred_eval_context),
        )?;
    // .SECONDEXPANSION requests a second expansion pass (GNU extension).
    #[cfg(feature = "full")]
    let prerequisites = if self.targets.has(".SECONDEXPANSION") {
        self.stack
            .with_scope(&self.macros)
            .with_scope(&LookupInternal::new_partial(&targets))
            .expand(
                &prerequisites.parse()?,
                #[cfg(feature = "full")]
                Some(&mut deferred_eval_context),
            )?
    } else {
        prerequisites
    };
    // Fold in makefiles produced by $(eval ...) during expansion.
    #[cfg(feature = "full")]
    for child in deferred_eval_context {
        self.extend(child);
    }
    let prerequisites = prerequisites
        .split_whitespace()
        .map(|x| x.into())
        .collect::<Vec<String>>();
    // Command lines keep their comments and escaped newlines verbatim.
    let settings = NextLineSettings {
        escaped_newline_replacement: "\\\n",
        strip_comments: false,
        ..Default::default()
    };
    // Consume the tab-indented (or blank) lines following the rule.
    while let Some((_, x)) = self.next_line_if(settings, |(_, x)| {
        x.as_ref()
            .ok()
            .map_or(false, |line| line.starts_with('\t') || line.is_empty())
    }) {
        let mut line = x?;
        if !line.is_empty() {
            // Guaranteed by the predicate above.
            assert!(line.starts_with('\t'));
            line.remove(0);
        }
        if line.is_empty() {
            continue;
        }
        commands.push(
            line.parse()
                .with_context(|| format!("failed to parse line {}", line_number))?,
        );
    }
    let commands = commands
        .into_iter()
        .map(CommandLine::from)
        .collect::<Vec<_>>();
    if targets.is_empty() {
        return Ok(());
    }
    let inference_match = inference_match(&targets, &prerequisites);
    #[cfg(feature = "full")]
    let is_pattern = targets.iter().all(|x| x.contains('%'));
    #[cfg(feature = "full")]
    if is_pattern {
        let new_rule = InferenceRule {
            source: ItemSource::File {
                name: self.file_name.clone(),
                line: line_number,
            },
            products: targets.into_iter().map(|x| x.to_owned()).collect(),
            prerequisites,
            commands,
            macros: MacroSet::new(),
        };
        if let Some(static_targets) = static_targets {
            // Static pattern rule: instantiate the pattern for each listed
            // target that matches it.
            let static_targets = self.expand_macros(&static_targets)?;
            let static_targets = static_targets.split_whitespace();
            for real_target in static_targets {
                if new_rule.matches(real_target)? {
                    let new_target = Target {
                        name: real_target.to_owned(),
                        prerequisites: new_rule.prereqs(real_target)?.collect(),
                        commands: new_rule.commands.clone(),
                        stem: new_rule
                            .first_match(real_target)?
                            .and_then(|x| x.get(1).map(|x| x.as_str().to_owned())),
                        already_updated: Cell::new(false),
                        macros: MacroSet::new(),
                    };
                    self.targets.put(new_target);
                }
            }
        } else {
            log::debug!("pattern-based inference rule defined: {:?}", &new_rule,);
            self.inference_rules.put(new_rule);
        }
        return Ok(());
    }
    // A suffix rule only counts when its suffixes appear in .SUFFIXES.
    let inference_match = inference_match.and_then(|inference| {
        if self.special_target_has_prereq(".SUFFIXES", inference.s1, false)
            && (inference.s2.is_empty()
                || self.special_target_has_prereq(".SUFFIXES", inference.s2, false))
        {
            Some(inference)
        } else {
            log::info!(
                "{}:{}: looks like {:?} is not a suffix rule because .SUFFIXES is {:?}",
                &self.file_name,
                line_number,
                inference,
                self.targets
                    .get(".SUFFIXES")
                    .or_else(|| self.built_in_targets.get(".SUFFIXES"))
                    .map(|x| &x.prerequisites)
            );
            None
        }
    });
    if let Some(inference_match) = inference_match {
        let new_rule = InferenceRule::new_suffix(
            ItemSource::File {
                name: self.file_name.clone(),
                line: line_number,
            },
            inference_match.s1.to_owned(),
            inference_match.s2.to_owned(),
            commands,
            MacroSet::new(),
        );
        log::debug!(
            "suffix-based inference rule defined by {:?} - {:?}",
            &inference_match,
            &new_rule,
        );
        self.inference_rules.put(new_rule);
    } else {
        log::debug!(
            "{}:{}: new target {:?} based on {:?}",
            &self.file_name,
            line_number,
            &targets,
            &prerequisites
        );
        for target in targets {
            // Remember the default goal: the first non-dot-prefixed target.
            if self.first_non_special_target.is_none() && !target.starts_with('.') {
                self.first_non_special_target = Some(target.into());
            }
            let new_target = Target {
                name: target.into(),
                prerequisites: prerequisites.clone(),
                commands: commands.clone(),
                stem: None,
                already_updated: Cell::new(false),
                macros: MacroSet::new(),
            };
            self.targets.put(new_target);
        }
    }
    Ok(())
}
#[cfg(feature = "full")]
fn read_rule_macro(&mut self, line_tokens: TokenString, line_number: usize) -> Result<()> {
let (targets, macro_def) = line_tokens
.split_once(":")
.ok_or_else(|| eyre!("read_rule couldn't find a ':' on line {}", line_number))?;
lazy_static! {
static ref NON_EAGER_EXPANSION_ASSIGNMENT_COLON: Regex = #[allow(clippy::unwrap_used)] Regex::new(":[^:=]").unwrap();
}
if macro_def.matches_regex(&NON_EAGER_EXPANSION_ASSIGNMENT_COLON) {
bail!("GNUful static patterns not yet implemented in rule-specific macros");
};
let targets = self.expand_macros(&targets)?;
let targets = targets.split_whitespace().collect::<Vec<_>>();
let (name, value) = macro_def
.split_once("=")
.ok_or_else(|| eyre!("read_macro couldn't find a '=' on line {}", line_number))?;
let macro_assignment = self.parse_macro_assignment(name, value, line_number)?;
if targets.is_empty() {
return Ok(());
}
let inference_match = inference_match(&targets, &[]);
let is_pattern = targets.iter().all(|x| x.contains('%'));
let mut macro_set = MacroSet::new();
if let Some(outcome) = self
.check_macro_assignment_outcome(¯o_assignment, self.stack.with_scope(&self.macros))
{
let (name, value) = self.resolve_macro_value(macro_assignment, outcome, line_number)?;
macro_set.set(name, value);
}
if is_pattern {
let new_rule = InferenceRule {
source: ItemSource::File {
name: self.file_name.clone(),
line: line_number,
},
products: targets.into_iter().map(|x| x.to_owned()).collect(),
prerequisites: vec![],
commands: vec![],
macros: macro_set,
};
log::error!(
"{}:{}: inference rule specific macros not yet working",
&self.file_name,
line_number
);
self.inference_rules.put(new_rule);
return Ok(());
}
let inference_match = inference_match.and_then(|inference| {
if self.special_target_has_prereq(".SUFFIXES", inference.s1, false)
&& (inference.s2.is_empty()
|| self.special_target_has_prereq(".SUFFIXES", inference.s2, false))
{
Some(inference)
} else {
log::info!(
"{}:{}: looks like {:?} is not a suffix rule because .SUFFIXES is {:?}",
&self.file_name,
line_number,
inference,
self.targets
.get(".SUFFIXES")
.or_else(|| self.built_in_targets.get(".SUFFIXES"))
.map(|x| &x.prerequisites)
);
None
}
});
if let Some(inference_match) = inference_match {
let new_rule = InferenceRule::new_suffix(
ItemSource::File {
name: self.file_name.clone(),
line: line_number,
},
inference_match.s1.to_owned(),
inference_match.s2.to_owned(),
vec![],
macro_set,
);
log::error!(
"{}:{}: inference rule specific macros not yet working",
&self.file_name,
line_number
);
self.inference_rules.put(new_rule);
} else {
log::trace!(
"{}:{}: target {:?} gets macros {:?}",
&self.file_name,
line_number,
&targets,
¯o_set
);
for target in targets {
if self.first_non_special_target.is_none() && !target.starts_with('.') {
self.first_non_special_target = Some(target.into());
}
let new_target = Target {
name: target.into(),
prerequisites: vec![],
commands: vec![],
stem: None,
already_updated: Cell::new(false),
macros: macro_set.clone(),
};
self.targets.put(new_target);
}
}
Ok(())
}
/// Parse a macro assignment line (or a `define ... endef` block with the
/// "full" feature) and store the macro if the assignment takes effect.
/// Returns the macro's name either way.
fn read_macro(&mut self, mut line_tokens: TokenString, line_number: usize) -> Result<String> {
    let (name, value) = if cfg!(feature = "full") && line_tokens.starts_with("define ") {
        // `define NAME [=]` ... `endef`: body lines joined with newlines.
        line_tokens.strip_prefix("define ");
        if line_tokens.ends_with("=") {
            line_tokens.strip_suffix("=");
            line_tokens.trim_end();
        }
        let mut value = TokenString::empty();
        let settings = NextLineSettings {
            strip_comments: false, ..Default::default()
        };
        while let Some((_, line)) = self.next_line(settings) {
            let line = line?;
            if line == "endef" {
                break;
            }
            if !value.is_empty() {
                value.extend(TokenString::text("\n"));
            }
            value.extend(line.parse()?);
        }
        (line_tokens, value)
    } else {
        // Single-line form: split at the first '=' (operator suffixes like
        // ':' / '?' / '+' stay on the name for parse_macro_assignment).
        line_tokens
            .split_once("=")
            .ok_or_else(|| eyre!("read_macro couldn't find a '=' on line {}", line_number))?
    };
    let macro_assignment = self.parse_macro_assignment(name, value, line_number)?;
    let macro_name = macro_assignment.name.clone();
    // The assignment may be a no-op (e.g. overridden by the command line).
    if let Some(outcome) = self
        .check_macro_assignment_outcome(&macro_assignment, self.stack.with_scope(&self.macros))
    {
        let (name, value) = self.resolve_macro_value(macro_assignment, outcome, line_number)?;
        log::trace!(
            "{}:{}: setting macro {} to {:?}",
            &self.file_name,
            line_number,
            &name,
            &value
        );
        self.macros.set(name, value);
    }
    Ok(macro_name)
}
/// Turn the raw name/value halves of an assignment into a `MacroAssignment`.
///
/// Because the caller split at the first '=', GNU operators leave a marker
/// on the end of the (expanded) name: `::` from `::=`, `:` from `:=`,
/// `?` from `?=`, `+` from `+=`. Values of eager assignments are expanded
/// immediately; others stay as tokens for lazy expansion.
fn parse_macro_assignment(
    &mut self,
    name: TokenString,
    mut value: TokenString,
    line_number: usize,
) -> Result<MacroAssignment> {
    let name = self.expand_macros(&name)?;
    #[cfg(feature = "full")]
    let mut expand_value = false;
    #[cfg(feature = "full")]
    let mut skip_if_defined = false;
    #[cfg(feature = "full")]
    let mut append = false;
    #[cfg(feature = "full")]
    let name = if let Some(real_name) = name.strip_suffix("::") {
        // `::=`: eagerly expanded (POSIX spelling).
        expand_value = true;
        real_name
    } else if let Some(real_name) = name.strip_suffix(":") {
        // `:=`: eagerly expanded.
        expand_value = true;
        real_name
    } else if let Some(real_name) = name.strip_suffix("?") {
        // `?=`: only takes effect when the macro is currently undefined.
        skip_if_defined = true;
        real_name
    } else if let Some(real_name) = name.strip_suffix("+") {
        // `+=`: appends to any existing value.
        append = true;
        real_name
    } else {
        &name
    };
    let name = name.trim();
    value.trim_start();
    #[cfg(feature = "full")]
    let value = if expand_value {
        TokenString::text(
            self.expand_macros(&value)
                .wrap_err_with(|| format!("while defining {} on line {}", name, line_number))?,
        )
    } else {
        value
    };
    Ok(MacroAssignment {
        name: name.to_owned(),
        value,
        #[cfg(feature = "full")]
        expand_value,
        #[cfg(feature = "full")]
        skip_if_defined,
        #[cfg(feature = "full")]
        append,
    })
}
/// Decide whether an assignment takes effect and, if so, whether it sets
/// a fresh value or appends to an existing one.
///
/// Command-line/MAKEFLAGS macros always win over this assignment;
/// environment macros win when `args.environment_overrides` is set; and
/// (full feature) a `?=` assignment is skipped when already defined.
fn check_macro_assignment_outcome(
    &self,
    macro_assignment: &MacroAssignment,
    stack: MacroScopeStack,
) -> Option<MacroAssignmentOutcome> {
    // NOTE(review): `stack.get` is called twice and the source cloned;
    // assuming lookups are pure this is only a minor inefficiency.
    let skipped = match stack.get(&macro_assignment.name).map(|x| x.source.clone()) {
        Some(ItemSource::CommandLineOrMakeflags) => true,
        Some(ItemSource::Environment) => self.args.environment_overrides,
        #[cfg(feature = "full")]
        Some(_) => macro_assignment.skip_if_defined,
        #[cfg(not(feature = "full"))]
        Some(_) => false,
        None => false,
    };
    if skipped {
        None
    } else {
        Some(match stack.get(&macro_assignment.name) {
            // `+=` onto an existing macro: hand back the old value to append to.
            #[cfg(feature = "full")]
            Some(old_value) if macro_assignment.append => {
                MacroAssignmentOutcome::AppendedTo(old_value.into_owned())
            }
            _ => MacroAssignmentOutcome::Set,
        })
    }
}
/// Produce the final `(name, Macro)` pair for an assignment outcome.
///
/// Appends join old and new text with a single space; when appending to an
/// eagerly-expanded (`:=`-style) macro, the appended text is expanded
/// first to match its semantics. Fresh sets record this file and line as
/// the macro's source.
fn resolve_macro_value(
    &mut self,
    macro_assignment: MacroAssignment,
    outcome: MacroAssignmentOutcome,
    line_number: usize,
) -> Result<(String, Macro)> {
    match outcome {
        MacroAssignmentOutcome::AppendedTo(mut old_value) => {
            let value = macro_assignment.value;
            #[cfg(feature = "full")]
            let value = if old_value.eagerly_expanded {
                TokenString::text(self.expand_macros(&value).wrap_err_with(|| {
                    format!(
                        "while defining {} on line {}",
                        macro_assignment.name, line_number
                    )
                })?)
            } else {
                value
            };
            old_value.text.extend(TokenString::text(" "));
            old_value.text.extend(value);
            Ok((macro_assignment.name, old_value))
        }
        MacroAssignmentOutcome::Set => Ok((
            macro_assignment.name,
            Macro {
                source: ItemSource::File {
                    name: self.file_name.clone(),
                    line: line_number,
                },
                text: macro_assignment.value,
                #[cfg(feature = "full")]
                eagerly_expanded: macro_assignment.expand_value,
            },
        )),
    }
}
/// Expand macro references in `text` against the current scope stack,
/// immediately folding any makefiles generated by deferred evaluation
/// ($(eval ...)) back into this reader.
fn expand_macros(&mut self, text: &TokenString) -> Result<String> {
    #[cfg(feature = "full")]
    let mut deferred_eval_context = DeferredEvalContext::new(self);
    let result = self.expand_macros_deferred_eval(
        text,
        #[cfg(feature = "full")]
        &mut deferred_eval_context,
    );
    // Apply eval output even when expansion itself failed.
    #[cfg(feature = "full")]
    for child in deferred_eval_context {
        self.extend(child);
    }
    result
}
/// Expand macro references, collecting $(eval ...) output into the
/// caller-supplied context instead of applying it here (which `&self`
/// forbids; the caller merges it afterwards).
fn expand_macros_deferred_eval(
    &self,
    text: &TokenString,
    #[cfg(feature = "full")] deferred_eval_context: &mut DeferredEvalContext<R>,
) -> Result<String> {
    self.stack
        .with_scope(&self.macros)
        .expand(
            text,
            #[cfg(feature = "full")]
            Some(deferred_eval_context),
        )
        .wrap_err_with(|| format!("while expanding \"{}\"", text))
}
/// Consume the reader, keeping only the parse results callers need.
pub fn finish(self) -> FinishedMakefileReader {
    let targets = self.targets.into();
    FinishedMakefileReader {
        targets,
        inference_rules: self.inference_rules,
        macros: self.macros,
        first_non_special_target: self.first_non_special_target,
        failed_includes: self.failed_includes,
    }
}
fn extend(&mut self, new: FinishedMakefileReader) {
self.inference_rules.extend(new.inference_rules);
self.macros.extend(new.macros);
for (_, target) in new.targets {
self.targets.put(target);
}
if self.first_non_special_target.is_none() {
self.first_non_special_target = new.first_non_special_target;
}
self.failed_includes.extend(new.failed_includes);
}
}
/// The parse results of one makefile, detached from its reader so they can
/// be merged into a parent (`extend`) or consumed by the build phase.
pub struct FinishedMakefileReader {
    pub inference_rules: InferenceRuleSet,
    pub macros: MacroSet,
    pub targets: HashMap<String, Target>,
    /// The default goal, if any non-dot-prefixed target was defined.
    pub first_non_special_target: Option<String>,
    /// Included files that were missing (may be buildable by a rule later).
    pub failed_includes: Vec<String>,
}
// Unit tests drive `MakefileReader::read` over in-memory makefiles.
#[cfg(test)]
mod test {
    use super::*;
    use std::io::Cursor;

    // Shorthand so each test body can use `?`.
    type R = Result<()>;

    // Escaped newlines must join both macro values and prerequisite lists.
    #[test]
    fn multi_line_dependencies() -> R {
        let file = "
unrelated: example
\tswag
x = 3 4 \\
\t\t5
a: $(x) b \\
\t\tc \\
\t\td
\tfoo";
        let args = Args::empty();
        let makefile = MakefileReader::read(
            &args,
            MacroScopeStack::default(),
            MacroSet::new(),
            Cursor::new(file),
            "",
            Default::default(),
        )?
        .finish();
        assert_eq!(
            makefile.targets["a"].prerequisites,
            vec!["3", "4", "5", "b", "c", "d"]
        );
        Ok(())
    }

    // The first true branch of an ifeq chain supplies the macro value.
    #[cfg(feature = "full")]
    #[test]
    fn basic_conditionals() -> R {
        let file = "
ifeq (1,1)
worked = yes
else ifeq (2,2)
worked = no
else
worked = perhaps
endif
";
        let args = Args::empty();
        let mut makefile = MakefileReader::read(
            &args,
            MacroScopeStack::default(),
            MacroSet::new(),
            Cursor::new(file),
            "",
            Default::default(),
        )?;
        assert_eq!(
            makefile.expand_macros(&TokenString::r#macro("worked"))?,
            "yes"
        );
        Ok(())
    }

    // Conditionals interleaved with a rule's command lines must still
    // attach the surviving command to the rule.
    #[cfg(feature = "full")]
    #[test]
    fn condition_in_rule() -> R {
        let file = "
a:
ifeq (1,1)
\tfoo
endif
";
        let args = Args::empty();
        let makefile = MakefileReader::read(
            &args,
            MacroScopeStack::default(),
            MacroSet::new(),
            Cursor::new(file),
            "",
            Default::default(),
        )?;
        let makefile = makefile.finish();
        assert_eq!(makefile.targets["a"].commands.len(), 1);
        Ok(())
    }

    // define/endef bodies are joined with newlines.
    #[cfg(feature = "full")]
    #[test]
    fn define_syntax() -> R {
        let file = "
define foo =
bar
baz
endef
";
        let args = Args::empty();
        let mut makefile = MakefileReader::read(
            &args,
            MacroScopeStack::default(),
            MacroSet::new(),
            Cursor::new(file),
            "",
            Default::default(),
        )?;
        assert_eq!(
            makefile.expand_macros(&TokenString::r#macro("foo"))?,
            "bar\nbaz"
        );
        Ok(())
    }

    // else-ifdef chains without a final else must parse cleanly.
    #[cfg(feature = "full")]
    #[test]
    fn elseif() -> R {
        let file = "
ifdef CONFIG_CC_OPTIMIZE_FOR_PERFORMANCE
KBUILD_CFLAGS += -O2
else ifdef CONFIG_CC_OPTIMIZE_FOR_PERFORMANCE_O3
KBUILD_CFLAGS += -O3
else ifdef CONFIG_CC_OPTIMIZE_FOR_SIZE
KBUILD_CFLAGS += -Os
endif
FOO = bar
";
        let args = Args::empty();
        let mut makefile = MakefileReader::read(
            &args,
            MacroScopeStack::default(),
            MacroSet::new(),
            Cursor::new(file),
            "",
            Default::default(),
        )?;
        assert_eq!(makefile.expand_macros(&TokenString::r#macro("FOO"))?, "bar",);
        Ok(())
    }

    // $(foreach ...)/$(eval $(call ...)) generating rules dynamically.
    #[test]
    #[cfg(feature = "full")]
    fn eval() -> R {
        let file = "
PROGRAMS = server client
server_OBJS = server.o server_priv.o server_access.o
server_LIBS = priv protocol
client_OBJS = client.o client_api.o client_mem.o
client_LIBS = protocol
# Everything after this is generic
.PHONY: all
all: $(PROGRAMS)
define PROGRAM_template =
$(1): $$($(1)_OBJS) $$($(1)_LIBS:%=-l%)
ALL_OBJS += $$($(1)_OBJS)
endef
$(foreach prog,$(PROGRAMS),$(eval $(call PROGRAM_template,$(prog))))
$(PROGRAMS):
\t$(LINK.o) $^ $(LDLIBS) -o $@
clean:
\trm -f $(ALL_OBJS) $(PROGRAMS)
";
        let args = Args::empty();
        let makefile = MakefileReader::read(
            &args,
            MacroScopeStack::default(),
            MacroSet::new(),
            Cursor::new(file),
            "",
            Default::default(),
        )?;
        let makefile = makefile.finish();
        assert!(makefile.targets.contains_key("server"));
        Ok(())
    }

    // Comment stripping: bare '#' cuts the line, '\#' is a literal '#',
    // and comments inside command lines are preserved verbatim.
    #[test]
    fn comment_bullshit() -> R {
        let file = "
foo: bar baz#swag
example: test\\#post
info:
\thello # there
";
        let args = Args::empty();
        let makefile = MakefileReader::read(
            &args,
            MacroScopeStack::default(),
            MacroSet::new(),
            Cursor::new(file),
            "",
            Default::default(),
        )?;
        let makefile = makefile.finish();
        assert_eq!(
            makefile.targets["foo"],
            Target {
                name: "foo".to_owned(),
                prerequisites: vec!["bar".to_owned(), "baz".to_owned()],
                commands: vec![],
                stem: None,
                already_updated: Cell::new(false),
                macros: MacroSet::new(),
            }
        );
        assert_eq!(
            makefile.targets["example"],
            Target {
                name: "example".to_owned(),
                prerequisites: vec!["test#post".to_owned()],
                commands: vec![],
                stem: None,
                already_updated: Cell::new(false),
                macros: MacroSet::new(),
            }
        );
        assert_eq!(
            makefile.targets["info"],
            Target {
                name: "info".to_owned(),
                prerequisites: vec![],
                commands: vec![CommandLine::from(TokenString::text("hello # there")),],
                stem: None,
                already_updated: Cell::new(false),
                macros: MacroSet::new(),
            }
        );
        Ok(())
    }

    // A shell '$$#' in a continued command line must not be eaten as a comment.
    #[test]
    fn sdafjijsafjdoisdf() -> R {
        let file = "
cursed:
\techo this uses the bash variable '$$#' and all that \\
\techo yeah its value is $$# and it's really cool
";
        let args = Args::empty();
        let makefile = MakefileReader::read(
            &args,
            MacroScopeStack::default(),
            MacroSet::new(),
            Cursor::new(file),
            "",
            Default::default(),
        )?;
        let _makefile = makefile.finish();
        Ok(())
    }

    // Suffix rules are only recognized while their suffixes are in .SUFFIXES;
    // only .c.o (built-in list) and .post.test (after redefinition) count.
    #[test]
    fn double_suffix_rule() -> R {
        let file = "
.c.o:
\techo yeet
.SUFFIXES:
.l.a:
\techo hey
.SUFFIXES: .test .post
.post.test:
\techo hiiii
";
        let args = Args::empty();
        let makefile = MakefileReader::read(
            &args,
            MacroScopeStack::default(),
            MacroSet::new(),
            Cursor::new(file),
            "",
            Default::default(),
        )?;
        let makefile = makefile.finish();
        assert_eq!(makefile.inference_rules.len(), 2);
        Ok(())
    }

    // Repeated rule lines for one target merge their prerequisite lists.
    #[test]
    fn dependency_prepending_appending() -> R {
        let file = "
test: b
test: a
\techo hi
test: c
";
        let args = Args::empty();
        let makefile = MakefileReader::read(
            &args,
            MacroScopeStack::default(),
            MacroSet::new(),
            Cursor::new(file),
            "",
            Default::default(),
        )?;
        let makefile = makefile.finish();
        assert_eq!(
            makefile.targets["test"].prerequisites,
            vec!["a".to_owned(), "b".to_owned(), "c".to_owned()]
        );
        Ok(())
    }

    // `export x = 3` both performs the assignment and marks x exported.
    #[cfg(feature = "full")]
    #[test]
    fn export_assign() -> R {
        let file = "export x = 3";
        let args = Args::empty();
        let makefile = MakefileReader::read(
            &args,
            MacroScopeStack::default(),
            MacroSet::new(),
            Cursor::new(file),
            "",
            Default::default(),
        )?;
        let makefile = makefile.finish();
        assert_eq!(
            makefile.macros.get_non_recursive("x").map(|x| &x.text),
            Some(&TokenString::text("3"))
        );
        assert!(
            matches!(makefile.macros.exported, ExportConfig::Only(exported) if exported.contains("x"))
        );
        Ok(())
    }
}