use rustc::dep_graph::DepGraph;
use rustc::hir::map as hir_map;
use rustc::hir;
use rustc::lint::{self, LintStore};
use rustc::session::config::Options as SessionOptions;
use rustc::session::config::{Input, OutputFilenames};
use rustc::session::{self, DiagnosticOutput, Session};
use rustc::ty::steal::Steal;
use rustc::ty::{self, GlobalCtxt, ResolverOutputs};
use rustc::util::common::ErrorReported;
use rustc_codegen_utils::codegen_backend::CodegenBackend;
use rustc_data_structures::declare_box_region_type;
use rustc_data_structures::sync::{Lock, Lrc};
use rustc_driver;
use rustc_errors::DiagnosticBuilder;
use rustc_incremental::DepGraphFuture;
use rustc_interface::interface;
use rustc_interface::interface::BoxedResolver;
use rustc_interface::util::get_codegen_backend;
use rustc_interface::{util, Config};
use std::any::Any;
use std::cell::RefCell;
use std::collections::HashSet;
use std::mem;
use std::path::{Path, PathBuf};
use std::rc::Rc;
use std::sync::Arc;
use syntax::ast;
use syntax::ast::DUMMY_NODE_ID;
use syntax::ast::{
Block, BlockCheckMode, Expr, ForeignItem, ImplItem, Item, ItemKind, NodeId, Param, Pat, Stmt,
Ty, UnsafeSource,
};
use syntax_pos::hygiene::SyntaxContext;
use rustc_parse::parser::Parser;
use syntax::token::{self, TokenKind};
use syntax;
use rustc_errors::PResult;
use syntax::ptr::P;
use syntax::source_map::SourceMap;
use syntax::source_map::{FileLoader, RealFileLoader};
use syntax::symbol::{kw, Symbol};
use syntax::tokenstream::TokenTree;
use syntax_pos::{FileName, Span, DUMMY_SP};
use syntax_pos::edition::Edition;
use crate::ast_manip::remove_paren;
use crate::command::{GenerationalTyCtxt, RefactorState, Registry};
use crate::file_io::{ArcFileIO, FileIO};
use crate::util::Lone;
use crate::RefactorCtxt;
use crate::context::HirMap;
/// Compilation phase at which a refactoring command runs.  The variants
/// correspond to the three `new_phase_*` constructors on `RefactorCtxt`
/// below: each later phase has strictly more compiler state available.
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Debug)]
pub enum Phase {
    /// Phase 1: session only; no HIR map and no type context.
    Phase1,
    /// Phase 2: HIR map available, but no type context yet.
    Phase2,
    /// Phase 3: HIR map and type context both available.
    Phase3,
}
impl<'a, 'tcx: 'a> RefactorCtxt<'a, 'tcx> {
    /// Builds a phase-1 context: session only, with no HIR map and no
    /// type context.
    pub fn new_phase_1(sess: &'a Session) -> RefactorCtxt<'a, 'tcx> {
        RefactorCtxt::new(sess, None, None, None)
    }

    /// Builds a phase-2 context: session plus a HIR map covering node
    /// ids up to `max_node_id`, but still no type context.
    pub fn new_phase_2(
        sess: &'a Session,
        max_node_id: NodeId,
        map: &'a hir_map::Map<'tcx>,
    ) -> RefactorCtxt<'a, 'tcx> {
        RefactorCtxt::new(sess, None, Some(HirMap::new(max_node_id, map)), None)
    }

    /// Builds a phase-3 context: session, HIR map, and a generational
    /// `TyCtxt` wrapper.
    pub fn new_phase_3(
        sess: &'a Session,
        max_node_id: NodeId,
        map: &'a hir_map::Map<'tcx>,
        tcx: GenerationalTyCtxt<'tcx>,
    ) -> RefactorCtxt<'a, 'tcx> {
        RefactorCtxt::new(sess, None, Some(HirMap::new(max_node_id, map)), Some(tcx))
    }
}
/// Fills in `maybe_sysroot` when the caller did not specify one,
/// deriving it from the (absolute) compiler binary path in `args[0]`:
/// the sysroot is taken to be the grandparent directory of the
/// executable.  Options that already carry a sysroot pass through
/// untouched.
fn maybe_set_sysroot(mut sopts: SessionOptions, args: &[String]) -> SessionOptions {
    if sopts.maybe_sysroot.is_none() {
        if let Some(arg0) = args.first() {
            let exe = Path::new(arg0);
            if exe.is_absolute() {
                // e.g. /sysroot/bin/rustc -> /sysroot
                if let Some(root) = exe.parent().and_then(Path::parent) {
                    sopts.maybe_sysroot = Some(root.to_owned());
                }
            }
        }
    }
    sopts
}
/// Manually clones an `interface::Config`, which does not implement
/// `Clone` itself.
///
/// Most fields are deep-cloned; `file_loader`, `diagnostic_output`,
/// `register_lints`, and `override_queries` are reset to their default
/// values instead — NOTE(review): presumably because those fields are
/// not cloneable (boxed closures / owned handles); confirm callers do
/// not rely on them surviving a clone.
pub fn clone_config(config: &interface::Config) -> interface::Config {
    // `Input` is not `Clone` either, so rebuild it variant by variant.
    let input = match &config.input {
        Input::File(f) => Input::File(f.clone()),
        Input::Str { name, input } => Input::Str {
            name: name.clone(),
            input: input.clone(),
        },
    };
    interface::Config {
        opts: config.opts.clone(),
        crate_cfg: config.crate_cfg.clone(),
        input,
        input_path: config.input_path.clone(),
        output_file: config.output_file.clone(),
        output_dir: config.output_dir.clone(),
        file_loader: None,
        diagnostic_output: DiagnosticOutput::Default,
        stderr: config.stderr.clone(),
        crate_name: config.crate_name.clone(),
        lint_caps: config.lint_caps.clone(),
        register_lints: None,
        override_queries: None,
        registry: config.registry.clone(),
    }
}
/// Parses rustc-style command-line `args` into an `interface::Config`.
///
/// # Panics
/// Panics if argument parsing fails or if anything other than exactly
/// one free (input-file) argument is supplied.
pub fn create_config(args: &[String]) -> interface::Config {
    let matches = rustc_driver::handle_options(args).expect("rustc arg parsing failed");
    let sopts = session::config::build_session_options(&matches);
    let cfg = interface::parse_cfgspecs(matches.opt_strs("cfg"));
    // Derive the sysroot from args[0] if the options don't carry one.
    let sopts = maybe_set_sysroot(sopts, args);
    let output_dir = matches.opt_str("out-dir").map(PathBuf::from);
    let output_file = matches.opt_str("o").map(PathBuf::from);

    assert!(matches.free.len() == 1, "expected exactly one input file");
    // Build the path once, instead of wrapping it in `Some` and then
    // immediately `unwrap()`ing it again to construct `input`.
    let input_path = PathBuf::from(&matches.free[0]);
    let input = Input::File(input_path.clone());

    interface::Config {
        opts: sopts,
        crate_cfg: cfg,
        input,
        input_path: Some(input_path),
        output_file,
        output_dir,
        file_loader: None,
        diagnostic_output: DiagnosticOutput::Default,
        stderr: None,
        crate_name: None,
        lint_caps: Default::default(),
        register_lints: None,
        override_queries: None,
        registry: rustc_driver::diagnostics_registry(),
    }
}
/// Runs `f` against a freshly constructed compiler, with the
/// thread-local state rustc expects already installed.
///
/// Incremental compilation is force-disabled, and `file_loader`
/// replaces whatever loader `config` carried (even if `None`).
#[cfg_attr(feature = "profile", flame)]
pub fn run_compiler<F, R>(
    mut config: interface::Config,
    file_loader: Option<Box<dyn FileLoader + Send + Sync>>,
    f: F,
) -> R
where
    F: FnOnce(&interface::Compiler) -> R,
    R: Send,
{
    // NOTE(review): incremental state presumably interferes with
    // repeated refactoring runs — confirm.
    config.opts.incremental = None;
    config.file_loader = file_loader;
    // Install 2018-edition syntax globals and the thread-local slot for
    // the global type context before entering rustc proper.
    // `Lock::new(0)` only seeds `GCX_PTR` with a placeholder value.
    syntax::with_globals(Edition::Edition2018, move || {
        ty::tls::GCX_PTR.set(&Lock::new(0), || {
            ty::tls::with_thread_locals(|| {
                interface::run_compiler_in_existing_thread_pool(config, f)
            })
        })
    })
}
/// Sets up the same rustc thread-local state as `run_compiler`, then
/// builds a `RefactorState` from `config`, the command registry, file
/// I/O handle, and mark set, and hands it to `f`.
///
/// Unlike `run_compiler`, this leaves `config.file_loader` untouched —
/// NOTE(review): confirm that is intentional (the state presumably
/// installs its own loader via `make_compiler`).
#[cfg_attr(feature = "profile", flame)]
pub fn run_refactoring<F, R>(
    mut config: interface::Config,
    cmd_reg: Registry,
    file_io: Arc<dyn FileIO + Sync + Send>,
    marks: HashSet<(NodeId, Symbol)>,
    f: F,
) -> R
where
    F: FnOnce(RefactorState) -> R,
    R: Send,
{
    // Incremental compilation is force-disabled, as in `run_compiler`.
    config.opts.incremental = None;
    syntax::with_globals(Edition::Edition2018, move || {
        ty::tls::GCX_PTR.set(&Lock::new(0), || {
            ty::tls::with_thread_locals(|| {
                let state = RefactorState::new(config, cmd_reg, file_io, marks);
                f(state)
            })
        })
    })
}
/// Local field-for-field replica of `rustc_interface::interface::Compiler`.
///
/// `make_compiler` below `transmute`s a value of this type into an
/// `interface::Compiler`, so the field order and types here MUST stay
/// exactly in sync with the private definition in the pinned rustc
/// version.  Fields are never read directly on this side, hence
/// `dead_code`.
#[allow(dead_code)]
pub struct Compiler {
    pub sess: Lrc<Session>,
    pub codegen_backend: Lrc<Box<dyn CodegenBackend>>,
    source_map: Lrc<SourceMap>,
    input: Input,
    input_path: Option<PathBuf>,
    output_dir: Option<PathBuf>,
    output_file: Option<PathBuf>,
    crate_name: Option<String>,
    register_lints: Option<Box<dyn Fn(&Session, &mut lint::LintStore) + Send + Sync>>,
    override_queries:
        Option<fn(&Session, &mut ty::query::Providers<'_>, &mut ty::query::Providers<'_>)>,
}
/// Local replica of the private `Queries` struct in `rustc_interface`:
/// one memoized slot per compilation stage, from dep-graph loading
/// through linking.  NOTE(review): keep field order and types in sync
/// with the pinned rustc version; currently unused at runtime, hence
/// `dead_code`.
#[allow(dead_code)]
#[derive(Default)]
struct Queries<'tcx> {
    dep_graph_future: Query<Option<DepGraphFuture>>,
    parse: Query<ast::Crate>,
    crate_name: Query<String>,
    register_plugins: Query<(ast::Crate, Lrc<LintStore>)>,
    expansion: Query<(ast::Crate, Steal<Rc<RefCell<BoxedResolver>>>)>,
    dep_graph: Query<DepGraph>,
    lower_to_hir: Query<(&'tcx hir::map::Forest, Steal<ResolverOutputs>)>,
    prepare_outputs: Query<OutputFilenames>,
    global_ctxt: Query<BoxedGlobalCtxt>,
    ongoing_codegen: Query<Box<dyn Any>>,
    link: Query<()>,
}
/// A lazily computed query slot: `None` until the stage has run, then
/// either the computed value or `ErrorReported`.
#[allow(dead_code)]
struct Query<T> {
    result: RefCell<Option<Result<T, ErrorReported>>>,
}
impl<T> Default for Query<T> {
fn default() -> Self {
Query {
result: RefCell::new(None),
}
}
}
// Generates the `BoxedGlobalCtxt` open/close region wrapper around a
// `GlobalCtxt` reference.  NOTE(review): this mirrors the type that
// `rustc_interface` declares with the same macro — keep the signature
// in sync with the pinned rustc version.
declare_box_region_type!(
    pub BoxedGlobalCtxt,
    for('gcx),
    (&'gcx GlobalCtxt<'gcx>) -> ((), ())
);
/// Builds an `interface::Compiler` from `config`, routing all file
/// reads through `file_io`.
///
/// A dummy source file is registered first so the source map is never
/// empty (NOTE(review): presumably some downstream code assumes at
/// least one file exists — confirm).
///
/// The local `Compiler` struct is transmuted into
/// `interface::Compiler`, which is only sound while the local struct's
/// layout exactly mirrors the private rustc definition — see the
/// comment on `Compiler` above.
pub fn make_compiler(config: &Config, file_io: Arc<dyn FileIO + Sync + Send>) -> interface::Compiler {
    let mut config = clone_config(config);
    config.file_loader = Some(Box::new(ArcFileIO(file_io)));
    let (sess, codegen_backend, source_map) = util::create_session(
        config.opts,
        config.crate_cfg,
        config.diagnostic_output,
        config.file_loader,
        config.input_path.clone(),
        config.lint_caps,
        config.registry,
    );

    // Ensure the source map contains at least one file.
    source_map.new_source_file(FileName::Custom("<dummy>".to_string()), " ".to_string());

    let compiler = Compiler {
        sess,
        codegen_backend,
        source_map,
        input: config.input,
        input_path: config.input_path,
        output_dir: config.output_dir,
        output_file: config.output_file,
        crate_name: config.crate_name,
        override_queries: config.override_queries,
        register_lints: config.register_lints,
    };
    // SAFETY: `Compiler` is declared as a field-for-field replica of
    // `interface::Compiler` (same field order and types), so the
    // transmute reinterprets an identical layout.
    unsafe { mem::transmute(compiler) }
}
/// Parses rustc-style command-line `args` and builds a standalone
/// `Session` from them, discarding the codegen backend that
/// `build_session` also produces.
///
/// # Panics
/// Panics if argument parsing fails or if anything other than exactly
/// one free (input-file) argument is supplied.
pub fn build_session_from_args(
    args: &[String],
    file_loader: Option<Box<dyn FileLoader + Sync + Send>>,
) -> Session {
    let matches = rustc_driver::handle_options(args).expect("rustc arg parsing failed");
    let sopts = maybe_set_sysroot(session::config::build_session_options(&matches), args);
    assert!(matches.free.len() == 1, "expected exactly one input file");
    let in_path = Path::new(&matches.free[0]).to_owned();
    let (sess, _backend) = build_session(sopts, Some(in_path), file_loader);
    sess
}
/// Constructs a `Session` plus its codegen backend directly, without
/// going through `rustc_interface`.
///
/// `file_loader` defaults to the real filesystem loader when `None`.
/// A dummy source file is registered so the source map is never empty
/// (NOTE(review): presumably downstream code assumes at least one file
/// exists — confirm, same pattern as `make_compiler`).
fn build_session(
    sopts: SessionOptions,
    in_path: Option<PathBuf>,
    file_loader: Option<Box<dyn FileLoader + Sync + Send>>,
) -> (Session, Box<dyn CodegenBackend>) {
    let descriptions = rustc_driver::diagnostics_registry();
    let file_loader = file_loader.unwrap_or_else(|| Box::new(RealFileLoader));
    let source_map = Rc::new(SourceMap::with_file_loader(
        file_loader,
        sopts.file_path_mapping(),
    ));
    source_map.new_source_file(FileName::Custom("<dummy>".to_string()), " ".to_string());
    let sess = session::build_session_with_source_map(
        sopts,
        in_path,
        descriptions,
        source_map,
        DiagnosticOutput::Default,
        Default::default(),
    );
    // The backend is selected from the session's options/sysroot.
    let codegen_backend = get_codegen_backend(&sess);
    (sess, codegen_backend)
}
/// Builds a parser over `src`, registering the text in the source map
/// as an anonymous file keyed by the source itself.  Shared by all the
/// `parse_*` helpers below.
fn make_parser<'a>(sess: &'a Session, src: &str) -> Parser<'a> {
    rustc_parse::new_parser_from_source_str(
        &sess.parse_sess,
        FileName::anon_source_code(src),
        src.to_owned(),
    )
}
/// Emits the diagnostic in `db`, then panics with a message naming the
/// construct (`what`) that failed to parse.  The `parse_*` helpers
/// below use this to treat parse failures as fatal while still showing
/// the underlying rustc error.
pub fn emit_and_panic(mut db: DiagnosticBuilder, what: &str) -> ! {
    db.emit();
    panic!("error parsing {}", what);
}
/// Parses `src` as a single expression, stripping redundant
/// parentheses from the result.
///
/// # Panics
/// Emits the diagnostic and panics if `src` does not parse.
#[cfg_attr(feature = "profile", flame)]
pub fn parse_expr(sess: &Session, src: &str) -> P<Expr> {
    let mut parser = make_parser(sess, src);
    let mut expr = parser
        .parse_expr()
        .unwrap_or_else(|db| emit_and_panic(db, "expr"));
    remove_paren(&mut expr);
    expr
}
/// Parses `src` as a single pattern, stripping redundant parentheses
/// from the result.
///
/// # Panics
/// Emits the diagnostic and panics if `src` does not parse.
#[cfg_attr(feature = "profile", flame)]
pub fn parse_pat(sess: &Session, src: &str) -> P<Pat> {
    let mut parser = make_parser(sess, src);
    let mut pat = parser
        .parse_pat(None)
        .unwrap_or_else(|db| emit_and_panic(db, "pat"));
    remove_paren(&mut pat);
    pat
}
/// Parses `src` as a single type, stripping redundant parentheses from
/// the result.
///
/// # Panics
/// Emits the diagnostic and panics if `src` does not parse.
#[cfg_attr(feature = "profile", flame)]
pub fn parse_ty(sess: &Session, src: &str) -> P<Ty> {
    let mut parser = make_parser(sess, src);
    let mut ty = parser
        .parse_ty()
        .unwrap_or_else(|db| emit_and_panic(db, "ty"));
    remove_paren(&mut ty);
    ty
}
/// Parses `src` as a sequence of statements by wrapping it in a block
/// (`{ src }`), stripping redundant parentheses from each statement.
///
/// # Panics
/// Emits the diagnostic and panics if the wrapped block does not parse.
#[cfg_attr(feature = "profile", flame)]
pub fn parse_stmts(sess: &Session, src: &str) -> Vec<Stmt> {
    let mut parser = make_parser(sess, &format!("{{ {} }}", src));
    match parser.parse_block() {
        Err(db) => emit_and_panic(db, "stmts"),
        Ok(blk) => {
            let mut stmts = Vec::new();
            for mut stmt in blk.into_inner().stmts {
                remove_paren(&mut stmt);
                stmts.push(stmt.lone());
            }
            stmts
        }
    }
}
/// Parses `src` as a sequence of top-level items, stripping redundant
/// parentheses from each, until the parser reports no further item.
///
/// # Panics
/// Emits the diagnostic and panics on the first parse error.
#[cfg_attr(feature = "profile", flame)]
pub fn parse_items(sess: &Session, src: &str) -> Vec<P<Item>> {
    let mut parser = make_parser(sess, src);
    let mut items = Vec::new();
    while let Some(mut item) = parser
        .parse_item()
        .unwrap_or_else(|db| emit_and_panic(db, "items"))
    {
        remove_paren(&mut item);
        items.push(item.lone());
    }
    items
}
/// Parses `src` as the body of an impl block by wrapping it in
/// `impl ! { src }` and extracting the parsed items back out.
///
/// # Panics
/// Emits the diagnostic and panics if parsing fails, or panics if the
/// wrapper somehow parses to something other than an impl item.
#[cfg_attr(feature = "profile", flame)]
pub fn parse_impl_items(sess: &Session, src: &str) -> Vec<ImplItem> {
    let mut parser = make_parser(sess, &format!("impl ! {{ {} }}", src));
    let item = parser
        .parse_item()
        .unwrap_or_else(|db| emit_and_panic(db, "impl items"))
        .expect("expected to find an item");
    match item.into_inner().kind {
        ItemKind::Impl(_, _, _, _, _, _, items) => items,
        _ => panic!("expected to find an impl item"),
    }
}
/// Parses `src` as the body of a foreign module by wrapping it in
/// `extern { src }` and extracting the parsed items back out.
///
/// # Panics
/// Emits the diagnostic and panics if parsing fails, or panics if the
/// wrapper somehow parses to something other than a foreign module.
#[cfg_attr(feature = "profile", flame)]
pub fn parse_foreign_items(sess: &Session, src: &str) -> Vec<ForeignItem> {
    let mut parser = make_parser(sess, &format!("extern {{ {} }}", src));
    let item = parser
        .parse_item()
        .unwrap_or_else(|db| emit_and_panic(db, "foreign items"))
        .expect("expected to find an item");
    match item.into_inner().kind {
        ItemKind::ForeignMod(fm) => fm.items,
        _ => panic!("expected to find a foreignmod item"),
    }
}
/// Parses `src` as a block expression, supporting an optional leading
/// `unsafe` keyword.
///
/// # Panics
/// Emits the diagnostic and panics if the block does not parse.
#[cfg_attr(feature = "profile", flame)]
pub fn parse_block(sess: &Session, src: &str) -> P<Block> {
    let mut p = make_parser(sess, src);
    // Consume a leading `unsafe` keyword before calling `parse_block`,
    // and remember which check mode to re-apply to the parsed block.
    // NOTE(review): this implies `Parser::parse_block` does not accept
    // the `unsafe` prefix itself here — confirm against the pinned
    // rustc version.  The `eat` must happen before `parse_block` since
    // it advances the parser.
    let rules = if p.eat(&TokenKind::Ident(kw::Unsafe, false)) {
        BlockCheckMode::Unsafe(UnsafeSource::UserProvided)
    } else {
        BlockCheckMode::Default
    };
    match p.parse_block() {
        Ok(mut block) => {
            remove_paren(&mut block);
            // Restore the check mode stripped off above.
            block.rules = rules;
            block
        }
        Err(db) => emit_and_panic(db, "block"),
    }
}
/// Parses a single function parameter — optional attributes, then
/// `pat: ty` — from `p`, assembling it into an AST `Param` with dummy
/// id/span.
///
/// Errors from every stage are propagated to the caller (previously a
/// malformed attribute hit `.unwrap()` and panicked without emitting
/// the diagnostic; now it flows through `?` like the other stages, so
/// `parse_arg` can report it via `emit_and_panic`).
fn parse_arg_inner<'a>(p: &mut Parser<'a>) -> PResult<'a, Param> {
    // Leading attributes, e.g. `#[cfg(...)]`, each starting with `#`.
    let mut attrs: Vec<ast::Attribute> = Vec::new();
    while let token::Pound = p.token.kind {
        attrs.push(p.parse_attribute(false)?);
    }
    let pat = p.parse_pat(None)?;
    p.expect(&TokenKind::Colon)?;
    let ty = p.parse_ty()?;
    Ok(Param {
        attrs: attrs.into(),
        pat,
        ty,
        id: DUMMY_NODE_ID,
        span: DUMMY_SP,
        is_placeholder: false,
    })
}
/// Parses `src` as a single function parameter (`attrs pat: ty`),
/// stripping redundant parentheses from the result.
///
/// # Panics
/// Emits the diagnostic and panics if `src` does not parse.
#[cfg_attr(feature = "profile", flame)]
pub fn parse_arg(sess: &Session, src: &str) -> Param {
    let mut parser = make_parser(sess, src);
    let mut arg = parse_arg_inner(&mut parser)
        .unwrap_or_else(|db| emit_and_panic(db, "arg"));
    remove_paren(&mut arg);
    arg
}
/// Runs an arbitrary parsing closure `f` over a parser built from
/// `src`, returning its result.
///
/// # Panics
/// Emits the diagnostic and panics if `f` reports a parse error.
#[cfg_attr(feature = "profile", flame)]
pub fn run_parser<F, R>(sess: &Session, src: &str, f: F) -> R
where
    F: for<'a> FnOnce(&mut Parser<'a>) -> PResult<'a, R>,
{
    let mut parser = make_parser(sess, src);
    f(&mut parser).unwrap_or_else(|db| emit_and_panic(db, "src"))
}
/// Runs an arbitrary parsing closure `f` over a parser built from an
/// existing token stream, returning its result.
///
/// # Panics
/// Emits the diagnostic and panics if `f` reports a parse error.
#[cfg_attr(feature = "profile", flame)]
pub fn run_parser_tts<F, R>(sess: &Session, tts: Vec<TokenTree>, f: F) -> R
where
    F: for<'a> FnOnce(&mut Parser<'a>) -> PResult<'a, R>,
{
    let mut parser = rustc_parse::new_parser_from_tts(&sess.parse_sess, tts);
    f(&mut parser).unwrap_or_else(|db| emit_and_panic(db, "tts"))
}
/// Runs an arbitrary parsing closure `f` over a parser built from
/// `src`.  On failure the diagnostic is cancelled (not emitted) and
/// `None` is returned, making this the non-panicking counterpart of
/// `run_parser`.
#[cfg_attr(feature = "profile", flame)]
pub fn try_run_parser<F, R>(sess: &Session, src: &str, f: F) -> Option<R>
where
    F: for<'a> FnOnce(&mut Parser<'a>) -> PResult<'a, R>,
{
    let mut parser = make_parser(sess, src);
    f(&mut parser)
        .map_err(|mut db| db.cancel())
        .ok()
}
/// Runs an arbitrary parsing closure `f` over a parser built from an
/// existing token stream.  On failure the diagnostic is cancelled (not
/// emitted) and `None` is returned, making this the non-panicking
/// counterpart of `run_parser_tts`.
#[cfg_attr(feature = "profile", flame)]
pub fn try_run_parser_tts<F, R>(sess: &Session, tts: Vec<TokenTree>, f: F) -> Option<R>
where
    F: for<'a> FnOnce(&mut Parser<'a>) -> PResult<'a, R>,
{
    let mut parser = rustc_parse::new_parser_from_tts(&sess.parse_sess, tts);
    f(&mut parser)
        .map_err(|mut db| db.cancel())
        .ok()
}
/// Registers `s` as an anonymous file in the source map and returns a
/// span covering the whole of it, with the root (empty) hygiene
/// context.
pub fn make_span_for_text(cm: &SourceMap, s: &str) -> Span {
    let file = cm.new_source_file(FileName::anon_source_code(s), s.to_string());
    Span::new(file.start_pos, file.end_pos, SyntaxContext::root())
}