use std::{
fs,
io::{
self,
ErrorKind
},
path::PathBuf, sync::atomic::AtomicU32,
sync::atomic::Ordering,
};
use proc_macro2::Span;
use syn::spanned::Spanned;
use syn::visit::Visit;
use triomphe::Arc;
use ra_ap_hir::{
CfgOptions,
Semantics,
};
use ra_ap_ide_db::{
EditionedFileId,
base_db::{CrateOrigin, CrateWorkspaceData, Env},
ChangeWithProcMacros,
};
use ra_ap_project_model::{
CargoConfig,
ProjectWorkspace,
ProjectManifest,
};
use ra_ap_ide::{
Edition,
Analysis,
AnalysisHost,
AssistConfig,
AssistResolveStrategy,
TextSize,
SourceRoot,
};
use ra_ap_base_db::CrateGraph;
use ra_ap_syntax::{
algo,
ast::HasName,
AstNode,
SourceFile
};
use ra_ap_ide_assists::Assist;
use ra_ap_vfs::{
AbsPathBuf,
VfsPath,
file_set::FileSet,
FileId as VfsFileId,
};
use crate::{
error::ExtractionError,
extract::extraction_utils::{
apply_edits, apply_extract_function, check_braces, check_comment, convert_to_abs_path_buf, filter_extract_function_assist, fixup_controlflow, generate_frange, generate_frange_from_fileid, get_assists, get_cargo_config, get_cargo_toml, get_manifest_dir, load_project_manifest, load_project_workspace, load_workspace_data, rename_function, trim_range
}, startup::identify::add_sysroot_deps,
};
use crate::startup;
use rem_interface::metrics as mx;
/// Parameters for a single extract-function request.
#[derive(Debug, PartialEq, Clone)]
pub struct ExtractionInput {
    /// Path to the source file to refactor (as given by the caller).
    pub file_path: String,
    /// Name to give the newly extracted function.
    pub new_fn_name: String,
    /// Start offset of the selection; validated to be non-zero and strictly
    /// less than `end_idx` (see `check_idx`).
    pub start_idx: u32,
    /// End offset of the selection; validated to be non-zero and strictly
    /// greater than `start_idx` (see `check_idx`).
    pub end_idx: u32,
}
impl ExtractionInput {
    /// Creates an input with the path stored exactly as given (no
    /// normalization).
    pub fn new(
        file_path: &str,
        new_fn_name: &str,
        start_idx: u32,
        end_idx: u32,
    ) -> Self {
        Self {
            file_path: file_path.to_owned(),
            new_fn_name: new_fn_name.to_owned(),
            start_idx,
            end_idx,
        }
    }
    /// Creates an input whose path is first converted to an absolute path.
    ///
    /// Panics if the path cannot be converted (same behavior as before).
    #[allow(dead_code)]
    pub fn new_absolute(
        file_path: &str,
        new_fn_name: &str,
        start_idx: u32,
        end_idx: u32,
    ) -> Self {
        let abs_path = convert_to_abs_path_buf(file_path).unwrap();
        Self {
            file_path: abs_path.as_str().to_owned(),
            new_fn_name: new_fn_name.to_owned(),
            start_idx,
            end_idx,
        }
    }
}
fn check_file_exists(file_path: &str) -> Result<(), ExtractionError> {
if fs::metadata(file_path).is_err() {
return Err(ExtractionError::Io(io::Error::new(
ErrorKind::NotFound,
format!("File not found: {}", file_path),
)));
}
Ok(())
}
/// Validates the selection indices: they must be distinct, ordered
/// (start < end), and both non-zero.
fn check_idx(input: &ExtractionInput) -> Result<(), ExtractionError> {
    let (start, end) = (input.start_idx, input.end_idx);
    if start == end {
        return Err(ExtractionError::SameIdx);
    }
    if start > end {
        return Err(ExtractionError::InvalidIdxPair);
    }
    if start == 0 {
        return Err(ExtractionError::InvalidStartIdx);
    }
    if end == 0 {
        return Err(ExtractionError::InvalidEndIdx);
    }
    Ok(())
}
/// Runs every input validation in order: file existence, then index sanity.
fn verify_input(input: &ExtractionInput) -> Result<(), ExtractionError> {
    check_file_exists(&input.file_path)?;
    check_idx(input)
}
pub fn extract_method_file(input: ExtractionInput) -> Result<(String, String), ExtractionError> {
mx::mark("Extraction Start");
let input_path: &str = &input.file_path;
let callee_name: &str = &input.new_fn_name;
let start_idx: u32 = input.start_idx;
let end_idx: u32 = input.end_idx;
let text: String = fs::read_to_string(input_path).unwrap();
verify_input(&input)?;
mx::mark("Load the analysis");
let (analysis, file_id) = analysis_from_single_file_std( text.clone() );
mx::mark("Analysis Loaded");
let assist_config: AssistConfig = super::extraction_utils::generate_assist_config();
let diagnostics_config = super::extraction_utils::generate_diagnostics_config();
let resolve: AssistResolveStrategy = super::extraction_utils::generate_resolve_strategy();
let range: (u32, u32) = (start_idx, end_idx);
let frange = generate_frange_from_fileid(file_id, range);
mx::mark("Get the assists");
let assists: Vec<Assist> = analysis.assists_with_fixes(
&assist_config,
&diagnostics_config,
resolve,
frange
).unwrap();
mx::mark("Filter for extract function assist");
let assist: Assist = filter_extract_function_assist( assists )?;
mx::mark("Apply extract function assist");
let src_change = assist.source_change
.as_ref()
.unwrap()
.clone();
let (text_edit, maybe_snippet_edit) =
src_change.get_source_and_snippet_edit(
file_id,
).unwrap();
let edited_text: String = apply_edits(
text.clone(),
text_edit.clone(),
maybe_snippet_edit.clone(),
);
let renamed_text: String = rename_function(
edited_text,
"fun_name",
callee_name,
);
let fixed_cf_text: String = fixup_controlflow( renamed_text );
mx::mark("Extraction End");
let parent_method: String = parent_method_from_text(
text,
&range,
);
Ok( (fixed_cf_text, parent_method) )
}
/// Extracts the selection `input.start_idx..input.end_idx` into a new
/// function named `input.new_fn_name`, analyzing the full cargo workspace
/// that owns `input.file_path`.
///
/// Returns `(modified_source, parent_fn_name)` on success.
///
/// # Errors
/// Propagates validation failures, manifest discovery failures, selections
/// that split comments or brace pairs, and the absence of an
/// "extract function" assist.
pub fn extract_method(input: ExtractionInput) -> Result<(String, String), ExtractionError> {
    mx::mark("Extraction Start");
    let input_path: &str = &input.file_path;
    let callee_name: &str = &input.new_fn_name;
    let start_idx: u32 = input.start_idx;
    let end_idx: u32 = input.end_idx;
    // Validate BEFORE converting the path so bad input surfaces as an
    // ExtractionError instead of a panic inside unwrap().
    verify_input(&input)?;
    let input_abs_path: AbsPathBuf = convert_to_abs_path_buf(input_path).unwrap();
    let manifest_dir: PathBuf = get_manifest_dir(
        &PathBuf::from(input_abs_path.as_str())
    )?;
    let cargo_toml: AbsPathBuf = get_cargo_toml( &manifest_dir );
    // Fixed duplicated metric label: this phase loads the manifest, not the
    // workspace (which is marked below).
    mx::mark("Load the project manifest");
    let project_manifest: ProjectManifest = load_project_manifest( &cargo_toml );
    mx::mark("Load the cargo config");
    let cargo_config: CargoConfig = get_cargo_config( &project_manifest );
    mx::mark("Load the project workspace");
    let workspace: ProjectWorkspace = load_project_workspace( &project_manifest, &cargo_config );
    mx::mark("Load the analysis database and VFS");
    let (db, vfs) = load_workspace_data(workspace, &cargo_config);
    let range_: (u32, u32) = (start_idx, end_idx);
    mx::mark("Database Loaded");
    let sema: Semantics<'_, ra_ap_ide::RootDatabase> = Semantics::new( &db );
    let frange_: ra_ap_hir::FileRangeWrapper<ra_ap_vfs::FileId> =
        generate_frange( &input_abs_path, &vfs, range_.clone() );
    let edition: EditionedFileId = EditionedFileId::current_edition( frange_.file_id );
    let source_file: SourceFile = sema.parse( edition );
    // Snap the selection to token boundaries, then reject selections that
    // split comments or unbalanced brace pairs.
    let range: (u32, u32) = trim_range( &source_file, &range_ );
    check_comment( &source_file, &range )?;
    check_braces( &source_file, &range )?;
    mx::mark("Run the analysis");
    mx::mark("Get the assists");
    let assists: Vec<Assist> = get_assists( &db, &vfs, &input_abs_path, range );
    mx::mark("Filter for extract function assist");
    let assist: Assist = filter_extract_function_assist( assists )?;
    mx::mark("Apply extract function assist");
    let modified_code: String = apply_extract_function(
        &assist,
        &input_abs_path,
        &vfs,
        &callee_name,
    )?;
    mx::mark("Get parent method");
    let parent_method: String = parent_method(
        &source_file,
        range,
    )?;
    mx::mark("Extraction End");
    Ok( (modified_code, parent_method) )
}
/// Builds an `Analysis` over a single in-memory file with an empty crate
/// graph apart from the file's own crate — no sysroot crates are linked, so
/// `std`/`core` items will not resolve (hence "no_std").
///
/// Returns the analysis together with the `FileId` assigned to the file.
fn analysis_from_single_file_no_std(
src: String
) -> (Analysis, VfsFileId) {
let mut files = FileSet::default();
// A fixed raw id is safe here: this VFS contains exactly one file.
let file_id = ra_ap_vfs::FileId::from_raw(0);
let path = VfsPath::new_virtual_path("/main.rs".to_owned());
files.insert(file_id, path);
let mut config = CfgOptions::default();
// Enable cfg(test) so test-gated code in the snippet is analyzed too.
config.insert_atom(ra_ap_hir::sym::test.clone());
let mut graph = CrateGraph::default();
graph.add_crate_root(
file_id,
ra_ap_ide::Edition::CURRENT,
None,
None,
Arc::new(config.clone()),
None,
Env::default(),
false,
CrateOrigin::Local { repo: None, name: None},
);
// Workspace metadata shared by every crate in the (one-crate) graph; a
// free-standing file has no target data layout to report.
let shared_ws = Arc::new(CrateWorkspaceData {
proc_macro_cwd: None,
data_layout: Err("There is no data layout for a single file analysis".into()),
toolchain: None,
});
let workspace = graph
.iter()
.map(|crate_id| (crate_id, shared_ws.clone()))
.collect();
// Apply roots, file contents, and the crate graph as one atomic change.
let mut change = ChangeWithProcMacros::new();
let root = SourceRoot::new_local(files);
change.set_roots(vec![root]);
change.change_file(file_id, Some(src));
change.set_crate_graph(graph, workspace);
let mut analysis = AnalysisHost::default();
analysis.apply_change(change);
(analysis.analysis(), file_id)
}
/// Builds an `Analysis` for a single in-memory file, wired against the
/// shared sysroot crate graph so `std`/`core` items resolve.
///
/// Returns the analysis together with the freshly allocated `FileId`.
pub fn analysis_from_single_file_std(
    src: String
) -> (Analysis, VfsFileId) {
    let ctx = startup::single_file_std_context();
    // Allocate a fresh VFS id so repeated calls never collide with the file
    // ids held by the shared sysroot context.
    let file_id: VfsFileId = alloc_vfs_file_id();
    // NOTE: the previous version built this graph/cfg/crate-root TWICE and
    // immediately shadowed (discarded) the first set; the duplication is
    // removed here.
    let mut graph: CrateGraph = ctx.base_graph.clone();
    let mut cfg: CfgOptions = CfgOptions::default();
    // Enable cfg(test) so test-gated code in the snippet is analyzed too.
    cfg.insert_atom(ra_ap_hir::sym::test.clone());
    let my_crate = graph.add_crate_root(
        file_id,
        Edition::CURRENT,
        None,
        None,
        Arc::new(cfg),
        None,
        Env::default(),
        false,
        CrateOrigin::Local { repo: None, name: None },
    );
    // Link the new crate against the preloaded sysroot crates.
    add_sysroot_deps(&mut graph, my_crate);
    let ws_data =
        startup::identify::build_ws_data(&graph);
    // The user file lives in a local source root; sysroot sources go into a
    // library root so they are treated as immutable dependencies.
    let mut local_files = FileSet::default();
    local_files.insert(file_id, VfsPath::new_virtual_path("/main.rs".to_owned()));
    let local_root = SourceRoot::new_local(local_files);
    let sysroot_files = ctx.sysroot_files.to_file_set();
    let sysroot_root = SourceRoot::new_library(sysroot_files);
    let mut change = ChangeWithProcMacros::new();
    change.set_roots(vec![local_root, sysroot_root]);
    change.set_crate_graph(graph, ws_data);
    // Load sysroot sources; unreadable files are tolerated with a warning
    // (analysis degrades gracefully without them).
    for (abs_path, id) in ctx.sysroot_files.entries() {
        match fs::read_to_string(abs_path.as_path()) {
            Ok(text) => {
                change.change_file(*id, Some(text));
            }
            Err(err) => {
                eprintln!("warn: failed to read sysroot file {:?}: {err}", abs_path);
            }
        }
    }
    change.change_file(file_id, Some(src));
    let mut host = AnalysisHost::default();
    host.apply_change(change);
    (host.analysis(), file_id)
}
// Monotonic counter backing alloc_vfs_file_id. Starts at 1_000_000 —
// presumably to stay clear of ids already assigned by the shared sysroot
// context — TODO confirm against startup::single_file_std_context.
static NEXT_VFS_FILE_ID: AtomicU32 = AtomicU32::new(1_000_000);
/// Allocates a process-unique `VfsFileId` for a single-file analysis.
fn alloc_vfs_file_id() -> VfsFileId {
    // Relaxed ordering is sufficient: only uniqueness matters, not ordering
    // relative to other memory operations.
    let raw = NEXT_VFS_FILE_ID.fetch_add(1, Ordering::Relaxed);
    VfsFileId::from_raw(raw)
}
/// Returns the name of the innermost `fn` node found at the selection's
/// start offset in the rust-analyzer syntax tree.
///
/// # Errors
/// `ExtractionError::ParentMethodNotFound` when no enclosing function (or a
/// nameless one) is found at that offset.
pub fn parent_method(
    source_file: &SourceFile,
    range: (u32, u32),
) -> Result<String, ExtractionError> {
    let offset: TextSize = TextSize::new(range.0);
    let enclosing_fn =
        algo::find_node_at_offset::<ra_ap_syntax::ast::Fn>(source_file.syntax(), offset);
    // Missing node and missing name both collapse to the empty string, which
    // is then reported as "not found".
    let fn_name: String = enclosing_fn
        .and_then(|f| f.name())
        .map(|name| name.text().to_string())
        .unwrap_or_default();
    if fn_name.is_empty() {
        return Err(ExtractionError::ParentMethodNotFound);
    }
    Ok(fn_name.trim().to_string())
}
pub fn parent_method_from_text(text: String, range: &(u32, u32)) -> String {
let Ok(file) = syn::parse_file(&text) else {
return String::new();
};
let line_starts = compute_line_starts(&text);
let selection = (range.0 as usize, range.1 as usize);
let mut visitor = FnCollector {
text: &text,
line_starts: &line_starts,
fns: Vec::new(),
};
visitor.visit_file(&file);
let mut best: Option<(&str, usize, usize)> = None;
for (name, start, end) in visitor.fns {
if contains((start, end), selection) {
match best {
None => best = Some((name, start, end)),
Some((_, b_start, b_end)) => {
if (end - start) < (b_end - b_start) {
best = Some((name, start, end));
}
}
}
}
}
best.map(|(name, _, _)| name.to_string()).unwrap_or_default()
}
/// Collects `(name, body_start, body_end)` byte ranges for every function
/// discovered while walking a `syn` AST.
struct FnCollector<'a> {
    // Full source text the recorded offsets refer to.
    text: &'a str,
    // Byte offset of each line start in `text` (see `compute_line_starts`);
    // used to convert proc-macro2 line/column spans into byte offsets.
    line_starts: &'a [usize],
    // (function name, body start offset, body end offset). Names are leaked
    // to 'static (see `FnCollector::leak`) so they outlive the visitor.
    fns: Vec<(&'a str, usize, usize)>,
}
impl<'a, 'ast> Visit<'ast> for FnCollector<'a> {
    /// Records a free-standing `fn`'s name and body span, then recurses so
    /// functions nested inside the body are collected too.
    fn visit_item_fn(&mut self, node: &'ast syn::ItemFn) {
        let name = node.sig.ident.to_string();
        let (start, end) = span_to_offsets(node.block.span(), self.line_starts, self.text);
        self.fns.push((self.leak(name), start, end));
        syn::visit::visit_item_fn(self, node);
    }
    /// Records every method of an `impl` block, then continues the default
    /// traversal into the block's items.
    fn visit_item_impl(&mut self, node: &'ast syn::ItemImpl) {
        for item in &node.items {
            if let syn::ImplItem::Fn(m) = item {
                let name = m.sig.ident.to_string();
                let (start, end) = span_to_offsets(m.block.span(), self.line_starts, self.text);
                self.fns.push((self.leak(name), start, end));
            }
        }
        syn::visit::visit_item_impl(self, node);
    }
    /// Records trait methods that carry a default body; bodiless
    /// declarations have no block span to select inside, so they are skipped.
    fn visit_item_trait(&mut self, node: &'ast syn::ItemTrait) {
        for item in &node.items {
            if let syn::TraitItem::Fn(f) = item {
                if let Some(block) = &f.default {
                    let name = f.sig.ident.to_string();
                    let (start, end) = span_to_offsets(block.span(), self.line_starts, self.text);
                    self.fns.push((self.leak(name), start, end));
                }
            }
        }
        syn::visit::visit_item_trait(self, node);
    }
    /// Descends into inline `mod { ... }` bodies only. NOTE(review): this
    /// override does not call the default `visit_item_mod`, so out-of-line
    /// modules (`mod foo;`) contribute nothing — confirm that is intended.
    fn visit_item_mod(&mut self, node: &'ast syn::ItemMod) {
        if let Some((_brace, items)) = &node.content {
            for it in items {
                self.visit_item(it);
            }
        }
    }
}
impl<'a> FnCollector<'a> {
    /// Promotes an owned name to `&'static str` by leaking the allocation.
    /// NOTE(review): this leaks one small string per collected function;
    /// fine for a short-lived invocation, but worth revisiting if this code
    /// ever runs in a long-lived process — confirm.
    fn leak(&self, s: String) -> &'static str {
        Box::leak(s.into_boxed_str())
    }
}
/// Returns the byte offset at which each line of `text` begins.
///
/// Index `i` holds the start of line `i + 1` (lines are 1-based); line 1
/// always starts at offset 0, and each `\n` opens a new line at the
/// following byte.
fn compute_line_starts(text: &str) -> Vec<usize> {
    std::iter::once(0)
        .chain(
            text.bytes()
                .enumerate()
                .filter(|&(_, byte)| byte == b'\n')
                .map(|(pos, _)| pos + 1),
        )
        .collect()
}
/// Converts a proc-macro2 span into `(start, end)` byte offsets into `text`,
/// each clamped to the text length.
fn span_to_offsets(span: Span, line_starts: &[usize], text: &str) -> (usize, usize) {
    let (lo, hi) = (span.start(), span.end());
    let start_off = lc_to_offset(lo.line, lo.column, line_starts, text).min(text.len());
    let end_off = lc_to_offset(hi.line, hi.column, line_starts, text).min(text.len());
    (start_off, end_off)
}
/// Converts a 1-based `line` and 0-based `column` into a byte offset into
/// `text`, using the precomputed `line_starts` table.
///
/// proc-macro2 columns count UTF-8 *characters*, not bytes, so the column is
/// converted by walking the line's `char_indices` instead of being added to
/// the byte base directly (the old code mis-placed offsets — possibly onto a
/// non-char boundary — whenever the line contained multi-byte characters).
/// Out-of-range lines, and columns past the end of the line, clamp to the
/// nearest valid boundary.
fn lc_to_offset(line: usize, column: usize, line_starts: &[usize], text: &str) -> usize {
    if line == 0 || line > line_starts.len() {
        return text.len();
    }
    let base = line_starts[line - 1];
    // The line's bytes run from its start to the next line's start (or EOF).
    let line_end = line_starts.get(line).copied().unwrap_or(text.len());
    let line_text = &text[base..line_end];
    line_text
        .char_indices()
        .nth(column)
        .map(|(byte_off, _)| base + byte_off)
        // Column past the line's last char: clamp to the end of the line.
        .unwrap_or(base + line_text.len())
}
/// True when `inner` is a well-formed range (start <= end) lying entirely
/// within `outer` (inclusive at both edges).
fn contains(outer: (usize, usize), inner: (usize, usize)) -> bool {
    let well_formed = inner.0 <= inner.1;
    well_formed && outer.0 <= inner.0 && inner.1 <= outer.1
}