use crate::decompress::maybe_decompress;
use crate::error::Result;
use crate::format::is_binary;
use crate::executor::{Match, QueryOptions};
use ignore::WalkBuilder;
use memmap2::Mmap;
use rayon::prelude::*;
use regex::Regex;
use std::fs::File;
use std::io::{BufRead, BufReader, Cursor, Read};
use std::path::{Path, PathBuf};
use std::sync::atomic::{AtomicU32, Ordering};
/// Recursive file scanner rooted at a single directory.
///
/// Construct with [`Scanner::new`], then run queries via `scan`.
#[derive(Debug, Clone)]
pub struct Scanner {
    /// Directory the walk starts from.
    root: PathBuf,
}
impl Scanner {
/// Create a scanner rooted at `root`.
///
/// Accepts anything path-like (`&Path`, `&str`, `PathBuf`, ...), which is
/// backward-compatible with the previous `&Path`-only parameter.
pub fn new(root: impl AsRef<Path>) -> Self {
    Self {
        root: root.as_ref().to_owned(),
    }
}
/// Scan every eligible file under `self.root` for `pattern`.
///
/// * `pattern` — literal text, or a regex when `is_regex` is true.
/// * `ignore_case` — case-insensitive matching via the inline `(?i)` flag.
/// * `options` — result cap, extension filter, archive/binary handling, etc.
///
/// Returns all matches, truncated to `options.max_results` when that cap is
/// set (> 0). Unreadable paths are skipped with a stderr warning instead of
/// aborting the whole scan.
///
/// # Errors
/// Returns an error if the pattern fails to compile.
pub fn scan(
    &self,
    pattern: &str,
    is_regex: bool,
    ignore_case: bool,
    options: &QueryOptions,
) -> Result<Vec<Match>> {
    // Escape literal patterns so regex metacharacters match verbatim.
    let raw = if is_regex {
        pattern.to_string()
    } else {
        regex::escape(pattern)
    };
    let regex_pat = if ignore_case { format!("(?i){raw}") } else { raw };
    let regex = Regex::new(&regex_pat)?;
    // `filter_entry` requires a `'static` closure, so capture a bool copy
    // rather than borrowing `options`. When archive scanning is on we must
    // NOT prune zip/.tar.gz files here, or the archive branch below would
    // never see them.
    let include_archives = options.archive;
    let walker = WalkBuilder::new(&self.root)
        .hidden(false)
        .git_ignore(true)
        .require_git(false)
        .add_custom_ignore_filename(".ixignore")
        .filter_entry(move |entry| {
            let path = entry.path();
            let name = path.file_name().and_then(|n| n.to_str()).unwrap_or("");
            // Prune well-known build/VCS/dependency directories wholesale.
            if entry.file_type().map(|t| t.is_dir()).unwrap_or(false)
                && (name == "lost+found" || name == ".git" || name == "node_modules" ||
                    name == "target" || name == "__pycache__" || name == ".tox" ||
                    name == ".venv" || name == "venv" || name == ".ix")
            {
                return false;
            }
            if entry.file_type().map(|t| t.is_file()).unwrap_or(false) {
                // Skip obviously-binary or media files by extension. Zip
                // files are kept when archive scanning is requested.
                let ext = path.extension().and_then(|e| e.to_str()).unwrap_or("");
                match ext {
                    "zip" if include_archives => {}
                    "so" | "o" | "dylib" | "a" | "dll" | "exe" | "pyc" |
                    "jpg" | "png" | "gif" | "mp4" | "mp3" | "pdf" |
                    "zip" | "7z" | "rar" |
                    "sqlite" | "db" | "bin" => return false,
                    _ => {}
                }
                // `.tar.gz` has a two-part suffix; `extension()` only sees "gz".
                if !include_archives && name.ends_with(".tar.gz") {
                    return false;
                }
            }
            true
        })
        .build();
    // Collect candidate paths up front so rayon can parallelize the scan.
    let paths: Vec<PathBuf> = walker
        .filter_map(|result| {
            match result {
                Ok(entry) => Some(entry),
                Err(e) => {
                    eprintln!("ix: warning: scanner skipping path: {}", e);
                    None
                }
            }
        })
        .filter(|entry| entry.file_type().map(|t| t.is_file()).unwrap_or(false))
        .map(|entry| entry.path().to_owned())
        .collect();
    // Approximate cross-thread early exit: relaxed loads may overshoot the
    // cap, so the final `truncate` below enforces it exactly.
    let matches_found = AtomicU32::new(0);
    let mut matches: Vec<Match> = paths
        .into_par_iter()
        .filter_map(|path| {
            if options.max_results > 0
                && matches_found.load(Ordering::Relaxed) >= options.max_results as u32
            {
                return None;
            }
            if !options.type_filter.is_empty() {
                let ext = path.extension().and_then(|e| e.to_str()).unwrap_or("");
                if !options.type_filter.iter().any(|e: &String| e == ext) {
                    return None;
                }
            }
            if options.archive {
                // Leading underscores keep these warning-free when the
                // "archive" feature is compiled out.
                let _ext = path.extension().and_then(|e| e.to_str()).unwrap_or("");
                let _is_tar_gz = path.to_str().map(|s| s.ends_with(".tar.gz")).unwrap_or(false);
                #[cfg(feature = "archive")]
                {
                    if _ext == "zip"
                        && let Ok(archive_matches) = crate::archive::scan_zip(&path, &regex, options)
                    {
                        matches_found.fetch_add(archive_matches.len() as u32, Ordering::Relaxed);
                        return Some(archive_matches);
                    } else if _is_tar_gz
                        && let Ok(archive_matches) = crate::archive::scan_tar_gz(&path, &regex, options)
                    {
                        matches_found.fetch_add(archive_matches.len() as u32, Ordering::Relaxed);
                        return Some(archive_matches);
                    }
                }
            }
            // Per-file errors (permission, vanished file) drop the file, not the scan.
            let file_matches = self.scan_file(&path, &regex, options).ok()?;
            matches_found.fetch_add(file_matches.len() as u32, Ordering::Relaxed);
            Some(file_matches)
        })
        .flatten()
        .collect();
    if options.max_results > 0 && matches.len() > options.max_results {
        matches.truncate(options.max_results);
    }
    Ok(matches)
}
/// Stream `reader` line-by-line, collecting regex matches with optional
/// before/after context lines.
///
/// * `path` — recorded on each `Match` so callers know the origin.
/// * Returns no matches when the stream is empty, or when the first buffered
///   chunk looks binary and `options.binary` is false.
/// * Honors `options.count_only` (omits line text), `options.context_lines`,
///   and `options.max_results`.
fn scan_stream<R: Read>(
&self,
reader: R,
path: &Path,
regex: &Regex,
options: &QueryOptions,
) -> Result<Vec<Match>> {
let mut buf_reader = BufReader::new(reader);
let mut matches = Vec::new();
let mut line_number = 0u32;
let mut byte_offset = 0u64;
// Peek at the first buffered chunk to classify the stream without
// consuming it (`fill_buf` does not advance the reader).
{
let buffer = buf_reader.fill_buf()?;
if buffer.is_empty() {
return Ok(vec![]);
}
let is_bin = is_binary(buffer);
if is_bin && !options.binary {
return Ok(vec![]);
}
}
// `line` is reused across iterations to avoid a per-line allocation.
let mut line = String::new();
// Ring buffer holding the most recent `context_lines` raw lines.
let mut context_before = std::collections::VecDeque::new();
// Matches still waiting for their trailing context lines to arrive.
let mut pending_matches: Vec<Match> = Vec::new();
while buf_reader.read_line(&mut line)? > 0 {
line_number += 1;
let line_len = line.len() as u64;
let trimmed_line = line.trim_end().to_string();
// Feed the current line to every match still collecting after-context.
for m in &mut pending_matches {
if m.context_after.len() < options.context_lines {
m.context_after.push(trimmed_line.clone());
}
}
// Promote matches whose after-context is now complete into the results.
let (completed, still_pending): (Vec<_>, Vec<_>) = pending_matches
.into_iter()
.partition(|m| m.context_after.len() >= options.context_lines);
matches.extend(completed);
pending_matches = still_pending;
// NOTE(review): matching runs on the raw line (trailing newline
// included), so `$`-anchored patterns may behave differently than they
// would on the trimmed text — confirm this is intended.
if let Some(m) = regex.find(&line) {
let context_before_vec: Vec<String> =
context_before.iter().map(|s: &String| s.trim_end().to_string()).collect();
let new_match = Match {
file_path: path.to_owned(),
line_number,
col: (m.start() + 1) as u32, // 1-based column of the match start
line_content: if options.count_only {
String::new()
} else {
trimmed_line.clone()
},
byte_offset: byte_offset + m.start() as u64,
context_before: context_before_vec,
context_after: vec![],
is_binary: false,
};
// With trailing context requested, the match must wait for the
// following lines before it is complete.
if options.context_lines > 0 {
pending_matches.push(new_match);
} else {
matches.push(new_match);
}
// Early exit once the cap is reached; pending matches defer the
// exit until `matches` alone satisfies the cap.
if options.max_results > 0
&& (matches.len() + pending_matches.len()) >= options.max_results
&& (pending_matches.is_empty() || matches.len() >= options.max_results)
{
break;
}
}
// The matching line itself enters the before-context window for
// any subsequent match.
if options.context_lines > 0 {
context_before.push_back(line.clone());
if context_before.len() > options.context_lines {
context_before.pop_front();
}
}
byte_offset += line_len;
line.clear();
}
// Flush matches whose after-context was cut short by end-of-stream.
matches.extend(pending_matches);
Ok(matches)
}
/// Open `path`, memory-map its contents, and scan it for matches.
///
/// Files larger than 100 MiB are silently skipped unless decompression is
/// requested. When `options.decompress` is set and the file is recognized as
/// compressed, the decompressed stream is scanned instead of the raw bytes.
fn scan_file(
    &self,
    path: &Path,
    regex: &Regex,
    options: &QueryOptions,
) -> Result<Vec<Match>> {
    // Size guard: skip very large plain files to bound memory/time.
    const MAX_PLAIN_SIZE: u64 = 100 * 1024 * 1024;
    let file = File::open(path)?;
    if file.metadata()?.len() > MAX_PLAIN_SIZE && !options.decompress {
        return Ok(vec![]);
    }
    // SAFETY: the map is read-only and lives only for the duration of this
    // call. NOTE(review): as in the original code, behavior is undefined if
    // the underlying file is truncated or modified concurrently.
    let mapped = unsafe { Mmap::map(&file)? };
    if options.decompress {
        if let Some(reader) = maybe_decompress(path, &mapped)? {
            return self.scan_stream(reader, path, regex, options);
        }
    }
    self.scan_stream(Cursor::new(&mapped[..]), path, regex, options)
}
}