#[allow(unused_imports)]
use super::ast_impl::{Decl, Located};
#[allow(unused_imports)]
use super::parse_types::*;
#[allow(unused_imports)]
use super::tokens::{Token, TokenKind};
#[allow(dead_code)]
#[derive(Debug, Clone)]
#[allow(missing_docs)]
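/// A cursor over a pre-lexed sequence of token strings, with single- and
/// multi-token lookahead and `mark`/`reset_to` backtracking.
///
/// A sketch of speculative parsing (illustrative only, not compiled):
///
/// ```ignore
/// let mut buf = TokenBuffer::new(vec!["def".into(), "foo".into()]);
/// let mark = buf.mark();
/// if buf.eat("def") && buf.eat("bar") {
///     // matched `def bar`
/// } else {
///     buf.reset_to(mark); // backtrack and try another production
/// }
/// ```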
pub struct TokenBuffer {
tokens: Vec<String>,
pos: usize,
}
impl TokenBuffer {
#[allow(dead_code)]
#[allow(missing_docs)]
pub fn new(tokens: Vec<String>) -> Self {
Self { tokens, pos: 0 }
}
#[allow(dead_code)]
#[allow(missing_docs)]
pub fn peek(&self) -> Option<&str> {
self.tokens.get(self.pos).map(String::as_str)
}
#[allow(dead_code)]
#[allow(missing_docs)]
pub fn peek_at(&self, n: usize) -> Option<&str> {
self.tokens.get(self.pos + n).map(String::as_str)
}
#[allow(dead_code)]
#[allow(missing_docs)]
pub fn advance(&mut self) -> Option<&str> {
let tok = self.tokens.get(self.pos).map(String::as_str);
if tok.is_some() {
self.pos += 1;
}
tok
}
#[allow(dead_code)]
#[allow(missing_docs)]
pub fn eat(&mut self, expected: &str) -> bool {
if self.peek() == Some(expected) {
self.pos += 1;
true
} else {
false
}
}
#[allow(dead_code)]
#[allow(missing_docs)]
pub fn remaining(&self) -> usize {
self.tokens.len().saturating_sub(self.pos)
}
#[allow(dead_code)]
#[allow(missing_docs)]
pub fn is_eof(&self) -> bool {
self.pos >= self.tokens.len()
}
#[allow(dead_code)]
#[allow(missing_docs)]
pub fn mark(&self) -> usize {
self.pos
}
#[allow(dead_code)]
#[allow(missing_docs)]
pub fn reset_to(&mut self, mark: usize) {
self.pos = mark;
}
}
#[allow(dead_code)]
#[derive(Debug, Clone)]
#[allow(missing_docs)]
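/// Per-parse state: the source file being parsed, the active parser
/// configuration, and the diagnostics collected so far.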
pub struct ParseContext {
pub file: SourceFile,
pub config: ParserConfig,
pub diagnostics: ParseDiagnostics,
}
impl ParseContext {
#[allow(dead_code)]
#[allow(missing_docs)]
pub fn new(file: SourceFile, config: ParserConfig) -> Self {
Self {
file,
config,
diagnostics: ParseDiagnostics::new(),
}
}
#[allow(dead_code)]
#[allow(missing_docs)]
pub fn from_source(source: impl Into<String>) -> Self {
Self::new(SourceFile::virtual_(source), ParserConfig::default())
}
#[allow(dead_code)]
#[allow(missing_docs)]
pub fn has_errors(&self) -> bool {
!self.diagnostics.is_ok()
}
#[allow(dead_code)]
#[allow(missing_docs)]
pub fn emit_error(&mut self, msg: impl Into<String>) {
self.diagnostics.error(msg);
}
#[allow(dead_code)]
#[allow(missing_docs)]
pub fn emit_warning(&mut self, msg: impl Into<String>, start: usize, end: usize) {
self.diagnostics.warn(msg, start, end);
}
}
#[allow(dead_code)]
#[derive(Debug, Clone)]
#[allow(missing_docs)]
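/// A hole (`_`-style placeholder) recorded during parsing, with an
/// optional hint for later elaboration and its offset in the source.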
pub struct SyntacticHole {
pub hint: Option<String>,
pub offset: usize,
}
impl SyntacticHole {
#[allow(dead_code)]
#[allow(missing_docs)]
pub fn anonymous(offset: usize) -> Self {
Self { hint: None, offset }
}
#[allow(dead_code)]
#[allow(missing_docs)]
pub fn with_hint(hint: impl Into<String>, offset: usize) -> Self {
Self {
hint: Some(hint.into()),
offset,
}
}
#[allow(dead_code)]
#[allow(missing_docs)]
pub fn has_hint(&self) -> bool {
self.hint.is_some()
}
}
#[allow(dead_code)]
#[allow(missing_docs)]
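/// Splits a dot-separated name into namespace segments and a basename,
/// e.g. `"Foo.Bar.baz"` yields `(["Foo", "Bar"], "baz")`.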
pub fn split_qualified_name(name: &str) -> (Vec<&str>, &str) {
// Split on the last '.', treating everything before it as namespace
// segments and the remainder as the basename.
match name.rsplit_once('.') {
Some((ns, base)) => (ns.split('.').collect(), base),
None => (vec![], name),
}
}
#[allow(dead_code)]
#[allow(missing_docs)]
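/// Inverse of `split_qualified_name`: joins namespace segments and a
/// basename into a dot-separated name.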
pub fn join_qualified_name(namespace: &[&str], basename: &str) -> String {
if namespace.is_empty() {
basename.to_string()
} else {
format!("{}.{}", namespace.join("."), basename)
}
}
#[allow(dead_code)]
#[allow(missing_docs)]
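/// Returns `true` when every dot-separated segment of `s` is a non-empty,
/// valid identifier.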
pub fn is_qualified_name(s: &str) -> bool {
s.split('.')
.all(|part| !part.is_empty() && token_utils::is_valid_ident(part))
}
#[allow(dead_code)]
#[allow(missing_docs)]
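/// Removes a leading `prefix.` from `name`; returns `name` unchanged when
/// the prefix does not match.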
pub fn strip_namespace_prefix<'a>(name: &'a str, prefix: &str) -> &'a str {
// Strip `prefix` and its '.' separator without allocating; return `name`
// unchanged when the prefix does not match.
name.strip_prefix(prefix)
.and_then(|rest| rest.strip_prefix('.'))
.unwrap_or(name)
}
#[cfg(test)]
mod extra2_parse_tests {
use super::*;
#[test]
fn test_token_buffer_peek_advance() {
let mut buf = TokenBuffer::new(vec!["def".into(), "foo".into(), ":=".into()]);
assert_eq!(buf.peek(), Some("def"));
assert_eq!(buf.advance(), Some("def"));
assert_eq!(buf.peek(), Some("foo"));
}
#[test]
fn test_token_buffer_eat_success() {
let mut buf = TokenBuffer::new(vec!["theorem".into(), "foo".into()]);
assert!(buf.eat("theorem"));
assert_eq!(buf.peek(), Some("foo"));
}
#[test]
fn test_token_buffer_eat_fail() {
let mut buf = TokenBuffer::new(vec!["def".into()]);
assert!(!buf.eat("theorem"));
assert_eq!(buf.peek(), Some("def"));
}
#[test]
fn test_token_buffer_eof() {
let mut buf = TokenBuffer::new(vec!["x".into()]);
buf.advance();
assert!(buf.is_eof());
}
#[test]
fn test_token_buffer_backtrack() {
let mut buf = TokenBuffer::new(vec!["a".into(), "b".into(), "c".into()]);
let mark = buf.mark();
buf.advance();
buf.advance();
buf.reset_to(mark);
assert_eq!(buf.peek(), Some("a"));
}
#[test]
fn test_token_buffer_peek_at() {
let buf = TokenBuffer::new(vec!["x".into(), "y".into(), "z".into()]);
assert_eq!(buf.peek_at(1), Some("y"));
assert_eq!(buf.peek_at(5), None);
}
#[test]
fn test_parse_context_has_errors() {
let mut ctx = ParseContext::from_source("def foo := 1");
assert!(!ctx.has_errors());
ctx.emit_error("bad token");
assert!(ctx.has_errors());
}
#[test]
fn test_parse_context_emit_warning() {
let mut ctx = ParseContext::from_source("x");
ctx.emit_warning("suspicious", 0, 1);
assert!(!ctx.has_errors());
assert_eq!(ctx.diagnostics.warnings.len(), 1);
}
#[test]
fn test_syntactic_hole_anonymous() {
let hole = SyntacticHole::anonymous(5);
assert!(!hole.has_hint());
assert_eq!(hole.offset, 5);
}
#[test]
fn test_syntactic_hole_with_hint() {
let hole = SyntacticHole::with_hint("expected Nat", 10);
assert!(hole.has_hint());
assert_eq!(hole.hint.as_deref(), Some("expected Nat"));
}
#[test]
fn test_split_qualified_name_simple() {
let (ns, base) = split_qualified_name("foo");
assert!(ns.is_empty());
assert_eq!(base, "foo");
}
#[test]
fn test_split_qualified_name_dotted() {
let (ns, base) = split_qualified_name("Nat.add_comm");
assert_eq!(ns, vec!["Nat"]);
assert_eq!(base, "add_comm");
}
#[test]
fn test_split_qualified_name_deep() {
let (ns, base) = split_qualified_name("Foo.Bar.baz");
assert_eq!(ns, vec!["Foo", "Bar"]);
assert_eq!(base, "baz");
}
#[test]
fn test_join_qualified_name() {
assert_eq!(join_qualified_name(&["Nat"], "succ"), "Nat.succ");
assert_eq!(join_qualified_name(&[], "foo"), "foo");
}
#[test]
fn test_is_qualified_name_true() {
assert!(is_qualified_name("Foo.Bar.baz"));
assert!(is_qualified_name("foo"));
}
#[test]
fn test_is_qualified_name_false() {
assert!(!is_qualified_name("foo..bar"));
assert!(!is_qualified_name("123"));
}
#[test]
fn test_strip_namespace_prefix_matching() {
let result = strip_namespace_prefix("Nat.add", "Nat");
assert_eq!(result, "add");
}
#[test]
fn test_strip_namespace_prefix_not_matching() {
let result = strip_namespace_prefix("List.map", "Nat");
assert_eq!(result, "List.map");
}
}
#[allow(dead_code)]
#[allow(missing_docs)]
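/// A minimal string interner backed by a linear scan. `intern` is O(n) in
/// the pool size, which is acceptable for small vocabularies.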
pub struct SimpleStringPoolExt {
pub pool: Vec<String>,
}
impl SimpleStringPoolExt {
#[allow(dead_code)]
pub fn new() -> Self {
SimpleStringPoolExt { pool: Vec::new() }
}
#[allow(dead_code)]
pub fn intern(&mut self, s: &str) -> usize {
if let Some(idx) = self.pool.iter().position(|x| x == s) {
return idx;
}
let idx = self.pool.len();
self.pool.push(s.to_string());
idx
}
#[allow(dead_code)]
pub fn get(&self, idx: usize) -> Option<&str> {
self.pool.get(idx).map(|s| s.as_str())
}
#[allow(dead_code)]
pub fn len(&self) -> usize {
self.pool.len()
}
#[allow(dead_code)]
pub fn is_empty(&self) -> bool {
self.pool.is_empty()
}
}
impl Default for SimpleStringPoolExt {
fn default() -> Self {
Self::new()
}
}
#[allow(dead_code)]
#[allow(missing_docs)]
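/// A stack of lexical scopes mapping surface names to resolved names.
/// `resolve` searches from the innermost scope outward, so inner
/// definitions shadow outer ones; the outermost scope is never popped.
///
/// Shadowing sketch (illustrative only, not compiled):
///
/// ```ignore
/// let mut table = NameResolutionTableExt::new();
/// table.define("x", "Global.x");
/// table.push_scope();
/// table.define("x", "Local.x");
/// assert_eq!(table.resolve("x"), Some("Local.x")); // innermost wins
/// ```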
pub struct NameResolutionTableExt {
scopes: Vec<std::collections::HashMap<String, String>>,
}
impl NameResolutionTableExt {
#[allow(dead_code)]
pub fn new() -> Self {
NameResolutionTableExt {
scopes: vec![std::collections::HashMap::new()],
}
}
#[allow(dead_code)]
pub fn push_scope(&mut self) {
self.scopes.push(std::collections::HashMap::new());
}
#[allow(dead_code)]
pub fn pop_scope(&mut self) {
if self.scopes.len() > 1 {
self.scopes.pop();
}
}
#[allow(dead_code)]
pub fn define(&mut self, name: &str, resolved: &str) {
if let Some(scope) = self.scopes.last_mut() {
scope.insert(name.to_string(), resolved.to_string());
}
}
#[allow(dead_code)]
pub fn resolve(&self, name: &str) -> Option<&str> {
for scope in self.scopes.iter().rev() {
if let Some(v) = scope.get(name) {
return Some(v.as_str());
}
}
None
}
}
impl Default for NameResolutionTableExt {
fn default() -> Self {
Self::new()
}
}
#[allow(dead_code)]
#[allow(missing_docs)]
#[derive(Debug, Clone, Default)]
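/// Feature toggles for the parser. Note that the derived `Default` leaves
/// every flag `false`; use `default_flags()` for the usual permissive
/// configuration.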
pub struct ParseFlagsExt {
pub allow_sorry: bool,
pub unicode_ops: bool,
pub error_recovery: bool,
pub warnings_as_errors: bool,
}
impl ParseFlagsExt {
#[allow(dead_code)]
pub fn default_flags() -> Self {
ParseFlagsExt {
allow_sorry: true,
unicode_ops: true,
error_recovery: true,
warnings_as_errors: false,
}
}
#[allow(dead_code)]
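/// Enables `sorry`. This is a no-op on `default_flags()`, which already
/// allows it; it is meaningful when starting from the derived all-`false`
/// `Default`.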
pub fn with_sorry(mut self) -> Self {
self.allow_sorry = true;
self
}
#[allow(dead_code)]
pub fn strict(mut self) -> Self {
self.error_recovery = false;
self
}
}
#[allow(dead_code)]
#[allow(missing_docs)]
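/// Builder-style parse context: the source text plus flags and an
/// optional filename.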
pub struct ParseContextExt {
pub source: String,
pub flags: ParseFlagsExt,
pub filename: Option<String>,
}
impl ParseContextExt {
#[allow(dead_code)]
pub fn new(source: &str) -> Self {
ParseContextExt {
source: source.to_string(),
flags: ParseFlagsExt::default_flags(),
filename: None,
}
}
#[allow(dead_code)]
pub fn with_filename(mut self, name: &str) -> Self {
self.filename = Some(name.to_string());
self
}
#[allow(dead_code)]
pub fn with_flags(mut self, flags: ParseFlagsExt) -> Self {
self.flags = flags;
self
}
}
#[allow(dead_code)]
#[allow(missing_docs)]
#[derive(Debug, Clone, PartialEq, Eq)]
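/// The phases of the compilation pipeline, in execution order.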
pub enum CompilePhaseExt {
Lex,
Parse,
Elaborate,
Tactic,
CodeGen,
}
impl std::fmt::Display for CompilePhaseExt {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match self {
CompilePhaseExt::Lex => write!(f, "lex"),
CompilePhaseExt::Parse => write!(f, "parse"),
CompilePhaseExt::Elaborate => write!(f, "elaborate"),
CompilePhaseExt::Tactic => write!(f, "tactic"),
CompilePhaseExt::CodeGen => write!(f, "codegen"),
}
}
}
#[allow(dead_code)]
#[allow(missing_docs)]
#[derive(Debug, Clone)]
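/// The measured duration of one compilation phase, in microseconds.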
pub struct PhaseTimerExt {
pub phase: CompilePhaseExt,
pub duration_us: u64,
}
#[allow(dead_code)]
#[allow(missing_docs)]
#[derive(Debug, Default)]
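/// Accumulates per-phase timings (in microseconds) across a pipeline run.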
pub struct PipelineTimingsExt {
pub timings: Vec<PhaseTimerExt>,
}
impl PipelineTimingsExt {
#[allow(dead_code)]
pub fn new() -> Self {
PipelineTimingsExt {
timings: Vec::new(),
}
}
#[allow(dead_code)]
pub fn record(&mut self, phase: CompilePhaseExt, duration_us: u64) {
self.timings.push(PhaseTimerExt { phase, duration_us });
}
#[allow(dead_code)]
pub fn total_us(&self) -> u64 {
self.timings.iter().map(|t| t.duration_us).sum()
}
#[allow(dead_code)]
pub fn format(&self) -> String {
self.timings
.iter()
.map(|t| format!("{}: {}us", t.phase, t.duration_us))
.collect::<Vec<_>>()
.join(", ")
}
}
#[cfg(test)]
mod lib_ext_tests {
use super::*;
#[test]
fn test_simple_string_pool() {
let mut pool = SimpleStringPoolExt::new();
let i1 = pool.intern("hello");
let i2 = pool.intern("world");
let i3 = pool.intern("hello");
assert_eq!(i1, i3);
assert_ne!(i1, i2);
assert_eq!(pool.get(i1), Some("hello"));
assert_eq!(pool.len(), 2);
}
#[test]
fn test_name_resolution_table() {
let mut table = NameResolutionTableExt::new();
table.define("x", "Nat.x");
table.push_scope();
table.define("x", "Int.x");
assert_eq!(table.resolve("x"), Some("Int.x"));
table.pop_scope();
assert_eq!(table.resolve("x"), Some("Nat.x"));
assert_eq!(table.resolve("y"), None);
}
#[test]
fn test_parse_flags() {
let flags = ParseFlagsExt::default_flags();
assert!(flags.allow_sorry);
assert!(flags.unicode_ops);
let strict = ParseFlagsExt::default_flags().strict();
assert!(!strict.error_recovery);
}
#[test]
fn test_parse_context() {
let ctx = ParseContextExt::new("fun x -> x").with_filename("test.lean");
assert_eq!(ctx.filename.as_deref(), Some("test.lean"));
assert!(ctx.flags.allow_sorry);
}
#[test]
fn test_compile_phase_display() {
assert_eq!(CompilePhaseExt::Parse.to_string(), "parse");
assert_eq!(CompilePhaseExt::Elaborate.to_string(), "elaborate");
}
#[test]
fn test_pipeline_timings() {
let mut timings = PipelineTimingsExt::new();
timings.record(CompilePhaseExt::Lex, 100);
timings.record(CompilePhaseExt::Parse, 200);
assert_eq!(timings.total_us(), 300);
let out = timings.format();
assert!(out.contains("lex"));
assert!(out.contains("parse"));
}
}
#[allow(dead_code)]
#[allow(missing_docs)]
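/// An in-memory map from file name to contents that also preserves first
/// insertion order, so files can be iterated deterministically.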
pub struct SourceFileRegistry {
pub files: std::collections::HashMap<String, String>,
pub file_order: Vec<String>,
}
impl SourceFileRegistry {
#[allow(dead_code)]
pub fn new() -> Self {
SourceFileRegistry {
files: std::collections::HashMap::new(),
file_order: Vec::new(),
}
}
#[allow(dead_code)]
pub fn add(&mut self, name: &str, content: &str) {
self.files.insert(name.to_string(), content.to_string());
// Avoid allocating a temporary String just to test membership.
if !self.file_order.iter().any(|f| f == name) {
self.file_order.push(name.to_string());
}
}
#[allow(dead_code)]
pub fn get(&self, name: &str) -> Option<&str> {
self.files.get(name).map(|s| s.as_str())
}
#[allow(dead_code)]
pub fn len(&self) -> usize {
self.files.len()
}
#[allow(dead_code)]
pub fn is_empty(&self) -> bool {
self.files.is_empty()
}
}
impl Default for SourceFileRegistry {
fn default() -> Self {
Self::new()
}
}
#[allow(dead_code)]
#[allow(missing_docs)]
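/// One source file moving through the pipeline, tracking whether it
/// parsed successfully and how many errors were reported.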
pub struct CompilationUnit {
pub filename: String,
pub source: String,
pub parse_ok: bool,
pub error_count: usize,
}
impl CompilationUnit {
#[allow(dead_code)]
pub fn new(filename: &str, source: &str) -> Self {
CompilationUnit {
filename: filename.to_string(),
source: source.to_string(),
parse_ok: false,
error_count: 0,
}
}
#[allow(dead_code)]
pub fn mark_parsed(mut self) -> Self {
self.parse_ok = true;
self
}
#[allow(dead_code)]
pub fn with_errors(mut self, count: usize) -> Self {
self.error_count = count;
self
}
}
#[allow(dead_code)]
#[allow(missing_docs)]
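/// Counts how often each token string has been recorded.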
pub struct TokenFrequencyMapExt {
pub freq: std::collections::HashMap<String, usize>,
}
impl TokenFrequencyMapExt {
#[allow(dead_code)]
pub fn new() -> Self {
TokenFrequencyMapExt {
freq: std::collections::HashMap::new(),
}
}
#[allow(dead_code)]
pub fn record(&mut self, token: &str) {
*self.freq.entry(token.to_string()).or_insert(0) += 1;
}
#[allow(dead_code)]
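/// Returns the most frequently recorded token and its count; ties are
/// broken arbitrarily (whatever `HashMap` iteration yields first).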
pub fn most_frequent(&self) -> Option<(&str, usize)> {
self.freq
.iter()
.max_by_key(|(_, c)| *c)
.map(|(k, &c)| (k.as_str(), c))
}
#[allow(dead_code)]
pub fn total(&self) -> usize {
self.freq.values().sum()
}
}
impl Default for TokenFrequencyMapExt {
fn default() -> Self {
Self::new()
}
}
#[cfg(test)]
mod lib_ext2_tests {
use super::*;
#[test]
fn test_source_file_registry() {
let mut reg = SourceFileRegistry::new();
reg.add("a.lean", "def foo := 1");
reg.add("b.lean", "def bar := 2");
assert_eq!(reg.len(), 2);
assert_eq!(reg.get("a.lean"), Some("def foo := 1"));
assert_eq!(reg.get("c.lean"), None);
}
#[test]
fn test_compilation_unit() {
let unit = CompilationUnit::new("test.lean", "def x := 1").mark_parsed();
assert!(unit.parse_ok);
assert_eq!(unit.error_count, 0);
}
#[test]
fn test_token_frequency_map_ext() {
let mut m = TokenFrequencyMapExt::new();
m.record("def");
m.record("def");
m.record("fun");
assert_eq!(m.total(), 3);
let (tok, count) = m.most_frequent().expect("test operation should succeed");
assert_eq!(tok, "def");
assert_eq!(count, 2);
}
}
#[allow(dead_code)]
#[allow(missing_docs)]
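/// Maps a declaration name to the file and line where it was declared;
/// re-registering a name overwrites the previous location.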
pub struct DeclTable {
pub entries: std::collections::HashMap<String, (String, usize)>,
}
impl DeclTable {
#[allow(dead_code)]
pub fn new() -> Self {
DeclTable {
entries: std::collections::HashMap::new(),
}
}
#[allow(dead_code)]
pub fn register(&mut self, name: &str, file: &str, line: usize) {
self.entries
.insert(name.to_string(), (file.to_string(), line));
}
#[allow(dead_code)]
pub fn lookup(&self, name: &str) -> Option<(&str, usize)> {
self.entries.get(name).map(|(f, l)| (f.as_str(), *l))
}
#[allow(dead_code)]
pub fn len(&self) -> usize {
self.entries.len()
}
#[allow(dead_code)]
pub fn is_empty(&self) -> bool {
self.entries.is_empty()
}
}
impl Default for DeclTable {
fn default() -> Self {
Self::new()
}
}
#[allow(dead_code)]
#[allow(missing_docs)]
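/// A module import graph stored as adjacency lists from importing module
/// to imported modules. Duplicate edges are kept, and `edge_count` counts
/// them individually.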
pub struct ImportGraph {
pub imports: std::collections::HashMap<String, Vec<String>>,
}
impl ImportGraph {
#[allow(dead_code)]
pub fn new() -> Self {
ImportGraph {
imports: std::collections::HashMap::new(),
}
}
#[allow(dead_code)]
pub fn add_import(&mut self, from: &str, to: &str) {
self.imports
.entry(from.to_string())
.or_default()
.push(to.to_string());
}
#[allow(dead_code)]
pub fn imports_of(&self, module: &str) -> &[String] {
self.imports
.get(module)
.map(|v| v.as_slice())
.unwrap_or(&[])
}
#[allow(dead_code)]
pub fn edge_count(&self) -> usize {
self.imports.values().map(|v| v.len()).sum()
}
}
impl Default for ImportGraph {
fn default() -> Self {
Self::new()
}
}
#[allow(dead_code)]
#[allow(missing_docs)]
#[derive(Debug, Default)]
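/// Simple counters accumulated over a parse run.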
pub struct ParseStatsExt {
pub tokens_processed: usize,
pub decls_parsed: usize,
pub errors: usize,
}
impl ParseStatsExt {
#[allow(dead_code)]
pub fn new() -> Self {
ParseStatsExt::default()
}
#[allow(dead_code)]
pub fn format(&self) -> String {
format!(
"tokens={} decls={} errors={}",
self.tokens_processed, self.decls_parsed, self.errors
)
}
}
#[cfg(test)]
mod lib_ext3_tests {
use super::*;
#[test]
fn test_decl_table() {
let mut t = DeclTable::new();
t.register("foo", "a.lean", 10);
t.register("bar", "b.lean", 20);
assert_eq!(t.len(), 2);
let (file, line) = t.lookup("foo").expect("lookup should succeed");
assert_eq!(file, "a.lean");
assert_eq!(line, 10);
assert!(t.lookup("baz").is_none());
}
#[test]
fn test_import_graph() {
let mut g = ImportGraph::new();
g.add_import("A", "B");
g.add_import("A", "C");
g.add_import("B", "C");
assert_eq!(g.imports_of("A").len(), 2);
assert_eq!(g.edge_count(), 3);
}
#[test]
fn test_parse_stats_ext() {
let mut s = ParseStatsExt::new();
s.tokens_processed = 100;
s.decls_parsed = 5;
let out = s.format();
assert!(out.contains("tokens=100"));
}
}
#[allow(dead_code)]
#[allow(missing_docs)]
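/// Counts each whitespace-separated word in `text`.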
pub fn word_frequency(text: &str) -> std::collections::HashMap<String, usize> {
let mut freq = std::collections::HashMap::new();
for word in text.split_whitespace() {
*freq.entry(word.to_string()).or_insert(0) += 1;
}
freq
}
#[allow(dead_code)]
#[allow(missing_docs)]
#[derive(Debug, Default)]
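/// Line, word, character, and blank-line statistics for a source string.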
pub struct SourceSummary {
pub lines: usize,
pub words: usize,
pub chars: usize,
pub blank_lines: usize,
}
impl SourceSummary {
#[allow(dead_code)]
#[allow(clippy::should_implement_trait)]
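/// Computes the summary. `str::lines()` does not yield a final empty
/// line, so a source ending in `'\n'` is counted as having one extra
/// (empty) final line; the blank-line count is not adjusted the same way.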
pub fn from_str(src: &str) -> Self {
let mut lines = src.lines().count();
if src.ends_with('\n') {
lines += 1;
}
let blank_lines = src.lines().filter(|l| l.trim().is_empty()).count();
let words = src.split_whitespace().count();
let chars = src.chars().count();
SourceSummary {
lines,
words,
chars,
blank_lines,
}
}
#[allow(dead_code)]
pub fn format(&self) -> String {
format!(
"lines={} words={} chars={} blank={}",
self.lines, self.words, self.chars, self.blank_lines
)
}
}
#[cfg(test)]
mod lib_final_tests {
use super::*;
#[test]
fn test_word_frequency() {
let freq = word_frequency("a b a c b a");
assert_eq!(freq["a"], 3);
assert_eq!(freq["b"], 2);
assert_eq!(freq["c"], 1);
}
#[test]
fn test_source_summary() {
let src = "def foo := 1\n\ndef bar := 2\n";
let s = SourceSummary::from_str(src);
assert_eq!(s.lines, 4);
assert_eq!(s.blank_lines, 1);
let out = s.format();
assert!(out.contains("lines=4"));
}
}
#[allow(dead_code)]
#[allow(missing_docs)]
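/// A cheap whitespace-based token estimate; a real lexer may produce a
/// different count.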
pub fn rough_token_count(src: &str) -> usize {
src.split_whitespace().count()
}
#[allow(dead_code)]
#[allow(missing_docs)]
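/// Returns the first `n` characters of `src`, appending `...` when the
/// source is longer than `n` characters.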
pub fn source_preview(src: &str, n: usize) -> String {
let truncated: String = src.chars().take(n).collect();
if src.chars().count() > n {
format!("{}...", truncated)
} else {
truncated
}
}
#[allow(dead_code)]
#[allow(missing_docs)]
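/// Tracks the stack of currently open namespaces and qualifies bare names
/// against it, e.g. with `Foo` and `Bar` open, `baz` resolves to
/// `Foo.Bar.baz`.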
pub struct NamespaceResolver {
pub stack: Vec<String>,
}
impl NamespaceResolver {
#[allow(dead_code)]
pub fn new() -> Self {
NamespaceResolver { stack: Vec::new() }
}
#[allow(dead_code)]
pub fn open(&mut self, ns: &str) {
self.stack.push(ns.to_string());
}
#[allow(dead_code)]
pub fn close(&mut self) {
self.stack.pop();
}
#[allow(dead_code)]
pub fn resolve(&self, name: &str) -> String {
if self.stack.is_empty() {
return name.to_string();
}
format!("{}.{}", self.stack.join("."), name)
}
}
impl Default for NamespaceResolver {
fn default() -> Self {
Self::new()
}
}
#[cfg(test)]
mod lib_pad {
use super::*;
#[test]
fn test_rough_token_count() {
assert_eq!(rough_token_count("a b c d"), 4);
}
#[test]
fn test_source_preview() {
assert_eq!(source_preview("hello world", 5), "hello...");
assert_eq!(source_preview("hi", 5), "hi");
}
#[test]
fn test_namespace_resolver() {
let mut r = NamespaceResolver::new();
r.open("Foo");
r.open("Bar");
assert_eq!(r.resolve("baz"), "Foo.Bar.baz");
r.close();
assert_eq!(r.resolve("qux"), "Foo.qux");
}
}
#[allow(dead_code)]
#[allow(missing_docs)]
/// Identical in behavior to `word_frequency`; kept as a separate public
/// entry point but delegated to avoid duplicating the counting logic.
pub fn word_frequency_ext2(src: &str) -> std::collections::HashMap<String, usize> {
word_frequency(src)
}
#[allow(dead_code)]
#[allow(missing_docs)]
#[derive(Debug, Clone)]
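/// A named variant of `SourceSummary` that uses the raw `lines()` count,
/// with no trailing-newline adjustment.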
pub struct SourceSummaryExt2 {
pub line_count: usize,
pub char_count: usize,
pub word_count: usize,
pub name: String,
}
impl SourceSummaryExt2 {
#[allow(dead_code)]
pub fn from_str(name: &str, src: &str) -> Self {
SourceSummaryExt2 {
name: name.to_string(),
line_count: src.lines().count(),
char_count: src.chars().count(),
word_count: src.split_whitespace().count(),
}
}
#[allow(dead_code)]
pub fn summary_line(&self) -> String {
format!(
"{}: {} lines, {} chars, {} words",
self.name, self.line_count, self.char_count, self.word_count
)
}
}
#[allow(dead_code)]
#[allow(missing_docs)]
#[derive(Debug, Clone, Default)]
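/// A flat table mapping a declaration name to its type, both as strings.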
pub struct DeclTableExt2 {
pub entries: std::collections::HashMap<String, String>,
}
impl DeclTableExt2 {
#[allow(dead_code)]
pub fn new() -> Self {
DeclTableExt2 {
entries: std::collections::HashMap::new(),
}
}
#[allow(dead_code)]
pub fn insert(&mut self, name: &str, ty: &str) {
self.entries.insert(name.to_string(), ty.to_string());
}
#[allow(dead_code)]
pub fn lookup(&self, name: &str) -> Option<&str> {
self.entries.get(name).map(|s| s.as_str())
}
#[allow(dead_code)]
pub fn len(&self) -> usize {
self.entries.len()
}
#[allow(dead_code)]
pub fn is_empty(&self) -> bool {
self.entries.is_empty()
}
}
#[cfg(test)]
mod lib_pad2 {
use super::*;
#[test]
fn test_word_frequency_ext2() {
let freq = word_frequency_ext2("a b a c a b");
assert_eq!(freq["a"], 3);
assert_eq!(freq["b"], 2);
assert_eq!(freq["c"], 1);
}
#[test]
fn test_source_summary_ext2() {
let s = SourceSummaryExt2::from_str("test.lean", "def foo := 42\ndef bar := 0");
assert_eq!(s.line_count, 2);
assert!(s.summary_line().contains("test.lean"));
}
#[test]
fn test_decl_table_ext2() {
let mut t = DeclTableExt2::new();
t.insert("foo", "Nat");
assert_eq!(t.lookup("foo"), Some("Nat"));
assert_eq!(t.lookup("bar"), None);
assert_eq!(t.len(), 1);
}
}
#[allow(dead_code)]
#[allow(missing_docs)]
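/// Returns the most frequent whitespace-separated word in `src`, if any;
/// ties are broken arbitrarily (whatever `HashMap` iteration yields first).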
pub fn most_common_word(src: &str) -> Option<String> {
let freq = word_frequency_ext2(src);
freq.into_iter()
.max_by_key(|(_, count)| *count)
.map(|(w, _)| w)
}
#[allow(dead_code)]
#[allow(missing_docs)]
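/// Collects every line of `src` that contains `keyword`.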
pub fn lines_containing(src: &str, keyword: &str) -> Vec<String> {
src.lines()
.filter(|l| l.contains(keyword))
.map(|l| l.to_string())
.collect()
}
#[allow(dead_code)]
#[allow(missing_docs)]
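/// Returns `true` for characters allowed in identifiers: alphanumerics,
/// `_`, and `'` (primes, as in Lean-style names).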
pub fn is_ident_char(c: char) -> bool {
c.is_alphanumeric() || c == '_' || c == '\''
}