use crate::tokens::Span;
/// Cache mapping generated (line, column) positions to original
/// (line, column) positions, memoising repeated translations.
#[allow(dead_code)]
#[allow(missing_docs)]
pub struct SourceMapCache {
    pub cache: std::collections::HashMap<(usize, usize), (usize, usize)>,
}

impl SourceMapCache {
    /// Creates an empty cache.
    #[allow(dead_code)]
    pub fn new() -> Self {
        SourceMapCache {
            cache: std::collections::HashMap::new(),
        }
    }
    /// Records that generated position `(gen_line, gen_col)` maps to
    /// original position `(orig_line, orig_col)`, replacing any prior entry
    /// for the same generated position.
    #[allow(dead_code)]
    pub fn insert(&mut self, gen_line: usize, gen_col: usize, orig_line: usize, orig_col: usize) {
        self.cache
            .insert((gen_line, gen_col), (orig_line, orig_col));
    }
    /// Returns the cached original position for a generated position, if any.
    #[allow(dead_code)]
    pub fn lookup(&self, gen_line: usize, gen_col: usize) -> Option<(usize, usize)> {
        self.cache.get(&(gen_line, gen_col)).copied()
    }
    /// Number of cached mappings.
    #[allow(dead_code)]
    pub fn len(&self) -> usize {
        self.cache.len()
    }
    /// True when no mappings have been cached.
    #[allow(dead_code)]
    pub fn is_empty(&self) -> bool {
        self.cache.is_empty()
    }
}

// Clippy `new_without_default`: a type with a zero-argument `new` should
// also implement `Default` so it composes with derived-`Default` containers.
impl Default for SourceMapCache {
    fn default() -> Self {
        Self::new()
    }
}
/// A position in source text: absolute byte `offset` plus 1-based
/// `line` and `column` coordinates.
#[allow(dead_code)]
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct SourcePosition {
    pub offset: usize,
    pub line: usize,
    pub column: usize,
}

#[allow(dead_code)]
impl SourcePosition {
    /// Builds a position from explicit coordinates.
    pub fn new(offset: usize, line: usize, column: usize) -> Self {
        Self { offset, line, column }
    }
    /// The start of a document: byte 0, line 1, column 1.
    pub fn origin() -> Self {
        Self::new(0, 1, 1)
    }
    /// Steps one column to the right, consuming `bytes` bytes of input
    /// (the byte width of the character stepped over); the line is kept.
    pub fn advance_col(self, bytes: usize) -> Self {
        Self::new(self.offset + bytes, self.line, self.column + 1)
    }
    /// Steps to the start of the next line, consuming `bytes` bytes
    /// (the byte width of the line terminator).
    pub fn advance_line(self, bytes: usize) -> Self {
        Self::new(self.offset + bytes, self.line + 1, 1)
    }
    /// True when `self` lies strictly before `other` by byte offset.
    pub fn is_before(&self, other: &SourcePosition) -> bool {
        self.offset < other.offset
    }
    /// True when `self` lies strictly after `other` by byte offset.
    pub fn is_after(&self, other: &SourcePosition) -> bool {
        other.offset < self.offset
    }
}
/// Severity levels for diagnostics.
///
/// Variant order is significant: the derived `PartialOrd`/`Ord` follow
/// declaration order, so `Hint < Information < Warning < Error`.
#[allow(dead_code)]
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]
pub enum DiagnosticSeverity {
    Hint,
    Information,
    Warning,
    Error,
}
/// A token classified for semantic highlighting: the source `span` it
/// covers, its `token_type`, and any extra `modifiers`.
#[derive(Clone, Debug, PartialEq)]
pub struct SemanticToken {
    pub span: Span,
    pub token_type: SemanticTokenType,
    pub modifiers: Vec<SemanticModifier>,
}
impl SemanticToken {
    /// Creates a token carrying no modifiers.
    #[allow(dead_code)]
    pub fn new(span: Span, token_type: SemanticTokenType) -> Self {
        // Delegate to the general constructor with an empty modifier list.
        Self::with_modifiers(span, token_type, Vec::new())
    }
    /// Creates a token with an explicit modifier list.
    #[allow(dead_code)]
    pub fn with_modifiers(
        span: Span,
        token_type: SemanticTokenType,
        modifiers: Vec<SemanticModifier>,
    ) -> Self {
        Self {
            span,
            token_type,
            modifiers,
        }
    }
}
/// A collection of range transforms accumulated during a mapping pass.
#[allow(dead_code)]
#[allow(missing_docs)]
#[derive(Debug, Default)]
pub struct SourceMapBatch {
    pub entries: Vec<RangeTransform>,
}
impl SourceMapBatch {
    /// Creates an empty batch.
    #[allow(dead_code)]
    pub fn new() -> Self {
        SourceMapBatch {
            entries: Vec::new(),
        }
    }
    /// Appends one transform to the batch.
    #[allow(dead_code)]
    pub fn add(&mut self, e: RangeTransform) {
        self.entries.push(e);
    }
    /// Sum of the original-side lengths of all transforms.
    #[allow(dead_code)]
    pub fn total_coverage(&self) -> usize {
        self.entries.iter().fold(0, |total, e| total + e.orig_len())
    }
    /// How many transforms keep original and generated length equal.
    #[allow(dead_code)]
    pub fn length_preserving_count(&self) -> usize {
        self.entries
            .iter()
            .map(|e| e.is_length_preserving() as usize)
            .sum()
    }
}
/// A named symbol in a document outline (theorem, definition, namespace, ...).
///
/// `full_span` covers the whole declaration, `name_span` only the
/// identifier; nested declarations live in `children`.
#[allow(dead_code)]
#[derive(Debug, Clone, PartialEq)]
pub struct DocumentSymbol {
    pub name: String,
    pub kind: SymbolKind,
    pub full_span: Span,
    pub name_span: Span,
    pub children: Vec<DocumentSymbol>,
}
#[allow(dead_code)]
impl DocumentSymbol {
    /// Creates a leaf symbol with no children.
    pub fn new(name: String, kind: SymbolKind, full_span: Span, name_span: Span) -> Self {
        Self {
            name,
            kind,
            full_span,
            name_span,
            children: Vec::new(),
        }
    }
    /// Nests `child` under this symbol.
    pub fn add_child(&mut self, child: DocumentSymbol) {
        self.children.push(child);
    }
    /// True when any child symbols are nested here.
    pub fn has_children(&self) -> bool {
        !self.children.is_empty()
    }
    /// Depth-first search for the first symbol named `name`, checking
    /// this node before descending into its children (preorder).
    pub fn find_by_name(&self, name: &str) -> Option<&DocumentSymbol> {
        if self.name == name {
            Some(self)
        } else {
            self.children.iter().find_map(|c| c.find_by_name(name))
        }
    }
    /// Flattens the subtree rooted here into a preorder list
    /// (self first, then each child's subtree in order).
    pub fn flatten(&self) -> Vec<&DocumentSymbol> {
        std::iter::once(self)
            .chain(self.children.iter().flat_map(|c| c.flatten()))
            .collect()
    }
}
/// A labelled span of source text with optional free-form metadata.
#[allow(dead_code)]
#[derive(Debug, Clone, PartialEq)]
pub struct SourceRegion {
    pub label: String,
    pub span: Span,
    pub meta: Option<String>,
}
#[allow(dead_code)]
impl SourceRegion {
pub fn new(label: impl Into<String>, span: Span) -> Self {
Self {
label: label.into(),
span,
meta: None,
}
}
pub fn with_meta(label: impl Into<String>, span: Span, meta: impl Into<String>) -> Self {
Self {
label: label.into(),
span,
meta: Some(meta.into()),
}
}
pub fn contains_offset(&self, offset: usize) -> bool {
offset >= self.span.start && offset < self.span.end
}
pub fn byte_len(&self) -> usize {
self.span.end.saturating_sub(self.span.start)
}
}
/// Extra attributes that can be attached to a semantic token.
#[derive(Clone, Debug, PartialEq, Eq)]
pub enum SemanticModifier {
    Definition,
    Declaration,
    Deprecated,
    Documentation,
}
/// Coarse classification of a token for semantic highlighting.
#[derive(Clone, Debug, PartialEq, Eq)]
pub enum SemanticTokenType {
    Keyword,
    Variable,
    Function,
    Type,
    Constructor,
    Operator,
    Number,
    StringLit,
    Comment,
    Tactic,
    Namespace,
}
/// One indexed item of the source map: a `span`, its classification,
/// and (when known) the associated identifier and type information.
#[derive(Clone, Debug, PartialEq)]
pub struct SourceEntry {
    pub span: Span,
    pub kind: EntryKind,
    pub name: Option<String>,
    pub ty_info: Option<String>,
}
impl SourceEntry {
    /// Creates an anonymous entry with no name or type info.
    #[allow(dead_code)]
    pub fn new(span: Span, kind: EntryKind) -> Self {
        Self {
            span,
            kind,
            name: None,
            ty_info: None,
        }
    }
    /// Creates a named entry with no type info.
    #[allow(dead_code)]
    pub fn with_name(span: Span, kind: EntryKind, name: &str) -> Self {
        Self {
            name: Some(name.to_string()),
            ..Self::new(span, kind)
        }
    }
    /// Creates a named entry that also carries type information.
    #[allow(dead_code)]
    pub fn with_name_and_type(span: Span, kind: EntryKind, name: &str, ty: &str) -> Self {
        Self {
            ty_info: Some(ty.to_string()),
            ..Self::with_name(span, kind, name)
        }
    }
    /// True when `offset` lies within the entry's span
    /// (start inclusive, end exclusive).
    #[allow(dead_code)]
    pub fn contains_offset(&self, offset: usize) -> bool {
        (self.span.start..self.span.end).contains(&offset)
    }
}
/// Classification of a `SourceEntry`.
#[derive(Clone, Debug, PartialEq, Eq)]
pub enum EntryKind {
    Definition,
    Reference,
    TypeAnnotation,
    Binder,
    Constructor,
    Pattern,
    Tactic,
    Keyword,
    Literal,
    Comment,
    DocComment,
    Operator,
}
/// A diagnostic attached to a source span, with severity, message,
/// an optional machine-readable code, and related locations.
#[allow(dead_code)]
#[derive(Debug, Clone, PartialEq)]
pub struct SourceDiagnostic {
    pub span: Span,
    pub severity: DiagnosticSeverity,
    pub message: String,
    pub code: Option<String>,
    // Each related item pairs another span with an explanatory message.
    pub related: Vec<(Span, String)>,
}
#[allow(dead_code)]
impl SourceDiagnostic {
    /// Creates a diagnostic with no code and no related locations.
    pub fn new(span: Span, severity: DiagnosticSeverity, message: impl Into<String>) -> Self {
        Self {
            span,
            severity,
            message: message.into(),
            code: None,
            related: Vec::new(),
        }
    }
    /// Convenience constructor for an `Error` diagnostic.
    pub fn error(span: Span, message: impl Into<String>) -> Self {
        Self::new(span, DiagnosticSeverity::Error, message)
    }
    /// Convenience constructor for a `Warning` diagnostic.
    pub fn warning(span: Span, message: impl Into<String>) -> Self {
        Self::new(span, DiagnosticSeverity::Warning, message)
    }
    /// Convenience constructor for an `Information` diagnostic.
    pub fn info(span: Span, message: impl Into<String>) -> Self {
        Self::new(span, DiagnosticSeverity::Information, message)
    }
    /// Convenience constructor for a `Hint` diagnostic — added for parity
    /// with the other `DiagnosticSeverity` variants.
    pub fn hint(span: Span, message: impl Into<String>) -> Self {
        Self::new(span, DiagnosticSeverity::Hint, message)
    }
    /// Attaches a machine-readable code (builder style).
    pub fn with_code(mut self, code: impl Into<String>) -> Self {
        self.code = Some(code.into());
        self
    }
    /// Appends a related location/message pair (builder style).
    pub fn with_related(mut self, span: Span, message: impl Into<String>) -> Self {
        self.related.push((span, message.into()));
        self
    }
    /// True for `Error` severity.
    pub fn is_error(&self) -> bool {
        self.severity == DiagnosticSeverity::Error
    }
    /// True for `Warning` severity.
    pub fn is_warning(&self) -> bool {
        self.severity == DiagnosticSeverity::Warning
    }
}
/// Result of a go-to-definition query: the looked-up `name`, every
/// definition span found, and whether the target is a local binding.
#[allow(dead_code)]
#[derive(Debug, Clone, PartialEq)]
pub struct GoToDefinitionResult {
    pub name: String,
    pub definition_spans: Vec<Span>,
    pub is_local: bool,
}
#[allow(dead_code)]
impl GoToDefinitionResult {
pub fn new(name: String, spans: Vec<Span>, is_local: bool) -> Self {
Self {
name,
definition_spans: spans,
is_local,
}
}
pub fn found(&self) -> bool {
!self.definition_spans.is_empty()
}
pub fn primary_span(&self) -> Option<&Span> {
self.definition_spans.first()
}
}
/// Counts of entry kinds in a source map, for quick summaries.
#[allow(dead_code)]
#[derive(Debug, Clone, Default)]
pub struct SourceMapStats {
    pub total_entries: usize,
    pub definitions: usize,
    pub references: usize,
    pub tactics: usize,
    // Counts both `Comment` and `DocComment` entries.
    pub comments: usize,
    pub operators: usize,
}
#[allow(dead_code)]
impl SourceMapStats {
    /// Tallies entry kinds over `entries` in a single pass.
    ///
    /// `Comment` and `DocComment` are counted together; kinds without a
    /// dedicated counter only contribute to `total_entries`.
    pub fn from_entries(entries: &[SourceEntry]) -> Self {
        let mut stats = Self {
            total_entries: entries.len(),
            ..Self::default()
        };
        for entry in entries {
            match entry.kind {
                EntryKind::Definition => stats.definitions += 1,
                EntryKind::Reference => stats.references += 1,
                EntryKind::Tactic => stats.tactics += 1,
                EntryKind::Comment | EntryKind::DocComment => stats.comments += 1,
                EntryKind::Operator => stats.operators += 1,
                _ => {}
            }
        }
        stats
    }
}
/// Aggregated per-document index: definition and reference tables,
/// outline symbols, and labelled regions.
#[allow(dead_code)]
#[derive(Debug, Clone, Default)]
pub struct SourceIndex {
    pub definitions: DefinitionIndex,
    pub references: ReferenceIndex,
    pub symbols: Vec<DocumentSymbol>,
    pub regions: Vec<SourceRegion>,
}
#[allow(dead_code)]
impl SourceIndex {
    /// Creates an empty index.
    pub fn new() -> Self {
        Self::default()
    }
    /// Records a definition site for `name`.
    pub fn register_definition(&mut self, name: impl Into<String>, span: Span) {
        self.definitions.register(name, span);
    }
    /// Records a reference (use site) for `name`.
    pub fn record_reference(&mut self, name: impl Into<String>, span: Span) {
        self.references.record(name, span);
    }
    /// Adds a top-level outline symbol.
    pub fn add_symbol(&mut self, symbol: DocumentSymbol) {
        self.symbols.push(symbol);
    }
    /// Adds a labelled region.
    pub fn add_region(&mut self, region: SourceRegion) {
        self.regions.push(region);
    }
    /// First top-level symbol whose full span contains `offset`
    /// (start inclusive, end exclusive); children are not searched.
    pub fn symbol_at_offset(&self, offset: usize) -> Option<&DocumentSymbol> {
        self.symbols
            .iter()
            .find(|sym| (sym.full_span.start..sym.full_span.end).contains(&offset))
    }
    /// All recorded definition spans for `name` (empty if unknown).
    pub fn definition_spans(&self, name: &str) -> &[Span] {
        self.definitions.lookup(name)
    }
    /// All recorded reference spans for `name` (empty if unknown).
    pub fn reference_spans(&self, name: &str) -> &[Span] {
        self.references.uses_of(name)
    }
    /// The top-level symbols in insertion order.
    pub fn top_level_symbols(&self) -> &[DocumentSymbol] {
        self.symbols.as_slice()
    }
}
/// Maps identifier names to the spans where they are defined.
#[allow(dead_code)]
#[derive(Debug, Clone, Default)]
pub struct DefinitionIndex {
    entries: std::collections::HashMap<String, Vec<Span>>,
}
#[allow(dead_code)]
impl DefinitionIndex {
    /// Creates an empty index.
    pub fn new() -> Self {
        Self::default()
    }
    /// Appends a definition span under `name`.
    pub fn register(&mut self, name: impl Into<String>, span: Span) {
        let spans = self.entries.entry(name.into()).or_default();
        spans.push(span);
    }
    /// All spans recorded for `name`; an empty slice if unknown.
    pub fn lookup(&self, name: &str) -> &[Span] {
        match self.entries.get(name) {
            Some(v) => v.as_slice(),
            None => &[],
        }
    }
    /// True when `name` has at least one recorded definition.
    pub fn contains(&self, name: &str) -> bool {
        self.entries.contains_key(name)
    }
    /// Every known name, in arbitrary (hash-map) order.
    pub fn names(&self) -> Vec<&str> {
        self.entries.keys().map(String::as_str).collect()
    }
    /// Number of distinct names.
    pub fn len(&self) -> usize {
        self.entries.len()
    }
    /// True when no names are recorded.
    pub fn is_empty(&self) -> bool {
        self.entries.is_empty()
    }
    /// Removes and returns all spans for `name` (empty if absent).
    pub fn remove(&mut self, name: &str) -> Vec<Span> {
        self.entries.remove(name).unwrap_or_default()
    }
    /// Absorbs `other`, appending its spans after any existing ones
    /// for the same name.
    pub fn merge(&mut self, other: DefinitionIndex) {
        for (name, spans) in other.entries {
            self.entries.entry(name).or_default().extend(spans);
        }
    }
}
/// A pair of offset maps kept in insertion order: original→generated
/// (`forward`) and generated→original (`backward`).
#[allow(dead_code)]
#[allow(missing_docs)]
pub struct BidiMapper {
    pub forward: Vec<(usize, usize)>,
    pub backward: Vec<(usize, usize)>,
}

impl BidiMapper {
    /// Creates an empty mapper.
    #[allow(dead_code)]
    pub fn new() -> Self {
        BidiMapper {
            forward: Vec::new(),
            backward: Vec::new(),
        }
    }
    /// Records that original offset `orig` corresponds to generated
    /// offset `gen`; both direction tables are updated together.
    #[allow(dead_code)]
    pub fn add(&mut self, orig: usize, gen: usize) {
        self.forward.push((orig, gen));
        self.backward.push((gen, orig));
    }
    /// Maps an original offset to a generated one: scans from the most
    /// recently added pair backwards and takes the first whose original
    /// key does not exceed `orig`.
    #[allow(dead_code)]
    pub fn to_gen(&self, orig: usize) -> Option<usize> {
        self.forward
            .iter()
            .rev()
            .find_map(|&(o, g)| if o <= orig { Some(g) } else { None })
    }
    /// Maps a generated offset back to an original one (mirror of `to_gen`).
    #[allow(dead_code)]
    pub fn to_orig(&self, gen: usize) -> Option<usize> {
        self.backward
            .iter()
            .rev()
            .find_map(|&(g, o)| if g <= gen { Some(o) } else { None })
    }
    /// Number of recorded pairs.
    #[allow(dead_code)]
    pub fn len(&self) -> usize {
        self.forward.len()
    }
    /// True when no pairs have been recorded.
    #[allow(dead_code)]
    pub fn is_empty(&self) -> bool {
        self.forward.is_empty()
    }
}
/// Maps a half-open byte range in the original text to its counterpart
/// in the generated text.
#[allow(dead_code)]
#[allow(missing_docs)]
#[derive(Debug, Clone)]
pub struct RangeTransform {
    pub orig_start: usize,
    pub orig_end: usize,
    pub gen_start: usize,
    pub gen_end: usize,
}

impl RangeTransform {
    /// Creates a transform from explicit range endpoints.
    #[allow(dead_code)]
    pub fn new(orig_start: usize, orig_end: usize, gen_start: usize, gen_end: usize) -> Self {
        RangeTransform {
            orig_start,
            orig_end,
            gen_start,
            gen_end,
        }
    }
    /// Length of the original-side range; zero when the range is inverted.
    #[allow(dead_code)]
    pub fn orig_len(&self) -> usize {
        if self.orig_end > self.orig_start {
            self.orig_end - self.orig_start
        } else {
            0
        }
    }
    /// Length of the generated-side range; zero when the range is inverted.
    #[allow(dead_code)]
    pub fn gen_len(&self) -> usize {
        self.gen_end.checked_sub(self.gen_start).unwrap_or(0)
    }
    /// True when the transform neither grows nor shrinks the text.
    #[allow(dead_code)]
    pub fn is_length_preserving(&self) -> bool {
        self.gen_len() == self.orig_len()
    }
}
/// A generated→original offset map kept sorted by generated offset so
/// lookups can binary-search.
#[allow(dead_code)]
#[allow(missing_docs)]
pub struct SortedSourceMap {
    pub pairs: Vec<(usize, usize)>,
}

impl SortedSourceMap {
    /// Creates an empty map.
    #[allow(dead_code)]
    pub fn new() -> Self {
        SortedSourceMap { pairs: Vec::new() }
    }
    /// Inserts a (generated, original) pair, keeping `pairs` sorted by
    /// generated offset.
    ///
    /// Fix: the previous implementation pushed and then re-sorted the
    /// whole vector on every call (O(n log n) per insert). A binary
    /// search for the insertion point keeps each insert to
    /// O(log n) + one O(n) shift; inserting *after* equal keys matches
    /// the order the stable sort would have produced.
    #[allow(dead_code)]
    pub fn add(&mut self, gen: usize, orig: usize) {
        let idx = self.pairs.partition_point(|&(g, _)| g <= gen);
        self.pairs.insert(idx, (gen, orig));
    }
    /// Returns the original offset of the last pair whose generated
    /// offset is `<= gen`, or `None` when `gen` precedes every pair.
    #[allow(dead_code)]
    pub fn lookup(&self, gen: usize) -> Option<usize> {
        let idx = self.pairs.partition_point(|&(g, _)| g <= gen);
        if idx == 0 {
            return None;
        }
        Some(self.pairs[idx - 1].1)
    }
    /// Number of stored pairs.
    #[allow(dead_code)]
    pub fn len(&self) -> usize {
        self.pairs.len()
    }
    /// True when the map holds no pairs.
    #[allow(dead_code)]
    pub fn is_empty(&self) -> bool {
        self.pairs.is_empty()
    }
}
/// Summary of how two source maps differ, as entry counts.
#[allow(dead_code)]
#[allow(missing_docs)]
#[derive(Debug, Clone)]
pub struct SourceMapDiff {
    pub added: usize,
    pub removed: usize,
    pub modified: usize,
}
/// An indexed view of a source text: classified entries, precomputed
/// line-start byte offsets, the text itself, and any explicitly
/// supplied semantic tokens.
pub struct SourceMap {
    entries: Vec<SourceEntry>,
    line_offsets: Vec<usize>,
    source: String,
    semantic_tokens: Vec<SemanticToken>,
}
impl SourceMap {
    /// Builds an empty map over `source`, precomputing line-start offsets.
    #[allow(dead_code)]
    pub fn new(source: &str) -> Self {
        let line_offsets = Self::compute_line_offsets(source);
        Self {
            entries: Vec::new(),
            line_offsets,
            source: source.to_string(),
            semantic_tokens: Vec::new(),
        }
    }
    /// The full source text.
    #[allow(dead_code)]
    pub fn source(&self) -> &str {
        &self.source
    }
    /// All recorded entries.
    #[allow(dead_code)]
    pub fn entries(&self) -> &[SourceEntry] {
        &self.entries
    }
    /// Number of lines (one offset is recorded per line start, so this
    /// is at least 1 even for empty text).
    #[allow(dead_code)]
    pub fn line_count(&self) -> usize {
        self.line_offsets.len()
    }
    /// Converts a byte offset into a 1-based (line, column) pair.
    /// Offsets beyond the end of the text resolve onto the last line.
    #[allow(dead_code)]
    pub fn offset_to_position(&self, offset: usize) -> (usize, usize) {
        // Find the last line start <= offset: an exact hit names that
        // line; otherwise binary_search yields the insertion point, and
        // the containing line is the one just before it.
        let line_idx = match self.line_offsets.binary_search(&offset) {
            Ok(idx) => idx,
            Err(idx) => {
                if idx == 0 {
                    0
                } else {
                    idx - 1
                }
            }
        };
        let line_start = self.line_offsets[line_idx];
        let col = offset.saturating_sub(line_start);
        // Both coordinates are reported 1-based.
        (line_idx + 1, col + 1)
    }
    /// Converts a 1-based (line, column) pair into a byte offset.
    /// Out-of-range lines map to the end of the source; columns are
    /// clamped to the line's length.
    #[allow(dead_code)]
    pub fn position_to_offset(&self, line: usize, col: usize) -> usize {
        if line == 0 || line > self.line_offsets.len() {
            return self.source.len();
        }
        let line_start = self.line_offsets[line - 1];
        // Columns are 1-based; treat 0 defensively as the line start.
        let col_offset = if col > 0 { col - 1 } else { 0 };
        let max_col = self.line_length(line);
        line_start + col_offset.min(max_col)
    }
    /// Text of the 1-based line `line`, without its trailing newline
    /// (and carriage return); "" for out-of-range lines.
    #[allow(dead_code)]
    pub fn line_content(&self, line: usize) -> &str {
        if line == 0 || line > self.line_offsets.len() {
            return "";
        }
        let start = self.line_offsets[line - 1];
        // The line ends where the next one starts, or at end of source
        // for the final line.
        let end = if line < self.line_offsets.len() {
            self.line_offsets[line]
        } else {
            self.source.len()
        };
        let text = &self.source[start..end];
        text.trim_end_matches('\n').trim_end_matches('\r')
    }
    /// Byte length of the 1-based line `line`, excluding the terminator.
    #[allow(dead_code)]
    fn line_length(&self, line: usize) -> usize {
        self.line_content(line).len()
    }
    /// The innermost entry covering the 1-based (line, col) position:
    /// among all entries whose span contains the position, the one with
    /// the smallest span wins (first seen on ties).
    #[allow(dead_code)]
    pub fn entry_at(&self, line: usize, col: usize) -> Option<&SourceEntry> {
        let offset = self.position_to_offset(line, col);
        let mut best: Option<&SourceEntry> = None;
        for entry in &self.entries {
            if entry.contains_offset(offset) {
                match best {
                    None => best = Some(entry),
                    Some(prev) => {
                        // Prefer the tighter (smaller) span.
                        let prev_size = prev.span.end - prev.span.start;
                        let curr_size = entry.span.end - entry.span.start;
                        if curr_size < prev_size {
                            best = Some(entry);
                        }
                    }
                }
            }
        }
        best
    }
    /// All entries overlapping the range from `start.start` to `end.end`
    /// (any partial overlap counts).
    #[allow(dead_code)]
    pub fn entries_in_range(&self, start: &Span, end: &Span) -> Vec<&SourceEntry> {
        let range_start = start.start;
        let range_end = end.end;
        self.entries
            .iter()
            .filter(|e| e.span.start < range_end && e.span.end > range_start)
            .collect()
    }
    /// All `Definition` entries.
    #[allow(dead_code)]
    pub fn definitions(&self) -> Vec<&SourceEntry> {
        self.entries
            .iter()
            .filter(|e| e.kind == EntryKind::Definition)
            .collect()
    }
    /// All `Reference` entries whose name is exactly `name`.
    #[allow(dead_code)]
    pub fn references_to(&self, name: &str) -> Vec<&SourceEntry> {
        self.entries
            .iter()
            .filter(|e| e.kind == EntryKind::Reference && e.name.as_deref() == Some(name))
            .collect()
    }
    /// All entries of the given kind.
    #[allow(dead_code)]
    pub fn entries_of_kind(&self, kind: &EntryKind) -> Vec<&SourceEntry> {
        self.entries.iter().filter(|e| &e.kind == kind).collect()
    }
    /// Hover data for the named entry at (line, col): its name, kind,
    /// type info, the doc comment attached to its definition (if any),
    /// and the definition span. Returns `None` for positions with no
    /// entry or for unnamed entries.
    #[allow(dead_code)]
    pub fn hover_info(&self, line: usize, col: usize) -> Option<HoverInfo> {
        let entry = self.entry_at(line, col)?;
        let name = entry.name.clone()?;
        // For a reference, resolve to the first matching definition; a
        // definition documents itself; other kinds have no definition.
        let definition_span = if entry.kind == EntryKind::Reference {
            self.entries
                .iter()
                .find(|e| e.kind == EntryKind::Definition && e.name.as_deref() == Some(&name))
                .map(|e| e.span.clone())
        } else if entry.kind == EntryKind::Definition {
            Some(entry.span.clone())
        } else {
            None
        };
        let doc = definition_span
            .as_ref()
            .and_then(|def_span| self.doc_for_definition(def_span.start));
        Some(HoverInfo {
            name,
            kind: entry.kind.clone(),
            ty: entry.ty_info.clone(),
            doc,
            definition_span,
        })
    }
    /// The text of the doc comment immediately preceding the definition
    /// starting at `def_start`, if any.
    ///
    /// The nearest doc comment ending before the definition qualifies
    /// only when nothing substantive sits between them: any entry other
    /// than a comment, doc comment, or keyword in that gap disqualifies
    /// it (the doc comment belongs to that other item instead).
    fn doc_for_definition(&self, def_start: usize) -> Option<String> {
        let best = self
            .entries
            .iter()
            .filter(|e| e.kind == EntryKind::DocComment && e.span.end <= def_start)
            .max_by_key(|e| e.span.end)?;
        let doc_end = best.span.end;
        let intervening_non_doc = self.entries.iter().any(|e| {
            e.span.start >= doc_end
                && e.span.start < def_start
                && !matches!(
                    e.kind,
                    EntryKind::DocComment | EntryKind::Comment | EntryKind::Keyword
                )
        });
        if intervening_non_doc {
            None
        } else {
            // Doc-comment text is stored in the entry's `name` field
            // (see `SourceMapBuilder::add_doc_comment`).
            best.name.clone()
        }
    }
    /// Semantic tokens for highlighting. Explicitly supplied tokens take
    /// precedence; otherwise tokens are derived from the entries via the
    /// kind→type/modifier tables below.
    #[allow(dead_code)]
    pub fn semantic_tokens(&self) -> Vec<SemanticToken> {
        if !self.semantic_tokens.is_empty() {
            return self.semantic_tokens.clone();
        }
        self.entries
            .iter()
            .map(|entry| {
                let token_type = Self::entry_kind_to_token_type(&entry.kind);
                let modifiers = Self::entry_kind_to_modifiers(&entry.kind);
                SemanticToken {
                    span: entry.span.clone(),
                    token_type,
                    modifiers,
                }
            })
            .collect()
    }
    /// The source text covered by `span`, with both ends clamped to the
    /// source length.
    #[allow(dead_code)]
    pub fn span_text(&self, span: &Span) -> &str {
        let start = span.start.min(self.source.len());
        let end = span.end.min(self.source.len());
        &self.source[start..end]
    }
    /// Span of the definition for the named entry at (line, col): the
    /// entry's own span when it is itself a definition, otherwise the
    /// first definition entry with the same name.
    #[allow(dead_code)]
    pub fn go_to_definition(&self, line: usize, col: usize) -> Option<Span> {
        let entry = self.entry_at(line, col)?;
        let name = entry.name.as_deref()?;
        if entry.kind == EntryKind::Definition {
            return Some(entry.span.clone());
        }
        self.entries
            .iter()
            .find(|e| e.kind == EntryKind::Definition && e.name.as_deref() == Some(name))
            .map(|e| e.span.clone())
    }
    /// All definition and reference entries sharing the name of the
    /// entry at (line, col); empty when there is no named entry there.
    #[allow(dead_code)]
    pub fn find_all_occurrences(&self, line: usize, col: usize) -> Vec<&SourceEntry> {
        let entry = match self.entry_at(line, col) {
            Some(e) => e,
            None => return Vec::new(),
        };
        let name = match &entry.name {
            Some(n) => n.clone(),
            None => return Vec::new(),
        };
        self.entries
            .iter()
            .filter(|e| {
                e.name.as_deref() == Some(&name)
                    && matches!(e.kind, EntryKind::Definition | EntryKind::Reference)
            })
            .collect()
    }
    /// Byte offset of each line start: offset 0, then the byte after
    /// every '\n' (byte offsets, via `char_indices`).
    fn compute_line_offsets(source: &str) -> Vec<usize> {
        let mut offsets = vec![0usize];
        for (i, ch) in source.char_indices() {
            if ch == '\n' {
                offsets.push(i + 1);
            }
        }
        offsets
    }
    /// Default mapping from entry kind to semantic token type, used when
    /// no explicit semantic tokens were provided.
    fn entry_kind_to_token_type(kind: &EntryKind) -> SemanticTokenType {
        match kind {
            EntryKind::Definition => SemanticTokenType::Function,
            EntryKind::Reference => SemanticTokenType::Variable,
            EntryKind::TypeAnnotation => SemanticTokenType::Type,
            EntryKind::Binder => SemanticTokenType::Variable,
            EntryKind::Constructor => SemanticTokenType::Constructor,
            EntryKind::Pattern => SemanticTokenType::Variable,
            EntryKind::Tactic => SemanticTokenType::Tactic,
            EntryKind::Keyword => SemanticTokenType::Keyword,
            EntryKind::Literal => SemanticTokenType::Number,
            EntryKind::Comment => SemanticTokenType::Comment,
            EntryKind::DocComment => SemanticTokenType::Comment,
            EntryKind::Operator => SemanticTokenType::Operator,
        }
    }
    /// Default modifiers per entry kind: definitions and doc comments
    /// carry a modifier, everything else none.
    fn entry_kind_to_modifiers(kind: &EntryKind) -> Vec<SemanticModifier> {
        match kind {
            EntryKind::Definition => vec![SemanticModifier::Definition],
            EntryKind::DocComment => vec![SemanticModifier::Documentation],
            _ => Vec::new(),
        }
    }
}
/// Maps identifier names to the spans where they are referenced (used).
#[allow(dead_code)]
#[derive(Debug, Clone, Default)]
pub struct ReferenceIndex {
    entries: std::collections::HashMap<String, Vec<Span>>,
}
#[allow(dead_code)]
impl ReferenceIndex {
    /// Creates an empty index.
    pub fn new() -> Self {
        Self::default()
    }
    /// Appends a use-site span under `name`.
    pub fn record(&mut self, name: impl Into<String>, span: Span) {
        let uses = self.entries.entry(name.into()).or_default();
        uses.push(span);
    }
    /// All recorded use sites for `name`; an empty slice if unknown.
    pub fn uses_of(&self, name: &str) -> &[Span] {
        match self.entries.get(name) {
            Some(spans) => spans.as_slice(),
            None => &[],
        }
    }
    /// Every name with at least one reference, in hash-map order.
    pub fn referenced_names(&self) -> Vec<&str> {
        self.entries.keys().map(String::as_str).collect()
    }
    /// Total number of recorded references across all names.
    pub fn total_references(&self) -> usize {
        self.entries.values().map(Vec::len).sum()
    }
}
/// Incrementally collects entries and semantic tokens for a source
/// text; `build` finalizes them into a `SourceMap`.
pub struct SourceMapBuilder {
    entries: Vec<SourceEntry>,
    semantic_tokens: Vec<SemanticToken>,
    source: String,
}
impl SourceMapBuilder {
#[allow(dead_code)]
pub fn new(source: &str) -> Self {
Self {
entries: Vec::new(),
semantic_tokens: Vec::new(),
source: source.to_string(),
}
}
#[allow(dead_code)]
pub fn add_entry(&mut self, span: Span, kind: EntryKind, name: Option<String>) {
self.entries.push(SourceEntry {
span,
kind,
name,
ty_info: None,
});
}
#[allow(dead_code)]
pub fn add_entry_with_type(
&mut self,
span: Span,
kind: EntryKind,
name: Option<String>,
ty_info: String,
) {
self.entries.push(SourceEntry {
span,
kind,
name,
ty_info: Some(ty_info),
});
}
#[allow(dead_code)]
pub fn add_definition(&mut self, span: Span, name: &str) {
self.entries.push(SourceEntry {
span,
kind: EntryKind::Definition,
name: Some(name.to_string()),
ty_info: None,
});
}
#[allow(dead_code)]
pub fn add_definition_with_type(&mut self, span: Span, name: &str, ty_info: &str) {
self.entries.push(SourceEntry {
span,
kind: EntryKind::Definition,
name: Some(name.to_string()),
ty_info: Some(ty_info.to_string()),
});
}
#[allow(dead_code)]
pub fn add_reference(&mut self, span: Span, name: &str) {
self.entries.push(SourceEntry {
span,
kind: EntryKind::Reference,
name: Some(name.to_string()),
ty_info: None,
});
}
#[allow(dead_code)]
pub fn add_binder(&mut self, span: Span, name: &str) {
self.entries.push(SourceEntry {
span,
kind: EntryKind::Binder,
name: Some(name.to_string()),
ty_info: None,
});
}
#[allow(dead_code)]
pub fn add_constructor(&mut self, span: Span, name: &str) {
self.entries.push(SourceEntry {
span,
kind: EntryKind::Constructor,
name: Some(name.to_string()),
ty_info: None,
});
}
#[allow(dead_code)]
pub fn add_keyword(&mut self, span: Span) {
self.entries.push(SourceEntry {
span,
kind: EntryKind::Keyword,
name: None,
ty_info: None,
});
}
#[allow(dead_code)]
pub fn add_literal(&mut self, span: Span) {
self.entries.push(SourceEntry {
span,
kind: EntryKind::Literal,
name: None,
ty_info: None,
});
}
#[allow(dead_code)]
pub fn add_operator(&mut self, span: Span, symbol: &str) {
self.entries.push(SourceEntry {
span,
kind: EntryKind::Operator,
name: Some(symbol.to_string()),
ty_info: None,
});
}
#[allow(dead_code)]
pub fn add_comment(&mut self, span: Span) {
self.entries.push(SourceEntry {
span,
kind: EntryKind::Comment,
name: None,
ty_info: None,
});
}
#[allow(dead_code)]
pub fn add_doc_comment(&mut self, span: Span, text: &str) {
self.entries.push(SourceEntry {
span,
kind: EntryKind::DocComment,
name: Some(text.to_string()),
ty_info: None,
});
}
#[allow(dead_code)]
pub fn add_tactic(&mut self, span: Span, name: &str) {
self.entries.push(SourceEntry {
span,
kind: EntryKind::Tactic,
name: Some(name.to_string()),
ty_info: None,
});
}
#[allow(dead_code)]
pub fn add_pattern(&mut self, span: Span, name: Option<&str>) {
self.entries.push(SourceEntry {
span,
kind: EntryKind::Pattern,
name: name.map(|n| n.to_string()),
ty_info: None,
});
}
#[allow(dead_code)]
pub fn add_type_annotation(&mut self, span: Span) {
self.entries.push(SourceEntry {
span,
kind: EntryKind::TypeAnnotation,
name: None,
ty_info: None,
});
}
#[allow(dead_code)]
pub fn add_semantic_token(&mut self, token: SemanticToken) {
self.semantic_tokens.push(token);
}
#[allow(dead_code)]
pub fn entry_count(&self) -> usize {
self.entries.len()
}
#[allow(dead_code)]
pub fn build(mut self) -> SourceMap {
self.entries.sort_by(|a, b| {
a.span.start.cmp(&b.span.start).then_with(|| {
let a_size = a.span.end - a.span.start;
let b_size = b.span.end - b.span.start;
a_size.cmp(&b_size)
})
});
self.semantic_tokens.sort_by_key(|a| a.span.start);
let line_offsets = SourceMap::compute_line_offsets(&self.source);
SourceMap {
entries: self.entries,
line_offsets,
source: self.source,
semantic_tokens: self.semantic_tokens,
}
}
}
/// Hover payload assembled by `SourceMap::hover_info`: the entry's name
/// and kind, plus optional type info, doc comment text, and the span of
/// the resolved definition.
#[derive(Clone, Debug, PartialEq)]
pub struct HoverInfo {
    pub name: String,
    pub kind: EntryKind,
    pub ty: Option<String>,
    pub doc: Option<String>,
    pub definition_span: Option<Span>,
}
/// Two `BidiMapper`s composed end to end: space A maps through `first`
/// into B, then through `second` into C.
#[allow(dead_code)]
#[allow(missing_docs)]
pub struct MapChain {
    pub first: BidiMapper,
    pub second: BidiMapper,
}

impl MapChain {
    /// Chains two mappers together.
    #[allow(dead_code)]
    pub fn new(first: BidiMapper, second: BidiMapper) -> Self {
        MapChain { first, second }
    }
    /// Maps an offset from space A through B into C; `None` when either
    /// hop fails to resolve.
    #[allow(dead_code)]
    pub fn a_to_c(&self, a: usize) -> Option<usize> {
        self.first.to_gen(a).and_then(|b| self.second.to_gen(b))
    }
    /// Maps an offset from space C back through B into A; `None` when
    /// either hop fails to resolve.
    #[allow(dead_code)]
    pub fn c_to_a(&self, c: usize) -> Option<usize> {
        self.second.to_orig(c).and_then(|b| self.first.to_orig(b))
    }
}
/// Kind of a `DocumentSymbol` in the document outline.
#[allow(dead_code)]
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum SymbolKind {
    Theorem,
    Definition,
    Axiom,
    Inductive,
    Structure,
    Class,
    Instance,
    Namespace,
    Section,
    Constructor,
    Field,
    Variable,
}