//! A filter widget: a text field whose query is matched against slash-separated paths,
//! with support for highlighting the matched sub-strings.

use std::ops::Range;

use egui::{Color32, NumExt as _, Widget as _};
use itertools::Itertools as _;
use smallvec::SmallVec;

use crate::{UiExt as _, icons, list_item};
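
/// State of an active filter session.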
#[derive(Debug, Clone)]
struct InnerState {
    /// The current filter query string.
    filter_query: String,

    /// Random identifier of this filter session, regenerated each time the filter is activated.
    session_id: egui::Id,
}
impl Default for InnerState {
fn default() -> Self {
let mut random_bytes = [0u8; 8];
getrandom::fill(&mut random_bytes).expect("Couldn't get random bytes");
Self {
filter_query: String::new(),
session_id: egui::Id::new(random_bytes),
}
}
}
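
/// State of a filter widget.
///
/// The filter is either inactive or active with a (possibly empty) query. Display it with
/// [`Self::section_title_ui`] (a section title with a search button) or
/// [`Self::search_field_ui`] (a standalone search field), and use [`Self::filter`] to obtain a
/// [`FilterMatcher`] for the current query.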
#[derive(Debug, Clone, Default)]
pub struct FilterState {
    /// The current filter state, or `None` if the filter is inactive.
    inner_state: Option<InnerState>,

    /// Should the text field request focus the next time it is drawn?
    request_focus: bool,
}
impl FilterState {
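    /// Activate the filter with the given query and request focus for the text field.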
pub fn activate(&mut self, query: &str) {
self.inner_state = Some(InnerState {
filter_query: query.to_owned(),
..Default::default()
});
self.request_focus = true;
}
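
    /// Is the filter currently active?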
pub fn is_active(&self) -> bool {
self.inner_state.is_some()
}
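
    /// The current filter query, if the filter is active.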
pub fn query(&self) -> Option<&str> {
self.inner_state
.as_ref()
.map(|state| state.filter_query.as_str())
}
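
    /// The id of the current filter session, if the filter is active and the query is non-empty.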
pub fn session_id(&self) -> Option<egui::Id> {
let state = self.inner_state.as_ref()?;
(!state.filter_query.is_empty()).then_some(state.session_id)
}
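
    /// Return a [`FilterMatcher`] for the current query.
    ///
    /// The matcher matches everything when the filter is inactive or the query is empty.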
pub fn filter(&self) -> FilterMatcher {
FilterMatcher::new(self.query())
}
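
    /// Display the filter widget as a section title.
    ///
    /// Shows the section title together with a search/close action button. While the filter is
    /// active, the title is replaced by the query text field, and pressing `Esc` deactivates the
    /// filter. Returns the response of the title label when it is displayed (i.e. when the filter
    /// is inactive).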
pub fn section_title_ui(
&mut self,
ui: &mut egui::Ui,
section_title: impl Into<egui::WidgetText>,
) -> Option<egui::Response> {
let mut toggle_search_clicked = false;
let is_searching = self.inner_state.is_some();
let section_title = section_title.into();
let galley = section_title.into_galley(
ui,
Some(egui::TextWrapMode::Extend),
f32::INFINITY,
egui::FontSelection::default(),
);
let text_width = galley.size().x;
let mut title_response = None;
list_item::list_item_scope(ui, ui.next_auto_id(), |ui| {
ui.list_item()
.interactive(false)
.force_background(ui.tokens().section_header_color)
.show_flat(
ui,
list_item::CustomContent::new(|ui, _| {
                        // Deactivate the filter if the user presses escape while it is active.
                        if self.inner_state.is_some()
&& ui.input_mut(|i| {
i.consume_key(egui::Modifiers::NONE, egui::Key::Escape)
})
{
self.inner_state = None;
}
if let Some(inner_state) = self.inner_state.as_mut() {
                            // Make the text edit fill the available width.
                            ui.spacing_mut().text_edit_width =
                                (ui.max_rect().width() - 10.0).at_least(0.0);

                            // Draw a thin, non-expanding stroke around the text edit, whether or
                            // not it has focus.
                            ui.visuals_mut().widgets.hovered.expansion = 0.0;
                            ui.visuals_mut().widgets.active.expansion = 0.0;
                            ui.visuals_mut().widgets.open.expansion = 0.0;
                            ui.visuals_mut().widgets.active.fg_stroke.width = 1.0;
                            ui.visuals_mut().selection.stroke.width = 1.0;
let response =
egui::TextEdit::singleline(&mut inner_state.filter_query)
.lock_focus(true)
.ui(ui);
if self.request_focus {
self.request_focus = false;
response.request_focus();
}
} else {
title_response = Some(ui.label(galley));
}
})
.with_content_width(text_width)
.action_button(
if is_searching {
&icons::CLOSE
} else {
&icons::SEARCH
},
if is_searching {
"Stop search"
} else {
"Search"
},
|| {
toggle_search_clicked = true;
},
),
);
});
        // The action button toggles the filter on and off.
        if toggle_search_clicked {
if self.inner_state.is_none() {
self.activate("");
} else {
self.inner_state = None;
}
}
title_response
}
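
    /// Display the filter as a standalone search field.
    ///
    /// The filter state is created on demand while this UI is shown and dropped again as soon as
    /// the query is empty.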
pub fn search_field_ui(&mut self, ui: &mut egui::Ui) {
        // Lazily create the filter state while this UI is shown.
        let inner_state = self.inner_state.get_or_insert_with(Default::default);
let textedit_id = ui.id().with("textedit");
        // Read the text edit's response (from last frame) to style the surrounding frame.
        let response = ui.ctx().read_response(textedit_id);
let visuals = response
.as_ref()
.map(|r| ui.style().interact(r))
.unwrap_or(&ui.visuals().widgets.inactive);
let selection_stroke = ui.visuals().selection.stroke;
let stroke = if response.is_some_and(|r| r.has_focus()) {
selection_stroke
} else {
let mut stroke = visuals.bg_stroke;
stroke.width = selection_stroke.width;
stroke
};
egui::Frame::new()
.inner_margin(egui::Margin::symmetric(3, 2))
.fill(ui.visuals().extreme_bg_color)
.stroke(stroke)
.corner_radius(visuals.corner_radius)
.show(ui, |ui| {
ui.horizontal(|ui| {
ui.set_height(19.0);
ui.add_enabled_ui(false, |ui| ui.small_icon_button(&icons::SEARCH, "Search"));
ui.with_layout(egui::Layout::right_to_left(egui::Align::Center), |ui| {
if !inner_state.filter_query.is_empty()
&& ui.small_icon_button(&icons::CLOSE, "Close").clicked()
{
*inner_state = Default::default();
}
ui.add(
egui::TextEdit::singleline(&mut inner_state.filter_query)
.id(textedit_id)
.frame(false)
.hint_text("Search for entity…")
.desired_width(ui.available_width()),
)
});
});
});
        // An empty query is treated as an inactive filter, so drop the state entirely.
        if self
.inner_state
.as_ref()
.is_some_and(|state| state.filter_query.is_empty())
{
self.inner_state = None;
}
}
}
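
/// Matches a filter query against paths (e.g. the parts of an entity path).
///
/// The query is split at whitespace into [`Keyword`]s; a path matches only if *every* keyword
/// matches it (see [`Keyword`] for the rules of matching a single keyword). Matching is
/// case-insensitive.
///
/// Illustration, mirroring the tests at the bottom of this file (`new` is crate-private, so this
/// is not a runnable doctest):
///
/// ```ignore
/// let matcher = FilterMatcher::new(Some("ab/cd"));
/// assert!(matcher.match_path(["xxxAb", "cDaB", "Cdxxx"]).is_some());
/// assert!(matcher.match_path(["nothing", "here"]).is_none());
/// ```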
pub struct FilterMatcher {
    /// The keywords to match, or `None` if the filter is inactive (an empty query matches
    /// everything).
    keywords: Option<Vec<Keyword>>,
}
impl FilterMatcher {
fn new(query: Option<&str>) -> Self {
Self {
keywords: query.map(|s| s.split_whitespace().map(Keyword::new).collect()),
}
}
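
    /// Is the filter active, i.e. was it created from an actual (possibly empty) query?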
pub fn is_active(&self) -> bool {
self.keywords.is_some()
}
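
    /// Match the query against a path provided as an iterator of parts.
    ///
    /// Returns `None` if the path does not match. Otherwise, returns the ranges matched in each
    /// part, keyed by part index. An inactive or empty query matches everything and yields empty
    /// ranges.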
pub fn match_path<'a>(&self, path: impl IntoIterator<Item = &'a str>) -> Option<PathRanges> {
match self.keywords.as_deref() {
None | Some([]) => Some(PathRanges::default()),
Some(keywords) => {
let path = path.into_iter().map(str::to_lowercase).collect_vec();
let all_ranges = keywords
.iter()
.map(|keyword| keyword.match_path(path.iter().map(String::as_str)))
.collect_vec();
if all_ranges.iter().any(|ranges| ranges.is_empty()) {
None
} else {
let mut result = PathRanges::default();
for ranges in all_ranges {
result.merge(ranges);
}
Some(result)
}
}
}
}
}
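
/// A single keyword from a filter query.
///
/// Keywords are obtained by splitting the query at whitespace and are matched case-insensitively
/// against a path as follows:
/// - A keyword without `/` matches anywhere within a single path part.
/// - A keyword containing `/` is split into parts which must match consecutive path parts: the
///   first part must match the end of its path part, intermediate parts must match their path
///   parts exactly, and the last part must match the beginning of its path part.
/// - A leading `/` anchors the first part to the beginning of its path part, and a trailing `/`
///   anchors the last part to the end of its path part.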
#[derive(Debug, Clone, PartialEq)]
struct Keyword {
parts: Vec<String>,
match_from_first_part_start: bool,
match_to_last_part_end: bool,
}
impl Keyword {
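    /// Parse a keyword from a single token of the query (non-empty and free of whitespace).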
fn new(mut keyword: &str) -> Self {
debug_assert!(!keyword.is_empty());
debug_assert!(!keyword.contains(char::is_whitespace));
let match_from_first_part_start = if let Some(k) = keyword.strip_prefix('/') {
keyword = k;
true
} else {
false
};
let match_to_last_part_end = if let Some(k) = keyword.strip_suffix('/') {
keyword = k;
true
} else {
false
};
let parts = keyword.split('/').map(str::to_lowercase).collect();
Self {
parts,
match_from_first_part_start,
match_to_last_part_end,
}
}
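
    /// Match this keyword against a path of already-lowercased parts.
    ///
    /// The match may start at any path part (as long as enough parts remain for all keyword
    /// parts); all successful matches are merged into the returned [`PathRanges`], which is empty
    /// if the keyword did not match.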
fn match_path<'a>(&self, lowercase_path: impl ExactSizeIterator<Item = &'a str>) -> PathRanges {
let mut state_machines = vec![];
let path_length = lowercase_path.len();
for (path_part_index, path_part) in lowercase_path.into_iter().enumerate() {
            // Only spawn a new state machine if enough path parts remain for all keyword parts.
            if self.parts.len() <= (path_length - path_part_index) {
state_machines.push(MatchStateMachine::new(self));
}
for state_machine in &mut state_machines {
state_machine.process_next_path_part(path_part, path_part_index);
}
}
state_machines
.into_iter()
.filter_map(|state_machine| {
if state_machine.did_match() {
Some(state_machine.ranges)
} else {
None
}
})
.fold(PathRanges::default(), |mut acc, ranges| {
acc.merge(ranges);
acc
})
}
}
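
/// The character ranges matched within each part of a path, keyed by the part's index.
///
/// Ranges may be accumulated in any order and may overlap; they are merged and sorted when
/// retrieved with [`Self::remove`].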
#[derive(Debug, Default)]
pub struct PathRanges {
ranges: ahash::HashMap<usize, Vec<Range<usize>>>,
}
impl PathRanges {
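    /// Merge the ranges of `other` into `self`.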
pub fn merge(&mut self, other: Self) {
        #[expect(clippy::iter_over_hash_type)]
        for (part_index, part_ranges) in other.ranges {
self.ranges
.entry(part_index)
.or_default()
.extend(part_ranges);
}
}
pub fn extend(&mut self, part_index: usize, ranges: impl IntoIterator<Item = Range<usize>>) {
self.ranges.entry(part_index).or_default().extend(ranges);
}
pub fn push(&mut self, part_index: usize, range: Range<usize>) {
self.ranges.entry(part_index).or_default().push(range);
}
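
    /// Remove and return the ranges recorded for `part_index`, with overlapping and contiguous
    /// ranges coalesced and the result sorted by start.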
pub fn remove(
&mut self,
part_index: usize,
) -> Option<impl Iterator<Item = Range<usize>> + use<>> {
self.ranges.remove(&part_index).map(MergeRanges::new)
}
pub fn is_empty(&self) -> bool {
self.ranges.is_empty()
}
pub fn clear(&mut self) {
self.ranges.clear();
}
#[cfg(test)]
fn into_vec(mut self, length: usize) -> Vec<Vec<Range<usize>>> {
let result = (0..length)
.map(|i| {
self.remove(i)
.map(|iter| iter.collect_vec())
.unwrap_or_default()
})
.collect();
debug_assert!(self.is_empty());
result
}
}
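
/// Progress of a [`MatchStateMachine`].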
#[derive(Debug)]
enum MatchState {
InProgress,
Match,
NoMatch,
}
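
/// Matches a single [`Keyword`] against a path, one part at a time, from a given starting part.
///
/// [`Keyword::match_path`] spawns one state machine per candidate starting position and merges
/// the results.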
#[derive(Debug)]
struct MatchStateMachine<'a> {
keyword: &'a Keyword,
current_keyword_part: usize,
state: MatchState,
ranges: PathRanges,
}
impl<'a> MatchStateMachine<'a> {
fn new(keyword: &'a Keyword) -> Self {
Self {
keyword,
current_keyword_part: 0,
state: MatchState::InProgress,
ranges: Default::default(),
}
}
fn did_match(&self) -> bool {
matches!(self.state, MatchState::Match)
}
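
    /// Feed the next path part to the state machine, recording matched ranges and updating the
    /// match state.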
fn process_next_path_part(&mut self, part: &str, part_index: usize) {
if matches!(self.state, MatchState::Match | MatchState::NoMatch) {
return;
}
let keyword_part = &self.keyword.parts[self.current_keyword_part];
let has_part_after = self.current_keyword_part < self.keyword.parts.len() - 1;
let has_part_before = 0 < self.current_keyword_part;
let must_match_from_start = has_part_before || self.keyword.match_from_first_part_start;
let must_match_to_end = has_part_after || self.keyword.match_to_last_part_end;
let mut ranges = SmallVec::<[Range<usize>; 2]>::new();
match (must_match_from_start, must_match_to_end) {
(false, false) => {
ranges.extend(single_keyword_matches(part, keyword_part));
}
(true, false) => {
if part.starts_with(keyword_part) {
ranges.push(0..keyword_part.len());
}
}
(false, true) => {
if part.ends_with(keyword_part) {
ranges.push(part.len() - keyword_part.len()..part.len());
}
}
(true, true) => {
if part == keyword_part {
ranges.push(0..part.len());
}
}
}
if ranges.is_empty() {
self.state = MatchState::NoMatch;
} else {
self.ranges.extend(part_index, ranges);
self.current_keyword_part += 1;
}
if self.current_keyword_part == self.keyword.parts.len() {
self.state = MatchState::Match;
}
}
}
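
/// Lay out `text` with the provided matched ranges highlighted using the selection background
/// color.
///
/// The ranges must be sorted and non-overlapping, as produced by [`PathRanges::remove`]. If
/// `text_color` is `None`, [`Color32::PLACEHOLDER`] is used so that the final color can be
/// determined by the widget displaying the text.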
pub fn format_matching_text(
ctx: &egui::Context,
text: &str,
match_iter: impl Iterator<Item = Range<usize>>,
text_color: Option<egui::Color32>,
) -> egui::WidgetText {
let mut current = 0;
let mut job = egui::text::LayoutJob::default();
let color = text_color.unwrap_or(Color32::PLACEHOLDER);
for Range { start, end } in match_iter {
if current < start {
job.append(
&text[current..start],
0.0,
egui::TextFormat {
font_id: egui::TextStyle::Body.resolve(&ctx.style()),
color,
..Default::default()
},
);
}
job.append(
&text[start..end],
0.0,
egui::TextFormat {
font_id: egui::TextStyle::Body.resolve(&ctx.style()),
color,
background: ctx.style().visuals.selection.bg_fill,
..Default::default()
},
);
current = end;
}
if current < text.len() {
job.append(
&text[current..],
0.0,
egui::TextFormat {
font_id: egui::TextStyle::Body.resolve(&ctx.style()),
color,
..Default::default()
},
);
}
job.into()
}
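
/// Find all non-overlapping occurrences of `keyword` in `lower_case_text`, scanning left to
/// right, and return their byte ranges.
///
/// Both strings are expected to be lowercase already.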
fn single_keyword_matches<'a>(
lower_case_text: &'a str,
keyword: &'a str,
) -> impl Iterator<Item = Range<usize>> + 'a {
let keyword_len = keyword.len();
let mut start = 0;
std::iter::from_fn(move || {
let index = lower_case_text[start..].find(keyword)?;
let start_index = start + index;
start = start_index + keyword_len;
Some(start_index..(start_index + keyword_len))
})
}
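
/// Iterator that merges a collection of ranges: overlapping and contiguous ranges are coalesced,
/// and the results are yielded sorted by start.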
struct MergeRanges {
ranges: Vec<Range<usize>>,
current: Option<Range<usize>>,
}
impl MergeRanges {
fn new(mut ranges: Vec<Range<usize>>) -> Self {
        // Sort by descending start so that `pop()` yields ranges in ascending order of start.
        ranges.sort_by_key(|r| usize::MAX - r.start);
let current = ranges.pop();
Self { ranges, current }
}
}
impl Iterator for MergeRanges {
type Item = Range<usize>;
fn next(&mut self) -> Option<Self::Item> {
let mut current = self.current.take()?;
while let Some(next) = self.ranges.pop() {
if next.start <= current.end {
current.end = current.end.max(next.end);
} else {
self.current = Some(next);
return Some(current);
}
}
Some(current)
}
}
#[cfg(test)]
mod test {
#![expect(clippy::single_range_in_vec_init)]
use super::*;
#[test]
fn test_merge_range() {
assert_eq!(MergeRanges::new(vec![0..10, 5..15]).collect_vec(), [0..15]);
assert_eq!(MergeRanges::new(vec![5..15, 0..10]).collect_vec(), [0..15]);
assert_eq!(
MergeRanges::new(vec![0..4, 3..4, 1..2, 5..15, 5..15, 0..10]).collect_vec(),
[0..15]
);
assert_eq!(MergeRanges::new(vec![0..11, 11..15]).collect_vec(), [0..15]);
assert_eq!(
MergeRanges::new(vec![0..5, 11..15]).collect_vec(),
[0..5, 11..15]
);
assert_eq!(
MergeRanges::new(vec![11..15, 0..5]).collect_vec(),
[0..5, 11..15]
);
assert_eq!(
MergeRanges::new(vec![0..5, 20..30, 3..6, 25..27, 30..35]).collect_vec(),
[0..6, 20..35]
);
}
#[test]
fn test_keyword() {
assert_eq!(
Keyword::new("a"),
Keyword {
parts: vec!["a".into()],
match_from_first_part_start: false,
match_to_last_part_end: false
}
);
assert_eq!(
Keyword::new("/a"),
Keyword {
parts: vec!["a".into()],
match_from_first_part_start: true,
match_to_last_part_end: false
}
);
assert_eq!(
Keyword::new("a/"),
Keyword {
parts: vec!["a".into()],
match_from_first_part_start: false,
match_to_last_part_end: true
}
);
assert_eq!(
Keyword::new("/a/"),
Keyword {
parts: vec!["a".into()],
match_from_first_part_start: true,
match_to_last_part_end: true
}
);
assert_eq!(
Keyword::new("a/b"),
Keyword {
parts: vec!["a".into(), "b".into()],
match_from_first_part_start: false,
match_to_last_part_end: false
}
);
assert_eq!(
Keyword::new("a/b/"),
Keyword {
parts: vec!["a".into(), "b".into()],
match_from_first_part_start: false,
match_to_last_part_end: true
}
);
assert_eq!(
Keyword::new("/a/b/c/d"),
Keyword {
parts: vec!["a".into(), "b".into(), "c".into(), "d".into()],
match_from_first_part_start: true,
match_to_last_part_end: false
}
);
}
#[test]
fn test_keyword_match_path() {
fn match_and_normalize(query: &str, lowercase_path: &[&str]) -> Vec<Vec<Range<usize>>> {
Keyword::new(query)
.match_path(lowercase_path.iter().copied())
.into_vec(lowercase_path.len())
}
assert_eq!(match_and_normalize("a", &["a"]), vec![vec![0..1]]);
assert_eq!(match_and_normalize("a", &["aaa"]), vec![vec![0..3]]);
assert_eq!(
match_and_normalize("a/", &["aaa", "aaa"]),
vec![vec![2..3], vec![2..3]]
);
assert_eq!(
match_and_normalize("/a", &["aaa", "aaa"]),
vec![vec![0..1], vec![0..1]]
);
assert_eq!(
match_and_normalize("/a", &["aaa", "bbb"]),
vec![vec![0..1], vec![]]
);
assert_eq!(
match_and_normalize("a/b", &["aaa", "bbb"]),
vec![vec![2..3], vec![0..1]]
);
assert_eq!(
match_and_normalize("a/b/c", &["aaa", "b", "cccc"]),
vec![vec![2..3], vec![0..1], vec![0..1]]
);
assert!(
match_and_normalize("/a/b/c", &["aaa", "b", "cccc"])
.into_iter()
.flatten()
.count()
== 0,
);
assert!(
match_and_normalize("a/b/c/", &["aaa", "b", "cccc"])
.into_iter()
.flatten()
.count()
== 0,
);
assert_eq!(
match_and_normalize("ab/cd", &["xxxab", "cdab", "cdxxx"]),
vec![vec![3..5], vec![0..4], vec![0..2]]
);
assert_eq!(
match_and_normalize("ab/ab", &["xxxab", "ab", "abxxx"]),
vec![vec![3..5], vec![0..2], vec![0..2]]
);
}
#[test]
fn test_match_path() {
fn match_and_normalize(query: &str, path: &[&str]) -> Option<Vec<Vec<Range<usize>>>> {
FilterMatcher::new(Some(query))
.match_path(path.iter().copied())
.map(|ranges| ranges.into_vec(path.len()))
}
assert_eq!(
match_and_normalize("ab/cd", &["xxxAb", "cDaB", "Cdxxx"]),
Some(vec![vec![3..5], vec![0..4], vec![0..2]])
);
assert_eq!(
match_and_normalize("ab/cd xx/", &["xxxAb", "cDaB", "Cdxxx"]),
Some(vec![vec![3..5], vec![0..4], vec![0..2, 3..5]])
);
assert_eq!(
match_and_normalize("ab/cd bla", &["xxxAb", "cDaB", "Cdxxx"]),
None
);
}
}