// cortex_runtime/map/reader.rs
use crate::map::types::*;

5impl SiteMap {
6 pub fn filter(&self, query: &NodeQuery) -> Vec<NodeMatch> {
8 let mut results = Vec::new();
9
10 for (i, node) in self.nodes.iter().enumerate() {
11 if let Some(ref types) = query.page_types {
13 if !types.contains(&node.page_type) {
14 continue;
15 }
16 }
17
18 let features = &self.features[i];
20 let mut skip = false;
21 for range in &query.feature_ranges {
22 if range.dimension >= FEATURE_DIM {
23 continue;
24 }
25 let val = features[range.dimension];
26 if let Some(min) = range.min {
27 if val < min {
28 skip = true;
29 break;
30 }
31 }
32 if let Some(max) = range.max {
33 if val > max {
34 skip = true;
35 break;
36 }
37 }
38 }
39 if skip {
40 continue;
41 }
42
43 if let Some(ref req) = query.require_flags {
45 if node.flags.0 & req.0 != req.0 {
46 continue;
47 }
48 }
49
50 if let Some(ref exc) = query.exclude_flags {
52 if node.flags.0 & exc.0 != 0 {
53 continue;
54 }
55 }
56
57 let mut key_features = Vec::new();
59 for range in &query.feature_ranges {
60 if range.dimension < FEATURE_DIM {
61 key_features.push((range.dimension, features[range.dimension]));
62 }
63 }
64
65 results.push(NodeMatch {
66 index: i as u32,
67 url: self.urls[i].clone(),
68 page_type: node.page_type,
69 confidence: node.confidence as f32 / 255.0,
70 features: key_features,
71 similarity: None,
72 });
73 }
74
75 if let Some(sort_dim) = query.sort_by_feature {
77 if sort_dim < FEATURE_DIM {
78 results.sort_by(|a, b| {
79 let va = self.features[a.index as usize][sort_dim];
80 let vb = self.features[b.index as usize][sort_dim];
81 if query.sort_ascending {
82 va.partial_cmp(&vb).unwrap_or(std::cmp::Ordering::Equal)
83 } else {
84 vb.partial_cmp(&va).unwrap_or(std::cmp::Ordering::Equal)
85 }
86 });
87 }
88 }
89
90 if query.limit > 0 && results.len() > query.limit {
92 results.truncate(query.limit);
93 }
94
95 results
96 }
97
98 pub fn nearest(&self, target: &[f32; FEATURE_DIM], k: usize) -> Vec<NodeMatch> {
100 let target_norm: f32 = target.iter().map(|f| f * f).sum::<f32>().sqrt();
101 if target_norm == 0.0 {
102 return Vec::new();
103 }
104
105 let mut scored: Vec<(u32, f32)> = self
106 .features
107 .iter()
108 .enumerate()
109 .map(|(i, feat)| {
110 let node_norm = self.nodes[i].feature_norm;
111 if node_norm == 0.0 {
112 return (i as u32, -1.0);
113 }
114 let dot: f32 = feat.iter().zip(target.iter()).map(|(a, b)| a * b).sum();
115 let similarity = dot / (node_norm * target_norm);
116 (i as u32, similarity)
117 })
118 .collect();
119
120 scored.sort_by(|a, b| b.1.partial_cmp(&a.1).unwrap_or(std::cmp::Ordering::Equal));
121
122 scored
123 .into_iter()
124 .take(k)
125 .map(|(idx, sim)| NodeMatch {
126 index: idx,
127 url: self.urls[idx as usize].clone(),
128 page_type: self.nodes[idx as usize].page_type,
129 confidence: self.nodes[idx as usize].confidence as f32 / 255.0,
130 features: Vec::new(),
131 similarity: Some(sim),
132 })
133 .collect()
134 }
135
136 pub fn edges_from(&self, node: u32) -> &[EdgeRecord] {
138 let n = node as usize;
139 if n >= self.nodes.len() {
140 return &[];
141 }
142 let start = self.edge_index[n] as usize;
143 let end = self.edge_index[n + 1] as usize;
144 &self.edges[start..end]
145 }
146
147 pub fn node_url(&self, node: u32) -> &str {
149 &self.urls[node as usize]
150 }
151
152 pub fn node_features(&self, node: u32) -> &[f32; FEATURE_DIM] {
154 &self.features[node as usize]
155 }
156
157 pub fn actions_for(&self, node: u32) -> &[ActionRecord] {
159 let n = node as usize;
160 if n >= self.nodes.len() {
161 return &[];
162 }
163 let start = self.action_index[n] as usize;
164 let end = self.action_index[n + 1] as usize;
165 &self.actions[start..end]
166 }
167
168 pub fn update_node(&mut self, index: u32, record: NodeRecord, features: [f32; FEATURE_DIM]) {
170 let idx = index as usize;
171 if idx < self.nodes.len() {
172 self.nodes[idx] = record;
173 self.features[idx] = features;
174 }
175 }
176
177 pub fn shortest_path(&self, from: u32, to: u32, constraints: &PathConstraints) -> Option<Path> {
179 use std::cmp::Reverse;
180 use std::collections::BinaryHeap;
181
182 let n = self.nodes.len();
183 if from as usize >= n || to as usize >= n {
184 return None;
185 }
186
187 let mut dist = vec![f32::INFINITY; n];
188 let mut prev = vec![u32::MAX; n];
189 dist[from as usize] = 0.0;
190
191 let mut heap = BinaryHeap::new();
193 heap.push(Reverse((OrderedF32(0.0), from)));
194
195 while let Some(Reverse((OrderedF32(cost), node))) = heap.pop() {
196 if node == to {
197 break;
198 }
199 if cost > dist[node as usize] {
200 continue;
201 }
202
203 for edge in self.edges_from(node) {
204 let target = edge.target_node;
205 if target as usize >= n {
206 continue;
207 }
208
209 if constraints.avoid_auth && edge.flags.requires_auth() {
211 continue;
212 }
213 if constraints.avoid_state_changes && edge.flags.changes_state() {
214 continue;
215 }
216
217 let edge_cost = match constraints.minimize {
218 PathMinimize::Hops => 1.0,
219 PathMinimize::Weight => edge.weight as f32,
220 PathMinimize::StateChanges => {
221 if edge.flags.changes_state() {
222 10.0
223 } else {
224 1.0
225 }
226 }
227 };
228
229 let new_cost = cost + edge_cost;
230 if new_cost < dist[target as usize] {
231 dist[target as usize] = new_cost;
232 prev[target as usize] = node;
233 heap.push(Reverse((OrderedF32(new_cost), target)));
234 }
235 }
236 }
237
238 if dist[to as usize].is_infinite() {
239 return None;
240 }
241
242 let mut path_nodes = Vec::new();
244 let mut current = to;
245 while current != from {
246 path_nodes.push(current);
247 current = prev[current as usize];
248 if current == u32::MAX {
249 return None;
250 }
251 }
252 path_nodes.push(from);
253 path_nodes.reverse();
254
255 Some(Path {
256 hops: (path_nodes.len() - 1) as u32,
257 total_weight: dist[to as usize],
258 nodes: path_nodes,
259 required_actions: Vec::new(),
260 })
261 }
262}
263
/// Total-order wrapper around `f32` so distances can live in a `BinaryHeap`.
///
/// All comparisons delegate to `f32::total_cmp` (IEEE 754 totalOrder), which
/// — unlike the previous `partial_cmp().unwrap_or(Equal)` plus a derived
/// `PartialEq` — is reflexive for NaN and keeps `Eq`/`Ord` consistent with
/// each other. NaN compares greater than every finite value, so NaN-cost
/// heap entries sort last in the min-heap.
#[derive(Clone, Copy)]
struct OrderedF32(f32);

impl PartialEq for OrderedF32 {
    fn eq(&self, other: &Self) -> bool {
        self.0.total_cmp(&other.0) == std::cmp::Ordering::Equal
    }
}

impl Eq for OrderedF32 {}

impl PartialOrd for OrderedF32 {
    fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> {
        // Delegate to Ord, as clippy recommends for types implementing both.
        Some(self.cmp(other))
    }
}

impl Ord for OrderedF32 {
    fn cmp(&self, other: &Self) -> std::cmp::Ordering {
        self.0.total_cmp(&other.0)
    }
}
282}