// rustpython_ruff_python_ast/node_index.rs
use std::num::NonZeroU32;
use std::sync::atomic::{AtomicU32, Ordering};

/// Types that carry a lazily-assigned [`AtomicNodeIndex`].
pub trait HasNodeIndex {
    /// Returns a reference to this node's index slot.
    fn node_index(&self) -> &AtomicNodeIndex;
}
9
/// Forward through shared references so `&T` works wherever a
/// `HasNodeIndex` is expected.
impl<T> HasNodeIndex for &T
where
    T: HasNodeIndex,
{
    fn node_index(&self) -> &AtomicNodeIndex {
        T::node_index(*self)
    }
}
18
/// A unique index for a node within an AST.
///
/// Backed by a `NonZeroU32` (the logical index plus one), so
/// `Option<NodeIndex>` stays four bytes via the niche optimization.
#[derive(PartialEq, Eq, PartialOrd, Ord, Clone, Copy, Hash)]
#[cfg_attr(feature = "get-size", derive(get_size2::GetSize))]
pub struct NodeIndex(NonZeroU32);
62
63impl NodeIndex {
64 pub const NONE: NodeIndex = NodeIndex(NonZeroU32::new(NodeIndex::_NONE).unwrap());
66
67 const _NONE: u32 = u32::MAX - 1;
70
71 pub fn as_u32(self) -> Option<u32> {
73 if self == NodeIndex::NONE {
74 None
75 } else {
76 Some(self.0.get() - 1)
77 }
78 }
79}
80
/// Errors that can arise when deriving child ("sub") node indices.
#[derive(Debug, Copy, Clone)]
pub enum NodeIndexError {
    // NOTE(review): not constructed in this file — presumably raised when a
    // node lacks a parent to derive an index from; verify against callers.
    NoParent,
    /// The index is already at the deepest supported nesting level
    /// (returned by `sub_indices` when `level >= MAX_LEVEL`).
    TooNested,
    // NOTE(review): not constructed in this file — verify semantics at callers.
    ExhaustedSubIndices,
    // NOTE(review): not constructed in this file — verify semantics at callers.
    ExhaustedSubSubIndices,
    /// Deriving first-level sub-indices would not fit in the index space.
    OverflowedIndices,
    /// Deriving second-level sub-indices would not fit in the index space.
    OverflowedSubIndices,
}
90
/// Number of supported sub-AST nesting levels.
const MAX_LEVEL: u32 = 2;
/// Bits needed to encode a level in `0..=MAX_LEVEL`.
const LEVEL_BITS: u32 = 32 - MAX_LEVEL.leading_zeros();
/// The level tag lives in the uppermost `LEVEL_BITS` bits of an index.
const LEVEL_SHIFT: u32 = 32 - LEVEL_BITS;
/// Mask selecting the level bits.
///
/// Fixed: `(1 << LEVEL_BITS) - 1` yields `LEVEL_BITS` one-bits. The previous
/// `(LEVEL_BITS << 1) - 1` only happened to be equal while `LEVEL_BITS == 2`
/// and would silently produce the wrong mask for any other width.
const LEVEL_MASK: u32 = ((1 << LEVEL_BITS) - 1) << LEVEL_SHIFT;
/// Child slots reserved per node at the first nesting level.
const SUB_NODES: u32 = 256;
/// Child slots reserved per node at the second nesting level.
const SUB_SUB_NODES: u32 = 8;
/// Largest index representable in the non-level bits.
pub const MAX_REAL_INDEX: u32 = (1 << LEVEL_SHIFT) - 1;

/// Extracts the sub-AST nesting level encoded in the top bits of `index`.
pub fn sub_ast_level(index: u32) -> u32 {
    (index & LEVEL_MASK) >> LEVEL_SHIFT
}
103
104pub fn sub_indices(index: u32) -> Result<(u32, u32), NodeIndexError> {
106 let level = sub_ast_level(index);
107 if level >= MAX_LEVEL {
108 return Err(NodeIndexError::TooNested);
109 }
110 let next_level = (level + 1) << LEVEL_SHIFT;
111 let without_level = index & !LEVEL_MASK;
112 let (nodes_in_level, error_kind) = if level == 0 {
113 (SUB_NODES, NodeIndexError::OverflowedIndices)
114 } else if level == 1 {
115 (SUB_SUB_NODES, NodeIndexError::OverflowedSubIndices)
116 } else {
117 unreachable!(
118 "Someone made a mistake updating the encoding of node indices: {index:08X} had level {level}"
119 );
120 };
121
122 let sub_index_without_level = without_level
125 .checked_mul(nodes_in_level)
126 .ok_or(error_kind)?;
127 if sub_index_without_level > MAX_REAL_INDEX {
128 return Err(error_kind);
129 }
130
131 let first_index = sub_index_without_level | next_level;
132 let last_index = first_index + nodes_in_level - 1;
134 Ok((first_index, last_index))
135}
136
137impl From<u32> for NodeIndex {
138 fn from(value: u32) -> Self {
139 match NonZeroU32::new(value + 1).map(NodeIndex) {
140 None | Some(NodeIndex::NONE) => panic!("exceeded maximum `NodeIndex`"),
141 Some(index) => index,
142 }
143 }
144}
145
146impl std::fmt::Debug for NodeIndex {
147 fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
148 if *self == Self::NONE {
149 f.debug_tuple("NodeIndex(None)").finish()
150 } else {
151 f.debug_tuple("NodeIndex").field(&self.0).finish()
152 }
153 }
154}
155
/// An atomic cell holding a [`NodeIndex`], allowing an index to be assigned
/// through a shared reference after construction.
#[cfg_attr(feature = "get-size", derive(get_size2::GetSize))]
pub struct AtomicNodeIndex(AtomicU32);
162
163#[allow(clippy::declare_interior_mutable_const)]
164impl AtomicNodeIndex {
165 pub const NONE: AtomicNodeIndex = AtomicNodeIndex(AtomicU32::new(NodeIndex::_NONE));
167
168 pub fn load(&self) -> NodeIndex {
170 let index = NonZeroU32::new(self.0.load(Ordering::Relaxed))
171 .expect("value stored was a valid `NodeIndex`");
172
173 NodeIndex(index)
174 }
175
176 pub fn set(&self, index: NodeIndex) {
178 self.0.store(index.0.get(), Ordering::Relaxed);
179 }
180}
181
182impl Default for AtomicNodeIndex {
183 fn default() -> Self {
184 Self::NONE
185 }
186}
187
188impl std::fmt::Debug for AtomicNodeIndex {
189 fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
190 std::fmt::Debug::fmt(&self.load(), f)
191 }
192}
193
194impl std::hash::Hash for AtomicNodeIndex {
195 fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
196 self.load().hash(state);
197 }
198}
199
200impl PartialOrd for AtomicNodeIndex {
201 fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> {
202 Some(self.cmp(other))
203 }
204}
205
206impl Ord for AtomicNodeIndex {
207 fn cmp(&self, other: &Self) -> std::cmp::Ordering {
208 self.load().cmp(&other.load())
209 }
210}
211
212impl Eq for AtomicNodeIndex {}
213
214impl PartialEq for AtomicNodeIndex {
215 fn eq(&self, other: &Self) -> bool {
216 self.load() == other.load()
217 }
218}
219
220impl Clone for AtomicNodeIndex {
221 fn clone(&self) -> Self {
222 Self(AtomicU32::from(self.0.load(Ordering::Relaxed)))
223 }
224}
225
#[cfg(test)]
mod tests {
    use super::{AtomicNodeIndex, NodeIndex};

    #[test]
    fn test_node_index() {
        // A fresh cell holds the sentinel and debug-prints as unset.
        let index = AtomicNodeIndex::NONE;

        assert_eq!(index.load(), NodeIndex::NONE);
        assert_eq!(format!("{index:?}"), "NodeIndex(None)");

        // Round-trip a real index through the atomic cell.
        index.set(NodeIndex::from(1));
        assert_eq!(index.load(), NodeIndex::from(1));
        assert_eq!(index.load().as_u32(), Some(1));

        // Zero is a valid index, distinct from the sentinel.
        let index = NodeIndex::from(0);
        assert_eq!(index.as_u32(), Some(0));

        // The sentinel reports no index.
        let index = NodeIndex::NONE;
        assert_eq!(index.as_u32(), None);
    }
}