use alloc::{collections::VecDeque, vec::Vec};
use core::hash::Hash;
use crate::{
dom::{DomId, DomNodeHash, DomNodeId, NodeData, IdOrClass},
events::{
ComponentEventFilter, EventData, EventFilter, EventPhase, EventSource, EventType,
LifecycleEventData, LifecycleReason, SyntheticEvent,
},
geom::LogicalRect,
id::NodeId,
styled_dom::{NodeHierarchyItemId, NodeHierarchyItem},
task::Instant,
FastHashMap,
};
/// Records that reconciliation matched a node from the previous frame's DOM
/// (`old_node_id`) with a node in the current frame's DOM (`new_node_id`),
/// i.e. both ids refer to the same logical node across frames.
#[derive(Debug, Clone, Copy)]
pub struct NodeMove {
    /// Id of the node in the previous (old) DOM.
    pub old_node_id: NodeId,
    /// Id of the matched node in the current (new) DOM.
    pub new_node_id: NodeId,
}
/// Output of `reconcile_dom`: the lifecycle events to dispatch plus the
/// old → new node pairings discovered while diffing the two DOMs.
#[derive(Debug, Clone)]
pub struct DiffResult {
    /// Synthetic lifecycle events (Mount / Unmount / Resize / Update) for
    /// nodes that registered the corresponding component callbacks.
    pub events: Vec<SyntheticEvent>,
    /// Matched (old, new) node pairs; consumed by `create_migration_map`
    /// and `transfer_states` to carry state across frames.
    pub node_moves: Vec<NodeMove>,
}
impl Default for DiffResult {
fn default() -> Self {
Self {
events: Vec::new(),
node_moves: Vec::new(),
}
}
}
/// Derives a stable reconciliation key for `node_id`.
///
/// Priority order:
/// 1. an explicit user-supplied key, returned verbatim;
/// 2. a hash of the node's first `#id` attribute, if one exists;
/// 3. a positional hash of (node-type discriminant, classes, nth-of-type
///    index among siblings, recursive parent key).
///
/// The positional fallback recurses towards the root, so the key encodes
/// the node's full path through the tree.
pub fn calculate_reconciliation_key(
    node_data: &[NodeData],
    hierarchy: &[NodeHierarchyItem],
    node_id: NodeId,
) -> u64 {
    use highway::{HighwayHash, HighwayHasher, Key};

    let node = &node_data[node_id.index()];

    // 1. Explicit keys take absolute priority.
    if let Some(explicit) = node.get_key() {
        return explicit;
    }

    // 2. The first #id attribute identifies the node on its own.
    let first_id = node.ids_and_classes.as_ref().iter().find_map(|ioc| match ioc {
        IdOrClass::Id(id) => Some(id),
        _ => None,
    });
    if let Some(id) = first_id {
        let mut hasher = HighwayHasher::new(Key([0; 4]));
        id.as_str().hash(&mut hasher);
        return hasher.finalize64();
    }

    // 3. Positional key. Hash-input order matters and must stay:
    //    discriminant, classes, nth-of-type, parent key.
    let mut hasher = HighwayHasher::new(Key([0; 4]));
    let my_discriminant = core::mem::discriminant(node.get_node_type());
    my_discriminant.hash(&mut hasher);

    for ioc in node.ids_and_classes.as_ref().iter() {
        if let IdOrClass::Class(class) = ioc {
            class.as_str().hash(&mut hasher);
        }
    }

    if let Some(parent_id) = hierarchy.get(node_id.index()).and_then(|h| h.parent_id()) {
        // Count preceding siblings that share this node's type discriminant
        // (an nth-of-type index), walking the sibling chain until we hit self.
        let mut nth_of_type: usize = 0;
        let mut cursor = hierarchy[parent_id.index()].first_child_id(parent_id);
        while let Some(sibling_id) = cursor {
            if sibling_id == node_id {
                break;
            }
            let sibling_discriminant =
                core::mem::discriminant(node_data[sibling_id.index()].get_node_type());
            if sibling_discriminant == my_discriminant {
                nth_of_type += 1;
            }
            cursor = hierarchy[sibling_id.index()].next_sibling_id();
        }
        nth_of_type.hash(&mut hasher);

        // Recurse: the parent's key anchors this node's position in the tree.
        calculate_reconciliation_key(node_data, hierarchy, parent_id).hash(&mut hasher);
    }

    hasher.finalize64()
}
/// Computes the reconciliation key for every node in `node_data` and
/// returns them as a node-id → key lookup table.
pub fn precompute_reconciliation_keys(
    node_data: &[NodeData],
    hierarchy: &[NodeHierarchyItem],
) -> FastHashMap<NodeId, u64> {
    (0..node_data.len())
        .map(|idx| {
            let node_id = NodeId::new(idx);
            (node_id, calculate_reconciliation_key(node_data, hierarchy, node_id))
        })
        .collect()
}
/// Diffs the old DOM against the new DOM and produces:
///
/// * `node_moves` — (old, new) `NodeId` pairs that refer to the same logical
///   node across frames, so state can be migrated, and
/// * lifecycle `events` (Mount / Unmount / Resize / Update) for nodes that
///   registered the corresponding component callback.
///
/// Matching priority per new node:
/// 1. explicit user key (`get_key()`),
/// 2. exact content hash (`calculate_node_data_hash`),
/// 3. structural hash (`calculate_structural_hash`).
///
/// Keyed nodes only ever match by key; unkeyed nodes never match keyed ones.
///
/// FIX: the four `create_lifecycle_event` call sites previously contained the
/// invalid token `×tamp` (a mangled `&timestamp`), which does not compile;
/// restored to `&timestamp`.
pub fn reconcile_dom(
    old_node_data: &[NodeData],
    new_node_data: &[NodeData],
    old_layout: &FastHashMap<NodeId, LogicalRect>,
    new_layout: &FastHashMap<NodeId, LogicalRect>,
    dom_id: DomId,
    timestamp: Instant,
) -> DiffResult {
    let mut result = DiffResult::default();

    // --- Index the old tree -------------------------------------------------
    // Keyed nodes go into a key → id map; unkeyed nodes are queued under both
    // their content hash and their structural hash (FIFO keeps matches stable
    // in document order). `old_nodes_consumed` prevents double-matching, since
    // an unkeyed node sits in two queues at once.
    let mut old_keyed: FastHashMap<u64, NodeId> = FastHashMap::default();
    let mut old_hashed: FastHashMap<DomNodeHash, VecDeque<NodeId>> = FastHashMap::default();
    let mut old_structural: FastHashMap<DomNodeHash, VecDeque<NodeId>> = FastHashMap::default();
    let mut old_nodes_consumed = vec![false; old_node_data.len()];

    for (idx, node) in old_node_data.iter().enumerate() {
        let id = NodeId::new(idx);
        if let Some(key) = node.get_key() {
            old_keyed.insert(key, id);
        } else {
            old_hashed
                .entry(node.calculate_node_data_hash())
                .or_default()
                .push_back(id);
            old_structural
                .entry(node.calculate_structural_hash())
                .or_default()
                .push_back(id);
        }
    }

    // --- Match every new node against the old tree --------------------------
    for (new_idx, new_node) in new_node_data.iter().enumerate() {
        let new_id = NodeId::new(new_idx);

        let matched_old_id = if let Some(key) = new_node.get_key() {
            // Keyed nodes match exclusively by key.
            old_keyed
                .get(&key)
                .copied()
                .filter(|old_id| !old_nodes_consumed[old_id.index()])
        } else {
            // Unkeyed: prefer an exact content match, fall back to a
            // structurally-equal node (same shape, different content).
            let content_hash = new_node.calculate_node_data_hash();
            old_hashed
                .get_mut(&content_hash)
                .and_then(|q| take_first_unconsumed(q, &old_nodes_consumed))
                .or_else(|| {
                    let structural_hash = new_node.calculate_structural_hash();
                    old_structural
                        .get_mut(&structural_hash)
                        .and_then(|q| take_first_unconsumed(q, &old_nodes_consumed))
                })
        };

        if let Some(old_id) = matched_old_id {
            old_nodes_consumed[old_id.index()] = true;
            result.node_moves.push(NodeMove {
                old_node_id: old_id,
                new_node_id: new_id,
            });

            let old_rect = old_layout.get(&old_id).copied().unwrap_or(LogicalRect::zero());
            let new_rect = new_layout.get(&new_id).copied().unwrap_or(LogicalRect::zero());

            // Size changed across frames → Resize event (if subscribed).
            if old_rect.size != new_rect.size && has_resize_callback(new_node) {
                result.events.push(create_lifecycle_event(
                    EventType::Resize,
                    new_id,
                    dom_id,
                    &timestamp,
                    LifecycleEventData {
                        reason: LifecycleReason::Resize,
                        previous_bounds: Some(old_rect),
                        current_bounds: new_rect,
                    },
                ));
            }

            // Only keyed nodes can observe an Update: an unkeyed content-hash
            // match is identical by construction, and a structural match does
            // not count as "the same node was updated" for keyed semantics.
            if new_node.get_key().is_some() {
                let old_hash = old_node_data[old_id.index()].calculate_node_data_hash();
                let new_hash = new_node.calculate_node_data_hash();
                if old_hash != new_hash && has_update_callback(new_node) {
                    result.events.push(create_lifecycle_event(
                        EventType::Update,
                        new_id,
                        dom_id,
                        &timestamp,
                        LifecycleEventData {
                            reason: LifecycleReason::Update,
                            previous_bounds: Some(old_rect),
                            current_bounds: new_rect,
                        },
                    ));
                }
            }
        } else if has_mount_callback(new_node) {
            // Brand-new node with an AfterMount callback registered.
            let bounds = new_layout.get(&new_id).copied().unwrap_or(LogicalRect::zero());
            result.events.push(create_lifecycle_event(
                EventType::Mount,
                new_id,
                dom_id,
                &timestamp,
                LifecycleEventData {
                    reason: LifecycleReason::InitialMount,
                    previous_bounds: None,
                    current_bounds: bounds,
                },
            ));
        }
    }

    // --- Any old node never matched was removed → Unmount --------------------
    for (old_idx, consumed) in old_nodes_consumed.iter().enumerate() {
        if *consumed {
            continue;
        }
        let old_id = NodeId::new(old_idx);
        let old_node = &old_node_data[old_idx];
        if has_unmount_callback(old_node) {
            let bounds = old_layout.get(&old_id).copied().unwrap_or(LogicalRect::zero());
            result.events.push(create_lifecycle_event(
                EventType::Unmount,
                old_id,
                dom_id,
                &timestamp,
                LifecycleEventData {
                    // NOTE(review): `InitialMount` as the reason for an Unmount
                    // event reads like a copy-paste slip — confirm whether
                    // `LifecycleReason` has an `Unmount` variant and use it here.
                    reason: LifecycleReason::InitialMount,
                    previous_bounds: Some(bounds),
                    current_bounds: LogicalRect::zero(),
                },
            ));
        }
    }

    result
}

/// Pops already-consumed entries off the front of `queue` and returns the
/// first not-yet-consumed node id (removing it from the queue), or `None`
/// if the queue runs out.
fn take_first_unconsumed(queue: &mut VecDeque<NodeId>, consumed: &[bool]) -> Option<NodeId> {
    while let Some(candidate) = queue.pop_front() {
        if !consumed[candidate.index()] {
            return Some(candidate);
        }
    }
    None
}
/// Builds a `SyntheticEvent` of the given lifecycle type, targeting
/// `node_id` inside `dom_id`. The event starts in the `Target` phase with
/// all propagation/default flags cleared; target and current_target are the
/// same node.
fn create_lifecycle_event(
    event_type: EventType,
    node_id: NodeId,
    dom_id: DomId,
    timestamp: &Instant,
    data: LifecycleEventData,
) -> SyntheticEvent {
    let target = DomNodeId {
        dom: dom_id,
        node: NodeHierarchyItemId::from_crate_internal(Some(node_id)),
    };
    SyntheticEvent {
        event_type,
        source: EventSource::Lifecycle,
        phase: EventPhase::Target,
        target,
        current_target: target,
        timestamp: timestamp.clone(),
        data: EventData::Lifecycle(data),
        stopped: false,
        stopped_immediate: false,
        prevented_default: false,
    }
}
fn has_mount_callback(node: &NodeData) -> bool {
node.get_callbacks().iter().any(|cb| {
matches!(
cb.event,
EventFilter::Component(ComponentEventFilter::AfterMount)
)
})
}
fn has_unmount_callback(node: &NodeData) -> bool {
node.get_callbacks().iter().any(|cb| {
matches!(
cb.event,
EventFilter::Component(ComponentEventFilter::BeforeUnmount)
)
})
}
fn has_resize_callback(node: &NodeData) -> bool {
node.get_callbacks().iter().any(|cb| {
matches!(
cb.event,
EventFilter::Component(ComponentEventFilter::NodeResized)
)
})
}
/// True if the node has a callback that should fire when a keyed node's
/// content changes; used by `reconcile_dom` to gate `EventType::Update`.
///
/// NOTE(review): this matches `ComponentEventFilter::Selected`, while the
/// sibling helpers match `AfterMount` / `BeforeUnmount` / `NodeResized`.
/// `Selected` reads like a copy-paste slip for an update-style variant —
/// confirm against the `ComponentEventFilter` definition before relying on it.
fn has_update_callback(node: &NodeData) -> bool {
    node.get_callbacks().iter().any(|cb| {
        matches!(
            cb.event,
            EventFilter::Component(ComponentEventFilter::Selected)
        )
    })
}
pub fn create_migration_map(node_moves: &[NodeMove]) -> FastHashMap<NodeId, NodeId> {
let mut map = FastHashMap::default();
for m in node_moves {
map.insert(m.old_node_id, m.new_node_id);
}
map
}
/// Migrates per-node user state (`dataset`) from the old DOM into the new
/// DOM along the matched `node_moves`, invoking each new node's merge
/// callback to combine the old and new datasets.
///
/// Only pairs where the *new* node declares a merge callback are touched;
/// all other nodes keep their datasets unchanged. When a merge happens, the
/// old node's dataset slot is left as `None` (ownership moves into the
/// merged value stored on the new node).
pub fn transfer_states(
    old_node_data: &mut [NodeData],
    new_node_data: &mut [NodeData],
    node_moves: &[NodeMove],
) {
    use crate::refany::OptionRefAny;
    for movement in node_moves {
        let old_idx = movement.old_node_id.index();
        let new_idx = movement.new_node_id.index();
        // Defensive bounds check: a stale move referencing an out-of-range
        // node is skipped rather than panicking.
        if old_idx >= old_node_data.len() || new_idx >= new_node_data.len() {
            continue;
        }
        // No merge callback on the new node => nothing to transfer.
        let merge_callback = match new_node_data[new_idx].get_merge_callback() {
            Some(cb) => cb,
            None => continue, };
        // Temporarily take ownership of both datasets (leaving `None` in
        // their slots) so they can be passed by value to the merge callback.
        let old_dataset = core::mem::replace(
            &mut old_node_data[old_idx].dataset,
            OptionRefAny::None
        );
        let new_dataset = core::mem::replace(
            &mut new_node_data[new_idx].dataset,
            OptionRefAny::None
        );
        match (new_dataset, old_dataset) {
            // Both sides have data: merge and store the result on the new
            // node; the old slot stays `None` (state has moved forward).
            (OptionRefAny::Some(new_data), OptionRefAny::Some(old_data)) => {
                let merged = (merge_callback.cb)(new_data, old_data);
                new_node_data[new_idx].dataset = OptionRefAny::Some(merged);
            }
            // At least one side is empty: put both datasets back where they
            // came from, restoring the pre-call state.
            (new_ds, old_ds) => {
                new_node_data[new_idx].dataset = new_ds;
                old_node_data[old_idx].dataset = old_ds;
            }
        }
    }
}
pub fn calculate_contenteditable_key(
node_data: &[NodeData],
hierarchy: &[crate::styled_dom::NodeHierarchyItem],
node_id: NodeId,
) -> u64 {
use highway::{HighwayHash, HighwayHasher, Key};
use crate::dom::IdOrClass;
let node = &node_data[node_id.index()];
if let Some(explicit_key) = node.get_key() {
return explicit_key;
}
for id_or_class in node.get_ids_and_classes().as_ref().iter() {
if let IdOrClass::Id(id) = id_or_class {
let mut hasher = HighwayHasher::new(Key([1; 4])); hasher.append(id.as_str().as_bytes());
return hasher.finalize64();
}
}
let mut hasher = HighwayHasher::new(Key([2; 4]));
let parent_key = if let Some(parent_id) = hierarchy.get(node_id.index()).and_then(|h| h.parent_id()) {
calculate_contenteditable_key(node_data, hierarchy, parent_id)
} else {
0u64 };
hasher.append(&parent_key.to_le_bytes());
let node_discriminant = core::mem::discriminant(node.get_node_type());
let nth_of_type = if let Some(parent_id) = hierarchy.get(node_id.index()).and_then(|h| h.parent_id()) {
let mut count = 0u32;
let mut sibling_id = hierarchy.get(parent_id.index()).and_then(|h| h.first_child_id(parent_id));
while let Some(sib_id) = sibling_id {
if sib_id == node_id {
break;
}
let sibling_discriminant = core::mem::discriminant(node_data[sib_id.index()].get_node_type());
if sibling_discriminant == node_discriminant {
count += 1;
}
sibling_id = hierarchy.get(sib_id.index()).and_then(|h| h.next_sibling_id());
}
count
} else {
0
};
hasher.append(&nth_of_type.to_le_bytes());
#[cfg(feature = "std")]
{
let type_str = format!("{:?}", node_discriminant);
hasher.append(type_str.as_bytes());
}
#[cfg(not(feature = "std"))]
{
let discriminant_bytes: [u8; core::mem::size_of::<core::mem::Discriminant<crate::dom::NodeType>>()] =
unsafe { core::mem::transmute(node_discriminant) };
hasher.append(&discriminant_bytes);
}
for id_or_class in node.get_ids_and_classes().as_ref().iter() {
if let IdOrClass::Class(class) = id_or_class {
hasher.append(class.as_str().as_bytes());
}
}
hasher.finalize64()
}
/// Maps a byte-offset cursor position in `old_text` to the corresponding
/// position in `new_text` after an edit.
///
/// Strategy: compute the longest common byte prefix and suffix of the two
/// strings. A cursor inside the common prefix keeps its offset; a cursor
/// inside the common suffix keeps its distance from the end; a cursor in
/// the replaced middle region snaps to the start of the common suffix.
pub fn reconcile_cursor_position(
    old_text: &str,
    new_text: &str,
    old_cursor_byte: usize,
) -> usize {
    // Unchanged text: the cursor stays where it was.
    if old_text == new_text {
        return old_cursor_byte;
    }
    // Everything was inserted: jump to the end of the new text.
    if old_text.is_empty() {
        return new_text.len();
    }
    // Everything was deleted: jump to the start.
    if new_text.is_empty() {
        return 0;
    }

    // Longest shared byte prefix.
    let prefix_len = old_text
        .as_bytes()
        .iter()
        .zip(new_text.as_bytes())
        .take_while(|(a, b)| a == b)
        .count();
    if old_cursor_byte <= prefix_len {
        // Cursor sits in the untouched head; clamp defensively to the new length.
        return old_cursor_byte.min(new_text.len());
    }

    // Longest shared byte suffix.
    let suffix_len = old_text
        .as_bytes()
        .iter()
        .rev()
        .zip(new_text.as_bytes().iter().rev())
        .take_while(|(a, b)| a == b)
        .count();
    let old_suffix_start = old_text.len().saturating_sub(suffix_len);

    if old_cursor_byte >= old_suffix_start {
        // Cursor sits in the untouched tail; preserve its distance from the end.
        let from_end = old_text.len() - old_cursor_byte;
        return new_text.len().saturating_sub(from_end);
    }

    // Cursor was inside the replaced middle: snap to the suffix boundary.
    new_text.len().saturating_sub(suffix_len)
}
/// Returns the node's text content if it is a `Text` node; `None` for every
/// other node type.
pub fn get_node_text_content(node: &NodeData) -> Option<&str> {
    match node.get_node_type() {
        crate::dom::NodeType::Text(text) => Some(text.as_str()),
        _ => None,
    }
}