1use std::{
4 any::{Any, TypeId},
5 cell::RefCell,
6 hash::{Hash, Hasher},
7 marker::PhantomData,
8 sync::Arc,
9 time::Duration,
10};
11
12use parking_lot::RwLock;
13use rustc_hash::{FxHashMap as HashMap, FxHashSet as HashSet};
14use slotmap::{SlotMap, new_key_type};
15use smallvec::SmallVec;
16
17use crate::{
18 NodeId,
19 accessibility::{AccessibilityActionHandler, AccessibilityNode},
20 component_tree::ComponentTree,
21 execution_context::{OrderFrame, with_execution_context, with_execution_context_mut},
22 focus::{
23 FocusDirection, FocusGroupNode, FocusHandleId, FocusNode, FocusProperties,
24 FocusRegistration, FocusRegistrationKind, FocusRequester, FocusRequesterId,
25 FocusRevealRequest, FocusScopeNode, FocusState, FocusTraversalPolicy,
26 },
27 layout::{LayoutPolicyDyn, RenderPolicyDyn},
28 modifier::Modifier,
29 prop::{CallbackWith, ComponentReplayData, ErasedComponentRunner, Prop},
30 time::Instant,
31};
32
/// Which per-call-site counter of the current `OrderFrame` to advance.
///
/// Each variant numbers one category of calls (remember slots, functors,
/// context providers, frame receivers) so call sites receive stable,
/// ordered indices within a single component body.
#[derive(Clone, Copy)]
enum OrderCounterKind {
    Remember,
    Functor,
    Context,
    FrameReceiver,
}
40
41fn push_order_frame() {
42 with_execution_context_mut(|context| {
43 context.order_frame_stack.push(OrderFrame::default());
44 });
45}
46
47fn pop_order_frame(underflow_message: &str) {
48 with_execution_context_mut(|context| {
49 let popped = context.order_frame_stack.pop();
50 debug_assert!(popped.is_some(), "{underflow_message}");
51 });
52}
53
54fn next_order_counter(kind: OrderCounterKind, empty_message: &str) -> u64 {
55 with_execution_context_mut(|context| {
56 debug_assert!(!context.order_frame_stack.is_empty(), "{empty_message}");
57 let frame = context.order_frame_stack.last_mut().expect(empty_message);
58 match kind {
59 OrderCounterKind::Remember => {
60 let counter = frame.remember;
61 frame.remember = frame.remember.wrapping_add(1);
62 counter
63 }
64 OrderCounterKind::Functor => {
65 let counter = frame.functor;
66 frame.functor = frame.functor.wrapping_add(1);
67 counter
68 }
69 OrderCounterKind::Context => {
70 let counter = frame.context;
71 frame.context = frame.context.wrapping_add(1);
72 counter
73 }
74 OrderCounterKind::FrameReceiver => {
75 let counter = frame.frame_receiver;
76 frame.frame_receiver = frame.frame_receiver.wrapping_add(1);
77 counter
78 }
79 }
80 })
81}
82
83fn next_child_instance_call_index() -> u64 {
84 with_execution_context_mut(|context| {
85 let Some(frame) = context.order_frame_stack.last_mut() else {
86 return 0;
87 };
88 let index = frame.instance;
89 frame.instance = frame.instance.wrapping_add(1);
90 index
91 })
92}
93
94pub(crate) fn compute_context_slot_key() -> (u64, u64) {
95 let instance_logic_id = current_instance_logic_id();
96 let group_path_hash = current_group_path_hash();
97
98 let call_counter = next_order_counter(
99 OrderCounterKind::Context,
100 "ORDER_FRAME_STACK is empty; provide_context must be called inside a component",
101 );
102
103 let slot_hash = hash_components(&[&group_path_hash, &call_counter]);
104 (instance_logic_id, slot_hash)
105}
106
/// Identity of a remember slot: owning instance, call-site hash, and the
/// concrete type of the stored value.
#[derive(Hash, Eq, PartialEq, Clone, Copy)]
struct SlotKey {
    instance_logic_id: u64,
    slot_hash: u64,
    type_id: TypeId,
}

impl Default for SlotKey {
    // Manual impl because `TypeId` does not implement `Default`; the unit
    // type's id serves as the sentinel.
    fn default() -> Self {
        Self {
            instance_logic_id: 0,
            slot_hash: 0,
            type_id: TypeId::of::<()>(),
        }
    }
}

new_key_type! {
    /// Slotmap key for entries in `SlotTable::entries`.
    struct SlotHandle;
}

/// One stored value plus the bookkeeping used for matching and retention.
#[derive(Default)]
struct SlotEntry {
    key: SlotKey,
    // Captured in `FunctorHandle`s referring to this slot, so a handle can
    // be recognised as stale when the slot is reused.
    generation: u64,
    value: Option<Arc<dyn Any + Send + Sync>>,
    // Epoch in which the owning component last touched this slot
    // (presumably consulted by a sweep elsewhere — confirm against callers).
    last_alive_epoch: u64,
    retained: bool,
}

/// Per-instance positional cursor over last frame's slot order, enabling
/// cheap lookups when a component's remember calls repeat in the same order.
#[derive(Default)]
struct InstanceSlotCursor {
    previous_order: SmallVec<[SlotHandle; 4]>,
    current_order: SmallVec<[SlotHandle; 4]>,
    cursor: usize,
    epoch: u64,
}
144
145impl InstanceSlotCursor {
146 fn begin_epoch(&mut self, epoch: u64) {
147 if self.epoch == epoch {
148 return;
149 }
150 self.previous_order = std::mem::take(&mut self.current_order);
151 self.cursor = 0;
152 self.epoch = epoch;
153 }
154
155 fn fast_candidate(&self) -> Option<SlotHandle> {
156 self.previous_order.get(self.cursor).copied()
157 }
158
159 fn record_fast_match(&mut self, slot: SlotHandle) {
160 self.cursor = self.cursor.saturating_add(1);
161 self.current_order.push(slot);
162 }
163
164 fn record_slow_match(&mut self, slot: SlotHandle) {
165 if self.cursor < self.previous_order.len()
166 && let Some(offset) = self.previous_order[self.cursor..]
167 .iter()
168 .position(|candidate| *candidate == slot)
169 {
170 self.cursor += offset + 1;
171 }
172 self.current_order.push(slot);
173 }
174}
175
/// Storage for remembered values ("slots").
///
/// Lookups are keyed by `SlotKey`, with a positional fast path per component
/// instance (`InstanceSlotCursor`) exploiting call-order stability between
/// frames.
#[derive(Default)]
struct SlotTable {
    entries: SlotMap<SlotHandle, SlotEntry>,
    key_to_slot: HashMap<SlotKey, SlotHandle>,
    cursors_by_instance_logic_id: HashMap<u64, InstanceSlotCursor>,
    // Bumped once per frame; cursors reset lazily when they observe it.
    epoch: u64,
}
183
184impl SlotTable {
185 fn begin_epoch(&mut self) {
186 self.epoch = self.epoch.wrapping_add(1);
187 }
188
189 fn reset(&mut self) {
190 self.entries.clear();
191 self.key_to_slot.clear();
192 self.cursors_by_instance_logic_id.clear();
193 self.epoch = 0;
194 }
195
196 fn try_fast_slot_lookup(&mut self, key: SlotKey) -> Option<SlotHandle> {
197 let epoch = self.epoch;
198 let candidate = {
199 let cursor = self
200 .cursors_by_instance_logic_id
201 .entry(key.instance_logic_id)
202 .or_default();
203 cursor.begin_epoch(epoch);
204 cursor.fast_candidate()
205 }?;
206
207 let is_match = self
208 .entries
209 .get(candidate)
210 .is_some_and(|entry| entry.key == key);
211
212 if !is_match {
213 return None;
214 }
215
216 let cursor = self
217 .cursors_by_instance_logic_id
218 .get_mut(&key.instance_logic_id)
219 .expect("cursor entry should exist");
220 cursor.record_fast_match(candidate);
221 Some(candidate)
222 }
223
224 fn record_slot_usage_slow(&mut self, instance_logic_id: u64, slot: SlotHandle) {
225 let epoch = self.epoch;
226 let cursor = self
227 .cursors_by_instance_logic_id
228 .entry(instance_logic_id)
229 .or_default();
230 cursor.begin_epoch(epoch);
231 cursor.record_slow_match(slot);
232 }
233}
234
/// Identity of a persistent focus handle: the owning component instance
/// plus a hash of the caller-supplied slot key.
#[derive(Clone, Copy, Debug, Eq, Hash, PartialEq)]
struct PersistentFocusHandleKey {
    instance_key: u64,
    slot_hash: u64,
}

/// A stored focus handle plus how long its owner has been absent, used to
/// grant a one-frame grace period before removal.
#[derive(Clone, Copy)]
struct PersistentFocusHandleEntry<T> {
    value: T,
    // 0 = owner seen this frame; 1 = owner missing for one frame.
    missing_frames: u8,
}
246
247impl<T: Copy> PersistentFocusHandleEntry<T> {
248 fn new(value: T) -> Self {
249 Self {
250 value,
251 missing_frames: 0,
252 }
253 }
254
255 fn mark_live(&mut self) -> T {
256 self.missing_frames = 0;
257 self.value
258 }
259
260 fn retain_for_frame(&mut self) -> bool {
261 if self.missing_frames == 0 {
262 self.missing_frames = 1;
263 true
264 } else {
265 false
266 }
267 }
268}
269
/// Focus handles that persist across frames, keyed by owning instance and
/// call-site slot key; one map per handle kind.
#[derive(Default)]
struct PersistentFocusHandleStore {
    targets: HashMap<PersistentFocusHandleKey, PersistentFocusHandleEntry<FocusNode>>,
    scopes: HashMap<PersistentFocusHandleKey, PersistentFocusHandleEntry<FocusScopeNode>>,
    groups: HashMap<PersistentFocusHandleKey, PersistentFocusHandleEntry<FocusGroupNode>>,
    requesters: HashMap<PersistentFocusHandleKey, PersistentFocusHandleEntry<FocusRequester>>,
}

/// Ids of handles/requesters dropped by a retain sweep, reported so the
/// focus system can clean up any associated state.
#[derive(Default)]
pub(crate) struct RemovedPersistentFocusHandles {
    pub handle_ids: HashSet<FocusHandleId>,
    pub requester_ids: HashSet<FocusRequesterId>,
}
283
284impl PersistentFocusHandleStore {
285 fn retain_instance_keys(
286 &mut self,
287 live_instance_keys: &HashSet<u64>,
288 ) -> RemovedPersistentFocusHandles {
289 let mut removed = RemovedPersistentFocusHandles::default();
290 self.targets.retain(|key, handle| {
291 if !live_instance_keys.contains(&key.instance_key) {
292 if handle.retain_for_frame() {
293 true
294 } else {
295 removed.handle_ids.insert(handle.value.handle_id());
296 false
297 }
298 } else {
299 handle.mark_live();
300 true
301 }
302 });
303 self.scopes.retain(|key, scope| {
304 if !live_instance_keys.contains(&key.instance_key) {
305 if scope.retain_for_frame() {
306 true
307 } else {
308 removed.handle_ids.insert(scope.value.handle_id());
309 false
310 }
311 } else {
312 scope.mark_live();
313 true
314 }
315 });
316 self.groups.retain(|key, group| {
317 if !live_instance_keys.contains(&key.instance_key) {
318 if group.retain_for_frame() {
319 true
320 } else {
321 removed.handle_ids.insert(group.value.handle_id());
322 false
323 }
324 } else {
325 group.mark_live();
326 true
327 }
328 });
329 self.requesters.retain(|key, requester| {
330 if !live_instance_keys.contains(&key.instance_key) {
331 if requester.retain_for_frame() {
332 true
333 } else {
334 removed.requester_ids.insert(requester.value.requester_id());
335 false
336 }
337 } else {
338 requester.mark_live();
339 true
340 }
341 });
342 removed
343 }
344
345 fn contains_handle(&self, handle_id: FocusHandleId) -> bool {
346 self.targets
347 .values()
348 .any(|entry| entry.value.handle_id() == handle_id)
349 || self
350 .scopes
351 .values()
352 .any(|entry| entry.value.handle_id() == handle_id)
353 || self
354 .groups
355 .values()
356 .any(|entry| entry.value.handle_id() == handle_id)
357 }
358
359 fn clear(&mut self) {
360 self.targets.clear();
361 self.scopes.clear();
362 self.groups.clear();
363 self.requesters.clear();
364 }
365}
366
367fn with_slot_table<R>(f: impl FnOnce(&SlotTable) -> R) -> R {
368 RUNTIME_GLOBALS.with(|globals| f(&globals.slot_table.borrow()))
369}
370
371fn with_slot_table_mut<R>(f: impl FnOnce(&mut SlotTable) -> R) -> R {
372 RUNTIME_GLOBALS.with(|globals| f(&mut globals.slot_table.borrow_mut()))
373}
374
/// Stable reference to a functor slot: the slot plus the generation it had
/// when the handle was created, so a recycled slot is not mistaken for the
/// original one.
#[derive(Clone, Copy, Debug, Eq, Hash, PartialEq)]
pub(crate) struct FunctorHandle {
    slot: SlotHandle,
    generation: u64,
}

impl FunctorHandle {
    // Internal constructor; handles are minted where slots are allocated.
    fn new(slot: SlotHandle, generation: u64) -> Self {
        Self { slot, generation }
    }
}
386
387struct CallbackCell {
388 current: RwLock<Arc<dyn Fn() + Send + Sync>>,
389}
390
391impl CallbackCell {
392 fn new(current: Arc<dyn Fn() + Send + Sync>) -> Self {
393 Self {
394 current: RwLock::new(current),
395 }
396 }
397
398 fn update(&self, next: Arc<dyn Fn() + Send + Sync>) {
399 *self.current.write() = next;
400 }
401
402 fn shared(&self) -> Arc<dyn Fn() + Send + Sync> {
403 Arc::clone(&self.current.read())
404 }
405}
406
407struct CallbackWithCell<T, R> {
408 current: RwLock<Arc<dyn Fn(T) -> R + Send + Sync>>,
409}
410
411impl<T, R> CallbackWithCell<T, R> {
412 fn new(current: Arc<dyn Fn(T) -> R + Send + Sync>) -> Self {
413 Self {
414 current: RwLock::new(current),
415 }
416 }
417
418 fn update(&self, next: Arc<dyn Fn(T) -> R + Send + Sync>) {
419 *self.current.write() = next;
420 }
421
422 fn shared(&self) -> Arc<dyn Fn(T) -> R + Send + Sync> {
423 Arc::clone(&self.current.read())
424 }
425}
426
427struct RenderSlotCell {
428 current: RwLock<Arc<dyn Fn() + Send + Sync>>,
429}
430
431impl RenderSlotCell {
432 fn new(current: Arc<dyn Fn() + Send + Sync>) -> Self {
433 Self {
434 current: RwLock::new(current),
435 }
436 }
437
438 fn update(&self, next: Arc<dyn Fn() + Send + Sync>) {
439 *self.current.write() = next;
440 }
441
442 fn shared(&self) -> Arc<dyn Fn() + Send + Sync> {
443 Arc::clone(&self.current.read())
444 }
445}
446
447struct RenderSlotWithCell<T> {
448 current: RwLock<Arc<dyn Fn(T) + Send + Sync>>,
449}
450
451impl<T> RenderSlotWithCell<T> {
452 fn new(current: Arc<dyn Fn(T) + Send + Sync>) -> Self {
453 Self {
454 current: RwLock::new(current),
455 }
456 }
457
458 fn update(&self, next: Arc<dyn Fn(T) + Send + Sync>) {
459 *self.current.write() = next;
460 }
461
462 fn shared(&self) -> Arc<dyn Fn(T) + Send + Sync> {
463 Arc::clone(&self.current.read())
464 }
465}
466
/// Cross-frame record of layout policies and child lists, used to compute
/// which nodes need re-measure / re-placement.
#[derive(Default)]
struct LayoutDirtyTracker {
    previous_layout_policies_by_node: HashMap<u64, Box<dyn LayoutPolicyDyn>>,
    frame_layout_policies_by_node: HashMap<u64, Box<dyn LayoutPolicyDyn>>,
    pending_measure_self_dirty_nodes: HashSet<u64>,
    ready_measure_self_dirty_nodes: HashSet<u64>,
    pending_placement_self_dirty_nodes: HashSet<u64>,
    ready_placement_self_dirty_nodes: HashSet<u64>,
    previous_children_by_node: HashMap<u64, Vec<u64>>,
}

/// Nodes whose own measure / placement must be redone this frame.
#[derive(Default)]
pub(crate) struct LayoutDirtyNodes {
    pub measure_self_nodes: HashSet<u64>,
    pub placement_self_nodes: HashSet<u64>,
}

/// Outcome of reconciling node structure between frames.
#[derive(Default)]
pub(crate) struct StructureReconcileResult {
    pub changed_nodes: HashSet<u64>,
    pub removed_nodes: HashSet<u64>,
}

/// Everything needed to re-run ("replay") a component without rebuilding
/// its parent: identity, position in the tree, and captured replay data.
#[allow(dead_code)]
#[derive(Clone)]
pub(crate) struct ReplayNodeSnapshot {
    pub instance_key: u64,
    pub parent_instance_key: Option<u64>,
    pub instance_logic_id: u64,
    pub group_path: Vec<u64>,
    pub instance_key_override: Option<u64>,
    pub fn_name: String,
    pub replay: ComponentReplayData,
}

/// Double-buffered snapshots: `previous_nodes` is last frame's view,
/// `current_nodes` accumulates during the frame being built.
#[derive(Default)]
struct ComponentReplayTracker {
    previous_nodes: HashMap<u64, ReplayNodeSnapshot>,
    current_nodes: HashMap<u64, ReplayNodeSnapshot>,
}
508
509fn with_component_replay_tracker<R>(f: impl FnOnce(&ComponentReplayTracker) -> R) -> R {
510 RUNTIME_GLOBALS.with(|globals| f(&globals.component_replay_tracker.borrow()))
511}
512
513fn with_component_replay_tracker_mut<R>(f: impl FnOnce(&mut ComponentReplayTracker) -> R) -> R {
514 RUNTIME_GLOBALS.with(|globals| f(&mut globals.component_replay_tracker.borrow_mut()))
515}
516
517pub(crate) fn begin_frame_component_replay_tracking() {
518 with_component_replay_tracker_mut(|tracker| tracker.current_nodes.clear());
519}
520
521pub(crate) fn finalize_frame_component_replay_tracking() {
522 with_component_replay_tracker_mut(|tracker| {
523 tracker.previous_nodes = std::mem::take(&mut tracker.current_nodes);
524 });
525}
526
527pub(crate) fn finalize_frame_component_replay_tracking_partial() {
528 with_component_replay_tracker_mut(|tracker| {
529 let current = std::mem::take(&mut tracker.current_nodes);
530 tracker.previous_nodes.extend(current);
531 });
532}
533
534pub(crate) fn reset_component_replay_tracking() {
535 with_component_replay_tracker_mut(|tracker| {
536 *tracker = ComponentReplayTracker::default();
537 });
538}
539
540pub(crate) fn previous_component_replay_nodes() -> HashMap<u64, ReplayNodeSnapshot> {
541 with_component_replay_tracker(|tracker| tracker.previous_nodes.clone())
542}
543
544pub(crate) fn remove_previous_component_replay_nodes(instance_keys: &HashSet<u64>) {
545 if instance_keys.is_empty() {
546 return;
547 }
548 with_component_replay_tracker_mut(|tracker| {
549 tracker
550 .previous_nodes
551 .retain(|instance_key, _| !instance_keys.contains(instance_key));
552 tracker
553 .current_nodes
554 .retain(|instance_key, _| !instance_keys.contains(instance_key));
555 });
556}
557
/// Component instances invalidated since the last frame build.
#[derive(Default)]
struct BuildInvalidationTracker {
    dirty_instance_keys: HashSet<u64>,
}

/// Snapshot of pending invalidations handed to the frame builder.
#[derive(Default)]
pub(crate) struct BuildInvalidationSet {
    pub dirty_instance_keys: HashSet<u64>,
}

/// One state slot (at a specific generation) as a dependency source; the
/// generation distinguishes reuse of the same slot handle.
#[derive(Clone, Copy, Debug, Eq, Hash, PartialEq)]
struct StateReadDependencyKey {
    slot: SlotHandle,
    generation: u64,
}

/// One focus object as a dependency source.
#[derive(Clone, Copy, Debug, Eq, Hash, PartialEq)]
struct FocusReadDependencyKey {
    kind: FocusReadDependencyKind,
}

/// Focus dependencies originate from either handles or requesters.
#[derive(Clone, Copy, Debug, Eq, Hash, PartialEq)]
enum FocusReadDependencyKind {
    Handle(FocusHandleId),
    Requester(FocusRequesterId),
}

/// Bidirectional index: which component instances read which state slots.
#[derive(Default)]
struct StateReadDependencyTracker {
    readers_by_state: HashMap<StateReadDependencyKey, HashSet<u64>>,
    states_by_reader: HashMap<u64, HashSet<StateReadDependencyKey>>,
}

/// Bidirectional index: which component instances read which focus objects.
#[derive(Default)]
struct FocusReadDependencyTracker {
    readers_by_focus: HashMap<FocusReadDependencyKey, HashSet<u64>>,
    focus_by_reader: HashMap<u64, HashSet<FocusReadDependencyKey>>,
}

/// Bidirectional index: which component instances read which render slots.
#[derive(Default)]
struct RenderSlotReadDependencyTracker {
    readers_by_slot: HashMap<FunctorHandle, HashSet<u64>>,
    slots_by_reader: HashMap<u64, HashSet<FunctorHandle>>,
}

/// Host-provided callback used to request that a new frame be drawn.
type RedrawWaker = Arc<dyn Fn() + Send + Sync + 'static>;
606
607fn with_build_invalidation_tracker<R>(f: impl FnOnce(&BuildInvalidationTracker) -> R) -> R {
608 RUNTIME_GLOBALS.with(|globals| f(&globals.build_invalidation_tracker.borrow()))
609}
610
611fn with_build_invalidation_tracker_mut<R>(f: impl FnOnce(&mut BuildInvalidationTracker) -> R) -> R {
612 RUNTIME_GLOBALS.with(|globals| f(&mut globals.build_invalidation_tracker.borrow_mut()))
613}
614
615fn with_state_read_dependency_tracker<R>(f: impl FnOnce(&StateReadDependencyTracker) -> R) -> R {
616 RUNTIME_GLOBALS.with(|globals| f(&globals.state_read_dependency_tracker.borrow()))
617}
618
619fn with_state_read_dependency_tracker_mut<R>(
620 f: impl FnOnce(&mut StateReadDependencyTracker) -> R,
621) -> R {
622 RUNTIME_GLOBALS.with(|globals| f(&mut globals.state_read_dependency_tracker.borrow_mut()))
623}
624
625fn with_focus_read_dependency_tracker<R>(f: impl FnOnce(&FocusReadDependencyTracker) -> R) -> R {
626 RUNTIME_GLOBALS.with(|globals| f(&globals.focus_read_dependency_tracker.borrow()))
627}
628
629fn with_focus_read_dependency_tracker_mut<R>(
630 f: impl FnOnce(&mut FocusReadDependencyTracker) -> R,
631) -> R {
632 RUNTIME_GLOBALS.with(|globals| f(&mut globals.focus_read_dependency_tracker.borrow_mut()))
633}
634
635fn with_render_slot_read_dependency_tracker<R>(
636 f: impl FnOnce(&RenderSlotReadDependencyTracker) -> R,
637) -> R {
638 RUNTIME_GLOBALS.with(|globals| f(&globals.render_slot_read_dependency_tracker.borrow()))
639}
640
641fn with_render_slot_read_dependency_tracker_mut<R>(
642 f: impl FnOnce(&mut RenderSlotReadDependencyTracker) -> R,
643) -> R {
644 RUNTIME_GLOBALS.with(|globals| f(&mut globals.render_slot_read_dependency_tracker.borrow_mut()))
645}
646
647fn with_redraw_waker<R>(f: impl FnOnce(&Option<RedrawWaker>) -> R) -> R {
648 RUNTIME_GLOBALS.with(|globals| f(&globals.redraw_waker.borrow()))
649}
650
651fn with_redraw_waker_mut<R>(f: impl FnOnce(&mut Option<RedrawWaker>) -> R) -> R {
652 RUNTIME_GLOBALS.with(|globals| f(&mut globals.redraw_waker.borrow_mut()))
653}
654
655fn schedule_runtime_redraw() {
656 let callback = with_redraw_waker(Clone::clone);
657 if let Some(callback) = callback {
658 callback();
659 }
660}
661
662pub(crate) fn install_redraw_waker(callback: RedrawWaker) {
663 with_redraw_waker_mut(|waker| *waker = Some(callback));
664}
665
666pub(crate) fn clear_redraw_waker() {
667 with_redraw_waker_mut(|waker| *waker = None);
668}
669
670pub(crate) fn current_component_instance_key_from_scope() -> Option<u64> {
671 with_execution_context(|context| context.current_component_instance_stack.last().copied())
672}
673
674fn with_persistent_focus_handle_store<R>(f: impl FnOnce(&PersistentFocusHandleStore) -> R) -> R {
675 RUNTIME_GLOBALS.with(|globals| f(&globals.persistent_focus_handle_store.borrow()))
676}
677
678fn with_persistent_focus_handle_store_mut<R>(
679 f: impl FnOnce(&mut PersistentFocusHandleStore) -> R,
680) -> R {
681 RUNTIME_GLOBALS.with(|globals| f(&mut globals.persistent_focus_handle_store.borrow_mut()))
682}
683
684fn current_persistent_focus_handle_key<K: Hash>(slot_key: K) -> PersistentFocusHandleKey {
685 let Some(instance_key) = current_component_instance_key_from_scope() else {
686 panic!("persistent focus handles must be requested during a component build");
687 };
688 let slot_hash = hash_components(&[&slot_key]);
689 PersistentFocusHandleKey {
690 instance_key,
691 slot_hash,
692 }
693}
694
695pub(crate) fn persistent_focus_target_for_current_instance<K: Hash>(slot_key: K) -> FocusNode {
696 let key = current_persistent_focus_handle_key(slot_key);
697 with_persistent_focus_handle_store_mut(|store| match store.targets.entry(key) {
698 std::collections::hash_map::Entry::Occupied(mut entry) => entry.get_mut().mark_live(),
699 std::collections::hash_map::Entry::Vacant(entry) => {
700 let value = FocusNode::new();
701 entry.insert(PersistentFocusHandleEntry::new(value));
702 value
703 }
704 })
705}
706
707pub(crate) fn persistent_focus_scope_for_current_instance<K: Hash>(slot_key: K) -> FocusScopeNode {
708 let key = current_persistent_focus_handle_key(slot_key);
709 with_persistent_focus_handle_store_mut(|store| match store.scopes.entry(key) {
710 std::collections::hash_map::Entry::Occupied(mut entry) => entry.get_mut().mark_live(),
711 std::collections::hash_map::Entry::Vacant(entry) => {
712 let value = FocusScopeNode::new();
713 entry.insert(PersistentFocusHandleEntry::new(value));
714 value
715 }
716 })
717}
718
719pub(crate) fn persistent_focus_group_for_current_instance<K: Hash>(slot_key: K) -> FocusGroupNode {
720 let key = current_persistent_focus_handle_key(slot_key);
721 with_persistent_focus_handle_store_mut(|store| match store.groups.entry(key) {
722 std::collections::hash_map::Entry::Occupied(mut entry) => entry.get_mut().mark_live(),
723 std::collections::hash_map::Entry::Vacant(entry) => {
724 let value = FocusGroupNode::new();
725 entry.insert(PersistentFocusHandleEntry::new(value));
726 value
727 }
728 })
729}
730
731pub(crate) fn has_persistent_focus_handle(handle_id: FocusHandleId) -> bool {
732 with_persistent_focus_handle_store(|store| store.contains_handle(handle_id))
733}
734
735pub(crate) fn retain_persistent_focus_handles(
736 live_instance_keys: &HashSet<u64>,
737) -> RemovedPersistentFocusHandles {
738 with_persistent_focus_handle_store_mut(|store| store.retain_instance_keys(live_instance_keys))
739}
740
741pub(crate) fn clear_persistent_focus_handles() {
742 with_persistent_focus_handle_store_mut(PersistentFocusHandleStore::clear);
743}
744
745fn take_next_node_instance_logic_id_override() -> Option<u64> {
746 with_execution_context_mut(|context| context.next_node_instance_logic_id_override.take())
747}
748
/// Runs `f` as a replay of a previously recorded component: the group-path
/// stack, instance-key stack, and one-shot instance-logic-id override are
/// temporarily replaced with the recorded values and restored afterwards.
/// Restoration happens in a drop guard, so it also runs if `f` panics.
pub(crate) fn with_replay_scope<R>(
    instance_logic_id: u64,
    group_path: &[u64],
    instance_key_override: Option<u64>,
    f: impl FnOnce() -> R,
) -> R {
    // Saved context state; each field is `take`n on drop so restoration
    // runs at most once per field.
    struct ReplayScopeGuard {
        previous_group_path: Option<Vec<u64>>,
        previous_instance_key_stack: Option<Vec<u64>>,
        previous_instance_logic_id_override: Option<Option<u64>>,
    }

    impl Drop for ReplayScopeGuard {
        fn drop(&mut self) {
            if let Some(previous_group_path) = self.previous_group_path.take() {
                with_execution_context_mut(|context| {
                    context.group_path_stack = previous_group_path;
                });
            }
            if let Some(previous_instance_key_stack) = self.previous_instance_key_stack.take() {
                with_execution_context_mut(|context| {
                    context.instance_key_stack = previous_instance_key_stack;
                });
            }
            if let Some(previous_instance_logic_id_override) =
                self.previous_instance_logic_id_override.take()
            {
                with_execution_context_mut(|context| {
                    context.next_node_instance_logic_id_override =
                        previous_instance_logic_id_override;
                });
            }
        }
    }

    // Swap in the recorded replay context, remembering what it replaced.
    let previous_group_path = with_execution_context_mut(|context| {
        std::mem::replace(&mut context.group_path_stack, group_path.to_vec())
    });
    let previous_instance_key_stack = with_execution_context_mut(|context| {
        // An override, when present, becomes the sole entry of the stack.
        let next_stack = instance_key_override.into_iter().collect::<Vec<_>>();
        std::mem::replace(&mut context.instance_key_stack, next_stack)
    });
    let previous_instance_logic_id_override = with_execution_context_mut(|context| {
        context
            .next_node_instance_logic_id_override
            .replace(instance_logic_id)
    });
    let _guard = ReplayScopeGuard {
        previous_group_path: Some(previous_group_path),
        previous_instance_key_stack: Some(previous_instance_key_stack),
        previous_instance_logic_id_override: Some(previous_instance_logic_id_override),
    };

    f()
}
811
812pub(crate) fn with_build_dirty_instance_keys<R>(
813 dirty_instance_keys: &HashSet<u64>,
814 f: impl FnOnce() -> R,
815) -> R {
816 struct BuildDirtyScopeGuard {
817 popped: bool,
818 }
819
820 impl Drop for BuildDirtyScopeGuard {
821 fn drop(&mut self) {
822 if self.popped {
823 return;
824 }
825 with_execution_context_mut(|context| {
826 let popped = context.build_dirty_instance_keys_stack.pop();
827 debug_assert!(
828 popped.is_some(),
829 "BUILD_DIRTY_INSTANCE_KEYS_STACK underflow: attempted to pop from empty stack"
830 );
831 });
832 self.popped = true;
833 }
834 }
835
836 with_execution_context_mut(|context| {
837 context
838 .build_dirty_instance_keys_stack
839 .push(Arc::new(dirty_instance_keys.clone()));
840 });
841 let _guard = BuildDirtyScopeGuard { popped: false };
842 f()
843}
844
845pub(crate) fn is_instance_key_build_dirty(instance_key: u64) -> bool {
846 with_execution_context(|context| {
847 context
848 .build_dirty_instance_keys_stack
849 .last()
850 .is_some_and(|dirty_instance_keys| dirty_instance_keys.contains(&instance_key))
851 })
852}
853
854fn consume_pending_build_invalidation(instance_key: u64) -> bool {
855 with_build_invalidation_tracker_mut(|tracker| tracker.dirty_instance_keys.remove(&instance_key))
856}
857
858pub(crate) fn record_component_invalidation_for_instance_key(instance_key: u64) {
859 let inserted = with_build_invalidation_tracker_mut(|tracker| {
860 tracker.dirty_instance_keys.insert(instance_key)
861 });
862 if inserted {
863 schedule_runtime_redraw();
864 }
865}
866
867fn track_state_read_dependency(slot: SlotHandle, generation: u64) {
868 if !matches!(current_phase(), Some(RuntimePhase::Build)) {
869 return;
870 }
871 let Some(reader_instance_key) = current_component_instance_key_from_scope() else {
872 return;
873 };
874
875 let key = StateReadDependencyKey { slot, generation };
876 with_state_read_dependency_tracker_mut(|tracker| {
877 if tracker
878 .readers_by_state
879 .get(&key)
880 .is_some_and(|readers| readers.contains(&reader_instance_key))
881 {
882 return;
883 }
884 tracker
885 .readers_by_state
886 .entry(key)
887 .or_default()
888 .insert(reader_instance_key);
889 tracker
890 .states_by_reader
891 .entry(reader_instance_key)
892 .or_default()
893 .insert(key);
894 });
895}
896
897fn state_read_subscribers(slot: SlotHandle, generation: u64) -> Vec<u64> {
898 let key = StateReadDependencyKey { slot, generation };
899 with_state_read_dependency_tracker(|tracker| {
900 tracker
901 .readers_by_state
902 .get(&key)
903 .map(|readers| readers.iter().copied().collect())
904 .unwrap_or_default()
905 })
906}
907
908fn track_focus_dependency(kind: FocusReadDependencyKind) {
909 if !matches!(current_phase(), Some(RuntimePhase::Build)) {
910 return;
911 }
912 let Some(reader_instance_key) = current_component_instance_key_from_scope() else {
913 return;
914 };
915
916 let key = FocusReadDependencyKey { kind };
917 with_focus_read_dependency_tracker_mut(|tracker| {
918 if tracker
919 .readers_by_focus
920 .get(&key)
921 .is_some_and(|readers| readers.contains(&reader_instance_key))
922 {
923 return;
924 }
925 tracker
926 .readers_by_focus
927 .entry(key)
928 .or_default()
929 .insert(reader_instance_key);
930 tracker
931 .focus_by_reader
932 .entry(reader_instance_key)
933 .or_default()
934 .insert(key);
935 });
936}
937
938fn focus_read_subscribers_by_kind(kind: FocusReadDependencyKind) -> Vec<u64> {
939 let key = FocusReadDependencyKey { kind };
940 with_focus_read_dependency_tracker(|tracker| {
941 tracker
942 .readers_by_focus
943 .get(&key)
944 .map(|readers| readers.iter().copied().collect())
945 .unwrap_or_default()
946 })
947}
948
949pub(crate) fn track_focus_read_dependency(handle_id: FocusHandleId) {
950 track_focus_dependency(FocusReadDependencyKind::Handle(handle_id));
951}
952
953pub(crate) fn track_focus_requester_read_dependency(requester_id: FocusRequesterId) {
954 track_focus_dependency(FocusReadDependencyKind::Requester(requester_id));
955}
956
957pub(crate) fn focus_read_subscribers(handle_id: FocusHandleId) -> Vec<u64> {
958 focus_read_subscribers_by_kind(FocusReadDependencyKind::Handle(handle_id))
959}
960
961pub(crate) fn focus_requester_read_subscribers(requester_id: FocusRequesterId) -> Vec<u64> {
962 focus_read_subscribers_by_kind(FocusReadDependencyKind::Requester(requester_id))
963}
964
965pub(crate) fn track_render_slot_read_dependency(handle: FunctorHandle) {
966 if !matches!(current_phase(), Some(RuntimePhase::Build)) {
967 return;
968 }
969 let Some(reader_instance_key) = current_component_instance_key_from_scope() else {
970 return;
971 };
972
973 with_render_slot_read_dependency_tracker_mut(|tracker| {
974 if tracker
975 .readers_by_slot
976 .get(&handle)
977 .is_some_and(|readers| readers.contains(&reader_instance_key))
978 {
979 return;
980 }
981 tracker
982 .readers_by_slot
983 .entry(handle)
984 .or_default()
985 .insert(reader_instance_key);
986 tracker
987 .slots_by_reader
988 .entry(reader_instance_key)
989 .or_default()
990 .insert(handle);
991 });
992}
993
994fn render_slot_read_subscribers(handle: FunctorHandle) -> Vec<u64> {
995 with_render_slot_read_dependency_tracker(|tracker| {
996 tracker
997 .readers_by_slot
998 .get(&handle)
999 .map(|readers| readers.iter().copied().collect())
1000 .unwrap_or_default()
1001 })
1002}
1003
1004pub(crate) fn remove_state_read_dependencies(instance_keys: &HashSet<u64>) {
1005 if instance_keys.is_empty() {
1006 return;
1007 }
1008 with_state_read_dependency_tracker_mut(|tracker| {
1009 for instance_key in instance_keys {
1010 let Some(state_keys) = tracker.states_by_reader.remove(instance_key) else {
1011 continue;
1012 };
1013 for state_key in state_keys {
1014 let mut remove_entry = false;
1015 if let Some(readers) = tracker.readers_by_state.get_mut(&state_key) {
1016 readers.remove(instance_key);
1017 remove_entry = readers.is_empty();
1018 }
1019 if remove_entry {
1020 tracker.readers_by_state.remove(&state_key);
1021 }
1022 }
1023 }
1024 });
1025}
1026
1027pub(crate) fn remove_focus_read_dependencies(instance_keys: &HashSet<u64>) {
1028 if instance_keys.is_empty() {
1029 return;
1030 }
1031 with_focus_read_dependency_tracker_mut(|tracker| {
1032 for instance_key in instance_keys {
1033 let Some(focus_keys) = tracker.focus_by_reader.remove(instance_key) else {
1034 continue;
1035 };
1036 for focus_key in focus_keys {
1037 let mut remove_entry = false;
1038 if let Some(readers) = tracker.readers_by_focus.get_mut(&focus_key) {
1039 readers.remove(instance_key);
1040 remove_entry = readers.is_empty();
1041 }
1042 if remove_entry {
1043 tracker.readers_by_focus.remove(&focus_key);
1044 }
1045 }
1046 }
1047 });
1048}
1049
1050pub(crate) fn remove_render_slot_read_dependencies(instance_keys: &HashSet<u64>) {
1051 if instance_keys.is_empty() {
1052 return;
1053 }
1054 with_render_slot_read_dependency_tracker_mut(|tracker| {
1055 for instance_key in instance_keys {
1056 let Some(slot_keys) = tracker.slots_by_reader.remove(instance_key) else {
1057 continue;
1058 };
1059 for slot_key in slot_keys {
1060 let mut remove_entry = false;
1061 if let Some(readers) = tracker.readers_by_slot.get_mut(&slot_key) {
1062 readers.remove(instance_key);
1063 remove_entry = readers.is_empty();
1064 }
1065 if remove_entry {
1066 tracker.readers_by_slot.remove(&slot_key);
1067 }
1068 }
1069 }
1070 });
1071}
1072
1073pub(crate) fn reset_state_read_dependencies() {
1074 with_state_read_dependency_tracker_mut(|tracker| {
1075 *tracker = StateReadDependencyTracker::default();
1076 });
1077}
1078
1079pub(crate) fn reset_focus_read_dependencies() {
1080 with_focus_read_dependency_tracker_mut(|tracker| {
1081 *tracker = FocusReadDependencyTracker::default();
1082 });
1083}
1084
1085pub(crate) fn reset_render_slot_read_dependencies() {
1086 with_render_slot_read_dependency_tracker_mut(|tracker| {
1087 *tracker = RenderSlotReadDependencyTracker::default();
1088 });
1089}
1090
1091pub(crate) fn take_build_invalidations() -> BuildInvalidationSet {
1092 with_build_invalidation_tracker_mut(|tracker| BuildInvalidationSet {
1093 dirty_instance_keys: std::mem::take(&mut tracker.dirty_instance_keys),
1094 })
1095}
1096
1097pub(crate) fn reset_build_invalidations() {
1098 with_build_invalidation_tracker_mut(|tracker| {
1099 *tracker = BuildInvalidationTracker::default();
1100 });
1101}
1102
1103pub(crate) fn remove_build_invalidations(instance_keys: &HashSet<u64>) {
1104 if instance_keys.is_empty() {
1105 return;
1106 }
1107 with_build_invalidation_tracker_mut(|tracker| {
1108 tracker
1109 .dirty_instance_keys
1110 .retain(|instance_key| !instance_keys.contains(instance_key));
1111 });
1112}
1113
1114pub(crate) fn has_pending_build_invalidations() -> bool {
1115 with_build_invalidation_tracker(|tracker| !tracker.dirty_instance_keys.is_empty())
1116}
1117
/// Per-thread frame-clock bookkeeping: timestamps for the current and
/// previous frames plus the registered per-frame callbacks.
#[derive(Default)]
struct FrameClockTracker {
    /// Timestamp of the very first frame; set lazily by `begin_frame_clock`.
    frame_origin: Option<Instant>,
    /// Timestamp of the frame currently in flight.
    current_frame_time: Option<Instant>,
    /// Nanoseconds elapsed from `frame_origin` to the current frame,
    /// saturated to `u64::MAX`.
    current_frame_nanos: u64,
    /// Timestamp of the frame before the current one.
    previous_frame_time: Option<Instant>,
    /// Elapsed time between the previous and current frames (zero on the
    /// first frame).
    frame_delta: Duration,
    /// Registered frame callbacks, keyed by component identity + call order.
    receivers: HashMap<FrameNanosReceiverKey, FrameNanosReceiver>,
}
1127
/// Stable identity for a frame-nanos callback: the owning component's logic
/// id plus a hash derived from group path and call order, so the same call
/// site re-registers under the same key across rebuilds.
#[derive(Hash, Eq, PartialEq, Clone, Copy)]
struct FrameNanosReceiverKey {
    instance_logic_id: u64,
    receiver_hash: u64,
}
1133
/// Return value of a frame-nanos callback: whether it should keep receiving
/// frames or be unregistered after the current tick.
#[derive(Clone, Copy, Debug, Eq, PartialEq)]
pub enum FrameNanosControl {
    /// Keep the callback registered for the next frame.
    Continue,
    /// Unregister the callback after this tick.
    Stop,
}
1142
/// Boxed per-frame callback; receives the current frame's nanosecond counter
/// and is dropped once it returns [`FrameNanosControl::Stop`].
type FrameNanosReceiverCallback = Box<dyn FnMut(u64) -> FrameNanosControl + Send + 'static>;
1144
/// A registered frame callback together with the component instance that owns
/// it (used to unregister callbacks when that instance is removed).
struct FrameNanosReceiver {
    owner_instance_key: u64,
    callback: FrameNanosReceiverCallback,
}
1149
1150fn with_frame_clock_tracker<R>(f: impl FnOnce(&FrameClockTracker) -> R) -> R {
1151 RUNTIME_GLOBALS.with(|globals| f(&globals.frame_clock_tracker.borrow()))
1152}
1153
1154fn with_frame_clock_tracker_mut<R>(f: impl FnOnce(&mut FrameClockTracker) -> R) -> R {
1155 RUNTIME_GLOBALS.with(|globals| f(&mut globals.frame_clock_tracker.borrow_mut()))
1156}
1157
1158pub(crate) fn begin_frame_clock(now: Instant) {
1159 with_frame_clock_tracker_mut(|tracker| {
1160 let frame_origin = *tracker.frame_origin.get_or_insert(now);
1161 tracker.previous_frame_time = tracker.current_frame_time;
1162 tracker.current_frame_time = Some(now);
1163 tracker.current_frame_nanos = now
1164 .saturating_duration_since(frame_origin)
1165 .as_nanos()
1166 .min(u64::MAX as u128) as u64;
1167 tracker.frame_delta = tracker
1168 .previous_frame_time
1169 .map(|previous| now.saturating_duration_since(previous))
1170 .unwrap_or_default();
1171 });
1172}
1173
1174pub(crate) fn reset_frame_clock() {
1175 with_frame_clock_tracker_mut(|tracker| *tracker = FrameClockTracker::default());
1176}
1177
1178pub(crate) fn has_pending_frame_nanos_receivers() -> bool {
1179 with_frame_clock_tracker(|tracker| !tracker.receivers.is_empty())
1180}
1181
1182pub(crate) fn tick_frame_nanos_receivers() {
1183 with_frame_clock_tracker_mut(|tracker| {
1184 let frame_nanos = tracker.current_frame_nanos;
1185 tracker.receivers.retain(|_, receiver| {
1186 matches!(
1187 (receiver.callback)(frame_nanos),
1188 FrameNanosControl::Continue
1189 )
1190 });
1191 });
1192}
1193
1194pub(crate) fn remove_frame_nanos_receivers(instance_keys: &HashSet<u64>) {
1195 if instance_keys.is_empty() {
1196 return;
1197 }
1198 with_frame_clock_tracker_mut(|tracker| {
1199 tracker
1200 .receivers
1201 .retain(|_, receiver| !instance_keys.contains(&receiver.owner_instance_key));
1202 });
1203}
1204
1205pub(crate) fn clear_frame_nanos_receivers() {
1206 with_frame_clock_tracker_mut(|tracker| tracker.receivers.clear());
1207}
1208
1209pub fn current_frame_time() -> Option<Instant> {
1213 with_frame_clock_tracker(|tracker| tracker.current_frame_time)
1214}
1215
1216pub fn current_frame_nanos() -> u64 {
1218 with_frame_clock_tracker(|tracker| tracker.current_frame_nanos)
1219}
1220
1221pub fn frame_delta() -> Duration {
1223 with_frame_clock_tracker(|tracker| tracker.frame_delta)
1224}
1225
1226fn ensure_frame_receive_phase() {
1227 match current_phase() {
1228 Some(RuntimePhase::Build) => {}
1229 Some(RuntimePhase::Measure) => {
1230 panic!("receive_frame_nanos must not be called inside measure")
1231 }
1232 Some(RuntimePhase::Input) => {
1233 panic!("receive_frame_nanos must be called inside a tessera component build")
1234 }
1235 None => panic!("receive_frame_nanos must be called inside a tessera component build"),
1236 }
1237}
1238
1239fn compute_frame_nanos_receiver_key() -> FrameNanosReceiverKey {
1240 let instance_logic_id = current_instance_logic_id();
1241 let group_path_hash = current_group_path_hash();
1242
1243 let call_counter = next_order_counter(
1244 OrderCounterKind::FrameReceiver,
1245 "ORDER_FRAME_STACK is empty; receive_frame_nanos must be called inside a component",
1246 );
1247
1248 let receiver_hash = hash_components(&[&group_path_hash, &call_counter]);
1249 FrameNanosReceiverKey {
1250 instance_logic_id,
1251 receiver_hash,
1252 }
1253}
1254
1255pub fn receive_frame_nanos<F>(callback: F)
1261where
1262 F: FnMut(u64) -> FrameNanosControl + Send + 'static,
1263{
1264 ensure_frame_receive_phase();
1265 let frame_nanos_state = remember(current_frame_nanos);
1266 let _ = frame_nanos_state.get();
1267
1268 let owner_instance_key = current_component_instance_key_from_scope()
1269 .unwrap_or_else(|| panic!("receive_frame_nanos requires an active component node context"));
1270 let key = compute_frame_nanos_receiver_key();
1271
1272 with_frame_clock_tracker_mut(|tracker| {
1273 tracker.receivers.entry(key).or_insert_with(|| {
1274 let mut callback = callback;
1275 FrameNanosReceiver {
1276 owner_instance_key,
1277 callback: Box::new(move |frame_nanos| {
1278 if !frame_nanos_state.is_alive() {
1279 return FrameNanosControl::Stop;
1280 }
1281 frame_nanos_state.set(frame_nanos);
1282 callback(frame_nanos)
1283 }),
1284 }
1285 });
1286 });
1287}
1288
1289pub(crate) fn drop_slots_for_instance_logic_ids(instance_logic_ids: &HashSet<u64>) {
1290 if instance_logic_ids.is_empty() {
1291 return;
1292 }
1293
1294 with_slot_table_mut(|table| {
1295 let mut freed: Vec<(SlotHandle, SlotKey)> = Vec::new();
1296 for (slot, entry) in table.entries.iter() {
1297 if !instance_logic_ids.contains(&entry.key.instance_logic_id) {
1298 continue;
1299 }
1300 if entry.retained {
1301 continue;
1302 }
1303 freed.push((slot, entry.key));
1304 }
1305 for (slot, key) in freed {
1306 table.entries.remove(slot);
1307 table.key_to_slot.remove(&key);
1308 }
1309 for instance_logic_id in instance_logic_ids {
1310 table.cursors_by_instance_logic_id.remove(instance_logic_id);
1311 }
1312 });
1313}
1314
1315fn with_layout_dirty_tracker_mut<R>(f: impl FnOnce(&mut LayoutDirtyTracker) -> R) -> R {
1316 RUNTIME_GLOBALS.with(|globals| f(&mut globals.layout_dirty_tracker.borrow_mut()))
1317}
1318
/// Compares a node's layout policy against the one recorded for it last
/// frame and queues the node as measure- or placement-dirty accordingly.
/// Only runs during the build phase. A measure change supersedes a placement
/// change (the node is put in exactly one pending set, measure preferred).
fn record_layout_policy_dirty(instance_key: u64, layout_policy: &dyn LayoutPolicyDyn) {
    if current_phase() != Some(RuntimePhase::Build) {
        return;
    }
    with_layout_dirty_tracker_mut(|tracker| {
        // Removing the previous entry lets us reuse its box when nothing
        // changed, avoiding a clone of the policy.
        let (measure_changed, placement_changed, next_layout_policy) = match tracker
            .previous_layout_policies_by_node
            .remove(&instance_key)
        {
            Some(previous) => {
                let measure_changed = !previous.dyn_measure_eq(layout_policy);
                let placement_changed = !previous.dyn_placement_eq(layout_policy);
                if !measure_changed && !placement_changed {
                    // Unchanged: carry the previous box forward.
                    (false, false, previous)
                } else {
                    (
                        measure_changed,
                        placement_changed,
                        layout_policy.clone_box(),
                    )
                }
            }
            // No previous record: treat the node as fully dirty.
            None => (true, true, layout_policy.clone_box()),
        };
        if measure_changed {
            tracker
                .pending_measure_self_dirty_nodes
                .insert(instance_key);
        } else if placement_changed {
            // Placement-only change; measure dirtiness takes precedence above.
            tracker
                .pending_placement_self_dirty_nodes
                .insert(instance_key);
        }
        tracker
            .frame_layout_policies_by_node
            .insert(instance_key, next_layout_policy);
    });
}
1357
1358pub(crate) fn begin_frame_layout_dirty_tracking() {
1359 with_layout_dirty_tracker_mut(|tracker| {
1360 tracker.frame_layout_policies_by_node.clear();
1361 tracker.pending_measure_self_dirty_nodes.clear();
1362 tracker.pending_placement_self_dirty_nodes.clear();
1363 });
1364}
1365
1366pub(crate) fn finalize_frame_layout_dirty_tracking() {
1367 with_layout_dirty_tracker_mut(|tracker| {
1368 tracker.ready_measure_self_dirty_nodes =
1369 std::mem::take(&mut tracker.pending_measure_self_dirty_nodes);
1370 tracker.ready_placement_self_dirty_nodes =
1371 std::mem::take(&mut tracker.pending_placement_self_dirty_nodes);
1372 tracker.previous_layout_policies_by_node =
1373 std::mem::take(&mut tracker.frame_layout_policies_by_node);
1374 });
1375}
1376
1377pub(crate) fn take_layout_dirty_nodes() -> LayoutDirtyNodes {
1378 with_layout_dirty_tracker_mut(|tracker| LayoutDirtyNodes {
1379 measure_self_nodes: std::mem::take(&mut tracker.ready_measure_self_dirty_nodes),
1380 placement_self_nodes: std::mem::take(&mut tracker.ready_placement_self_dirty_nodes),
1381 })
1382}
1383
1384pub(crate) fn reset_layout_dirty_tracking() {
1385 with_layout_dirty_tracker_mut(|tracker| *tracker = LayoutDirtyTracker::default());
1386}
1387
1388fn record_component_replay_snapshot(runtime: &TesseraRuntime, node_id: NodeId) {
1389 let Some(node) = runtime.component_tree.get(node_id) else {
1390 return;
1391 };
1392 let Some(replay) = node.replay.clone() else {
1393 return;
1394 };
1395
1396 let tree = runtime.component_tree.tree();
1397 let parent_instance_key = tree
1398 .get(node_id)
1399 .and_then(|n| n.parent())
1400 .and_then(|parent_id| tree.get(parent_id))
1401 .map(|parent| parent.get().instance_key);
1402
1403 let snapshot = ReplayNodeSnapshot {
1404 instance_key: node.instance_key,
1405 parent_instance_key,
1406 instance_logic_id: node.instance_logic_id,
1407 group_path: current_group_path(),
1408 instance_key_override: current_instance_key_override(),
1409 fn_name: node.fn_name.clone(),
1410 replay,
1411 };
1412 with_component_replay_tracker_mut(|tracker| {
1413 tracker
1414 .current_nodes
1415 .insert(snapshot.instance_key, snapshot);
1416 });
1417}
1418
1419pub(crate) fn reconcile_layout_structure(
1420 current_children_by_node: &HashMap<u64, Vec<u64>>,
1421) -> StructureReconcileResult {
1422 with_layout_dirty_tracker_mut(|tracker| {
1423 let previous_children_by_node = &tracker.previous_children_by_node;
1424
1425 let mut changed_nodes = HashSet::default();
1426 let mut removed_nodes = HashSet::default();
1427
1428 for (node, current_children) in current_children_by_node {
1429 match previous_children_by_node.get(node) {
1430 Some(previous_children) if previous_children == current_children => {}
1431 _ => {
1432 changed_nodes.insert(*node);
1433 }
1434 }
1435 }
1436
1437 for node in previous_children_by_node.keys().copied() {
1438 if !current_children_by_node.contains_key(&node) {
1439 changed_nodes.insert(node);
1440 removed_nodes.insert(node);
1441 }
1442 }
1443
1444 tracker.previous_children_by_node = current_children_by_node.clone();
1445 StructureReconcileResult {
1446 changed_nodes,
1447 removed_nodes,
1448 }
1449 })
1450}
1451
/// A lightweight, copyable handle to a value stored in the thread-local slot
/// table. The handle carries the slot plus the generation at which it was
/// created; accesses panic (or report not-alive) once the slot is reused for
/// a newer generation.
pub struct State<T> {
    // Slot-table location of the stored value.
    slot: SlotHandle,
    // Generation stamp used to detect stale handles after slot reuse.
    generation: u64,
    // The handle stores no `T` itself; `PhantomData` keeps the type parameter.
    _marker: PhantomData<T>,
}
1478
// Manual impl (rather than derive) so `State<T>` is `Copy` even when `T` is
// not — the handle only stores a slot key and a generation.
impl<T> Copy for State<T> {}
1480
// Manual impl for the same reason as `Copy`: no `T: Clone` bound is needed.
impl<T> Clone for State<T> {
    fn clone(&self) -> Self {
        *self
    }
}
1486
1487impl<T> PartialEq for State<T> {
1488 fn eq(&self, other: &Self) -> bool {
1489 self.slot == other.slot && self.generation == other.generation
1490 }
1491}
1492
// Equality is total for (slot, generation) pairs.
impl<T> Eq for State<T> {}
1494
1495impl<T> Hash for State<T> {
1496 fn hash<H: Hasher>(&self, state: &mut H) {
1497 self.slot.hash(state);
1498 self.generation.hash(state);
1499 }
1500}
1501
1502impl<T> State<T> {
1503 fn new(slot: SlotHandle, generation: u64) -> Self {
1504 Self {
1505 slot,
1506 generation,
1507 _marker: PhantomData,
1508 }
1509 }
1510}
1511
impl<T> State<T>
where
    T: Send + Sync + 'static,
{
    /// Returns `true` if the slot still exists, matches this handle's
    /// generation and type, and still holds a value. Never panics.
    fn is_alive(&self) -> bool {
        with_slot_table(|table| {
            let Some(entry) = table.entries.get(self.slot) else {
                return false;
            };

            entry.generation == self.generation
                && entry.key.type_id == TypeId::of::<T>()
                && entry.value.is_some()
        })
    }

    /// Loads the type-erased value stored in this handle's slot.
    ///
    /// Panics if the slot was freed, if the handle is stale (generation
    /// mismatch), if the stored type is not `T`, or if the slot was cleared.
    fn load_entry(&self) -> Arc<dyn Any + Send + Sync> {
        with_slot_table(|table| {
            let entry = table
                .entries
                .get(self.slot)
                .unwrap_or_else(|| panic!("State points to freed slot: {:?}", self.slot));

            if entry.generation != self.generation {
                panic!(
                    "State is stale (slot {:?}, generation {}, current generation {})",
                    self.slot, self.generation, entry.generation
                );
            }

            if entry.key.type_id != TypeId::of::<T>() {
                panic!(
                    "State type mismatch for slot {:?}: expected {}, stored {:?}",
                    self.slot,
                    std::any::type_name::<T>(),
                    entry.key.type_id
                );
            }

            entry
                .value
                .as_ref()
                .unwrap_or_else(|| panic!("State slot {:?} has been cleared", self.slot))
                .clone()
        })
    }

    /// Downcasts the stored value to its concrete `RwLock<T>`.
    /// Panics on downcast failure (should be unreachable after the type
    /// check in `load_entry`).
    fn load_lock(&self) -> Arc<RwLock<T>> {
        self.load_entry()
            .downcast::<RwLock<T>>()
            .unwrap_or_else(|_| panic!("State slot {:?} downcast failed", self.slot))
    }

    /// Runs `f` with shared access to the value, recording a read dependency
    /// so the reading component is invalidated when the value changes.
    pub fn with<R>(&self, f: impl FnOnce(&T) -> R) -> R {
        track_state_read_dependency(self.slot, self.generation);
        let lock = self.load_lock();
        let guard = lock.read();
        f(&guard)
    }

    /// Runs `f` with exclusive access to the value, then invalidates every
    /// component instance that previously read this state.
    #[track_caller]
    pub fn with_mut<R>(&self, f: impl FnOnce(&mut T) -> R) -> R {
        let lock = self.load_lock();

        // Drop the write guard before notifying subscribers so invalidation
        // never runs while the lock is held.
        let result = {
            let mut guard = lock.write();
            f(&mut guard)
        };

        let subscribers = state_read_subscribers(self.slot, self.generation);
        for instance_key in subscribers {
            record_component_invalidation_for_instance_key(instance_key);
        }
        result
    }

    /// Returns a clone of the value (records a read dependency via `with`).
    pub fn get(&self) -> T
    where
        T: Clone,
    {
        self.with(Clone::clone)
    }

    /// Replaces the value, invalidating all readers (via `with_mut`).
    #[track_caller]
    pub fn set(&self, value: T) {
        self.with_mut(|slot| *slot = value);
    }
}
1604
/// All thread-local runtime state, grouped in one struct so a single
/// `thread_local!` owns it. Each field sits in its own `RefCell` so
/// independent trackers can be borrowed without conflicting.
struct RuntimeGlobals {
    slot_table: RefCell<SlotTable>,
    component_replay_tracker: RefCell<ComponentReplayTracker>,
    build_invalidation_tracker: RefCell<BuildInvalidationTracker>,
    state_read_dependency_tracker: RefCell<StateReadDependencyTracker>,
    focus_read_dependency_tracker: RefCell<FocusReadDependencyTracker>,
    render_slot_read_dependency_tracker: RefCell<RenderSlotReadDependencyTracker>,
    redraw_waker: RefCell<Option<RedrawWaker>>,
    persistent_focus_handle_store: RefCell<PersistentFocusHandleStore>,
    frame_clock_tracker: RefCell<FrameClockTracker>,
    layout_dirty_tracker: RefCell<LayoutDirtyTracker>,
    runtime: RefCell<TesseraRuntime>,
}
1618
1619impl RuntimeGlobals {
1620 fn new() -> Self {
1621 Self {
1622 slot_table: RefCell::new(SlotTable::default()),
1623 component_replay_tracker: RefCell::new(ComponentReplayTracker::default()),
1624 build_invalidation_tracker: RefCell::new(BuildInvalidationTracker::default()),
1625 state_read_dependency_tracker: RefCell::new(StateReadDependencyTracker::default()),
1626 focus_read_dependency_tracker: RefCell::new(FocusReadDependencyTracker::default()),
1627 render_slot_read_dependency_tracker: RefCell::new(
1628 RenderSlotReadDependencyTracker::default(),
1629 ),
1630 redraw_waker: RefCell::new(None),
1631 persistent_focus_handle_store: RefCell::new(PersistentFocusHandleStore::default()),
1632 frame_clock_tracker: RefCell::new(FrameClockTracker::default()),
1633 layout_dirty_tracker: RefCell::new(LayoutDirtyTracker::default()),
1634 runtime: RefCell::new(TesseraRuntime::default()),
1635 }
1636 }
1637}
1638
thread_local! {
    // One set of runtime globals per thread; all access goes through the
    // `with_*` helper functions, which borrow the individual RefCells.
    static RUNTIME_GLOBALS: RuntimeGlobals = RuntimeGlobals::new();
}
1642
/// The per-thread UI runtime: owns the component tree plus a few pieces of
/// window-related state, accessed via `TesseraRuntime::with(_mut)`.
#[derive(Default)]
pub struct TesseraRuntime {
    /// The tree of component nodes built each frame.
    pub component_tree: ComponentTree,
    /// Last known window size in physical pixels — assumed; confirm against
    /// the windowing integration.
    pub(crate) window_size: [u32; 2],
    /// Cursor icon requested by components this frame, if any.
    pub cursor_icon_request: Option<winit::window::CursorIcon>,
    /// Whether the window is currently minimized.
    pub(crate) window_minimized: bool,
}
1655
impl TesseraRuntime {
    /// Runs `f` with shared access to the thread-local runtime.
    pub fn with<F, R>(f: F) -> R
    where
        F: FnOnce(&Self) -> R,
    {
        RUNTIME_GLOBALS.with(|globals| f(&globals.runtime.borrow()))
    }

    /// Runs `f` with exclusive access to the thread-local runtime.
    pub fn with_mut<F, R>(f: F) -> R
    where
        F: FnOnce(&mut Self) -> R,
    {
        RUNTIME_GLOBALS.with(|globals| f(&mut globals.runtime.borrow_mut()))
    }

    /// Last recorded window size.
    pub fn window_size(&self) -> [u32; 2] {
        self.window_size
    }

    /// Stamps the node currently being built with its computed identity.
    /// Debug-asserts (and is a no-op in release) when no node is current.
    pub(crate) fn set_current_node_identity(&mut self, instance_key: u64, instance_logic_id: u64) {
        if let Some(node) = self.component_tree.current_node_mut() {
            node.instance_key = instance_key;
            node.instance_logic_id = instance_logic_id;
        } else {
            debug_assert!(
                false,
                "set_current_node_identity must be called inside a component build"
            );
        }
    }

    /// Records replay data (runner + props) on the current node and attempts
    /// to skip rebuilding its subtree.
    ///
    /// Returns `true` when the subtree was reused: props are unchanged from
    /// the previous frame, the node is not build-dirty, no pending
    /// invalidation was consumed, and the tree accepted the reuse.
    /// Otherwise stores fresh or carried-over replay data and returns
    /// `false`.
    pub(crate) fn set_current_component_replay<P>(
        &mut self,
        runner: Arc<dyn ErasedComponentRunner>,
        props: &P,
    ) -> bool
    where
        P: Prop,
    {
        let current_node_info = self
            .component_tree
            .current_node()
            .map(|node| (node.instance_key, node.instance_logic_id));
        // Reuse last frame's replay data only when the logic id matches and
        // the props compare equal.
        let previous_replay = current_node_info.and_then(|(instance_key, instance_logic_id)| {
            with_component_replay_tracker(|tracker| {
                let previous = tracker.previous_nodes.get(&instance_key)?;
                if previous.instance_logic_id != instance_logic_id {
                    return None;
                }
                if previous.replay.props.equals(props) {
                    Some(previous.replay.clone())
                } else {
                    None
                }
            })
        });

        // Consuming the pending invalidation here means it is spent even if
        // the reuse below is not taken.
        let pending_dirty = current_node_info
            .map(|(instance_key, _)| consume_pending_build_invalidation(instance_key))
            .unwrap_or(false);

        if let Some((instance_key, instance_logic_id)) = current_node_info
            && let Some(replay) = previous_replay.clone()
            && !is_instance_key_build_dirty(instance_key)
            && !pending_dirty
            && self
                .component_tree
                .try_reuse_current_subtree(instance_key, instance_logic_id)
        {
            if let Some(node) = self.component_tree.current_node_mut() {
                node.replay = Some(replay);
                node.props_unchanged_from_previous = true;
            }
            return true;
        }

        if let Some(node) = self.component_tree.current_node_mut() {
            if let Some(replay) = previous_replay {
                // Props unchanged but subtree not reusable: keep old replay.
                node.replay = Some(replay);
                node.props_unchanged_from_previous = true;
            } else {
                node.replay = Some(ComponentReplayData::new(runner, props));
                node.props_unchanged_from_previous = false;
            }
        } else {
            debug_assert!(
                false,
                "set_current_component_replay must be called inside a component build"
            );
            return false;
        }
        if let Some(node_id) = current_node_id() {
            record_component_replay_snapshot(self, node_id);
        }
        false
    }

    /// Installs the layout policy on the current node.
    pub(crate) fn set_current_layout_policy_boxed(&mut self, policy: Box<dyn LayoutPolicyDyn>) {
        if let Some(node) = self.component_tree.current_node_mut() {
            node.layout_policy = policy;
        } else {
            debug_assert!(
                false,
                "set_current_layout_policy_boxed must be called inside a component build"
            );
        }
    }

    /// Installs the render policy on the current node.
    pub(crate) fn set_current_render_policy_boxed(&mut self, policy: Box<dyn RenderPolicyDyn>) {
        if let Some(node) = self.component_tree.current_node_mut() {
            node.render_policy = policy;
        } else {
            debug_assert!(
                false,
                "set_current_render_policy_boxed must be called inside a component build"
            );
        }
    }

    /// Chains `modifier` after the current node's existing modifier.
    pub(crate) fn append_current_modifier(&mut self, modifier: Modifier) {
        if let Some(node) = self.component_tree.current_node_mut() {
            node.modifier = node.modifier.clone().then(modifier);
        } else {
            debug_assert!(
                false,
                "append_current_modifier must be called inside a component build"
            );
        }
    }

    /// Sets (or clears) the accessibility node on the current node's metadata.
    pub(crate) fn set_current_accessibility(&mut self, accessibility: Option<AccessibilityNode>) {
        if let Some(node_id) = current_node_id()
            && let Some(mut metadata) = self.component_tree.metadatas().get_mut(&node_id)
        {
            metadata.accessibility = accessibility;
        } else {
            debug_assert!(
                false,
                "set_current_accessibility must be called inside a component build"
            );
        }
    }

    /// Sets (or clears) the accessibility action handler on the current
    /// node's metadata.
    pub(crate) fn set_current_accessibility_action_handler(
        &mut self,
        handler: Option<AccessibilityActionHandler>,
    ) {
        if let Some(node_id) = current_node_id()
            && let Some(mut metadata) = self.component_tree.metadatas().get_mut(&node_id)
        {
            metadata.accessibility_action_handler = handler;
        } else {
            debug_assert!(
                false,
                "set_current_accessibility_action_handler must be called inside a component build"
            );
        }
    }

    /// Binds a focus requester to the current node.
    pub(crate) fn bind_current_focus_requester(&mut self, requester: FocusRequester) {
        if let Some(current) = self.component_tree.current_node_mut() {
            current.focus_requester_binding = Some(requester);
        } else {
            debug_assert!(
                false,
                "bind_current_focus_requester must be called inside a component build"
            );
        }
    }

    /// Registers the current node as a focus target unless it already has a
    /// focus registration (first registration wins).
    pub(crate) fn ensure_current_focus_target(&mut self, node: FocusNode) {
        if let Some(current) = self.component_tree.current_node_mut() {
            if current.focus_registration.is_none() {
                current.focus_registration = Some(FocusRegistration::target(node));
            }
        } else {
            debug_assert!(
                false,
                "ensure_current_focus_target must be called inside a component build"
            );
        }
    }

    /// Registers the current node as a focus scope unless it already has a
    /// focus registration (first registration wins).
    pub(crate) fn ensure_current_focus_scope(&mut self, scope: FocusScopeNode) {
        if let Some(current) = self.component_tree.current_node_mut() {
            if current.focus_registration.is_none() {
                current.focus_registration = Some(FocusRegistration::scope(scope));
            }
        } else {
            debug_assert!(
                false,
                "ensure_current_focus_scope must be called inside a component build"
            );
        }
    }

    /// Registers the current node as a focus group unless it already has a
    /// focus registration (first registration wins).
    pub(crate) fn ensure_current_focus_group(&mut self, group: FocusGroupNode) {
        if let Some(current) = self.component_tree.current_node_mut() {
            if current.focus_registration.is_none() {
                current.focus_registration = Some(FocusRegistration::group(group));
            }
        } else {
            debug_assert!(
                false,
                "ensure_current_focus_group must be called inside a component build"
            );
        }
    }

    /// The current node's focus handle, if it is registered as a target.
    pub(crate) fn current_focus_target_handle(&self) -> Option<FocusNode> {
        let registration = self.component_tree.current_node()?.focus_registration?;
        (registration.kind == FocusRegistrationKind::Target)
            .then(|| FocusNode::from_handle_id(registration.id))
    }

    /// The current node's focus handle, if it is registered as a scope.
    pub(crate) fn current_focus_scope_handle(&self) -> Option<FocusScopeNode> {
        let registration = self.component_tree.current_node()?.focus_registration?;
        (registration.kind == FocusRegistrationKind::Scope)
            .then(|| FocusScopeNode::from_handle_id(registration.id))
    }

    /// The current node's focus handle, if it is registered as a group.
    pub(crate) fn current_focus_group_handle(&self) -> Option<FocusGroupNode> {
        let registration = self.component_tree.current_node()?.focus_registration?;
        (registration.kind == FocusRegistrationKind::Group)
            .then(|| FocusGroupNode::from_handle_id(registration.id))
    }

    /// Overwrites the focus properties of the current node's existing focus
    /// registration; debug-asserts when no registration exists yet.
    pub(crate) fn set_current_focus_properties(&mut self, properties: FocusProperties) {
        if let Some(current) = self.component_tree.current_node_mut() {
            if let Some(registration) = current.focus_registration.as_mut() {
                registration.properties = properties;
            } else {
                debug_assert!(
                    false,
                    "set_current_focus_properties requires focus_target, focus_scope, or focus_group first"
                );
            }
        } else {
            debug_assert!(
                false,
                "set_current_focus_properties must be called inside a component build"
            );
        }
    }

    /// Sets the traversal policy on the current node; only valid on scope or
    /// group registrations (debug-asserts otherwise).
    pub(crate) fn set_current_focus_traversal_policy(&mut self, policy: FocusTraversalPolicy) {
        if let Some(current) = self.component_tree.current_node_mut() {
            if current.focus_registration.is_some_and(|registration| {
                matches!(
                    registration.kind,
                    FocusRegistrationKind::Scope | FocusRegistrationKind::Group
                )
            }) {
                current.focus_traversal_policy = Some(policy);
            } else {
                debug_assert!(
                    false,
                    "set_current_focus_traversal_policy requires focus_scope or focus_group first"
                );
            }
        } else {
            debug_assert!(
                false,
                "set_current_focus_traversal_policy must be called inside a component build"
            );
        }
    }

    /// Installs the focus-changed callback on the current node.
    pub(crate) fn set_current_focus_changed_handler(&mut self, handler: CallbackWith<FocusState>) {
        if let Some(current) = self.component_tree.current_node_mut() {
            current.focus_changed_handler = Some(handler);
        } else {
            debug_assert!(
                false,
                "set_current_focus_changed_handler must be called inside a component build"
            );
        }
    }

    /// Installs the focus-event callback on the current node.
    pub(crate) fn set_current_focus_event_handler(&mut self, handler: CallbackWith<FocusState>) {
        if let Some(current) = self.component_tree.current_node_mut() {
            current.focus_event_handler = Some(handler);
        } else {
            debug_assert!(
                false,
                "set_current_focus_event_handler must be called inside a component build"
            );
        }
    }

    /// Installs the handler invoked when focus traversal would leave the
    /// node's bounds.
    pub(crate) fn set_current_focus_beyond_bounds_handler(
        &mut self,
        handler: CallbackWith<FocusDirection, bool>,
    ) {
        if let Some(current) = self.component_tree.current_node_mut() {
            current.focus_beyond_bounds_handler = Some(handler);
        } else {
            debug_assert!(
                false,
                "set_current_focus_beyond_bounds_handler must be called inside a component build"
            );
        }
    }

    /// Installs the handler invoked to reveal a focused descendant (e.g.
    /// scroll-into-view style requests).
    pub(crate) fn set_current_focus_reveal_handler(
        &mut self,
        handler: CallbackWith<FocusRevealRequest, bool>,
    ) {
        if let Some(current) = self.component_tree.current_node_mut() {
            current.focus_reveal_handler = Some(handler);
        } else {
            debug_assert!(
                false,
                "set_current_focus_reveal_handler must be called inside a component build"
            );
        }
    }

    /// Sets the fallback requester used when focus restoration fails; only
    /// valid when the current node is registered as a scope (note: only the
    /// Scope kind is accepted here, despite the wider wording of the assert
    /// message).
    pub(crate) fn set_current_focus_restorer_fallback(&mut self, fallback: FocusRequester) {
        if let Some(current) = self.component_tree.current_node_mut() {
            if current
                .focus_registration
                .is_some_and(|registration| registration.kind == FocusRegistrationKind::Scope)
            {
                current.focus_restorer_fallback = Some(fallback);
            } else {
                debug_assert!(
                    false,
                    "set_current_focus_restorer_fallback requires focus_scope or focus_restorer first"
                );
            }
        } else {
            debug_assert!(
                false,
                "set_current_focus_restorer_fallback must be called inside a component build"
            );
        }
    }

    /// Compares the current node's final layout policy against last frame's
    /// and queues dirty-layout work (see `record_layout_policy_dirty`).
    pub(crate) fn finalize_current_layout_policy_dirty(&mut self) {
        if let Some(node) = self.component_tree.current_node() {
            record_layout_policy_dirty(node.instance_key, node.layout_policy.as_ref());
        } else {
            debug_assert!(
                false,
                "finalize_current_layout_policy_dirty must be called inside a component build"
            );
        }
    }
}
2014
/// RAII-style guard returned by `push_current_node`; pops the node-context
/// and instance-logic-id stacks exactly once, either via `pop()` or on drop.
pub struct NodeContextGuard {
    // True once the node-context stack entry has been popped.
    popped: bool,
    // True once the instance-logic-id stack entry has been popped.
    instance_logic_id_popped: bool,
    #[cfg(feature = "profiling")]
    profiling_guard: Option<crate::profiler::ScopeGuard>,
}
2023
/// Guard returned by `push_current_component_instance_key`; pops the
/// component-instance stack exactly once, via `pop()` or on drop.
pub struct CurrentComponentInstanceGuard {
    popped: bool,
}
2028
/// The phase of the frame the runtime is currently executing; maintained as
/// a stack via `push_phase`/`pop_phase`.
#[derive(Copy, Clone, Debug, Eq, PartialEq)]
pub enum RuntimePhase {
    /// Component tree construction.
    Build,
    /// Layout measurement.
    Measure,
    /// Input event processing.
    Input,
}
2039
/// Guard returned by `push_phase`; pops the phase stack exactly once, via
/// `pop()` or on drop.
pub struct PhaseGuard {
    popped: bool,
}
2044
2045impl PhaseGuard {
2046 pub fn pop(mut self) {
2048 if !self.popped {
2049 pop_phase();
2050 self.popped = true;
2051 }
2052 }
2053}
2054
2055impl Drop for PhaseGuard {
2056 fn drop(&mut self) {
2057 if !self.popped {
2058 pop_phase();
2059 self.popped = true;
2060 }
2061 }
2062}
2063
2064impl NodeContextGuard {
2065 pub fn pop(mut self) {
2067 if !self.popped {
2068 pop_current_node();
2069 self.popped = true;
2070 }
2071 if !self.instance_logic_id_popped {
2072 pop_instance_logic_id();
2073 self.instance_logic_id_popped = true;
2074 }
2075 }
2076}
2077
2078impl Drop for NodeContextGuard {
2079 fn drop(&mut self) {
2080 #[cfg(feature = "profiling")]
2081 {
2082 let _ = self.profiling_guard.take();
2083 }
2084 if !self.popped {
2085 pop_current_node();
2086 self.popped = true;
2087 }
2088 if !self.instance_logic_id_popped {
2089 pop_instance_logic_id();
2090 self.instance_logic_id_popped = true;
2091 }
2092 }
2093}
2094
2095impl CurrentComponentInstanceGuard {
2096 pub fn pop(mut self) {
2099 if !self.popped {
2100 pop_current_component_instance_key();
2101 self.popped = true;
2102 }
2103 }
2104}
2105
2106impl Drop for CurrentComponentInstanceGuard {
2107 fn drop(&mut self) {
2108 if !self.popped {
2109 pop_current_component_instance_key();
2110 self.popped = true;
2111 }
2112 }
2113}
2114
/// Enters a component node scope: pushes the node onto the context stack,
/// derives and pushes the node's instance logic id, and opens a fresh order
/// frame. Returns a guard that unwinds all of it.
///
/// Identity derivation (order-sensitive): the logic id mixes the component's
/// type id, the parent's logic id, and a "salt" built from the per-parent
/// call index plus, when present, the group-path hash and an explicit
/// instance-key override. The common fast path — first call, root parent,
/// no override, no group path — uses the bare `component_type_id`.
pub fn push_current_node(
    node_id: NodeId,
    component_type_id: u64,
    fn_name: &str,
) -> NodeContextGuard {
    #[cfg(not(feature = "profiling"))]
    let _ = fn_name;
    #[allow(unused_variables)]
    let parent_node_id = with_execution_context_mut(|context| {
        let parent = context.node_context_stack.last().copied();
        context.node_context_stack.push(node_id);
        parent
    });

    // Consumed even when an explicit logic-id override is used below, so
    // sibling call indices stay consistent.
    let parent_call_index = next_child_instance_call_index();
    let parent_instance_logic_id = with_execution_context(|context| {
        context.instance_logic_id_stack.last().copied().unwrap_or(0)
    });

    let group_path_hash = current_group_path_hash();
    let has_group_path = with_execution_context(|context| !context.group_path_stack.is_empty());

    // Salt distinguishing this call site among siblings; an explicit key
    // override takes precedence, then the group path, then the raw index.
    let instance_salt = if let Some(key_hash) = current_instance_key_override() {
        hash_components(&[&key_hash, &group_path_hash, &parent_call_index])
    } else if has_group_path {
        hash_components(&[&group_path_hash, &parent_call_index])
    } else {
        parent_call_index
    };

    let instance_logic_id =
        if let Some(instance_logic_id_override) = take_next_node_instance_logic_id_override() {
            instance_logic_id_override
        } else if parent_call_index == 0
            && parent_instance_logic_id == 0
            && current_instance_key_override().is_none()
            && !has_group_path
        {
            // Fast path: first child of the root with no overrides.
            component_type_id
        } else {
            hash_components(&[
                &component_type_id,
                &parent_instance_logic_id,
                &instance_salt,
            ])
        };

    with_execution_context_mut(|context| {
        context.instance_logic_id_stack.push(instance_logic_id);
    });

    // Fresh order counters for remember/functor/context/frame-receiver calls
    // made inside this node.
    push_order_frame();

    #[cfg(feature = "profiling")]
    let profiling_guard = match current_phase() {
        Some(RuntimePhase::Build) => {
            crate::profiler::make_build_scope_guard(node_id, parent_node_id, fn_name)
        }
        _ => None,
    };

    NodeContextGuard {
        popped: false,
        instance_logic_id_popped: false,
        #[cfg(feature = "profiling")]
        profiling_guard,
    }
}
2195
/// Like `push_current_node`, but with a precomputed `instance_logic_id`
/// instead of deriving one. Still consumes the parent's child call index so
/// sibling indices stay aligned with the deriving path.
pub fn push_current_node_with_instance_logic_id(
    node_id: NodeId,
    instance_logic_id: u64,
    fn_name: &str,
) -> NodeContextGuard {
    #[cfg(not(feature = "profiling"))]
    let _ = fn_name;
    #[allow(unused_variables)]
    let parent_node_id = with_execution_context_mut(|context| {
        let parent = context.node_context_stack.last().copied();
        context.node_context_stack.push(node_id);
        parent
    });

    // Advance (and discard) the call index to mirror push_current_node.
    let _ = next_child_instance_call_index();

    with_execution_context_mut(|context| {
        context.instance_logic_id_stack.push(instance_logic_id);
    });
    push_order_frame();

    #[cfg(feature = "profiling")]
    let profiling_guard = match current_phase() {
        Some(RuntimePhase::Build) => {
            crate::profiler::make_build_scope_guard(node_id, parent_node_id, fn_name)
        }
        _ => None,
    };

    NodeContextGuard {
        popped: false,
        instance_logic_id_popped: false,
        #[cfg(feature = "profiling")]
        profiling_guard,
    }
}
2237
2238pub fn push_current_component_instance_key(instance_key: u64) -> CurrentComponentInstanceGuard {
2240 with_execution_context_mut(|context| {
2241 context.current_component_instance_stack.push(instance_key);
2242 });
2243 CurrentComponentInstanceGuard { popped: false }
2244}
2245
2246fn pop_current_component_instance_key() {
2247 with_execution_context_mut(|context| {
2248 let popped = context.current_component_instance_stack.pop();
2249 debug_assert!(
2250 popped.is_some(),
2251 "Attempted to pop current component instance key from an empty stack"
2252 );
2253 });
2254}
2255
2256fn pop_current_node() {
2257 with_execution_context_mut(|context| {
2258 let popped = context.node_context_stack.pop();
2259 debug_assert!(
2260 popped.is_some(),
2261 "Attempted to pop current node from an empty stack"
2262 );
2263 });
2264 pop_order_frame("ORDER_FRAME_STACK underflow: attempted to pop from empty stack");
2265}
2266
/// Returns the node id at the top of the node-context stack, if any.
pub fn current_node_id() -> Option<NodeId> {
    with_execution_context(|context| context.node_context_stack.last().copied())
}

/// Returns the topmost instance logic id, or `None` outside a component scope.
fn current_instance_logic_id_opt() -> Option<u64> {
    with_execution_context(|context| context.instance_logic_id_stack.last().copied())
}

/// Returns the current instance logic id; panics outside a component scope.
pub(crate) fn current_instance_logic_id() -> u64 {
    current_instance_logic_id_opt()
        .expect("current_instance_logic_id must be called inside a component")
}

/// Computes the current instance key by hashing the instance logic id together
/// with the hash of the group-path stack. Panics outside a component scope.
pub(crate) fn current_instance_key() -> u64 {
    let instance_logic_id = current_instance_logic_id_opt()
        .expect("current_instance_key must be called inside a component");
    let group_path_hash = current_group_path_hash();
    hash_components(&[&instance_logic_id, &group_path_hash])
}
2289
2290fn pop_instance_logic_id() {
2291 with_execution_context_mut(|context| {
2292 let _ = context.instance_logic_id_stack.pop();
2293 });
2294}
2295
2296pub fn push_phase(phase: RuntimePhase) -> PhaseGuard {
2298 with_execution_context_mut(|context| {
2299 context.phase_stack.push(phase);
2300 });
2301 PhaseGuard { popped: false }
2302}
2303
2304fn pop_phase() {
2305 with_execution_context_mut(|context| {
2306 let popped = context.phase_stack.pop();
2307 debug_assert!(
2308 popped.is_some(),
2309 "Attempted to pop execution phase from an empty stack"
2310 );
2311 });
2312}
2313
/// Returns the runtime phase at the top of the phase stack, if any.
pub(crate) fn current_phase() -> Option<RuntimePhase> {
    with_execution_context(|context| context.phase_stack.last().copied())
}
2317
2318pub(crate) fn push_group_id(group_id: u64) {
2320 with_execution_context_mut(|context| {
2321 context.group_path_stack.push(group_id);
2322 });
2323}
2324
2325pub(crate) fn pop_group_id(expected_group_id: u64) {
2327 with_execution_context_mut(|context| {
2328 if let Some(popped) = context.group_path_stack.pop() {
2329 debug_assert_eq!(
2330 popped, expected_group_id,
2331 "Unbalanced GroupGuard stack: expected {}, got {}",
2332 expected_group_id, popped
2333 );
2334 } else {
2335 debug_assert!(false, "Attempted to pop GroupGuard from an empty stack");
2336 }
2337 });
2338}
2339
/// Returns a snapshot of the current group-path stack (oldest push first).
fn current_group_path() -> Vec<u64> {
    with_execution_context(|context| context.group_path_stack.clone())
}

/// Hashes the whole group-path stack; used as part of slot/instance identity.
fn current_group_path_hash() -> u64 {
    with_execution_context(|context| hash_components(&[&context.group_path_stack[..]]))
}

/// Returns the most recent instance-key override (pushed by
/// `InstanceKeyGuard`), if any.
fn current_instance_key_override() -> Option<u64> {
    with_execution_context(|context| context.instance_key_stack.last().copied())
}
2352
/// RAII scope marker for a group: pushes a group id and opens a dedicated
/// order frame on construction, and unwinds both (in reverse order) on drop.
pub struct GroupGuard {
    // Remembered so Drop can verify the balancing pop.
    group_id: u64,
}

impl GroupGuard {
    /// Enters group `group_id`: pushes it onto the group-path stack, then
    /// opens a fresh order frame for calls made inside the group.
    pub fn new(group_id: u64) -> Self {
        push_group_id(group_id);
        push_order_frame();
        Self { group_id }
    }
}

impl Drop for GroupGuard {
    fn drop(&mut self) {
        // Unwind in reverse: close the order frame first, then pop the group id.
        pop_order_frame("ORDER_FRAME_STACK underflow: attempted to pop GroupGuard frame");
        pop_group_id(self.group_id);
    }
}
2376
/// RAII scope marker that pushes a group id onto the group-path stack WITHOUT
/// opening an order frame — unlike [`GroupGuard`], call counters of the
/// enclosing frame keep running inside this scope.
pub struct PathGroupGuard {
    // Remembered so Drop can verify the balancing pop.
    group_id: u64,
}

impl PathGroupGuard {
    /// Pushes `group_id`; the balancing pop happens on drop.
    pub fn new(group_id: u64) -> Self {
        push_group_id(group_id);
        Self { group_id }
    }
}

impl Drop for PathGroupGuard {
    fn drop(&mut self) {
        pop_group_id(self.group_id);
    }
}
2401
/// RAII scope marker that pushes an explicit instance-key override onto the
/// instance-key stack and pops it (with a balance check) on drop.
pub struct InstanceKeyGuard {
    // Remembered so Drop can verify the balancing pop.
    key_hash: u64,
}

impl InstanceKeyGuard {
    /// Pushes `key_hash` as the current instance-key override.
    pub fn new(key_hash: u64) -> Self {
        with_execution_context_mut(|context| {
            context.instance_key_stack.push(key_hash);
        });
        Self { key_hash }
    }
}

impl Drop for InstanceKeyGuard {
    fn drop(&mut self) {
        with_execution_context_mut(|context| {
            // The pop always runs; only the equality check is debug-only.
            let popped = context.instance_key_stack.pop();
            debug_assert_eq!(
                popped,
                Some(self.key_hash),
                "Unbalanced InstanceKeyGuard stack"
            );
        });
    }
}
2429
2430fn hash_components<H: Hash + ?Sized>(parts: &[&H]) -> u64 {
2431 let mut hasher = rustc_hash::FxHasher::default();
2432 for part in parts {
2433 part.hash(&mut hasher);
2434 }
2435 hasher.finish()
2436}
2437
2438fn compute_slot_key<K: Hash>(key: &K) -> (u64, u64) {
2439 let instance_logic_id = current_instance_logic_id();
2440 let group_path_hash = current_group_path_hash();
2441 let key_hash = hash_components(&[key]);
2442
2443 let call_counter = next_order_counter(
2448 OrderCounterKind::Remember,
2449 "ORDER_FRAME_STACK is empty; remember must be called inside a component",
2450 );
2451
2452 let slot_hash = hash_components(&[&group_path_hash, &key_hash, &call_counter]);
2453 (instance_logic_id, slot_hash)
2454}
2455
2456fn compute_functor_slot_key<K: Hash>(key: &K) -> (u64, u64) {
2457 let instance_logic_id = current_instance_logic_id();
2458 let group_path_hash = current_group_path_hash();
2459 let key_hash = hash_components(&[key]);
2460
2461 let call_counter = next_order_counter(
2462 OrderCounterKind::Functor,
2463 "ORDER_FRAME_STACK is empty; callback constructors must be called inside a component",
2464 );
2465
2466 let slot_hash = hash_components(&[&group_path_hash, &key_hash, &call_counter]);
2467 (instance_logic_id, slot_hash)
2468}
2469
2470pub(crate) fn ensure_build_phase() {
2471 match current_phase() {
2472 Some(RuntimePhase::Build) => {}
2473 Some(RuntimePhase::Measure) => {
2474 panic!("remember must not be called inside measure; move state to component render")
2475 }
2476 Some(RuntimePhase::Input) => {
2477 panic!(
2478 "remember must not be called inside typed input handlers; move state to component render"
2479 )
2480 }
2481 None => panic!(
2482 "remember must be called inside a tessera component. Ensure you're calling this from within a function annotated with #[tessera]."
2483 ),
2484 }
2485}
2486
2487fn remember_functor_cell_with_key<K, T, F>(key: K, init: F) -> (Arc<T>, FunctorHandle)
2488where
2489 K: Hash,
2490 T: Send + Sync + 'static,
2491 F: FnOnce() -> T,
2492{
2493 ensure_build_phase();
2494 let (instance_logic_id, slot_hash) = compute_functor_slot_key(&key);
2495 let slot_key = SlotKey {
2496 instance_logic_id,
2497 slot_hash,
2498 type_id: TypeId::of::<T>(),
2499 };
2500
2501 with_slot_table_mut(|table| {
2502 let mut init_opt = Some(init);
2503 if let Some(slot) = table.try_fast_slot_lookup(slot_key) {
2504 let epoch = table.epoch;
2505 let (generation, value): (u64, Arc<dyn Any + Send + Sync>) = {
2506 let entry = table
2507 .entries
2508 .get_mut(slot)
2509 .expect("functor slot entry should exist");
2510
2511 if entry.key.type_id != slot_key.type_id {
2512 panic!(
2513 "callback slot type mismatch: expected {}, found {:?}",
2514 std::any::type_name::<T>(),
2515 entry.key.type_id
2516 );
2517 }
2518
2519 entry.last_alive_epoch = epoch;
2520 if entry.value.is_none() {
2521 let init_fn = init_opt
2522 .take()
2523 .expect("callback slot init called more than once");
2524 entry.value = Some(Arc::new(init_fn()));
2525 entry.generation = entry.generation.wrapping_add(1);
2526 }
2527
2528 (
2529 entry.generation,
2530 entry
2531 .value
2532 .as_ref()
2533 .expect("callback slot must contain a value")
2534 .clone(),
2535 )
2536 };
2537
2538 (
2539 value
2540 .downcast::<T>()
2541 .unwrap_or_else(|_| panic!("callback slot {:?} downcast failed", slot)),
2542 FunctorHandle::new(slot, generation),
2543 )
2544 } else if let Some(slot) = table.key_to_slot.get(&slot_key).copied() {
2545 table.record_slot_usage_slow(instance_logic_id, slot);
2546 let epoch = table.epoch;
2547 let (generation, value): (u64, Arc<dyn Any + Send + Sync>) = {
2548 let entry = table
2549 .entries
2550 .get_mut(slot)
2551 .expect("functor slot entry should exist");
2552
2553 if entry.key.type_id != slot_key.type_id {
2554 panic!(
2555 "callback slot type mismatch: expected {}, found {:?}",
2556 std::any::type_name::<T>(),
2557 entry.key.type_id
2558 );
2559 }
2560
2561 entry.last_alive_epoch = epoch;
2562 if entry.value.is_none() {
2563 let init_fn = init_opt
2564 .take()
2565 .expect("callback slot init called more than once");
2566 entry.value = Some(Arc::new(init_fn()));
2567 entry.generation = entry.generation.wrapping_add(1);
2568 }
2569
2570 (
2571 entry.generation,
2572 entry
2573 .value
2574 .as_ref()
2575 .expect("callback slot must contain a value")
2576 .clone(),
2577 )
2578 };
2579
2580 (
2581 value
2582 .downcast::<T>()
2583 .unwrap_or_else(|_| panic!("callback slot {:?} downcast failed", slot)),
2584 FunctorHandle::new(slot, generation),
2585 )
2586 } else {
2587 let epoch = table.epoch;
2588 let init_fn = init_opt
2589 .take()
2590 .expect("callback slot init called more than once");
2591 let generation = 1u64;
2592 let slot = table.entries.insert(SlotEntry {
2593 key: slot_key,
2594 generation,
2595 value: Some(Arc::new(init_fn())),
2596 last_alive_epoch: epoch,
2597 retained: false,
2598 });
2599
2600 table.key_to_slot.insert(slot_key, slot);
2601 table.record_slot_usage_slow(instance_logic_id, slot);
2602
2603 let value = table
2604 .entries
2605 .get(slot)
2606 .expect("functor slot entry should exist")
2607 .value
2608 .as_ref()
2609 .expect("callback slot must contain a value")
2610 .clone()
2611 .downcast::<T>()
2612 .unwrap_or_else(|_| panic!("callback slot {:?} downcast failed", slot));
2613
2614 (value, FunctorHandle::new(slot, generation))
2615 }
2616 })
2617}
2618
2619fn load_functor_cell<T>(handle: FunctorHandle) -> Arc<T>
2620where
2621 T: Send + Sync + 'static,
2622{
2623 with_slot_table(|table| {
2624 let entry = table
2625 .entries
2626 .get(handle.slot)
2627 .unwrap_or_else(|| panic!("Callback points to freed slot: {:?}", handle.slot));
2628
2629 if entry.generation != handle.generation {
2630 panic!(
2631 "Callback is stale (slot {:?}, generation {}, current generation {})",
2632 handle.slot, handle.generation, entry.generation
2633 );
2634 }
2635
2636 if entry.key.type_id != TypeId::of::<T>() {
2637 panic!(
2638 "Callback type mismatch for slot {:?}: expected {}, stored {:?}",
2639 handle.slot,
2640 std::any::type_name::<T>(),
2641 entry.key.type_id
2642 );
2643 }
2644
2645 entry
2646 .value
2647 .as_ref()
2648 .unwrap_or_else(|| panic!("Callback slot {:?} has been cleared", handle.slot))
2649 .clone()
2650 .downcast::<T>()
2651 .unwrap_or_else(|_| panic!("Callback slot {:?} downcast failed", handle.slot))
2652 })
2653}
2654
2655pub(crate) fn remember_callback_handle<F>(handler: F) -> FunctorHandle
2656where
2657 F: Fn() + Send + Sync + 'static,
2658{
2659 let handler = Arc::new(handler) as Arc<dyn Fn() + Send + Sync>;
2660 let (cell, handle) = remember_functor_cell_with_key((), {
2661 let handler = Arc::clone(&handler);
2662 move || CallbackCell::new(handler)
2663 });
2664 cell.update(handler);
2665 handle
2666}
2667
/// Loads the `CallbackCell` behind `handle` and invokes its current closure.
pub(crate) fn invoke_callback_handle(handle: FunctorHandle) {
    let callback = load_functor_cell::<CallbackCell>(handle).shared();
    callback();
}
2672
2673pub(crate) fn remember_render_slot_handle<F>(render: F) -> FunctorHandle
2674where
2675 F: Fn() + Send + Sync + 'static,
2676{
2677 let render = Arc::new(render) as Arc<dyn Fn() + Send + Sync>;
2678 let creator_instance_key = current_component_instance_key_from_scope()
2679 .unwrap_or_else(|| panic!("RenderSlot handles must be created during a component build"));
2680 let (cell, handle) = remember_functor_cell_with_key((), {
2681 let render = Arc::clone(&render);
2682 move || RenderSlotCell::new(render)
2683 });
2684 cell.update(render);
2685 for instance_key in render_slot_read_subscribers(handle) {
2686 if instance_key != creator_instance_key {
2687 record_component_invalidation_for_instance_key(instance_key);
2688 }
2689 }
2690 handle
2691}
2692
/// Loads the `RenderSlotCell` behind `handle` and invokes its current closure.
pub(crate) fn invoke_render_slot_handle(handle: FunctorHandle) {
    let render = load_functor_cell::<RenderSlotCell>(handle).shared();
    render();
}
2697
2698pub(crate) fn remember_render_slot_with_handle<T, F>(render: F) -> FunctorHandle
2699where
2700 T: 'static,
2701 F: Fn(T) + Send + Sync + 'static,
2702{
2703 let render = Arc::new(render) as Arc<dyn Fn(T) + Send + Sync>;
2704 let creator_instance_key = current_component_instance_key_from_scope().unwrap_or_else(|| {
2705 panic!("RenderSlotWith handles must be created during a component build")
2706 });
2707 let (cell, handle) = remember_functor_cell_with_key((), {
2708 let render = Arc::clone(&render);
2709 move || RenderSlotWithCell::new(render)
2710 });
2711 cell.update(render);
2712 for instance_key in render_slot_read_subscribers(handle) {
2713 if instance_key != creator_instance_key {
2714 record_component_invalidation_for_instance_key(instance_key);
2715 }
2716 }
2717 handle
2718}
2719
/// Loads the `RenderSlotWithCell<T>` behind `handle` and invokes its current
/// closure with `value`.
pub(crate) fn invoke_render_slot_with_handle<T>(handle: FunctorHandle, value: T)
where
    T: 'static,
{
    let render = load_functor_cell::<RenderSlotWithCell<T>>(handle).shared();
    render(value)
}
2727
2728pub(crate) fn remember_callback_with_handle<T, R, F>(handler: F) -> FunctorHandle
2729where
2730 T: 'static,
2731 R: 'static,
2732 F: Fn(T) -> R + Send + Sync + 'static,
2733{
2734 let handler = Arc::new(handler) as Arc<dyn Fn(T) -> R + Send + Sync>;
2735 let (cell, handle) = remember_functor_cell_with_key((), {
2736 let handler = Arc::clone(&handler);
2737 move || CallbackWithCell::new(handler)
2738 });
2739 cell.update(handler);
2740 handle
2741}
2742
/// Loads the `CallbackWithCell<T, R>` behind `handle` and invokes its current
/// closure with `value`, returning the result.
pub(crate) fn invoke_callback_with_handle<T, R>(handle: FunctorHandle, value: T) -> R
where
    T: 'static,
    R: 'static,
{
    let callback = load_functor_cell::<CallbackWithCell<T, R>>(handle).shared();
    callback(value)
}
2751
/// Starts a new slot-table epoch; slots not touched during the epoch become
/// candidates for recycling (see `recycle_recomposed_slots_for_instance_logic_ids`).
pub fn begin_recompose_slot_epoch() {
    with_slot_table_mut(SlotTable::begin_epoch);
}

/// Resets the slot table via `SlotTable::reset`.
pub fn reset_slots() {
    with_slot_table_mut(SlotTable::reset);
}
2761
2762pub(crate) fn recycle_recomposed_slots_for_instance_logic_ids(instance_logic_ids: &HashSet<u64>) {
2763 if instance_logic_ids.is_empty() {
2764 return;
2765 }
2766
2767 with_slot_table_mut(|table| {
2768 let epoch = table.epoch;
2769 let mut freed: Vec<(SlotHandle, SlotKey)> = Vec::new();
2770
2771 for (slot, entry) in table.entries.iter() {
2772 if !instance_logic_ids.contains(&entry.key.instance_logic_id) {
2773 continue;
2774 }
2775 if entry.last_alive_epoch == epoch || entry.retained {
2776 continue;
2777 }
2778 freed.push((slot, entry.key));
2779 }
2780
2781 for (slot, key) in freed {
2782 table.entries.remove(slot);
2783 table.key_to_slot.remove(&key);
2784 }
2785 });
2786}
2787
2788pub(crate) fn live_slot_instance_logic_ids() -> HashSet<u64> {
2789 with_slot_table(|table| {
2790 table
2791 .entries
2792 .iter()
2793 .map(|(_, entry)| entry.key.instance_logic_id)
2794 .collect()
2795 })
2796}
2797
2798pub fn remember_with_key<K, F, T>(key: K, init: F) -> State<T>
2826where
2827 K: Hash,
2828 F: FnOnce() -> T,
2829 T: Send + Sync + 'static,
2830{
2831 ensure_build_phase();
2832 let (instance_logic_id, slot_hash) = compute_slot_key(&key);
2833 let type_id = TypeId::of::<T>();
2834 let slot_key = SlotKey {
2835 instance_logic_id,
2836 slot_hash,
2837 type_id,
2838 };
2839
2840 with_slot_table_mut(|table| {
2841 let mut init_opt = Some(init);
2842 if let Some(slot) = table.try_fast_slot_lookup(slot_key) {
2843 let epoch = table.epoch;
2844 let generation = {
2845 let entry = table
2846 .entries
2847 .get_mut(slot)
2848 .expect("slot entry should exist");
2849
2850 if entry.key.type_id != slot_key.type_id {
2851 panic!(
2852 "remember_with_key type mismatch: expected {}, found {:?}",
2853 std::any::type_name::<T>(),
2854 entry.key.type_id
2855 );
2856 }
2857
2858 entry.last_alive_epoch = epoch;
2859 if entry.value.is_none() {
2860 let init_fn = init_opt
2861 .take()
2862 .expect("remember_with_key init called more than once");
2863 entry.value = Some(Arc::new(RwLock::new(init_fn())));
2864 entry.generation = entry.generation.wrapping_add(1);
2865 }
2866 entry.generation
2867 };
2868
2869 State::new(slot, generation)
2870 } else if let Some(slot) = table.key_to_slot.get(&slot_key).copied() {
2871 table.record_slot_usage_slow(instance_logic_id, slot);
2872 let epoch = table.epoch;
2873 let generation = {
2874 let entry = table
2875 .entries
2876 .get_mut(slot)
2877 .expect("slot entry should exist");
2878
2879 if entry.key.type_id != slot_key.type_id {
2880 panic!(
2881 "remember_with_key type mismatch: expected {}, found {:?}",
2882 std::any::type_name::<T>(),
2883 entry.key.type_id
2884 );
2885 }
2886
2887 entry.last_alive_epoch = epoch;
2888 if entry.value.is_none() {
2889 let init_fn = init_opt
2890 .take()
2891 .expect("remember_with_key init called more than once");
2892 entry.value = Some(Arc::new(RwLock::new(init_fn())));
2893 entry.generation = entry.generation.wrapping_add(1);
2894 }
2895 entry.generation
2896 };
2897
2898 State::new(slot, generation)
2899 } else {
2900 let epoch = table.epoch;
2901 let init_fn = init_opt
2902 .take()
2903 .expect("remember_with_key init called more than once");
2904 let generation = 1u64;
2905 let slot = table.entries.insert(SlotEntry {
2906 key: slot_key,
2907 generation,
2908 value: Some(Arc::new(RwLock::new(init_fn()))),
2909 last_alive_epoch: epoch,
2910 retained: false,
2911 });
2912
2913 table.key_to_slot.insert(slot_key, slot);
2914 table.record_slot_usage_slow(instance_logic_id, slot);
2915 State::new(slot, generation)
2916 }
2917 })
2918}
2919
/// Remembers a value keyed only by call-site position; equivalent to
/// `remember_with_key((), init)`.
pub fn remember<F, T>(init: F) -> State<T>
where
    F: FnOnce() -> T,
    T: Send + Sync + 'static,
{
    remember_with_key((), init)
}
2953
2954pub fn retain_with_key<K, F, T>(key: K, init: F) -> State<T>
2991where
2992 K: Hash,
2993 F: FnOnce() -> T,
2994 T: Send + Sync + 'static,
2995{
2996 ensure_build_phase();
2997 let (instance_logic_id, slot_hash) = compute_slot_key(&key);
2998 let type_id = TypeId::of::<T>();
2999 let slot_key = SlotKey {
3000 instance_logic_id,
3001 slot_hash,
3002 type_id,
3003 };
3004
3005 with_slot_table_mut(|table| {
3006 let mut init_opt = Some(init);
3007 if let Some(slot) = table.try_fast_slot_lookup(slot_key) {
3008 let epoch = table.epoch;
3009 let generation = {
3010 let entry = table
3011 .entries
3012 .get_mut(slot)
3013 .expect("slot entry should exist");
3014
3015 if entry.key.type_id != slot_key.type_id {
3016 panic!(
3017 "retain_with_key type mismatch: expected {}, found {:?}",
3018 std::any::type_name::<T>(),
3019 entry.key.type_id
3020 );
3021 }
3022
3023 entry.last_alive_epoch = epoch;
3024 entry.retained = true;
3025 if entry.value.is_none() {
3026 let init_fn = init_opt
3027 .take()
3028 .expect("retain_with_key init called more than once");
3029 entry.value = Some(Arc::new(RwLock::new(init_fn())));
3030 entry.generation = entry.generation.wrapping_add(1);
3031 }
3032
3033 entry.generation
3034 };
3035
3036 State::new(slot, generation)
3037 } else if let Some(slot) = table.key_to_slot.get(&slot_key).copied() {
3038 table.record_slot_usage_slow(instance_logic_id, slot);
3039 let epoch = table.epoch;
3040 let generation = {
3041 let entry = table
3042 .entries
3043 .get_mut(slot)
3044 .expect("slot entry should exist");
3045
3046 if entry.key.type_id != slot_key.type_id {
3047 panic!(
3048 "retain_with_key type mismatch: expected {}, found {:?}",
3049 std::any::type_name::<T>(),
3050 entry.key.type_id
3051 );
3052 }
3053
3054 entry.last_alive_epoch = epoch;
3055 entry.retained = true;
3056 if entry.value.is_none() {
3057 let init_fn = init_opt
3058 .take()
3059 .expect("retain_with_key init called more than once");
3060 entry.value = Some(Arc::new(RwLock::new(init_fn())));
3061 entry.generation = entry.generation.wrapping_add(1);
3062 }
3063
3064 entry.generation
3065 };
3066
3067 State::new(slot, generation)
3068 } else {
3069 let epoch = table.epoch;
3070 let init_fn = init_opt
3071 .take()
3072 .expect("retain_with_key init called more than once");
3073 let generation = 1u64;
3074 let slot = table.entries.insert(SlotEntry {
3075 key: slot_key,
3076 generation,
3077 value: Some(Arc::new(RwLock::new(init_fn()))),
3078 last_alive_epoch: epoch,
3079 retained: true,
3080 });
3081
3082 table.key_to_slot.insert(slot_key, slot);
3083 table.record_slot_usage_slow(instance_logic_id, slot);
3084 State::new(slot, generation)
3085 }
3086 })
3087}
3088
/// Retains a value keyed only by call-site position; equivalent to
/// `retain_with_key((), init)`.
pub fn retain<F, T>(init: F) -> State<T>
where
    F: FnOnce() -> T,
    T: Send + Sync + 'static,
{
    retain_with_key((), init)
}
3131
3132pub fn key<K, F, R>(key: K, block: F) -> R
3152where
3153 K: Hash,
3154 F: FnOnce() -> R,
3155{
3156 let key_hash = hash_components(&[&key]);
3157 let _group_guard = GroupGuard::new(key_hash);
3158 let _instance_guard = InstanceKeyGuard::new(key_hash);
3159 block()
3160}
3161
3162#[cfg(test)]
3163mod tests {
3164 use std::sync::{
3165 Arc,
3166 atomic::{AtomicUsize, Ordering},
3167 };
3168
3169 use super::*;
3170 use crate::execution_context::{
3171 reset_execution_context, with_execution_context, with_execution_context_mut,
3172 };
3173 use crate::layout::{LayoutInput, LayoutOutput, LayoutPolicy};
3174 use crate::prop::{Callback, RenderSlot};
3175
    // Test scaffold: resets the global execution context, then runs `f` inside
    // a fresh Build-phase component scope (node + component instance key).
    fn with_test_component_scope<R>(component_type_id: u64, f: impl FnOnce() -> R) -> R {
        reset_execution_context();
        let mut arena = crate::Arena::<()>::new();
        let node_id = arena.new_node(());
        let _phase_guard = push_phase(RuntimePhase::Build);
        let _node_guard = push_current_node(node_id, component_type_id, "test_component");
        let _instance_guard = push_current_component_instance_key(current_instance_key());
        f()
    }
3185
    // The component-scope instance key is whatever was explicitly pushed.
    #[test]
    fn frame_receiver_uses_component_scope_instance_key() {
        let _instance_guard = push_current_component_instance_key(7);
        assert_eq!(current_component_instance_key_from_scope(), Some(7));
    }

    // Registering a frame receiver outside any component scope must panic.
    #[test]
    fn receive_frame_nanos_panics_without_component_scope() {
        reset_frame_clock();
        begin_frame_clock(Instant::now());

        let result = std::panic::catch_unwind(|| {
            receive_frame_nanos(|_| FrameNanosControl::Continue);
        });
        assert!(result.is_err());
    }

    // Registering a frame receiver during the Input phase must panic.
    #[test]
    fn receive_frame_nanos_panics_in_input_phase() {
        let _phase_guard = push_phase(RuntimePhase::Input);
        let result = std::panic::catch_unwind(|| {
            receive_frame_nanos(|_| FrameNanosControl::Continue);
        });
        assert!(result.is_err());
    }
3211
    // A receiver that returns `Stop` is removed from the tracker after a tick.
    #[test]
    fn tick_frame_nanos_receivers_removes_stopped_receivers() {
        reset_frame_clock();
        begin_frame_clock(Instant::now());

        with_frame_clock_tracker_mut(|tracker| {
            tracker.receivers.insert(
                FrameNanosReceiverKey {
                    instance_logic_id: 1,
                    receiver_hash: 1,
                },
                FrameNanosReceiver {
                    owner_instance_key: 123,
                    callback: Box::new(|_| FrameNanosControl::Stop),
                },
            );
        });

        tick_frame_nanos_receivers();
        assert!(with_frame_clock_tracker(|tracker| tracker
            .receivers
            .is_empty()));
    }
3235
    // Dirty-key scopes are strictly nested: the inner set fully replaces the
    // outer one and the outer set is restored when the inner scope exits.
    #[test]
    fn with_build_dirty_instance_keys_marks_current_scope() {
        let mut outer = HashSet::default();
        outer.insert(7);

        assert!(!is_instance_key_build_dirty(7));
        with_build_dirty_instance_keys(&outer, || {
            assert!(is_instance_key_build_dirty(7));
            assert!(!is_instance_key_build_dirty(8));

            let mut inner = HashSet::default();
            inner.insert(8);
            with_build_dirty_instance_keys(&inner, || {
                assert!(!is_instance_key_build_dirty(7));
                assert!(is_instance_key_build_dirty(8));
            });

            assert!(is_instance_key_build_dirty(7));
            assert!(!is_instance_key_build_dirty(8));
        });
        assert!(!is_instance_key_build_dirty(7));
    }

    // The dirty-key scope is unwound even when the body panics.
    #[test]
    fn with_build_dirty_instance_keys_restores_on_panic() {
        let mut dirty = HashSet::default();
        dirty.insert(11);

        let result = std::panic::catch_unwind(std::panic::AssertUnwindSafe(|| {
            with_build_dirty_instance_keys(&dirty, || {
                assert!(is_instance_key_build_dirty(11));
                panic!("expected panic");
            });
        }));
        assert!(result.is_err());
        assert!(!is_instance_key_build_dirty(11));
    }
3273
    // Layout policy fixture whose measure- and placement-equality are driven
    // by independent keys, so tests can dirty one dimension at a time.
    #[derive(Clone, PartialEq)]
    struct DirtySplitPolicy {
        measure_key: u32,
        placement_key: u32,
    }

    impl LayoutPolicy for DirtySplitPolicy {
        // Measurement result is irrelevant for these tests; always zero-sized.
        fn measure(
            &self,
            _input: &LayoutInput<'_>,
            _output: &mut LayoutOutput<'_>,
        ) -> Result<crate::ComputedData, crate::MeasurementError> {
            Ok(crate::ComputedData::ZERO)
        }

        // Two policies measure-equal iff their measure keys match.
        fn measure_eq(&self, other: &Self) -> bool {
            self.measure_key == other.measure_key
        }

        // Two policies placement-equal iff their placement keys match.
        fn placement_eq(&self, other: &Self) -> bool {
            self.placement_key == other.placement_key
        }
    }
3297
    // Changing only the placement key dirties placement (not measure), and
    // changing only the measure key dirties measure (not placement).
    #[test]
    fn layout_dirty_tracking_separates_measure_and_placement_changes() {
        reset_layout_dirty_tracking();

        // Frame 1: first sighting of node 1 — counts as a measure change.
        begin_frame_layout_dirty_tracking();
        {
            let _phase_guard = push_phase(RuntimePhase::Build);
            record_layout_policy_dirty(
                1,
                &DirtySplitPolicy {
                    measure_key: 0,
                    placement_key: 0,
                },
            );
        }
        finalize_frame_layout_dirty_tracking();
        let dirty = take_layout_dirty_nodes();
        assert!(dirty.measure_self_nodes.contains(&1));
        assert!(dirty.placement_self_nodes.is_empty());

        // Frame 2: only the placement key changes.
        begin_frame_layout_dirty_tracking();
        {
            let _phase_guard = push_phase(RuntimePhase::Build);
            record_layout_policy_dirty(
                1,
                &DirtySplitPolicy {
                    measure_key: 0,
                    placement_key: 1,
                },
            );
        }
        finalize_frame_layout_dirty_tracking();
        let dirty = take_layout_dirty_nodes();
        assert!(!dirty.measure_self_nodes.contains(&1));
        assert!(dirty.placement_self_nodes.contains(&1));

        // Frame 3: only the measure key changes.
        begin_frame_layout_dirty_tracking();
        {
            let _phase_guard = push_phase(RuntimePhase::Build);
            record_layout_policy_dirty(
                1,
                &DirtySplitPolicy {
                    measure_key: 1,
                    placement_key: 1,
                },
            );
        }
        finalize_frame_layout_dirty_tracking();
        let dirty = take_layout_dirty_nodes();
        assert!(dirty.measure_self_nodes.contains(&1));
        assert!(!dirty.placement_self_nodes.contains(&1));
    }
3350
    // Inside a replay scope the group path / override are replaced and the
    // logic-id override is consumable exactly once; all prior state returns
    // when the scope exits.
    #[test]
    fn with_replay_scope_restores_group_path_and_override() {
        with_execution_context_mut(|context| {
            context.group_path_stack = vec![1, 2, 3];
        });
        with_execution_context_mut(|context| {
            context.instance_key_stack = vec![5];
        });
        with_execution_context_mut(|context| {
            context.next_node_instance_logic_id_override = Some(9);
        });

        with_replay_scope(42, &[7, 8], Some(11), || {
            assert_eq!(current_group_path(), vec![7, 8]);
            assert_eq!(current_instance_key_override(), Some(11));
            assert_eq!(take_next_node_instance_logic_id_override(), Some(42));
            assert_eq!(take_next_node_instance_logic_id_override(), None);
        });

        assert_eq!(current_group_path(), vec![1, 2, 3]);
        assert_eq!(current_instance_key_override(), Some(5));
        let restored_override =
            with_execution_context(|context| context.next_node_instance_logic_id_override);
        assert_eq!(restored_override, Some(9));
    }

    // The replay scope restores all replaced state even when the body panics.
    #[test]
    fn with_replay_scope_restores_on_panic() {
        with_execution_context_mut(|context| {
            context.group_path_stack = vec![5];
        });
        with_execution_context_mut(|context| {
            context.instance_key_stack = vec![13];
        });
        with_execution_context_mut(|context| {
            context.next_node_instance_logic_id_override = None;
        });

        let result = std::panic::catch_unwind(std::panic::AssertUnwindSafe(|| {
            with_replay_scope(77, &[10], Some(17), || {
                assert_eq!(current_group_path(), vec![10]);
                assert_eq!(current_instance_key_override(), Some(17));
                panic!("expected panic");
            });
        }));
        assert!(result.is_err());

        assert_eq!(current_group_path(), vec![5]);
        assert_eq!(current_instance_key_override(), Some(13));
        let restored_override =
            with_execution_context(|context| context.next_node_instance_logic_id_override);
        assert_eq!(restored_override, None);
    }
3404
    // A `remember` added inside a group must not shift the slot of a
    // `remember` that follows the group: the original value survives.
    #[test]
    fn group_local_remember_does_not_shift_following_slots() {
        reset_slots();

        begin_recompose_slot_epoch();
        with_test_component_scope(1001, || {
            let stable_state = remember(|| 1usize);
            stable_state.set(41);
        });

        begin_recompose_slot_epoch();
        with_test_component_scope(1001, || {
            {
                let _group_guard = GroupGuard::new(7);
                let _branch_state = remember(|| 10usize);
            }
            let stable_state = remember(|| 1usize);
            assert_eq!(stable_state.get(), 41);
        });
    }

    // A frame receiver registered inside a group must likewise leave the
    // following `remember` slot untouched.
    #[test]
    fn conditional_frame_receiver_does_not_shift_following_remember_slots() {
        reset_slots();
        reset_frame_clock();
        begin_frame_clock(Instant::now());

        begin_recompose_slot_epoch();
        with_test_component_scope(1002, || {
            let stable_state = remember(|| 1usize);
            stable_state.set(99);
        });

        begin_recompose_slot_epoch();
        with_test_component_scope(1002, || {
            {
                let _group_guard = GroupGuard::new(9);
                receive_frame_nanos(|_| FrameNanosControl::Stop);
            }
            let stable_state = remember(|| 1usize);
            assert_eq!(stable_state.get(), 99);
        });
    }
3448
    // Recreating a callback at the same call site yields an equal handle, and
    // invoking it runs the most recently supplied closure.
    #[test]
    fn callback_handle_stays_stable_and_invokes_latest_closure() {
        reset_slots();

        let calls = Arc::new(AtomicUsize::new(0));

        begin_recompose_slot_epoch();
        let first = with_test_component_scope(11001, || {
            let calls = Arc::clone(&calls);
            Callback::new(move || {
                calls.store(1, Ordering::SeqCst);
            })
        });

        begin_recompose_slot_epoch();
        let second = with_test_component_scope(11001, || {
            let calls = Arc::clone(&calls);
            Callback::new(move || {
                calls.store(2, Ordering::SeqCst);
            })
        });

        assert!(first == second);
        first.call();
        assert_eq!(calls.load(Ordering::SeqCst), 2);
    }

    // Rebuilding a render slot invalidates the component that read it, but
    // not the creator itself.
    #[test]
    fn render_slot_update_invalidates_reader_instance() {
        reset_slots();
        reset_render_slot_read_dependencies();
        reset_build_invalidations();

        begin_recompose_slot_epoch();
        let first = with_test_component_scope(11002, || RenderSlot::new(|| {}));

        let reader_instance_key = with_test_component_scope(11003, || {
            let instance_key =
                current_component_instance_key_from_scope().expect("reader must have instance key");
            first.render();
            instance_key
        });

        assert!(!has_pending_build_invalidations());

        begin_recompose_slot_epoch();
        let second = with_test_component_scope(11002, || RenderSlot::new(|| {}));

        assert!(first == second);
        assert!(has_pending_build_invalidations());

        let invalidations = take_build_invalidations();
        let mut expected = HashSet::default();
        expected.insert(reader_instance_key);
        assert_eq!(invalidations.dirty_instance_keys, expected);
    }
3505
    #[test]
    fn group_local_child_identity_does_not_shift_following_siblings() {
        // Simulates a build pass and returns the instance logic id of a
        // "stable" child that appears AFTER an optional group-wrapped
        // sibling. The `_..._guard` locals are RAII scopes: their
        // declaration order and drop points define the simulated component
        // tree, so statement order here is load-bearing.
        fn stable_child_instance_logic_id(with_group_child: bool) -> u64 {
            let mut arena = crate::Arena::<()>::new();
            let root_node = arena.new_node(());
            let stable_child_node = arena.new_node(());
            let group_child_node = arena.new_node(());

            // Enter the Build phase with a root component on the stack.
            let _phase_guard = push_phase(RuntimePhase::Build);
            let _root_guard = push_current_node(root_node, 2001, "root_component");
            let _root_instance_guard = push_current_component_instance_key(current_instance_key());

            if with_group_child {
                // Optionally materialize a sibling inside its own group.
                // All guards drop at the end of this `if` block, so the
                // group is fully closed before the stable child is pushed.
                let _group_guard = GroupGuard::new(5);
                let _group_child_guard =
                    push_current_node(group_child_node, 2002, "group_child_component");
                let _group_child_instance_guard =
                    push_current_component_instance_key(current_instance_key());
                let _ = current_instance_logic_id();
            }

            let _stable_child_guard =
                push_current_node(stable_child_node, 2003, "stable_child_component");
            current_instance_logic_id()
        }

        // The stable child's id must be identical whether or not the
        // group-local sibling was present: group-local identity must not
        // shift the ids of following siblings.
        assert_eq!(
            stable_child_instance_logic_id(false),
            stable_child_instance_logic_id(true)
        );
    }
3537
    #[test]
    fn child_components_in_different_groups_get_distinct_instance_logic_ids() {
        let mut arena = crate::Arena::<()>::new();
        let root_node = arena.new_node(());
        let first_child_node = arena.new_node(());
        let second_child_node = arena.new_node(());

        // Enter the Build phase with a root component on the stack. The
        // `_..._guard` locals are RAII scopes whose declaration order and
        // drop points define the simulated component tree.
        let _phase_guard = push_phase(RuntimePhase::Build);
        let _root_guard = push_current_node(root_node, 3001, "root_component");
        let _root_instance_guard = push_current_component_instance_key(current_instance_key());

        // Two children with the SAME component id (3002) and name, but
        // placed under distinct group keys (11 vs 12). Each group's guards
        // drop at the end of their block before the next group opens.
        let first_id = {
            let _group_guard = GroupGuard::new(11);
            let _child_guard = push_current_node(first_child_node, 3002, "grouped_child");
            current_instance_logic_id()
        };

        let second_id = {
            let _group_guard = GroupGuard::new(12);
            let _child_guard = push_current_node(second_child_node, 3002, "grouped_child");
            current_instance_logic_id()
        };

        // The group key must participate in identity: otherwise-identical
        // children in different groups get distinct instance logic ids.
        assert_ne!(first_id, second_id);
    }
3563
    #[test]
    fn child_components_in_repeated_path_groups_keep_distinct_instance_logic_ids() {
        let mut arena = crate::Arena::<()>::new();
        let root_node = arena.new_node(());
        let first_child_node = arena.new_node(());
        let second_child_node = arena.new_node(());

        // Enter the Build phase with a root component on the stack. The
        // `_..._guard` locals are RAII scopes whose declaration order and
        // drop points define the simulated component tree.
        let _phase_guard = push_phase(RuntimePhase::Build);
        let _root_guard = push_current_node(root_node, 4001, "root_component");
        let _root_instance_guard = push_current_component_instance_key(current_instance_key());

        // Two children with the SAME component id (4002) under PathGroupGuard
        // scopes that reuse the SAME group key (21) — modeling repeated
        // iterations of a loop body. Each guard drops before the next opens.
        let first_id = {
            let _group_guard = PathGroupGuard::new(21);
            let _child_guard = push_current_node(first_child_node, 4002, "loop_child");
            current_instance_logic_id()
        };

        let second_id = {
            let _group_guard = PathGroupGuard::new(21);
            let _child_guard = push_current_node(second_child_node, 4002, "loop_child");
            current_instance_logic_id()
        };

        // Even with an identical path-group key, repeated occurrences must
        // not collapse into one identity — the ids stay distinct.
        assert_ne!(first_id, second_id);
    }
3589
3590 #[test]
3591 fn drop_slots_for_instance_logic_ids_keeps_retained_entries() {
3592 let mut table = SlotTable::default();
3593 let keep_key = SlotKey {
3594 instance_logic_id: 7,
3595 slot_hash: 11,
3596 type_id: TypeId::of::<i32>(),
3597 };
3598 let drop_key = SlotKey {
3599 instance_logic_id: 7,
3600 slot_hash: 12,
3601 type_id: TypeId::of::<i32>(),
3602 };
3603
3604 let keep_slot = table.entries.insert(SlotEntry {
3605 key: keep_key,
3606 generation: 1,
3607 value: Some(Arc::new(RwLock::new(10_i32))),
3608 last_alive_epoch: 0,
3609 retained: true,
3610 });
3611 let drop_slot = table.entries.insert(SlotEntry {
3612 key: drop_key,
3613 generation: 1,
3614 value: Some(Arc::new(RwLock::new(20_i32))),
3615 last_alive_epoch: 0,
3616 retained: false,
3617 });
3618 table.key_to_slot.insert(keep_key, keep_slot);
3619 table.key_to_slot.insert(drop_key, drop_slot);
3620 with_slot_table_mut(|slot_table| *slot_table = table);
3621
3622 let mut stale = HashSet::default();
3623 stale.insert(7_u64);
3624 drop_slots_for_instance_logic_ids(&stale);
3625
3626 with_slot_table(|table| {
3627 assert!(table.entries.get(keep_slot).is_some());
3628 assert!(table.key_to_slot.contains_key(&keep_key));
3629 assert!(table.entries.get(drop_slot).is_none());
3630 assert!(!table.key_to_slot.contains_key(&drop_key));
3631 });
3632 }
3633}