// tsz_solver/caches/query_cache.rs

//! Cached query database implementation for the solver.
//!
//! `QueryCache` wraps a `TypeInterner` with memoization for evaluation,
//! relation, property, and element access queries. This is the concrete
//! database implementation used by the checker at runtime.

7use crate::caches::db::{QueryDatabase, TypeDatabase};
8use crate::caches::query_trace;
9use crate::def::DefId;
10use crate::intern::TypeInterner;
11use crate::objects::element_access::ElementAccessResult;
12use crate::operations::property::PropertyAccessResult;
13use crate::relations::compat::CompatChecker;
14use crate::relations::subtype::TypeResolver;
15use crate::types::{
16    CallableShape, CallableShapeId, ConditionalType, ConditionalTypeId, FunctionShape,
17    FunctionShapeId, IndexInfo, IntrinsicKind, MappedType, MappedTypeId, ObjectFlags, ObjectShape,
18    ObjectShapeId, PropertyInfo, PropertyLookup, RelationCacheKey, StringIntrinsicKind, SymbolRef,
19    TemplateLiteralId, TemplateSpan, TupleElement, TupleListId, TypeApplication, TypeApplicationId,
20    TypeData, TypeId, TypeListId, TypeParamInfo, Variance,
21};
22use rustc_hash::{FxHashMap, FxHashSet};
23use std::sync::atomic::{AtomicBool, AtomicU64, Ordering};
24use std::sync::{Arc, RwLock};
25use tsz_binder::SymbolId;
26use tsz_common::interner::Atom;
27
/// Key for the type-evaluation cache: (type, `noUncheckedIndexedAccess` flag).
type EvalCacheKey = (TypeId, bool);
/// Key for generic-application evaluation: (definition, type arguments, flag).
type ApplicationEvalCacheKey = (DefId, Vec<TypeId>, bool);
/// Key for element-access results: (object type, index type, literal index, flag).
type ElementAccessTypeCacheKey = (TypeId, TypeId, Option<u32>, bool);
/// Key for property-access results: (object type, interned property name, flag).
type PropertyAccessCacheKey = (TypeId, Atom, bool);
32
/// Outcome of probing a relation cache without computing the relation.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum RelationCacheProbe {
    /// The relation was already cached; the payload is the cached answer.
    Hit(bool),
    /// No cached entry exists for the probed key.
    MissNotCached,
}
38
/// Snapshot of hit/miss counters and entry counts for the two relation caches.
/// Produced by `QueryCache::relation_cache_stats`.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub struct RelationCacheStats {
    pub subtype_hits: u64,
    pub subtype_misses: u64,
    pub subtype_entries: usize,
    pub assignability_hits: u64,
    pub assignability_misses: u64,
    pub assignability_entries: usize,
}
48
/// Query database wrapper with basic caching.
pub struct QueryCache<'a> {
    /// Shared type interner that all queries ultimately delegate to.
    interner: &'a TypeInterner,
    /// Memoized `evaluate_type_with_options` results, keyed by (type, flag).
    eval_cache: RwLock<FxHashMap<EvalCacheKey, TypeId>>,
    /// Memoized generic-application evaluations.
    application_eval_cache: RwLock<FxHashMap<ApplicationEvalCacheKey, TypeId>>,
    /// Memoized element-access result types.
    element_access_cache: RwLock<FxHashMap<ElementAccessTypeCacheKey, TypeId>>,
    /// Memoized property lists produced for object spreads.
    object_spread_properties_cache: RwLock<FxHashMap<TypeId, Vec<PropertyInfo>>>,
    /// Memoized subtype relation results.
    subtype_cache: RwLock<FxHashMap<RelationCacheKey, bool>>,
    /// CRITICAL: Separate cache for assignability to prevent cache poisoning.
    /// This ensures that loose assignability results (e.g., any is assignable to number)
    /// don't contaminate strict subtype checks.
    assignability_cache: RwLock<FxHashMap<RelationCacheKey, bool>>,
    /// Memoized property-access resolutions.
    property_cache: RwLock<FxHashMap<PropertyAccessCacheKey, PropertyAccessResult>>,
    /// Task #41: Variance cache for generic type parameters.
    /// Stores computed variance masks for `DefIds` to enable O(1) generic assignability.
    variance_cache: RwLock<FxHashMap<DefId, Arc<[Variance]>>>,
    /// Task #49: Canonical cache for O(1) structural identity checks.
    /// Maps `TypeId` -> canonical `TypeId` for structurally identical types.
    canonical_cache: RwLock<FxHashMap<TypeId, TypeId>>,
    // Hit/miss counters reported by `relation_cache_stats`; only updated by
    // the `lookup_*_cache` trait methods, not by the direct relation queries.
    subtype_cache_hits: AtomicU64,
    subtype_cache_misses: AtomicU64,
    assignability_cache_hits: AtomicU64,
    assignability_cache_misses: AtomicU64,
    /// Cache-wide `noUncheckedIndexedAccess` setting used as the default flag
    /// for evaluation / property / element-access queries.
    no_unchecked_indexed_access: AtomicBool,
}
74
75impl<'a> QueryCache<'a> {
76    pub fn new(interner: &'a TypeInterner) -> Self {
77        QueryCache {
78            interner,
79            eval_cache: RwLock::new(FxHashMap::default()),
80            application_eval_cache: RwLock::new(FxHashMap::default()),
81            element_access_cache: RwLock::new(FxHashMap::default()),
82            object_spread_properties_cache: RwLock::new(FxHashMap::default()),
83            subtype_cache: RwLock::new(FxHashMap::default()),
84            assignability_cache: RwLock::new(FxHashMap::default()),
85            property_cache: RwLock::new(FxHashMap::default()),
86            variance_cache: RwLock::new(FxHashMap::default()),
87            canonical_cache: RwLock::new(FxHashMap::default()),
88            subtype_cache_hits: AtomicU64::new(0),
89            subtype_cache_misses: AtomicU64::new(0),
90            assignability_cache_hits: AtomicU64::new(0),
91            assignability_cache_misses: AtomicU64::new(0),
92            no_unchecked_indexed_access: AtomicBool::new(false),
93        }
94    }
95
96    pub fn clear(&self) {
97        // Handle poisoned locks gracefully - if poisoned, clear the cache anyway
98        match self.eval_cache.write() {
99            Ok(mut cache) => cache.clear(),
100            Err(e) => e.into_inner().clear(),
101        }
102        match self.element_access_cache.write() {
103            Ok(mut cache) => cache.clear(),
104            Err(e) => e.into_inner().clear(),
105        }
106        match self.application_eval_cache.write() {
107            Ok(mut cache) => cache.clear(),
108            Err(e) => e.into_inner().clear(),
109        }
110        match self.object_spread_properties_cache.write() {
111            Ok(mut cache) => cache.clear(),
112            Err(e) => e.into_inner().clear(),
113        }
114        match self.subtype_cache.write() {
115            Ok(mut cache) => cache.clear(),
116            Err(e) => e.into_inner().clear(),
117        }
118        match self.assignability_cache.write() {
119            Ok(mut cache) => cache.clear(),
120            Err(e) => e.into_inner().clear(),
121        }
122        match self.property_cache.write() {
123            Ok(mut cache) => cache.clear(),
124            Err(e) => e.into_inner().clear(),
125        }
126        match self.variance_cache.write() {
127            Ok(mut cache) => cache.clear(),
128            Err(e) => e.into_inner().clear(),
129        }
130        match self.canonical_cache.write() {
131            Ok(mut cache) => cache.clear(),
132            Err(e) => e.into_inner().clear(),
133        }
134        self.reset_relation_cache_stats();
135    }
136
137    pub fn relation_cache_stats(&self) -> RelationCacheStats {
138        let subtype_entries = match self.subtype_cache.read() {
139            Ok(cache) => cache.len(),
140            Err(e) => e.into_inner().len(),
141        };
142        let assignability_entries = match self.assignability_cache.read() {
143            Ok(cache) => cache.len(),
144            Err(e) => e.into_inner().len(),
145        };
146        RelationCacheStats {
147            subtype_hits: self.subtype_cache_hits.load(Ordering::Relaxed),
148            subtype_misses: self.subtype_cache_misses.load(Ordering::Relaxed),
149            subtype_entries,
150            assignability_hits: self.assignability_cache_hits.load(Ordering::Relaxed),
151            assignability_misses: self.assignability_cache_misses.load(Ordering::Relaxed),
152            assignability_entries,
153        }
154    }
155
156    pub fn reset_relation_cache_stats(&self) {
157        self.subtype_cache_hits.store(0, Ordering::Relaxed);
158        self.subtype_cache_misses.store(0, Ordering::Relaxed);
159        self.assignability_cache_hits.store(0, Ordering::Relaxed);
160        self.assignability_cache_misses.store(0, Ordering::Relaxed);
161    }
162
163    pub fn probe_subtype_cache(&self, key: RelationCacheKey) -> RelationCacheProbe {
164        match self.lookup_subtype_cache(key) {
165            Some(result) => RelationCacheProbe::Hit(result),
166            None => RelationCacheProbe::MissNotCached,
167        }
168    }
169
170    pub fn probe_assignability_cache(&self, key: RelationCacheKey) -> RelationCacheProbe {
171        match self.lookup_assignability_cache(key) {
172            Some(result) => RelationCacheProbe::Hit(result),
173            None => RelationCacheProbe::MissNotCached,
174        }
175    }
176
177    /// Helper to check a cache with poisoned lock handling.
178    fn check_cache(
179        &self,
180        cache: &RwLock<FxHashMap<RelationCacheKey, bool>>,
181        key: RelationCacheKey,
182    ) -> Option<bool> {
183        match cache.read() {
184            Ok(cached) => cached.get(&key).copied(),
185            Err(e) => e.into_inner().get(&key).copied(),
186        }
187    }
188
189    /// Helper to insert into a cache with poisoned lock handling.
190    fn insert_cache(
191        &self,
192        cache: &RwLock<FxHashMap<RelationCacheKey, bool>>,
193        key: RelationCacheKey,
194        result: bool,
195    ) {
196        match cache.write() {
197            Ok(mut c) => {
198                c.insert(key, result);
199            }
200            Err(e) => {
201                e.into_inner().insert(key, result);
202            }
203        }
204    }
205
206    fn check_property_cache(&self, key: PropertyAccessCacheKey) -> Option<PropertyAccessResult> {
207        match self.property_cache.read() {
208            Ok(cache) => cache.get(&key).cloned(),
209            Err(e) => e.into_inner().get(&key).cloned(),
210        }
211    }
212
213    fn insert_property_cache(&self, key: PropertyAccessCacheKey, result: PropertyAccessResult) {
214        match self.property_cache.write() {
215            Ok(mut cache) => {
216                cache.insert(key, result);
217            }
218            Err(e) => {
219                e.into_inner().insert(key, result);
220            }
221        }
222    }
223
224    fn check_element_access_cache(&self, key: ElementAccessTypeCacheKey) -> Option<TypeId> {
225        match self.element_access_cache.read() {
226            Ok(cache) => cache.get(&key).copied(),
227            Err(e) => e.into_inner().get(&key).copied(),
228        }
229    }
230
231    fn insert_element_access_cache(&self, key: ElementAccessTypeCacheKey, result: TypeId) {
232        match self.element_access_cache.write() {
233            Ok(mut cache) => {
234                cache.insert(key, result);
235            }
236            Err(e) => {
237                e.into_inner().insert(key, result);
238            }
239        }
240    }
241
242    fn check_application_eval_cache(&self, key: ApplicationEvalCacheKey) -> Option<TypeId> {
243        match self.application_eval_cache.read() {
244            Ok(cache) => cache.get(&key).copied(),
245            Err(e) => e.into_inner().get(&key).copied(),
246        }
247    }
248
249    fn insert_application_eval_cache(&self, key: ApplicationEvalCacheKey, result: TypeId) {
250        match self.application_eval_cache.write() {
251            Ok(mut cache) => {
252                cache.insert(key, result);
253            }
254            Err(e) => {
255                e.into_inner().insert(key, result);
256            }
257        }
258    }
259
260    fn check_object_spread_properties_cache(&self, key: TypeId) -> Option<Vec<PropertyInfo>> {
261        match self.object_spread_properties_cache.read() {
262            Ok(cache) => cache.get(&key).cloned(),
263            Err(e) => e.into_inner().get(&key).cloned(),
264        }
265    }
266
267    fn insert_object_spread_properties_cache(&self, key: TypeId, value: Vec<PropertyInfo>) {
268        match self.object_spread_properties_cache.write() {
269            Ok(mut cache) => {
270                cache.insert(key, value);
271            }
272            Err(e) => {
273                e.into_inner().insert(key, value);
274            }
275        }
276    }
277
278    fn collect_object_spread_properties_inner(
279        &self,
280        spread_type: TypeId,
281        visited: &mut FxHashSet<TypeId>,
282    ) -> Vec<PropertyInfo> {
283        let normalized =
284            self.evaluate_type_with_options(spread_type, self.no_unchecked_indexed_access());
285
286        if !visited.insert(normalized) {
287            return Vec::new();
288        }
289
290        if normalized != spread_type {
291            return self.collect_object_spread_properties_inner(normalized, visited);
292        }
293
294        let Some(key) = self.interner.lookup(normalized) else {
295            return Vec::new();
296        };
297
298        match key {
299            TypeData::Object(shape_id) | TypeData::ObjectWithIndex(shape_id) => {
300                self.interner.object_shape(shape_id).properties.to_vec()
301            }
302            TypeData::Callable(shape_id) => {
303                self.interner.callable_shape(shape_id).properties.to_vec()
304            }
305            TypeData::Intersection(members_id) => {
306                let members = self.interner.type_list(members_id);
307                let mut merged: FxHashMap<Atom, PropertyInfo> = FxHashMap::default();
308
309                for &member in members.iter() {
310                    for prop in self.collect_object_spread_properties_inner(member, visited) {
311                        merged.insert(prop.name, prop);
312                    }
313                }
314
315                merged.into_values().collect()
316            }
317            _ => Vec::new(),
318        }
319    }
320}
321
// Pure delegation: every `TypeDatabase` operation forwards unchanged to the
// wrapped `TypeInterner`. No memoization happens at this layer — the caching
// lives in the `QueryDatabase` impl below.
impl TypeDatabase for QueryCache<'_> {
    fn intern(&self, key: TypeData) -> TypeId {
        self.interner.intern(key)
    }

    fn lookup(&self, id: TypeId) -> Option<TypeData> {
        self.interner.lookup(id)
    }

    fn intern_string(&self, s: &str) -> Atom {
        self.interner.intern_string(s)
    }

    fn resolve_atom(&self, atom: Atom) -> String {
        self.interner.resolve_atom(atom)
    }

    fn resolve_atom_ref(&self, atom: Atom) -> Arc<str> {
        self.interner.resolve_atom_ref(atom)
    }

    // -- Interned list / shape accessors ------------------------------------

    fn type_list(&self, id: TypeListId) -> Arc<[TypeId]> {
        self.interner.type_list(id)
    }

    fn tuple_list(&self, id: TupleListId) -> Arc<[TupleElement]> {
        self.interner.tuple_list(id)
    }

    fn template_list(&self, id: TemplateLiteralId) -> Arc<[TemplateSpan]> {
        self.interner.template_list(id)
    }

    fn object_shape(&self, id: ObjectShapeId) -> Arc<ObjectShape> {
        self.interner.object_shape(id)
    }

    fn object_property_index(&self, shape_id: ObjectShapeId, name: Atom) -> PropertyLookup {
        self.interner.object_property_index(shape_id, name)
    }

    fn function_shape(&self, id: FunctionShapeId) -> Arc<FunctionShape> {
        self.interner.function_shape(id)
    }

    fn callable_shape(&self, id: CallableShapeId) -> Arc<CallableShape> {
        self.interner.callable_shape(id)
    }

    fn conditional_type(&self, id: ConditionalTypeId) -> Arc<ConditionalType> {
        self.interner.conditional_type(id)
    }

    fn mapped_type(&self, id: MappedTypeId) -> Arc<MappedType> {
        self.interner.mapped_type(id)
    }

    fn type_application(&self, id: TypeApplicationId) -> Arc<TypeApplication> {
        self.interner.type_application(id)
    }

    // -- Literal constructors -----------------------------------------------

    fn literal_string(&self, value: &str) -> TypeId {
        self.interner.literal_string(value)
    }

    fn literal_number(&self, value: f64) -> TypeId {
        self.interner.literal_number(value)
    }

    fn literal_boolean(&self, value: bool) -> TypeId {
        self.interner.literal_boolean(value)
    }

    fn literal_bigint(&self, value: &str) -> TypeId {
        self.interner.literal_bigint(value)
    }

    fn literal_bigint_with_sign(&self, negative: bool, digits: &str) -> TypeId {
        self.interner.literal_bigint_with_sign(negative, digits)
    }

    // -- Composite type constructors ----------------------------------------

    fn union(&self, members: Vec<TypeId>) -> TypeId {
        self.interner.union(members)
    }

    fn union_from_sorted_vec(&self, flat: Vec<TypeId>) -> TypeId {
        self.interner.union_from_sorted_vec(flat)
    }

    fn union2(&self, left: TypeId, right: TypeId) -> TypeId {
        self.interner.union2(left, right)
    }

    fn union3(&self, first: TypeId, second: TypeId, third: TypeId) -> TypeId {
        self.interner.union3(first, second, third)
    }

    fn intersection(&self, members: Vec<TypeId>) -> TypeId {
        self.interner.intersection(members)
    }

    fn intersection2(&self, left: TypeId, right: TypeId) -> TypeId {
        self.interner.intersection2(left, right)
    }

    fn intersect_types_raw2(&self, left: TypeId, right: TypeId) -> TypeId {
        self.interner.intersect_types_raw2(left, right)
    }

    fn array(&self, element: TypeId) -> TypeId {
        self.interner.array(element)
    }

    fn tuple(&self, elements: Vec<TupleElement>) -> TypeId {
        self.interner.tuple(elements)
    }

    fn object(&self, properties: Vec<PropertyInfo>) -> TypeId {
        self.interner.object(properties)
    }

    fn object_with_flags(&self, properties: Vec<PropertyInfo>, flags: ObjectFlags) -> TypeId {
        self.interner.object_with_flags(properties, flags)
    }

    fn object_with_flags_and_symbol(
        &self,
        properties: Vec<PropertyInfo>,
        flags: ObjectFlags,
        symbol: Option<SymbolId>,
    ) -> TypeId {
        self.interner
            .object_with_flags_and_symbol(properties, flags, symbol)
    }

    fn object_with_index(&self, shape: ObjectShape) -> TypeId {
        self.interner.object_with_index(shape)
    }

    fn function(&self, shape: FunctionShape) -> TypeId {
        self.interner.function(shape)
    }

    fn callable(&self, shape: CallableShape) -> TypeId {
        self.interner.callable(shape)
    }

    fn template_literal(&self, spans: Vec<TemplateSpan>) -> TypeId {
        self.interner.template_literal(spans)
    }

    fn conditional(&self, conditional: ConditionalType) -> TypeId {
        self.interner.conditional(conditional)
    }

    fn mapped(&self, mapped: MappedType) -> TypeId {
        self.interner.mapped(mapped)
    }

    fn reference(&self, symbol: SymbolRef) -> TypeId {
        self.interner.reference(symbol)
    }

    fn lazy(&self, def_id: DefId) -> TypeId {
        self.interner.lazy(def_id)
    }

    fn bound_parameter(&self, index: u32) -> TypeId {
        self.interner.bound_parameter(index)
    }

    fn recursive(&self, depth: u32) -> TypeId {
        self.interner.recursive(depth)
    }

    fn type_param(&self, info: TypeParamInfo) -> TypeId {
        self.interner.type_param(info)
    }

    fn type_query(&self, symbol: SymbolRef) -> TypeId {
        self.interner.type_query(symbol)
    }

    fn enum_type(&self, def_id: DefId, structural_type: TypeId) -> TypeId {
        self.interner.enum_type(def_id, structural_type)
    }

    fn application(&self, base: TypeId, args: Vec<TypeId>) -> TypeId {
        self.interner.application(base, args)
    }

    fn literal_string_atom(&self, atom: Atom) -> TypeId {
        self.interner.literal_string_atom(atom)
    }

    fn union_preserve_members(&self, members: Vec<TypeId>) -> TypeId {
        self.interner.union_preserve_members(members)
    }

    fn readonly_type(&self, inner: TypeId) -> TypeId {
        self.interner.readonly_type(inner)
    }

    fn keyof(&self, inner: TypeId) -> TypeId {
        self.interner.keyof(inner)
    }

    fn index_access(&self, object_type: TypeId, index_type: TypeId) -> TypeId {
        self.interner.index_access(object_type, index_type)
    }

    fn this_type(&self) -> TypeId {
        self.interner.this_type()
    }

    fn no_infer(&self, inner: TypeId) -> TypeId {
        self.interner.no_infer(inner)
    }

    fn unique_symbol(&self, symbol: SymbolRef) -> TypeId {
        self.interner.unique_symbol(symbol)
    }

    fn infer(&self, info: TypeParamInfo) -> TypeId {
        self.interner.infer(info)
    }

    fn string_intrinsic(&self, kind: StringIntrinsicKind, type_arg: TypeId) -> TypeId {
        self.interner.string_intrinsic(kind, type_arg)
    }

    // -- Misc queries ---------------------------------------------------------

    fn get_class_base_type(&self, symbol_id: SymbolId) -> Option<TypeId> {
        // Delegate to the interner
        self.interner.get_class_base_type(symbol_id)
    }

    fn is_identity_comparable_type(&self, type_id: TypeId) -> bool {
        self.interner.is_identity_comparable_type(type_id)
    }
}
562
/// Implement `TypeResolver` for `QueryCache` with noop resolution.
///
/// `QueryCache` doesn't have access to the Binder or type environment,
/// so it cannot resolve symbol references or `DefIds`. Only `resolve_ref`
/// (required) is explicitly implemented; all other resolution methods
/// inherit the trait's default `None`/`false` behavior. The three boxed/array
/// methods delegate to the underlying interner.
impl TypeResolver for QueryCache<'_> {
    /// Symbol references cannot be resolved without a binder; always `None`.
    fn resolve_ref(&self, _symbol: SymbolRef, _interner: &dyn TypeDatabase) -> Option<TypeId> {
        None
    }

    /// Boxed wrapper type for a primitive intrinsic, if one was registered.
    fn get_boxed_type(&self, kind: IntrinsicKind) -> Option<TypeId> {
        self.interner.get_boxed_type(kind)
    }

    /// The registered array base type, if any.
    fn get_array_base_type(&self) -> Option<TypeId> {
        self.interner.get_array_base_type()
    }

    /// Type parameters of the registered array base type.
    fn get_array_base_type_params(&self) -> &[TypeParamInfo] {
        self.interner.get_array_base_type_params()
    }
}
587
588impl QueryDatabase for QueryCache<'_> {
    /// Upcast to the `TypeDatabase` view of this cache.
    fn as_type_database(&self) -> &dyn TypeDatabase {
        self
    }

    /// Register the array base type (and its type parameters) on the shared interner.
    fn register_array_base_type(&self, type_id: TypeId, type_params: Vec<TypeParamInfo>) {
        self.interner.set_array_base_type(type_id, type_params);
    }

    /// Register the boxed wrapper type for a primitive intrinsic on the interner.
    fn register_boxed_type(&self, kind: IntrinsicKind, type_id: TypeId) {
        self.interner.set_boxed_type(kind, type_id);
    }
600
    /// Evaluate `type_id` under the cache-wide `noUncheckedIndexedAccess` setting.
    fn evaluate_type(&self, type_id: TypeId) -> TypeId {
        self.evaluate_type_with_options(type_id, self.no_unchecked_indexed_access())
    }
604
    /// Evaluate `type_id` with an explicit `noUncheckedIndexedAccess` flag,
    /// memoizing the result per (type, flag) pair.
    ///
    /// Trace start/end events bracket the cache probe so the hit flag in the
    /// end event is accurate; keep their ordering relative to the probe.
    fn evaluate_type_with_options(
        &self,
        type_id: TypeId,
        no_unchecked_indexed_access: bool,
    ) -> TypeId {
        let trace_enabled = query_trace::enabled();
        let trace_query_id = trace_enabled.then(|| {
            let query_id = query_trace::next_query_id();
            query_trace::unary_start(
                query_id,
                "evaluate_type_with_options",
                type_id,
                no_unchecked_indexed_access,
            );
            query_id
        });
        let key = (type_id, no_unchecked_indexed_access);
        // Handle poisoned locks gracefully
        let cached = match self.eval_cache.read() {
            Ok(cache) => cache.get(&key).copied(),
            Err(e) => e.into_inner().get(&key).copied(),
        };

        if let Some(result) = cached {
            if let Some(query_id) = trace_query_id {
                // `true` marks a cache hit in the trace.
                query_trace::unary_end(query_id, "evaluate_type_with_options", result, true);
            }
            return result;
        }

        // Miss: run the evaluator. NOTE(review): two threads may race past the
        // read above and both evaluate; the second insert overwrites the first —
        // presumably with an equal value, assuming evaluation is deterministic.
        let mut evaluator =
            crate::evaluation::evaluate::TypeEvaluator::new(self.as_type_database());
        evaluator.set_no_unchecked_indexed_access(no_unchecked_indexed_access);
        evaluator = evaluator.with_query_db(self);
        let result = evaluator.evaluate(type_id);
        match self.eval_cache.write() {
            Ok(mut cache) => {
                cache.insert(key, result);
            }
            Err(e) => {
                e.into_inner().insert(key, result);
            }
        }
        if let Some(query_id) = trace_query_id {
            query_trace::unary_end(query_id, "evaluate_type_with_options", result, false);
        }
        result
    }
653
    /// Cached result of evaluating a generic application of `def_id` to `args`
    /// under the given indexed-access strictness, if previously recorded.
    fn lookup_application_eval_cache(
        &self,
        def_id: DefId,
        args: &[TypeId],
        no_unchecked_indexed_access: bool,
    ) -> Option<TypeId> {
        self.check_application_eval_cache((def_id, args.to_vec(), no_unchecked_indexed_access))
    }

    /// Record the evaluation result of a generic application.
    /// The call below resolves to the inherent tuple-keyed helper, not to this
    /// trait method (inherent methods take precedence).
    fn insert_application_eval_cache(
        &self,
        def_id: DefId,
        args: &[TypeId],
        no_unchecked_indexed_access: bool,
        result: TypeId,
    ) {
        self.insert_application_eval_cache(
            (def_id, args.to_vec(), no_unchecked_indexed_access),
            result,
        );
    }
675
676    fn is_subtype_of_with_flags(&self, source: TypeId, target: TypeId, flags: u16) -> bool {
677        let trace_enabled = query_trace::enabled();
678        let trace_query_id = trace_enabled.then(|| {
679            let query_id = query_trace::next_query_id();
680            query_trace::relation_start(
681                query_id,
682                "is_subtype_of_with_flags",
683                source,
684                target,
685                flags,
686            );
687            query_id
688        });
689        let key = RelationCacheKey::subtype(source, target, flags, 0);
690        // Handle poisoned locks gracefully
691        let cached = match self.subtype_cache.read() {
692            Ok(cache) => cache.get(&key).copied(),
693            Err(e) => e.into_inner().get(&key).copied(),
694        };
695
696        if let Some(result) = cached {
697            if let Some(query_id) = trace_query_id {
698                query_trace::relation_end(query_id, "is_subtype_of_with_flags", result, true);
699            }
700            return result;
701        }
702
703        let result = crate::relations::subtype::is_subtype_of_with_flags(
704            self.as_type_database(),
705            source,
706            target,
707            flags,
708        );
709        match self.subtype_cache.write() {
710            Ok(mut cache) => {
711                cache.insert(key, result);
712            }
713            Err(e) => {
714                e.into_inner().insert(key, result);
715            }
716        }
717        if let Some(query_id) = trace_query_id {
718            query_trace::relation_end(query_id, "is_subtype_of_with_flags", result, false);
719        }
720        result
721    }
722
    /// Assignability query with memoization, keyed by (source, target, flags).
    /// Uses a cache separate from the subtype cache so loose assignability
    /// results cannot poison strict subtype checks (see struct docs).
    fn is_assignable_to_with_flags(&self, source: TypeId, target: TypeId, flags: u16) -> bool {
        let trace_enabled = query_trace::enabled();
        let trace_query_id = trace_enabled.then(|| {
            let query_id = query_trace::next_query_id();
            query_trace::relation_start(
                query_id,
                "is_assignable_to_with_flags",
                source,
                target,
                flags,
            );
            query_id
        });
        // Task A: Use passed flags instead of hardcoded 0,0
        let key = RelationCacheKey::assignability(source, target, flags, 0);

        if let Some(result) = self.check_cache(&self.assignability_cache, key) {
            if let Some(query_id) = trace_query_id {
                query_trace::relation_end(query_id, "is_assignable_to_with_flags", result, true);
            }
            return result;
        }

        // Use CompatChecker with all compatibility rules
        let mut checker = CompatChecker::new(self.as_type_database());

        // FIX: Apply flags to ensure checker matches the cache key configuration
        // This prevents cache poisoning where results from non-strict checks
        // leak into strict checks (Gap C fix)
        checker.apply_flags(flags);

        let result = checker.is_assignable(source, target);

        self.insert_cache(&self.assignability_cache, key, result);
        if let Some(query_id) = trace_query_id {
            query_trace::relation_end(query_id, "is_assignable_to_with_flags", result, false);
        }
        result
    }
762
    /// Convenience wrapper for `is_subtype_of` with default flags.
    /// `flags = 0` is the default/lenient configuration — presumably non-strict;
    /// see `RelationCacheKey` for flag semantics.
    fn is_subtype_of(&self, source: TypeId, target: TypeId) -> bool {
        self.is_subtype_of_with_flags(source, target, 0) // Default non-strict mode for backward compatibility
    }

    /// Convenience wrapper for `is_assignable_to` with default flags.
    fn is_assignable_to(&self, source: TypeId, target: TypeId) -> bool {
        self.is_assignable_to_with_flags(source, target, 0) // Default non-strict mode for backward compatibility
    }
772
773    fn lookup_subtype_cache(&self, key: RelationCacheKey) -> Option<bool> {
774        let result = match self.subtype_cache.read() {
775            Ok(cache) => cache.get(&key).copied(),
776            Err(e) => e.into_inner().get(&key).copied(),
777        };
778        if result.is_some() {
779            self.subtype_cache_hits.fetch_add(1, Ordering::Relaxed);
780        } else {
781            self.subtype_cache_misses.fetch_add(1, Ordering::Relaxed);
782        }
783        result
784    }
785
786    fn insert_subtype_cache(&self, key: RelationCacheKey, result: bool) {
787        match self.subtype_cache.write() {
788            Ok(mut cache) => {
789                cache.insert(key, result);
790            }
791            Err(e) => {
792                e.into_inner().insert(key, result);
793            }
794        }
795    }
796
797    fn lookup_assignability_cache(&self, key: RelationCacheKey) -> Option<bool> {
798        let result = match self.assignability_cache.read() {
799            Ok(cache) => cache.get(&key).copied(),
800            Err(e) => e.into_inner().get(&key).copied(),
801        };
802        if result.is_some() {
803            self.assignability_cache_hits
804                .fetch_add(1, Ordering::Relaxed);
805        } else {
806            self.assignability_cache_misses
807                .fetch_add(1, Ordering::Relaxed);
808        }
809        result
810    }
811
812    fn insert_assignability_cache(&self, key: RelationCacheKey, result: bool) {
813        match self.assignability_cache.write() {
814            Ok(mut cache) => {
815                cache.insert(key, result);
816            }
817            Err(e) => {
818                e.into_inner().insert(key, result);
819            }
820        }
821    }
822
    /// Index-signature info for `type_id`; uncached delegation to the interner.
    fn get_index_signatures(&self, type_id: TypeId) -> IndexInfo {
        // Delegate to the interner - caching could be added later if needed
        self.interner.get_index_signatures(type_id)
    }

    /// Whether `type_id` is nullish — presumably null/undefined; semantics live
    /// in the interner.
    fn is_nullish_type(&self, type_id: TypeId) -> bool {
        // Delegate to the interner
        self.interner.is_nullish_type(type_id)
    }

    /// Strip nullish constituents from `type_id`; delegation to the interner.
    fn remove_nullish(&self, type_id: TypeId) -> TypeId {
        // Delegate to the interner
        self.interner.remove_nullish(type_id)
    }
837
    /// Resolve `object_type.prop_name` using the cache-wide
    /// `noUncheckedIndexedAccess` setting.
    fn resolve_property_access(
        &self,
        object_type: TypeId,
        prop_name: &str,
    ) -> crate::operations::property::PropertyAccessResult {
        self.resolve_property_access_with_options(
            object_type,
            prop_name,
            self.no_unchecked_indexed_access(),
        )
    }
849
850    fn resolve_property_access_with_options(
851        &self,
852        object_type: TypeId,
853        prop_name: &str,
854        no_unchecked_indexed_access: bool,
855    ) -> crate::operations::property::PropertyAccessResult {
856        // QueryCache doesn't have full TypeResolver capability, so use PropertyAccessEvaluator
857        // with the current QueryDatabase.
858        let prop_atom = self.interner.intern_string(prop_name);
859        let key = (object_type, prop_atom, no_unchecked_indexed_access);
860        if let Some(result) = self.check_property_cache(key) {
861            return result;
862        }
863
864        let mut evaluator = crate::operations::property::PropertyAccessEvaluator::new(self);
865        evaluator.set_no_unchecked_indexed_access(no_unchecked_indexed_access);
866        let result = evaluator.resolve_property_access(object_type, prop_name);
867        self.insert_property_cache(key, result.clone());
868        result
869    }
870
871    fn resolve_element_access_type(
872        &self,
873        object_type: TypeId,
874        index_type: TypeId,
875        literal_index: Option<usize>,
876    ) -> TypeId {
877        let key = (
878            object_type,
879            index_type,
880            literal_index.map(|idx| idx as u32),
881            self.no_unchecked_indexed_access(),
882        );
883        if let Some(result) = self.check_element_access_cache(key) {
884            return result;
885        }
886
887        let result = match self.resolve_element_access(object_type, index_type, literal_index) {
888            ElementAccessResult::Success(type_id) => type_id,
889            _ => TypeId::ERROR,
890        };
891
892        self.insert_element_access_cache(key, result);
893        result
894    }
895
896    fn collect_object_spread_properties(&self, spread_type: TypeId) -> Vec<PropertyInfo> {
897        if let Some(cached) = self.check_object_spread_properties_cache(spread_type) {
898            return cached;
899        }
900
901        let mut visited: FxHashSet<TypeId> = FxHashSet::default();
902        let result = self.collect_object_spread_properties_inner(spread_type, &mut visited);
903        self.insert_object_spread_properties_cache(spread_type, result.clone());
904        result
905    }
906
907    fn no_unchecked_indexed_access(&self) -> bool {
908        self.no_unchecked_indexed_access.load(Ordering::Relaxed)
909    }
910
911    fn set_no_unchecked_indexed_access(&self, enabled: bool) {
912        self.no_unchecked_indexed_access
913            .store(enabled, Ordering::Relaxed);
914    }
915
916    fn get_type_param_variance(&self, def_id: DefId) -> Option<Arc<[Variance]>> {
917        // 1. Check cache first (lock-free read)
918        if let Ok(cache) = self.variance_cache.read()
919            && let Some(cached) = cache.get(&def_id)
920        {
921            return Some(Arc::clone(cached));
922        }
923
924        // 2. Compute variance using the type's body
925        // This requires the database to also be a TypeResolver (which QueryDatabase is)
926        let params = self.get_lazy_type_params(def_id)?;
927        if params.is_empty() {
928            return None;
929        }
930
931        let body = self.resolve_lazy(def_id, self.as_type_database())?;
932
933        let mut variances = Vec::with_capacity(params.len());
934        for param in &params {
935            // Compute variance for each type parameter
936            let v = crate::relations::variance::compute_variance(self, body, param.name);
937            variances.push(v);
938        }
939        let result = Arc::from(variances);
940
941        // 3. Store in cache
942        match self.variance_cache.write() {
943            Ok(mut cache) => {
944                cache.insert(def_id, Arc::clone(&result));
945            }
946            Err(e) => {
947                e.into_inner().insert(def_id, Arc::clone(&result));
948            }
949        }
950
951        Some(result)
952    }
953
954    fn canonical_id(&self, type_id: TypeId) -> TypeId {
955        // Check cache first
956        let cached = match self.canonical_cache.read() {
957            Ok(cache) => cache.get(&type_id).copied(),
958            Err(e) => e.into_inner().get(&type_id).copied(),
959        };
960
961        if let Some(canonical) = cached {
962            return canonical;
963        }
964
965        // Compute canonical form using a fresh Canonicalizer
966        // CRITICAL: Always start with empty stacks for absolute De Bruijn indices
967        // This ensures the cached TypeId represents the absolute structural form
968        use crate::canonicalize::Canonicalizer;
969        let mut canon = Canonicalizer::new(self.as_type_database(), self);
970        let canonical = canon.canonicalize(type_id);
971
972        // Cache the result
973        match self.canonical_cache.write() {
974            Ok(mut cache) => {
975                cache.insert(type_id, canonical);
976            }
977            Err(e) => {
978                e.into_inner().insert(type_id, canonical);
979            }
980        }
981
982        canonical
983    }
984}
985
// The shared database tests live under `tests/db_tests.rs`, outside this
// module's directory; the explicit `#[path]` attribute pulls them in as a
// child module compiled only for test builds.
#[cfg(test)]
#[path = "../../tests/db_tests.rs"]
mod tests;