1use crate::def::DefId;
24#[cfg(test)]
25use crate::types::*;
26use crate::types::{
27 CallSignature, CallableShape, CallableShapeId, ConditionalType, ConditionalTypeId,
28 FunctionShape, FunctionShapeId, IndexSignature, IntrinsicKind, LiteralValue, MappedType,
29 MappedTypeId, ObjectFlags, ObjectShape, ObjectShapeId, OrderedFloat, PropertyInfo,
30 PropertyLookup, SymbolRef, TemplateLiteralId, TemplateSpan, TupleElement, TupleListId,
31 TypeApplication, TypeApplicationId, TypeData, TypeId, TypeListId, TypeParamInfo, Visibility,
32};
33use crate::visitor::is_identity_comparable_type;
34use dashmap::DashMap;
35use dashmap::mapref::entry::Entry;
36use rustc_hash::{FxBuildHasher, FxHashMap, FxHashSet, FxHasher};
37use smallvec::SmallVec;
38use std::hash::{Hash, Hasher};
39use std::sync::{
40 Arc, OnceLock,
41 atomic::{AtomicU32, Ordering},
42};
43use tsz_common::interner::{Atom, ShardedInterner};
44
// Submodules implementing the heavier normalization algorithms
// (intersection reduction, union normalization, template expansion).
mod intersection;
mod normalize;
mod template;
48
/// Number of low bits of a user `TypeId` used to select a shard.
const SHARD_BITS: u32 = 6;
/// Number of shards (64); must stay a power of two for the mask below.
const SHARD_COUNT: usize = 1 << SHARD_BITS;
/// Mask extracting the shard index from a packed `TypeId`.
const SHARD_MASK: u32 = (SHARD_COUNT as u32) - 1;
/// Shapes with at least this many properties get a cached name->index map;
/// smaller shapes use a linear scan (see `object_property_map`).
pub(crate) const PROPERTY_MAP_THRESHOLD: usize = 24;
/// Inline capacity of `TypeListBuffer` before it spills to the heap.
const TYPE_LIST_INLINE: usize = 8;
54
// Budgets are much smaller on wasm32, where available memory is constrained.
#[cfg(target_arch = "wasm32")]
pub(crate) const TEMPLATE_LITERAL_EXPANSION_LIMIT: usize = 2_000;
// Cap on template-literal expansion work (used by the `template` module).
#[cfg(not(target_arch = "wasm32"))]
pub(crate) const TEMPLATE_LITERAL_EXPANSION_LIMIT: usize = 100_000;

#[cfg(target_arch = "wasm32")]
pub(crate) const MAX_INTERNED_TYPES: usize = 500_000;
// Soft cap on total interned types; `TypeInterner::intern` returns
// `TypeId::ERROR` once `approximate_count` exceeds this.
#[cfg(not(target_arch = "wasm32"))]
pub(crate) const MAX_INTERNED_TYPES: usize = 5_000_000;
69
/// Scratch buffer for assembling type lists; stays inline for up to
/// `TYPE_LIST_INLINE` ids before spilling to the heap.
pub(crate) type TypeListBuffer = SmallVec<[TypeId; TYPE_LIST_INLINE]>;
/// Lazily-built per-shape `property name -> property index` lookup tables.
type ObjectPropertyIndex = DashMap<ObjectShapeId, Arc<FxHashMap<Atom, usize>>, FxBuildHasher>;
/// The index above, created on first use.
type ObjectPropertyMap = OnceLock<ObjectPropertyIndex>;
73
/// Mutable state of one shard: a bidirectional `TypeData` <-> local-index map.
struct TypeShardInner {
    // Interned data -> shard-local index (deduplication).
    key_to_index: DashMap<TypeData, u32, FxBuildHasher>,
    // Shard-local index -> interned data (used by `lookup`).
    index_to_key: DashMap<u32, Arc<TypeData>, FxBuildHasher>,
}
81
/// One of `SHARD_COUNT` shards backing the type interner.
struct TypeShard {
    // Maps are allocated lazily on first use (see `get_inner`).
    inner: OnceLock<TypeShardInner>,
    // Next shard-local index; also doubles as an occupancy indicator
    // for `is_empty`.
    next_index: AtomicU32,
}
93
impl TypeShard {
    /// Creates an empty shard without allocating its maps.
    const fn new() -> Self {
        Self {
            inner: OnceLock::new(),
            next_index: AtomicU32::new(0),
        }
    }

    /// Returns the shard's maps, creating them on first use.
    #[inline]
    fn get_inner(&self) -> &TypeShardInner {
        self.inner.get_or_init(|| TypeShardInner {
            key_to_index: DashMap::with_hasher(FxBuildHasher),
            index_to_key: DashMap::with_hasher(FxBuildHasher),
        })
    }

    /// True if no index has ever been allocated in this shard.
    #[inline]
    fn is_empty(&self) -> bool {
        self.next_index.load(Ordering::Relaxed) == 0
    }
}
117
/// Backing maps for `ConcurrentSliceInterner`.
struct SliceInternerInner<T> {
    // id -> interned slice.
    items: DashMap<u32, Arc<[T]>, FxBuildHasher>,
    // interned slice -> id (deduplication).
    map: DashMap<Arc<[T]>, u32, FxBuildHasher>,
}
123
/// Thread-safe slice interner; id 0 is reserved for the empty slice.
struct ConcurrentSliceInterner<T> {
    // Maps are created lazily on the first non-empty intern.
    inner: OnceLock<SliceInternerInner<T>>,
    // Next id to hand out (starts at 1; 0 is the empty slice).
    next_id: AtomicU32,
}
130
131impl<T> ConcurrentSliceInterner<T>
132where
133 T: Eq + Hash + Clone + Send + Sync + 'static,
134{
135 const fn new() -> Self {
136 Self {
137 inner: OnceLock::new(),
138 next_id: AtomicU32::new(1), }
140 }
141
142 #[inline]
143 fn get_inner(&self) -> &SliceInternerInner<T> {
144 self.inner.get_or_init(|| {
145 let items = DashMap::with_hasher(FxBuildHasher);
146 let map = DashMap::with_hasher(FxBuildHasher);
147 let empty: Arc<[T]> = Arc::from(Vec::new());
148 items.insert(0, std::sync::Arc::clone(&empty));
149 map.insert(empty, 0);
150 SliceInternerInner { items, map }
151 })
152 }
153
154 fn intern(&self, items_slice: &[T]) -> u32 {
155 if items_slice.is_empty() {
156 return 0;
157 }
158
159 let inner = self.get_inner();
160 let temp_arc: Arc<[T]> = Arc::from(items_slice.to_vec());
161
162 if let Some(ref_entry) = inner.map.get(&temp_arc) {
164 return *ref_entry.value();
165 }
166
167 let id = self.next_id.fetch_add(1, Ordering::Relaxed);
169
170 match inner.map.entry(std::sync::Arc::clone(&temp_arc)) {
172 dashmap::mapref::entry::Entry::Vacant(e) => {
173 e.insert(id);
174 inner.items.insert(id, temp_arc);
175 id
176 }
177 dashmap::mapref::entry::Entry::Occupied(e) => *e.get(),
178 }
179 }
180
181 fn get(&self, id: u32) -> Option<Arc<[T]>> {
182 if id == 0 {
184 return Some(Arc::from(Vec::new()));
185 }
186 self.inner
187 .get()?
188 .items
189 .get(&id)
190 .map(|e| std::sync::Arc::clone(e.value()))
191 }
192
193 fn empty(&self) -> Arc<[T]> {
194 Arc::from(Vec::new())
195 }
196}
197
/// Backing maps for `ConcurrentValueInterner`.
struct ValueInternerInner<T> {
    // id -> interned value.
    items: DashMap<u32, Arc<T>, FxBuildHasher>,
    // interned value -> id (deduplication).
    map: DashMap<Arc<T>, u32, FxBuildHasher>,
}
203
/// Thread-safe interner for single values; ids are dense starting at 0.
struct ConcurrentValueInterner<T> {
    // Maps are created lazily on the first intern.
    inner: OnceLock<ValueInternerInner<T>>,
    // Next id to hand out.
    next_id: AtomicU32,
}
210
211impl<T> ConcurrentValueInterner<T>
212where
213 T: Eq + Hash + Clone + Send + Sync + 'static,
214{
215 const fn new() -> Self {
216 Self {
217 inner: OnceLock::new(),
218 next_id: AtomicU32::new(0),
219 }
220 }
221
222 #[inline]
223 fn get_inner(&self) -> &ValueInternerInner<T> {
224 self.inner.get_or_init(|| ValueInternerInner {
225 items: DashMap::with_hasher(FxBuildHasher),
226 map: DashMap::with_hasher(FxBuildHasher),
227 })
228 }
229
230 fn intern(&self, value: T) -> u32 {
231 let inner = self.get_inner();
232 let value_arc = Arc::new(value);
233
234 if let Some(ref_entry) = inner.map.get(&value_arc) {
236 return *ref_entry.value();
237 }
238
239 let id = self.next_id.fetch_add(1, Ordering::Relaxed);
241
242 match inner.map.entry(std::sync::Arc::clone(&value_arc)) {
244 Entry::Vacant(e) => {
245 e.insert(id);
246 inner.items.insert(id, value_arc);
247 id
248 }
249 Entry::Occupied(e) => *e.get(),
250 }
251 }
252
253 fn get(&self, id: u32) -> Option<Arc<T>> {
254 self.inner
255 .get()?
256 .items
257 .get(&id)
258 .map(|e| std::sync::Arc::clone(e.value()))
259 }
260}
261
/// Global, thread-safe interner for types and their auxiliary payloads.
///
/// `TypeData` lives in hash-selected shards; larger payloads (shapes, lists,
/// conditional/mapped types, applications) live in dedicated sub-interners
/// and are referenced from `TypeData` by id.
pub struct TypeInterner {
    // Hash-sharded `TypeData` storage; see `intern`/`lookup` for the packing.
    shards: Vec<TypeShard>,
    // Interner for identifier/string atoms.
    pub string_interner: ShardedInterner,
    type_lists: ConcurrentSliceInterner<TypeId>,
    tuple_lists: ConcurrentSliceInterner<TupleElement>,
    template_lists: ConcurrentSliceInterner<TemplateSpan>,
    object_shapes: ConcurrentValueInterner<ObjectShape>,
    // Lazily-built per-shape property lookup tables (see `object_property_map`).
    object_property_maps: ObjectPropertyMap,
    function_shapes: ConcurrentValueInterner<FunctionShape>,
    callable_shapes: ConcurrentValueInterner<CallableShape>,
    conditional_types: ConcurrentValueInterner<ConditionalType>,
    mapped_types: ConcurrentValueInterner<MappedType>,
    applications: ConcurrentValueInterner<TypeApplication>,
    // Memoization for `is_identity_comparable_type`.
    identity_comparable_cache: DashMap<TypeId, bool, FxBuildHasher>,
    // Set once via `set_array_base_type`.
    array_base_type: OnceLock<TypeId>,
    array_base_type_params: OnceLock<Vec<TypeParamInfo>>,
    // Per-intrinsic boxed types registered via `set_boxed_type`.
    boxed_types: DashMap<IntrinsicKind, TypeId, FxBuildHasher>,
}
296
297impl std::fmt::Debug for TypeInterner {
298 fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
299 f.debug_struct("TypeInterner")
300 .field("shards", &self.shards.len())
301 .finish_non_exhaustive()
302 }
303}
304
305impl TypeInterner {
    /// Creates an empty interner; every sub-interner initializes lazily.
    pub fn new() -> Self {
        let shards: Vec<TypeShard> = (0..SHARD_COUNT).map(|_| TypeShard::new()).collect();

        Self {
            shards,
            string_interner: ShardedInterner::new(),
            type_lists: ConcurrentSliceInterner::new(),
            tuple_lists: ConcurrentSliceInterner::new(),
            template_lists: ConcurrentSliceInterner::new(),
            object_shapes: ConcurrentValueInterner::new(),
            object_property_maps: OnceLock::new(),
            function_shapes: ConcurrentValueInterner::new(),
            callable_shapes: ConcurrentValueInterner::new(),
            conditional_types: ConcurrentValueInterner::new(),
            mapped_types: ConcurrentValueInterner::new(),
            applications: ConcurrentValueInterner::new(),
            identity_comparable_cache: DashMap::with_hasher(FxBuildHasher),
            array_base_type: OnceLock::new(),
            array_base_type_params: OnceLock::new(),
            boxed_types: DashMap::with_hasher(FxBuildHasher),
        }
    }
333
    /// Records the global array base type and its type parameters.
    /// Idempotent: only the first call wins (`OnceLock::set`).
    pub fn set_array_base_type(&self, type_id: TypeId, params: Vec<TypeParamInfo>) {
        let _ = self.array_base_type.set(type_id);
        let _ = self.array_base_type_params.set(params);
    }

    /// Returns the array base type, if `set_array_base_type` has run.
    #[inline]
    pub fn get_array_base_type(&self) -> Option<TypeId> {
        self.array_base_type.get().copied()
    }

    /// Returns the array base type's parameters, or `&[]` when unset.
    #[inline]
    pub fn get_array_base_type_params(&self) -> &[TypeParamInfo] {
        self.array_base_type_params
            .get()
            .map_or(&[], |v| v.as_slice())
    }

    /// Registers the boxed type for an intrinsic kind (last call wins).
    pub fn set_boxed_type(&self, kind: IntrinsicKind, type_id: TypeId) {
        self.boxed_types.insert(kind, type_id);
    }

    /// Looks up the boxed type registered for `kind`.
    #[inline]
    pub fn get_boxed_type(&self, kind: IntrinsicKind) -> Option<TypeId> {
        self.boxed_types.get(&kind).map(|r| *r)
    }
371
    /// Returns the lazy per-shape property index table, creating it on first use.
    #[inline]
    fn get_object_property_maps(&self) -> &ObjectPropertyIndex {
        self.object_property_maps
            .get_or_init(|| DashMap::with_hasher(FxBuildHasher))
    }

    /// Memoized wrapper around `visitor::is_identity_comparable_type`.
    /// A benign race may compute the result twice; both writes agree.
    #[inline]
    pub fn is_identity_comparable_type(&self, type_id: TypeId) -> bool {
        if let Some(cached) = self.identity_comparable_cache.get(&type_id) {
            return *cached;
        }
        let result = is_identity_comparable_type(self, type_id);
        self.identity_comparable_cache.insert(type_id, result);
        result
    }
393
    /// Interns a string, returning its atom.
    pub fn intern_string(&self, s: &str) -> Atom {
        self.string_interner.intern(s)
    }

    /// Resolves an atom to an owned `String` (copies the text).
    pub fn resolve_atom(&self, atom: Atom) -> String {
        self.string_interner.resolve(atom).to_string()
    }

    /// Resolves an atom to the shared `Arc<str>` without copying.
    pub fn resolve_atom_ref(&self, atom: Atom) -> Arc<str> {
        self.string_interner.resolve(atom)
    }
410
    /// Returns the interned type list; unknown ids yield the empty list.
    pub fn type_list(&self, id: TypeListId) -> Arc<[TypeId]> {
        self.type_lists
            .get(id.0)
            .unwrap_or_else(|| self.type_lists.empty())
    }

    /// Returns the interned tuple-element list; unknown ids yield the empty list.
    pub fn tuple_list(&self, id: TupleListId) -> Arc<[TupleElement]> {
        self.tuple_lists
            .get(id.0)
            .unwrap_or_else(|| self.tuple_lists.empty())
    }

    /// Returns the interned template spans; unknown ids yield the empty list.
    pub fn template_list(&self, id: TemplateLiteralId) -> Arc<[TemplateSpan]> {
        self.template_lists
            .get(id.0)
            .unwrap_or_else(|| self.template_lists.empty())
    }

    /// Returns the interned object shape; unknown ids yield an empty shape.
    pub fn object_shape(&self, id: ObjectShapeId) -> Arc<ObjectShape> {
        self.object_shapes.get(id.0).unwrap_or_else(|| {
            Arc::new(ObjectShape {
                flags: ObjectFlags::empty(),
                properties: Vec::new(),
                string_index: None,
                number_index: None,
                symbol: None,
            })
        })
    }
440
    /// Looks up a property's index in a shape via the cached name map.
    /// Small shapes report `Uncached` — a linear scan is cheaper than
    /// building and probing a hash map below `PROPERTY_MAP_THRESHOLD`.
    pub fn object_property_index(&self, shape_id: ObjectShapeId, name: Atom) -> PropertyLookup {
        let shape = self.object_shape(shape_id);
        if shape.properties.len() < PROPERTY_MAP_THRESHOLD {
            return PropertyLookup::Uncached;
        }

        match self.object_property_map(shape_id, &shape) {
            Some(map) => match map.get(&name) {
                Some(&idx) => PropertyLookup::Found(idx),
                None => PropertyLookup::NotFound,
            },
            None => PropertyLookup::Uncached,
        }
    }
455
    /// Returns (building on demand) the `name -> index` map for a large shape.
    /// Returns `None` for shapes below `PROPERTY_MAP_THRESHOLD`.
    fn object_property_map(
        &self,
        shape_id: ObjectShapeId,
        shape: &ObjectShape,
    ) -> Option<Arc<FxHashMap<Atom, usize>>> {
        if shape.properties.len() < PROPERTY_MAP_THRESHOLD {
            return None;
        }

        let maps = self.get_object_property_maps();

        // Fast path: another caller already built the map for this shape.
        if let Some(map) = maps.get(&shape_id) {
            return Some(std::sync::Arc::clone(&map));
        }

        // Build outside the entry lock to keep the critical section small.
        let mut map = FxHashMap::default();
        for (idx, prop) in shape.properties.iter().enumerate() {
            map.insert(prop.name, idx);
        }
        let map = Arc::new(map);

        // A racing builder is harmless: the first insert wins and the
        // loser discards its copy and reuses the stored one.
        match maps.entry(shape_id) {
            Entry::Vacant(e) => {
                e.insert(std::sync::Arc::clone(&map));
                Some(map)
            }
            Entry::Occupied(e) => Some(std::sync::Arc::clone(e.get())),
        }
    }
492
    /// Returns the interned function shape; unknown ids yield an error-typed
    /// placeholder shape rather than panicking.
    pub fn function_shape(&self, id: FunctionShapeId) -> Arc<FunctionShape> {
        self.function_shapes.get(id.0).unwrap_or_else(|| {
            Arc::new(FunctionShape {
                type_params: Vec::new(),
                params: Vec::new(),
                this_type: None,
                return_type: TypeId::ERROR,
                type_predicate: None,
                is_constructor: false,
                is_method: false,
            })
        })
    }

    /// Returns the interned callable shape; unknown ids yield an empty shape.
    pub fn callable_shape(&self, id: CallableShapeId) -> Arc<CallableShape> {
        self.callable_shapes.get(id.0).unwrap_or_else(|| {
            Arc::new(CallableShape {
                call_signatures: Vec::new(),
                construct_signatures: Vec::new(),
                properties: Vec::new(),
                ..Default::default()
            })
        })
    }

    /// Returns the interned conditional type; unknown ids yield error arms.
    pub fn conditional_type(&self, id: ConditionalTypeId) -> Arc<ConditionalType> {
        self.conditional_types.get(id.0).unwrap_or_else(|| {
            Arc::new(ConditionalType {
                check_type: TypeId::ERROR,
                extends_type: TypeId::ERROR,
                true_type: TypeId::ERROR,
                false_type: TypeId::ERROR,
                is_distributive: false,
            })
        })
    }

    /// Returns the interned mapped type; unknown ids yield an error-typed
    /// placeholder with a `_` type parameter.
    pub fn mapped_type(&self, id: MappedTypeId) -> Arc<MappedType> {
        self.mapped_types.get(id.0).unwrap_or_else(|| {
            Arc::new(MappedType {
                type_param: TypeParamInfo {
                    is_const: false,
                    name: self.intern_string("_"),
                    constraint: None,
                    default: None,
                },
                constraint: TypeId::ERROR,
                name_type: None,
                template: TypeId::ERROR,
                readonly_modifier: None,
                optional_modifier: None,
            })
        })
    }

    /// Returns the interned type application; unknown ids yield an error base.
    pub fn type_application(&self, id: TypeApplicationId) -> Arc<TypeApplication> {
        self.applications.get(id.0).unwrap_or_else(|| {
            Arc::new(TypeApplication {
                base: TypeId::ERROR,
                args: Vec::new(),
            })
        })
    }
556
    /// Interns `key`, returning a stable `TypeId`.
    ///
    /// Intrinsics, `Error`, and boolean literals map to fixed ids without
    /// touching the shards. Returns `TypeId::ERROR` once the (approximate)
    /// global cap or a shard's index space is exhausted.
    pub fn intern(&self, key: TypeData) -> TypeId {
        if let Some(id) = self.get_intrinsic_id(&key) {
            return id;
        }

        // Soft cap based on a sampled estimate; see `approximate_count`.
        if self.approximate_count() > MAX_INTERNED_TYPES {
            return TypeId::ERROR;
        }

        // The shard is chosen from the key's hash, so equal keys always land
        // in the same shard.
        let mut hasher = FxHasher::default();
        key.hash(&mut hasher);
        let shard_idx = (hasher.finish() as usize) & (SHARD_COUNT - 1);
        let shard = &self.shards[shard_idx];
        let inner = shard.get_inner();

        // Fast path: already interned.
        if let Some(entry) = inner.key_to_index.get(&key) {
            let local_index = *entry.value();
            return self.make_id(local_index, shard_idx as u32);
        }

        // Reserve an index up front; if we lose the race below, the index
        // simply goes unused.
        let local_index = shard.next_index.fetch_add(1, Ordering::Relaxed);
        if local_index > (u32::MAX >> SHARD_BITS) {
            // Shard-local index space exhausted; cannot pack into a TypeId.
            return TypeId::ERROR;
        }

        match inner.key_to_index.entry(key.clone()) {
            Entry::Vacant(e) => {
                e.insert(local_index);
                let key_arc = Arc::new(key);
                inner.index_to_key.insert(local_index, key_arc);
                self.make_id(local_index, shard_idx as u32)
            }
            // Another thread interned the same key first; use its index.
            Entry::Occupied(e) => {
                let existing_index = *e.get();
                self.make_id(existing_index, shard_idx as u32)
            }
        }
    }
609
610 pub fn lookup(&self, id: TypeId) -> Option<TypeData> {
614 if id.is_intrinsic() || id.is_error() {
615 return self.get_intrinsic_key(id);
616 }
617
618 let raw_val = id.0.checked_sub(TypeId::FIRST_USER)?;
619 let shard_idx = (raw_val & SHARD_MASK) as usize;
620 let local_index = raw_val >> SHARD_BITS;
621
622 let shard = self.shards.get(shard_idx)?;
623 if shard.is_empty() {
625 return None;
626 }
627 shard
628 .get_inner()
629 .index_to_key
630 .get(&{ local_index })
631 .map(|r| r.value().as_ref().clone())
632 }
633
    /// Interns an ordered list of type ids.
    fn intern_type_list(&self, members: Vec<TypeId>) -> TypeListId {
        TypeListId(self.type_lists.intern(&members))
    }

    /// Interns a tuple-element list.
    fn intern_tuple_list(&self, elements: Vec<TupleElement>) -> TupleListId {
        TupleListId(self.tuple_lists.intern(&elements))
    }

    /// Interns template-literal spans.
    pub(crate) fn intern_template_list(&self, spans: Vec<TemplateSpan>) -> TemplateLiteralId {
        TemplateLiteralId(self.template_lists.intern(&spans))
    }

    /// Interns an object shape.
    pub fn intern_object_shape(&self, shape: ObjectShape) -> ObjectShapeId {
        ObjectShapeId(self.object_shapes.intern(shape))
    }

    /// Interns a function shape.
    fn intern_function_shape(&self, shape: FunctionShape) -> FunctionShapeId {
        FunctionShapeId(self.function_shapes.intern(shape))
    }

    /// Interns a callable shape.
    fn intern_callable_shape(&self, shape: CallableShape) -> CallableShapeId {
        CallableShapeId(self.callable_shapes.intern(shape))
    }

    /// Interns a conditional-type payload.
    fn intern_conditional_type(&self, conditional: ConditionalType) -> ConditionalTypeId {
        ConditionalTypeId(self.conditional_types.intern(conditional))
    }

    /// Interns a mapped-type payload.
    fn intern_mapped_type(&self, mapped: MappedType) -> MappedTypeId {
        MappedTypeId(self.mapped_types.intern(mapped))
    }

    /// Interns a type-application payload.
    fn intern_application(&self, application: TypeApplication) -> TypeApplicationId {
        TypeApplicationId(self.applications.intern(application))
    }
669
    /// Total id count, including the reserved intrinsic range below
    /// `TypeId::FIRST_USER`. Exact but walks every shard.
    pub fn len(&self) -> usize {
        let mut total = TypeId::FIRST_USER as usize;
        for shard in &self.shards {
            total += shard.next_index.load(Ordering::Relaxed) as usize;
        }
        total
    }

    /// True when no user types have been interned yet.
    pub fn is_empty(&self) -> bool {
        self.len() <= TypeId::FIRST_USER as usize
    }

    /// Cheap estimate of `len`: samples the first 4 shards and scales up by
    /// `SHARD_COUNT / 4`, assuming the hash spreads keys roughly evenly.
    #[inline]
    fn approximate_count(&self) -> usize {
        let mut sample_total: usize = 0;
        for shard in self.shards.iter().take(4) {
            sample_total += shard.next_index.load(Ordering::Relaxed) as usize;
        }
        (sample_total * SHARD_COUNT / 4) + TypeId::FIRST_USER as usize
    }
697
    /// Packs a shard-local index and shard number into a public `TypeId`:
    /// the low `SHARD_BITS` carry the shard, the remaining bits the index,
    /// offset past the reserved intrinsic range.
    #[inline]
    fn make_id(&self, local_index: u32, shard_idx: u32) -> TypeId {
        let raw_val = (local_index << SHARD_BITS) | (shard_idx & SHARD_MASK);
        let id = TypeId(TypeId::FIRST_USER + raw_val);

        debug_assert!(
            id.is_global(),
            "Global TypeId overflow: {id:?} - would conflict with local ID space"
        );

        id
    }

    /// Maps data with a fixed, pre-allocated id (intrinsics, `Error`,
    /// boolean literals) to that id; everything else returns `None`.
    const fn get_intrinsic_id(&self, key: &TypeData) -> Option<TypeId> {
        match key {
            TypeData::Intrinsic(kind) => Some(kind.to_type_id()),
            TypeData::Error => Some(TypeId::ERROR),
            TypeData::Literal(LiteralValue::Boolean(true)) => Some(TypeId::BOOLEAN_TRUE),
            TypeData::Literal(LiteralValue::Boolean(false)) => Some(TypeId::BOOLEAN_FALSE),
            _ => None,
        }
    }
724
    /// Inverse of `get_intrinsic_id`: reconstructs `TypeData` for ids in the
    /// fixed intrinsic range. Returns `None` for user-space ids.
    const fn get_intrinsic_key(&self, id: TypeId) -> Option<TypeData> {
        match id {
            TypeId::NONE | TypeId::ERROR => Some(TypeData::Error),
            TypeId::NEVER => Some(TypeData::Intrinsic(IntrinsicKind::Never)),
            TypeId::UNKNOWN => Some(TypeData::Intrinsic(IntrinsicKind::Unknown)),
            TypeId::ANY => Some(TypeData::Intrinsic(IntrinsicKind::Any)),
            TypeId::VOID => Some(TypeData::Intrinsic(IntrinsicKind::Void)),
            TypeId::UNDEFINED => Some(TypeData::Intrinsic(IntrinsicKind::Undefined)),
            TypeId::NULL => Some(TypeData::Intrinsic(IntrinsicKind::Null)),
            TypeId::BOOLEAN => Some(TypeData::Intrinsic(IntrinsicKind::Boolean)),
            TypeId::NUMBER => Some(TypeData::Intrinsic(IntrinsicKind::Number)),
            TypeId::STRING => Some(TypeData::Intrinsic(IntrinsicKind::String)),
            TypeId::BIGINT => Some(TypeData::Intrinsic(IntrinsicKind::Bigint)),
            TypeId::SYMBOL => Some(TypeData::Intrinsic(IntrinsicKind::Symbol)),
            // PROMISE_BASE shares the `object` intrinsic representation.
            TypeId::OBJECT | TypeId::PROMISE_BASE => {
                Some(TypeData::Intrinsic(IntrinsicKind::Object))
            }
            TypeId::BOOLEAN_TRUE => Some(TypeData::Literal(LiteralValue::Boolean(true))),
            TypeId::BOOLEAN_FALSE => Some(TypeData::Literal(LiteralValue::Boolean(false))),
            TypeId::FUNCTION => Some(TypeData::Intrinsic(IntrinsicKind::Function)),
            _ => None,
        }
    }

    /// Fixed id for an intrinsic kind; never touches the shards.
    pub const fn intrinsic(&self, kind: IntrinsicKind) -> TypeId {
        kind.to_type_id()
    }
757
    /// Interns a string-literal type from a `&str`.
    pub fn literal_string(&self, value: &str) -> TypeId {
        let atom = self.intern_string(value);
        self.intern(TypeData::Literal(LiteralValue::String(atom)))
    }

    /// Interns a string-literal type from an existing atom.
    pub fn literal_string_atom(&self, atom: Atom) -> TypeId {
        self.intern(TypeData::Literal(LiteralValue::String(atom)))
    }

    /// Interns a number-literal type (`OrderedFloat` makes f64 hashable).
    pub fn literal_number(&self, value: f64) -> TypeId {
        self.intern(TypeData::Literal(LiteralValue::Number(OrderedFloat(value))))
    }

    /// Interns a boolean-literal type (maps to a fixed intrinsic id).
    pub fn literal_boolean(&self, value: bool) -> TypeId {
        self.intern(TypeData::Literal(LiteralValue::Boolean(value)))
    }

    /// Interns a bigint-literal type, canonicalizing the digit string first.
    pub fn literal_bigint(&self, value: &str) -> TypeId {
        let atom = self.intern_string(&self.normalize_bigint_literal(value));
        self.intern(TypeData::Literal(LiteralValue::BigInt(atom)))
    }
784
785 pub fn literal_bigint_with_sign(&self, negative: bool, digits: &str) -> TypeId {
787 let normalized = self.normalize_bigint_literal(digits);
788 if normalized == "0" {
789 return self.literal_bigint(&normalized);
790 }
791 if !negative {
792 return self.literal_bigint(&normalized);
793 }
794
795 let mut value = String::with_capacity(normalized.len() + 1);
796 value.push('-');
797 value.push_str(&normalized);
798 let atom = self.string_interner.intern_owned(value);
799 self.intern(TypeData::Literal(LiteralValue::BigInt(atom)))
800 }
801
802 fn normalize_bigint_literal(&self, value: &str) -> String {
803 let stripped = value.replace('_', "");
804 if stripped.is_empty() {
805 return "0".to_string();
806 }
807
808 let (base, digits) = if stripped.starts_with("0x") || stripped.starts_with("0X") {
809 (16, &stripped[2..])
810 } else if stripped.starts_with("0o") || stripped.starts_with("0O") {
811 (8, &stripped[2..])
812 } else if stripped.starts_with("0b") || stripped.starts_with("0B") {
813 (2, &stripped[2..])
814 } else {
815 (10, stripped.as_str())
816 };
817
818 if digits.is_empty() {
819 return "0".to_string();
820 }
821
822 if base == 10 {
823 let normalized = digits.trim_start_matches('0');
824 return if normalized.is_empty() {
825 "0".to_string()
826 } else {
827 normalized.to_string()
828 };
829 }
830
831 let mut decimal: Vec<u8> = vec![0];
832 for ch in digits.chars() {
833 let Some(digit) = ch.to_digit(base) else {
834 return "0".to_string();
835 };
836 let digit = digit as u16;
837 let mut carry = digit;
838 let base = base as u16;
839 for dec in decimal.iter_mut() {
840 let value = u16::from(*dec) * base + carry;
841 *dec = (value % 10) as u8;
842 carry = value / 10;
843 }
844 while carry > 0 {
845 decimal.push((carry % 10) as u8);
846 carry /= 10;
847 }
848 }
849
850 while decimal.len() > 1 && *decimal.last().unwrap_or(&0) == 0 {
851 decimal.pop();
852 }
853
854 let mut out = String::with_capacity(decimal.len());
855 for digit in decimal.iter().rev() {
856 out.push(char::from(b'0' + *digit));
857 }
858 out
859 }
860
    /// Builds a fully normalized union of `members`.
    pub fn union(&self, members: Vec<TypeId>) -> TypeId {
        self.union_from_iter(members)
    }

    /// Builds a union from members the caller guarantees are already
    /// flattened, sorted, and deduplicated — normalization is skipped.
    pub fn union_from_sorted_vec(&self, flat: Vec<TypeId>) -> TypeId {
        if flat.is_empty() {
            return TypeId::NEVER;
        }
        if flat.len() == 1 {
            return flat[0];
        }

        let list_id = self.intern_type_list(flat);
        self.intern(TypeData::Union(list_id))
    }

    /// Builds a union that flattens, sorts, dedups, and drops `never`, but
    /// performs none of the further reductions `normalize_union` applies.
    pub fn union_preserve_members(&self, members: Vec<TypeId>) -> TypeId {
        if members.is_empty() {
            return TypeId::NEVER;
        }

        // Splice nested unions in one level deep.
        let mut flat: TypeListBuffer = SmallVec::new();
        for member in members {
            if let Some(TypeData::Union(inner)) = self.lookup(member) {
                let members = self.type_list(inner);
                flat.extend(members.iter().copied());
            } else {
                flat.push(member);
            }
        }

        flat.sort_by_key(|id| id.0);
        flat.dedup();
        // `never` is the union identity.
        flat.retain(|id| *id != TypeId::NEVER);

        if flat.is_empty() {
            return TypeId::NEVER;
        }
        if flat.len() == 1 {
            return flat[0];
        }

        let list_id = self.intern_type_list(flat.into_vec());
        self.intern(TypeData::Union(list_id))
    }

    /// Union of two types, with trivial fast paths for equal / `never` inputs.
    pub fn union2(&self, left: TypeId, right: TypeId) -> TypeId {
        if left == right {
            return left;
        }
        if left == TypeId::NEVER {
            return right;
        }
        if right == TypeId::NEVER {
            return left;
        }
        self.union_from_iter([left, right])
    }

    /// Union of three types.
    pub fn union3(&self, first: TypeId, second: TypeId, third: TypeId) -> TypeId {
        self.union_from_iter([first, second, third])
    }
932
    /// Shared union construction: flattens all members (splicing nested
    /// unions) and runs full normalization.
    fn union_from_iter<I>(&self, members: I) -> TypeId
    where
        I: IntoIterator<Item = TypeId>,
    {
        let mut iter = members.into_iter();
        // Fast paths: the empty union is `never`; a 1-union is its member.
        let Some(first) = iter.next() else {
            return TypeId::NEVER;
        };
        let Some(second) = iter.next() else {
            return first;
        };

        let mut flat: TypeListBuffer = SmallVec::new();
        self.push_union_member(&mut flat, first);
        self.push_union_member(&mut flat, second);
        for member in iter {
            self.push_union_member(&mut flat, member);
        }

        self.normalize_union(flat)
    }

    /// Appends `member` to `flat`, splicing in the members of a nested union.
    fn push_union_member(&self, flat: &mut TypeListBuffer, member: TypeId) {
        if let Some(TypeData::Union(inner)) = self.lookup(member) {
            let members = self.type_list(inner);
            flat.extend(members.iter().copied());
        } else {
            flat.push(member);
        }
    }
963
    /// Normalizes a flattened union member list into a canonical type.
    /// Stages: sort+dedup, error poisoning, any/unknown absorption, `never`
    /// removal, literal absorption, then (conditionally) subtype reduction.
    fn normalize_union(&self, mut flat: TypeListBuffer) -> TypeId {
        // Canonical order + dedup so structurally equal unions intern alike.
        flat.sort_by_key(|id| id.0);
        flat.dedup();

        // An error member poisons the entire union.
        if flat.contains(&TypeId::ERROR) {
            return TypeId::ERROR;
        }
        if flat.is_empty() {
            return TypeId::NEVER;
        }
        if flat.len() == 1 {
            return flat[0];
        }
        // `any` and `unknown` absorb all other members.
        if flat.contains(&TypeId::ANY) {
            return TypeId::ANY;
        }
        if flat.contains(&TypeId::UNKNOWN) {
            return TypeId::UNKNOWN;
        }
        // `never` is the union identity and can be dropped.
        flat.retain(|id| *id != TypeId::NEVER);
        if flat.is_empty() {
            return TypeId::NEVER;
        }
        if flat.len() == 1 {
            return flat[0];
        }

        // Collapse literals into their covering primitives (normalize module).
        self.absorb_literals_into_primitives(&mut flat);

        if flat.is_empty() {
            return TypeId::NEVER;
        }
        if flat.len() == 1 {
            return flat[0];
        }

        // Skip subtype reduction when type parameters or unresolved (lazy)
        // members are present — their relationships aren't known yet.
        let has_complex = flat.iter().any(|&id| {
            matches!(
                self.lookup(id),
                Some(TypeData::TypeParameter(_) | TypeData::Lazy(_))
            )
        });
        if !has_complex {
            self.reduce_union_subtypes(&mut flat);
        }

        if flat.is_empty() {
            return TypeId::NEVER;
        }
        if flat.len() == 1 {
            return flat[0];
        }

        let list_id = self.intern_type_list(flat.into_vec());
        self.intern(TypeData::Union(list_id))
    }
1031
    /// Builds a fully normalized intersection of `members`.
    pub fn intersection(&self, members: Vec<TypeId>) -> TypeId {
        self.intersection_from_iter(members)
    }

    /// Intersection of two types.
    pub fn intersection2(&self, left: TypeId, right: TypeId) -> TypeId {
        self.intersection_from_iter([left, right])
    }
1041
1042 pub fn intersect_types_raw(&self, members: Vec<TypeId>) -> TypeId {
1052 let mut flat: TypeListBuffer = SmallVec::new();
1054
1055 for member in members {
1056 if let Some(TypeData::Intersection(inner)) = self.lookup(member) {
1058 let inner_members = self.type_list(inner);
1059 flat.extend(inner_members.iter().copied());
1060 } else {
1061 flat.push(member);
1062 }
1063 }
1064
1065 let has_unresolved = flat
1069 .iter()
1070 .any(|&id| matches!(self.lookup(id), Some(TypeData::Lazy(_))));
1071 if has_unresolved {
1072 flat.sort_by_key(|id| id.0);
1074 flat.dedup();
1075 let list_id = self.intern_type_list(flat.into_vec());
1076 return self.intern(TypeData::Intersection(list_id));
1077 }
1078
1079 let has_callables = flat.iter().any(|&id| self.is_callable_type(id));
1086
1087 if !has_callables {
1088 flat.sort_by_key(|id| id.0);
1090 flat.dedup();
1091 } else {
1092 let mut callables = SmallVec::<[TypeId; 4]>::new();
1094
1095 let mut i = 0;
1098 while i < flat.len() {
1099 if self.is_callable_type(flat[i]) {
1100 callables.push(flat.remove(i));
1101 } else {
1102 i += 1;
1103 }
1104 }
1105
1106 flat.sort_by_key(|id| id.0);
1108 flat.dedup();
1109
1110 let mut seen = FxHashSet::default();
1112 callables.retain(|id| seen.insert(*id));
1113
1114 flat.extend(callables);
1116 }
1117
1118 if flat.contains(&TypeId::NEVER) {
1124 return TypeId::NEVER;
1125 }
1126
1127 if flat.contains(&TypeId::ANY) {
1129 return TypeId::ANY;
1130 }
1131
1132 flat.retain(|id| *id != TypeId::UNKNOWN);
1134
1135 if self.has_disjoint_primitives(&flat) {
1138 return TypeId::NEVER;
1139 }
1140
1141 if flat.is_empty() {
1146 return TypeId::UNKNOWN;
1147 }
1148 if flat.len() == 1 {
1149 return flat[0];
1150 }
1151
1152 let list_id = self.intern_type_list(flat.into_vec());
1154 self.intern(TypeData::Intersection(list_id))
1155 }
1156
    /// Two-member convenience wrapper for `intersect_types_raw`.
    pub fn intersect_types_raw2(&self, a: TypeId, b: TypeId) -> TypeId {
        self.intersect_types_raw(vec![a, b])
    }

    /// Shared intersection construction: flattens all members then runs
    /// normalization (see the `intersection` module).
    fn intersection_from_iter<I>(&self, members: I) -> TypeId
    where
        I: IntoIterator<Item = TypeId>,
    {
        let mut iter = members.into_iter();
        // Fast paths: the empty intersection is `unknown`; a singleton is
        // the member itself.
        let Some(first) = iter.next() else {
            return TypeId::UNKNOWN;
        };
        let Some(second) = iter.next() else {
            return first;
        };

        let mut flat: TypeListBuffer = SmallVec::new();
        self.push_intersection_member(&mut flat, first);
        self.push_intersection_member(&mut flat, second);
        for member in iter {
            self.push_intersection_member(&mut flat, member);
        }

        self.normalize_intersection(flat)
    }

    /// Appends `member` to `flat`, splicing in nested intersection members.
    fn push_intersection_member(&self, flat: &mut TypeListBuffer, member: TypeId) {
        if let Some(TypeData::Intersection(inner)) = self.lookup(member) {
            let members = self.type_list(inner);
            flat.extend(members.iter().copied());
        } else {
            flat.push(member);
        }
    }

    /// True for function and callable (multi-signature) types.
    fn is_callable_type(&self, id: TypeId) -> bool {
        matches!(
            self.lookup(id),
            Some(TypeData::Function(_) | TypeData::Callable(_))
        )
    }
1205
    /// Array type `T[]`.
    pub fn array(&self, element: TypeId) -> TypeId {
        self.intern(TypeData::Array(element))
    }

    /// Polymorphic `this` type.
    pub fn this_type(&self) -> TypeId {
        self.intern(TypeData::ThisType)
    }

    /// `readonly T[]`: an array wrapped in a readonly marker.
    pub fn readonly_array(&self, element: TypeId) -> TypeId {
        let array_type = self.array(element);
        self.intern(TypeData::ReadonlyType(array_type))
    }

    /// Tuple type from its element descriptors.
    pub fn tuple(&self, elements: Vec<TupleElement>) -> TypeId {
        let list_id = self.intern_tuple_list(elements);
        self.intern(TypeData::Tuple(list_id))
    }

    /// Readonly tuple: a tuple wrapped in a readonly marker.
    pub fn readonly_tuple(&self, elements: Vec<TupleElement>) -> TypeId {
        let tuple_type = self.tuple(elements);
        self.intern(TypeData::ReadonlyType(tuple_type))
    }

    /// Wraps any type in a readonly marker.
    pub fn readonly_type(&self, inner: TypeId) -> TypeId {
        self.intern(TypeData::ReadonlyType(inner))
    }

    /// `NoInfer<T>` wrapper.
    pub fn no_infer(&self, inner: TypeId) -> TypeId {
        self.intern(TypeData::NoInfer(inner))
    }

    /// `unique symbol` tied to its declaring symbol.
    pub fn unique_symbol(&self, symbol: SymbolRef) -> TypeId {
        self.intern(TypeData::UniqueSymbol(symbol))
    }

    /// `infer T` placeholder.
    pub fn infer(&self, info: TypeParamInfo) -> TypeId {
        self.intern(TypeData::Infer(info))
    }

    /// Type parameter referenced by position/index.
    pub fn bound_parameter(&self, index: u32) -> TypeId {
        self.intern(TypeData::BoundParameter(index))
    }

    /// Recursive back-reference marker at the given nesting depth.
    pub fn recursive(&self, depth: u32) -> TypeId {
        self.intern(TypeData::Recursive(depth))
    }

    /// `keyof T`.
    pub fn keyof(&self, inner: TypeId) -> TypeId {
        self.intern(TypeData::KeyOf(inner))
    }

    /// Indexed access `T[K]`.
    pub fn index_access(&self, object_type: TypeId, index_type: TypeId) -> TypeId {
        self.intern(TypeData::IndexAccess(object_type, index_type))
    }

    /// Enum type: nominal definition id plus its structural member type.
    pub fn enum_type(&self, def_id: DefId, structural_type: TypeId) -> TypeId {
        self.intern(TypeData::Enum(def_id, structural_type))
    }

    /// Plain object type from properties (no flags, no index signatures).
    pub fn object(&self, properties: Vec<PropertyInfo>) -> TypeId {
        self.object_with_flags(properties, ObjectFlags::empty())
    }

    /// Object type marked with `FRESH_LITERAL`.
    pub fn object_fresh(&self, properties: Vec<PropertyInfo>) -> TypeId {
        self.object_with_flags(properties, ObjectFlags::FRESH_LITERAL)
    }

    /// Object type with explicit flags. Properties are sorted by name so
    /// structurally equal shapes intern to the same id.
    pub fn object_with_flags(
        &self,
        mut properties: Vec<PropertyInfo>,
        flags: ObjectFlags,
    ) -> TypeId {
        properties.sort_by_key(|a| a.name);
        let shape_id = self.intern_object_shape(ObjectShape {
            flags,
            properties,
            string_index: None,
            number_index: None,
            symbol: None,
        });
        self.intern(TypeData::Object(shape_id))
    }

    /// Like `object_with_flags`, additionally recording a declaring symbol.
    pub fn object_with_flags_and_symbol(
        &self,
        mut properties: Vec<PropertyInfo>,
        flags: ObjectFlags,
        symbol: Option<tsz_binder::SymbolId>,
    ) -> TypeId {
        properties.sort_by_key(|a| a.name);
        let shape_id = self.intern_object_shape(ObjectShape {
            flags,
            properties,
            string_index: None,
            number_index: None,
            symbol,
        });
        self.intern(TypeData::Object(shape_id))
    }

    /// Object type whose shape carries index signatures.
    pub fn object_with_index(&self, mut shape: ObjectShape) -> TypeId {
        shape.properties.sort_by_key(|a| a.name);
        let shape_id = self.intern_object_shape(shape);
        self.intern(TypeData::ObjectWithIndex(shape_id))
    }

    /// Function type from a single signature shape.
    pub fn function(&self, shape: FunctionShape) -> TypeId {
        let shape_id = self.intern_function_shape(shape);
        self.intern(TypeData::Function(shape_id))
    }

    /// Callable type (call/construct signatures plus properties).
    pub fn callable(&self, shape: CallableShape) -> TypeId {
        let shape_id = self.intern_callable_shape(shape);
        self.intern(TypeData::Callable(shape_id))
    }

    /// Conditional type `C extends E ? T : F`.
    pub fn conditional(&self, conditional: ConditionalType) -> TypeId {
        let conditional_id = self.intern_conditional_type(conditional);
        self.intern(TypeData::Conditional(conditional_id))
    }

    /// Mapped type `{ [K in C]: T }`.
    pub fn mapped(&self, mapped: MappedType) -> TypeId {
        let mapped_id = self.intern_mapped_type(mapped);
        self.intern(TypeData::Mapped(mapped_id))
    }

    /// Intrinsic string-mapping type applied to `type_arg`.
    pub fn string_intrinsic(
        &self,
        kind: crate::types::StringIntrinsicKind,
        type_arg: TypeId,
    ) -> TypeId {
        self.intern(TypeData::StringIntrinsic { kind, type_arg })
    }

    /// Lazy reference to a named type via its symbol.
    pub fn reference(&self, symbol: SymbolRef) -> TypeId {
        // SymbolRef and DefId share the same underlying index here.
        let def_id = DefId(symbol.0);
        self.intern(TypeData::Lazy(def_id))
    }

    /// Lazy reference to a definition, resolved on demand.
    pub fn lazy(&self, def_id: DefId) -> TypeId {
        self.intern(TypeData::Lazy(def_id))
    }

    /// Type-parameter type.
    pub fn type_param(&self, info: TypeParamInfo) -> TypeId {
        self.intern(TypeData::TypeParameter(info))
    }

    /// `typeof x` query type.
    pub fn type_query(&self, symbol: SymbolRef) -> TypeId {
        self.intern(TypeData::TypeQuery(symbol))
    }

    /// Generic application `Base<Args...>`.
    pub fn application(&self, base: TypeId, args: Vec<TypeId>) -> TypeId {
        let app_id = self.intern_application(TypeApplication { base, args });
        self.intern(TypeData::Application(app_id))
    }
1412}
1413
impl Default for TypeInterner {
    /// Equivalent to `TypeInterner::new()`.
    fn default() -> Self {
        Self::new()
    }
}
1419
// Unit tests live out-of-line under `tests/`, compiled only for test builds.
#[cfg(test)]
#[path = "../../tests/intern_tests.rs"]
mod tests;

#[cfg(test)]
#[path = "../../tests/concurrent_tests.rs"]
mod concurrent_tests;