1use crate::{
2 expr::ModPath,
3 typ::{FnType, PrintFlag, Type, PRINT_FLAGS},
4};
5use arcstr::ArcStr;
6use compact_str::format_compact;
7use parking_lot::{RwLock, RwLockReadGuard, RwLockWriteGuard};
8use std::{
9 cmp::{Eq, PartialEq},
10 fmt::{self, Debug},
11 hash::Hash,
12 ops::Deref,
13};
14use triomphe::Arc;
15
// Generates the `TVarId` newtype backed by an atomic counter (project-local
// `atomic_id!` macro). NOTE(review): semantics inferred from usage below
// (`TVarId::new()`, tuple field `.0`) — confirm against the macro definition.
atomic_id!(TVarId);
17
/// Returns true if binding the type variable whose storage cell (the
/// `Arc<RwLock<Option<Type>>>`) has address `addr` to the type `t` would
/// create a cycle, i.e. `t` reaches that same storage cell, either directly
/// or through the bindings of other type variables.
pub(super) fn would_cycle_inner(addr: usize, t: &Type) -> bool {
    match t {
        // Leaf types contain no tvars and can never close a cycle.
        Type::Primitive(_) | Type::Any | Type::Bottom | Type::Ref { .. } => false,
        Type::TVar(t) => {
            // Either this tvar IS the one being bound (same storage cell) ...
            Arc::as_ptr(&t.read().typ).addr() == addr
                // ... or its current binding (if any) reaches it.
                || match &*t.read().typ.read() {
                    None => false,
                    Some(t) => would_cycle_inner(addr, t),
                }
        }
        // Recurse into every type position that can contain a tvar.
        Type::Error(t) => would_cycle_inner(addr, t),
        Type::Array(a) => would_cycle_inner(addr, &**a),
        Type::Map { key, value } => {
            would_cycle_inner(addr, &**key) || would_cycle_inner(addr, &**value)
        }
        Type::ByRef(t) => would_cycle_inner(addr, t),
        Type::Tuple(ts) => ts.iter().any(|t| would_cycle_inner(addr, t)),
        Type::Variant(_, ts) => ts.iter().any(|t| would_cycle_inner(addr, t)),
        Type::Struct(ts) => ts.iter().any(|(_, t)| would_cycle_inner(addr, t)),
        Type::Set(s) => s.iter().any(|t| would_cycle_inner(addr, t)),
        Type::Fn(f) => {
            // Check every component of the function type: arguments, varargs,
            // return type, tvar constraints, and the throws clause.
            let FnType { args, vargs, rtype, constraints, throws } = &**f;
            args.iter().any(|t| would_cycle_inner(addr, &t.typ))
                || match vargs {
                    None => false,
                    Some(t) => would_cycle_inner(addr, t),
                }
                || would_cycle_inner(addr, rtype)
                || constraints.read().iter().any(|a| {
                    // A constraint on the tvar being bound also counts as a cycle.
                    Arc::as_ptr(&a.0.read().typ).addr() == addr
                        || would_cycle_inner(addr, &a.1)
                })
                || would_cycle_inner(addr, throws)
        }
    }
}
54
/// The mutable state of a type variable. The binding lives behind its own
/// `Arc` so that aliased tvars can share a single storage cell.
#[derive(Debug)]
pub struct TVarInnerInner {
    // Unique id; replaced by the target's id when this tvar is aliased.
    pub(crate) id: TVarId,
    // When true, `alias` becomes a no-op (see `TVar::alias` / `TVar::freeze`).
    pub(crate) frozen: bool,
    // The current binding: `None` = unbound. Shared between aliased tvars.
    pub(crate) typ: Arc<RwLock<Option<Type>>>,
}
61
/// A named type variable together with its lock-protected mutable state.
#[derive(Debug)]
pub struct TVarInner {
    pub name: ArcStr,
    pub(crate) typ: RwLock<TVarInnerInner>,
}
67
/// A cheaply clonable handle to a type variable. Clones share the same
/// `TVarInner`, and therefore the same binding.
#[derive(Debug, Clone)]
pub struct TVar(Arc<TVarInner>);
70
71impl fmt::Display for TVar {
72 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
73 if !PRINT_FLAGS.get().contains(PrintFlag::DerefTVars) {
74 write!(f, "'{}", self.name)
75 } else {
76 write!(f, "'{}: ", self.name)?;
77 match &*self.read().typ.read() {
78 Some(t) => write!(f, "{t}"),
79 None => write!(f, "unbound"),
80 }
81 }
82 }
83}
84
85impl Default for TVar {
86 fn default() -> Self {
87 Self::empty_named(ArcStr::from(format_compact!("_{}", TVarId::new().0).as_str()))
88 }
89}
90
91impl Deref for TVar {
92 type Target = TVarInner;
93
94 fn deref(&self) -> &Self::Target {
95 &*self.0
96 }
97}
98
/// Equality: two tvars are equal when they share the same storage cell
/// (pointer identity, e.g. after `alias`) or when their current bindings
/// compare equal.
///
/// NOTE(review): two distinct unbound tvars (both `None`) compare equal
/// under the value branch — confirm this is intended, and that it stays
/// consistent with any `Hash` impl (`std::hash::Hash` is imported above but
/// not visible here).
impl PartialEq for TVar {
    fn eq(&self, other: &Self) -> bool {
        let t0 = self.read();
        let t1 = other.read();
        // Fast path: same underlying storage cell.
        t0.typ.as_ptr().addr() == t1.typ.as_ptr().addr() || {
            // Slow path: compare bindings (or both unbound).
            let t0 = t0.typ.read();
            let t1 = t1.typ.read();
            *t0 == *t1
        }
    }
}

impl Eq for TVar {}
112
113impl PartialOrd for TVar {
114 fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> {
115 let t0 = self.read();
116 let t1 = other.read();
117 if t0.typ.as_ptr().addr() == t1.typ.as_ptr().addr() {
118 Some(std::cmp::Ordering::Equal)
119 } else {
120 let t0 = t0.typ.read();
121 let t1 = t1.typ.read();
122 t0.partial_cmp(&*t1)
123 }
124 }
125}
126
127impl Ord for TVar {
128 fn cmp(&self, other: &Self) -> std::cmp::Ordering {
129 let t0 = self.read();
130 let t1 = other.read();
131 if t0.typ.as_ptr().addr() == t1.typ.as_ptr().addr() {
132 std::cmp::Ordering::Equal
133 } else {
134 let t0 = t0.typ.read();
135 let t1 = t1.typ.read();
136 t0.cmp(&*t1)
137 }
138 }
139}
140
141impl TVar {
142 pub fn scope_refs(&self, scope: &ModPath) -> Self {
143 match Type::TVar(self.clone()).scope_refs(scope) {
144 Type::TVar(tv) => tv,
145 _ => unreachable!(),
146 }
147 }
148
149 pub fn empty_named(name: ArcStr) -> Self {
150 Self(Arc::new(TVarInner {
151 name,
152 typ: RwLock::new(TVarInnerInner {
153 id: TVarId::new(),
154 frozen: false,
155 typ: Arc::new(RwLock::new(None)),
156 }),
157 }))
158 }
159
160 pub fn named(name: ArcStr, typ: Type) -> Self {
161 Self(Arc::new(TVarInner {
162 name,
163 typ: RwLock::new(TVarInnerInner {
164 id: TVarId::new(),
165 frozen: false,
166 typ: Arc::new(RwLock::new(Some(typ))),
167 }),
168 }))
169 }
170
171 pub fn read<'a>(&'a self) -> RwLockReadGuard<'a, TVarInnerInner> {
172 self.typ.read()
173 }
174
175 pub fn write<'a>(&'a self) -> RwLockWriteGuard<'a, TVarInnerInner> {
176 self.typ.write()
177 }
178
179 pub fn alias(&self, other: &Self) {
181 let mut s = self.write();
182 if !s.frozen {
183 s.frozen = true;
184 let o = other.read();
185 s.id = o.id;
186 s.typ = Arc::clone(&o.typ);
187 }
188 }
189
190 pub fn freeze(&self) {
191 self.write().frozen = true;
192 }
193
194 pub fn copy(&self, other: &Self) {
196 let s = self.read();
197 let o = other.read();
198 *s.typ.write() = o.typ.read().clone();
199 }
200
201 pub fn normalize(&self) -> Self {
202 match &mut *self.read().typ.write() {
203 None => (),
204 Some(t) => {
205 *t = t.normalize();
206 }
207 }
208 self.clone()
209 }
210
211 pub fn unbind(&self) {
212 *self.read().typ.write() = None
213 }
214
215 pub(super) fn would_cycle(&self, t: &Type) -> bool {
216 let addr = Arc::as_ptr(&self.read().typ).addr();
217 would_cycle_inner(addr, t)
218 }
219
220 pub(super) fn addr(&self) -> usize {
221 Arc::as_ptr(&self.0).addr()
222 }
223}