1use crate::{
2 expr::ModPath,
3 typ::{FnType, PrintFlag, Type, PRINT_FLAGS},
4};
5use arcstr::ArcStr;
6use compact_str::format_compact;
7use parking_lot::{RwLock, RwLockReadGuard, RwLockWriteGuard};
8use std::{
9 cmp::{Eq, PartialEq},
10 fmt::{self, Debug},
11 hash::Hash,
12 ops::Deref,
13};
14use triomphe::Arc;
15
// Declares the `TVarId` type — presumably an atomically allocated unique id
// (see the `atomic_id!` macro definition elsewhere in the crate).
atomic_id!(TVarId);
17
18pub(super) fn would_cycle_inner(addr: usize, t: &Type) -> bool {
19 match t {
20 Type::Primitive(_) | Type::Any | Type::Bottom | Type::Ref { .. } => false,
21 Type::TVar(t) => {
22 Arc::as_ptr(&t.read().typ).addr() == addr
23 || match &*t.read().typ.read() {
24 None => false,
25 Some(t) => would_cycle_inner(addr, t),
26 }
27 }
28 Type::Error(t) => would_cycle_inner(addr, t),
29 Type::Array(a) => would_cycle_inner(addr, &**a),
30 Type::Map { key, value } => {
31 would_cycle_inner(addr, &**key) || would_cycle_inner(addr, &**value)
32 }
33 Type::ByRef(t) => would_cycle_inner(addr, t),
34 Type::Tuple(ts) => ts.iter().any(|t| would_cycle_inner(addr, t)),
35 Type::Variant(_, ts) => ts.iter().any(|t| would_cycle_inner(addr, t)),
36 Type::Struct(ts) => ts.iter().any(|(_, t)| would_cycle_inner(addr, t)),
37 Type::Set(s) => s.iter().any(|t| would_cycle_inner(addr, t)),
38 Type::Fn(f) => {
39 let FnType { args, vargs, rtype, constraints, throws, explicit_throws: _ } =
40 &**f;
41 args.iter().any(|t| would_cycle_inner(addr, &t.typ))
42 || match vargs {
43 None => false,
44 Some(t) => would_cycle_inner(addr, t),
45 }
46 || would_cycle_inner(addr, rtype)
47 || constraints.read().iter().any(|a| {
48 Arc::as_ptr(&a.0.read().typ).addr() == addr
49 || would_cycle_inner(addr, &a.1)
50 })
51 || would_cycle_inner(addr, &throws)
52 }
53 }
54}
55
/// The mutable core of a type variable: its id, its freeze flag, and the
/// shared cell holding its current binding.
#[derive(Debug)]
pub struct TVarInnerInner {
    // Id of this tvar; replaced with the target's id by `TVar::alias`.
    pub(crate) id: TVarId,
    // When true, `TVar::alias` becomes a no-op (set by `alias`/`freeze`).
    pub(crate) frozen: bool,
    // The binding cell. `Arc`-shared so aliased tvars observe the same
    // `Option<Type>`: `None` means unbound, `Some(t)` means bound to `t`.
    pub(crate) typ: Arc<RwLock<Option<Type>>>,
}
62
/// A named type variable. The name is immutable; the inner state
/// (id / frozen flag / binding cell) sits behind a lock so it can be
/// aliased and rebound after creation.
#[derive(Debug)]
pub struct TVarInner {
    pub name: ArcStr,
    pub(crate) typ: RwLock<TVarInnerInner>,
}
68
/// A reference-counted handle to a type variable; clones are cheap and
/// share the same underlying `TVarInner`.
#[derive(Debug, Clone)]
pub struct TVar(Arc<TVarInner>);
71
72impl fmt::Display for TVar {
73 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
74 if !PRINT_FLAGS.get().contains(PrintFlag::DerefTVars) {
75 write!(f, "'{}", self.name)
76 } else {
77 write!(f, "'{}: ", self.name)?;
78 match &*self.read().typ.read() {
79 Some(t) => write!(f, "{t}"),
80 None => write!(f, "unbound"),
81 }
82 }
83 }
84}
85
86impl Default for TVar {
87 fn default() -> Self {
88 Self::empty_named(ArcStr::from(format_compact!("_{}", TVarId::new().0).as_str()))
89 }
90}
91
92impl Deref for TVar {
93 type Target = TVarInner;
94
95 fn deref(&self) -> &Self::Target {
96 &*self.0
97 }
98}
99
100impl PartialEq for TVar {
101 fn eq(&self, other: &Self) -> bool {
102 let t0 = self.read();
103 let t1 = other.read();
104 t0.typ.as_ptr().addr() == t1.typ.as_ptr().addr() || {
105 let t0 = t0.typ.read();
106 let t1 = t1.typ.read();
107 *t0 == *t1
108 }
109 }
110}
111
// `eq` is reflexive (identical cells short-circuit to equal) and otherwise
// delegates to `Option<Type>` equality, so declaring `Eq` is sound.
impl Eq for TVar {}
113
114impl PartialOrd for TVar {
115 fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> {
116 let t0 = self.read();
117 let t1 = other.read();
118 if t0.typ.as_ptr().addr() == t1.typ.as_ptr().addr() {
119 Some(std::cmp::Ordering::Equal)
120 } else {
121 let t0 = t0.typ.read();
122 let t1 = t1.typ.read();
123 t0.partial_cmp(&*t1)
124 }
125 }
126}
127
128impl Ord for TVar {
129 fn cmp(&self, other: &Self) -> std::cmp::Ordering {
130 let t0 = self.read();
131 let t1 = other.read();
132 if t0.typ.as_ptr().addr() == t1.typ.as_ptr().addr() {
133 std::cmp::Ordering::Equal
134 } else {
135 let t0 = t0.typ.read();
136 let t1 = t1.typ.read();
137 t0.cmp(&*t1)
138 }
139 }
140}
141
142impl TVar {
143 pub fn scope_refs(&self, scope: &ModPath) -> Self {
144 match Type::TVar(self.clone()).scope_refs(scope) {
145 Type::TVar(tv) => tv,
146 _ => unreachable!(),
147 }
148 }
149
150 pub fn empty_named(name: ArcStr) -> Self {
151 Self(Arc::new(TVarInner {
152 name,
153 typ: RwLock::new(TVarInnerInner {
154 id: TVarId::new(),
155 frozen: false,
156 typ: Arc::new(RwLock::new(None)),
157 }),
158 }))
159 }
160
161 pub fn named(name: ArcStr, typ: Type) -> Self {
162 Self(Arc::new(TVarInner {
163 name,
164 typ: RwLock::new(TVarInnerInner {
165 id: TVarId::new(),
166 frozen: false,
167 typ: Arc::new(RwLock::new(Some(typ))),
168 }),
169 }))
170 }
171
172 pub fn read<'a>(&'a self) -> RwLockReadGuard<'a, TVarInnerInner> {
173 self.typ.read()
174 }
175
176 pub fn write<'a>(&'a self) -> RwLockWriteGuard<'a, TVarInnerInner> {
177 self.typ.write()
178 }
179
180 pub fn alias(&self, other: &Self) {
182 let mut s = self.write();
183 if !s.frozen {
184 s.frozen = true;
185 let o = other.read();
186 s.id = o.id;
187 s.typ = Arc::clone(&o.typ);
188 }
189 }
190
191 pub fn freeze(&self) {
192 self.write().frozen = true;
193 }
194
195 pub fn copy(&self, other: &Self) {
197 let s = self.read();
198 let o = other.read();
199 *s.typ.write() = o.typ.read().clone();
200 }
201
202 pub fn normalize(&self) -> Self {
203 match &mut *self.read().typ.write() {
204 None => (),
205 Some(t) => {
206 *t = t.normalize();
207 }
208 }
209 self.clone()
210 }
211
212 pub fn unbind(&self) {
213 *self.read().typ.write() = None
214 }
215
216 pub(super) fn would_cycle(&self, t: &Type) -> bool {
217 let addr = Arc::as_ptr(&self.read().typ).addr();
218 would_cycle_inner(addr, t)
219 }
220
221 pub(super) fn addr(&self) -> usize {
222 Arc::as_ptr(&self.0).addr()
223 }
224
225 pub(super) fn inner_addr(&self) -> usize {
226 Arc::as_ptr(&self.read().typ).addr()
227 }
228}