// graphix_compiler/typ/tvar.rs
use crate::{
    expr::ModPath,
    typ::{FnType, PrintFlag, Type, PRINT_FLAGS},
};
use arcstr::ArcStr;
use compact_str::format_compact;
use parking_lot::{RwLock, RwLockReadGuard, RwLockWriteGuard};
use std::{
    cmp::{Eq, PartialEq},
    fmt::{self, Debug},
    hash::Hash,
    ops::Deref,
};
use triomphe::Arc;

atomic_id!(TVarId);
17
18pub(super) fn would_cycle_inner(addr: usize, t: &Type) -> bool {
19 match t {
20 Type::Primitive(_) | Type::Any | Type::Bottom | Type::Ref { .. } => false,
21 Type::TVar(t) => {
22 Arc::as_ptr(&t.read().typ).addr() == addr
23 || match &*t.read().typ.read() {
24 None => false,
25 Some(t) => would_cycle_inner(addr, t),
26 }
27 }
28 Type::Array(a) => would_cycle_inner(addr, &**a),
29 Type::ByRef(t) => would_cycle_inner(addr, t),
30 Type::Tuple(ts) => ts.iter().any(|t| would_cycle_inner(addr, t)),
31 Type::Variant(_, ts) => ts.iter().any(|t| would_cycle_inner(addr, t)),
32 Type::Struct(ts) => ts.iter().any(|(_, t)| would_cycle_inner(addr, t)),
33 Type::Set(s) => s.iter().any(|t| would_cycle_inner(addr, t)),
34 Type::Fn(f) => {
35 let FnType { args, vargs, rtype, constraints } = &**f;
36 args.iter().any(|t| would_cycle_inner(addr, &t.typ))
37 || match vargs {
38 None => false,
39 Some(t) => would_cycle_inner(addr, t),
40 }
41 || would_cycle_inner(addr, &rtype)
42 || constraints.read().iter().any(|a| {
43 Arc::as_ptr(&a.0.read().typ).addr() == addr
44 || would_cycle_inner(addr, &a.1)
45 })
46 }
47 }
48}
49
/// The shared, mutable state of a type variable.
///
/// `typ` is the slot holding the variable's current binding; it sits behind
/// its own `Arc` so that aliased variables can share a single slot.
#[derive(Debug)]
pub struct TVarInnerInner {
    // unique id; replaced by the alias target's id in `TVar::alias`
    pub(super) id: TVarId,
    // once true, `TVar::alias` will no longer redirect this variable
    pub(super) frozen: bool,
    // the binding slot: `None` while the variable is unbound
    pub(super) typ: Arc<RwLock<Option<Type>>>,
}
56
/// A named type variable together with its lockable state.
#[derive(Debug)]
pub struct TVarInner {
    /// the user-visible name (rendered as `'name` by `Display`)
    pub name: ArcStr,
    // guarded inner state so `alias`/`freeze` can update id/slot atomically
    pub(super) typ: RwLock<TVarInnerInner>,
}
62
/// A reference-counted handle to a type variable. Cloning is cheap, and all
/// clones observe the same binding.
#[derive(Debug, Clone)]
pub struct TVar(Arc<TVarInner>);
65
66impl fmt::Display for TVar {
67 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
68 if !PRINT_FLAGS.get().contains(PrintFlag::DerefTVars) {
69 write!(f, "'{}", self.name)
70 } else {
71 write!(f, "'{}: ", self.name)?;
72 match &*self.read().typ.read() {
73 Some(t) => write!(f, "{t}"),
74 None => write!(f, "unbound"),
75 }
76 }
77 }
78}
79
80impl Default for TVar {
81 fn default() -> Self {
82 Self::empty_named(ArcStr::from(format_compact!("_{}", TVarId::new().0).as_str()))
83 }
84}
85
// Deref to the inner `TVarInner` so `self.name` / `self.typ` work directly
// on the handle.
impl Deref for TVar {
    type Target = TVarInner;

    fn deref(&self) -> &Self::Target {
        &*self.0
    }
}
93
94impl PartialEq for TVar {
95 fn eq(&self, other: &Self) -> bool {
96 let t0 = self.read();
97 let t1 = other.read();
98 t0.typ.as_ptr().addr() == t1.typ.as_ptr().addr() || {
99 let t0 = t0.typ.read();
100 let t1 = t1.typ.read();
101 *t0 == *t1
102 }
103 }
104}
105
// `PartialEq` above is reflexive (identical slots compare equal), so the
// `Eq` marker is sound.
impl Eq for TVar {}
107
108impl PartialOrd for TVar {
109 fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> {
110 let t0 = self.read();
111 let t1 = other.read();
112 if t0.typ.as_ptr().addr() == t1.typ.as_ptr().addr() {
113 Some(std::cmp::Ordering::Equal)
114 } else {
115 let t0 = t0.typ.read();
116 let t1 = t1.typ.read();
117 t0.partial_cmp(&*t1)
118 }
119 }
120}
121
122impl Ord for TVar {
123 fn cmp(&self, other: &Self) -> std::cmp::Ordering {
124 let t0 = self.read();
125 let t1 = other.read();
126 if t0.typ.as_ptr().addr() == t1.typ.as_ptr().addr() {
127 std::cmp::Ordering::Equal
128 } else {
129 let t0 = t0.typ.read();
130 let t1 = t1.typ.read();
131 t0.cmp(&*t1)
132 }
133 }
134}
135
136impl TVar {
137 pub fn scope_refs(&self, scope: &ModPath) -> Self {
138 match Type::TVar(self.clone()).scope_refs(scope) {
139 Type::TVar(tv) => tv,
140 _ => unreachable!(),
141 }
142 }
143
144 pub fn empty_named(name: ArcStr) -> Self {
145 Self(Arc::new(TVarInner {
146 name,
147 typ: RwLock::new(TVarInnerInner {
148 id: TVarId::new(),
149 frozen: false,
150 typ: Arc::new(RwLock::new(None)),
151 }),
152 }))
153 }
154
155 pub fn named(name: ArcStr, typ: Type) -> Self {
156 Self(Arc::new(TVarInner {
157 name,
158 typ: RwLock::new(TVarInnerInner {
159 id: TVarId::new(),
160 frozen: false,
161 typ: Arc::new(RwLock::new(Some(typ))),
162 }),
163 }))
164 }
165
166 pub fn read<'a>(&'a self) -> RwLockReadGuard<'a, TVarInnerInner> {
167 self.typ.read()
168 }
169
170 pub fn write<'a>(&'a self) -> RwLockWriteGuard<'a, TVarInnerInner> {
171 self.typ.write()
172 }
173
174 pub fn alias(&self, other: &Self) {
176 let mut s = self.write();
177 if !s.frozen {
178 s.frozen = true;
179 let o = other.read();
180 s.id = o.id;
181 s.typ = Arc::clone(&o.typ);
182 }
183 }
184
185 pub fn freeze(&self) {
186 self.write().frozen = true;
187 }
188
189 pub fn copy(&self, other: &Self) {
191 let s = self.read();
192 let o = other.read();
193 *s.typ.write() = o.typ.read().clone();
194 }
195
196 pub fn normalize(&self) -> Self {
197 match &mut *self.read().typ.write() {
198 None => (),
199 Some(t) => {
200 *t = t.normalize();
201 }
202 }
203 self.clone()
204 }
205
206 pub fn unbind(&self) {
207 *self.read().typ.write() = None
208 }
209
210 pub(super) fn would_cycle(&self, t: &Type) -> bool {
211 let addr = Arc::as_ptr(&self.read().typ).addr();
212 would_cycle_inner(addr, t)
213 }
214
215 pub(super) fn addr(&self) -> usize {
216 self.0.as_ptr().addr()
217 }
218}