//! An implementation of the Congruence Closure algorithm based on the
//! paper "Fast Decision Procedures Based on Congruence Closure" by Nelson
//! and Oppen, JACM 1980.

use graph::{Graph, NodeIndex};
use std::collections::HashMap;
use std::fmt::Debug;
use std::hash::Hash;
use unify::{UnifyKey, UnificationTable, UnionedKeys};

#[cfg(test)]
mod test;

pub struct CongruenceClosure<K: Hash + Eq> {
    map: HashMap<K, Token>,
    table: UnificationTable<Token>,
    graph: Graph<K, ()>,
}

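/// The keys stored in the congruence-closure table must implement
/// this trait. Below is a minimal, illustrative sketch of an
/// implementation for a hypothetical `Term` type; the `Term` enum and
/// everything about it are assumptions made for the example, not part
/// of this crate.
///
/// ```ignore
/// // A hypothetical key type, for illustration only.
/// #[derive(Clone, Debug, PartialEq, Eq, Hash)]
/// enum Term {
///     Var(Token),                     // an inference variable
///     Apply(&'static str, Vec<Term>), // e.g. `Box<Foo>` is `Apply("Box", vec![foo])`
/// }
///
/// impl Key for Term {
///     // Inference variables map directly to their token, bypassing
///     // the internal `key -> token` map.
///     fn to_token(&self) -> Option<Token> {
///         match *self {
///             Term::Var(token) => Some(token),
///             Term::Apply(..) => None,
///         }
///     }
///
///     // Compare only the "local" data, ignoring successors:
///     // `Box<X>` and `Box<Y>` are shallow-equal for any `X` and `Y`.
///     fn shallow_eq(&self, other: &Self) -> bool {
///         match (self, other) {
///             (&Term::Var(a), &Term::Var(b)) => a == b,
///             (&Term::Apply(f, ref xs), &Term::Apply(g, ref ys)) => {
///                 f == g && xs.len() == ys.len()
///             }
///             _ => false,
///         }
///     }
///
///     // The successors are the immediate subterms (`Foo` for `Box<Foo>`).
///     fn successors(&self) -> Vec<Self> {
///         match *self {
///             Term::Var(..) => vec![],
///             Term::Apply(_, ref args) => args.clone(),
///         }
///     }
/// }
/// ```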
pub trait Key: Hash + Eq + Clone + Debug {
    /// If this `Key` has some efficient way of converting itself into
    /// a congruence closure `Token`, then it should return
    /// `Some(token)`. Otherwise, return `None`, in which case the CC
    /// will internally map the key to a token. Typically, this is
    /// used by layers that wrap the CC, where inference variables are
    /// mapped directly to particular tokens.
    fn to_token(&self) -> Option<Token> {
        None
    }

    /// Compares only the local data of two keys, ignoring their
    /// successors (see `successors`).
    fn shallow_eq(&self, key: &Self) -> bool;

    /// Returns the immediate successor keys (e.g., `Foo` for `Box<Foo>`).
    fn successors(&self) -> Vec<Self>;
}

#[derive(Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct Token {
    // this is the index both for the graph and the unification table,
    // since for every node there is also a slot in the unification
    // table
    index: u32,
}

impl Token {
    fn new(index: u32) -> Token {
        Token { index: index }
    }

    fn from_node(node: NodeIndex) -> Token {
        Token { index: node.0 as u32 }
    }

    fn node(&self) -> NodeIndex {
        NodeIndex(self.index as usize)
    }
}

impl UnifyKey for Token {
    type Value = ();
    fn index(&self) -> u32 {
        self.index
    }
    fn from_index(i: u32) -> Token {
        Token::new(i)
    }
    fn tag() -> &'static str {
        "CongruenceClosure"
    }
}

impl<K: Key> CongruenceClosure<K> {
    pub fn new() -> CongruenceClosure<K> {
        CongruenceClosure {
            map: HashMap::new(),
            table: UnificationTable::new(),
            graph: Graph::new(),
        }
    }

    /// Manually create a new CC token. You don't normally need to do
    /// this, as CC tokens are automatically created for each key when
    /// we first observe it. However, if you wish to have keys that
    /// make use of the `to_token` method to bypass the `key -> token`
    /// map, then you can use this function to make a new token. The
    /// callback `key_op` will be invoked to create the key for the
    /// fresh token (typically, it will wrap the token in some kind of
    /// enum indicating an inference variable).
    ///
    /// **WARNING:** The new key **must** be a leaf (no successor
    /// keys) or else things will not work right. This invariant is
    /// not currently checked.
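    ///
    /// A usage sketch, assuming the hypothetical `Term` key type from
    /// the `Key` docs (where `Term::Var` is the leaf variant that
    /// wraps a token):
    ///
    /// ```ignore
    /// let mut cc: CongruenceClosure<Term> = CongruenceClosure::new();
    /// // `Term::Var` is hypothetical; it has no successors, so the
    /// // leaf requirement above is satisfied.
    /// let var = cc.new_token(|token| Term::Var(token));
    /// ```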
    pub fn new_token<OP>(&mut self, key_op: OP) -> Token
        where OP: FnOnce(Token) -> K
    {
        let token = self.table.new_key(());
        let key = key_op(token);
        let node = self.graph.add_node(key);
        assert_eq!(token.node(), node);
        token
    }

    /// Return the key for a given token
    pub fn key(&self, token: Token) -> &K {
        self.graph.node_data(token.node())
    }

    /// Indicates that `key1` and `key2` are equivalent.
    pub fn merge(&mut self, key1: K, key2: K) {
        let token1 = self.add(key1);
        let token2 = self.add(key2);
        self.algorithm().merge(token1, token2);
    }

    /// Indicates whether `key1` and `key2` are equivalent.
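    ///
    /// A sketch of the kind of deduction this involves, using the
    /// hypothetical `Term` type from the `Key` docs; `x`, `y`, and `z`
    /// are arbitrary terms and `apply` is an assumed helper that
    /// builds a `Term::Apply`:
    ///
    /// ```ignore
    /// let mut cc: CongruenceClosure<Term> = CongruenceClosure::new();
    /// cc.merge(x.clone(), y.clone());                   // X == Y
    /// cc.merge(apply("F", x), apply("F", z.clone()));   // F(X) == F(Z)
    /// assert!(cc.merged(apply("F", y), apply("F", z))); // hence F(Y) == F(Z)
    /// ```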
    pub fn merged(&mut self, key1: K, key2: K) -> bool {
        // Careful: you cannot naively remove the `add` calls
        // here. The reason is because of patterns like the test
        // `struct_union_no_add`. If we unify X and Y, and then unify
        // F(X) and F(Z), we need to be sure to figure out that F(Y)
        // == F(Z). This requires a non-trivial deduction step, so
        // just checking if the arguments are congruent will fail,
        // because `Y == Z` does not hold.

        debug!("merged: called({:?}, {:?})", key1, key2);

        let token1 = self.add(key1);
        let token2 = self.add(key2);
        self.algorithm().unioned(token1, token2)
    }

    /// Returns an iterator over all keys that are known to have been
    /// merged with `key`. This is a bit dubious, since the set of
    /// merged keys depends on what has been added so far, and is not
    /// the full set of equivalences that one might imagine. See the
    /// test `merged_keys` for an example.
    pub fn merged_keys(&mut self, key: K) -> MergedKeys<K> {
        let token = self.add(key);
        MergedKeys {
            graph: &self.graph,
            iterator: self.table.unioned_keys(token),
        }
    }

    /// Add a key into the CC table, returning the corresponding
    /// token. This is not part of the public API, though it could be
    /// if we wanted.
    fn add(&mut self, key: K) -> Token {
        debug!("add(): key={:?}", key);

        let (is_new, token) = self.get_or_add(&key);
        debug!("add: key={:?} is_new={:?} token={:?}", key, is_new, token);

        // if this node is already in the graph, we are done
        if !is_new {
            return token;
        }

        // Otherwise, we want to add the 'successors' also. So, for
        // example, if we are adding `Box<Foo>`, the successor would
        // be `Foo`.  So go ahead and recursively add `Foo` if it
        // doesn't already exist.
        let successors: Vec<Token> = key.successors()
                                        .into_iter()
                                        .map(|s| self.add(s))
                                        .collect();

        debug!("add: key={:?} successors={:?}", key, successors);

        // Now we have to be a bit careful. It might be that we are
        // adding `Box<Foo>`, but `Foo` was already present, and in
        // fact equated with `Bar`. That is, maybe we had a graph like:
        //
        //      Box<Bar> -> Bar == Foo
        //
        // Now we just added `Box<Foo>`, but we need to equate
        // `Box<Foo>` and `Box<Bar>`.
        for successor in successors {
            // get the set of predecessors of each successor BEFORE we
            // add the edge from the new node; this would be `Box<Bar>`
            // in the above example.
            let predecessors: Vec<_> = self.algorithm().all_preds(successor);

            debug!("add: key={:?} successor={:?} predecessors={:?}",
                   key,
                   successor,
                   predecessors);

            // add edge from new node `Box<Foo>` to its successor `Foo`
            self.graph.add_edge(token.node(), successor.node(), ());

            // Now we have to consider merging the old predecessors,
            // like `Box<Bar>`, with this new node `Box<Foo>`.
            //
            // Note that in other cases no merge will occur. For
            // example, if we were adding `(A1, B1)` to a
            // graph like this:
            //
            //     (A, B) -> A == A1
            //        |
            //        v
            //        B
            //
            // In this case, the predecessor would be `(A, B)`; but we don't
            // know that `B == B1`, so we can't merge that with `(A1, B1)`.
            for predecessor in predecessors {
                self.algorithm().maybe_merge(token, predecessor);
            }
        }

        token
    }

    /// Gets the token for a key, if any.
    fn get(&self, key: &K) -> Option<Token> {
        key.to_token()
           .or_else(|| self.map.get(key).cloned())
    }

    /// Gets the token for a key, adding one if none exists. Returns the token
    /// and a boolean indicating whether it had to be added.
    fn get_or_add(&mut self, key: &K) -> (bool, Token) {
        if let Some(token) = self.get(key) {
            return (false, token);
        }

        let token = self.new_token(|_| key.clone());
        self.map.insert(key.clone(), token);
        (true, token)
    }

    fn algorithm(&mut self) -> Algorithm<K> {
        Algorithm {
            graph: &self.graph,
            table: &mut self.table,
        }
    }
}

// # Walking merged keys

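/// An iterator over the keys known to have been merged with a given
/// key; returned by `CongruenceClosure::merged_keys`.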
pub struct MergedKeys<'cc, K: Key + 'cc> {
    graph: &'cc Graph<K, ()>,
    iterator: UnionedKeys<'cc, Token>,
}

impl<'cc, K: Key> Iterator for MergedKeys<'cc, K> {
    type Item = K;

    fn next(&mut self) -> Option<Self::Item> {
        self.iterator
            .next()
            .map(|token| self.graph.node_data(token.node()).clone())
    }
}

// # The core algorithm

struct Algorithm<'a, K: 'a> {
    graph: &'a Graph<K, ()>,
    table: &'a mut UnificationTable<Token>,
}

impl<'a, K: Key> Algorithm<'a, K> {
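    /// Unions the equivalence classes of `u` and `v`, then restores
    /// congruence: each predecessor of `u`'s old class is compared
    /// with each predecessor of `v`'s old class, and any pair that has
    /// become congruent is merged in turn.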
    fn merge(&mut self, u: Token, v: Token) {
        debug!("merge(): u={:?} v={:?}", u, v);

        if self.unioned(u, v) {
            return;
        }

        let u_preds = self.all_preds(u);
        let v_preds = self.all_preds(v);

        self.union(u, v);

        for &p_u in &u_preds {
            for &p_v in &v_preds {
                self.maybe_merge(p_u, p_v);
            }
        }
    }

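    /// Returns the predecessors of every node that is currently
    /// unioned with `u` (i.e., of `u`'s whole equivalence class).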
    fn all_preds(&mut self, u: Token) -> Vec<Token> {
        let graph = self.graph;
        self.table
            .unioned_keys(u)
            .flat_map(|k| graph.predecessor_nodes(k.node()))
            .map(|i| Token::from_node(i))
            .collect()
    }

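    /// Merges `p_u` and `p_v` if they are not already unioned but are
    /// shallow-equal and congruent (i.e., their successors are
    /// pairwise unioned).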
    fn maybe_merge(&mut self, p_u: Token, p_v: Token) {
        debug!("maybe_merge(): p_u={:?} p_v={:?}", p_u, p_v);

        if !self.unioned(p_u, p_v) && self.shallow_eq(p_u, p_v) && self.congruent(p_u, p_v) {
            self.merge(p_u, p_v);
        }
    }

    /// Checks whether the successors of `p_u` and `p_v` are pairwise
    /// unioned. So if you have `Box<X1>` and `Box<X2>`, this is true
    /// if `X1 == X2`. (The result of this fn is not really meaningful
    /// unless the two nodes are shallow-equal.)
    fn congruent(&mut self, p_u: Token, p_v: Token) -> bool {
        let ss_u: Vec<_> = self.graph.successor_nodes(p_u.node()).collect();
        let ss_v: Vec<_> = self.graph.successor_nodes(p_v.node()).collect();
        ss_u.len() == ss_v.len() &&
        {
            ss_u.into_iter()
                .zip(ss_v.into_iter())
                .all(|(s_u, s_v)| self.unioned(Token::from_node(s_u), Token::from_node(s_v)))
        }
    }

    /// Compares the local data only, not considering successor nodes.
    /// So, e.g., `Box<X>` and `Box<Y>` are shallow-equal for any `X`
    /// and `Y`.
    fn shallow_eq(&self, u: Token, v: Token) -> bool {
        let key_u = self.graph.node_data(u.node());
        let key_v = self.graph.node_data(v.node());
        key_u.shallow_eq(key_v)
    }

    fn unioned(&mut self, u: Token, v: Token) -> bool {
        self.table.unioned(u, v)
    }

    fn union(&mut self, u: Token, v: Token) {
        self.table.union(u, v)
    }
}