use crate::flattened::UnorderedFlattenedDocument;
use crate::{ExpandedDocument, FlattenedDocument, IndexedNode, IndexedObject, Object};
use contextual::WithContext;
use rdf_types::{Generator, Vocabulary};
use std::collections::HashSet;
use std::hash::Hash;

mod environment;
mod node_map;

pub use environment::Environment;
pub use node_map::*;

pub type FlattenResult<I, B> = Result<FlattenedDocument<I, B>, ConflictingIndexes<I, B>>;

pub type FlattenUnorderedResult<I, B> =
    Result<UnorderedFlattenedDocument<I, B>, ConflictingIndexes<I, B>>;

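/// JSON-LD document flattening.
///
/// Usage sketch (not part of the original source; it assumes an already
/// expanded document `doc` and the blank node identifier generator shipped
/// with the `rdf_types` crate):
///
/// ```ignore
/// use rdf_types::generator;
///
/// // `true` asks for the flattened nodes to be returned in a deterministic,
/// // identifier-sorted order.
/// let flattened = doc.flatten(generator::Blank::new(), true)?;
/// ```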
pub trait Flatten<I, B> {
    /// Flattens the document, using `generator` to assign identifiers to
    /// anonymous nodes and `vocabulary` to interpret identifiers.
    ///
    /// If `ordered` is `true`, the resulting nodes are sorted by identifier.
    fn flatten_with<V, G: Generator<V>>(
        self,
        vocabulary: &mut V,
        generator: G,
        ordered: bool,
    ) -> FlattenResult<I, B>
    where
        V: Vocabulary<Iri = I, BlankId = B>;

    /// Flattens the document into an unordered set of nodes.
    fn flatten_unordered_with<V, G: Generator<V>>(
        self,
        vocabulary: &mut V,
        generator: G,
    ) -> FlattenUnorderedResult<I, B>
    where
        V: Vocabulary<Iri = I, BlankId = B>;

    /// Flattens the document using the unit `()` vocabulary.
    fn flatten<G: Generator>(self, generator: G, ordered: bool) -> FlattenResult<I, B>
    where
        (): Vocabulary<Iri = I, BlankId = B>,
        Self: Sized,
    {
        self.flatten_with(
            rdf_types::vocabulary::no_vocabulary_mut(),
            generator,
            ordered,
        )
    }

    /// Flattens the document into an unordered set of nodes, using the unit
    /// `()` vocabulary.
    fn flatten_unordered<G: Generator>(self, generator: G) -> FlattenUnorderedResult<I, B>
    where
        (): Vocabulary<Iri = I, BlankId = B>,
        Self: Sized,
    {
        self.flatten_unordered_with(rdf_types::vocabulary::no_vocabulary_mut(), generator)
    }
}

impl<I: Clone + Eq + Hash, B: Clone + Eq + Hash> Flatten<I, B> for ExpandedDocument<I, B> {
    fn flatten_with<V, G: Generator<V>>(
        self,
        vocabulary: &mut V,
        generator: G,
        ordered: bool,
    ) -> FlattenResult<I, B>
    where
        V: Vocabulary<Iri = I, BlankId = B>,
    {
        Ok(self
            .generate_node_map_with(vocabulary, generator)?
            .flatten_with(vocabulary, ordered))
    }

    fn flatten_unordered_with<V, G: Generator<V>>(
        self,
        vocabulary: &mut V,
        generator: G,
    ) -> FlattenUnorderedResult<I, B>
    where
        V: Vocabulary<Iri = I, BlankId = B>,
    {
        Ok(self
            .generate_node_map_with(vocabulary, generator)?
            .flatten_unordered())
    }
}

/// Keeps a top-level node only if it has an index or is non-empty.
fn filter_graph<T, B>(node: IndexedNode<T, B>) -> Option<IndexedNode<T, B>> {
    if node.index().is_none() && node.is_empty() {
        None
    } else {
        Some(node)
    }
}

/// Keeps a sub-graph node only if it has an index or at least one property;
/// kept nodes are stripped of their graph, included and reverse property
/// entries and converted into node objects.
fn filter_sub_graph<T, B>(mut node: IndexedNode<T, B>) -> Option<IndexedObject<T, B>> {
    if node.index().is_none() && node.properties().is_empty() {
        None
    } else {
        node.set_graph_entry(None);
        node.set_included(None);
        node.set_reverse_properties(None);
        Some(node.map_inner(Object::node))
    }
}

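// Flattening a node map merges every named graph into the default graph:
// each graph name is declared as a node of the default graph, and the
// graph's remaining nodes become that node's `@graph` entry.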
impl<T: Clone + Eq + Hash, B: Clone + Eq + Hash> NodeMap<T, B> {
    /// Flattens the node map using the unit `()` vocabulary.
    pub fn flatten(self, ordered: bool) -> FlattenedDocument<T, B>
    where
        (): Vocabulary<Iri = T, BlankId = B>,
    {
        self.flatten_with(&(), ordered)
    }

    /// Flattens the node map, sorting graphs and nodes by identifier when
    /// `ordered` is `true`.
    pub fn flatten_with<V>(self, vocabulary: &V, ordered: bool) -> FlattenedDocument<T, B>
    where
        V: Vocabulary<Iri = T, BlankId = B>,
    {
        let (mut default_graph, named_graphs) = self.into_parts();

        let mut named_graphs: Vec<_> = named_graphs.into_iter().collect();
        if ordered {
            named_graphs.sort_by(|a, b| {
                a.0.with(vocabulary)
                    .as_str()
                    .cmp(b.0.with(vocabulary).as_str())
            });
        }

        for (graph_id, graph) in named_graphs {
            let entry = default_graph.declare_node(graph_id, None).ok().unwrap();
            let mut nodes: Vec<_> = graph.into_nodes().collect();
            if ordered {
                nodes.sort_by(|a, b| {
                    a.id.as_ref()
                        .unwrap()
                        .with(vocabulary)
                        .as_str()
                        .cmp(b.id.as_ref().unwrap().with(vocabulary).as_str())
                });
            }
            entry.set_graph_entry(Some(
                nodes.into_iter().filter_map(filter_sub_graph).collect(),
            ));
        }

        let mut nodes: Vec<_> = default_graph
            .into_nodes()
            .filter_map(filter_graph)
            .collect();

        if ordered {
            nodes.sort_by(|a, b| {
                a.id.as_ref()
                    .unwrap()
                    .with(vocabulary)
                    .as_str()
                    .cmp(b.id.as_ref().unwrap().with(vocabulary).as_str())
            });
        }

        nodes
    }

    /// Flattens the node map into an unordered set of nodes.
    pub fn flatten_unordered(self) -> HashSet<IndexedNode<T, B>> {
        let (mut default_graph, named_graphs) = self.into_parts();

        for (graph_id, graph) in named_graphs {
            let entry = default_graph.declare_node(graph_id, None).ok().unwrap();
            entry.set_graph_entry(Some(
                graph.into_nodes().filter_map(filter_sub_graph).collect(),
            ));
        }

        default_graph
            .into_nodes()
            .filter_map(filter_graph)
            .collect()
    }
}