1#[cfg(feature = "dev")]
6use arbitrary::Arbitrary;
7use compact_u64::{cu64_decode, cu64_encode, write_tag};
8use derive_more::{Display, Error};
9use ufotofu::ProducerExt;
10
11use crate::entry::Entry;
12use crate::groupings::{Area, TimeRange, subspace_includes_subspace};
13use crate::is_bitflagged;
14use crate::paths::Path;
15use crate::paths::private::PrivatePathContext;
16use crate::prelude::{Keylike, Namespaced};
17use either::Either::Left;
18use ufotofu::{
19 codec::{Blame, Decodable, DecodeError, Encodable},
20 codec_relative::{RelativeDecodable, RelativeEncodable},
21};
22
/// A namespace id, an optional subspace id, and a path, bundled as a single
/// interest.
///
/// The methods below treat a `None` subspace as unrestricted: it is never in
/// conflict with any concrete subspace (see `almost_includes_area`), and
/// `relax` produces exactly this unrestricted form.
#[derive(Clone, PartialEq, Eq, PartialOrd, Ord, Debug, Hash)]
#[cfg_attr(feature = "dev", derive(Arbitrary))]
pub struct PrivateInterest<const MCL: usize, const MCC: usize, const MPL: usize, N, S> {
    // Id of the namespace this interest pertains to.
    namespace: N,
    // Concrete subspace id, or `None` for "no subspace restriction".
    subspace: Option<S>,
    // Path prefix; inclusion checks test whether other paths extend it.
    path: Path<MCL, MCC, MPL>,
}
33
34impl<const MCL: usize, const MCC: usize, const MPL: usize, N, S>
35 PrivateInterest<MCL, MCC, MPL, N, S>
36{
37 pub fn new(namespace_id: N, subspace_id: Option<S>, path: Path<MCL, MCC, MPL>) -> Self {
39 Self {
40 namespace: namespace_id,
41 subspace: subspace_id,
42 path,
43 }
44 }
45
46 pub fn namespace(&self) -> &N {
48 &self.namespace
49 }
50
51 pub fn subspace(&self) -> Option<&S> {
53 self.subspace.as_ref()
54 }
55
56 pub fn path(&self) -> &Path<MCL, MCC, MPL> {
58 &self.path
59 }
60}
61
62impl<const MCL: usize, const MCC: usize, const MPL: usize, N: PartialEq, S: PartialEq + Clone>
63 PrivateInterest<MCL, MCC, MPL, N, S>
64{
65 pub fn is_more_specific(&self, other: &Self) -> bool {
67 self.namespace == other.namespace
68 && subspace_includes_subspace(other.subspace(), self.subspace())
69 && self.path.is_prefixed_by(&other.path)
70 }
71
72 pub fn is_disjoint(&self, other: &Self) -> bool {
74 if self.namespace != other.namespace {
75 return true;
76 }
77
78 if !subspace_includes_subspace(self.subspace(), other.subspace()) {
79 return true;
80 }
81
82 if !self.path.is_related_to(&other.path) {
83 return true;
84 }
85
86 false
87 }
88
89 pub fn is_less_specific(&self, other: &Self) -> bool {
91 other.is_more_specific(self)
92 }
93
94 pub fn is_comparable(&self, other: &Self) -> bool {
96 self.is_more_specific(other) || self.is_less_specific(other)
97 }
98
99 pub fn are_awkward(&self, other: &Self) -> bool {
101 !self.is_comparable(other) && !self.is_disjoint(other)
102 }
103}
104
105impl<const MCL: usize, const MCC: usize, const MPL: usize, N: PartialEq, S: PartialEq + Clone>
106 PrivateInterest<MCL, MCC, MPL, N, S>
107{
108 pub fn includes_entry<PD>(&self, entry: &Entry<MCL, MCC, MPL, N, S, PD>) -> bool {
110 &self.namespace == entry.wdm_namespace_id()
111 && self
112 .subspace()
113 .map(|subspace_id| subspace_id == entry.wdm_subspace_id())
114 .unwrap_or(true)
115 && self.path.is_prefix_of(entry.wdm_path())
116 }
117}
118
119impl<const MCL: usize, const MCC: usize, const MPL: usize, N, S: PartialEq + Clone>
120 PrivateInterest<MCL, MCC, MPL, N, S>
121{
122 pub fn includes_area(&self, area: &Area<MCL, MCC, MPL, S>) -> bool {
124 subspace_includes_subspace(self.subspace(), area.subspace())
125 && self.path.is_prefix_of(area.path())
126 }
127
128 pub fn is_related_to_area(&self, area: &Area<MCL, MCC, MPL, S>) -> bool {
133 subspace_includes_subspace(self.subspace(), area.subspace())
134 && self.path.is_related_to(area.path())
135 }
136}
137
138impl<const MCL: usize, const MCC: usize, const MPL: usize, N, S: PartialEq>
139 PrivateInterest<MCL, MCC, MPL, N, S>
140{
141 pub fn almost_includes_area(&self, area: &Area<MCL, MCC, MPL, S>) -> bool {
143 let subspace_is_fine = match (self.subspace(), area.subspace()) {
144 (Some(self_id), Some(other_id)) => self_id == other_id,
145 _ => true,
146 };
147
148 subspace_is_fine && self.path.is_related_to(area.path())
149 }
150}
151
152impl<const MCL: usize, const MCC: usize, const MPL: usize, N: Clone, S: Clone>
153 PrivateInterest<MCL, MCC, MPL, N, S>
154{
155 pub fn relax(&self) -> Self {
157 let mut cloned = self.clone();
158
159 if self.subspace.is_some() {
160 cloned.subspace = None;
161 }
162
163 cloned
164 }
165}
166
/// The context against which an [`Area`] is encoded/decoded in the
/// `RelativeEncodable`/`RelativeDecodable` impls below: a [`PrivateInterest`]
/// (supplying the private path component) paired with a reference [`Area`]
/// (supplying the subspace and time reference points).
#[derive(Clone, PartialEq, Eq, PartialOrd, Hash, Debug)]
#[cfg_attr(feature = "dev", derive(Arbitrary))]
pub struct PrivateAreaContext<const MCL: usize, const MCC: usize, const MPL: usize, N, S> {
    // The private interest; its path must be related to `rel`'s path
    // (enforced by `PrivateAreaContext::new`).
    private: PrivateInterest<MCL, MCC, MPL, N, S>,
    // The reference area; subspace ids and time offsets are encoded relative
    // to it.
    rel: Area<MCL, MCC, MPL, S>,
}
176
/// Error returned by [`PrivateAreaContext::new`] when the private interest's
/// path and the area's path are not related to each other.
#[derive(Debug, Display, Clone, Copy, Error)]
#[display("area is not almost included")]
pub struct AreaNotAlmostIncludedError;
181
182impl<const MCL: usize, const MCC: usize, const MPL: usize, N, S>
183 PrivateAreaContext<MCL, MCC, MPL, N, S>
184{
185 pub fn new(
187 private: PrivateInterest<MCL, MCC, MPL, N, S>,
188 rel: Area<MCL, MCC, MPL, S>,
189 ) -> Result<Self, AreaNotAlmostIncludedError> {
190 if !private.path.is_related_to(rel.path()) {
191 return Err(AreaNotAlmostIncludedError);
192 }
193
194 Ok(Self { private, rel })
195 }
196
197 pub fn private(&self) -> &PrivateInterest<MCL, MCC, MPL, N, S> {
199 &self.private
200 }
201
202 pub fn rel(&self) -> &Area<MCL, MCC, MPL, S> {
204 &self.rel
205 }
206}
207
208impl<const MCL: usize, const MCC: usize, const MPL: usize, N, S: PartialEq>
209 PrivateAreaContext<MCL, MCC, MPL, N, S>
210{
211 pub fn almost_includes_area(&self, other: &Area<MCL, MCC, MPL, S>) -> bool {
213 let subspace_is_fine = match (self.rel.subspace(), other.subspace()) {
214 (Some(self_id), Some(other_id)) => self_id == other_id,
215 _ => true,
216 };
217 subspace_is_fine
218 && self.rel.path().is_prefix_of(other.path())
219 && self.rel.times().includes_willow_range(other.times())
220 }
221}
222
/// Encodes an [`Area`] relative to a [`PrivateAreaContext`].
///
/// Wire layout (as produced by this impl): one header byte (subspace flags,
/// time-reference flags, and two compact-u64 tags), then the subspace id iff
/// it differs from the reference area's and is not "any", then the start time
/// difference (and, for closed-or-derivable ends, the end time difference) as
/// compact u64s, then the path encoded relative to a [`PrivatePathContext`]
/// built from the context. The decoder below mirrors this exactly.
impl<const MCL: usize, const MCC: usize, const MPL: usize, N, S>
    RelativeEncodable<PrivateAreaContext<MCL, MCC, MPL, N, S>> for Area<MCL, MCC, MPL, S>
where
    S: PartialEq + Encodable,
{
    async fn relative_encode<C>(
        &self,
        rel: &PrivateAreaContext<MCL, MCC, MPL, N, S>,
        consumer: &mut C,
    ) -> Result<(), C::Error>
    where
        C: ufotofu::BulkConsumer<Item = u8> + ?Sized,
    {
        // Decide whether the start/end of `self.times()` are encoded as
        // offsets from the reference range's start or its end — whichever
        // offset is smaller. An open reference end forces start-relative
        // encoding. NOTE(review): the subtractions assume self's times lie
        // within the reference times, as guaranteed when
        // `can_be_encoded_relative_to` holds — TODO confirm callers check it.
        let (start_from_start, end_from_start) = match (rel.rel.times().end(), self.times().end()) {
            (None, None) => (true, false),
            (None, Some(_)) => (true, true),
            (Some(rel_end), None) => {
                let start_from_start = *self.times().start() - *rel.rel().times().start()
                    <= *rel_end - *self.times().start();

                (start_from_start, false)
            }
            (Some(rel_end), Some(val_end)) => {
                let start_from_start = *self.times().start() - *rel.rel().times().start()
                    <= *rel_end - *self.times().start();

                let end_from_start = *val_end - *rel.rel().times().start() <= *rel_end - *val_end;

                (start_from_start, end_from_start)
            }
        };

        let mut header: u8 = 0b0000_0000;

        // Bit 0 (MSB): subspace differs from the reference area's subspace.
        if self.subspace() != rel.rel().subspace() {
            header |= 0b1000_0000;
        }

        // Bit 1: this area's subspace is "any" (None).
        if self.subspace().is_none() {
            header |= 0b0100_0000
        }

        // Bit 2: start offset is relative to the reference start.
        if start_from_start {
            header |= 0b0010_0000
        }

        // Bit 3: end offset is relative to the reference start.
        if end_from_start {
            header |= 0b0001_0000
        }

        // Compute the start offset actually encoded: the smaller of
        // distance-from-reference-start and distance-from-reference-end,
        // matching the `start_from_start` choice above.
        // NOTE(review): the (None, _) arms and the two (Some, _) arms are
        // pairwise identical and could be merged.
        let start_diff = match (rel.rel.times().end(), self.times().end()) {
            (None, None) => *self.times().start() - *rel.rel.times().start(),
            (None, Some(_)) => *self.times().start() - *rel.rel.times().start(),
            (Some(rel_end), None) => {
                let start_from_start_diff = *self.times().start() - *rel.rel().times().start();

                let start_from_end_diff = *rel_end - *self.times().start();

                if start_from_start_diff <= start_from_end_diff {
                    start_from_start_diff
                } else {
                    start_from_end_diff
                }
            }
            (Some(rel_end), Some(_)) => {
                let start_from_start_diff = *self.times().start() - *rel.rel().times().start();
                let start_from_end_diff = *rel_end - *self.times().start();

                if start_from_start_diff <= start_from_end_diff {
                    start_from_start_diff
                } else {
                    start_from_end_diff
                }
            }
        };

        // Fold the compact-u64 tag for `start_diff` into header bits 4-5;
        // must happen before the header byte is consumed below.
        compact_u64::write_tag(&mut header, 2, 4, start_diff.into());

        // Compute (and tag into header bits 6-7) the end offset, if any. No
        // end offset is written when this area's end is open.
        let end_diff = match (rel.rel.times().end(), self.times().end()) {
            (None, None) | (Some(_), None) => None,
            (None, Some(val_end)) => {
                let diff = *val_end - *rel.rel.times().start();
                compact_u64::write_tag(&mut header, 2, 6, diff.into());
                Some(diff)
            }
            (Some(rel_end), Some(val_end)) => {
                // Recomputed to match the header flag set above.
                let end_from_start = *val_end - *rel.rel().times().start() <= *rel_end - *val_end;

                let diff = if end_from_start {
                    *val_end - *rel.rel.times().start()
                } else {
                    *rel_end - *val_end
                };

                write_tag(&mut header, 2, 6, diff.into());

                Some(diff)
            }
        };

        // Header is complete (flags + both tags); emit it first.
        consumer.consume(Left(header)).await?;

        // Emit the subspace id only when it cannot be reconstructed from the
        // reference area (i.e. it is concrete and differs from the
        // reference's).
        match (rel.rel().subspace(), self.subspace()) {
            (None, Some(id)) => {
                id.encode(consumer).await?;
            }
            (Some(rel_id), Some(id)) => {
                if rel_id != id {
                    id.encode(consumer).await?;
                }
            }
            _ => {}
        }

        // Time offsets, as compact u64s whose tags were written into the
        // header above.
        cu64_encode(start_diff.into(), 2, consumer).await?;

        if let Some(end_diff) = end_diff {
            cu64_encode(end_diff.into(), 2, consumer).await?;
        }

        debug_assert!(rel.private().path().is_related_to(rel.rel().path()));

        // SAFETY: `PrivateAreaContext::new` only constructs contexts whose
        // private path is related to the reference path (re-checked by the
        // debug_assert above), which is the precondition of `new_unchecked`.
        let private_path_context = unsafe {
            PrivatePathContext::new_unchecked(
                rel.private().path().clone(),
                rel.rel().path().clone(),
            )
        };

        self.path()
            .relative_encode(&private_path_context, consumer)
            .await?;

        Ok(())
    }

    /// An area can be encoded relative to this context iff both the reference
    /// area and the private interest "almost include" it; the subtraction
    /// logic in `relative_encode` relies on this.
    fn can_be_encoded_relative_to(&self, rel: &PrivateAreaContext<MCL, MCC, MPL, N, S>) -> bool {
        rel.almost_includes_area(self) && rel.private.almost_includes_area(self)
    }
}
365
/// Decodes an [`Area`] relative to a [`PrivateAreaContext`], mirroring the
/// `RelativeEncodable` impl above: header byte, optional subspace id, start
/// (and possibly end) time offsets, then the relatively-encoded path.
/// Malformed or out-of-range input is reported as `Blame::TheirFault`.
impl<const MCL: usize, const MCC: usize, const MPL: usize, N, S>
    RelativeDecodable<PrivateAreaContext<MCL, MCC, MPL, N, S>> for Area<MCL, MCC, MPL, S>
where
    S: Clone + Decodable,
{
    type ErrorReason = Blame;

    async fn relative_decode<P>(
        rel: &PrivateAreaContext<MCL, MCC, MPL, N, S>,
        producer: &mut P,
    ) -> Result<Self, DecodeError<P::Final, P::Error, Self::ErrorReason>>
    where
        P: ufotofu::BulkProducer<Item = u8> + ?Sized,
        Self: Sized,
    {
        let header = producer.produce_item().await?;

        // Header flags, in the same bit positions the encoder set them
        // (bit 0 being the most significant, per `is_bitflagged`).
        let is_subspace_not_equal = is_bitflagged(header, 0);
        let is_subspace_any = is_bitflagged(header, 1);
        let is_start_from_start = is_bitflagged(header, 2);
        let is_end_from_start = is_bitflagged(header, 3);

        // Subspace: explicitly encoded only when concrete and different from
        // the reference's; "any" is flag-only; otherwise reuse the
        // reference area's subspace.
        let subspace = if !is_subspace_any && is_subspace_not_equal {
            let id = S::decode(producer)
                .await
                .map_err(|err| err.map_other(|_| Blame::TheirFault))?;

            Some(id)
        } else if is_subspace_any {
            None
        } else {
            rel.rel().subspace().cloned()
        };

        // Start offset, compact u64 whose tag sits in header bits 4-5.
        let start_diff = cu64_decode(header, 2, 4, producer)
            .await
            .map_err(|err| err.map_other(|_| Blame::TheirFault))?;

        // Reconstruct the absolute start, rejecting overflow/underflow and
        // an end-relative offset against an open reference range.
        let time_start = if is_start_from_start {
            start_diff
                .checked_add(u64::from(*rel.rel().times().start()))
                .ok_or(DecodeError::Other(Blame::TheirFault))?
        } else {
            match rel.rel().times().end() {
                Some(end) => u64::from(*end)
                    .checked_sub(start_diff)
                    .ok_or(DecodeError::Other(Blame::TheirFault))?,
                None => {
                    return Err(DecodeError::Other(Blame::TheirFault));
                }
            }
        };

        // Matches the encoder: with an open reference end, `end_from_start`
        // clear means the decoded area's end is open too; a closed reference
        // end always comes with an encoded end offset.
        let is_time_end_open = match rel.rel.times().end() {
            Some(_) => false,
            None => !is_end_from_start,
        };

        let time_end = if is_time_end_open {
            None
        } else {
            // End offset, compact u64 whose tag sits in header bits 6-7.
            let end_diff = cu64_decode(header, 2, 6, producer)
                .await
                .map_err(|err| err.map_other(|_| Blame::TheirFault))?;

            let end = if is_end_from_start {
                end_diff
                    .checked_add(u64::from(*rel.rel.times().start()))
                    .ok_or(DecodeError::Other(Blame::TheirFault))?
            } else {
                match rel.rel.times().end() {
                    Some(end) => u64::from(*end)
                        .checked_sub(end_diff)
                        .ok_or(DecodeError::Other(Blame::TheirFault))?,
                    // Unreachable given `is_time_end_open` above, but kept as
                    // a defensive error rather than a panic.
                    None => {
                        return Err(DecodeError::Other(Blame::TheirFault));
                    }
                }
            };

            Some(end)
        };

        debug_assert!(rel.private().path().is_related_to(rel.rel.path()));

        // SAFETY: `PrivateAreaContext::new` only constructs contexts whose
        // private path is related to the reference path (re-checked by the
        // debug_assert above), which is the precondition of `new_unchecked`.
        let private_path_context = unsafe {
            PrivatePathContext::new_unchecked(
                rel.private().path().clone(),
                rel.rel().path().clone(),
            )
        };

        let path = Path::relative_decode(&private_path_context, producer).await?;

        match time_end {
            Some(end) => {
                // Reject inverted ranges before the unchecked constructor.
                if time_start > end {
                    return Err(DecodeError::Other(Blame::TheirFault));
                }
                // SAFETY: `time_start <= end` was verified just above, which
                // is the precondition of `new_closed_unchecked`.
                let times =
                    unsafe { TimeRange::new_closed_unchecked(time_start.into(), end.into()) };

                Ok(Area::new(subspace, path, times))
            }
            None => {
                let times = TimeRange::new_open(time_start.into());

                Ok(Area::new(subspace, path, times))
            }
        }
    }
}