1use core::{
2 cmp,
3 marker::PhantomData,
4 ops::{Bound, RangeBounds},
5};
6
7use either::Either;
8use ref_cast::RefCast;
9use skl::{
10 generic::{Comparable, Equivalent, KeyRef, MaybeStructured, Type, TypeRef},
11 VacantBuffer,
12};
13
14use super::{EXCLUDED, INCLUDED, UNBOUNDED};
15
/// Zero-sized placeholder standing in for the range-key type `K` in the
/// skiplist's type parameters; values of this type are never constructed or
/// encoded (see the `Type` impl below, which reports zero bytes).
#[repr(transparent)]
pub(super) struct PhantomRangeKey<K: ?Sized>(PhantomData<K>);
18
19impl<K: ?Sized> core::fmt::Debug for PhantomRangeKey<K> {
20 fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
21 f.debug_tuple("PhantomRangeKey").field(&self.0).finish()
22 }
23}
24
25impl<K> Type for PhantomRangeKey<K>
26where
27 K: ?Sized + Type,
28{
29 type Ref<'a> = RangeKeyRef<'a, K>;
30
31 type Error = core::convert::Infallible;
32
33 #[inline(never)]
34 #[cold]
35 fn encoded_len(&self) -> usize {
36 0
37 }
38
39 #[inline(never)]
40 #[cold]
41 fn encode_to_buffer(&self, _: &mut skl::VacantBuffer<'_>) -> Result<usize, Self::Error> {
42 Ok(0)
43 }
44}
45
/// Transparent newtype over a borrowed lookup key `Q`, letting user queries be
/// compared against stored range keys. `#[repr(transparent)]` plus `RefCast`
/// allows a `&Q` to be reinterpreted as `&Query<Q>` at zero cost (used by the
/// `RangeBounds` impl for `QueryRange`).
#[derive(PartialEq, Eq, PartialOrd, Ord, ref_cast::RefCast)]
#[repr(transparent)]
pub(super) struct Query<Q: ?Sized> {
  key: Q,
}
51
52impl<'a, K, Q> Equivalent<RangeKeyRef<'a, K>> for Query<Q>
53where
54 K: ?Sized + Type,
55 Q: ?Sized + Equivalent<K::Ref<'a>>,
56{
57 #[inline]
58 fn equivalent(&self, key: &RangeKeyRef<'a, K>) -> bool {
59 match key.bound.as_ref() {
60 Bound::Unbounded => false,
61 Bound::Included(k) => self.key.equivalent(k),
62 Bound::Excluded(k) => self.key.equivalent(k),
63 }
64 }
65}
66
67impl<'a, K, Q> Comparable<RangeKeyRef<'a, K>> for Query<Q>
68where
69 K: ?Sized + Type,
70 Q: ?Sized + Comparable<K::Ref<'a>>,
71{
72 #[inline]
73 fn compare(&self, key: &RangeKeyRef<'a, K>) -> cmp::Ordering {
74 match key.bound.as_ref() {
75 Bound::Unbounded => cmp::Ordering::Greater,
76 Bound::Included(k) => self.key.compare(k),
77 Bound::Excluded(k) => self.key.compare(k),
78 }
79 }
80}
81
/// Adapter that turns any `R: RangeBounds<Q>` into a `RangeBounds<Query<Q>>`
/// by ref-casting each bound endpoint (see the impl below).
pub(super) struct QueryRange<Q: ?Sized, R>
where
  R: RangeBounds<Q>,
{
  // The user-supplied range.
  r: R,
  // Ties the query type `Q` to this adapter without storing a `Q`.
  _q: PhantomData<Q>,
}
89
90impl<Q: ?Sized, R> QueryRange<Q, R>
91where
92 R: RangeBounds<Q>,
93{
94 #[inline]
95 pub(super) const fn new(r: R) -> Self {
96 Self { r, _q: PhantomData }
97 }
98}
99
100impl<Q: ?Sized, R> RangeBounds<Query<Q>> for QueryRange<Q, R>
101where
102 R: RangeBounds<Q>,
103{
104 #[inline]
105 fn start_bound(&self) -> Bound<&Query<Q>> {
106 self.r.start_bound().map(RefCast::ref_cast)
107 }
108
109 fn end_bound(&self) -> Bound<&Query<Q>> {
110 self.r.end_bound().map(RefCast::ref_cast)
111 }
112}
113
/// A decoded view of a stored range key: the bound (with the decoded key)
/// plus the raw encoded bytes it was parsed from.
pub(super) struct RangeKeyRef<'a, K>
where
  K: ?Sized + Type,
{
  // Decoded bound; `Unbounded` when the encoded tag byte was `UNBOUNDED`.
  bound: Bound<K::Ref<'a>>,
  // The full encoded slice (tag byte + key payload) backing this ref.
  raw: &'a [u8],
}
121
122impl<'a, K> PartialEq for RangeKeyRef<'a, K>
123where
124 K: ?Sized + Type,
125 K::Ref<'a>: PartialEq,
126{
127 #[inline]
128 fn eq(&self, other: &Self) -> bool {
129 match (self.bound, other.bound) {
130 (Bound::Unbounded, Bound::Unbounded) => true,
131 (Bound::Included(_), Bound::Unbounded) => false,
132 (Bound::Excluded(_), Bound::Unbounded) => false,
133 (Bound::Unbounded, Bound::Included(_)) => false,
134 (Bound::Unbounded, Bound::Excluded(_)) => false,
135
136 (Bound::Included(a), Bound::Included(b)) => a == b,
137 (Bound::Included(a), Bound::Excluded(b)) => a == b,
138 (Bound::Excluded(a), Bound::Included(b)) => a == b,
139 (Bound::Excluded(a), Bound::Excluded(b)) => a == b,
140 }
141 }
142}
143
// Full equivalence relation, inherited from `K::Ref<'a>: Eq` — `eq` compares
// only the keys carried by the bounds.
impl<'a, K> Eq for RangeKeyRef<'a, K>
where
  K: ?Sized + Type,
  K::Ref<'a>: Eq,
{
}
150
impl<'a, K> PartialOrd for RangeKeyRef<'a, K>
where
  K: ?Sized + Type,
  // `Ord` (not just `PartialOrd`) is required because this delegates to the
  // total order defined below.
  K::Ref<'a>: Ord,
{
  #[inline]
  fn partial_cmp(&self, other: &Self) -> Option<cmp::Ordering> {
    Some(self.cmp(other))
  }
}
161
162impl<'a, K> Ord for RangeKeyRef<'a, K>
163where
164 K: ?Sized + Type,
165 K::Ref<'a>: Ord,
166{
167 #[inline]
168 fn cmp(&self, other: &Self) -> cmp::Ordering {
169 match (self.bound, other.bound) {
170 (Bound::Included(_), Bound::Unbounded) => cmp::Ordering::Greater,
171 (Bound::Excluded(_), Bound::Unbounded) => cmp::Ordering::Greater,
172 (Bound::Unbounded, Bound::Included(_)) => cmp::Ordering::Less,
173 (Bound::Unbounded, Bound::Excluded(_)) => cmp::Ordering::Less,
174 (Bound::Unbounded, Bound::Unbounded) => cmp::Ordering::Equal,
175
176 (Bound::Included(a), Bound::Included(b)) => a.cmp(&b),
177 (Bound::Included(a), Bound::Excluded(b)) => a.cmp(&b),
178 (Bound::Excluded(a), Bound::Included(b)) => a.cmp(&b),
179 (Bound::Excluded(a), Bound::Excluded(b)) => a.cmp(&b),
180 }
181 }
182}
183
184impl<'a, K> core::fmt::Debug for RangeKeyRef<'a, K>
185where
186 K: ?Sized + Type,
187 K::Ref<'a>: core::fmt::Debug,
188{
189 fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
190 f.debug_tuple("RangeKeyRef").field(&self.bound).finish()
191 }
192}
193
impl<K> Clone for RangeKeyRef<'_, K>
where
  K: ?Sized + Type,
{
  // Delegates to the `Copy` impl below.
  fn clone(&self) -> Self {
    *self
  }
}
202
// Bitwise copy; requires every field (including `Bound<K::Ref>`) to be `Copy`.
impl<K> Copy for RangeKeyRef<'_, K> where K: ?Sized + Type {}
204
205impl<'a, K> TypeRef<'a> for RangeKeyRef<'a, K>
206where
207 K: ?Sized + Type,
208{
209 #[inline]
210 unsafe fn from_slice(src: &'a [u8]) -> Self {
211 let (bound, key) = src.split_at(1);
212 let bound = match bound[0] {
213 UNBOUNDED => Bound::Unbounded,
214 INCLUDED => Bound::Included(K::Ref::from_slice(key)),
215 EXCLUDED => Bound::Excluded(K::Ref::from_slice(key)),
216 _ => unreachable!(),
217 };
218
219 Self { bound, raw: src }
220 }
221
222 #[inline]
223 fn as_raw(&self) -> Option<&'a [u8]> {
224 Some(self.raw)
225 }
226}
227
impl<K> Equivalent<PhantomRangeKey<K>> for RangeKeyRef<'_, K>
where
  K: ?Sized + Type,
{
  /// Exists only to satisfy trait bounds; never expected to be invoked (the
  /// body is `unreachable!()`), hence `#[cold]` + `#[inline(never)]`.
  #[inline(never)]
  #[cold]
  fn equivalent(&self, _: &PhantomRangeKey<K>) -> bool {
    unreachable!()
  }
}
238
impl<K> Comparable<PhantomRangeKey<K>> for RangeKeyRef<'_, K>
where
  K: ?Sized + Type,
{
  /// Exists only to satisfy trait bounds; never expected to be invoked (the
  /// body is `unreachable!()`), hence `#[cold]` + `#[inline(never)]`.
  #[inline(never)]
  #[cold]
  fn compare(&self, _: &PhantomRangeKey<K>) -> cmp::Ordering {
    unreachable!()
  }
}
249
250impl<'a, K> KeyRef<'a, PhantomRangeKey<K>> for RangeKeyRef<'a, K>
251where
252 K: ?Sized + Type,
253 for<'b> K::Ref<'b>: Ord,
254{
255 #[inline]
256 fn compare<Q>(&self, a: &Q) -> cmp::Ordering
257 where
258 Q: ?Sized + Ord + Comparable<Self>,
259 {
260 a.compare(self).reverse()
261 }
262
263 #[inline]
264 unsafe fn compare_binary(a: &[u8], b: &[u8]) -> cmp::Ordering {
265 let ak = <RangeKeyRef<'_, K> as TypeRef<'_>>::from_slice(a);
266 let bk = <RangeKeyRef<'_, K> as TypeRef<'_>>::from_slice(b);
267
268 ak.cmp(&bk)
269 }
270}
271
impl<'a, K> RangeKeyRef<'a, K>
where
  K: ?Sized + Type,
{
  /// The decoded bound, borrowed.
  #[inline]
  pub(super) fn bound(&self) -> Bound<&K::Ref<'a>> {
    self.bound.as_ref()
  }
}
281
/// Zero-sized placeholder standing in for a range-deletion span keyed by `K`;
/// never instantiated or encoded (see the `Type` impl below).
#[repr(transparent)]
pub(super) struct PhantomRangeDeletionSpan<K: ?Sized> {
  _k: PhantomData<K>,
}
286
287impl<K> core::fmt::Debug for PhantomRangeDeletionSpan<K>
288where
289 K: ?Sized,
290{
291 fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
292 f.debug_tuple("PhantomRangeDeletionSpan")
293 .field(&self._k)
294 .finish()
295 }
296}
297
298impl<K> Type for PhantomRangeDeletionSpan<K>
299where
300 K: ?Sized + Type,
301{
302 type Ref<'a> = RangeDeletionSpanRef<'a, K>;
303
304 type Error = core::convert::Infallible;
305
306 #[inline(never)]
307 #[cold]
308 fn encoded_len(&self) -> usize {
309 0
310 }
311
312 #[inline(never)]
313 #[cold]
314 fn encode_to_buffer(&self, _: &mut skl::VacantBuffer<'_>) -> Result<usize, Self::Error> {
315 Ok(0)
316 }
317}
318
/// A decoded view of a range-deletion span's end bound, plus the raw encoded
/// bytes it was parsed from. The start bound arrives separately as a
/// `RangeKeyRef` (see `contains` below).
pub(super) struct RangeDeletionSpanRef<'a, K>
where
  K: ?Sized + Type,
{
  // End bound of the deleted range.
  key: Bound<K::Ref<'a>>,
  // The full encoded slice (tag byte + key payload) backing this ref.
  raw: &'a [u8],
}
327
328impl<K> core::fmt::Debug for RangeDeletionSpanRef<'_, K>
329where
330 K: ?Sized + Type,
331 for<'a> K::Ref<'a>: core::fmt::Debug,
332{
333 fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
334 f.debug_tuple("RangeDeletionSpanRef")
335 .field(&self.key)
336 .finish()
337 }
338}
339
impl<K> Clone for RangeDeletionSpanRef<'_, K>
where
  K: ?Sized + Type,
{
  // Delegates to the `Copy` impl below.
  fn clone(&self) -> Self {
    *self
  }
}
348
// Bitwise copy; requires every field (including `Bound<K::Ref>`) to be `Copy`.
impl<K> Copy for RangeDeletionSpanRef<'_, K> where K: ?Sized + Type {}
350
351impl<'a, K> TypeRef<'a> for RangeDeletionSpanRef<'a, K>
352where
353 K: ?Sized + Type,
354{
355 #[inline]
356 unsafe fn from_slice(src: &'a [u8]) -> Self {
357 let (bound, key) = src.split_at(1);
358 let bound = match bound[0] {
359 UNBOUNDED => Bound::Unbounded,
360 INCLUDED => Bound::Included(K::Ref::from_slice(key)),
361 EXCLUDED => Bound::Excluded(K::Ref::from_slice(key)),
362 _ => panic!("invalid bound type"),
363 };
364
365 Self {
366 key: bound,
367 raw: src,
368 }
369 }
370
371 #[inline]
372 fn as_raw(&self) -> Option<&'a [u8]> {
373 Some(self.raw)
374 }
375}
376
impl<'a, K> RangeDeletionSpanRef<'a, K>
where
  K: ?Sized + Type,
{
  /// The decoded end bound, borrowed.
  #[inline]
  pub(super) fn bound(&self) -> Bound<&K::Ref<'a>> {
    self.key.as_ref()
  }
}
386
impl<'a, K> RangeDeletionSpanRef<'a, K>
where
  K: ?Sized + Type,
  for<'b> K::Ref<'b>: KeyRef<'b, K>,
{
  /// Returns `true` when `k` lies inside the range formed by `start` and this
  /// span's end bound, via the std `RangeBounds` impl for a pair of `Bound`s.
  #[inline]
  pub(super) fn contains(&self, start: &RangeKeyRef<'a, K>, k: &K::Ref<'a>) -> bool {
    (start.bound(), self.key.as_ref()).contains(k)
  }
}
397
/// Zero-sized placeholder standing in for a range-update span keyed by `K`
/// with value `V`; never instantiated or encoded (see the `Type` impl below).
#[repr(transparent)]
pub(super) struct PhantomRangeUpdateSpan<K, V>
where
  K: ?Sized,
  V: ?Sized,
{
  _k: PhantomData<K>,
  _v: PhantomData<V>,
}
407
408impl<K, V> core::fmt::Debug for PhantomRangeUpdateSpan<K, V>
409where
410 K: ?Sized,
411 V: ?Sized,
412{
413 fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
414 f.debug_struct("PhantomRangeUpdateSpan")
415 .field("key", &self._k)
416 .field("value", &self._v)
417 .finish()
418 }
419}
420
421impl<K, V> Type for PhantomRangeUpdateSpan<K, V>
422where
423 K: ?Sized + Type,
424 V: ?Sized + Type,
425{
426 type Ref<'a> = RangeUpdateSpanRef<'a, K, V>;
427
428 type Error = core::convert::Infallible;
429
430 #[inline(never)]
431 #[cold]
432 fn encoded_len(&self) -> usize {
433 0
434 }
435
436 #[inline(never)]
437 #[cold]
438 fn encode_to_buffer(&self, _: &mut skl::VacantBuffer<'_>) -> Result<usize, Self::Error> {
439 Ok(0)
440 }
441}
442
/// A decoded view of a range-update span: the end bound, the value applied
/// across the span, and the raw encoded bytes it was parsed from.
pub(super) struct RangeUpdateSpanRef<'a, K, V>
where
  K: ?Sized + Type,
  V: ?Sized + Type,
{
  // End bound of the updated range.
  key: Bound<K::Ref<'a>>,
  // Decoded value stored for the span.
  value: V::Ref<'a>,
  // The full encoded slice backing this ref.
  raw: &'a [u8],
}
453
454impl<K, V> core::fmt::Debug for RangeUpdateSpanRef<'_, K, V>
455where
456 K: ?Sized + Type,
457 for<'a> K::Ref<'a>: core::fmt::Debug,
458 V: ?Sized + Type,
459{
460 fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
461 f.debug_struct("RangeUpdateSpanRef")
462 .field("key", &self.key)
463 .field("value", &self.value)
464 .finish()
465 }
466}
467
impl<K, V> Clone for RangeUpdateSpanRef<'_, K, V>
where
  K: ?Sized + Type,
  V: ?Sized + Type,
{
  // Delegates to the `Copy` impl below.
  fn clone(&self) -> Self {
    *self
  }
}
477
// Bitwise copy; requires every field (bound, value ref, slice) to be `Copy`.
impl<K, V> Copy for RangeUpdateSpanRef<'_, K, V>
where
  K: ?Sized + Type,
  V: ?Sized + Type,
{
}
484
485impl<'a, K, V> TypeRef<'a> for RangeUpdateSpanRef<'a, K, V>
486where
487 K: ?Sized + Type,
488 V: ?Sized + Type,
489{
490 #[inline]
491 unsafe fn from_slice(src: &'a [u8]) -> Self {
492 let (bound, remaining) = src.split_at(1);
493 let (key, value) = match bound[0] {
494 UNBOUNDED => (Bound::Unbounded, V::Ref::from_slice(remaining)),
495 INCLUDED => {
496 let (len_buf, remaining) = remaining.split_at(4);
497 let klen = u32::from_le_bytes(len_buf.try_into().unwrap()) as usize;
498 let (key, value) = remaining.split_at(klen);
499 (
500 Bound::Included(K::Ref::from_slice(key)),
501 V::Ref::from_slice(value),
502 )
503 }
504 EXCLUDED => {
505 let (len_buf, remaining) = remaining.split_at(4);
506 let klen = u32::from_le_bytes(len_buf.try_into().unwrap()) as usize;
507 let (key, value) = remaining.split_at(klen);
508 (
509 Bound::Excluded(K::Ref::from_slice(key)),
510 V::Ref::from_slice(value),
511 )
512 }
513 _ => panic!("invalid bound type"),
514 };
515
516 Self {
517 key,
518 value,
519 raw: src,
520 }
521 }
522
523 #[inline]
524 fn as_raw(&self) -> Option<&'a [u8]> {
525 Some(self.raw)
526 }
527}
528
impl<'a, K, V> RangeUpdateSpanRef<'a, K, V>
where
  K: ?Sized + Type,
  V: ?Sized + Type,
{
  /// The decoded value stored for this update span.
  #[inline]
  pub(super) fn value(&self) -> &V::Ref<'a> {
    &self.value
  }

  /// The decoded end bound, borrowed.
  #[inline]
  pub(super) fn bound(&self) -> Bound<&K::Ref<'a>> {
    self.key.as_ref()
  }
}
544
impl<'a, K, V> RangeUpdateSpanRef<'a, K, V>
where
  K: ?Sized + Type,
  for<'b> K::Ref<'b>: KeyRef<'b, K>,
  V: ?Sized + Type,
{
  /// Returns `true` when `k` lies inside the range formed by `start` and this
  /// span's end bound, via the std `RangeBounds` impl for a pair of `Bound`s.
  #[inline]
  pub(super) fn contains(&self, start: &RangeKeyRef<'a, K>, k: &K::Ref<'a>) -> bool {
    (start.bound(), self.key.as_ref()).contains(k)
  }
}
556
/// Encoder for a range key: writes one tag byte
/// (`INCLUDED`/`EXCLUDED`/`UNBOUNDED`) followed by the key payload.
pub(super) struct RangeKeyEncoder<'a, K>
where
  K: ?Sized,
{
  // Bound to encode; `Unbounded` encodes as the tag byte alone.
  key: Bound<MaybeStructured<'a, K>>,
  // Total number of bytes `encode` will write (tag + key payload).
  pub(super) encoded_len: usize,
}
564
565impl<'a, K> RangeKeyEncoder<'a, K>
566where
567 K: ?Sized + Type,
568{
569 #[inline]
570 pub(super) fn new(key: Bound<MaybeStructured<'a, K>>) -> Self {
571 let len = match key {
572 Bound::Included(key) => 1 + key.encoded_len(),
573 Bound::Excluded(key) => 1 + key.encoded_len(),
574 Bound::Unbounded => 1,
575 };
576
577 Self {
578 key,
579 encoded_len: len,
580 }
581 }
582
583 #[inline]
584 pub(super) fn encode(&self, buf: &mut VacantBuffer<'_>) -> Result<usize, K::Error> {
585 match self.key.as_ref() {
586 Bound::Excluded(key) => {
587 buf.put_u8_unchecked(EXCLUDED);
588 key.encode_to_buffer(buf).map(|_| self.encoded_len)
589 }
590 Bound::Included(key) => {
591 buf.put_u8_unchecked(INCLUDED);
592 key.encode_to_buffer(buf).map(|_| self.encoded_len)
593 }
594 Bound::Unbounded => {
595 buf.put_u8_unchecked(UNBOUNDED);
596 Ok(1)
597 }
598 }
599 }
600}
601
/// Encoder for a range-deletion span's end bound: one tag byte plus the key
/// payload for concrete bounds.
pub(super) struct RangeDeletionSpan<'a, K: ?Sized> {
  // End bound of the range being deleted.
  end_bound: Bound<MaybeStructured<'a, K>>,
  // Total number of bytes `encode` will write.
  pub(super) encoded_len: usize,
}
606
impl<K: ?Sized> Clone for RangeDeletionSpan<'_, K> {
  // Delegates to the `Copy` impl below.
  #[inline]
  fn clone(&self) -> Self {
    *self
  }
}
613
// Bitwise copy; requires every field (bound + length) to be `Copy`.
impl<K: ?Sized> Copy for RangeDeletionSpan<'_, K> {}
615
616impl<'a, K> RangeDeletionSpan<'a, K>
617where
618 K: ?Sized + Type,
619{
620 #[inline]
621 pub(super) fn new(end_bound: Bound<MaybeStructured<'a, K>>) -> Self {
622 let end_len = match end_bound {
623 Bound::Included(key) | Bound::Excluded(key) => 1 + key.encoded_len(),
624 Bound::Unbounded => 1,
625 };
626
627 Self {
628 end_bound,
629 encoded_len: end_len,
630 }
631 }
632
633 #[inline]
634 pub(super) fn encode(&self, buf: &mut VacantBuffer<'_>) -> Result<usize, K::Error> {
635 macro_rules! encode_deletion_end_key {
636 ($this:ident($end_bound:ident, $buf:ident $(, $k:ident)?)) => {{
637 $buf.put_u8_unchecked($end_bound);
638 $($k.encode_to_buffer($buf).map(|_| $this.encoded_len))?
639 }};
640 }
641
642 match self.end_bound {
643 Bound::Included(k) => encode_deletion_end_key!(self(INCLUDED, buf, k)),
644 Bound::Excluded(k) => encode_deletion_end_key!(self(EXCLUDED, buf, k)),
645 Bound::Unbounded => {
646 encode_deletion_end_key!(self(UNBOUNDED, buf));
647 Ok(self.encoded_len)
648 }
649 }
650 }
651}
652
/// Encoder for a range-update span: the end bound, a `u32` key-length prefix
/// for concrete bounds, and the value to apply across the span.
pub(super) struct RangeUpdateSpan<'a, K, V>
where
  K: ?Sized,
  V: ?Sized,
{
  // End bound of the range being updated.
  end_bound: Bound<MaybeStructured<'a, K>>,
  // Encoded length of the end-bound key (0 for `Unbounded`).
  end_key_len: u32,
  // Total number of bytes `encode` will write (bound section + value).
  pub(super) encoded_len: usize,
  // Value applied across the span.
  value: MaybeStructured<'a, V>,
}
663
impl<K, V> Clone for RangeUpdateSpan<'_, K, V>
where
  K: ?Sized,
  V: ?Sized,
{
  // Delegates to the `Copy` impl below.
  #[inline]
  fn clone(&self) -> Self {
    *self
  }
}
674
// Bitwise copy; requires every field (bounds, lengths, value) to be `Copy`.
impl<K, V> Copy for RangeUpdateSpan<'_, K, V>
where
  K: ?Sized,
  V: ?Sized,
{
}
681
682impl<'a, K, V> RangeUpdateSpan<'a, K, V>
683where
684 K: ?Sized + Type,
685 V: ?Sized + Type,
686{
687 #[inline]
688 pub(super) fn new(
689 end_bound: Bound<MaybeStructured<'a, K>>,
690 value: MaybeStructured<'a, V>,
691 ) -> Self {
692 const END_KEY_LEN_SIZE: usize = core::mem::size_of::<u32>();
693
694 let (end_len, end_key_len) = match end_bound {
695 Bound::Included(key) | Bound::Excluded(key) => {
696 let len = key.encoded_len();
697 (1 + END_KEY_LEN_SIZE + len, len as u32)
698 }
699 Bound::Unbounded => (1, 0),
700 };
701
702 Self {
703 end_bound,
704 end_key_len,
705 encoded_len: end_len + value.encoded_len(), value,
707 }
708 }
709
710 #[inline]
711 pub(super) fn encode(
712 &self,
713 buf: &mut VacantBuffer<'_>,
714 ) -> Result<usize, Either<K::Error, V::Error>> {
715 macro_rules! encode_update_end_key {
716 ($this:ident($end_bound:ident, $buf:ident, $k:ident)) => {{
717 $buf.put_u8_unchecked($end_bound);
718 $buf.put_u32_le_unchecked($this.end_key_len);
719 $k.encode_to_buffer($buf).map_err(Either::Left)?;
720 $this
721 .value
722 .encode_to_buffer($buf)
723 .map_err(Either::Right)
724 .map(|_| self.encoded_len)
725 }};
726 }
727
728 match self.end_bound {
729 Bound::Included(k) => encode_update_end_key!(self(INCLUDED, buf, k)),
730 Bound::Excluded(k) => encode_update_end_key!(self(EXCLUDED, buf, k)),
731 Bound::Unbounded => {
732 buf.put_u8_unchecked(UNBOUNDED);
733 self
734 .value
735 .encode_to_buffer(buf)
736 .map_err(Either::Right)
737 .map(|_| self.encoded_len)
738 }
739 }
740 }
741}