// oxigdal_core/io/traits.rs

use crate::error::Result;
7
/// Half-open byte interval `[start, end)` within a data source.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct ByteRange {
    /// Inclusive start offset in bytes.
    pub start: u64,
    /// Exclusive end offset in bytes.
    pub end: u64,
}

impl ByteRange {
    /// Creates a range covering `[start, end)`.
    ///
    /// No validation is performed: a range with `start > end` is allowed
    /// and is treated as empty by [`ByteRange::is_empty`] and
    /// [`ByteRange::len`].
    #[must_use]
    pub const fn new(start: u64, end: u64) -> Self {
        Self { start, end }
    }

    /// Creates a range from a start offset and a length in bytes.
    ///
    /// `offset + length` uses plain addition: it panics on overflow in
    /// debug builds and wraps in release builds.
    #[must_use]
    pub const fn from_offset_length(offset: u64, length: u64) -> Self {
        Self {
            start: offset,
            end: offset + length,
        }
    }

    /// Number of bytes covered by the range.
    ///
    /// Returns 0 for inverted ranges (`start > end`) instead of
    /// underflowing (previously a debug panic / release wraparound),
    /// consistent with [`ByteRange::is_empty`].
    #[must_use]
    pub const fn len(&self) -> u64 {
        self.end.saturating_sub(self.start)
    }

    /// True when the range covers no bytes (`start >= end`).
    #[must_use]
    pub const fn is_empty(&self) -> bool {
        self.start >= self.end
    }

    /// True when the two ranges share at least one byte.
    ///
    /// Because ranges are half-open, ranges that merely touch
    /// (`a.end == b.start`) do NOT overlap.
    #[must_use]
    pub const fn overlaps(&self, other: &Self) -> bool {
        self.start < other.end && self.end > other.start
    }

    /// True when the two ranges touch exactly end-to-start, in either
    /// order, with no gap and no overlap.
    #[must_use]
    pub const fn is_adjacent(&self, other: &Self) -> bool {
        self.end == other.start || self.start == other.end
    }

    /// Merges two ranges into their union when they overlap or are
    /// adjacent; returns `None` when a gap separates them (their union
    /// would not be a single contiguous range).
    #[must_use]
    pub fn merge(&self, other: &Self) -> Option<Self> {
        if self.overlaps(other) || self.is_adjacent(other) {
            Some(Self {
                start: self.start.min(other.start),
                end: self.end.max(other.end),
            })
        } else {
            None
        }
    }
}
70
71pub trait DataSource: Send + Sync {
73 fn size(&self) -> Result<u64>;
75
76 fn read_range(&self, range: ByteRange) -> Result<Vec<u8>>;
78
79 fn read_ranges(&self, ranges: &[ByteRange]) -> Result<Vec<Vec<u8>>> {
81 ranges.iter().map(|r| self.read_range(*r)).collect()
82 }
83
84 fn supports_range_requests(&self) -> bool {
86 true
87 }
88}
89
/// Asynchronous counterpart of `DataSource`: read-only random-access
/// byte source.
#[cfg(feature = "async")]
#[async_trait::async_trait]
pub trait AsyncDataSource: Send + Sync {
    /// Total size in bytes of the underlying data.
    async fn size(&self) -> Result<u64>;

    /// Reads exactly the bytes covered by `range`.
    async fn read_range(&self, range: ByteRange) -> Result<Vec<u8>>;

    /// Reads several ranges, returning one buffer per input range.
    ///
    /// The default implementation awaits one `read_range` per entry
    /// sequentially and stops at the first error.
    async fn read_ranges(&self, ranges: &[ByteRange]) -> Result<Vec<Vec<u8>>> {
        let mut chunks = Vec::with_capacity(ranges.len());
        for &r in ranges {
            chunks.push(self.read_range(r).await?);
        }
        Ok(chunks)
    }

    /// Whether arbitrary byte-range reads are supported (default: `true`).
    fn supports_range_requests(&self) -> bool {
        true
    }
}
114
/// Writable byte sink with random-access writes and appends.
pub trait DataSink: Send + Sync {
    /// Writes `data` starting at absolute byte `offset`.
    fn write_at(&mut self, offset: u64, data: &[u8]) -> Result<()>;

    /// Appends `data` at the end of the sink.
    // NOTE(review): the returned u64 is presumably the start offset of the
    // appended bytes — confirm against implementations.
    fn append(&mut self, data: &[u8]) -> Result<u64>;

    /// Flushes any buffered writes to the underlying storage.
    fn flush(&mut self) -> Result<()>;

    /// Resizes the sink to `size` bytes.
    // NOTE(review): behavior when `size` exceeds the current size (extend
    // vs error) is not visible here — confirm.
    fn truncate(&mut self, size: u64) -> Result<()>;

    /// Current total size of the sink in bytes.
    fn size(&self) -> Result<u64>;
}
132
/// Read access to raster pixel data, either by arbitrary region or by tile.
pub trait RasterRead {
    /// Buffer type produced by reads (implementation-defined pixel
    /// container).
    type Buffer;

    /// Reads a `width` x `height` window of `band` whose top-left corner
    /// is at (`x_offset`, `y_offset`).
    // NOTE(review): offsets/dimensions are presumably in pixels from the
    // image origin — confirm against implementations.
    fn read_region(
        &self,
        band: u32,
        x_offset: u64,
        y_offset: u64,
        width: u64,
        height: u64,
    ) -> Result<Self::Buffer>;

    /// Reads one whole tile of `band`, addressed by tile-grid column and
    /// row.
    fn read_tile(&self, band: u32, tile_col: u32, tile_row: u32) -> Result<Self::Buffer>;
}
151
/// Write access to raster pixel data, either by arbitrary region or by
/// tile. Mirrors `RasterRead`.
pub trait RasterWrite {
    /// Buffer type consumed by writes (implementation-defined pixel
    /// container).
    type Buffer;

    /// Writes `data` into `band` with its top-left corner at
    /// (`x_offset`, `y_offset`).
    // NOTE(review): the region's extent is presumably carried by `data`
    // itself (no width/height parameters) — confirm.
    fn write_region(
        &mut self,
        band: u32,
        x_offset: u64,
        y_offset: u64,
        data: &Self::Buffer,
    ) -> Result<()>;

    /// Writes one whole tile of `band`, addressed by tile-grid column and
    /// row.
    fn write_tile(
        &mut self,
        band: u32,
        tile_col: u32,
        tile_row: u32,
        data: &Self::Buffer,
    ) -> Result<()>;
}
175
/// Asynchronous counterpart of `RasterRead`.
#[cfg(feature = "async")]
#[async_trait::async_trait]
pub trait AsyncRasterRead: Send + Sync {
    /// Buffer type produced by reads; `Send` so results can cross task
    /// boundaries.
    type Buffer: Send;

    /// Reads a `width` x `height` window of `band` whose top-left corner
    /// is at (`x_offset`, `y_offset`).
    async fn read_region(
        &self,
        band: u32,
        x_offset: u64,
        y_offset: u64,
        width: u64,
        height: u64,
    ) -> Result<Self::Buffer>;

    /// Reads one whole tile of `band`, addressed by tile-grid column and
    /// row.
    async fn read_tile(&self, band: u32, tile_col: u32, tile_row: u32) -> Result<Self::Buffer>;
}
196
/// Access to reduced-resolution overview levels (image pyramid).
pub trait OverviewSupport {
    /// Number of overview levels available.
    fn overview_count(&self) -> u32;

    /// Dimensions of overview `level` as `(width, height)`.
    // NOTE(review): `None` presumably means `level` is out of range —
    // confirm against implementations.
    fn overview_dimensions(&self, level: u32) -> Option<(u64, u64)>;
}
205
/// Tiled, overview-aware access suitable for ranged reads.
// NOTE(review): "Cog" presumably stands for Cloud-Optimized GeoTIFF,
// inferred from the name — confirm.
pub trait CogSupport: OverviewSupport {
    /// Tile dimensions in pixels as `(width, height)`.
    fn tile_size(&self) -> (u32, u32);

    /// Number of tiles in the grid.
    // NOTE(review): pair is presumably `(columns, rows)` at full
    // resolution — confirm.
    fn tile_count(&self) -> (u32, u32);

    /// Byte range of the tile at (`tile_col`, `tile_row`) in overview
    /// `level`, enabling a single ranged read to fetch it; `None` when the
    /// tile is unavailable.
    fn tile_byte_range(&self, level: u32, tile_col: u32, tile_row: u32) -> Option<ByteRange>;
}
217
#[cfg(test)]
mod tests {
    use super::*;

    // Basic length/emptiness invariants.
    #[test]
    fn test_byte_range() {
        let range = ByteRange::new(100, 200);
        assert_eq!(range.len(), 100);
        assert!(!range.is_empty());

        let empty = ByteRange::new(100, 100);
        assert!(empty.is_empty());
        assert_eq!(empty.len(), 0);
    }

    // Overlap is symmetric; half-open ranges that merely touch do not overlap.
    #[test]
    fn test_byte_range_overlap() {
        let a = ByteRange::new(0, 100);
        let b = ByteRange::new(50, 150);
        let c = ByteRange::new(200, 300);

        assert!(a.overlaps(&b));
        assert!(b.overlaps(&a));
        assert!(!a.overlaps(&c));
        // Touching end-to-start is NOT an overlap for half-open intervals.
        assert!(!a.overlaps(&ByteRange::new(100, 200)));
    }

    // Previously untested: adjacency is symmetric and requires exact
    // end-to-start contact.
    #[test]
    fn test_byte_range_adjacency() {
        let a = ByteRange::new(0, 100);
        let b = ByteRange::new(100, 200);
        let gap = ByteRange::new(101, 200);

        assert!(a.is_adjacent(&b));
        assert!(b.is_adjacent(&a));
        assert!(!a.is_adjacent(&gap));
        assert!(!a.is_adjacent(&a));
    }

    // Merge succeeds for adjacent and overlapping ranges, fails across gaps.
    #[test]
    fn test_byte_range_merge() {
        let a = ByteRange::new(0, 100);
        let b = ByteRange::new(100, 200);
        let c = ByteRange::new(50, 150);

        let merged_adj = a.merge(&b);
        assert!(merged_adj.is_some());
        let merged = merged_adj.expect("merge should work");
        assert_eq!(merged.start, 0);
        assert_eq!(merged.end, 200);

        let merged_overlap = a.merge(&c);
        assert!(merged_overlap.is_some());
        let merged2 = merged_overlap.expect("merge should work");
        assert_eq!(merged2.start, 0);
        assert_eq!(merged2.end, 150);

        // Merge is symmetric.
        assert_eq!(a.merge(&b), b.merge(&a));

        let d = ByteRange::new(300, 400);
        assert!(a.merge(&d).is_none());
    }

    #[test]
    fn test_from_offset_length() {
        let range = ByteRange::from_offset_length(100, 50);
        assert_eq!(range.start, 100);
        assert_eq!(range.end, 150);
        assert_eq!(range.len(), 50);
    }
}