1use crate::alloc::Deallocation;
19use crate::buffer::Buffer;
20use crate::native::ArrowNativeType;
21use crate::{BufferBuilder, MutableBuffer, OffsetBuffer};
22use std::fmt::Formatter;
23use std::marker::PhantomData;
24use std::ops::Deref;
25
/// A strongly-typed wrapper around a [`Buffer`] that provides access to its
/// contents as a slice of `T` (see the [`Deref`] impl below).
#[derive(Clone)]
pub struct ScalarBuffer<T: ArrowNativeType> {
    // Underlying byte buffer; the `From<Buffer>` constructor guarantees its
    // data pointer is aligned for `T`.
    buffer: Buffer,
    // Marks the logical element type `T` without storing a value of it.
    phantom: PhantomData<T>,
}
50
51impl<T: ArrowNativeType> std::fmt::Debug for ScalarBuffer<T> {
52 fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
53 f.debug_tuple("ScalarBuffer").field(&self.as_ref()).finish()
54 }
55}
56
57impl<T: ArrowNativeType> ScalarBuffer<T> {
58 pub fn new(buffer: Buffer, offset: usize, len: usize) -> Self {
69 let size = std::mem::size_of::<T>();
70 let byte_offset = offset.checked_mul(size).expect("offset overflow");
71 let byte_len = len.checked_mul(size).expect("length overflow");
72 buffer.slice_with_length(byte_offset, byte_len).into()
73 }
74
75 pub fn shrink_to_fit(&mut self) {
77 self.buffer.shrink_to_fit();
78 }
79
80 pub fn slice(&self, offset: usize, len: usize) -> Self {
82 Self::new(self.buffer.clone(), offset, len)
83 }
84
85 pub fn inner(&self) -> &Buffer {
87 &self.buffer
88 }
89
90 pub fn into_inner(self) -> Buffer {
92 self.buffer
93 }
94
95 #[inline]
99 pub fn ptr_eq(&self, other: &Self) -> bool {
100 self.buffer.ptr_eq(&other.buffer)
101 }
102}
103
impl<T: ArrowNativeType> Deref for ScalarBuffer<T> {
    type Target = [T];

    /// Views the buffer contents as a `[T]` slice.
    #[inline]
    fn deref(&self) -> &Self::Target {
        // SAFETY: the `From<Buffer>` constructor asserts the buffer's data
        // pointer is aligned for `T`, and `Buffer` provides a pointer valid
        // for `len()` bytes. The integer division truncates, so any trailing
        // partial element is excluded from the slice.
        unsafe {
            std::slice::from_raw_parts(
                self.buffer.as_ptr() as *const T,
                self.buffer.len() / std::mem::size_of::<T>(),
            )
        }
    }
}
118
119impl<T: ArrowNativeType> AsRef<[T]> for ScalarBuffer<T> {
120 #[inline]
121 fn as_ref(&self) -> &[T] {
122 self
123 }
124}
125
126impl<T: ArrowNativeType> From<MutableBuffer> for ScalarBuffer<T> {
127 fn from(value: MutableBuffer) -> Self {
128 Buffer::from(value).into()
129 }
130}
131
132impl<T: ArrowNativeType> From<Buffer> for ScalarBuffer<T> {
133 fn from(buffer: Buffer) -> Self {
134 let align = std::mem::align_of::<T>();
135 let is_aligned = buffer.as_ptr().align_offset(align) == 0;
136
137 match buffer.deallocation() {
138 Deallocation::Standard(_) => assert!(
139 is_aligned,
140 "Memory pointer is not aligned with the specified scalar type"
141 ),
142 Deallocation::Custom(_, _) =>
143 assert!(is_aligned, "Memory pointer from external source (e.g, FFI) is not aligned with the specified scalar type. Before importing buffer through FFI, please make sure the allocation is aligned."),
144 }
145
146 Self {
147 buffer,
148 phantom: Default::default(),
149 }
150 }
151}
152
153impl<T: ArrowNativeType> From<OffsetBuffer<T>> for ScalarBuffer<T> {
154 fn from(value: OffsetBuffer<T>) -> Self {
155 value.into_inner()
156 }
157}
158
159impl<T: ArrowNativeType> From<Vec<T>> for ScalarBuffer<T> {
160 fn from(value: Vec<T>) -> Self {
161 Self {
162 buffer: Buffer::from_vec(value),
163 phantom: Default::default(),
164 }
165 }
166}
167
168impl<T: ArrowNativeType> From<ScalarBuffer<T>> for Vec<T> {
169 fn from(value: ScalarBuffer<T>) -> Self {
170 value
171 .buffer
172 .into_vec()
173 .unwrap_or_else(|buffer| buffer.typed_data::<T>().into())
174 }
175}
176
177impl<T: ArrowNativeType> From<BufferBuilder<T>> for ScalarBuffer<T> {
178 fn from(mut value: BufferBuilder<T>) -> Self {
179 let len = value.len();
180 Self::new(value.finish(), 0, len)
181 }
182}
183
184impl<T: ArrowNativeType> FromIterator<T> for ScalarBuffer<T> {
185 fn from_iter<I: IntoIterator<Item = T>>(iter: I) -> Self {
186 iter.into_iter().collect::<Vec<_>>().into()
187 }
188}
189
190impl<'a, T: ArrowNativeType> IntoIterator for &'a ScalarBuffer<T> {
191 type Item = &'a T;
192 type IntoIter = std::slice::Iter<'a, T>;
193
194 fn into_iter(self) -> Self::IntoIter {
195 self.as_ref().iter()
196 }
197}
198
199impl<T: ArrowNativeType, S: AsRef<[T]> + ?Sized> PartialEq<S> for ScalarBuffer<T> {
200 fn eq(&self, other: &S) -> bool {
201 self.as_ref().eq(other.as_ref())
202 }
203}
204
205impl<T: ArrowNativeType, const N: usize> PartialEq<ScalarBuffer<T>> for [T; N] {
206 fn eq(&self, other: &ScalarBuffer<T>) -> bool {
207 self.as_ref().eq(other.as_ref())
208 }
209}
210
211impl<T: ArrowNativeType> PartialEq<ScalarBuffer<T>> for [T] {
212 fn eq(&self, other: &ScalarBuffer<T>) -> bool {
213 self.as_ref().eq(other.as_ref())
214 }
215}
216
217impl<T: ArrowNativeType> PartialEq<ScalarBuffer<T>> for Vec<T> {
218 fn eq(&self, other: &ScalarBuffer<T>) -> bool {
219 self.as_slice().eq(other.as_ref())
220 }
221}
222
#[cfg(test)]
mod tests {
    use std::{ptr::NonNull, sync::Arc};

    use super::*;

    /// Typed views at various offsets/lengths, including zero-length slices
    /// at interior and end positions.
    #[test]
    fn test_basic() {
        let expected = [0_i32, 1, 2];
        let buffer = Buffer::from_iter(expected.iter().cloned());
        let typed = ScalarBuffer::<i32>::new(buffer.clone(), 0, 3);
        assert_eq!(*typed, expected);

        let typed = ScalarBuffer::<i32>::new(buffer.clone(), 1, 2);
        assert_eq!(*typed, expected[1..]);

        let typed = ScalarBuffer::<i32>::new(buffer.clone(), 1, 0);
        assert!(typed.is_empty());

        // Zero-length view starting exactly at the end is allowed.
        let typed = ScalarBuffer::<i32>::new(buffer, 3, 0);
        assert!(typed.is_empty());
    }

    /// Debug output delegates to the typed slice's Debug impl.
    #[test]
    fn test_debug() {
        let buffer = ScalarBuffer::from(vec![1, 2, 3]);
        assert_eq!(format!("{buffer:?}"), "ScalarBuffer([1, 2, 3])");
    }

    /// A byte-shifted buffer must be rejected by the alignment assert in
    /// `From<Buffer>`.
    #[test]
    #[should_panic(expected = "Memory pointer is not aligned with the specified scalar type")]
    fn test_unaligned() {
        let expected = [0_i32, 1, 2];
        let buffer = Buffer::from_iter(expected.iter().cloned());
        let buffer = buffer.slice(1);
        ScalarBuffer::<i32>::new(buffer, 0, 2);
    }

    // Out-of-bounds length: offset 1 + len 3 exceeds the 3-element buffer.
    #[test]
    #[should_panic(expected = "the offset of the new Buffer cannot exceed the existing length")]
    fn test_length_out_of_bounds() {
        let buffer = Buffer::from_iter([0_i32, 1, 2]);
        ScalarBuffer::<i32>::new(buffer, 1, 3);
    }

    // Out-of-bounds offset: element offset 4 is past the end of 3 elements.
    #[test]
    #[should_panic(expected = "the offset of the new Buffer cannot exceed the existing length")]
    fn test_offset_out_of_bounds() {
        let buffer = Buffer::from_iter([0_i32, 1, 2]);
        ScalarBuffer::<i32>::new(buffer, 4, 0);
    }

    // `offset * size_of::<i32>()` overflows usize → "offset overflow".
    #[test]
    #[should_panic(expected = "offset overflow")]
    fn test_length_overflow() {
        let buffer = Buffer::from_iter([0_i32, 1, 2]);
        ScalarBuffer::<i32>::new(buffer, usize::MAX, 1);
    }

    // Smallest offset whose byte conversion (× 4) overflows usize.
    #[test]
    #[should_panic(expected = "offset overflow")]
    fn test_start_overflow() {
        let buffer = Buffer::from_iter([0_i32, 1, 2]);
        ScalarBuffer::<i32>::new(buffer, usize::MAX / 4 + 1, 0);
    }

    // Smallest len whose byte conversion (× 4) overflows usize.
    #[test]
    #[should_panic(expected = "length overflow")]
    fn test_end_overflow() {
        let buffer = Buffer::from_iter([0_i32, 1, 2]);
        ScalarBuffer::<i32>::new(buffer, 0, usize::MAX / 4 + 1);
    }

    /// `From<BufferBuilder<T>>` preserves the builder's contents.
    #[test]
    fn convert_from_buffer_builder() {
        let input = vec![1, 2, 3, 4];
        let buffer_builder = BufferBuilder::from(input.clone());
        let scalar_buffer = ScalarBuffer::from(buffer_builder);
        assert_eq!(scalar_buffer.as_ref(), input);
    }

    /// `Vec::from(ScalarBuffer)` reuses the allocation only when the buffer
    /// uniquely owns a standard allocation with no offset; otherwise it
    /// copies. Checked via pointer identity.
    #[test]
    fn into_vec() {
        let input = vec![1u8, 2, 3, 4];

        // Case 1: uniquely owned, zero offset — allocation is reused
        // (same pointer).
        let input_buffer = Buffer::from_vec(input.clone());
        let input_ptr = input_buffer.as_ptr();
        let input_len = input_buffer.len();
        let scalar_buffer = ScalarBuffer::<u8>::new(input_buffer, 0, input_len);
        let vec = Vec::from(scalar_buffer);
        assert_eq!(vec.as_slice(), input.as_slice());
        assert_eq!(vec.as_ptr(), input_ptr);

        // Case 2: custom (externally owned) allocation — must copy
        // (different pointer).
        let mut input_clone = input.clone();
        let input_ptr = NonNull::new(input_clone.as_mut_ptr()).unwrap();
        let dealloc = Arc::new(());
        let buffer =
            unsafe { Buffer::from_custom_allocation(input_ptr, input_clone.len(), dealloc as _) };
        let scalar_buffer = ScalarBuffer::<u8>::new(buffer, 0, input.len());
        let vec = Vec::from(scalar_buffer);
        assert_eq!(vec, input.as_slice());
        assert_ne!(vec.as_ptr(), input_ptr.as_ptr());

        // Case 3: non-zero offset into an owned buffer — must copy.
        let input_buffer = Buffer::from_vec(input.clone());
        let input_ptr = input_buffer.as_ptr();
        let input_len = input_buffer.len();
        let scalar_buffer = ScalarBuffer::<u8>::new(input_buffer, 1, input_len - 1);
        let vec = Vec::from(scalar_buffer);
        assert_eq!(vec.as_slice(), &input[1..]);
        assert_ne!(vec.as_ptr(), input_ptr);

        // Case 4: buffer built from a slice ref does not share the source
        // Vec's allocation.
        let buffer = Buffer::from_slice_ref(input.as_slice());
        let scalar_buffer = ScalarBuffer::<u8>::new(buffer, 0, input.len());
        let vec = Vec::from(scalar_buffer);
        assert_eq!(vec, input.as_slice());
        assert_ne!(vec.as_ptr(), input.as_ptr());
    }
}