1#![cfg_attr(test, deny(warnings))]
2#![deny(missing_docs)]
3
4extern crate memalloc;
10
11use std::sync::atomic::{self, AtomicUsize, Ordering};
12use std::ops::Deref;
13use std::io::Read;
14use std::{io, mem, fmt};
15
/// An append-only, reference-counted byte buffer.
///
/// Bytes can only be appended at the end; cheap immutable `Slice` handles
/// to the already-written prefix can be handed out at any time.
pub struct AppendBuf {
    // Raw fat pointer to the shared, refcounted allocation.
    alloc: *mut AllocInfo,
    // Number of bytes written so far (length of the readable prefix).
    position: usize
}
21
// SAFETY(review): the refcount is atomic and appends require `&mut self`,
// so moving/sharing an AppendBuf across threads appears sound — confirm no
// path mutates the buffer through a shared reference.
unsafe impl Send for AppendBuf {}
unsafe impl Sync for AppendBuf {}
24
/// Header shared by a buffer and all of its slices: an atomic reference
/// count immediately followed by the (unsized) byte storage.
struct AllocInfo {
    refcount: AtomicUsize,
    buf: [u8]
}
29
// SAFETY(review): the only shared mutable state is the atomic refcount;
// buffer contents are written only through `&mut AppendBuf` — TODO confirm
// `get_write_buf` can never alias an outstanding Slice.
unsafe impl Send for AllocInfo {}
unsafe impl Sync for AllocInfo {}
32
/// An immutable, cheaply cloneable view into a region of an `AppendBuf`.
pub struct Slice {
    // Shared, refcounted allocation this view points into.
    alloc: *mut AllocInfo,
    // Start of the view within the allocation's `buf`.
    offset: usize,
    // Length of the view in bytes.
    len: usize
}
39
// SAFETY(review): a Slice only ever reads its (immutable, already-written)
// region, and lifetime is managed by the atomic refcount.
unsafe impl Send for Slice {}
unsafe impl Sync for Slice {}
42
impl Slice {
    /// Create a new `Slice` spanning from `offset` to the end of this one.
    ///
    /// # Panics
    ///
    /// Panics if `offset` is greater than the length of the `Slice`.
    pub fn slice_from(&self, offset: usize) -> Slice {
        // `offset == self.len` is allowed and yields an empty Slice.
        if self.len < offset {
            panic!("Sliced past the end of an appendbuf::Slice,
                   the length was {:?} and the desired offset was {:?}",
                   self.len, offset);
        }

        // The new handle shares the allocation, so bump the refcount first.
        self.allocinfo().increment();

        Slice {
            alloc: self.alloc,
            offset: self.offset + offset,
            len: self.len - offset
        }
    }

    /// Create a new `Slice` containing only the first `len` bytes of this one.
    ///
    /// # Panics
    ///
    /// Panics if `len` is greater than the length of the `Slice`.
    pub fn slice_to(&self, len: usize) -> Slice {
        // `len == self.len` is allowed and yields an identical Slice.
        if self.len < len {
            panic!("Sliced past the end of an appendbuf::Slice,
                   the length was {:?} and the desired length was {:?}",
                   self.len, len);
        }

        self.allocinfo().increment();

        Slice {
            alloc: self.alloc,
            offset: self.offset,
            len: len
        }
    }

    /// Create a new `Slice` of the region between `start` and `end`.
    ///
    /// # Panics
    ///
    /// Panics if `end < start` (subtraction underflow) or if the requested
    /// region extends past the end of this `Slice`.
    pub fn slice(&self, start: usize, end: usize) -> Slice {
        // The intermediate Slice's extra refcount is released when it drops
        // at the end of this function, so the net count change is +1.
        let slice = self.slice_from(start);
        slice.slice_to(end - start)
    }

    fn allocinfo(&self) -> &AllocInfo {
        // `alloc` stays valid for as long as this Slice holds its reference.
        unsafe { mem::transmute(self.alloc) }
    }
}
89
90impl AppendBuf {
91 pub fn new(len: usize) -> AppendBuf {
93 AppendBuf {
94 alloc: unsafe { AllocInfo::allocate(len) },
95 position: 0
96 }
97 }
98
99 pub fn from_buf(vec: Vec<u8>) -> Result<Self, Vec<u8>> {
107 if vec.capacity() < mem::size_of::<AtomicUsize>() {
108 return Err(vec)
109 }
110
111 let vec_len = vec.len();
112 let alloc_info = unsafe { AllocInfo::from_buf(vec) };
113
114 Ok(AppendBuf {
115 alloc: alloc_info,
116 position: vec_len - mem::size_of::<AtomicUsize>()
117 })
118 }
119
120 pub fn slice(&self) -> Slice {
122 self.allocinfo().increment();
123
124 Slice {
125 alloc: self.alloc,
126 offset: 0,
127 len: self.position
128 }
129 }
130
131 pub fn remaining(&self) -> usize {
133 self.allocinfo().buf.len() - self.position
134 }
135
136 pub fn fill(&mut self, buf: &[u8]) -> usize {
141 use std::io::Write;
142
143 let amount = self.get_write_buf().write(buf).unwrap();
145 self.position += amount;
146
147 amount
148 }
149
150 pub fn get_write_buf(&mut self) -> &mut [u8] {
158 let position = self.position;
159 &mut self.allocinfo_mut().buf[position..]
160 }
161
162 pub unsafe fn advance(&mut self, amount: usize) {
167 self.position += amount;
168 }
169
170 pub fn read_from<R: Read>(&mut self, reader: &mut R) -> io::Result<usize> {
176 reader.read(self.get_write_buf()).map(|n| {
177 unsafe { self.advance(n) };
178 n
179 })
180 }
181
182 fn allocinfo(&self) -> &AllocInfo {
183 unsafe { mem::transmute(self.alloc) }
184 }
185
186 fn allocinfo_mut(&mut self) -> &mut AllocInfo {
187 unsafe { mem::transmute(self.alloc) }
188 }
189}
190
191impl fmt::Debug for AppendBuf {
192 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
193 fmt::Debug::fmt(&**self, f)
194 }
195}
196
197impl fmt::Debug for Slice {
198 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
199 fmt::Debug::fmt(&**self, f)
200 }
201}
202
203impl Deref for AppendBuf {
204 type Target = [u8];
205
206 fn deref(&self) -> &[u8] {
207 &self.allocinfo().buf[..self.position]
208 }
209}
210
211impl AsRef<[u8]> for AppendBuf {
212 fn as_ref(&self) -> &[u8] { self }
213}
214
215impl io::Write for AppendBuf {
216 fn write(&mut self, buf: &[u8]) -> io::Result<usize> {
217 Ok(self.fill(buf))
218 }
219
220 fn flush(&mut self) -> io::Result<()> { Ok(()) }
221}
222
impl Deref for Slice {
    type Target = [u8];

    fn deref(&self) -> &[u8] {
        // SAFETY: `alloc` stays live while this Slice holds its refcount,
        // and `offset + len` was bounds-checked when the Slice was created.
        unsafe { &(*self.alloc).buf[self.offset..self.offset + self.len] }
    }
}
230
231impl AsRef<[u8]> for Slice {
232 fn as_ref(&self) -> &[u8] { self }
233}
234
impl Clone for Slice {
    fn clone(&self) -> Slice {
        // Bump the shared refcount before creating the second handle so the
        // allocation cannot be freed out from under either copy.
        self.allocinfo().increment();

        Slice {
            alloc: self.alloc,
            offset: self.offset,
            len: self.len
        }
    }
}
246
impl AllocInfo {
    /// Allocate a fresh AllocInfo whose `buf` field is `size` bytes long,
    /// with its refcount initialized to 1.
    ///
    /// NOTE(review): the backing memory comes from `memalloc::allocate`
    /// with no explicit alignment request, but the header is used as an
    /// `AtomicUsize` — confirm the allocator guarantees word alignment.
    unsafe fn allocate(size: usize) -> *mut Self {
        let alloc = memalloc::allocate(size + mem::size_of::<AtomicUsize>());
        AllocInfo::from_raw_buf(alloc, size)
    }

    /// Take ownership of `buf`'s allocation, repurposing its first word as
    /// the refcount and the rest of its capacity as byte storage.
    ///
    /// NOTE(review): the Vec's allocation is later freed by the `Box` drop
    /// in `decrement` — verify the layouts/allocators are compatible.
    unsafe fn from_buf(mut buf: Vec<u8>) -> *mut Self {
        let refcount_size = mem::size_of::<AtomicUsize>();
        let this = AllocInfo::from_raw_buf(buf.as_mut_ptr(), buf.capacity() - refcount_size);
        // Ownership of the allocation has moved into the returned
        // AllocInfo; prevent the Vec from freeing it.
        mem::forget(buf);
        this
    }

    /// Fabricate a fat `*mut AllocInfo` from a raw data pointer and the
    /// length of the trailing `buf` field, then set the refcount to 1.
    unsafe fn from_raw_buf(buf: *mut u8, buf_cap: usize) -> *mut Self {
        // Builds an unsized fat pointer from a (data ptr, slice len) pair;
        // `buf_cap` becomes the length of the `buf` field, not of the whole
        // allocation (the header word precedes it).
        let this = mem::transmute::<_, *mut Self>((buf, buf_cap));
        (*this).refcount = AtomicUsize::new(1);
        this
    }

    #[inline(always)]
    fn increment(&self) {
        // Relaxed suffices for increments: a new reference can only be
        // created from an existing live one, so the count stays nonzero.
        self.refcount.fetch_add(1, Ordering::Relaxed);
    }

    #[inline(always)]
    unsafe fn decrement(&self) {
        // Release so every prior use of the buffer happens-before the free.
        // Only the thread that drops the last reference proceeds.
        if self.refcount.fetch_sub(1, Ordering::Release) != 1 { return }

        // Acquire fence pairs with the Release decrements above, making all
        // other threads' accesses visible before deallocation.
        atomic::fence(Ordering::Acquire);

        // Reconstitute an owning Box so its Drop frees the allocation.
        drop(mem::transmute::<&AllocInfo, Box<AllocInfo>>(self))
    }
}
307
impl Drop for Slice {
    fn drop(&mut self) {
        // Release this handle's reference; the last one frees the buffer.
        unsafe { (*self.alloc).decrement() }
    }
}
313
impl Drop for AppendBuf {
    fn drop(&mut self) {
        // Release the buffer's own reference; outstanding Slices keep the
        // allocation alive until they drop too.
        unsafe { (*self.alloc).decrement() }
    }
}
319
320fn _compile_test() {
321 fn _is_send_sync<T: Send + Sync>() {}
322 _is_send_sync::<AppendBuf>();
323 _is_send_sync::<Slice>();
324}
325
#[test]
fn test_write_and_slice() {
    let mut buf = AppendBuf::new(10);
    let written = buf.fill(&[1, 2, 3]);
    assert_eq!(written, 3);

    // Taking a slice exposes the written prefix...
    let view = buf.slice();
    assert_eq!(&*view, &[1, 2, 3]);

    // ...without consuming the buffer's contents.
    assert_eq!(&*buf, &[1, 2, 3]);
}
335
#[test]
fn test_overlong_write() {
    // Filling with more data than fits truncates to the free space.
    let mut buf = AppendBuf::new(5);
    let written = buf.fill(&[1, 2, 3, 4, 5, 6]);
    assert_eq!(written, 5);

    assert_eq!(&*buf.slice(), &[1, 2, 3, 4, 5]);
}
343
#[test]
fn test_slice_slicing() {
    let data = &[1, 2, 3, 4, 5, 6, 7, 8, 9, 10];

    let mut buf = AppendBuf::new(10);
    assert_eq!(buf.fill(data), 10);

    // Each slicing method must agree with the equivalent slice of `data`.
    assert_eq!(&*buf.slice(), data);
    assert_eq!(&*buf.slice().slice_to(5), &data[..5]);
    assert_eq!(&*buf.slice().slice_from(6), &data[6..]);
    assert_eq!(&*buf.slice().slice(2, 7), &data[2..7]);
}
356
#[test]
fn test_many_writes() {
    let mut buf = AppendBuf::new(100);

    // Successive fills append; each reports the bytes it consumed.
    for chunk in [&[1u8, 2, 3, 4][..], &[10, 12, 13, 14, 15], &[34, 35]].iter() {
        assert_eq!(buf.fill(chunk), chunk.len());
    }

    assert_eq!(&*buf.slice(), &[1, 2, 3, 4, 10, 12, 13, 14, 15, 34, 35]);
}
367
#[test]
fn test_slice_then_write() {
    let mut buf = AppendBuf::new(20);

    // A slice taken before any write stays empty forever.
    let empty = buf.slice();
    assert_eq!(&*empty, &[]);

    assert_eq!(buf.fill(&[5, 6, 7, 8]), 4);

    // Later writes must not retroactively grow earlier slices.
    let not_empty = buf.slice();
    assert_eq!(&*empty, &[]);
    assert_eq!(&*not_empty, &[5, 6, 7, 8]);

    assert_eq!(buf.fill(&[9, 10, 11, 12, 13]), 5);
    assert_eq!(&*empty, &[]);
    assert_eq!(&*not_empty, &[5, 6, 7, 8]);
    assert_eq!(&*buf.slice(), &[5, 6, 7, 8, 9, 10, 11, 12, 13]);
}
385
#[test]
fn test_slice_bounds_edge_cases() {
    let data = &[1, 2, 3, 4, 5, 6];

    let mut buf = AppendBuf::new(data.len());
    assert_eq!(buf.fill(data), data.len());

    // Slicing exactly to the full length is allowed (not past-the-end).
    let full = buf.slice().slice_to(data.len());
    assert_eq!(&*full, data);

    // Slicing from offset zero is allowed and is the identity.
    let from_start = buf.slice().slice_from(0);
    assert_eq!(&*from_start, data);
}
399
#[test]
#[should_panic = "the desired offset"]
fn test_slice_from_bounds_checks() {
    let data = &[1, 2, 3, 4, 5, 6, 7, 8, 9, 10];

    let mut buf = AppendBuf::new(10);
    assert_eq!(buf.fill(data), 10);

    // An offset past the end must panic with the documented message.
    let _ = buf.slice().slice_from(100);
}
410
#[test]
#[should_panic = "the desired length"]
fn test_slice_to_bounds_checks() {
    let data = &[1, 2, 3, 4, 5, 6, 7, 8, 9, 10];

    let mut buf = AppendBuf::new(10);
    assert_eq!(buf.fill(data), 10);

    // A length past the end must panic with the documented message.
    let _ = buf.slice().slice_to(100);
}
421
#[test]
fn test_convert_from_vec() {
    // The first size_of::<AtomicUsize>() bytes (8 on 64-bit targets) are
    // claimed by the refcount header; the rest become the buffer contents.
    let source = vec![0, 0, 0, 0, 0, 0, 0, 0, 1, 2, 3, 4, 5, 6, 7, 8];
    let converted = AppendBuf::from_buf(source.clone()).unwrap();
    assert_eq!(&*converted, &source[8..]);

    // Too small to hold the header: the Vec is handed back untouched.
    let tiny = vec![0, 0, 0, 0];
    assert_eq!(AppendBuf::from_buf(tiny.clone()).unwrap_err(), tiny);
}
432