1use std::any::type_name;
15use std::ffi::{c_char, c_void, CStr};
16use std::fmt::Debug;
17use std::marker::PhantomData;
18use std::mem::{align_of, size_of, transmute, MaybeUninit};
19use std::num::NonZeroUsize;
20use std::ptr;
21use std::slice::from_raw_parts_mut;
22
23use memchr::memchr;
24
25#[cfg(varnishsys_6)]
26use crate::ffi::WS_Inside;
27use crate::ffi::{txt, VCL_STRING};
28#[cfg(not(varnishsys_6))]
29use crate::ffi::{vrt_blob, WS_Allocated, VCL_BLOB};
30#[cfg(not(varnishsys_6))]
31pub use crate::vcl::ws_str_buffer::WsBlobBuffer;
32pub use crate::vcl::ws_str_buffer::{WsBuffer, WsStrBuffer, WsTempBuffer};
33use crate::vcl::{VclError, VclResult};
34use crate::{ffi, validate_ws};
35
/// Thin wrappers over the C workspace API.
///
/// Each method delegates directly to the Varnish runtime (`WS_Alloc`,
/// `WS_ReserveAll`, `WS_Release`). A mock with identical signatures is
/// compiled instead under `#[cfg(test)]` (see below).
#[cfg(not(test))]
impl ffi::ws {
    /// Allocate `size` bytes from the workspace.
    ///
    /// Returns a null pointer when the workspace cannot satisfy the
    /// request (the test mock below models the same behavior).
    /// `size` must be non-zero.
    pub(crate) unsafe fn alloc(&mut self, size: u32) -> *mut c_void {
        assert!(size > 0);
        ffi::WS_Alloc(self, size)
    }
    /// Reserve all remaining free space; returns the reserved byte count.
    pub(crate) unsafe fn reserve_all(&mut self) -> u32 {
        ffi::WS_ReserveAll(self)
    }
    /// End the active reservation, keeping the first `len` bytes allocated.
    pub(crate) unsafe fn release(&mut self, len: u32) {
        ffi::WS_Release(self, len);
    }
}
49
/// In-process mock of the C workspace API used by unit tests.
///
/// Models the bump-allocator behavior of the real `WS_*` functions on a
/// buffer owned by `TestWS`: `f` is the free pointer, `e` the end of the
/// buffer, and `r` the active reservation (null when none is active).
#[cfg(test)]
impl ffi::ws {
    /// Every allocation is rounded up to pointer alignment.
    const ALIGN: usize = align_of::<*const c_void>();

    /// Bump-allocate `size` bytes (rounded up to [`Self::ALIGN`]).
    ///
    /// Returns null when the free region cannot hold the request.
    pub(crate) unsafe fn alloc(&mut self, size: u32) -> *mut c_void {
        let ws = validate_ws(self);
        assert!(size > 0);
        let aligned_sz = (size as usize).div_ceil(Self::ALIGN) * Self::ALIGN;
        if ws.e.offset_from(ws.f) < aligned_sz as isize {
            ptr::null_mut()
        } else {
            let p = ws.f.cast::<c_void>();
            ws.f = ws.f.add(aligned_sz);
            // NOTE(review): `c_void` has alignment 1, so this assert is
            // vacuous; pointer alignment is really enforced by rounding
            // `aligned_sz` above and by `TestWS::new`.
            assert!(p.is_aligned());
            p
        }
    }

    /// Reserve the entire free region; returns its size in bytes.
    ///
    /// Panics if a reservation is already active.
    #[allow(clippy::unused_self)]
    pub(crate) unsafe fn reserve_all(&mut self) -> u32 {
        let ws = validate_ws(self);
        assert!(ws.r.is_null());
        ws.r = ws.e;
        ws.e.offset_from(ws.f).try_into().unwrap()
    }

    /// End the active reservation, committing its first `size` bytes.
    #[allow(clippy::unused_self)]
    pub(crate) unsafe fn release(&mut self, size: u32) {
        let ws = validate_ws(self);
        // Check that a reservation exists *before* any pointer arithmetic
        // on `ws.r`: `offset_from` on a null pointer is undefined behavior.
        assert!(!ws.r.is_null());
        assert!(isize::try_from(size).unwrap() <= ws.e.offset_from(ws.f));
        assert!(isize::try_from(size).unwrap() <= ws.r.offset_from(ws.f));
        let aligned_sz = usize::try_from(size).unwrap().div_ceil(Self::ALIGN) * Self::ALIGN;
        ws.f = ws.f.add(aligned_sz);
        assert!(ws.f.is_aligned());
        ws.r = ptr::null_mut::<c_char>();
    }
}
91
/// A wrapper around a raw Varnish workspace (`ffi::ws`).
///
/// References handed out by the allocation methods are tied to the `'ctx`
/// lifetime rather than to the `Workspace` borrow itself.
#[derive(Debug)]
pub struct Workspace<'ctx> {
    /// Raw pointer to the underlying C workspace; never null
    /// (checked in `from_ptr`).
    pub raw: *mut ffi::ws,
    /// Ties this wrapper (and allocations made through it) to the
    /// context lifetime without storing any borrowed data.
    _phantom: PhantomData<&'ctx ()>,
}
105
106impl<'ctx> Workspace<'ctx> {
107 pub(crate) fn from_ptr(raw: *mut ffi::ws) -> Self {
109 assert!(!raw.is_null(), "raw pointer was null");
110 Self {
111 raw,
112 _phantom: PhantomData,
113 }
114 }
115
116 pub unsafe fn alloc(&mut self, size: NonZeroUsize) -> *mut c_void {
121 validate_ws(self.raw).alloc(size.get() as u32)
122 }
123
124 pub fn contains(&self, data: &[u8]) -> bool {
126 #[cfg(varnishsys_6)]
127 {
128 let last = match data.last() {
129 None => data.as_ptr(),
130 Some(p) => p as *const _,
131 };
132 unsafe { WS_Inside(self.raw, data.as_ptr().cast(), last.cast()) == 1 }
133 }
134 #[cfg(not(varnishsys_6))]
135 {
136 unsafe { WS_Allocated(self.raw, data.as_ptr().cast(), data.len() as isize) == 1 }
137 }
138 }
139
140 pub fn allocate(
143 &mut self,
144 size: NonZeroUsize,
145 ) -> Result<&'ctx mut [MaybeUninit<u8>], VclError> {
146 let ptr = unsafe { self.alloc(size) };
147 if ptr.is_null() {
148 Err(VclError::WsOutOfMemory(size))
149 } else {
150 Ok(unsafe { from_raw_parts_mut(ptr.cast(), size.get()) })
151 }
152 }
153
154 pub fn allocate_zeroed(&mut self, size: NonZeroUsize) -> Result<&'ctx mut [u8], VclError> {
156 let buf = self.allocate(size)?;
157 unsafe {
158 buf.as_mut_ptr().write_bytes(0, buf.len());
159 Ok(slice_assume_init_mut(buf))
160 }
161 }
162
163 pub(crate) fn copy_value<T>(&mut self, value: T) -> Result<&'ctx mut T, VclError> {
166 let size = NonZeroUsize::new(size_of::<T>())
167 .unwrap_or_else(|| panic!("Type {} has sizeof=0", type_name::<T>()));
168
169 let val = unsafe { self.alloc(size).cast::<T>().as_mut() };
170 let val = val.ok_or(VclError::WsOutOfMemory(size))?;
171 *val = value;
172 Ok(val)
173 }
174
175 fn copy_bytes(&mut self, src: impl AsRef<[u8]>) -> Result<&'ctx [u8], VclError> {
177 let src = src.as_ref();
181 let Some(len) = NonZeroUsize::new(src.len()) else {
182 Err(VclError::CStr(c"Unable to allocate 0 bytes in a Workspace"))?
183 };
184 let dest = self.allocate(len)?;
185 dest.copy_from_slice(maybe_uninit(src));
186 Ok(unsafe { slice_assume_init_mut(dest) })
187 }
188
189 #[cfg(not(varnishsys_6))]
191 pub fn copy_blob(&mut self, value: impl AsRef<[u8]>) -> Result<VCL_BLOB, VclError> {
192 let buf = self.copy_bytes(value)?;
193 let blob = self.copy_value(vrt_blob {
194 blob: ptr::from_ref(buf).cast::<c_void>(),
195 len: buf.len(),
196 ..Default::default()
197 })?;
198 Ok(VCL_BLOB(ptr::from_ref(blob)))
199 }
200
201 pub fn copy_txt(&mut self, value: impl AsRef<CStr>) -> Result<txt, VclError> {
203 let dest = self.copy_bytes(value.as_ref().to_bytes_with_nul())?;
204 Ok(bytes_with_nul_to_txt(dest))
205 }
206
207 pub fn copy_cstr(&mut self, value: impl AsRef<CStr>) -> Result<VCL_STRING, VclError> {
209 Ok(VCL_STRING(self.copy_txt(value)?.b))
210 }
211
212 pub fn copy_bytes_with_null(&mut self, src: impl AsRef<[u8]>) -> Result<txt, VclError> {
216 let src = src.as_ref();
217 match memchr(0, src) {
218 Some(pos) if pos + 1 == src.len() => {
219 self.copy_txt(unsafe { CStr::from_bytes_with_nul_unchecked(src) })
221 }
222 Some(_) => Err(VclError::CStr(c"NULL byte found in the source string")),
223 None => {
224 let len = src.len();
227 let dest = self.allocate(unsafe { NonZeroUsize::new_unchecked(len + 1) })?;
228 dest[..len].copy_from_slice(maybe_uninit(src));
229 dest[len].write(b'\0');
230 let dest = unsafe { slice_assume_init_mut(dest) };
231 Ok(bytes_with_nul_to_txt(dest))
232 }
233 }
234 }
235
236 pub fn vcl_string_builder(&mut self) -> VclResult<WsStrBuffer<'ctx>> {
241 unsafe { WsStrBuffer::new(validate_ws(self.raw)) }
242 }
243
244 #[cfg(not(varnishsys_6))]
247 pub fn vcl_blob_builder(&mut self) -> VclResult<WsBlobBuffer<'ctx>> {
248 unsafe { WsBlobBuffer::new(validate_ws(self.raw)) }
249 }
250
251 pub fn slice_builder<T: Copy>(&mut self) -> VclResult<WsTempBuffer<'ctx, T>> {
257 unsafe { WsTempBuffer::new(validate_ws(self.raw)) }
258 }
259}
260
/// Reinterpret an initialized byte slice as a `MaybeUninit<u8>` slice.
fn maybe_uninit(value: &[u8]) -> &[MaybeUninit<u8>] {
    // SAFETY: `MaybeUninit<u8>` is `repr(transparent)` over `u8`, so the
    // two slice types share layout, and any initialized byte is a valid
    // `MaybeUninit<u8>`.
    unsafe { &*(ptr::from_ref(value) as *const [MaybeUninit<u8>]) }
}
270
/// Convert a mutable `MaybeUninit<u8>` slice into a `&mut [u8]`.
///
/// # Safety
/// Every element of `value` must have been initialized before the call.
unsafe fn slice_assume_init_mut(value: &mut [MaybeUninit<u8>]) -> &mut [u8] {
    // SAFETY: caller guarantees full initialization, and
    // `MaybeUninit<u8>` has the same layout as `u8`.
    from_raw_parts_mut(value.as_mut_ptr().cast::<u8>(), value.len())
}
278
/// Build a `txt` from a byte slice whose final byte is NUL.
///
/// The caller must guarantee that `buf` ends with a NUL byte and contains
/// no interior NULs — this is not re-checked here. All call sites in this
/// file construct the buffer that way (via `to_bytes_with_nul` or by
/// appending the terminator themselves).
fn bytes_with_nul_to_txt(buf: &[u8]) -> txt {
    // SAFETY: relies on the caller-provided invariant documented above.
    txt::from_cstr(unsafe { CStr::from_bytes_with_nul_unchecked(buf) })
}
283
/// A self-contained workspace for unit tests: owns the backing buffer and
/// exposes it through a properly initialized `ffi::ws`.
#[derive(Debug)]
pub struct TestWS {
    /// The C workspace structure whose pointers reference `space`.
    c_ws: ffi::ws,
    /// Backing storage; never read directly, but kept alive so the
    /// pointers inside `c_ws` remain valid.
    #[expect(dead_code)]
    space: Vec<c_char>,
}
294
295impl TestWS {
296 pub fn new(sz: usize) -> Self {
298 let al = align_of::<*const c_void>();
299 let aligned_sz = (sz / al) * al;
300 let mut space: Vec<c_char> = vec![0; sz];
301 let s = space.as_mut_ptr();
302 assert!(s.is_aligned());
303 assert!(unsafe { s.add(aligned_sz).is_aligned() });
304 Self {
305 c_ws: ffi::ws {
306 magic: ffi::WS_MAGIC,
307 id: ['t' as c_char, 's' as c_char, 't' as c_char, '\0' as c_char],
308 s,
309 f: s,
310 r: ptr::null_mut(),
311 e: unsafe { s.add(aligned_sz) },
312 },
313 space,
314 }
315 }
316
317 pub fn as_ptr(&mut self) -> *mut ffi::ws {
320 ptr::from_mut::<ffi::ws>(&mut self.c_ws)
321 }
322
323 pub fn workspace(&mut self) -> Workspace<'_> {
325 Workspace::from_ptr(self.as_ptr())
326 }
327}
328
#[cfg(test)]
mod tests {
    use std::num::NonZero;

    use super::*;

    /// Exhaust a 160-byte workspace in 16-byte chunks, then verify that
    /// any further allocation fails with a null pointer.
    #[test]
    fn ws_test_alloc() {
        let mut test_ws = TestWS::new(160);
        let mut ws = test_ws.workspace();
        let chunk = NonZero::new(16).unwrap();
        for _ in 0..10 {
            let p = unsafe { ws.alloc(chunk) };
            assert!(!p.is_null());
        }
        let overflow = unsafe { ws.alloc(NonZero::new(1).unwrap()) };
        assert!(overflow.is_null());
    }
}