use FromNativeObject;
use TryDestroyError;
use TryDestroyErrorKind;
use VulkanObject;
use core::allocator_helper::AllocatorHelper;
use core::{self, Device};
use libc::c_void;
use nv_external_memory_capabilities;
use std::cmp::Ordering;
use std::hash::{Hash, Hasher};
use std::mem;
use std::ptr;
use std::sync::Arc;
use vks;
use win32_types;

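/// Reference-counted wrapper around a Vulkan `VkDeviceMemory` handle.
///
/// Cloning a `DeviceMemory` only clones the reference. If the memory is owned, the underlying
/// allocation is freed with `vkFreeMemory` when the last reference is dropped.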
#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct DeviceMemory(Arc<Inner>);

impl VulkanObject for DeviceMemory {
    type NativeVulkanObject = vks::core::VkDeviceMemory;

    #[inline]
    fn id(&self) -> u64 {
        self.handle()
    }

    #[inline]
    fn as_native_vulkan_object(&self) -> Self::NativeVulkanObject {
        self.handle()
    }

    fn try_destroy(self) -> Result<(), TryDestroyError<Self>> {
        let strong_count = Arc::strong_count(&self.0);
        if strong_count == 1 {
            Ok(())
        }
        else {
            Err(TryDestroyError::new(self, TryDestroyErrorKind::InUse(Some(strong_count))))
        }
    }
}

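/// Parameters used to construct a `DeviceMemory` from a native `VkDeviceMemory` handle via
/// `FromNativeObject::from_native_object`.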
pub struct FromNativeDeviceMemoryParameters {
    /// `true`, if this `DeviceMemory` owns the handle and frees it (`vkFreeMemory`) when dropped.
    pub owned: bool,

    /// The `Device` from which this memory was allocated.
    pub device: Device,

    /// The size of the allocation in bytes.
    pub size: u64,

    /// Allocation callbacks to be used when the memory is freed. Only relevant if `owned` is `true`.
    pub allocator: Option<Box<core::Allocator>>,
}

impl FromNativeDeviceMemoryParameters {
    #[inline]
    pub fn new(owned: bool, device: Device, size: u64, allocator: Option<Box<core::Allocator>>) -> Self {
        FromNativeDeviceMemoryParameters {
            owned: owned,
            device: device,
            size: size,
            allocator: allocator,
        }
    }
}

impl FromNativeObject for DeviceMemory {
    type Parameters = FromNativeDeviceMemoryParameters;

    unsafe fn from_native_object(object: Self::NativeVulkanObject, params: Self::Parameters) -> Self {
        DeviceMemory::new(object, params.owned, params.device, params.allocator.map(AllocatorHelper::new), params.size)
    }
}

impl DeviceMemory {
    pub(crate) fn new(handle: vks::core::VkDeviceMemory, owned: bool, device: Device, allocator: Option<AllocatorHelper>, size: u64) -> Self {
        DeviceMemory(Arc::new(Inner {
            handle: handle,
            owned: owned,
            device: device,
            allocator: allocator,
            size: size,
        }))
    }

    #[inline]
    pub(crate) fn handle(&self) -> vks::core::VkDeviceMemory {
        self.0.handle
    }

    #[inline]
    pub(crate) fn loader(&self) -> &vks::DeviceProcAddrLoader {
        self.0.device.loader()
    }

    #[inline]
    pub(crate) fn device_handle(&self) -> vks::core::VkDevice {
        self.0.device.handle()
    }

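    /// Returns the size in bytes of this memory allocation.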
    pub fn size(&self) -> u64 {
        self.0.size
    }

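    /// Returns the current commitment in bytes for this memory object.
    ///
    /// See `vkGetDeviceMemoryCommitment`.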
    pub fn get_commitment(&self) -> u64 {
        let mut commitment = 0;
        unsafe {
            self.loader().core.vkGetDeviceMemoryCommitment(self.device_handle(), self.handle(), &mut commitment)
        };

        commitment
    }

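    /// Maps a range of this memory object into host address space.
    ///
    /// The mapping is released (`vkUnmapMemory`) when the returned `MappedMemory` is dropped.
    ///
    /// See `vkMapMemory`.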
    pub fn map(&self, offset: u64, size: core::OptionalDeviceSize, flags: core::MemoryMapFlags) -> Result<MappedMemory, core::Error> {
        let mut mapped = ptr::null_mut();
        let res = unsafe {
            self.loader().core.vkMapMemory(self.device_handle(), self.handle(), offset, size.into(), flags.bits(), &mut mapped)
        };

        if res == vks::core::VK_SUCCESS {
            let size = match size {
                core::OptionalDeviceSize::Size(size) => size,
                core::OptionalDeviceSize::WholeSize => self.0.size - offset,
            };

            Ok(MappedMemory {
                memory: self.clone(),
                mapped: mapped,
                offset: offset,
                size: size,
            })
        }
        else {
            Err(res.into())
        }
    }

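    /// Flushes the given mapped memory ranges, making host writes visible to the device.
    ///
    /// All ranges must refer to memory allocated from the same `Device`, and `ranges` must not
    /// be empty.
    ///
    /// See `vkFlushMappedMemoryRanges`.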
    pub fn flush(ranges: &[core::MappedMemoryRange]) -> Result<(), core::Error> {
        let loader = ranges[0].memory.loader();
        let device_handle = ranges[0].memory.device_handle();

        let ranges_wrappers: Vec<_> = ranges.iter().map(|r| core::VkMappedMemoryRangeWrapper::new(r, true)).collect();
        let ranges: Vec<_> = ranges_wrappers.iter().map(|r| r.vks_struct).collect();

        let res = unsafe {
            loader.core.vkFlushMappedMemoryRanges(device_handle, ranges.len() as u32, ranges.as_ptr())
        };

        if res == vks::core::VK_SUCCESS {
            Ok(())
        }
        else {
            Err(res.into())
        }
    }

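    /// Invalidates the given mapped memory ranges, making device writes visible to the host.
    ///
    /// All ranges must refer to memory allocated from the same `Device`, and `ranges` must not
    /// be empty.
    ///
    /// See `vkInvalidateMappedMemoryRanges`.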
    pub fn invalidate(ranges: &[core::MappedMemoryRange]) -> Result<(), core::Error> {
        let loader = ranges[0].memory.loader();
        let device_handle = ranges[0].memory.device_handle();

        let ranges_wrappers: Vec<_> = ranges.iter().map(|r| core::VkMappedMemoryRangeWrapper::new(r, true)).collect();
        let ranges: Vec<_> = ranges_wrappers.iter().map(|r| r.vks_struct).collect();

        let res = unsafe {
            loader.core.vkInvalidateMappedMemoryRanges(device_handle, ranges.len() as u32, ranges.as_ptr())
        };

        if res == vks::core::VK_SUCCESS {
            Ok(())
        }
        else {
            Err(res.into())
        }
    }

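    /// Retrieves a Win32 handle for this memory object.
    ///
    /// See `vkGetMemoryWin32HandleNV` (`VK_NV_external_memory_win32`).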
    pub fn get_win32_handle_nv(&self, handle_type: nv_external_memory_capabilities::ExternalMemoryHandleTypeFlagsNv) -> Result<win32_types::HANDLE, core::Error> {
        unsafe {
            let mut handle = mem::uninitialized();
            let res = self.loader().nv_external_memory_win32.vkGetMemoryWin32HandleNV(self.device_handle(), self.handle(), handle_type.bits(), &mut handle);

            if res == vks::core::VK_SUCCESS {
                Ok(handle)
            }
            else {
                Err(res.into())
            }
        }
    }
}

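/// A host-visible mapping of a `DeviceMemory` range, created by `DeviceMemory::map`.
///
/// The range is unmapped (`vkUnmapMemory`) when the `MappedMemory` is dropped.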
#[derive(Debug)]
pub struct MappedMemory {
    memory: DeviceMemory,
    mapped: *mut c_void,
    offset: u64,
    size: u64,
}

impl Drop for MappedMemory {
    fn drop(&mut self) {
        unsafe {
            self.memory.loader().core.vkUnmapMemory(self.memory.device_handle(), self.memory.handle());
        }
    }
}

impl MappedMemory {
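    /// Returns the host pointer to the beginning of the mapped range.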
    pub fn as_ptr(&self) -> *mut c_void {
        self.mapped
    }

    pub fn offset(&self) -> u64 {
        self.offset
    }

    pub fn size(&self) -> u64 {
        self.size
    }

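    /// Flushes the entire mapped range, making host writes visible to the device.
    ///
    /// See `vkFlushMappedMemoryRanges`.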
    pub fn flush(&self, chain: &Option<core::MappedMemoryRangeChain>) -> Result<(), core::Error> {
        #[allow(unused_variables)]
        let (pnext, chain_wrapper) = core::MappedMemoryRangeChainWrapper::new_optional(chain, true);

        let range = vks::core::VkMappedMemoryRange {
            sType: vks::core::VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE,
            pNext: pnext,
            memory: self.memory.handle(),
            offset: self.offset,
            size: vks::core::VK_WHOLE_SIZE,
        };

        let res = unsafe {
            self.memory.loader().core.vkFlushMappedMemoryRanges(self.memory.device_handle(), 1, &range)
        };

        if res == vks::core::VK_SUCCESS {
            Ok(())
        }
        else {
            Err(res.into())
        }
    }

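    /// Invalidates the entire mapped range, making device writes visible to the host.
    ///
    /// See `vkInvalidateMappedMemoryRanges`.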
    pub fn invalidate(&self, chain: &Option<core::MappedMemoryRangeChain>) -> Result<(), core::Error> {
        #[allow(unused_variables)]
        let (pnext, chain_wrapper) = core::MappedMemoryRangeChainWrapper::new_optional(chain, true);

        let range = vks::core::VkMappedMemoryRange {
            sType: vks::core::VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE,
            pNext: pnext,
            memory: self.memory.handle(),
            offset: self.offset,
            size: vks::core::VK_WHOLE_SIZE,
        };

        let res = unsafe {
            self.memory.loader().core.vkInvalidateMappedMemoryRanges(self.memory.device_handle(), 1, &range)
        };

        if res == vks::core::VK_SUCCESS {
            Ok(())
        }
        else {
            Err(res.into())
        }
    }
}

#[derive(Debug)]
struct Inner {
    handle: vks::core::VkDeviceMemory,
    owned: bool,
    device: Device,
    allocator: Option<AllocatorHelper>,
    size: u64,
}

impl Drop for Inner {
    fn drop(&mut self) {
        if self.owned {
            let allocator = match self.allocator {
                Some(ref allocator) => allocator.callbacks(),
                None => ptr::null(),
            };

            unsafe {
                self.device.loader().core.vkFreeMemory(self.device.handle(), self.handle, allocator);
            }
        }
    }
}

unsafe impl Send for Inner { }

unsafe impl Sync for Inner { }

impl PartialEq for Inner {
    #[inline]
    fn eq(&self, other: &Self) -> bool {
        self.handle == other.handle
    }
}

impl Eq for Inner { }

impl PartialOrd for Inner {
    #[inline]
    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
        self.handle.partial_cmp(&other.handle)
    }
}

impl Ord for Inner {
    #[inline]
    fn cmp(&self, other: &Self) -> Ordering {
        self.handle.cmp(&other.handle)
    }
}

impl Hash for Inner {
    #[inline]
    fn hash<H: Hasher>(&self, state: &mut H) {
        self.handle.hash(state);
    }
}