// dacite/core/render_pass.rs

use FromNativeObject;
use TryDestroyError;
use TryDestroyErrorKind;
use VulkanObject;
use core::allocator_helper::AllocatorHelper;
use core::{self, Device};
use std::cmp::Ordering;
use std::hash::{Hash, Hasher};
use std::mem;
use std::ptr;
use std::sync::Arc;
use vks;
/// Refers to a Vulkan render pass object (`VkRenderPass`).
///
/// Cloning is cheap: clones share one reference-counted `Inner`, and the
/// underlying Vulkan object is destroyed when the last clone is dropped
/// (only if it is owned — see `Inner::drop`).
#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct RenderPass(Arc<Inner>);
31
32impl VulkanObject for RenderPass {
33 type NativeVulkanObject = vks::core::VkRenderPass;
34
35 #[inline]
36 fn id(&self) -> u64 {
37 self.handle()
38 }
39
40 #[inline]
41 fn as_native_vulkan_object(&self) -> Self::NativeVulkanObject {
42 self.handle()
43 }
44
45 fn try_destroy(self) -> Result<(), TryDestroyError<Self>> {
46 let strong_count = Arc::strong_count(&self.0);
47 if strong_count == 1 {
48 Ok(())
49 }
50 else {
51 Err(TryDestroyError::new(self, TryDestroyErrorKind::InUse(Some(strong_count))))
52 }
53 }
54}
55
/// Parameters used to construct a `RenderPass` from a native Vulkan handle
/// via `FromNativeObject::from_native_object`.
pub struct FromNativeRenderPassParameters {
    /// `true` if the resulting `RenderPass` owns the handle and must destroy
    /// it when the last clone is dropped.
    pub owned: bool,

    /// The `Device` this render pass belongs to (used for the function loader
    /// and the `VkDevice` handle on destruction).
    pub device: Device,

    /// Allocator compatible with the one the render pass was created with;
    /// its callbacks are passed to `vkDestroyRenderPass` when owned.
    pub allocator: Option<Box<core::Allocator>>,
}
68
69impl FromNativeRenderPassParameters {
70 #[inline]
71 pub fn new(owned: bool, device: Device, allocator: Option<Box<core::Allocator>>) -> Self {
72 FromNativeRenderPassParameters {
73 owned: owned,
74 device: device,
75 allocator: allocator,
76 }
77 }
78}
79
80impl FromNativeObject for RenderPass {
81 type Parameters = FromNativeRenderPassParameters;
82
83 unsafe fn from_native_object(object: Self::NativeVulkanObject, params: Self::Parameters) -> Self {
84 RenderPass::new(object, params.owned, params.device, params.allocator.map(AllocatorHelper::new))
85 }
86}
87
88impl RenderPass {
89 pub(crate) fn new(handle: vks::core::VkRenderPass, owned: bool, device: Device, allocator: Option<AllocatorHelper>) -> Self {
90 RenderPass(Arc::new(Inner {
91 handle: handle,
92 owned: owned,
93 device: device,
94 allocator: allocator,
95 }))
96 }
97
98 #[inline]
99 pub(crate) fn handle(&self) -> vks::core::VkRenderPass {
100 self.0.handle
101 }
102
103 #[inline]
104 pub(crate) fn loader(&self) -> &vks::DeviceProcAddrLoader {
105 self.0.device.loader()
106 }
107
108 #[inline]
109 pub(crate) fn device_handle(&self) -> vks::core::VkDevice {
110 self.0.device.handle()
111 }
112
113 pub fn get_render_area_granularity(&self) -> core::Extent2D {
115 unsafe {
116 let mut granularity = mem::uninitialized();
117 self.loader().core.vkGetRenderAreaGranularity(self.device_handle(), self.handle(), &mut granularity);
118 (&granularity).into()
119 }
120 }
121}
122
/// Shared state behind `RenderPass` clones; destroys the Vulkan object on
/// drop when `owned` is set.
#[derive(Debug)]
struct Inner {
    handle: vks::core::VkRenderPass,
    owned: bool, // whether `Drop` calls `vkDestroyRenderPass`
    device: Device,
    allocator: Option<AllocatorHelper>, // callbacks passed to the destroy call
}
130
131impl Drop for Inner {
132 fn drop(&mut self) {
133 if self.owned {
134 let allocator = match self.allocator {
135 Some(ref allocator) => allocator.callbacks(),
136 None => ptr::null(),
137 };
138
139 unsafe {
140 self.device.loader().core.vkDestroyRenderPass(self.device.handle(), self.handle, allocator);
141 }
142 }
143 }
144}
145
// SAFETY(review): `Inner` is auto-`!Send`/`!Sync` only because it holds raw
// Vulkan handle/pointer types. The wrapper exposes no interior mutability and
// destruction happens exactly once via `Drop`. This assumes `Device` and
// `AllocatorHelper` are themselves safe to share across threads — TODO confirm.
unsafe impl Send for Inner { }

unsafe impl Sync for Inner { }
149
// Identity of a render pass is its raw Vulkan handle; the remaining fields
// (`owned`, `device`, `allocator`) deliberately do not take part in
// comparisons or hashing.
impl PartialEq for Inner {
    #[inline]
    fn eq(&self, other: &Self) -> bool {
        self.handle == other.handle
    }
}

impl Eq for Inner { }
158
159impl PartialOrd for Inner {
160 #[inline]
161 fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
162 self.handle.partial_cmp(&other.handle)
163 }
164}
165
166impl Ord for Inner {
167 #[inline]
168 fn cmp(&self, other: &Self) -> Ordering {
169 self.handle.cmp(&other.handle)
170 }
171}
172
173impl Hash for Inner {
174 #[inline]
175 fn hash<H: Hasher>(&self, state: &mut H) {
176 self.handle.hash(state);
177 }
178}