1use FromNativeObject;
16use TryDestroyError;
17use TryDestroyErrorKind;
18use VulkanObject;
19use core::allocator_helper::AllocatorHelper;
20use core::{self, Device};
21use std::cmp::Ordering;
22use std::hash::{Hash, Hasher};
23use std::ptr;
24use std::sync::Arc;
25use vks;
26
/// Owned or borrowed wrapper around a Vulkan `VkSampler` handle.
///
/// Cloning is cheap: all clones share one reference-counted `Inner`.
/// Equality, ordering, and hashing are derived through `Inner`, which
/// compares by the raw handle only.
#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct Sampler(Arc<Inner>);
30
31impl VulkanObject for Sampler {
32 type NativeVulkanObject = vks::core::VkSampler;
33
34 #[inline]
35 fn id(&self) -> u64 {
36 self.handle()
37 }
38
39 #[inline]
40 fn as_native_vulkan_object(&self) -> Self::NativeVulkanObject {
41 self.handle()
42 }
43
44 fn try_destroy(self) -> Result<(), TryDestroyError<Self>> {
45 let strong_count = Arc::strong_count(&self.0);
46 if strong_count == 1 {
47 Ok(())
48 }
49 else {
50 Err(TryDestroyError::new(self, TryDestroyErrorKind::InUse(Some(strong_count))))
51 }
52 }
53}
54
/// Parameters for wrapping an existing native `VkSampler` in a [`Sampler`]
/// via `FromNativeObject::from_native_object`.
pub struct FromNativeSamplerParameters {
    /// If `true`, the resulting `Sampler` destroys the handle when its last
    /// clone is dropped.
    pub owned: bool,

    /// The device used when destroying the handle.
    pub device: Device,

    /// Allocation callbacks forwarded to `vkDestroySampler`, if any.
    pub allocator: Option<Box<core::Allocator>>,
}
67
68impl FromNativeSamplerParameters {
69 #[inline]
70 pub fn new(owned: bool, device: Device, allocator: Option<Box<core::Allocator>>) -> Self {
71 FromNativeSamplerParameters {
72 owned: owned,
73 device: device,
74 allocator: allocator,
75 }
76 }
77}
78
79impl FromNativeObject for Sampler {
80 type Parameters = FromNativeSamplerParameters;
81
82 unsafe fn from_native_object(object: Self::NativeVulkanObject, params: Self::Parameters) -> Self {
83 Sampler::new(object, params.owned, params.device, params.allocator.map(AllocatorHelper::new))
84 }
85}
86
87impl Sampler {
88 pub(crate) fn new(handle: vks::core::VkSampler, owned: bool, device: Device, allocator: Option<AllocatorHelper>) -> Self {
89 Sampler(Arc::new(Inner {
90 handle: handle,
91 owned: owned,
92 device: device,
93 allocator: allocator,
94 }))
95 }
96
97 #[inline]
98 pub(crate) fn handle(&self) -> vks::core::VkSampler {
99 self.0.handle
100 }
101}
102
/// Reference-counted state shared by all clones of a [`Sampler`].
#[derive(Debug)]
struct Inner {
    handle: vks::core::VkSampler, // raw Vulkan handle; sole input to eq/ord/hash
    owned: bool,                  // destroy `handle` in `Drop`?
    device: Device,               // device whose loader destroys the handle
    allocator: Option<AllocatorHelper>, // custom allocation callbacks, if any
}
110
111impl Drop for Inner {
112 fn drop(&mut self) {
113 if self.owned {
114 let allocator = match self.allocator {
115 Some(ref allocator) => allocator.callbacks(),
116 None => ptr::null(),
117 };
118
119 unsafe {
120 self.device.loader().core.vkDestroySampler(self.device.handle(), self.handle, allocator);
121 }
122 }
123 }
124}
125
// SAFETY(review): asserts the raw `VkSampler` handle, `Device`, and allocator
// callbacks may move between threads; presumably sound because destruction is
// funneled through `Drop` on the last `Arc` owner — TODO confirm the
// allocator callbacks are thread-safe.
unsafe impl Send for Inner { }

// SAFETY(review): `&Inner` only grants read access to plain fields here;
// NOTE(review): relies on the Vulkan loader calls being callable from any
// thread — verify against the crate's synchronization policy.
unsafe impl Sync for Inner { }
129
130impl PartialEq for Inner {
131 #[inline]
132 fn eq(&self, other: &Self) -> bool {
133 self.handle == other.handle
134 }
135}
136
137impl Eq for Inner { }
138
139impl PartialOrd for Inner {
140 #[inline]
141 fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
142 self.handle.partial_cmp(&other.handle)
143 }
144}
145
146impl Ord for Inner {
147 #[inline]
148 fn cmp(&self, other: &Self) -> Ordering {
149 self.handle.cmp(&other.handle)
150 }
151}
152
153impl Hash for Inner {
154 #[inline]
155 fn hash<H: Hasher>(&self, state: &mut H) {
156 self.handle.hash(state);
157 }
158}