#![warn(
    trivial_casts,
    trivial_numeric_casts,
    unused_extern_crates,
    unused_qualifications
)]

#[macro_use]
mod macros;

pub mod backend {
    pub use gfx_backend_empty::Backend as Empty;

    #[cfg(windows)]
    pub use gfx_backend_dx11::Backend as Dx11;
    #[cfg(windows)]
    pub use gfx_backend_dx12::Backend as Dx12;
    #[cfg(any(target_os = "ios", target_os = "macos"))]
    pub use gfx_backend_metal::Backend as Metal;
    #[cfg(any(
        not(any(target_os = "ios", target_os = "macos")),
        feature = "gfx-backend-vulkan"
    ))]
    pub use gfx_backend_vulkan::Backend as Vulkan;
}

pub mod binding_model;
pub mod command;
mod conv;
pub mod device;
pub mod hub;
pub mod id;
pub mod instance;
pub mod logging;
pub mod pipeline;
pub mod power;
pub mod resource;
pub mod swap_chain;
mod track;
mod validation;

pub use hal::pso::read_spirv;

#[cfg(test)]
use loom::sync::atomic;
#[cfg(not(test))]
use std::sync::atomic;

use atomic::{AtomicUsize, Ordering};

use std::{os::raw::c_char, ptr};

const MAX_BIND_GROUPS: usize = 8;

/// Sequence number of a queue submission, used to track when GPU work
/// referencing a resource has completed.
type SubmissionIndex = usize;
/// Position of a resource within the storage of its hub.
type Index = u32;
/// Generation counter that guards against reuse of a stale `Index`.
type Epoch = u32;

pub type RawString = *const c_char;

/// Atomic reference count handle around a heap-allocated counter.
#[derive(Debug)]
struct RefCount(ptr::NonNull<AtomicUsize>);

unsafe impl Send for RefCount {}
unsafe impl Sync for RefCount {}

impl RefCount {
    const MAX: usize = 1 << 24;

    fn load(&self) -> usize {
        unsafe { self.0.as_ref() }.load(Ordering::Acquire)
    }

    /// Works like `std::mem::drop`, except that it returns whether this was
    /// the last reference and the counter was actually freed.
    #[cfg(test)]
    fn rich_drop_outer(self) -> bool {
        unsafe { std::mem::ManuallyDrop::new(self).rich_drop_inner() }
    }

    /// Decrements the count; if it reaches zero, frees the counter and
    /// returns `true`. Unsafe because a `true` result leaves `self` dangling.
    unsafe fn rich_drop_inner(&mut self) -> bool {
        if self.0.as_ref().fetch_sub(1, Ordering::AcqRel) == 1 {
            let _ = Box::from_raw(self.0.as_ptr());
            true
        } else {
            false
        }
    }
}

impl Clone for RefCount {
    fn clone(&self) -> Self {
        let old_size = unsafe { self.0.as_ref() }.fetch_add(1, Ordering::Release);
        // Guard against the count ever approaching overflow.
        assert!(old_size < Self::MAX);
        RefCount(self.0)
    }
}

impl Drop for RefCount {
    fn drop(&mut self) {
        unsafe {
            self.rich_drop_inner();
        }
    }
}
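
// A minimal sanity check (not part of the original test suite): cloning bumps
// the shared counter and `load` observes it. Wrapped in `loom::model` because
// test builds map `atomic` to loom's types, which must run inside a model.
#[cfg(test)]
#[test]
fn ref_count_clone_load() {
    loom::model(|| {
        let bx = Box::new(AtomicUsize::new(1));
        let rc = ptr::NonNull::new(Box::into_raw(bx)).map(RefCount).unwrap();
        let rc2 = rc.clone();
        assert_eq!(rc.load(), 2);
        drop(rc2); // count drops back to 1; nothing is freed yet
        assert_eq!(rc.load(), 1);
    });
}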

#[cfg(test)]
#[test]
fn loom() {
    loom::model(move || {
        let bx = Box::new(AtomicUsize::new(1));
        let ref_count_main = ptr::NonNull::new(Box::into_raw(bx)).map(RefCount).unwrap();
        let ref_count_spawned = ref_count_main.clone();

        let join_handle = loom::thread::spawn(move || {
            let _ = ref_count_spawned.clone();
            ref_count_spawned.rich_drop_outer()
        });

        let dropped_in_main = ref_count_main.rich_drop_outer();
        let dropped_in_spawned = join_handle.join().unwrap();
        assert_ne!(
            dropped_in_main, dropped_in_spawned,
            "must drop exactly once"
        );
    });
}

/// Reference counter whose owner hands out additional `RefCount`s through
/// explicit `inc`/`dec`/`add_ref` calls rather than through `Clone`.
#[derive(Debug)]
struct MultiRefCount(ptr::NonNull<AtomicUsize>);

unsafe impl Send for MultiRefCount {}
unsafe impl Sync for MultiRefCount {}

impl MultiRefCount {
    fn new() -> Self {
        let bx = Box::new(AtomicUsize::new(1));
        MultiRefCount(unsafe { ptr::NonNull::new_unchecked(Box::into_raw(bx)) })
    }

    fn inc(&self) {
        unsafe { self.0.as_ref() }.fetch_add(1, Ordering::Release);
    }

    fn add_ref(&self) -> RefCount {
        self.inc();
        RefCount(self.0)
    }

    /// Decrements the count. If it would reach zero, bumps it back to one and
    /// returns a `RefCount` whose eventual drop frees the allocation.
    fn dec(&self) -> Option<RefCount> {
        match unsafe { self.0.as_ref() }.fetch_sub(1, Ordering::AcqRel) {
            0 => unreachable!(),
            1 => Some(self.add_ref()),
            _ => None,
        }
    }
}

impl Drop for MultiRefCount {
    fn drop(&mut self) {
        // Intentionally empty: the allocation is owned by the final `RefCount`
        // handed out by `dec`, whose drop performs the actual free.
    }
}
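
// A minimal sketch (not part of the original test suite) of the intended life
// cycle: `dec` returns `Some` only on the final decrement, and the `RefCount`
// it hands back is what ultimately frees the counter.
#[cfg(test)]
#[test]
fn multi_ref_count_cycle() {
    loom::model(|| {
        let multi = MultiRefCount::new(); // count = 1
        let extra = multi.add_ref(); // count = 2
        assert!(multi.dec().is_none()); // count = 1, not yet the final ref
        drop(extra); // count = 0, `RefCount::drop` frees the allocation
        drop(multi); // no-op drop; the dangling pointer is never dereferenced
    });
}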

/// Tracks the liveness of a resource: the user-facing reference count plus
/// the index of the last queue submission that used it.
#[derive(Debug)]
struct LifeGuard {
    ref_count: Option<RefCount>,
    submission_index: AtomicUsize,
}

impl LifeGuard {
    fn new() -> Self {
        let bx = Box::new(AtomicUsize::new(1));
        LifeGuard {
            ref_count: ptr::NonNull::new(Box::into_raw(bx)).map(RefCount),
            submission_index: AtomicUsize::new(0),
        }
    }

    fn add_ref(&self) -> RefCount {
        self.ref_count.clone().unwrap()
    }

    /// Records that the resource is used by the given submission.
    /// Returns `true` if the user still holds a reference to the resource.
    fn use_at(&self, submit_index: SubmissionIndex) -> bool {
        self.submission_index.store(submit_index, Ordering::Release);
        self.ref_count.is_some()
    }
}
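
// A minimal sketch (not part of the original test suite) of the `LifeGuard`
// bookkeeping: `use_at` records the submission index and reports whether a
// user reference is still held. Wrapped in `loom::model` since test builds
// use loom atomics.
#[cfg(test)]
#[test]
fn life_guard_use_at() {
    loom::model(|| {
        let guard = LifeGuard::new();
        let user_ref = guard.add_ref();
        assert!(guard.use_at(3));
        assert_eq!(guard.submission_index.load(Ordering::Acquire), 3);
        drop(user_ref); // the guard's own `ref_count` still keeps it alive
    });
}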

#[derive(Clone, Debug)]
struct Stored<T> {
    value: T,
    ref_count: RefCount,
}

#[derive(Clone, Copy, Debug)]
struct PrivateFeatures {
    shader_validation: bool,
    anisotropic_filtering: bool,
    texture_d24_s8: bool,
}

#[macro_export]
macro_rules! gfx_select {
    ($id:expr => $global:ident.$method:ident( $($param:expr),+ )) => {
        match $id.backend() {
            #[cfg(any(not(any(target_os = "ios", target_os = "macos")), feature = "gfx-backend-vulkan"))]
            wgt::Backend::Vulkan => $global.$method::<$crate::backend::Vulkan>( $($param),+ ),
            #[cfg(any(target_os = "ios", target_os = "macos"))]
            wgt::Backend::Metal => $global.$method::<$crate::backend::Metal>( $($param),+ ),
            #[cfg(windows)]
            wgt::Backend::Dx12 => $global.$method::<$crate::backend::Dx12>( $($param),+ ),
            #[cfg(windows)]
            wgt::Backend::Dx11 => $global.$method::<$crate::backend::Dx11>( $($param),+ ),
            _ => unreachable!()
        }
    };
}
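
// Illustrative expansion of `gfx_select!` (`global` and `device_create_buffer`
// are hypothetical names, not defined in this module):
//
//     gfx_select!(device_id => global.device_create_buffer(device_id, &desc))
//
// matches on `device_id.backend()` and forwards to
// `global.device_create_buffer::<backend::Vulkan>(device_id, &desc)`,
// `::<backend::Metal>(...)`, etc., for whichever backends are compiled in.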

/// Hash map used internally, keyed with `FxHasher` (a fast,
/// non-cryptographic hash).
type FastHashMap<K, V> =
    std::collections::HashMap<K, V, std::hash::BuildHasherDefault<fxhash::FxHasher>>;

#[test]
fn test_default_limits() {
    let limits = wgt::Limits::default();
    assert!(limits.max_bind_groups <= MAX_BIND_GROUPS as u32);
}