//! This file has been automatically generated by `objc2`'s `header-translator`.
//! DO NOT EDIT
use core::ffi::*;
use core::ptr::NonNull;
#[cfg(feature = "objc2")]
use objc2::__framework_prelude::*;
#[cfg(feature = "objc2-av-foundation")]
use objc2_av_foundation::*;
#[cfg(feature = "objc2-core-foundation")]
use objc2_core_foundation::*;
#[cfg(feature = "objc2-core-video")]
use objc2_core_video::*;
#[cfg(feature = "objc2-foundation")]
use objc2_foundation::*;
#[cfg(feature = "objc2-metal")]
use objc2_metal::*;
#[cfg(feature = "objc2-ui-kit")]
use objc2_ui_kit::*;
use crate::*;
/// Per-pixel semantic label produced by person segmentation.
///
/// When a configuration is run with 'ARFrameSemanticPersonSegmentation',
/// every pixel of the segmentationBuffer on the ARFrame holds one of these
/// raw values.
///
/// See: -[ARConfiguration setFrameSemantics:]
///
/// See: -[ARFrame segmentationBuffer]
///
/// See also [Apple's documentation](https://developer.apple.com/documentation/arkit/arsegmentationclass?language=objc)
// NS_ENUM
#[repr(transparent)]
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord)]
pub struct ARSegmentationClass(pub u8);

impl ARSegmentationClass {
    /// The pixel was not classified (background).
    #[doc(alias = "ARSegmentationClassNone")]
    pub const None: Self = Self(0);

    /// The pixel was classified as belonging to a person.
    #[doc(alias = "ARSegmentationClassPerson")]
    pub const Person: Self = Self(255);
}
#[cfg(feature = "objc2")]
// SAFETY: `ARSegmentationClass` is `#[repr(transparent)]` over `u8`, so it
// has the same Objective-C type encoding as `u8`.
unsafe impl Encode for ARSegmentationClass {
const ENCODING: Encoding = u8::ENCODING;
}
#[cfg(feature = "objc2")]
// SAFETY: a reference to the type is encoded as a pointer to its value
// encoding, matching how the type is passed by reference over the FFI.
unsafe impl RefEncode for ARSegmentationClass {
const ENCODING_REF: Encoding = Encoding::Pointer(&Self::ENCODING);
}
/// A value describing the world mapping status for the area visible in a given frame.
///
/// See also [Apple's documentation](https://developer.apple.com/documentation/arkit/arworldmappingstatus?language=objc)
// NS_ENUM
#[cfg(feature = "objc2")]
#[repr(transparent)]
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord)]
pub struct ARWorldMappingStatus(pub NSInteger);

#[cfg(feature = "objc2")]
impl ARWorldMappingStatus {
    /// No world map is available.
    #[doc(alias = "ARWorldMappingStatusNotAvailable")]
    pub const NotAvailable: Self = Self(0);

    /// A world map exists but has limited features; for the device's current
    /// position, the session’s world map is not recommended for relocalization.
    #[doc(alias = "ARWorldMappingStatusLimited")]
    pub const Limited: Self = Self(1);

    /// The map is actively being extended with the user's motion; previously
    /// visited areas remain relocalizable while the current space is still
    /// being updated.
    #[doc(alias = "ARWorldMappingStatusExtending")]
    pub const Extending: Self = Self(2);

    /// The visible area has been adequately mapped; the map can be used to
    /// relocalize for the device's current position.
    #[doc(alias = "ARWorldMappingStatusMapped")]
    pub const Mapped: Self = Self(3);
}
#[cfg(feature = "objc2")]
// SAFETY: `ARWorldMappingStatus` is `#[repr(transparent)]` over `NSInteger`,
// so it shares `NSInteger`'s Objective-C type encoding.
unsafe impl Encode for ARWorldMappingStatus {
const ENCODING: Encoding = NSInteger::ENCODING;
}
#[cfg(feature = "objc2")]
// SAFETY: a reference to the type is encoded as a pointer to its value
// encoding, matching how the type is passed by reference over the FFI.
unsafe impl RefEncode for ARWorldMappingStatus {
const ENCODING_REF: Encoding = Encoding::Pointer(&Self::ENCODING);
}
#[cfg(feature = "objc2")]
// Declares the Objective-C class `ARFrame` as a subclass of `NSObject`;
// the `extern_class!` macro generates the required `ClassType` machinery.
extern_class!(
/// An object encapsulating the state of everything being tracked for a given moment in time.
///
/// The model provides a snapshot of all data needed to render a given frame.
///
/// See also [Apple's documentation](https://developer.apple.com/documentation/arkit/arframe?language=objc)
#[unsafe(super(NSObject))]
#[derive(Debug, PartialEq, Eq, Hash)]
#[cfg(feature = "objc2")]
pub struct ARFrame;
);
#[cfg(feature = "objc2")]
// SAFETY: generated from the ARKit headers, which declare ARFrame as safe to
// send across threads. NOTE(review): relies on the upstream header
// annotations; not independently verifiable from this file.
unsafe impl Send for ARFrame {}
#[cfg(feature = "objc2")]
// SAFETY: same upstream annotation as `Send` above — shared references are
// presumed usable from multiple threads. NOTE(review): confirm against the
// ARKit headers if auditing thread-safety.
unsafe impl Sync for ARFrame {}
#[cfg(all(feature = "objc2", feature = "objc2-foundation"))]
extern_conformance!(
unsafe impl NSCopying for ARFrame {}
);
#[cfg(all(feature = "objc2", feature = "objc2-foundation"))]
// Copying an `ARFrame` yields another `ARFrame` (`-copy` returns the same
// class), which is what `CopyingHelper` records for typed `copy()` calls.
unsafe impl CopyingHelper for ARFrame {
type Result = Self;
}
#[cfg(feature = "objc2")]
extern_conformance!(
unsafe impl NSObjectProtocol for ARFrame {}
);
#[cfg(feature = "objc2")]
impl ARFrame {
// NOTE(review): every method below is an auto-generated binding to the
// corresponding Objective-C property/method on ARFrame. Each is `unsafe`
// because the underlying properties are declared non-atomic, so there is
// no thread-safety guarantee when reading them (see per-method notes).
extern_methods!(
#[cfg(feature = "objc2-foundation")]
/// A timestamp identifying the frame.
///
/// This property is not atomic.
///
/// # Safety
///
/// This might not be thread-safe.
#[unsafe(method(timestamp))]
#[unsafe(method_family = none)]
pub unsafe fn timestamp(&self) -> NSTimeInterval;
#[cfg(feature = "objc2-core-video")]
/// The frame’s captured image.
///
/// This property is not atomic.
///
/// # Safety
///
/// This might not be thread-safe.
#[unsafe(method(capturedImage))]
#[unsafe(method_family = none)]
pub unsafe fn capturedImage(&self) -> Retained<CVPixelBuffer>;
#[cfg(feature = "objc2-foundation")]
/// A dictionary of EXIF metadata for the captured image.
///
/// This property is not atomic.
///
/// # Safety
///
/// This might not be thread-safe.
#[unsafe(method(exifData))]
#[unsafe(method_family = none)]
pub unsafe fn exifData(&self) -> Retained<NSDictionary<NSString, AnyObject>>;
#[cfg(feature = "objc2-metal")]
/// A tileable texture that contains image noise matching the current camera streams
/// noise properties.
///
///
/// A camera stream depicts image noise that gives the captured image
/// a grainy look and varies with light conditions.
/// The variations are stored along the depth dimension of the camera grain texture
/// and can be selected at runtime using the camera grain intensity of the current frame.
///
/// This property is not atomic.
///
/// # Safety
///
/// This might not be thread-safe.
#[unsafe(method(cameraGrainTexture))]
#[unsafe(method_family = none)]
pub unsafe fn cameraGrainTexture(&self)
-> Option<Retained<ProtocolObject<dyn MTLTexture>>>;
/// The frame’s camera grain intensity in range 0 to 1.
///
///
/// A camera stream depicts image noise that gives the captured image
/// a grainy look and varies with light conditions.
/// The camera grain intensity can be used to select a texture slice from the frames
/// camera grain texture.
///
/// This property is not atomic.
///
/// # Safety
///
/// This might not be thread-safe.
#[unsafe(method(cameraGrainIntensity))]
#[unsafe(method_family = none)]
pub unsafe fn cameraGrainIntensity(&self) -> c_float;
#[cfg(feature = "objc2-av-foundation")]
/// The frame’s captured depth data.
///
/// Depth data is only provided with face tracking on frames where depth data was captured.
///
/// This property is not atomic.
///
/// # Safety
///
/// This might not be thread-safe.
#[unsafe(method(capturedDepthData))]
#[unsafe(method_family = none)]
pub unsafe fn capturedDepthData(&self) -> Option<Retained<AVDepthData>>;
#[cfg(feature = "objc2-foundation")]
/// A timestamp identifying the depth data.
///
/// This property is not atomic.
///
/// # Safety
///
/// This might not be thread-safe.
#[unsafe(method(capturedDepthDataTimestamp))]
#[unsafe(method_family = none)]
pub unsafe fn capturedDepthDataTimestamp(&self) -> NSTimeInterval;
#[cfg(feature = "ARCamera")]
/// The camera used to capture the frame’s image.
///
/// The camera provides the device’s position and orientation as well as camera parameters.
///
/// This property is not atomic.
///
/// # Safety
///
/// This might not be thread-safe.
#[unsafe(method(camera))]
#[unsafe(method_family = none)]
pub unsafe fn camera(&self) -> Retained<ARCamera>;
#[cfg(all(feature = "ARAnchor", feature = "objc2-foundation"))]
/// A list of anchors in the scene.
///
/// This property is not atomic.
///
/// # Safety
///
/// This might not be thread-safe.
#[unsafe(method(anchors))]
#[unsafe(method_family = none)]
pub unsafe fn anchors(&self) -> Retained<NSArray<ARAnchor>>;
#[cfg(feature = "ARLightEstimate")]
/// A light estimate representing the light in the scene.
///
/// Returns nil if there is no light estimation.
///
/// This property is not atomic.
///
/// # Safety
///
/// This might not be thread-safe.
#[unsafe(method(lightEstimate))]
#[unsafe(method_family = none)]
pub unsafe fn lightEstimate(&self) -> Option<Retained<ARLightEstimate>>;
#[cfg(feature = "ARPointCloud")]
/// Feature points in the scene with respect to the frame’s origin.
///
/// The feature points are only provided for configurations using world tracking.
///
/// This property is not atomic.
///
/// # Safety
///
/// This might not be thread-safe.
#[unsafe(method(rawFeaturePoints))]
#[unsafe(method_family = none)]
pub unsafe fn rawFeaturePoints(&self) -> Option<Retained<ARPointCloud>>;
/// The status of world mapping for the area visible to the frame.
///
/// This can be used to identify the state of the world map for the visible area and if additional scanning
/// should be done before saving a world map.
///
/// This property is not atomic.
///
/// # Safety
///
/// This might not be thread-safe.
#[unsafe(method(worldMappingStatus))]
#[unsafe(method_family = none)]
pub unsafe fn worldMappingStatus(&self) -> ARWorldMappingStatus;
#[cfg(feature = "objc2-core-video")]
/// A buffer that represents the segmented content of the capturedImage.
///
/// In order to identify to which class a pixel has been classified one needs to compare its intensity value with the values
/// found in `ARSegmentationClass`.
///
/// See: ARSegmentationClass
///
/// See: -[ARConfiguration setFrameSemantics:]
///
/// This property is not atomic.
///
/// # Safety
///
/// This might not be thread-safe.
#[unsafe(method(segmentationBuffer))]
#[unsafe(method_family = none)]
pub unsafe fn segmentationBuffer(&self) -> Option<Retained<CVPixelBuffer>>;
#[cfg(feature = "objc2-core-video")]
/// A buffer that represents the estimated depth values for a performed segmentation.
///
/// For each non-background pixel in the segmentation buffer the corresponding depth value can be accessed in this buffer.
///
/// See: -[ARConfiguration setFrameSemantics:]
///
/// See: -[ARFrame segmentationBuffer]
///
/// This property is not atomic.
///
/// # Safety
///
/// This might not be thread-safe.
#[unsafe(method(estimatedDepthData))]
#[unsafe(method_family = none)]
pub unsafe fn estimatedDepthData(&self) -> Option<Retained<CVPixelBuffer>>;
#[cfg(feature = "ARBody2D")]
/// A detected body in the current frame.
///
/// See: -[ARConfiguration setFrameSemantics:]
///
/// This property is not atomic.
///
/// # Safety
///
/// This might not be thread-safe.
#[unsafe(method(detectedBody))]
#[unsafe(method_family = none)]
pub unsafe fn detectedBody(&self) -> Option<Retained<ARBody2D>>;
#[cfg(feature = "ARGeoTrackingTypes")]
/// The status of geo tracking.
///
/// This property is not atomic.
///
/// # Safety
///
/// This might not be thread-safe.
#[unsafe(method(geoTrackingStatus))]
#[unsafe(method_family = none)]
pub unsafe fn geoTrackingStatus(&self) -> Option<Retained<ARGeoTrackingStatus>>;
#[cfg(feature = "ARDepthData")]
/// Scene depth data.
///
/// See: ARFrameSemanticSceneDepth.
///
/// See: -[ARConfiguration setFrameSemantics:]
///
/// This property is not atomic.
///
/// # Safety
///
/// This might not be thread-safe.
#[unsafe(method(sceneDepth))]
#[unsafe(method_family = none)]
pub unsafe fn sceneDepth(&self) -> Option<Retained<ARDepthData>>;
#[cfg(feature = "ARDepthData")]
/// Scene depth data, smoothed for temporal consistency.
///
/// See: ARFrameSemanticSmoothedSceneDepth.
///
/// See: -[ARConfiguration setFrameSemantics:]
///
/// This property is not atomic.
///
/// # Safety
///
/// This might not be thread-safe.
#[unsafe(method(smoothedSceneDepth))]
#[unsafe(method_family = none)]
pub unsafe fn smoothedSceneDepth(&self) -> Option<Retained<ARDepthData>>;
#[cfg(all(
feature = "ARHitTestResult",
feature = "objc2-core-foundation",
feature = "objc2-foundation"
))]
/// Searches the frame for objects corresponding to a point in the captured image.
///
///
/// A 2D point in the captured image’s coordinate space can refer to any point along a line segment
/// in the 3D coordinate space. Hit-testing is the process of finding objects in the world located along this line segment.
///
/// Parameter `point`: A point in the image-space coordinate system of the captured image.
/// Values should range from (0,0) - upper left corner to (1,1) - lower right corner.
///
/// Parameter `types`: The types of results to search for.
///
/// Returns: An array of all hit-test results sorted from nearest to farthest.
#[deprecated = "Use [ARSession raycast:]"]
#[unsafe(method(hitTest:types:))]
#[unsafe(method_family = none)]
pub unsafe fn hitTest_types(
&self,
point: CGPoint,
types: ARHitTestResultType,
) -> Retained<NSArray<ARHitTestResult>>;
#[cfg(all(feature = "ARRaycastQuery", feature = "objc2-core-foundation"))]
/// Creates a raycast query originating from the point on the captured image, aligned along the center of the field of view of the camera.
///
/// A 2D point in the captured image’s coordinate space and the field of view of the frame's camera is used to create a ray in the 3D
/// cooridnate space originating at the point.
///
/// Parameter `point`: A point in the image-space coordinate system of the captured image.
/// Values should range from (0,0) - upper left corner to (1,1) - lower right corner.
///
/// Parameter `target`: Type of target where the ray should terminate.
///
/// Parameter `alignment`: Alignment of the target.
#[unsafe(method(raycastQueryFromPoint:allowingTarget:alignment:))]
#[unsafe(method_family = none)]
pub unsafe fn raycastQueryFromPoint_allowingTarget_alignment(
&self,
point: CGPoint,
target: ARRaycastTarget,
alignment: ARRaycastTargetAlignment,
) -> Retained<ARRaycastQuery>;
#[cfg(all(feature = "objc2-core-foundation", feature = "objc2-ui-kit"))]
/// Returns a display transform for the provided viewport size and orientation.
///
///
/// The display transform can be used to convert normalized points in the image-space coordinate system
/// of the captured image to normalized points in the view’s coordinate space. The transform provides the correct rotation
/// and aspect-fill for presenting the captured image in the given orientation and size.
///
/// Parameter `orientation`: The orientation of the viewport.
///
/// Parameter `viewportSize`: The size of the viewport.
#[unsafe(method(displayTransformForOrientation:viewportSize:))]
#[unsafe(method_family = none)]
pub unsafe fn displayTransformForOrientation_viewportSize(
&self,
orientation: UIInterfaceOrientation,
viewport_size: CGSize,
) -> CGAffineTransform;
/// Unavailable
// NOTE(review): `init` and `new` are documented "Unavailable" by the
// generator — frames are presumably vended by the AR session rather than
// constructed directly (confirm against ARKit headers). The declarations
// are still emitted; calling them is a programmer error.
#[unsafe(method(init))]
#[unsafe(method_family = init)]
pub unsafe fn init(this: Allocated<Self>) -> Retained<Self>;
#[unsafe(method(new))]
#[unsafe(method_family = new)]
pub unsafe fn new() -> Retained<Self>;
);
}