objc2_ar_kit/generated/ARFrame.rs

//! This file has been automatically generated by `objc2`'s `header-translator`.
//! DO NOT EDIT
use core::ffi::*;
use core::ptr::NonNull;
#[cfg(feature = "objc2")]
use objc2::__framework_prelude::*;
#[cfg(feature = "objc2-av-foundation")]
use objc2_av_foundation::*;
#[cfg(feature = "objc2-core-foundation")]
use objc2_core_foundation::*;
#[cfg(feature = "objc2-core-video")]
use objc2_core_video::*;
#[cfg(feature = "objc2-foundation")]
use objc2_foundation::*;
#[cfg(feature = "objc2-metal")]
use objc2_metal::*;
#[cfg(feature = "objc2-ui-kit")]
use objc2_ui_kit::*;

use crate::*;

/// Segmentation classes which define a pixel's semantic label.
///
/// When running a configuration with 'ARFrameSemanticPersonSegmentation', every pixel in the
/// segmentationBuffer on the ARFrame will conform to one of these classes.
///
/// See: -[ARConfiguration setFrameSemantics:]
///
/// See: -[ARFrame segmentationBuffer]
///
/// See also [Apple's documentation](https://developer.apple.com/documentation/arkit/arsegmentationclass?language=objc)
// NS_ENUM
#[repr(transparent)]
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord)]
pub struct ARSegmentationClass(pub u8);
impl ARSegmentationClass {
    #[doc(alias = "ARSegmentationClassNone")]
    pub const None: Self = Self(0);
    #[doc(alias = "ARSegmentationClassPerson")]
    pub const Person: Self = Self(255);
}

#[cfg(feature = "objc2")]
unsafe impl Encode for ARSegmentationClass {
    const ENCODING: Encoding = u8::ENCODING;
}

#[cfg(feature = "objc2")]
unsafe impl RefEncode for ARSegmentationClass {
    const ENCODING_REF: Encoding = Encoding::Pointer(&Self::ENCODING);
}
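
// Illustrative sketch (not part of the generated bindings): interpreting a raw sample
// read out of -[ARFrame segmentationBuffer]. Each pixel's u8 value is itself an
// `ARSegmentationClass`, so classification is a plain comparison against the
// constants declared above.
#[allow(dead_code)]
fn is_person_pixel(sample: u8) -> bool {
    ARSegmentationClass(sample) == ARSegmentationClass::Person
}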

/// A value describing the world mapping status for the area visible in a given frame.
///
/// See also [Apple's documentation](https://developer.apple.com/documentation/arkit/arworldmappingstatus?language=objc)
// NS_ENUM
#[cfg(feature = "objc2")]
#[repr(transparent)]
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord)]
pub struct ARWorldMappingStatus(pub NSInteger);
#[cfg(feature = "objc2")]
impl ARWorldMappingStatus {
    /// World mapping is not available.
    #[doc(alias = "ARWorldMappingStatusNotAvailable")]
    pub const NotAvailable: Self = Self(0);
    /// World mapping is available but has limited features.
    /// For the device's current position, the session’s world map is not recommended for relocalization.
    #[doc(alias = "ARWorldMappingStatusLimited")]
    pub const Limited: Self = Self(1);
    /// World mapping is actively extending the map with the user's motion.
    /// The world map will be relocalizable for previously visited areas but is still being updated for the current space.
    #[doc(alias = "ARWorldMappingStatusExtending")]
    pub const Extending: Self = Self(2);
    /// World mapping has adequately mapped the visible area.
    /// The map can be used to relocalize for the device's current position.
    #[doc(alias = "ARWorldMappingStatusMapped")]
    pub const Mapped: Self = Self(3);
}

#[cfg(feature = "objc2")]
unsafe impl Encode for ARWorldMappingStatus {
    const ENCODING: Encoding = NSInteger::ENCODING;
}

#[cfg(feature = "objc2")]
unsafe impl RefEncode for ARWorldMappingStatus {
    const ENCODING_REF: Encoding = Encoding::Pointer(&Self::ENCODING);
}
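
// Illustrative sketch (not part of the generated bindings): deciding, from the status
// documented above, whether more of the environment should be scanned before saving a
// world map. Only `Mapped` indicates the visible area is adequately mapped for
// relocalization at the device's current position.
#[cfg(feature = "objc2")]
#[allow(dead_code)]
fn should_keep_scanning(status: ARWorldMappingStatus) -> bool {
    status != ARWorldMappingStatus::Mapped
}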

#[cfg(feature = "objc2")]
extern_class!(
    /// An object encapsulating the state of everything being tracked for a given moment in time.
    ///
    /// The model provides a snapshot of all data needed to render a given frame.
    ///
    /// See also [Apple's documentation](https://developer.apple.com/documentation/arkit/arframe?language=objc)
    #[unsafe(super(NSObject))]
    #[derive(Debug, PartialEq, Eq, Hash)]
    #[cfg(feature = "objc2")]
    pub struct ARFrame;
);

#[cfg(feature = "objc2")]
unsafe impl Send for ARFrame {}

#[cfg(feature = "objc2")]
unsafe impl Sync for ARFrame {}

#[cfg(all(feature = "objc2", feature = "objc2-foundation"))]
extern_conformance!(
    unsafe impl NSCopying for ARFrame {}
);

#[cfg(all(feature = "objc2", feature = "objc2-foundation"))]
unsafe impl CopyingHelper for ARFrame {
    type Result = Self;
}

#[cfg(feature = "objc2")]
extern_conformance!(
    unsafe impl NSObjectProtocol for ARFrame {}
);

#[cfg(feature = "objc2")]
impl ARFrame {
    extern_methods!(
        #[cfg(feature = "objc2-foundation")]
        /// A timestamp identifying the frame.
        ///
        /// This property is not atomic.
        ///
        /// # Safety
        ///
        /// This might not be thread-safe.
        #[unsafe(method(timestamp))]
        #[unsafe(method_family = none)]
        pub unsafe fn timestamp(&self) -> NSTimeInterval;

        #[cfg(feature = "objc2-core-video")]
        /// The frame’s captured image.
        ///
        /// This property is not atomic.
        ///
        /// # Safety
        ///
        /// This might not be thread-safe.
        #[unsafe(method(capturedImage))]
        #[unsafe(method_family = none)]
        pub unsafe fn capturedImage(&self) -> Retained<CVPixelBuffer>;

        #[cfg(feature = "objc2-foundation")]
        /// A dictionary of EXIF metadata for the captured image.
        ///
        /// This property is not atomic.
        ///
        /// # Safety
        ///
        /// This might not be thread-safe.
        #[unsafe(method(exifData))]
        #[unsafe(method_family = none)]
        pub unsafe fn exifData(&self) -> Retained<NSDictionary<NSString, AnyObject>>;

        #[cfg(feature = "objc2-metal")]
        /// A tileable texture that contains image noise matching the current camera stream's
        /// noise properties.
        ///
        ///
        /// A camera stream depicts image noise that gives the captured image
        /// a grainy look and varies with light conditions.
        /// The variations are stored along the depth dimension of the camera grain texture
        /// and can be selected at runtime using the camera grain intensity of the current frame.
        ///
        /// This property is not atomic.
        ///
        /// # Safety
        ///
        /// This might not be thread-safe.
        #[unsafe(method(cameraGrainTexture))]
        #[unsafe(method_family = none)]
        pub unsafe fn cameraGrainTexture(&self)
            -> Option<Retained<ProtocolObject<dyn MTLTexture>>>;

        /// The frame’s camera grain intensity in the range 0 to 1.
        ///
        ///
        /// A camera stream depicts image noise that gives the captured image
        /// a grainy look and varies with light conditions.
        /// The camera grain intensity can be used to select a texture slice from the frame's
        /// camera grain texture.
        ///
        /// This property is not atomic.
        ///
        /// # Safety
        ///
        /// This might not be thread-safe.
        #[unsafe(method(cameraGrainIntensity))]
        #[unsafe(method_family = none)]
        pub unsafe fn cameraGrainIntensity(&self) -> c_float;
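
        // Illustrative sketch (plain comment, not generated): pairing the two camera
        // grain properties above. A renderer would typically sample the tileable grain
        // texture at a depth proportional to `cameraGrainIntensity` so the composited
        // noise matches the current capture conditions.
        //
        //     let grain = unsafe { frame.cameraGrainTexture() };       // Option<MTLTexture>
        //     let intensity = unsafe { frame.cameraGrainIntensity() }; // 0.0..=1.0
        //     if let Some(texture) = grain {
        //         // Pass `texture` and `intensity` to the shader that blends the grain
        //         // over rendered content; the slice index is roughly
        //         // intensity * (slice_count - 1), where `slice_count` is the texture's
        //         // depth (assumption about how the slices are laid out).
        //     }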

        #[cfg(feature = "objc2-av-foundation")]
        /// The frame’s captured depth data.
        ///
        /// Depth data is only provided with face tracking on frames where depth data was captured.
        ///
        /// This property is not atomic.
        ///
        /// # Safety
        ///
        /// This might not be thread-safe.
        #[unsafe(method(capturedDepthData))]
        #[unsafe(method_family = none)]
        pub unsafe fn capturedDepthData(&self) -> Option<Retained<AVDepthData>>;

        #[cfg(feature = "objc2-foundation")]
        /// A timestamp identifying the depth data.
        ///
        /// This property is not atomic.
        ///
        /// # Safety
        ///
        /// This might not be thread-safe.
        #[unsafe(method(capturedDepthDataTimestamp))]
        #[unsafe(method_family = none)]
        pub unsafe fn capturedDepthDataTimestamp(&self) -> NSTimeInterval;

        #[cfg(feature = "ARCamera")]
        /// The camera used to capture the frame’s image.
        ///
        /// The camera provides the device’s position and orientation as well as camera parameters.
        ///
        /// This property is not atomic.
        ///
        /// # Safety
        ///
        /// This might not be thread-safe.
        #[unsafe(method(camera))]
        #[unsafe(method_family = none)]
        pub unsafe fn camera(&self) -> Retained<ARCamera>;

        #[cfg(all(feature = "ARAnchor", feature = "objc2-foundation"))]
        /// A list of anchors in the scene.
        ///
        /// This property is not atomic.
        ///
        /// # Safety
        ///
        /// This might not be thread-safe.
        #[unsafe(method(anchors))]
        #[unsafe(method_family = none)]
        pub unsafe fn anchors(&self) -> Retained<NSArray<ARAnchor>>;

        #[cfg(feature = "ARLightEstimate")]
        /// A light estimate representing the light in the scene.
        ///
        /// Returns nil if there is no light estimation.
        ///
        /// This property is not atomic.
        ///
        /// # Safety
        ///
        /// This might not be thread-safe.
        #[unsafe(method(lightEstimate))]
        #[unsafe(method_family = none)]
        pub unsafe fn lightEstimate(&self) -> Option<Retained<ARLightEstimate>>;

        #[cfg(feature = "ARPointCloud")]
        /// Feature points in the scene with respect to the frame’s origin.
        ///
        /// The feature points are only provided for configurations using world tracking.
        ///
        /// This property is not atomic.
        ///
        /// # Safety
        ///
        /// This might not be thread-safe.
        #[unsafe(method(rawFeaturePoints))]
        #[unsafe(method_family = none)]
        pub unsafe fn rawFeaturePoints(&self) -> Option<Retained<ARPointCloud>>;

        /// The status of world mapping for the area visible to the frame.
        ///
        /// This can be used to identify the state of the world map for the visible area and whether additional scanning
        /// should be done before saving a world map.
        ///
        /// This property is not atomic.
        ///
        /// # Safety
        ///
        /// This might not be thread-safe.
        #[unsafe(method(worldMappingStatus))]
        #[unsafe(method_family = none)]
        pub unsafe fn worldMappingStatus(&self) -> ARWorldMappingStatus;

        #[cfg(feature = "objc2-core-video")]
        /// A buffer that represents the segmented content of the capturedImage.
        ///
        /// To identify the class a pixel has been assigned to, compare its intensity value with the values
        /// defined in `ARSegmentationClass`.
        ///
        /// See: ARSegmentationClass
        ///
        /// See: -[ARConfiguration setFrameSemantics:]
        ///
        /// This property is not atomic.
        ///
        /// # Safety
        ///
        /// This might not be thread-safe.
        #[unsafe(method(segmentationBuffer))]
        #[unsafe(method_family = none)]
        pub unsafe fn segmentationBuffer(&self) -> Option<Retained<CVPixelBuffer>>;

        #[cfg(feature = "objc2-core-video")]
        /// A buffer that represents the estimated depth values for a performed segmentation.
        ///
        /// For each non-background pixel in the segmentation buffer, the corresponding depth value can be accessed in this buffer.
        ///
        /// See: -[ARConfiguration setFrameSemantics:]
        ///
        /// See: -[ARFrame segmentationBuffer]
        ///
        /// This property is not atomic.
        ///
        /// # Safety
        ///
        /// This might not be thread-safe.
        #[unsafe(method(estimatedDepthData))]
        #[unsafe(method_family = none)]
        pub unsafe fn estimatedDepthData(&self) -> Option<Retained<CVPixelBuffer>>;
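
        // Illustrative sketch (plain comment, not generated): combining the two
        // person-segmentation buffers above. As documented, each non-background pixel
        // in `segmentationBuffer` has a corresponding depth value in
        // `estimatedDepthData`, so a person pixel (see the `is_person_pixel` sketch
        // near `ARSegmentationClass`) can be paired with its estimated depth.
        //
        //     let segmentation = unsafe { frame.segmentationBuffer() };
        //     let depth = unsafe { frame.estimatedDepthData() };
        //     if let (Some(_seg), Some(_depth)) = (segmentation, depth) {
        //         // Lock both pixel buffers, walk the segmentation plane, and for every
        //         // `ARSegmentationClass::Person` sample read the matching depth sample
        //         // (buffer access itself goes through objc2-core-video, omitted here).
        //     }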

        #[cfg(feature = "ARBody2D")]
        /// A detected body in the current frame.
        ///
        /// See: -[ARConfiguration setFrameSemantics:]
        ///
        /// This property is not atomic.
        ///
        /// # Safety
        ///
        /// This might not be thread-safe.
        #[unsafe(method(detectedBody))]
        #[unsafe(method_family = none)]
        pub unsafe fn detectedBody(&self) -> Option<Retained<ARBody2D>>;

        #[cfg(feature = "ARGeoTrackingTypes")]
        /// The status of geo tracking.
        ///
        /// This property is not atomic.
        ///
        /// # Safety
        ///
        /// This might not be thread-safe.
        #[unsafe(method(geoTrackingStatus))]
        #[unsafe(method_family = none)]
        pub unsafe fn geoTrackingStatus(&self) -> Option<Retained<ARGeoTrackingStatus>>;

        #[cfg(feature = "ARDepthData")]
        /// Scene depth data.
        ///
        /// See: ARFrameSemanticSceneDepth.
        ///
        /// See: -[ARConfiguration setFrameSemantics:]
        ///
        /// This property is not atomic.
        ///
        /// # Safety
        ///
        /// This might not be thread-safe.
        #[unsafe(method(sceneDepth))]
        #[unsafe(method_family = none)]
        pub unsafe fn sceneDepth(&self) -> Option<Retained<ARDepthData>>;

        #[cfg(feature = "ARDepthData")]
        /// Scene depth data, smoothed for temporal consistency.
        ///
        /// See: ARFrameSemanticSmoothedSceneDepth.
        ///
        /// See: -[ARConfiguration setFrameSemantics:]
        ///
        /// This property is not atomic.
        ///
        /// # Safety
        ///
        /// This might not be thread-safe.
        #[unsafe(method(smoothedSceneDepth))]
        #[unsafe(method_family = none)]
        pub unsafe fn smoothedSceneDepth(&self) -> Option<Retained<ARDepthData>>;
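
        // Illustrative sketch (plain comment, not generated): the two scene-depth
        // properties above are only populated when the corresponding frame semantics
        // (ARFrameSemanticSceneDepth / ARFrameSemanticSmoothedSceneDepth) were enabled
        // on the running configuration, which is why both return an Option.
        //
        //     // Prefer the temporally smoothed depth when available, otherwise fall
        //     // back to the raw per-frame estimate.
        //     let depth = unsafe { frame.smoothedSceneDepth() }
        //         .or_else(|| unsafe { frame.sceneDepth() });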

        #[cfg(all(
            feature = "ARHitTestResult",
            feature = "objc2-core-foundation",
            feature = "objc2-foundation"
        ))]
        /// Searches the frame for objects corresponding to a point in the captured image.
        ///
        ///
        /// A 2D point in the captured image’s coordinate space can refer to any point along a line segment
        /// in the 3D coordinate space. Hit-testing is the process of finding objects in the world located along this line segment.
        ///
        /// Parameter `point`: A point in the image-space coordinate system of the captured image.
        /// Values should range from (0,0) - upper left corner to (1,1) - lower right corner.
        ///
        /// Parameter `types`: The types of results to search for.
        ///
        /// Returns: An array of all hit-test results sorted from nearest to farthest.
        #[deprecated = "Use [ARSession raycast:]"]
        #[unsafe(method(hitTest:types:))]
        #[unsafe(method_family = none)]
        pub unsafe fn hitTest_types(
            &self,
            point: CGPoint,
            types: ARHitTestResultType,
        ) -> Retained<NSArray<ARHitTestResult>>;

        #[cfg(all(feature = "ARRaycastQuery", feature = "objc2-core-foundation"))]
        /// Creates a raycast query originating from the point on the captured image, aligned along the center of the field of view of the camera.
        ///
        /// A 2D point in the captured image’s coordinate space and the field of view of the frame's camera are used to create a ray in the 3D
        /// coordinate space originating at the point.
        ///
        /// Parameter `point`: A point in the image-space coordinate system of the captured image.
        /// Values should range from (0,0) - upper left corner to (1,1) - lower right corner.
        ///
        /// Parameter `target`: Type of target where the ray should terminate.
        ///
        /// Parameter `alignment`: Alignment of the target.
        #[unsafe(method(raycastQueryFromPoint:allowingTarget:alignment:))]
        #[unsafe(method_family = none)]
        pub unsafe fn raycastQueryFromPoint_allowingTarget_alignment(
            &self,
            point: CGPoint,
            target: ARRaycastTarget,
            alignment: ARRaycastTargetAlignment,
        ) -> Retained<ARRaycastQuery>;
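
        // Illustrative sketch (plain comment, not generated): building a query for the
        // center of the captured image. `EstimatedPlane` and `Any` are assumed to be the
        // variant names generated for ARRaycastTarget / ARRaycastTargetAlignment
        // elsewhere in this crate.
        //
        //     let query = unsafe {
        //         frame.raycastQueryFromPoint_allowingTarget_alignment(
        //             CGPoint { x: 0.5, y: 0.5 }, // image space: (0,0) top-left, (1,1) bottom-right
        //             ARRaycastTarget::EstimatedPlane,
        //             ARRaycastTargetAlignment::Any,
        //         )
        //     };
        //     // The query is then executed through the session's raycasting API.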

        #[cfg(all(feature = "objc2-core-foundation", feature = "objc2-ui-kit"))]
        /// Returns a display transform for the provided viewport size and orientation.
        ///
        ///
        /// The display transform can be used to convert normalized points in the image-space coordinate system
        /// of the captured image to normalized points in the view’s coordinate space. The transform provides the correct rotation
        /// and aspect-fill for presenting the captured image in the given orientation and size.
        ///
        /// Parameter `orientation`: The orientation of the viewport.
        ///
        /// Parameter `viewportSize`: The size of the viewport.
        #[unsafe(method(displayTransformForOrientation:viewportSize:))]
        #[unsafe(method_family = none)]
        pub unsafe fn displayTransformForOrientation_viewportSize(
            &self,
            orientation: UIInterfaceOrientation,
            viewport_size: CGSize,
        ) -> CGAffineTransform;
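
        // Illustrative sketch (plain comment, not generated): mapping a normalized
        // image-space point into the view's normalized coordinate space by applying the
        // returned affine transform manually (x' = a*x + c*y + tx, y' = b*x + d*y + ty).
        // `UIInterfaceOrientation::Portrait` and the CGSize literal are assumptions made
        // for the example.
        //
        //     let t = unsafe {
        //         frame.displayTransformForOrientation_viewportSize(
        //             UIInterfaceOrientation::Portrait,
        //             CGSize { width: 390.0, height: 844.0 },
        //         )
        //     };
        //     let (x, y) = (0.5, 0.5); // normalized image-space point
        //     let view_point = (t.a * x + t.c * y + t.tx, t.b * x + t.d * y + t.ty);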

        /// Unavailable
        #[unsafe(method(init))]
        #[unsafe(method_family = init)]
        pub unsafe fn init(this: Allocated<Self>) -> Retained<Self>;

        #[unsafe(method(new))]
        #[unsafe(method_family = new)]
        pub unsafe fn new() -> Retained<Self>;
    );
}
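
// Illustrative usage sketch (not generated): reading a few per-frame values from an
// `ARFrame` obtained elsewhere (e.g. in an `ARSession` delegate callback). Every call
// is wrapped in `unsafe` because the properties above are documented as possibly not
// thread-safe; the cfg gate mirrors the features required by the methods called here.
#[cfg(all(
    feature = "objc2",
    feature = "objc2-foundation",
    feature = "objc2-core-video",
    feature = "objc2-av-foundation"
))]
#[allow(dead_code)]
fn log_frame_summary(frame: &ARFrame) {
    let timestamp = unsafe { frame.timestamp() };
    let _image = unsafe { frame.capturedImage() };
    // Face-tracking depth is only attached to frames where it was actually captured,
    // hence the Option.
    let has_face_depth = unsafe { frame.capturedDepthData() }.is_some();
    println!("frame at {timestamp:.3}s, face depth: {has_face_depth}");
}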