// objc2_ar_kit/generated/ARConfiguration.rs

1//! This file has been automatically generated by `objc2`'s `header-translator`.
2//! DO NOT EDIT
3use core::ffi::*;
4use core::ptr::NonNull;
5#[cfg(feature = "objc2")]
6use objc2::__framework_prelude::*;
7#[cfg(feature = "objc2-av-foundation")]
8use objc2_av_foundation::*;
9#[cfg(feature = "objc2-core-location")]
10use objc2_core_location::*;
11#[cfg(feature = "objc2-foundation")]
12use objc2_foundation::*;
13
14use crate::*;
15
/// Option set indicating semantic understanding types of the image frame.
///
/// See also [Apple's documentation](https://developer.apple.com/documentation/arkit/arframesemantics?language=objc)
// NS_OPTIONS
#[cfg(feature = "objc2")]
#[repr(transparent)]
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord)]
pub struct ARFrameSemantics(pub NSUInteger);
#[cfg(feature = "objc2")]
bitflags::bitflags! {
    impl ARFrameSemantics: NSUInteger {
        /// No semantic operation is run.
        #[doc(alias = "ARFrameSemanticNone")]
        const None = 0;
        /// Person segmentation.
        ///
        /// A pixel in the image frame that gets classified as person will have an intensity value equal to 'ARSegmentationClassPerson'.
        ///
        /// See: -[ARFrame segmentationBuffer]
        ///
        /// See: ARSegmentationClass
        #[doc(alias = "ARFrameSemanticPersonSegmentation")]
        const PersonSegmentation = 1<<0;
        /// Person segmentation with depth.
        ///
        /// A pixel in the image frame that gets classified as person will have an intensity value equal to 'ARSegmentationClassPerson'.
        /// Additionally, every pixel in the image frame that gets classified as person will also have a depth value.
        ///
        /// See: -[ARFrame estimatedDepthData]
        ///
        /// See: -[ARFrame segmentationBuffer]
        #[doc(alias = "ARFrameSemanticPersonSegmentationWithDepth")]
        // Composite flag: includes the PersonSegmentation bit (1<<0) in
        // addition to the depth bit, matching the Objective-C NS_OPTIONS value.
        const PersonSegmentationWithDepth = (1<<1)|(1<<0);
        /// Body detection.
        ///
        /// Once activated an ARFrame will contain information about a detected body.
        ///
        /// See: -[ARFrame detectedBody]
        ///
        /// See: ARBody2D
        #[doc(alias = "ARFrameSemanticBodyDetection")]
        const BodyDetection = 1<<2;
        /// Scene Depth.
        ///
        /// Each capturedImage will have an associated scene depth data.
        ///
        /// See: -[ARFrame sceneDepth]
        #[doc(alias = "ARFrameSemanticSceneDepth")]
        const SceneDepth = 1<<3;
        /// Smoothed Scene Depth.
        ///
        /// Each capturedImage will have an associated scene depth data that is temporally smoothed.
        ///
        /// See: -[ARFrame smoothedSceneDepth]
        #[doc(alias = "ARFrameSemanticSmoothedSceneDepth")]
        const SmoothedSceneDepth = 1<<4;
    }
}

// The option set crosses the Objective-C boundary as its raw NSUInteger,
// so it shares that type's encoding.
#[cfg(feature = "objc2")]
unsafe impl Encode for ARFrameSemantics {
    const ENCODING: Encoding = NSUInteger::ENCODING;
}

// References are encoded as a pointer to the underlying integer encoding.
#[cfg(feature = "objc2")]
unsafe impl RefEncode for ARFrameSemantics {
    const ENCODING_REF: Encoding = Encoding::Pointer(&Self::ENCODING);
}
84
/// Enum constants for indicating the world alignment.
///
/// See also [Apple's documentation](https://developer.apple.com/documentation/arkit/arworldalignment?language=objc)
// NS_ENUM
#[cfg(feature = "objc2")]
#[repr(transparent)]
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord)]
pub struct ARWorldAlignment(pub NSInteger);
#[cfg(feature = "objc2")]
impl ARWorldAlignment {
    /// Aligns the world with gravity that is defined by vector (0, -1, 0).
    #[doc(alias = "ARWorldAlignmentGravity")]
    pub const Gravity: Self = Self(0);
    /// Aligns the world with gravity that is defined by the vector (0, -1, 0)
    /// and heading (w.r.t. True North) that is given by the vector (0, 0, -1).
    #[doc(alias = "ARWorldAlignmentGravityAndHeading")]
    pub const GravityAndHeading: Self = Self(1);
    /// Aligns the world with the camera’s orientation.
    #[doc(alias = "ARWorldAlignmentCamera")]
    pub const Camera: Self = Self(2);
}

// The enum crosses the Objective-C boundary as its raw NSInteger value.
#[cfg(feature = "objc2")]
unsafe impl Encode for ARWorldAlignment {
    const ENCODING: Encoding = NSInteger::ENCODING;
}

// References are encoded as a pointer to the underlying integer encoding.
#[cfg(feature = "objc2")]
unsafe impl RefEncode for ARWorldAlignment {
    const ENCODING_REF: Encoding = Encoding::Pointer(&Self::ENCODING);
}
116
/// Enum constants for indicating the mode of environment texturing to run.
///
/// See also [Apple's documentation](https://developer.apple.com/documentation/arkit/arenvironmenttexturing?language=objc)
// NS_ENUM
#[cfg(feature = "objc2")]
#[repr(transparent)]
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord)]
pub struct AREnvironmentTexturing(pub NSInteger);
#[cfg(feature = "objc2")]
impl AREnvironmentTexturing {
    /// No texture information is gathered.
    #[doc(alias = "AREnvironmentTexturingNone")]
    pub const None: Self = Self(0);
    /// Texture information is gathered for the environment.
    /// Environment textures will be generated for AREnvironmentProbes added to the session.
    #[doc(alias = "AREnvironmentTexturingManual")]
    pub const Manual: Self = Self(1);
    /// Texture information is gathered for the environment and probes automatically placed in the scene.
    #[doc(alias = "AREnvironmentTexturingAutomatic")]
    pub const Automatic: Self = Self(2);
}

// The enum crosses the Objective-C boundary as its raw NSInteger value.
#[cfg(feature = "objc2")]
unsafe impl Encode for AREnvironmentTexturing {
    const ENCODING: Encoding = NSInteger::ENCODING;
}

// References are encoded as a pointer to the underlying integer encoding.
#[cfg(feature = "objc2")]
unsafe impl RefEncode for AREnvironmentTexturing {
    const ENCODING_REF: Encoding = Encoding::Pointer(&Self::ENCODING);
}
148
/// Types of scene reconstruction.
///
/// See also [Apple's documentation](https://developer.apple.com/documentation/arkit/arscenereconstruction?language=objc)
// NS_OPTIONS
#[cfg(feature = "objc2")]
#[repr(transparent)]
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord)]
pub struct ARSceneReconstruction(pub NSUInteger);
#[cfg(feature = "objc2")]
bitflags::bitflags! {
    impl ARSceneReconstruction: NSUInteger {
        /// No scene reconstruction is run.
        #[doc(alias = "ARSceneReconstructionNone")]
        const None = 0;
        /// Scene reconstruction generates a mesh of the world
        #[doc(alias = "ARSceneReconstructionMesh")]
        const Mesh = 1<<0;
        /// Scene reconstruction generates a mesh of the world with classification for each face.
        #[doc(alias = "ARSceneReconstructionMeshWithClassification")]
        // Composite flag: includes the Mesh bit (1<<0) in addition to the
        // classification bit, matching the Objective-C NS_OPTIONS value.
        const MeshWithClassification = (1<<1)|(1<<0);
    }
}

// The option set crosses the Objective-C boundary as its raw NSUInteger,
// so it shares that type's encoding.
#[cfg(feature = "objc2")]
unsafe impl Encode for ARSceneReconstruction {
    const ENCODING: Encoding = NSUInteger::ENCODING;
}

// References are encoded as a pointer to the underlying integer encoding.
#[cfg(feature = "objc2")]
unsafe impl RefEncode for ARSceneReconstruction {
    const ENCODING_REF: Encoding = Encoding::Pointer(&Self::ENCODING);
}
181
#[cfg(feature = "objc2")]
extern_class!(
    /// An object to describe and configure the Augmented Reality techniques to be used in an ARSession.
    ///
    /// See also [Apple's documentation](https://developer.apple.com/documentation/arkit/arconfiguration?language=objc)
    #[unsafe(super(NSObject))]
    #[derive(Debug, PartialEq, Eq, Hash)]
    #[cfg(feature = "objc2")]
    pub struct ARConfiguration;
);

#[cfg(all(feature = "objc2", feature = "objc2-foundation"))]
extern_conformance!(
    unsafe impl NSCopying for ARConfiguration {}
);

// `copy` of an ARConfiguration yields the same class (`Self`).
#[cfg(all(feature = "objc2", feature = "objc2-foundation"))]
unsafe impl CopyingHelper for ARConfiguration {
    type Result = Self;
}

#[cfg(feature = "objc2")]
extern_conformance!(
    unsafe impl NSObjectProtocol for ARConfiguration {}
);
207
#[cfg(feature = "objc2")]
impl ARConfiguration {
    extern_methods!(
        /// Determines whether this device supports the ARConfiguration.
        #[unsafe(method(isSupported))]
        #[unsafe(method_family = none)]
        pub unsafe fn isSupported() -> bool;

        #[cfg(all(feature = "ARVideoFormat", feature = "objc2-foundation"))]
        /// A list of supported video formats for this configuration and device.
        ///
        /// The first element in the list is the default format for session output.
        #[unsafe(method(supportedVideoFormats))]
        #[unsafe(method_family = none)]
        pub unsafe fn supportedVideoFormats() -> Retained<NSArray<ARVideoFormat>>;

        #[cfg(feature = "ARVideoFormat")]
        /// Video format of the session output.
        #[unsafe(method(videoFormat))]
        #[unsafe(method_family = none)]
        pub unsafe fn videoFormat(&self) -> Retained<ARVideoFormat>;

        #[cfg(feature = "ARVideoFormat")]
        /// Setter for [`videoFormat`][Self::videoFormat].
        #[unsafe(method(setVideoFormat:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setVideoFormat(&self, video_format: &ARVideoFormat);

        /// Determines how the coordinate system should be aligned with the world.
        ///
        /// The default is ARWorldAlignmentGravity.
        #[unsafe(method(worldAlignment))]
        #[unsafe(method_family = none)]
        pub unsafe fn worldAlignment(&self) -> ARWorldAlignment;

        /// Setter for [`worldAlignment`][Self::worldAlignment].
        #[unsafe(method(setWorldAlignment:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setWorldAlignment(&self, world_alignment: ARWorldAlignment);

        /// Enable or disable light estimation.
        ///
        /// Enabled by default.
        #[unsafe(method(isLightEstimationEnabled))]
        #[unsafe(method_family = none)]
        pub unsafe fn isLightEstimationEnabled(&self) -> bool;

        /// Setter for [`isLightEstimationEnabled`][Self::isLightEstimationEnabled].
        #[unsafe(method(setLightEstimationEnabled:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setLightEstimationEnabled(&self, light_estimation_enabled: bool);

        /// Determines whether to capture and provide audio data.
        ///
        /// Disabled by default.
        #[unsafe(method(providesAudioData))]
        #[unsafe(method_family = none)]
        pub unsafe fn providesAudioData(&self) -> bool;

        /// Setter for [`providesAudioData`][Self::providesAudioData].
        #[unsafe(method(setProvidesAudioData:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setProvidesAudioData(&self, provides_audio_data: bool);

        /// The type of semantic understanding to provide with each frame.
        ///
        ///
        /// Use the `supportsFrameSemantics` class method to check if the configuration type you intend to run supports the set of frame semantics.
        /// For example, when running a session with a configuration of type ARWorldTrackingConfiguration one would need to use `+[ ARWorldTrackingConfiguration
        /// supportsFrameSemantics:]` to perform said check. An exception is thrown if the option is not supported. Defaults to ARFrameSemanticNone.
        ///
        /// See: ARFrameSemantics
        ///
        /// See: +[ARConfiguration supportsFrameSemantics:]
        #[unsafe(method(frameSemantics))]
        #[unsafe(method_family = none)]
        pub unsafe fn frameSemantics(&self) -> ARFrameSemantics;

        /// Setter for [`frameSemantics`][Self::frameSemantics].
        #[unsafe(method(setFrameSemantics:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setFrameSemantics(&self, frame_semantics: ARFrameSemantics);

        /// Determines whether the type of frame semantics is supported by the device and ARConfiguration class.
        ///
        ///
        /// Semantic frame understanding is not supported on all devices. Use the `supportsFrameSemantics` class method to check if the configuration
        /// type you intend to run supports the set of frame semantics. For example, when running a session with a configuration of type
        /// ARWorldTrackingConfiguration one would need to use
        /// `+[ ARWorldTrackingConfiguration supportsFrameSemantics:]` to perform said check.
        ///
        /// See: ARFrameSemantics
        #[unsafe(method(supportsFrameSemantics:))]
        #[unsafe(method_family = none)]
        pub unsafe fn supportsFrameSemantics(frame_semantics: ARFrameSemantics) -> bool;

        #[cfg(feature = "objc2-av-foundation")]
        /// Returns a pointer to the capture device of the camera that's used for rendering, so developers can adjust capture settings.
        ///
        /// May return nil if it is not recommended to modify capture settings, for example if the primary camera is used for tracking.
        #[unsafe(method(configurableCaptureDeviceForPrimaryCamera))]
        #[unsafe(method_family = none)]
        pub unsafe fn configurableCaptureDeviceForPrimaryCamera(
        ) -> Option<Retained<AVCaptureDevice>>;

        #[cfg(feature = "ARVideoFormat")]
        /// Returns a video format using a 4K resolution from the list of supported video formats.
        ///
        /// May return nil if 4K is not supported for this configuration or device.
        #[unsafe(method(recommendedVideoFormatFor4KResolution))]
        #[unsafe(method_family = none)]
        pub unsafe fn recommendedVideoFormatFor4KResolution() -> Option<Retained<ARVideoFormat>>;

        #[cfg(feature = "ARVideoFormat")]
        /// Returns a recommended video format that supports capturing high resolution frames with a significantly higher resolution than the streaming camera
        /// resolution.
        ///
        /// Using this format may consume more power. Other video formats may support capturing high resolution frames as well, albeit at a lower
        /// quality or resolution.
        ///
        /// See: -[ARSession captureHighResolutionFrameWithCompletion:]
        #[unsafe(method(recommendedVideoFormatForHighResolutionFrameCapturing))]
        #[unsafe(method_family = none)]
        pub unsafe fn recommendedVideoFormatForHighResolutionFrameCapturing(
        ) -> Option<Retained<ARVideoFormat>>;

        /// Whether HDR capturing is allowed if the current video format supports it. Defaults to
        /// `NO.`
        #[unsafe(method(videoHDRAllowed))]
        #[unsafe(method_family = none)]
        pub unsafe fn videoHDRAllowed(&self) -> bool;

        /// Setter for [`videoHDRAllowed`][Self::videoHDRAllowed].
        #[unsafe(method(setVideoHDRAllowed:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setVideoHDRAllowed(&self, video_hdr_allowed: bool);

        /// Unavailable
        // NOTE(review): the "Unavailable" marker from the generator suggests
        // -init is unavailable on this (abstract) class in the Objective-C
        // header; confirm against ARKit headers before calling from Rust.
        #[unsafe(method(init))]
        #[unsafe(method_family = init)]
        pub unsafe fn init(this: Allocated<Self>) -> Retained<Self>;

        /// Binding for `+new` (alloc + init).
        #[unsafe(method(new))]
        #[unsafe(method_family = new)]
        pub unsafe fn new() -> Retained<Self>;
    );
}
355
#[cfg(feature = "objc2")]
extern_class!(
    /// A configuration for running world tracking.
    ///
    ///
    /// World tracking provides 6 degrees of freedom tracking of the device.
    /// By finding feature points in the scene, world tracking enables performing hit-tests against the frame.
    /// Tracking can no longer be resumed once the session is paused.
    ///
    /// See also [Apple's documentation](https://developer.apple.com/documentation/arkit/arworldtrackingconfiguration?language=objc)
    #[unsafe(super(ARConfiguration, NSObject))]
    #[derive(Debug, PartialEq, Eq, Hash)]
    #[cfg(feature = "objc2")]
    pub struct ARWorldTrackingConfiguration;
);

#[cfg(all(feature = "objc2", feature = "objc2-foundation"))]
extern_conformance!(
    unsafe impl NSCopying for ARWorldTrackingConfiguration {}
);

// `copy` returns the same concrete class (`Self`).
#[cfg(all(feature = "objc2", feature = "objc2-foundation"))]
unsafe impl CopyingHelper for ARWorldTrackingConfiguration {
    type Result = Self;
}

#[cfg(feature = "objc2")]
extern_conformance!(
    unsafe impl NSObjectProtocol for ARWorldTrackingConfiguration {}
);
386
#[cfg(feature = "objc2")]
impl ARWorldTrackingConfiguration {
    extern_methods!(
        /// Enable or disable continuous auto focus.
        ///
        /// Enabled by default.
        #[unsafe(method(isAutoFocusEnabled))]
        #[unsafe(method_family = none)]
        pub unsafe fn isAutoFocusEnabled(&self) -> bool;

        /// Setter for [`isAutoFocusEnabled`][Self::isAutoFocusEnabled].
        #[unsafe(method(setAutoFocusEnabled:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setAutoFocusEnabled(&self, auto_focus_enabled: bool);

        /// The mode of environment texturing to run.
        ///
        /// If set, texture information will be accumulated and updated. Adding an AREnvironmentProbeAnchor to the session
        /// will get the current environment texture available from that probe's perspective which can be used for lighting
        /// virtual objects in the scene. Defaults to AREnvironmentTexturingNone.
        #[unsafe(method(environmentTexturing))]
        #[unsafe(method_family = none)]
        pub unsafe fn environmentTexturing(&self) -> AREnvironmentTexturing;

        /// Setter for [`environmentTexturing`][Self::environmentTexturing].
        #[unsafe(method(setEnvironmentTexturing:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setEnvironmentTexturing(&self, environment_texturing: AREnvironmentTexturing);

        /// Determines whether environment textures will be provided with high dynamic range. Enabled by default.
        #[unsafe(method(wantsHDREnvironmentTextures))]
        #[unsafe(method_family = none)]
        pub unsafe fn wantsHDREnvironmentTextures(&self) -> bool;

        /// Setter for [`wantsHDREnvironmentTextures`][Self::wantsHDREnvironmentTextures].
        #[unsafe(method(setWantsHDREnvironmentTextures:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setWantsHDREnvironmentTextures(&self, wants_hdr_environment_textures: bool);

        #[cfg(feature = "ARPlaneDetectionTypes")]
        /// Type of planes to detect in the scene.
        ///
        /// If set, new planes will continue to be detected and updated over time. Detected planes will be added to the session as
        /// ARPlaneAnchor objects. In the event that two planes are merged, the newer plane will be removed. Defaults to ARPlaneDetectionNone.
        #[unsafe(method(planeDetection))]
        #[unsafe(method_family = none)]
        pub unsafe fn planeDetection(&self) -> ARPlaneDetection;

        #[cfg(feature = "ARPlaneDetectionTypes")]
        /// Setter for [`planeDetection`][Self::planeDetection].
        #[unsafe(method(setPlaneDetection:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setPlaneDetection(&self, plane_detection: ARPlaneDetection);

        #[cfg(feature = "ARWorldMap")]
        /// The initial map of the physical space that world tracking will localize to and track.
        ///
        /// If set, the session will attempt to localize to the provided map with
        /// a limited tracking state until localization is successful or run is called again
        /// with a different (or no) initial map specified. Once localized, the map will be extended
        /// and can again be saved using the `getCurrentWorldMap` method on the session.
        #[unsafe(method(initialWorldMap))]
        #[unsafe(method_family = none)]
        pub unsafe fn initialWorldMap(&self) -> Option<Retained<ARWorldMap>>;

        #[cfg(feature = "ARWorldMap")]
        /// Setter for [`initialWorldMap`][Self::initialWorldMap].
        #[unsafe(method(setInitialWorldMap:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setInitialWorldMap(&self, initial_world_map: Option<&ARWorldMap>);

        #[cfg(all(feature = "ARReferenceImage", feature = "objc2-foundation"))]
        /// Images to detect in the scene.
        ///
        /// If set the session will attempt to detect the specified images. When an image is detected an ARImageAnchor will be added to the session.
        #[unsafe(method(detectionImages))]
        #[unsafe(method_family = none)]
        pub unsafe fn detectionImages(&self) -> Retained<NSSet<ARReferenceImage>>;

        #[cfg(all(feature = "ARReferenceImage", feature = "objc2-foundation"))]
        /// Setter for [`detectionImages`][Self::detectionImages].
        ///
        /// This is [copied][objc2_foundation::NSCopying::copy] when set.
        #[unsafe(method(setDetectionImages:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setDetectionImages(&self, detection_images: Option<&NSSet<ARReferenceImage>>);

        /// Enables the estimation of a scale factor which may be used to correct the physical size of an image.
        ///
        /// If set to true ARKit will attempt to use the computed camera positions in order to compute the scale by which the given physical size
        /// differs from the estimated one. The information about the estimated scale can be found as the property estimatedScaleFactor on the ARImageAnchor.
        ///
        /// Note: When set to true the transform of a returned ARImageAnchor will use the estimated scale factor to correct the translation. Default value is NO.
        #[unsafe(method(automaticImageScaleEstimationEnabled))]
        #[unsafe(method_family = none)]
        pub unsafe fn automaticImageScaleEstimationEnabled(&self) -> bool;

        /// Setter for [`automaticImageScaleEstimationEnabled`][Self::automaticImageScaleEstimationEnabled].
        #[unsafe(method(setAutomaticImageScaleEstimationEnabled:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setAutomaticImageScaleEstimationEnabled(
            &self,
            automatic_image_scale_estimation_enabled: bool,
        );

        /// Maximum number of images to track simultaneously.
        ///
        /// Setting the maximum number of tracked images will limit the number of images that can be tracked in a given frame.
        /// If more than the maximum is visible, only the images already being tracked will continue to track until tracking is lost or another image is removed.
        /// Images will continue to be detected regardless of images tracked. Default value is zero.
        #[unsafe(method(maximumNumberOfTrackedImages))]
        #[unsafe(method_family = none)]
        pub unsafe fn maximumNumberOfTrackedImages(&self) -> NSInteger;

        /// Setter for [`maximumNumberOfTrackedImages`][Self::maximumNumberOfTrackedImages].
        #[unsafe(method(setMaximumNumberOfTrackedImages:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setMaximumNumberOfTrackedImages(
            &self,
            maximum_number_of_tracked_images: NSInteger,
        );

        #[cfg(all(feature = "ARReferenceObject", feature = "objc2-foundation"))]
        /// Objects to detect in the scene.
        ///
        /// If set the session will attempt to detect the specified objects. When an object is detected an ARObjectAnchor will be added to the
        /// session.
        #[unsafe(method(detectionObjects))]
        #[unsafe(method_family = none)]
        pub unsafe fn detectionObjects(&self) -> Retained<NSSet<ARReferenceObject>>;

        #[cfg(all(feature = "ARReferenceObject", feature = "objc2-foundation"))]
        /// Setter for [`detectionObjects`][Self::detectionObjects].
        ///
        /// This is [copied][objc2_foundation::NSCopying::copy] when set.
        // NOTE(review): unlike setDetectionImages:, the generated signature
        // takes a non-optional set here — this mirrors the translated header.
        #[unsafe(method(setDetectionObjects:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setDetectionObjects(&self, detection_objects: &NSSet<ARReferenceObject>);

        /// Enable/disable a collaborative session. Disabled by default.
        ///
        ///
        /// When enabled, ARSession will output collaboration data for other participants using its delegate didOutputCollaborationData.
        /// It is the responsibility of the caller to send the data to each participant. When data is received by a participant, it
        /// should be passed to the ARSession by calling updateWithCollaborationData.
        #[unsafe(method(isCollaborationEnabled))]
        #[unsafe(method_family = none)]
        pub unsafe fn isCollaborationEnabled(&self) -> bool;

        /// Setter for [`isCollaborationEnabled`][Self::isCollaborationEnabled].
        #[unsafe(method(setCollaborationEnabled:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setCollaborationEnabled(&self, collaboration_enabled: bool);

        /// Indicates whether user face tracking using the front facing camera can be enabled on this device.
        #[unsafe(method(supportsUserFaceTracking))]
        #[unsafe(method_family = none)]
        pub unsafe fn supportsUserFaceTracking() -> bool;

        /// Enable or disable running Face Tracking using the front facing camera. Disabled by default.
        /// When enabled, ARSession detects faces (if visible in the front-facing camera image) and adds to its list of anchors,
        /// an ARFaceAnchor object representing each face.
        ///
        ///
        /// The transform of the ARFaceAnchor objects will be in the world coordinate space.
        ///
        /// See: ARFaceAnchor
        #[unsafe(method(userFaceTrackingEnabled))]
        #[unsafe(method_family = none)]
        pub unsafe fn userFaceTrackingEnabled(&self) -> bool;

        /// Setter for [`userFaceTrackingEnabled`][Self::userFaceTrackingEnabled].
        #[unsafe(method(setUserFaceTrackingEnabled:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setUserFaceTrackingEnabled(&self, user_face_tracking_enabled: bool);

        /// Enable or disable app clip code tracking. Disabled by default. When enabled, detected app clip codes will be surfaced as an ARAppClipCodeAnchor.
        #[unsafe(method(appClipCodeTrackingEnabled))]
        #[unsafe(method_family = none)]
        pub unsafe fn appClipCodeTrackingEnabled(&self) -> bool;

        /// Setter for [`appClipCodeTrackingEnabled`][Self::appClipCodeTrackingEnabled].
        #[unsafe(method(setAppClipCodeTrackingEnabled:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setAppClipCodeTrackingEnabled(&self, app_clip_code_tracking_enabled: bool);

        /// Indicates whether app clip code tracking can be enabled on this device.
        #[unsafe(method(supportsAppClipCodeTracking))]
        #[unsafe(method_family = none)]
        pub unsafe fn supportsAppClipCodeTracking() -> bool;

        /// Indicates whether the scene reconstruction type is supported for the configuration on this device.
        #[unsafe(method(supportsSceneReconstruction:))]
        #[unsafe(method_family = none)]
        pub unsafe fn supportsSceneReconstruction(
            scene_reconstruction: ARSceneReconstruction,
        ) -> bool;

        /// Type of scene reconstruction to run. Defaults to ARSceneReconstructionNone.
        ///
        /// See: ARMeshAnchor
        ///
        /// If set to a value other than ARSceneReconstructionNone, output of scene reconstruction will be added to the session as
        /// ARMeshAnchor objects.
        #[unsafe(method(sceneReconstruction))]
        #[unsafe(method_family = none)]
        pub unsafe fn sceneReconstruction(&self) -> ARSceneReconstruction;

        /// Setter for [`sceneReconstruction`][Self::sceneReconstruction].
        #[unsafe(method(setSceneReconstruction:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setSceneReconstruction(&self, scene_reconstruction: ARSceneReconstruction);

        /// Binding for `-init`.
        #[unsafe(method(init))]
        #[unsafe(method_family = init)]
        pub unsafe fn init(this: Allocated<Self>) -> Retained<Self>;

        /// Binding for `+new` (alloc + init).
        #[unsafe(method(new))]
        #[unsafe(method_family = new)]
        pub unsafe fn new() -> Retained<Self>;
    );
}
609
#[cfg(feature = "objc2")]
extern_class!(
    /// A configuration for running orientation tracking.
    ///
    ///
    /// Orientation tracking provides 3 degrees of freedom tracking of the device.
    ///
    /// See also [Apple's documentation](https://developer.apple.com/documentation/arkit/arorientationtrackingconfiguration?language=objc)
    #[unsafe(super(ARConfiguration, NSObject))]
    #[derive(Debug, PartialEq, Eq, Hash)]
    #[cfg(feature = "objc2")]
    pub struct AROrientationTrackingConfiguration;
);

#[cfg(all(feature = "objc2", feature = "objc2-foundation"))]
extern_conformance!(
    unsafe impl NSCopying for AROrientationTrackingConfiguration {}
);

// `copy` returns the same concrete class (`Self`).
#[cfg(all(feature = "objc2", feature = "objc2-foundation"))]
unsafe impl CopyingHelper for AROrientationTrackingConfiguration {
    type Result = Self;
}

#[cfg(feature = "objc2")]
extern_conformance!(
    unsafe impl NSObjectProtocol for AROrientationTrackingConfiguration {}
);
638
#[cfg(feature = "objc2")]
impl AROrientationTrackingConfiguration {
    extern_methods!(
        /// Enable or disable continuous auto focus.
        ///
        /// Enabled by default.
        #[unsafe(method(isAutoFocusEnabled))]
        #[unsafe(method_family = none)]
        pub unsafe fn isAutoFocusEnabled(&self) -> bool;

        /// Setter for [`isAutoFocusEnabled`][Self::isAutoFocusEnabled].
        #[unsafe(method(setAutoFocusEnabled:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setAutoFocusEnabled(&self, auto_focus_enabled: bool);

        /// Binding for `-init`.
        #[unsafe(method(init))]
        #[unsafe(method_family = init)]
        pub unsafe fn init(this: Allocated<Self>) -> Retained<Self>;

        /// Binding for `+new` (alloc + init).
        #[unsafe(method(new))]
        #[unsafe(method_family = new)]
        pub unsafe fn new() -> Retained<Self>;
    );
}
663
#[cfg(feature = "objc2")]
extern_class!(
    /// A configuration for running face tracking.
    ///
    ///
    /// Face tracking uses the front facing camera to track the face in 3D providing details on the topology and expression of the face.
    /// A detected face will be added to the session as an ARFaceAnchor object which contains information about head pose, mesh, eye pose, and blend shape
    /// coefficients. If light estimation is enabled the detected face will be treated as a light probe and used to estimate the direction of incoming light.
    ///
    /// See also [Apple's documentation](https://developer.apple.com/documentation/arkit/arfacetrackingconfiguration?language=objc)
    #[unsafe(super(ARConfiguration, NSObject))]
    #[derive(Debug, PartialEq, Eq, Hash)]
    #[cfg(feature = "objc2")]
    pub struct ARFaceTrackingConfiguration;
);

#[cfg(all(feature = "objc2", feature = "objc2-foundation"))]
extern_conformance!(
    unsafe impl NSCopying for ARFaceTrackingConfiguration {}
);

// `copy` returns the same concrete class (`Self`).
#[cfg(all(feature = "objc2", feature = "objc2-foundation"))]
unsafe impl CopyingHelper for ARFaceTrackingConfiguration {
    type Result = Self;
}

#[cfg(feature = "objc2")]
extern_conformance!(
    unsafe impl NSObjectProtocol for ARFaceTrackingConfiguration {}
);
694
// Generated method bindings for ARFaceTrackingConfiguration.
// Methods without `&self` (e.g. supportedNumberOfTrackedFaces,
// supportsWorldTracking) bind ObjC class methods; the rest are instance
// property accessors.
#[cfg(feature = "objc2")]
impl ARFaceTrackingConfiguration {
    extern_methods!(
        /// Maximum number of faces which can be tracked simultaneously.
        #[unsafe(method(supportedNumberOfTrackedFaces))]
        #[unsafe(method_family = none)]
        pub unsafe fn supportedNumberOfTrackedFaces() -> NSInteger;

        /// Maximum number of faces to track simultaneously.
        ///
        /// Setting the maximum number of tracked faces will limit the number of faces that can be tracked in a given frame.
        /// If more than the maximum is visible, only the faces already being tracked will continue to track until tracking is lost or another face is removed.
        /// Default value is one.
        #[unsafe(method(maximumNumberOfTrackedFaces))]
        #[unsafe(method_family = none)]
        pub unsafe fn maximumNumberOfTrackedFaces(&self) -> NSInteger;

        /// Setter for [`maximumNumberOfTrackedFaces`][Self::maximumNumberOfTrackedFaces].
        #[unsafe(method(setMaximumNumberOfTrackedFaces:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setMaximumNumberOfTrackedFaces(
            &self,
            maximum_number_of_tracked_faces: NSInteger,
        );

        /// Indicates whether world tracking can be enabled on this device.
        #[unsafe(method(supportsWorldTracking))]
        #[unsafe(method_family = none)]
        pub unsafe fn supportsWorldTracking() -> bool;

        /// Enable or disable World Tracking. Disabled by default.
        ///
        ///
        /// When enabled, ARSession uses the back facing camera to track the device's orientation and position in the world. The camera transform and
        /// the ARFaceAnchor transform will be in the world coordinate space.
        #[unsafe(method(isWorldTrackingEnabled))]
        #[unsafe(method_family = none)]
        pub unsafe fn isWorldTrackingEnabled(&self) -> bool;

        /// Setter for [`isWorldTrackingEnabled`][Self::isWorldTrackingEnabled].
        #[unsafe(method(setWorldTrackingEnabled:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setWorldTrackingEnabled(&self, world_tracking_enabled: bool);

        // Standard ObjC allocation/initialization entry points.
        #[unsafe(method(init))]
        #[unsafe(method_family = init)]
        pub unsafe fn init(this: Allocated<Self>) -> Retained<Self>;

        #[unsafe(method(new))]
        #[unsafe(method_family = new)]
        pub unsafe fn new() -> Retained<Self>;
    );
}
748
// Generated class binding: ARImageTrackingConfiguration, a subclass of
// ARConfiguration.
#[cfg(feature = "objc2")]
extern_class!(
    /// A configuration for running image tracking.
    ///
    ///
    /// Image tracking provides 6 degrees of freedom tracking of known images. Four images may be tracked simultaneously.
    ///
    /// See also [Apple's documentation](https://developer.apple.com/documentation/arkit/arimagetrackingconfiguration?language=objc)
    #[unsafe(super(ARConfiguration, NSObject))]
    #[derive(Debug, PartialEq, Eq, Hash)]
    #[cfg(feature = "objc2")]
    pub struct ARImageTrackingConfiguration;
);
762
// Generated protocol conformances for ARImageTrackingConfiguration.
#[cfg(all(feature = "objc2", feature = "objc2-foundation"))]
extern_conformance!(
    unsafe impl NSCopying for ARImageTrackingConfiguration {}
);

// `type Result = Self` records that `copy` yields the same class.
#[cfg(all(feature = "objc2", feature = "objc2-foundation"))]
unsafe impl CopyingHelper for ARImageTrackingConfiguration {
    type Result = Self;
}

#[cfg(feature = "objc2")]
extern_conformance!(
    unsafe impl NSObjectProtocol for ARImageTrackingConfiguration {}
);
777
// Generated method bindings for ARImageTrackingConfiguration.
// The trackingImages accessors are additionally gated on the ARReferenceImage
// and objc2-foundation features, since their signatures use those types.
#[cfg(feature = "objc2")]
impl ARImageTrackingConfiguration {
    extern_methods!(
        /// Enable or disable continuous auto focus.
        ///
        /// Enabled by default.
        #[unsafe(method(isAutoFocusEnabled))]
        #[unsafe(method_family = none)]
        pub unsafe fn isAutoFocusEnabled(&self) -> bool;

        /// Setter for [`isAutoFocusEnabled`][Self::isAutoFocusEnabled].
        #[unsafe(method(setAutoFocusEnabled:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setAutoFocusEnabled(&self, auto_focus_enabled: bool);

        #[cfg(all(feature = "ARReferenceImage", feature = "objc2-foundation"))]
        /// Images to track in the scene.
        #[unsafe(method(trackingImages))]
        #[unsafe(method_family = none)]
        pub unsafe fn trackingImages(&self) -> Retained<NSSet<ARReferenceImage>>;

        #[cfg(all(feature = "ARReferenceImage", feature = "objc2-foundation"))]
        /// Setter for [`trackingImages`][Self::trackingImages].
        ///
        /// This is [copied][objc2_foundation::NSCopying::copy] when set.
        #[unsafe(method(setTrackingImages:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setTrackingImages(&self, tracking_images: &NSSet<ARReferenceImage>);

        /// Maximum number of images to track simultaneously.
        ///
        /// Setting the maximum number of tracked images will limit the number of images that can be tracked in a given frame.
        /// If more than the maximum is visible, only the images already being tracked will continue to track until tracking is lost or another image is removed.
        /// Default value is one.
        #[unsafe(method(maximumNumberOfTrackedImages))]
        #[unsafe(method_family = none)]
        pub unsafe fn maximumNumberOfTrackedImages(&self) -> NSInteger;

        /// Setter for [`maximumNumberOfTrackedImages`][Self::maximumNumberOfTrackedImages].
        #[unsafe(method(setMaximumNumberOfTrackedImages:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setMaximumNumberOfTrackedImages(
            &self,
            maximum_number_of_tracked_images: NSInteger,
        );

        // Standard ObjC allocation/initialization entry points.
        #[unsafe(method(init))]
        #[unsafe(method_family = init)]
        pub unsafe fn init(this: Allocated<Self>) -> Retained<Self>;

        #[unsafe(method(new))]
        #[unsafe(method_family = new)]
        pub unsafe fn new() -> Retained<Self>;
    );
}
833
// Generated class binding: ARObjectScanningConfiguration, a subclass of
// ARConfiguration.
#[cfg(feature = "objc2")]
extern_class!(
    /// A configuration for scanning objects.
    ///
    ///
    /// The object scanning configuration runs world tracking, capturing additional detail in order to create reference objects.
    /// Running object scanning will consume additional power in order to provide more detailed features.
    /// The createReferenceObject method can be called on the session to capture a scan of an object in the world.
    ///
    /// See also [Apple's documentation](https://developer.apple.com/documentation/arkit/arobjectscanningconfiguration?language=objc)
    #[unsafe(super(ARConfiguration, NSObject))]
    #[derive(Debug, PartialEq, Eq, Hash)]
    #[cfg(feature = "objc2")]
    pub struct ARObjectScanningConfiguration;
);
849
// Generated protocol conformances for ARObjectScanningConfiguration.
#[cfg(all(feature = "objc2", feature = "objc2-foundation"))]
extern_conformance!(
    unsafe impl NSCopying for ARObjectScanningConfiguration {}
);

// `type Result = Self` records that `copy` yields the same class.
#[cfg(all(feature = "objc2", feature = "objc2-foundation"))]
unsafe impl CopyingHelper for ARObjectScanningConfiguration {
    type Result = Self;
}

#[cfg(feature = "objc2")]
extern_conformance!(
    unsafe impl NSObjectProtocol for ARObjectScanningConfiguration {}
);
864
// Generated method bindings for ARObjectScanningConfiguration.
// planeDetection accessors are gated on the ARPlaneDetectionTypes feature,
// which supplies the ARPlaneDetection type used in their signatures.
#[cfg(feature = "objc2")]
impl ARObjectScanningConfiguration {
    extern_methods!(
        /// Enable or disable continuous auto focus.
        ///
        /// Enabled by default.
        #[unsafe(method(isAutoFocusEnabled))]
        #[unsafe(method_family = none)]
        pub unsafe fn isAutoFocusEnabled(&self) -> bool;

        /// Setter for [`isAutoFocusEnabled`][Self::isAutoFocusEnabled].
        #[unsafe(method(setAutoFocusEnabled:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setAutoFocusEnabled(&self, auto_focus_enabled: bool);

        #[cfg(feature = "ARPlaneDetectionTypes")]
        /// Type of planes to detect in the scene.
        ///
        /// If set, new planes will continue to be detected and updated over time. Detected planes will be added to the session as
        /// ARPlaneAnchor objects. In the event that two planes are merged, the newer plane will be removed. Defaults to ARPlaneDetectionNone.
        #[unsafe(method(planeDetection))]
        #[unsafe(method_family = none)]
        pub unsafe fn planeDetection(&self) -> ARPlaneDetection;

        #[cfg(feature = "ARPlaneDetectionTypes")]
        /// Setter for [`planeDetection`][Self::planeDetection].
        #[unsafe(method(setPlaneDetection:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setPlaneDetection(&self, plane_detection: ARPlaneDetection);

        // Standard ObjC allocation/initialization entry points.
        #[unsafe(method(init))]
        #[unsafe(method_family = init)]
        pub unsafe fn init(this: Allocated<Self>) -> Retained<Self>;

        #[unsafe(method(new))]
        #[unsafe(method_family = new)]
        pub unsafe fn new() -> Retained<Self>;
    );
}
904
// Generated class binding: ARBodyTrackingConfiguration, a subclass of
// ARConfiguration.
#[cfg(feature = "objc2")]
extern_class!(
    /// A configuration for running body tracking.
    ///
    ///
    /// Body tracking provides 6 degrees of freedom tracking of a detected body in the scene. By default, ARFrameSemanticBodyDetection will be
    /// enabled.
    ///
    /// See: ARBodyAnchor
    ///
    /// See: -[ARFrame detectedBody]
    ///
    /// See also [Apple's documentation](https://developer.apple.com/documentation/arkit/arbodytrackingconfiguration?language=objc)
    #[unsafe(super(ARConfiguration, NSObject))]
    #[derive(Debug, PartialEq, Eq, Hash)]
    #[cfg(feature = "objc2")]
    pub struct ARBodyTrackingConfiguration;
);
923
// Generated protocol conformances for ARBodyTrackingConfiguration.
#[cfg(all(feature = "objc2", feature = "objc2-foundation"))]
extern_conformance!(
    unsafe impl NSCopying for ARBodyTrackingConfiguration {}
);

// `type Result = Self` records that `copy` yields the same class.
#[cfg(all(feature = "objc2", feature = "objc2-foundation"))]
unsafe impl CopyingHelper for ARBodyTrackingConfiguration {
    type Result = Self;
}

#[cfg(feature = "objc2")]
extern_conformance!(
    unsafe impl NSObjectProtocol for ARBodyTrackingConfiguration {}
);
938
// Generated method bindings for ARBodyTrackingConfiguration.
// Feature-gated accessors depend on the crate feature that provides the type
// in their signature (ARWorldMap, ARPlaneDetectionTypes, ARReferenceImage).
// `supportsAppClipCodeTracking` has no `&self`, so it binds a class method.
#[cfg(feature = "objc2")]
impl ARBodyTrackingConfiguration {
    extern_methods!(
        /// Enable or disable continuous auto focus.
        ///
        /// Enabled by default.
        #[unsafe(method(isAutoFocusEnabled))]
        #[unsafe(method_family = none)]
        pub unsafe fn isAutoFocusEnabled(&self) -> bool;

        /// Setter for [`isAutoFocusEnabled`][Self::isAutoFocusEnabled].
        #[unsafe(method(setAutoFocusEnabled:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setAutoFocusEnabled(&self, auto_focus_enabled: bool);

        #[cfg(feature = "ARWorldMap")]
        /// The initial map of the physical space that world tracking will localize to and track.
        ///
        /// If set, the session will attempt to localize to the provided map with
        /// a limited tracking state until localization is successful or run is called again
        /// with a different (or no) initial map specified. Once localized, the map will be extended
        /// and can again be saved using the `getCurrentWorldMap` method on the session.
        #[unsafe(method(initialWorldMap))]
        #[unsafe(method_family = none)]
        pub unsafe fn initialWorldMap(&self) -> Option<Retained<ARWorldMap>>;

        #[cfg(feature = "ARWorldMap")]
        /// Setter for [`initialWorldMap`][Self::initialWorldMap].
        #[unsafe(method(setInitialWorldMap:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setInitialWorldMap(&self, initial_world_map: Option<&ARWorldMap>);

        /// The mode of environment texturing to run.
        ///
        /// If set, texture information will be accumulated and updated. Adding an AREnvironmentProbeAnchor to the session
        /// will get the current environment texture available from that probe's perspective which can be used for lighting
        /// virtual objects in the scene. Defaults to AREnvironmentTexturingNone.
        #[unsafe(method(environmentTexturing))]
        #[unsafe(method_family = none)]
        pub unsafe fn environmentTexturing(&self) -> AREnvironmentTexturing;

        /// Setter for [`environmentTexturing`][Self::environmentTexturing].
        #[unsafe(method(setEnvironmentTexturing:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setEnvironmentTexturing(&self, environment_texturing: AREnvironmentTexturing);

        /// Determines whether environment textures will be provided with high dynamic range. Enabled by default.
        #[unsafe(method(wantsHDREnvironmentTextures))]
        #[unsafe(method_family = none)]
        pub unsafe fn wantsHDREnvironmentTextures(&self) -> bool;

        /// Setter for [`wantsHDREnvironmentTextures`][Self::wantsHDREnvironmentTextures].
        #[unsafe(method(setWantsHDREnvironmentTextures:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setWantsHDREnvironmentTextures(&self, wants_hdr_environment_textures: bool);

        #[cfg(feature = "ARPlaneDetectionTypes")]
        /// Type of planes to detect in the scene.
        ///
        /// If set, new planes will continue to be detected and updated over time. Detected planes will be added to the session as
        /// ARPlaneAnchor objects. In the event that two planes are merged, the newer plane will be removed. Defaults to ARPlaneDetectionNone.
        #[unsafe(method(planeDetection))]
        #[unsafe(method_family = none)]
        pub unsafe fn planeDetection(&self) -> ARPlaneDetection;

        #[cfg(feature = "ARPlaneDetectionTypes")]
        /// Setter for [`planeDetection`][Self::planeDetection].
        #[unsafe(method(setPlaneDetection:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setPlaneDetection(&self, plane_detection: ARPlaneDetection);

        #[cfg(all(feature = "ARReferenceImage", feature = "objc2-foundation"))]
        /// Images to detect in the scene.
        ///
        /// If set the session will attempt to detect the specified images. When an image is detected an ARImageAnchor will be added to the session.
        #[unsafe(method(detectionImages))]
        #[unsafe(method_family = none)]
        pub unsafe fn detectionImages(&self) -> Retained<NSSet<ARReferenceImage>>;

        #[cfg(all(feature = "ARReferenceImage", feature = "objc2-foundation"))]
        /// Setter for [`detectionImages`][Self::detectionImages].
        ///
        /// This is [copied][objc2_foundation::NSCopying::copy] when set.
        #[unsafe(method(setDetectionImages:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setDetectionImages(&self, detection_images: &NSSet<ARReferenceImage>);

        /// Enables the estimation of a scale factor which may be used to correct the physical size of an image.
        ///
        /// If set to true ARKit will attempt to use the computed camera positions in order to compute the scale by which the given physical size
        /// differs from the estimated one. The information about the estimated scale can be found as the property estimatedScaleFactor on the ARImageAnchor.
        ///
        /// Note: When set to true the transform of a returned ARImageAnchor will use the estimated scale factor to correct the translation. Default value is NO.
        #[unsafe(method(automaticImageScaleEstimationEnabled))]
        #[unsafe(method_family = none)]
        pub unsafe fn automaticImageScaleEstimationEnabled(&self) -> bool;

        /// Setter for [`automaticImageScaleEstimationEnabled`][Self::automaticImageScaleEstimationEnabled].
        #[unsafe(method(setAutomaticImageScaleEstimationEnabled:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setAutomaticImageScaleEstimationEnabled(
            &self,
            automatic_image_scale_estimation_enabled: bool,
        );

        /// Enables the estimation of a scale factor which may be used to correct the physical size of a skeleton in 3D.
        ///
        /// If set to true ARKit will attempt to use the computed camera positions in order to compute the scale by which the given physical size
        /// differs from the default one. The information about the estimated scale can be found as the property estimatedScaleFactor on the ARBodyAnchor.
        ///
        /// Note: When set to true the transform of a returned ARBodyAnchor will use the estimated scale factor to correct the translation. Default value is NO.
        #[unsafe(method(automaticSkeletonScaleEstimationEnabled))]
        #[unsafe(method_family = none)]
        pub unsafe fn automaticSkeletonScaleEstimationEnabled(&self) -> bool;

        /// Setter for [`automaticSkeletonScaleEstimationEnabled`][Self::automaticSkeletonScaleEstimationEnabled].
        #[unsafe(method(setAutomaticSkeletonScaleEstimationEnabled:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setAutomaticSkeletonScaleEstimationEnabled(
            &self,
            automatic_skeleton_scale_estimation_enabled: bool,
        );

        /// Maximum number of images to track simultaneously.
        ///
        /// Setting the maximum number of tracked images will limit the number of images that can be tracked in a given frame.
        /// If more than the maximum is visible, only the images already being tracked will continue to track until tracking is lost or another image is removed.
        /// Images will continue to be detected regardless of images tracked. Default value is zero.
        #[unsafe(method(maximumNumberOfTrackedImages))]
        #[unsafe(method_family = none)]
        pub unsafe fn maximumNumberOfTrackedImages(&self) -> NSInteger;

        /// Setter for [`maximumNumberOfTrackedImages`][Self::maximumNumberOfTrackedImages].
        #[unsafe(method(setMaximumNumberOfTrackedImages:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setMaximumNumberOfTrackedImages(
            &self,
            maximum_number_of_tracked_images: NSInteger,
        );

        /// Enable or disable app clip code tracking. Disabled by default. When enabled, detected app clip codes will be surfaced as an ARAppClipCodeAnchor.
        #[unsafe(method(appClipCodeTrackingEnabled))]
        #[unsafe(method_family = none)]
        pub unsafe fn appClipCodeTrackingEnabled(&self) -> bool;

        /// Setter for [`appClipCodeTrackingEnabled`][Self::appClipCodeTrackingEnabled].
        #[unsafe(method(setAppClipCodeTrackingEnabled:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setAppClipCodeTrackingEnabled(&self, app_clip_code_tracking_enabled: bool);

        /// Indicates whether app clip code tracking can be enabled on this device.
        #[unsafe(method(supportsAppClipCodeTracking))]
        #[unsafe(method_family = none)]
        pub unsafe fn supportsAppClipCodeTracking() -> bool;

        // Standard ObjC allocation/initialization entry points.
        #[unsafe(method(init))]
        #[unsafe(method_family = init)]
        pub unsafe fn init(this: Allocated<Self>) -> Retained<Self>;

        #[unsafe(method(new))]
        #[unsafe(method_family = new)]
        pub unsafe fn new() -> Retained<Self>;
    );
}
1103
// Generated class binding: ARPositionalTrackingConfiguration, a subclass of
// ARConfiguration.
#[cfg(feature = "objc2")]
extern_class!(
    /// A configuration for running positional tracking.
    ///
    ///
    /// Positional tracking provides 6 degrees of freedom tracking of the device by running the camera at lowest possible resolution and frame
    /// rate.
    ///
    /// See also [Apple's documentation](https://developer.apple.com/documentation/arkit/arpositionaltrackingconfiguration?language=objc)
    #[unsafe(super(ARConfiguration, NSObject))]
    #[derive(Debug, PartialEq, Eq, Hash)]
    #[cfg(feature = "objc2")]
    pub struct ARPositionalTrackingConfiguration;
);
1118
// Generated protocol conformances for ARPositionalTrackingConfiguration.
#[cfg(all(feature = "objc2", feature = "objc2-foundation"))]
extern_conformance!(
    unsafe impl NSCopying for ARPositionalTrackingConfiguration {}
);

// `type Result = Self` records that `copy` yields the same class.
#[cfg(all(feature = "objc2", feature = "objc2-foundation"))]
unsafe impl CopyingHelper for ARPositionalTrackingConfiguration {
    type Result = Self;
}

#[cfg(feature = "objc2")]
extern_conformance!(
    unsafe impl NSObjectProtocol for ARPositionalTrackingConfiguration {}
);
1133
// Generated method bindings for ARPositionalTrackingConfiguration.
// Accessors are gated on the feature that provides their signature types
// (ARPlaneDetectionTypes, ARWorldMap).
#[cfg(feature = "objc2")]
impl ARPositionalTrackingConfiguration {
    extern_methods!(
        #[cfg(feature = "ARPlaneDetectionTypes")]
        /// Type of planes to detect in the scene.
        ///
        /// If set, new planes will continue to be detected and updated over time. Detected planes will be added to the session as
        /// ARPlaneAnchor objects. In the event that two planes are merged, the newer plane will be removed. Defaults to ARPlaneDetectionNone.
        #[unsafe(method(planeDetection))]
        #[unsafe(method_family = none)]
        pub unsafe fn planeDetection(&self) -> ARPlaneDetection;

        #[cfg(feature = "ARPlaneDetectionTypes")]
        /// Setter for [`planeDetection`][Self::planeDetection].
        #[unsafe(method(setPlaneDetection:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setPlaneDetection(&self, plane_detection: ARPlaneDetection);

        #[cfg(feature = "ARWorldMap")]
        /// The initial map of the physical space that world tracking will localize to and track.
        ///
        /// If set, the session will attempt to localize to the provided map with
        /// a limited tracking state until localization is successful or run is called again
        /// with a different (or no) initial map specified. Once localized, the map will be extended
        /// and can again be saved using the `getCurrentWorldMap` method on the session.
        #[unsafe(method(initialWorldMap))]
        #[unsafe(method_family = none)]
        pub unsafe fn initialWorldMap(&self) -> Option<Retained<ARWorldMap>>;

        #[cfg(feature = "ARWorldMap")]
        /// Setter for [`initialWorldMap`][Self::initialWorldMap].
        #[unsafe(method(setInitialWorldMap:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setInitialWorldMap(&self, initial_world_map: Option<&ARWorldMap>);

        // Standard ObjC allocation/initialization entry points.
        #[unsafe(method(init))]
        #[unsafe(method_family = init)]
        pub unsafe fn init(this: Allocated<Self>) -> Retained<Self>;

        #[unsafe(method(new))]
        #[unsafe(method_family = new)]
        pub unsafe fn new() -> Retained<Self>;
    );
}
1178
// Generated class binding: ARGeoTrackingConfiguration, a subclass of
// ARConfiguration.
#[cfg(feature = "objc2")]
extern_class!(
    /// A configuration for running geographical world tracking.
    ///
    ///
    /// It allows placing geo-referenced anchors (ARGeoAnchor) in the scene by running world tracking with location and compass.
    ///
    /// See also [Apple's documentation](https://developer.apple.com/documentation/arkit/argeotrackingconfiguration?language=objc)
    #[unsafe(super(ARConfiguration, NSObject))]
    #[derive(Debug, PartialEq, Eq, Hash)]
    #[cfg(feature = "objc2")]
    pub struct ARGeoTrackingConfiguration;
);
1192
// Generated protocol conformances for ARGeoTrackingConfiguration.
#[cfg(all(feature = "objc2", feature = "objc2-foundation"))]
extern_conformance!(
    unsafe impl NSCopying for ARGeoTrackingConfiguration {}
);

// `type Result = Self` records that `copy` yields the same class.
#[cfg(all(feature = "objc2", feature = "objc2-foundation"))]
unsafe impl CopyingHelper for ARGeoTrackingConfiguration {
    type Result = Self;
}

#[cfg(feature = "objc2")]
extern_conformance!(
    unsafe impl NSObjectProtocol for ARGeoTrackingConfiguration {}
);
1207
1208#[cfg(feature = "objc2")]
1209impl ARGeoTrackingConfiguration {
1210    extern_methods!(
1211        /// Unavailable
1212        #[unsafe(method(worldAlignment))]
1213        #[unsafe(method_family = none)]
1214        pub unsafe fn worldAlignment(&self) -> ARWorldAlignment;
1215
1216        /// Setter for [`worldAlignment`][Self::worldAlignment].
1217        #[unsafe(method(setWorldAlignment:))]
1218        #[unsafe(method_family = none)]
1219        pub unsafe fn setWorldAlignment(&self, world_alignment: ARWorldAlignment);
1220
1221        /// The mode of environment texturing to run.
1222        ///
1223        /// If set, texture information will be accumulated and updated. Adding an AREnvironmentProbeAnchor to the session
1224        /// will get the current environment texture available from that probe's perspective which can be used for lighting
1225        /// virtual objects in the scene. Defaults to AREnvironmentTexturingNone.
1226        #[unsafe(method(environmentTexturing))]
1227        #[unsafe(method_family = none)]
1228        pub unsafe fn environmentTexturing(&self) -> AREnvironmentTexturing;
1229
1230        /// Setter for [`environmentTexturing`][Self::environmentTexturing].
1231        #[unsafe(method(setEnvironmentTexturing:))]
1232        #[unsafe(method_family = none)]
1233        pub unsafe fn setEnvironmentTexturing(&self, environment_texturing: AREnvironmentTexturing);
1234
1235        /// Determines whether environment textures will be provided with high dynamic range. Enabled by default.
1236        #[unsafe(method(wantsHDREnvironmentTextures))]
1237        #[unsafe(method_family = none)]
1238        pub unsafe fn wantsHDREnvironmentTextures(&self) -> bool;
1239
1240        /// Setter for [`wantsHDREnvironmentTextures`][Self::wantsHDREnvironmentTextures].
1241        #[unsafe(method(setWantsHDREnvironmentTextures:))]
1242        #[unsafe(method_family = none)]
1243        pub unsafe fn setWantsHDREnvironmentTextures(&self, wants_hdr_environment_textures: bool);
1244
1245        #[cfg(feature = "ARPlaneDetectionTypes")]
1246        /// Type of planes to detect in the scene.
1247        ///
1248        /// If set, new planes will continue to be detected and updated over time. Detected planes will be added to the session as
1249        /// ARPlaneAnchor objects. In the event that two planes are merged, the newer plane will be removed. Defaults to ARPlaneDetectionNone.
1250        #[unsafe(method(planeDetection))]
1251        #[unsafe(method_family = none)]
1252        pub unsafe fn planeDetection(&self) -> ARPlaneDetection;
1253
1254        #[cfg(feature = "ARPlaneDetectionTypes")]
1255        /// Setter for [`planeDetection`][Self::planeDetection].
1256        #[unsafe(method(setPlaneDetection:))]
1257        #[unsafe(method_family = none)]
1258        pub unsafe fn setPlaneDetection(&self, plane_detection: ARPlaneDetection);
1259
1260        #[cfg(all(feature = "ARReferenceImage", feature = "objc2-foundation"))]
1261        /// Images to detect in the scene.
1262        ///
1263        /// If set the session will attempt to detect the specified images. When an image is detected an ARImageAnchor will be added to the session.
1264        #[unsafe(method(detectionImages))]
1265        #[unsafe(method_family = none)]
1266        pub unsafe fn detectionImages(&self) -> Retained<NSSet<ARReferenceImage>>;
1267
1268        #[cfg(all(feature = "ARReferenceImage", feature = "objc2-foundation"))]
1269        /// Setter for [`detectionImages`][Self::detectionImages].
1270        ///
1271        /// This is [copied][objc2_foundation::NSCopying::copy] when set.
1272        #[unsafe(method(setDetectionImages:))]
1273        #[unsafe(method_family = none)]
1274        pub unsafe fn setDetectionImages(&self, detection_images: Option<&NSSet<ARReferenceImage>>);
1275
1276        /// Enables the estimation of a scale factor which may be used to correct the physical size of an image.
1277        ///
1278        /// If set to true ARKit will attempt to use the computed camera positions in order to compute the scale by which the given physical size
1279        /// differs from the estimated one. The information about the estimated scale can be found as the property estimatedScaleFactor on the ARImageAnchor.
1280        ///
1281        /// Note: When set to true the transform of a returned ARImageAnchor will use the estimated scale factor to correct the translation. Default value is NO.
1282        #[unsafe(method(automaticImageScaleEstimationEnabled))]
1283        #[unsafe(method_family = none)]
1284        pub unsafe fn automaticImageScaleEstimationEnabled(&self) -> bool;
1285
1286        /// Setter for [`automaticImageScaleEstimationEnabled`][Self::automaticImageScaleEstimationEnabled].
1287        #[unsafe(method(setAutomaticImageScaleEstimationEnabled:))]
1288        #[unsafe(method_family = none)]
1289        pub unsafe fn setAutomaticImageScaleEstimationEnabled(
1290            &self,
1291            automatic_image_scale_estimation_enabled: bool,
1292        );
1293
1294        /// Maximum number of images to track simultaneously.
1295        ///
1296        /// Setting the maximum number of tracked images will limit the number of images that can be tracked in a given frame.
1297        /// If more than the maximum is visible, only the images already being tracked will continue to track until tracking is lost or another image is removed.
1298        /// Images will continue to be detected regardless of images tracked. Default value is zero.
1299        #[unsafe(method(maximumNumberOfTrackedImages))]
1300        #[unsafe(method_family = none)]
1301        pub unsafe fn maximumNumberOfTrackedImages(&self) -> NSInteger;
1302
1303        /// Setter for [`maximumNumberOfTrackedImages`][Self::maximumNumberOfTrackedImages].
1304        #[unsafe(method(setMaximumNumberOfTrackedImages:))]
1305        #[unsafe(method_family = none)]
1306        pub unsafe fn setMaximumNumberOfTrackedImages(
1307            &self,
1308            maximum_number_of_tracked_images: NSInteger,
1309        );
1310
1311        #[cfg(all(feature = "ARReferenceObject", feature = "objc2-foundation"))]
1312        /// Objects to detect in the scene.
1313        ///
1314        /// If set the session will attempt to detect the specified objects. When an object is detected an ARObjectAnchor will be added to the
1315        /// session.
1316        #[unsafe(method(detectionObjects))]
1317        #[unsafe(method_family = none)]
1318        pub unsafe fn detectionObjects(&self) -> Retained<NSSet<ARReferenceObject>>;
1319
1320        #[cfg(all(feature = "ARReferenceObject", feature = "objc2-foundation"))]
1321        /// Setter for [`detectionObjects`][Self::detectionObjects].
1322        ///
1323        /// This is [copied][objc2_foundation::NSCopying::copy] when set.
1324        #[unsafe(method(setDetectionObjects:))]
1325        #[unsafe(method_family = none)]
1326        pub unsafe fn setDetectionObjects(&self, detection_objects: &NSSet<ARReferenceObject>);
1327
        /// Enable or disable app clip code tracking. Disabled by default. When enabled, detected app clip codes will be surfaced as an ARAppClipCodeAnchor.
        ///
        /// Getter for the Objective-C `appClipCodeTrackingEnabled` property; see
        /// [`setAppClipCodeTrackingEnabled`][Self::setAppClipCodeTrackingEnabled].
        #[unsafe(method(appClipCodeTrackingEnabled))]
        #[unsafe(method_family = none)]
        pub unsafe fn appClipCodeTrackingEnabled(&self) -> bool;
1332
        /// Setter for [`appClipCodeTrackingEnabled`][Self::appClipCodeTrackingEnabled].
        ///
        /// Sends the Objective-C `setAppClipCodeTrackingEnabled:` message.
        #[unsafe(method(setAppClipCodeTrackingEnabled:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setAppClipCodeTrackingEnabled(&self, app_clip_code_tracking_enabled: bool);
1337
        /// Indicates whether app clip code tracking can be enabled on this device.
        ///
        /// Associated function (no `self`), i.e. an Objective-C class method.
        #[unsafe(method(supportsAppClipCodeTracking))]
        #[unsafe(method_family = none)]
        pub unsafe fn supportsAppClipCodeTracking() -> bool;
1342
        #[cfg(all(feature = "block2", feature = "objc2-foundation"))]
        /// Determines the availability of geo tracking at the current location.
        ///
        ///
        /// This method will attempt to acquire a location fix on a background thread, then check availability.
        ///
        ///
        /// Parameter `completionHandler`: Completion handler that is called when availability has been determined. This handler is executed on an arbitrary serial
        /// queue. It takes the following parameters: isAvailable - True if geo tracking is available at the current location, otherwise false. error - An error
        /// that indicates why geo tracking is not available at the current location.
        ///
        /// Associated function (Objective-C class method). The block receives `(Bool, *mut NSError)`;
        /// presumably the error pointer is null when availability is true — check it before dereferencing.
        #[unsafe(method(checkAvailabilityWithCompletionHandler:))]
        #[unsafe(method_family = none)]
        pub unsafe fn checkAvailabilityWithCompletionHandler(
            completion_handler: &block2::DynBlock<dyn Fn(Bool, *mut NSError)>,
        );
1358
        #[cfg(all(
            feature = "block2",
            feature = "objc2-core-location",
            feature = "objc2-foundation"
        ))]
        /// Determines the availability of geo tracking at the given location.
        ///
        /// Parameter `coordinate`: Location at which to check.
        ///
        /// Parameter `completionHandler`: Completion handler that is called when availability has been determined. This handler is executed on an arbitrary serial
        /// queue. It takes the following parameters: isAvailable - True if geo tracking is available at the given location, otherwise false. error - An error
        /// that indicates why geo tracking is not available at the given location.
        ///
        /// Associated function (Objective-C class method) taking a Core Location
        /// `CLLocationCoordinate2D` by value; the block receives `(Bool, *mut NSError)`.
        #[unsafe(method(checkAvailabilityAtCoordinate:completionHandler:))]
        #[unsafe(method_family = none)]
        pub unsafe fn checkAvailabilityAtCoordinate_completionHandler(
            coordinate: CLLocationCoordinate2D,
            completion_handler: &block2::DynBlock<dyn Fn(Bool, *mut NSError)>,
        );
1377
        /// Designated initializer (`-init`): consumes an [`Allocated`] instance and
        /// returns the initialized, retained object (`method_family = init` semantics).
        #[unsafe(method(init))]
        #[unsafe(method_family = init)]
        pub unsafe fn init(this: Allocated<Self>) -> Retained<Self>;
1381
        /// Convenience constructor mapping to the Objective-C `+new` class method
        /// (combined alloc + init); returns a retained instance.
        #[unsafe(method(new))]
        #[unsafe(method_family = new)]
        pub unsafe fn new() -> Retained<Self>;
1385    );
1386}