// objc2_ar_kit/generated/ARConfiguration.rs

1//! This file has been automatically generated by `objc2`'s `header-translator`.
2//! DO NOT EDIT
3use core::ffi::*;
4use core::ptr::NonNull;
5#[cfg(feature = "objc2")]
6use objc2::__framework_prelude::*;
7#[cfg(feature = "objc2-av-foundation")]
8use objc2_av_foundation::*;
9#[cfg(feature = "objc2-core-location")]
10use objc2_core_location::*;
11#[cfg(feature = "objc2-foundation")]
12use objc2_foundation::*;
13
14use crate::*;
15
/// Option set indicating semantic understanding types of the image frame.
///
/// Newtype over `NSUInteger` so the flags cross the ObjC boundary with the
/// correct ABI; the flag constants are provided via `bitflags!` below.
///
/// See also [Apple's documentation](https://developer.apple.com/documentation/arkit/arframesemantics?language=objc)
// NS_OPTIONS
#[cfg(feature = "objc2")]
#[repr(transparent)]
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord)]
pub struct ARFrameSemantics(pub NSUInteger);
#[cfg(feature = "objc2")]
bitflags::bitflags! {
    impl ARFrameSemantics: NSUInteger {
        /// No semantic operation is run.
        #[doc(alias = "ARFrameSemanticNone")]
        const None = 0;
        /// Person segmentation.
        ///
        /// A pixel in the image frame that gets classified as person will have an intensity value equal to 'ARSegmentationClassPerson'.
        ///
        /// See: `-[ARFrame segmentationBuffer]`
        ///
        /// See: ARSegmentationClass
        #[doc(alias = "ARFrameSemanticPersonSegmentation")]
        const PersonSegmentation = 1<<0;
        /// Person segmentation with depth.
        ///
        /// A pixel in the image frame that gets classified as person will have an intensity value equal to 'ARSegmentationClassPerson'.
        /// Additionally, every pixel in the image frame that gets classified as person will also have a depth value.
        ///
        /// Note: this is a composite flag — it includes the `PersonSegmentation`
        /// bit (`1<<0`), mirroring the ObjC NS_OPTIONS definition.
        ///
        /// See: `-[ARFrame estimatedDepthData]`
        ///
        /// See: `-[ARFrame segmentationBuffer]`
        #[doc(alias = "ARFrameSemanticPersonSegmentationWithDepth")]
        const PersonSegmentationWithDepth = (1<<1)|(1<<0);
        /// Body detection.
        ///
        /// Once activated an ARFrame will contain information about a detected body.
        ///
        /// See: `-[ARFrame detectedBody]`
        ///
        /// See: ARBody2D
        #[doc(alias = "ARFrameSemanticBodyDetection")]
        const BodyDetection = 1<<2;
        /// Scene Depth.
        ///
        /// Each capturedImage will have an associated scene depth data.
        ///
        /// See: `-[ARFrame sceneDepth]`
        #[doc(alias = "ARFrameSemanticSceneDepth")]
        const SceneDepth = 1<<3;
        /// Smoothed Scene Depth.
        ///
        /// Each capturedImage will have an associated scene depth data that is temporally smoothed.
        ///
        /// See: `-[ARFrame smoothedSceneDepth]`
        #[doc(alias = "ARFrameSemanticSmoothedSceneDepth")]
        const SmoothedSceneDepth = 1<<4;
    }
}
74
#[cfg(feature = "objc2")]
// SAFETY: `ARFrameSemantics` is a `#[repr(transparent)]` wrapper around
// `NSUInteger`, so its Objective-C type encoding is that of `NSUInteger`.
unsafe impl Encode for ARFrameSemantics {
    const ENCODING: Encoding = NSUInteger::ENCODING;
}

#[cfg(feature = "objc2")]
// SAFETY: a reference to the type is encoded as a pointer to its encoding.
unsafe impl RefEncode for ARFrameSemantics {
    const ENCODING_REF: Encoding = Encoding::Pointer(&Self::ENCODING);
}
84
/// Enum constants for indicating the world alignment.
///
/// Newtype over `NSInteger` (NS_ENUM); the variants are associated constants
/// rather than a Rust `enum` so that unknown values from the OS stay representable.
///
/// See also [Apple's documentation](https://developer.apple.com/documentation/arkit/arworldalignment?language=objc)
// NS_ENUM
#[cfg(feature = "objc2")]
#[repr(transparent)]
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord)]
pub struct ARWorldAlignment(pub NSInteger);
#[cfg(feature = "objc2")]
impl ARWorldAlignment {
    /// Aligns the world with gravity that is defined by vector (0, -1, 0).
    #[doc(alias = "ARWorldAlignmentGravity")]
    pub const Gravity: Self = Self(0);
    /// Aligns the world with gravity that is defined by the vector (0, -1, 0)
    /// and heading (w.r.t. True North) that is given by the vector (0, 0, -1).
    #[doc(alias = "ARWorldAlignmentGravityAndHeading")]
    pub const GravityAndHeading: Self = Self(1);
    /// Aligns the world with the camera's orientation.
    #[doc(alias = "ARWorldAlignmentCamera")]
    pub const Camera: Self = Self(2);
}
106
#[cfg(feature = "objc2")]
// SAFETY: `ARWorldAlignment` is a `#[repr(transparent)]` wrapper around
// `NSInteger`, so its Objective-C type encoding is that of `NSInteger`.
unsafe impl Encode for ARWorldAlignment {
    const ENCODING: Encoding = NSInteger::ENCODING;
}

#[cfg(feature = "objc2")]
// SAFETY: a reference to the type is encoded as a pointer to its encoding.
unsafe impl RefEncode for ARWorldAlignment {
    const ENCODING_REF: Encoding = Encoding::Pointer(&Self::ENCODING);
}
116
/// Enum constants for indicating the mode of environment texturing to run.
///
/// Newtype over `NSInteger` (NS_ENUM); variants are associated constants so
/// values not listed here (from newer OS versions) remain representable.
///
/// See also [Apple's documentation](https://developer.apple.com/documentation/arkit/arenvironmenttexturing?language=objc)
// NS_ENUM
#[cfg(feature = "objc2")]
#[repr(transparent)]
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord)]
pub struct AREnvironmentTexturing(pub NSInteger);
#[cfg(feature = "objc2")]
impl AREnvironmentTexturing {
    /// No texture information is gathered.
    #[doc(alias = "AREnvironmentTexturingNone")]
    pub const None: Self = Self(0);
    /// Texture information is gathered for the environment.
    /// Environment textures will be generated for AREnvironmentProbes added to the session.
    #[doc(alias = "AREnvironmentTexturingManual")]
    pub const Manual: Self = Self(1);
    /// Texture information is gathered for the environment and probes automatically placed in the scene.
    #[doc(alias = "AREnvironmentTexturingAutomatic")]
    pub const Automatic: Self = Self(2);
}
138
#[cfg(feature = "objc2")]
// SAFETY: `AREnvironmentTexturing` is a `#[repr(transparent)]` wrapper around
// `NSInteger`, so its Objective-C type encoding is that of `NSInteger`.
unsafe impl Encode for AREnvironmentTexturing {
    const ENCODING: Encoding = NSInteger::ENCODING;
}

#[cfg(feature = "objc2")]
// SAFETY: a reference to the type is encoded as a pointer to its encoding.
unsafe impl RefEncode for AREnvironmentTexturing {
    const ENCODING_REF: Encoding = Encoding::Pointer(&Self::ENCODING);
}
148
/// Types of scene reconstruction.
///
/// Newtype over `NSUInteger` (NS_OPTIONS); flag constants are provided via
/// `bitflags!` below.
///
/// See also [Apple's documentation](https://developer.apple.com/documentation/arkit/arscenereconstruction?language=objc)
// NS_OPTIONS
#[cfg(feature = "objc2")]
#[repr(transparent)]
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord)]
pub struct ARSceneReconstruction(pub NSUInteger);
#[cfg(feature = "objc2")]
bitflags::bitflags! {
    impl ARSceneReconstruction: NSUInteger {
        /// No scene reconstruction is run.
        #[doc(alias = "ARSceneReconstructionNone")]
        const None = 0;
        /// Scene reconstruction generates a mesh of the world
        #[doc(alias = "ARSceneReconstructionMesh")]
        const Mesh = 1<<0;
        /// Scene reconstruction generates a mesh of the world with classification for each face.
        ///
        /// Note: composite flag — includes the `Mesh` bit (`1<<0`), mirroring
        /// the ObjC NS_OPTIONS definition.
        #[doc(alias = "ARSceneReconstructionMeshWithClassification")]
        const MeshWithClassification = (1<<1)|(1<<0);
    }
}
171
#[cfg(feature = "objc2")]
// SAFETY: `ARSceneReconstruction` is a `#[repr(transparent)]` wrapper around
// `NSUInteger`, so its Objective-C type encoding is that of `NSUInteger`.
unsafe impl Encode for ARSceneReconstruction {
    const ENCODING: Encoding = NSUInteger::ENCODING;
}

#[cfg(feature = "objc2")]
// SAFETY: a reference to the type is encoded as a pointer to its encoding.
unsafe impl RefEncode for ARSceneReconstruction {
    const ENCODING_REF: Encoding = Encoding::Pointer(&Self::ENCODING);
}
181
#[cfg(feature = "objc2")]
extern_class!(
    /// An object to describe and configure the Augmented Reality techniques to be used in an ARSession.
    ///
    /// Direct subclass of `NSObject`; the concrete configuration classes below
    /// (world/orientation/face tracking) all inherit from this type.
    ///
    /// See also [Apple's documentation](https://developer.apple.com/documentation/arkit/arconfiguration?language=objc)
    #[unsafe(super(NSObject))]
    #[derive(Debug, PartialEq, Eq, Hash)]
    #[cfg(feature = "objc2")]
    pub struct ARConfiguration;
);
192
// Protocol conformances declared by the ObjC header: ARConfiguration adopts
// NSCopying (and, like all ObjC classes, NSObjectProtocol).
#[cfg(all(feature = "objc2", feature = "objc2-foundation"))]
extern_conformance!(
    unsafe impl NSCopying for ARConfiguration {}
);

// `Result = Self` records that copying an ARConfiguration yields another
// ARConfiguration (used by the generic `NSCopying` helpers).
#[cfg(all(feature = "objc2", feature = "objc2-foundation"))]
unsafe impl CopyingHelper for ARConfiguration {
    type Result = Self;
}

#[cfg(feature = "objc2")]
extern_conformance!(
    unsafe impl NSObjectProtocol for ARConfiguration {}
);
207
// Method bindings for ARConfiguration. Each `#[unsafe(method(...))]` attribute
// names the exact Objective-C selector that is invoked; methods without a
// `&self` receiver are class methods.
#[cfg(feature = "objc2")]
impl ARConfiguration {
    extern_methods!(
        /// Determines whether this device supports the ARConfiguration.
        #[unsafe(method(isSupported))]
        #[unsafe(method_family = none)]
        pub unsafe fn isSupported() -> bool;

        #[cfg(all(feature = "ARVideoFormat", feature = "objc2-foundation"))]
        /// A list of supported video formats for this configuration and device.
        ///
        /// The first element in the list is the default format for session output.
        #[unsafe(method(supportedVideoFormats))]
        #[unsafe(method_family = none)]
        pub unsafe fn supportedVideoFormats() -> Retained<NSArray<ARVideoFormat>>;

        #[cfg(feature = "ARVideoFormat")]
        /// Video format of the session output.
        #[unsafe(method(videoFormat))]
        #[unsafe(method_family = none)]
        pub unsafe fn videoFormat(&self) -> Retained<ARVideoFormat>;

        #[cfg(feature = "ARVideoFormat")]
        /// Setter for [`videoFormat`][Self::videoFormat].
        #[unsafe(method(setVideoFormat:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setVideoFormat(&self, video_format: &ARVideoFormat);

        /// Determines how the coordinate system should be aligned with the world.
        ///
        /// The default is ARWorldAlignmentGravity.
        #[unsafe(method(worldAlignment))]
        #[unsafe(method_family = none)]
        pub unsafe fn worldAlignment(&self) -> ARWorldAlignment;

        /// Setter for [`worldAlignment`][Self::worldAlignment].
        #[unsafe(method(setWorldAlignment:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setWorldAlignment(&self, world_alignment: ARWorldAlignment);

        /// Enable or disable light estimation.
        ///
        /// Enabled by default.
        #[unsafe(method(isLightEstimationEnabled))]
        #[unsafe(method_family = none)]
        pub unsafe fn isLightEstimationEnabled(&self) -> bool;

        /// Setter for [`isLightEstimationEnabled`][Self::isLightEstimationEnabled].
        #[unsafe(method(setLightEstimationEnabled:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setLightEstimationEnabled(&self, light_estimation_enabled: bool);

        /// Determines whether to capture and provide audio data.
        ///
        /// Disabled by default.
        #[unsafe(method(providesAudioData))]
        #[unsafe(method_family = none)]
        pub unsafe fn providesAudioData(&self) -> bool;

        /// Setter for [`providesAudioData`][Self::providesAudioData].
        #[unsafe(method(setProvidesAudioData:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setProvidesAudioData(&self, provides_audio_data: bool);

        /// The type of semantic understanding to provide with each frame.
        ///
        /// Use the `supportsFrameSemantics` class method to check if the configuration type you intend to run supports the set of frame semantics. For example, when running a session with
        /// a configuration of type ARWorldTrackingConfiguration one would need to use `+[ARWorldTrackingConfiguration supportsFrameSemantics:]` to perform said check.
        /// An exception is thrown if the option
        /// is not supported. Defaults to ARFrameSemanticNone.
        ///
        /// See: ARFrameSemantics
        ///
        /// See: `+[ARConfiguration supportsFrameSemantics:]`
        #[unsafe(method(frameSemantics))]
        #[unsafe(method_family = none)]
        pub unsafe fn frameSemantics(&self) -> ARFrameSemantics;

        /// Setter for [`frameSemantics`][Self::frameSemantics].
        #[unsafe(method(setFrameSemantics:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setFrameSemantics(&self, frame_semantics: ARFrameSemantics);

        /// Determines whether the type of frame semantics is supported by the device and ARConfiguration class.
        ///
        /// Semantic frame understanding is not supported on all devices. Use the `supportsFrameSemantics` class method to check if the configuration type you intend to run supports the
        /// set of frame semantics. For example, when running a session with a configuration of type ARWorldTrackingConfiguration one would need to use
        /// `+[ARWorldTrackingConfiguration supportsFrameSemantics:]` to perform said check.
        ///
        /// See: ARFrameSemantics
        #[unsafe(method(supportsFrameSemantics:))]
        #[unsafe(method_family = none)]
        pub unsafe fn supportsFrameSemantics(frame_semantics: ARFrameSemantics) -> bool;

        #[cfg(feature = "objc2-av-foundation")]
        /// Returns a pointer to the capture device of the camera that's used for rendering, so developers can adjust capture settings.
        ///
        /// May return nil if it is not recommended to modify capture settings, for example if the primary camera is used for tracking.
        #[unsafe(method(configurableCaptureDeviceForPrimaryCamera))]
        #[unsafe(method_family = none)]
        pub unsafe fn configurableCaptureDeviceForPrimaryCamera(
        ) -> Option<Retained<AVCaptureDevice>>;

        #[cfg(feature = "ARVideoFormat")]
        /// Returns a video format using a 4K resolution from the list of supported video formats.
        ///
        /// May return nil if 4K is not supported for this configuration or device.
        #[unsafe(method(recommendedVideoFormatFor4KResolution))]
        #[unsafe(method_family = none)]
        pub unsafe fn recommendedVideoFormatFor4KResolution() -> Option<Retained<ARVideoFormat>>;

        #[cfg(feature = "ARVideoFormat")]
        /// Returns a recommended video format that supports capturing high resolution frames with a significantly higher resolution than the streaming camera resolution.
        ///
        /// Using this format may consume more power. Other video formats may support capturing high resolution frames as well, albeit at a lower quality or resolution.
        ///
        /// See: `-[ARSession captureHighResolutionFrameWithCompletion:]`
        #[unsafe(method(recommendedVideoFormatForHighResolutionFrameCapturing))]
        #[unsafe(method_family = none)]
        pub unsafe fn recommendedVideoFormatForHighResolutionFrameCapturing(
        ) -> Option<Retained<ARVideoFormat>>;

        /// Whether HDR capturing is allowed if the current video format supports it. Defaults to `NO`.
        #[unsafe(method(videoHDRAllowed))]
        #[unsafe(method_family = none)]
        pub unsafe fn videoHDRAllowed(&self) -> bool;

        /// Setter for [`videoHDRAllowed`][Self::videoHDRAllowed].
        #[unsafe(method(setVideoHDRAllowed:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setVideoHDRAllowed(&self, video_hdr_allowed: bool);

        /// Unavailable
        ///
        /// NOTE(review): the ObjC header marks `init` unavailable on the
        /// abstract base class; this binding still exposes it — calling it is
        /// the caller's responsibility.
        #[unsafe(method(init))]
        #[unsafe(method_family = init)]
        pub unsafe fn init(this: Allocated<Self>) -> Retained<Self>;

        #[unsafe(method(new))]
        #[unsafe(method_family = new)]
        pub unsafe fn new() -> Retained<Self>;
    );
}
353
#[cfg(feature = "objc2")]
extern_class!(
    /// A configuration for running world tracking.
    ///
    /// World tracking provides 6 degrees of freedom tracking of the device.
    /// By finding feature points in the scene, world tracking enables performing hit-tests against the frame.
    /// Tracking can no longer be resumed once the session is paused.
    ///
    /// Subclass of [`ARConfiguration`].
    ///
    /// See also [Apple's documentation](https://developer.apple.com/documentation/arkit/arworldtrackingconfiguration?language=objc)
    #[unsafe(super(ARConfiguration, NSObject))]
    #[derive(Debug, PartialEq, Eq, Hash)]
    #[cfg(feature = "objc2")]
    pub struct ARWorldTrackingConfiguration;
);
369
// Protocol conformances inherited from ARConfiguration: NSCopying and
// NSObjectProtocol, with copies resolving to the same concrete class.
#[cfg(all(feature = "objc2", feature = "objc2-foundation"))]
extern_conformance!(
    unsafe impl NSCopying for ARWorldTrackingConfiguration {}
);

#[cfg(all(feature = "objc2", feature = "objc2-foundation"))]
unsafe impl CopyingHelper for ARWorldTrackingConfiguration {
    type Result = Self;
}

#[cfg(feature = "objc2")]
extern_conformance!(
    unsafe impl NSObjectProtocol for ARWorldTrackingConfiguration {}
);
384
// Method bindings for ARWorldTrackingConfiguration. Each attribute names the
// exact Objective-C selector invoked; methods without `&self` are class methods.
#[cfg(feature = "objc2")]
impl ARWorldTrackingConfiguration {
    extern_methods!(
        /// Enable or disable continuous auto focus.
        ///
        /// Enabled by default.
        #[unsafe(method(isAutoFocusEnabled))]
        #[unsafe(method_family = none)]
        pub unsafe fn isAutoFocusEnabled(&self) -> bool;

        /// Setter for [`isAutoFocusEnabled`][Self::isAutoFocusEnabled].
        #[unsafe(method(setAutoFocusEnabled:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setAutoFocusEnabled(&self, auto_focus_enabled: bool);

        /// The mode of environment texturing to run.
        ///
        /// If set, texture information will be accumulated and updated. Adding an AREnvironmentProbeAnchor to the session
        /// will get the current environment texture available from that probe's perspective which can be used for lighting
        /// virtual objects in the scene. Defaults to AREnvironmentTexturingNone.
        #[unsafe(method(environmentTexturing))]
        #[unsafe(method_family = none)]
        pub unsafe fn environmentTexturing(&self) -> AREnvironmentTexturing;

        /// Setter for [`environmentTexturing`][Self::environmentTexturing].
        #[unsafe(method(setEnvironmentTexturing:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setEnvironmentTexturing(&self, environment_texturing: AREnvironmentTexturing);

        /// Determines whether environment textures will be provided with high dynamic range. Enabled by default.
        #[unsafe(method(wantsHDREnvironmentTextures))]
        #[unsafe(method_family = none)]
        pub unsafe fn wantsHDREnvironmentTextures(&self) -> bool;

        /// Setter for [`wantsHDREnvironmentTextures`][Self::wantsHDREnvironmentTextures].
        #[unsafe(method(setWantsHDREnvironmentTextures:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setWantsHDREnvironmentTextures(&self, wants_hdr_environment_textures: bool);

        #[cfg(feature = "ARPlaneDetectionTypes")]
        /// Type of planes to detect in the scene.
        ///
        /// If set, new planes will continue to be detected and updated over time. Detected planes will be added to the session as
        /// ARPlaneAnchor objects. In the event that two planes are merged, the newer plane will be removed. Defaults to ARPlaneDetectionNone.
        #[unsafe(method(planeDetection))]
        #[unsafe(method_family = none)]
        pub unsafe fn planeDetection(&self) -> ARPlaneDetection;

        #[cfg(feature = "ARPlaneDetectionTypes")]
        /// Setter for [`planeDetection`][Self::planeDetection].
        #[unsafe(method(setPlaneDetection:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setPlaneDetection(&self, plane_detection: ARPlaneDetection);

        #[cfg(feature = "ARWorldMap")]
        /// The initial map of the physical space that world tracking will localize to and track.
        ///
        /// If set, the session will attempt to localize to the provided map with
        /// a limited tracking state until localization is successful or run is called again
        /// with a different (or no) initial map specified. Once localized, the map will be extended
        /// and can again be saved using the `getCurrentWorldMap` method on the session.
        #[unsafe(method(initialWorldMap))]
        #[unsafe(method_family = none)]
        pub unsafe fn initialWorldMap(&self) -> Option<Retained<ARWorldMap>>;

        #[cfg(feature = "ARWorldMap")]
        /// Setter for [`initialWorldMap`][Self::initialWorldMap].
        #[unsafe(method(setInitialWorldMap:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setInitialWorldMap(&self, initial_world_map: Option<&ARWorldMap>);

        #[cfg(all(feature = "ARReferenceImage", feature = "objc2-foundation"))]
        /// Images to detect in the scene.
        ///
        /// If set the session will attempt to detect the specified images. When an image is detected an ARImageAnchor will be added to the session.
        #[unsafe(method(detectionImages))]
        #[unsafe(method_family = none)]
        pub unsafe fn detectionImages(&self) -> Retained<NSSet<ARReferenceImage>>;

        #[cfg(all(feature = "ARReferenceImage", feature = "objc2-foundation"))]
        /// Setter for [`detectionImages`][Self::detectionImages].
        #[unsafe(method(setDetectionImages:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setDetectionImages(&self, detection_images: Option<&NSSet<ARReferenceImage>>);

        /// Enables the estimation of a scale factor which may be used to correct the physical size of an image.
        ///
        /// If set to true ARKit will attempt to use the computed camera positions in order to compute the scale by which the given physical size
        /// differs from the estimated one. The information about the estimated scale can be found as the property estimatedScaleFactor on the ARImageAnchor.
        ///
        /// Note: When set to true the transform of a returned ARImageAnchor will use the estimated scale factor to correct the translation. Default value is NO.
        #[unsafe(method(automaticImageScaleEstimationEnabled))]
        #[unsafe(method_family = none)]
        pub unsafe fn automaticImageScaleEstimationEnabled(&self) -> bool;

        /// Setter for [`automaticImageScaleEstimationEnabled`][Self::automaticImageScaleEstimationEnabled].
        #[unsafe(method(setAutomaticImageScaleEstimationEnabled:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setAutomaticImageScaleEstimationEnabled(
            &self,
            automatic_image_scale_estimation_enabled: bool,
        );

        /// Maximum number of images to track simultaneously.
        ///
        /// Setting the maximum number of tracked images will limit the number of images that can be tracked in a given frame.
        /// If more than the maximum is visible, only the images already being tracked will continue to track until tracking is lost or another image is removed.
        /// Images will continue to be detected regardless of images tracked. Default value is zero.
        #[unsafe(method(maximumNumberOfTrackedImages))]
        #[unsafe(method_family = none)]
        pub unsafe fn maximumNumberOfTrackedImages(&self) -> NSInteger;

        /// Setter for [`maximumNumberOfTrackedImages`][Self::maximumNumberOfTrackedImages].
        #[unsafe(method(setMaximumNumberOfTrackedImages:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setMaximumNumberOfTrackedImages(
            &self,
            maximum_number_of_tracked_images: NSInteger,
        );

        #[cfg(all(feature = "ARReferenceObject", feature = "objc2-foundation"))]
        /// Objects to detect in the scene.
        ///
        /// If set the session will attempt to detect the specified objects. When an object is detected an ARObjectAnchor will be added to the session.
        #[unsafe(method(detectionObjects))]
        #[unsafe(method_family = none)]
        pub unsafe fn detectionObjects(&self) -> Retained<NSSet<ARReferenceObject>>;

        #[cfg(all(feature = "ARReferenceObject", feature = "objc2-foundation"))]
        /// Setter for [`detectionObjects`][Self::detectionObjects].
        ///
        /// NOTE(review): unlike `setDetectionImages:`, this setter takes a
        /// non-optional set in the generated signature.
        #[unsafe(method(setDetectionObjects:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setDetectionObjects(&self, detection_objects: &NSSet<ARReferenceObject>);

        /// Enable/disable a collaborative session. Disabled by default.
        ///
        /// When enabled, ARSession will output collaboration data for other participants using its delegate didOutputCollaborationData.
        /// It is the responsibility of the caller to send the data to each participant. When data is received by a participant, it
        /// should be passed to the ARSession by calling updateWithCollaborationData.
        #[unsafe(method(isCollaborationEnabled))]
        #[unsafe(method_family = none)]
        pub unsafe fn isCollaborationEnabled(&self) -> bool;

        /// Setter for [`isCollaborationEnabled`][Self::isCollaborationEnabled].
        #[unsafe(method(setCollaborationEnabled:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setCollaborationEnabled(&self, collaboration_enabled: bool);

        /// Indicates whether user face tracking using the front facing camera can be enabled on this device.
        #[unsafe(method(supportsUserFaceTracking))]
        #[unsafe(method_family = none)]
        pub unsafe fn supportsUserFaceTracking() -> bool;

        /// Enable or disable running Face Tracking using the front facing camera. Disabled by default.
        /// When enabled, ARSession detects faces (if visible in the front-facing camera image) and adds to its list of anchors,
        /// an ARFaceAnchor object representing each face.
        ///
        /// The transform of the ARFaceAnchor objects will be in the world coordinate space.
        ///
        /// See: ARFaceAnchor
        #[unsafe(method(userFaceTrackingEnabled))]
        #[unsafe(method_family = none)]
        pub unsafe fn userFaceTrackingEnabled(&self) -> bool;

        /// Setter for [`userFaceTrackingEnabled`][Self::userFaceTrackingEnabled].
        #[unsafe(method(setUserFaceTrackingEnabled:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setUserFaceTrackingEnabled(&self, user_face_tracking_enabled: bool);

        /// Enable or disable app clip code tracking. Disabled by default. When enabled, detected app clip codes will be surfaced as an ARAppClipCodeAnchor.
        #[unsafe(method(appClipCodeTrackingEnabled))]
        #[unsafe(method_family = none)]
        pub unsafe fn appClipCodeTrackingEnabled(&self) -> bool;

        /// Setter for [`appClipCodeTrackingEnabled`][Self::appClipCodeTrackingEnabled].
        #[unsafe(method(setAppClipCodeTrackingEnabled:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setAppClipCodeTrackingEnabled(&self, app_clip_code_tracking_enabled: bool);

        /// Indicates whether app clip code tracking can be enabled on this device.
        #[unsafe(method(supportsAppClipCodeTracking))]
        #[unsafe(method_family = none)]
        pub unsafe fn supportsAppClipCodeTracking() -> bool;

        /// Indicates whether the scene reconstruction type is supported for the configuration on this device.
        #[unsafe(method(supportsSceneReconstruction:))]
        #[unsafe(method_family = none)]
        pub unsafe fn supportsSceneReconstruction(
            scene_reconstruction: ARSceneReconstruction,
        ) -> bool;

        /// Type of scene reconstruction to run. Defaults to ARSceneReconstructionNone.
        ///
        /// See: ARMeshAnchor
        ///
        /// If set to a value other than ARSceneReconstructionNone, output of scene reconstruction will be added to the session as
        /// ARMeshAnchor objects.
        #[unsafe(method(sceneReconstruction))]
        #[unsafe(method_family = none)]
        pub unsafe fn sceneReconstruction(&self) -> ARSceneReconstruction;

        /// Setter for [`sceneReconstruction`][Self::sceneReconstruction].
        #[unsafe(method(setSceneReconstruction:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setSceneReconstruction(&self, scene_reconstruction: ARSceneReconstruction);

        #[unsafe(method(init))]
        #[unsafe(method_family = init)]
        pub unsafe fn init(this: Allocated<Self>) -> Retained<Self>;

        #[unsafe(method(new))]
        #[unsafe(method_family = new)]
        pub unsafe fn new() -> Retained<Self>;
    );
}
602
#[cfg(feature = "objc2")]
extern_class!(
    /// A configuration for running orientation tracking.
    ///
    /// Orientation tracking provides 3 degrees of freedom tracking of the device.
    ///
    /// Subclass of [`ARConfiguration`].
    ///
    /// See also [Apple's documentation](https://developer.apple.com/documentation/arkit/arorientationtrackingconfiguration?language=objc)
    #[unsafe(super(ARConfiguration, NSObject))]
    #[derive(Debug, PartialEq, Eq, Hash)]
    #[cfg(feature = "objc2")]
    pub struct AROrientationTrackingConfiguration;
);
616
// Protocol conformances inherited from ARConfiguration: NSCopying and
// NSObjectProtocol, with copies resolving to the same concrete class.
#[cfg(all(feature = "objc2", feature = "objc2-foundation"))]
extern_conformance!(
    unsafe impl NSCopying for AROrientationTrackingConfiguration {}
);

#[cfg(all(feature = "objc2", feature = "objc2-foundation"))]
unsafe impl CopyingHelper for AROrientationTrackingConfiguration {
    type Result = Self;
}

#[cfg(feature = "objc2")]
extern_conformance!(
    unsafe impl NSObjectProtocol for AROrientationTrackingConfiguration {}
);
631
// Method bindings for AROrientationTrackingConfiguration; only the auto-focus
// accessors plus the standard init/new constructors are declared here.
#[cfg(feature = "objc2")]
impl AROrientationTrackingConfiguration {
    extern_methods!(
        /// Enable or disable continuous auto focus.
        ///
        /// Enabled by default.
        #[unsafe(method(isAutoFocusEnabled))]
        #[unsafe(method_family = none)]
        pub unsafe fn isAutoFocusEnabled(&self) -> bool;

        /// Setter for [`isAutoFocusEnabled`][Self::isAutoFocusEnabled].
        #[unsafe(method(setAutoFocusEnabled:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setAutoFocusEnabled(&self, auto_focus_enabled: bool);

        #[unsafe(method(init))]
        #[unsafe(method_family = init)]
        pub unsafe fn init(this: Allocated<Self>) -> Retained<Self>;

        #[unsafe(method(new))]
        #[unsafe(method_family = new)]
        pub unsafe fn new() -> Retained<Self>;
    );
}
656
#[cfg(feature = "objc2")]
extern_class!(
    /// A configuration for running face tracking.
    ///
    /// Face tracking uses the front facing camera to track the face in 3D providing details on the topology and expression of the face.
    /// A detected face will be added to the session as an ARFaceAnchor object which contains information about head pose, mesh, eye pose, and blend shape
    /// coefficients. If light estimation is enabled the detected face will be treated as a light probe and used to estimate the direction of incoming light.
    ///
    /// Subclass of [`ARConfiguration`].
    ///
    /// See also [Apple's documentation](https://developer.apple.com/documentation/arkit/arfacetrackingconfiguration?language=objc)
    #[unsafe(super(ARConfiguration, NSObject))]
    #[derive(Debug, PartialEq, Eq, Hash)]
    #[cfg(feature = "objc2")]
    pub struct ARFaceTrackingConfiguration;
);
672
// Protocol conformances inherited from ARConfiguration: NSCopying and
// NSObjectProtocol, with copies resolving to the same concrete class.
#[cfg(all(feature = "objc2", feature = "objc2-foundation"))]
extern_conformance!(
    unsafe impl NSCopying for ARFaceTrackingConfiguration {}
);

#[cfg(all(feature = "objc2", feature = "objc2-foundation"))]
unsafe impl CopyingHelper for ARFaceTrackingConfiguration {
    type Result = Self;
}

#[cfg(feature = "objc2")]
extern_conformance!(
    unsafe impl NSObjectProtocol for ARFaceTrackingConfiguration {}
);
687
688#[cfg(feature = "objc2")]
689impl ARFaceTrackingConfiguration {
690    extern_methods!(
691        /// Maximum number of faces which can be tracked simultaneously.
692        #[unsafe(method(supportedNumberOfTrackedFaces))]
693        #[unsafe(method_family = none)]
694        pub unsafe fn supportedNumberOfTrackedFaces() -> NSInteger;
695
696        /// Maximum number of faces to track simultaneously.
697        ///
698        /// Setting the maximum number of tracked faces will limit the number of faces that can be tracked in a given frame.
699        /// If more than the maximum is visible, only the faces already being tracked will continue to track until tracking is lost or another face is removed.
700        /// Default value is one.
701        #[unsafe(method(maximumNumberOfTrackedFaces))]
702        #[unsafe(method_family = none)]
703        pub unsafe fn maximumNumberOfTrackedFaces(&self) -> NSInteger;
704
705        /// Setter for [`maximumNumberOfTrackedFaces`][Self::maximumNumberOfTrackedFaces].
706        #[unsafe(method(setMaximumNumberOfTrackedFaces:))]
707        #[unsafe(method_family = none)]
708        pub unsafe fn setMaximumNumberOfTrackedFaces(
709            &self,
710            maximum_number_of_tracked_faces: NSInteger,
711        );
712
713        /// Indicates whether world tracking can be enabled on this device.
714        #[unsafe(method(supportsWorldTracking))]
715        #[unsafe(method_family = none)]
716        pub unsafe fn supportsWorldTracking() -> bool;
717
718        /// Enable or disable World Tracking. Disabled by default.
719        ///
720        ///
721        /// When enabled, ARSession uses the back facing camera to track the device's orientation and position in the world. The camera transform and the ARFaceAnchor transform will be in the world coordinate space.
722        #[unsafe(method(isWorldTrackingEnabled))]
723        #[unsafe(method_family = none)]
724        pub unsafe fn isWorldTrackingEnabled(&self) -> bool;
725
726        /// Setter for [`isWorldTrackingEnabled`][Self::isWorldTrackingEnabled].
727        #[unsafe(method(setWorldTrackingEnabled:))]
728        #[unsafe(method_family = none)]
729        pub unsafe fn setWorldTrackingEnabled(&self, world_tracking_enabled: bool);
730
731        #[unsafe(method(init))]
732        #[unsafe(method_family = init)]
733        pub unsafe fn init(this: Allocated<Self>) -> Retained<Self>;
734
735        #[unsafe(method(new))]
736        #[unsafe(method_family = new)]
737        pub unsafe fn new() -> Retained<Self>;
738    );
739}
740
// Bridged Objective-C class declaration; superclass chain is
// ARConfiguration -> NSObject (see the `#[unsafe(super(...))]` attribute).
#[cfg(feature = "objc2")]
extern_class!(
    /// A configuration for running image tracking.
    ///
    ///
    /// Image tracking provides 6 degrees of freedom tracking of known images. Four images may be tracked simultaneously.
    ///
    /// See also [Apple's documentation](https://developer.apple.com/documentation/arkit/arimagetrackingconfiguration?language=objc)
    #[unsafe(super(ARConfiguration, NSObject))]
    #[derive(Debug, PartialEq, Eq, Hash)]
    #[cfg(feature = "objc2")]
    pub struct ARImageTrackingConfiguration;
);

#[cfg(all(feature = "objc2", feature = "objc2-foundation"))]
extern_conformance!(
    unsafe impl NSCopying for ARImageTrackingConfiguration {}
);

// Copying this configuration yields the same Rust type (`Result = Self`).
#[cfg(all(feature = "objc2", feature = "objc2-foundation"))]
unsafe impl CopyingHelper for ARImageTrackingConfiguration {
    type Result = Self;
}

#[cfg(feature = "objc2")]
extern_conformance!(
    unsafe impl NSObjectProtocol for ARImageTrackingConfiguration {}
);

#[cfg(feature = "objc2")]
impl ARImageTrackingConfiguration {
    extern_methods!(
        /// Enable or disable continuous auto focus.
        ///
        /// Enabled by default.
        #[unsafe(method(isAutoFocusEnabled))]
        #[unsafe(method_family = none)]
        pub unsafe fn isAutoFocusEnabled(&self) -> bool;

        /// Setter for [`isAutoFocusEnabled`][Self::isAutoFocusEnabled].
        #[unsafe(method(setAutoFocusEnabled:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setAutoFocusEnabled(&self, auto_focus_enabled: bool);

        #[cfg(all(feature = "ARReferenceImage", feature = "objc2-foundation"))]
        /// Images to track in the scene.
        // NOTE(review): unlike `detectionImages` on ARGeoTrackingConfiguration,
        // this setter takes a non-optional set, mirroring the header's nullability.
        #[unsafe(method(trackingImages))]
        #[unsafe(method_family = none)]
        pub unsafe fn trackingImages(&self) -> Retained<NSSet<ARReferenceImage>>;

        #[cfg(all(feature = "ARReferenceImage", feature = "objc2-foundation"))]
        /// Setter for [`trackingImages`][Self::trackingImages].
        #[unsafe(method(setTrackingImages:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setTrackingImages(&self, tracking_images: &NSSet<ARReferenceImage>);

        /// Maximum number of images to track simultaneously.
        ///
        /// Setting the maximum number of tracked images will limit the number of images that can be tracked in a given frame.
        /// If more than the maximum is visible, only the images already being tracked will continue to track until tracking is lost or another image is removed.
        /// Default value is one.
        #[unsafe(method(maximumNumberOfTrackedImages))]
        #[unsafe(method_family = none)]
        pub unsafe fn maximumNumberOfTrackedImages(&self) -> NSInteger;

        /// Setter for [`maximumNumberOfTrackedImages`][Self::maximumNumberOfTrackedImages].
        #[unsafe(method(setMaximumNumberOfTrackedImages:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setMaximumNumberOfTrackedImages(
            &self,
            maximum_number_of_tracked_images: NSInteger,
        );

        /// Standard Objective-C `init` on an already-allocated instance.
        #[unsafe(method(init))]
        #[unsafe(method_family = init)]
        pub unsafe fn init(this: Allocated<Self>) -> Retained<Self>;

        /// Convenience `+new` (alloc + init) constructor.
        #[unsafe(method(new))]
        #[unsafe(method_family = new)]
        pub unsafe fn new() -> Retained<Self>;
    );
}
823
// Bridged Objective-C class declaration; superclass chain is
// ARConfiguration -> NSObject (see the `#[unsafe(super(...))]` attribute).
#[cfg(feature = "objc2")]
extern_class!(
    /// A configuration for scanning objects.
    ///
    ///
    /// The object scanning configuration runs world tracking, capturing additional detail in order to create reference objects.
    /// Running object scanning will consume additional power in order to provide more detailed features.
    /// The createReferenceObject method can be called on the session to capture a scan of an object in the world.
    ///
    /// See also [Apple's documentation](https://developer.apple.com/documentation/arkit/arobjectscanningconfiguration?language=objc)
    #[unsafe(super(ARConfiguration, NSObject))]
    #[derive(Debug, PartialEq, Eq, Hash)]
    #[cfg(feature = "objc2")]
    pub struct ARObjectScanningConfiguration;
);

#[cfg(all(feature = "objc2", feature = "objc2-foundation"))]
extern_conformance!(
    unsafe impl NSCopying for ARObjectScanningConfiguration {}
);

// Copying this configuration yields the same Rust type (`Result = Self`).
#[cfg(all(feature = "objc2", feature = "objc2-foundation"))]
unsafe impl CopyingHelper for ARObjectScanningConfiguration {
    type Result = Self;
}

#[cfg(feature = "objc2")]
extern_conformance!(
    unsafe impl NSObjectProtocol for ARObjectScanningConfiguration {}
);

#[cfg(feature = "objc2")]
impl ARObjectScanningConfiguration {
    extern_methods!(
        /// Enable or disable continuous auto focus.
        ///
        /// Enabled by default.
        #[unsafe(method(isAutoFocusEnabled))]
        #[unsafe(method_family = none)]
        pub unsafe fn isAutoFocusEnabled(&self) -> bool;

        /// Setter for [`isAutoFocusEnabled`][Self::isAutoFocusEnabled].
        #[unsafe(method(setAutoFocusEnabled:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setAutoFocusEnabled(&self, auto_focus_enabled: bool);

        #[cfg(feature = "ARPlaneDetectionTypes")]
        /// Type of planes to detect in the scene.
        ///
        /// If set, new planes will continue to be detected and updated over time. Detected planes will be added to the session as
        /// ARPlaneAnchor objects. In the event that two planes are merged, the newer plane will be removed. Defaults to ARPlaneDetectionNone.
        #[unsafe(method(planeDetection))]
        #[unsafe(method_family = none)]
        pub unsafe fn planeDetection(&self) -> ARPlaneDetection;

        #[cfg(feature = "ARPlaneDetectionTypes")]
        /// Setter for [`planeDetection`][Self::planeDetection].
        #[unsafe(method(setPlaneDetection:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setPlaneDetection(&self, plane_detection: ARPlaneDetection);

        /// Standard Objective-C `init` on an already-allocated instance.
        #[unsafe(method(init))]
        #[unsafe(method_family = init)]
        pub unsafe fn init(this: Allocated<Self>) -> Retained<Self>;

        /// Convenience `+new` (alloc + init) constructor.
        #[unsafe(method(new))]
        #[unsafe(method_family = new)]
        pub unsafe fn new() -> Retained<Self>;
    );
}
894
// Bridged Objective-C class declaration; superclass chain is
// ARConfiguration -> NSObject (see the `#[unsafe(super(...))]` attribute).
#[cfg(feature = "objc2")]
extern_class!(
    /// A configuration for running body tracking.
    ///
    ///
    /// Body tracking provides 6 degrees of freedom tracking of a detected body in the scene. By default, ARFrameSemanticBodyDetection will be
    /// enabled.
    ///
    /// See: ARBodyAnchor
    ///
    /// See: -[ARFrame detectedBody]
    ///
    /// See also [Apple's documentation](https://developer.apple.com/documentation/arkit/arbodytrackingconfiguration?language=objc)
    #[unsafe(super(ARConfiguration, NSObject))]
    #[derive(Debug, PartialEq, Eq, Hash)]
    #[cfg(feature = "objc2")]
    pub struct ARBodyTrackingConfiguration;
);

#[cfg(all(feature = "objc2", feature = "objc2-foundation"))]
extern_conformance!(
    unsafe impl NSCopying for ARBodyTrackingConfiguration {}
);

// Copying this configuration yields the same Rust type (`Result = Self`).
#[cfg(all(feature = "objc2", feature = "objc2-foundation"))]
unsafe impl CopyingHelper for ARBodyTrackingConfiguration {
    type Result = Self;
}

#[cfg(feature = "objc2")]
extern_conformance!(
    unsafe impl NSObjectProtocol for ARBodyTrackingConfiguration {}
);

#[cfg(feature = "objc2")]
impl ARBodyTrackingConfiguration {
    extern_methods!(
        /// Enable or disable continuous auto focus.
        ///
        /// Enabled by default.
        #[unsafe(method(isAutoFocusEnabled))]
        #[unsafe(method_family = none)]
        pub unsafe fn isAutoFocusEnabled(&self) -> bool;

        /// Setter for [`isAutoFocusEnabled`][Self::isAutoFocusEnabled].
        #[unsafe(method(setAutoFocusEnabled:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setAutoFocusEnabled(&self, auto_focus_enabled: bool);

        #[cfg(feature = "ARWorldMap")]
        /// The initial map of the physical space that world tracking will localize to and track.
        ///
        /// If set, the session will attempt to localize to the provided map with
        /// a limited tracking state until localization is successful or run is called again
        /// with a different (or no) initial map specified. Once localized, the map will be extended
        /// and can again be saved using the `getCurrentWorldMap` method on the session.
        // Nullable property: getter returns `Option`, setter accepts `Option`.
        #[unsafe(method(initialWorldMap))]
        #[unsafe(method_family = none)]
        pub unsafe fn initialWorldMap(&self) -> Option<Retained<ARWorldMap>>;

        #[cfg(feature = "ARWorldMap")]
        /// Setter for [`initialWorldMap`][Self::initialWorldMap].
        #[unsafe(method(setInitialWorldMap:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setInitialWorldMap(&self, initial_world_map: Option<&ARWorldMap>);

        /// The mode of environment texturing to run.
        ///
        /// If set, texture information will be accumulated and updated. Adding an AREnvironmentProbeAnchor to the session
        /// will get the current environment texture available from that probe's perspective which can be used for lighting
        /// virtual objects in the scene. Defaults to AREnvironmentTexturingNone.
        #[unsafe(method(environmentTexturing))]
        #[unsafe(method_family = none)]
        pub unsafe fn environmentTexturing(&self) -> AREnvironmentTexturing;

        /// Setter for [`environmentTexturing`][Self::environmentTexturing].
        #[unsafe(method(setEnvironmentTexturing:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setEnvironmentTexturing(&self, environment_texturing: AREnvironmentTexturing);

        /// Determines whether environment textures will be provided with high dynamic range. Enabled by default.
        #[unsafe(method(wantsHDREnvironmentTextures))]
        #[unsafe(method_family = none)]
        pub unsafe fn wantsHDREnvironmentTextures(&self) -> bool;

        /// Setter for [`wantsHDREnvironmentTextures`][Self::wantsHDREnvironmentTextures].
        #[unsafe(method(setWantsHDREnvironmentTextures:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setWantsHDREnvironmentTextures(&self, wants_hdr_environment_textures: bool);

        #[cfg(feature = "ARPlaneDetectionTypes")]
        /// Type of planes to detect in the scene.
        ///
        /// If set, new planes will continue to be detected and updated over time. Detected planes will be added to the session as
        /// ARPlaneAnchor objects. In the event that two planes are merged, the newer plane will be removed. Defaults to ARPlaneDetectionNone.
        #[unsafe(method(planeDetection))]
        #[unsafe(method_family = none)]
        pub unsafe fn planeDetection(&self) -> ARPlaneDetection;

        #[cfg(feature = "ARPlaneDetectionTypes")]
        /// Setter for [`planeDetection`][Self::planeDetection].
        #[unsafe(method(setPlaneDetection:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setPlaneDetection(&self, plane_detection: ARPlaneDetection);

        #[cfg(all(feature = "ARReferenceImage", feature = "objc2-foundation"))]
        /// Images to detect in the scene.
        ///
        /// If set the session will attempt to detect the specified images. When an image is detected an ARImageAnchor will be added to the session.
        #[unsafe(method(detectionImages))]
        #[unsafe(method_family = none)]
        pub unsafe fn detectionImages(&self) -> Retained<NSSet<ARReferenceImage>>;

        #[cfg(all(feature = "ARReferenceImage", feature = "objc2-foundation"))]
        /// Setter for [`detectionImages`][Self::detectionImages].
        #[unsafe(method(setDetectionImages:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setDetectionImages(&self, detection_images: &NSSet<ARReferenceImage>);

        /// Enables the estimation of a scale factor which may be used to correct the physical size of an image.
        ///
        /// If set to true ARKit will attempt to use the computed camera positions in order to compute the scale by which the given physical size
        /// differs from the estimated one. The information about the estimated scale can be found as the property estimatedScaleFactor on the ARImageAnchor.
        ///
        /// Note: When set to true the transform of a returned ARImageAnchor will use the estimated scale factor to correct the translation. Default value is NO.
        #[unsafe(method(automaticImageScaleEstimationEnabled))]
        #[unsafe(method_family = none)]
        pub unsafe fn automaticImageScaleEstimationEnabled(&self) -> bool;

        /// Setter for [`automaticImageScaleEstimationEnabled`][Self::automaticImageScaleEstimationEnabled].
        #[unsafe(method(setAutomaticImageScaleEstimationEnabled:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setAutomaticImageScaleEstimationEnabled(
            &self,
            automatic_image_scale_estimation_enabled: bool,
        );

        /// Enables the estimation of a scale factor which may be used to correct the physical size of a skeleton in 3D.
        ///
        /// If set to true ARKit will attempt to use the computed camera positions in order to compute the scale by which the given physical size
        /// differs from the default one. The information about the estimated scale can be found as the property estimatedScaleFactor on the ARBodyAnchor.
        ///
        /// Note: When set to true the transform of a returned ARBodyAnchor will use the estimated scale factor to correct the translation. Default value is NO.
        #[unsafe(method(automaticSkeletonScaleEstimationEnabled))]
        #[unsafe(method_family = none)]
        pub unsafe fn automaticSkeletonScaleEstimationEnabled(&self) -> bool;

        /// Setter for [`automaticSkeletonScaleEstimationEnabled`][Self::automaticSkeletonScaleEstimationEnabled].
        #[unsafe(method(setAutomaticSkeletonScaleEstimationEnabled:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setAutomaticSkeletonScaleEstimationEnabled(
            &self,
            automatic_skeleton_scale_estimation_enabled: bool,
        );

        /// Maximum number of images to track simultaneously.
        ///
        /// Setting the maximum number of tracked images will limit the number of images that can be tracked in a given frame.
        /// If more than the maximum is visible, only the images already being tracked will continue to track until tracking is lost or another image is removed.
        /// Images will continue to be detected regardless of images tracked. Default value is zero.
        #[unsafe(method(maximumNumberOfTrackedImages))]
        #[unsafe(method_family = none)]
        pub unsafe fn maximumNumberOfTrackedImages(&self) -> NSInteger;

        /// Setter for [`maximumNumberOfTrackedImages`][Self::maximumNumberOfTrackedImages].
        #[unsafe(method(setMaximumNumberOfTrackedImages:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setMaximumNumberOfTrackedImages(
            &self,
            maximum_number_of_tracked_images: NSInteger,
        );

        /// Enable or disable app clip code tracking. Disabled by default. When enabled, detected app clip codes will be surfaced as an ARAppClipCodeAnchor.
        #[unsafe(method(appClipCodeTrackingEnabled))]
        #[unsafe(method_family = none)]
        pub unsafe fn appClipCodeTrackingEnabled(&self) -> bool;

        /// Setter for [`appClipCodeTrackingEnabled`][Self::appClipCodeTrackingEnabled].
        #[unsafe(method(setAppClipCodeTrackingEnabled:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setAppClipCodeTrackingEnabled(&self, app_clip_code_tracking_enabled: bool);

        /// Indicates whether app clip code tracking can be enabled on this device.
        // Class method (no receiver): queries a device-level capability.
        #[unsafe(method(supportsAppClipCodeTracking))]
        #[unsafe(method_family = none)]
        pub unsafe fn supportsAppClipCodeTracking() -> bool;

        /// Standard Objective-C `init` on an already-allocated instance.
        #[unsafe(method(init))]
        #[unsafe(method_family = init)]
        pub unsafe fn init(this: Allocated<Self>) -> Retained<Self>;

        /// Convenience `+new` (alloc + init) constructor.
        #[unsafe(method(new))]
        #[unsafe(method_family = new)]
        pub unsafe fn new() -> Retained<Self>;
    );
}
1091
// Bridged Objective-C class declaration; superclass chain is
// ARConfiguration -> NSObject (see the `#[unsafe(super(...))]` attribute).
#[cfg(feature = "objc2")]
extern_class!(
    /// A configuration for running positional tracking.
    ///
    ///
    /// Positional tracking provides 6 degrees of freedom tracking of the device by running the camera at lowest possible resolution and frame rate.
    ///
    /// See also [Apple's documentation](https://developer.apple.com/documentation/arkit/arpositionaltrackingconfiguration?language=objc)
    #[unsafe(super(ARConfiguration, NSObject))]
    #[derive(Debug, PartialEq, Eq, Hash)]
    #[cfg(feature = "objc2")]
    pub struct ARPositionalTrackingConfiguration;
);

#[cfg(all(feature = "objc2", feature = "objc2-foundation"))]
extern_conformance!(
    unsafe impl NSCopying for ARPositionalTrackingConfiguration {}
);

// Copying this configuration yields the same Rust type (`Result = Self`).
#[cfg(all(feature = "objc2", feature = "objc2-foundation"))]
unsafe impl CopyingHelper for ARPositionalTrackingConfiguration {
    type Result = Self;
}

#[cfg(feature = "objc2")]
extern_conformance!(
    unsafe impl NSObjectProtocol for ARPositionalTrackingConfiguration {}
);

#[cfg(feature = "objc2")]
impl ARPositionalTrackingConfiguration {
    extern_methods!(
        #[cfg(feature = "ARPlaneDetectionTypes")]
        /// Type of planes to detect in the scene.
        ///
        /// If set, new planes will continue to be detected and updated over time. Detected planes will be added to the session as
        /// ARPlaneAnchor objects. In the event that two planes are merged, the newer plane will be removed. Defaults to ARPlaneDetectionNone.
        #[unsafe(method(planeDetection))]
        #[unsafe(method_family = none)]
        pub unsafe fn planeDetection(&self) -> ARPlaneDetection;

        #[cfg(feature = "ARPlaneDetectionTypes")]
        /// Setter for [`planeDetection`][Self::planeDetection].
        #[unsafe(method(setPlaneDetection:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setPlaneDetection(&self, plane_detection: ARPlaneDetection);

        #[cfg(feature = "ARWorldMap")]
        /// The initial map of the physical space that world tracking will localize to and track.
        ///
        /// If set, the session will attempt to localize to the provided map with
        /// a limited tracking state until localization is successful or run is called again
        /// with a different (or no) initial map specified. Once localized, the map will be extended
        /// and can again be saved using the `getCurrentWorldMap` method on the session.
        // Nullable property: getter returns `Option`, setter accepts `Option`.
        #[unsafe(method(initialWorldMap))]
        #[unsafe(method_family = none)]
        pub unsafe fn initialWorldMap(&self) -> Option<Retained<ARWorldMap>>;

        #[cfg(feature = "ARWorldMap")]
        /// Setter for [`initialWorldMap`][Self::initialWorldMap].
        #[unsafe(method(setInitialWorldMap:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setInitialWorldMap(&self, initial_world_map: Option<&ARWorldMap>);

        /// Standard Objective-C `init` on an already-allocated instance.
        #[unsafe(method(init))]
        #[unsafe(method_family = init)]
        pub unsafe fn init(this: Allocated<Self>) -> Retained<Self>;

        /// Convenience `+new` (alloc + init) constructor.
        #[unsafe(method(new))]
        #[unsafe(method_family = new)]
        pub unsafe fn new() -> Retained<Self>;
    );
}
1165
// Bridged Objective-C class declaration; superclass chain is
// ARConfiguration -> NSObject (see the `#[unsafe(super(...))]` attribute).
#[cfg(feature = "objc2")]
extern_class!(
    /// A configuration for running geographical world tracking.
    ///
    ///
    /// It allows placing geo-referenced anchors (ARGeoAnchor) in the scene by running world tracking with location and compass.
    ///
    /// See also [Apple's documentation](https://developer.apple.com/documentation/arkit/argeotrackingconfiguration?language=objc)
    #[unsafe(super(ARConfiguration, NSObject))]
    #[derive(Debug, PartialEq, Eq, Hash)]
    #[cfg(feature = "objc2")]
    pub struct ARGeoTrackingConfiguration;
);

#[cfg(all(feature = "objc2", feature = "objc2-foundation"))]
extern_conformance!(
    unsafe impl NSCopying for ARGeoTrackingConfiguration {}
);

// Copying this configuration yields the same Rust type (`Result = Self`).
#[cfg(all(feature = "objc2", feature = "objc2-foundation"))]
unsafe impl CopyingHelper for ARGeoTrackingConfiguration {
    type Result = Self;
}

#[cfg(feature = "objc2")]
extern_conformance!(
    unsafe impl NSObjectProtocol for ARGeoTrackingConfiguration {}
);
1194
1195#[cfg(feature = "objc2")]
1196impl ARGeoTrackingConfiguration {
1197    extern_methods!(
1198        /// Unavailable
1199        #[unsafe(method(worldAlignment))]
1200        #[unsafe(method_family = none)]
1201        pub unsafe fn worldAlignment(&self) -> ARWorldAlignment;
1202
1203        /// Setter for [`worldAlignment`][Self::worldAlignment].
1204        #[unsafe(method(setWorldAlignment:))]
1205        #[unsafe(method_family = none)]
1206        pub unsafe fn setWorldAlignment(&self, world_alignment: ARWorldAlignment);
1207
1208        /// The mode of environment texturing to run.
1209        ///
1210        /// If set, texture information will be accumulated and updated. Adding an AREnvironmentProbeAnchor to the session
1211        /// will get the current environment texture available from that probe's perspective which can be used for lighting
1212        /// virtual objects in the scene. Defaults to AREnvironmentTexturingNone.
1213        #[unsafe(method(environmentTexturing))]
1214        #[unsafe(method_family = none)]
1215        pub unsafe fn environmentTexturing(&self) -> AREnvironmentTexturing;
1216
1217        /// Setter for [`environmentTexturing`][Self::environmentTexturing].
1218        #[unsafe(method(setEnvironmentTexturing:))]
1219        #[unsafe(method_family = none)]
1220        pub unsafe fn setEnvironmentTexturing(&self, environment_texturing: AREnvironmentTexturing);
1221
1222        /// Determines whether environment textures will be provided with high dynamic range. Enabled by default.
1223        #[unsafe(method(wantsHDREnvironmentTextures))]
1224        #[unsafe(method_family = none)]
1225        pub unsafe fn wantsHDREnvironmentTextures(&self) -> bool;
1226
1227        /// Setter for [`wantsHDREnvironmentTextures`][Self::wantsHDREnvironmentTextures].
1228        #[unsafe(method(setWantsHDREnvironmentTextures:))]
1229        #[unsafe(method_family = none)]
1230        pub unsafe fn setWantsHDREnvironmentTextures(&self, wants_hdr_environment_textures: bool);
1231
1232        #[cfg(feature = "ARPlaneDetectionTypes")]
1233        /// Type of planes to detect in the scene.
1234        ///
1235        /// If set, new planes will continue to be detected and updated over time. Detected planes will be added to the session as
1236        /// ARPlaneAnchor objects. In the event that two planes are merged, the newer plane will be removed. Defaults to ARPlaneDetectionNone.
1237        #[unsafe(method(planeDetection))]
1238        #[unsafe(method_family = none)]
1239        pub unsafe fn planeDetection(&self) -> ARPlaneDetection;
1240
1241        #[cfg(feature = "ARPlaneDetectionTypes")]
1242        /// Setter for [`planeDetection`][Self::planeDetection].
1243        #[unsafe(method(setPlaneDetection:))]
1244        #[unsafe(method_family = none)]
1245        pub unsafe fn setPlaneDetection(&self, plane_detection: ARPlaneDetection);
1246
1247        #[cfg(all(feature = "ARReferenceImage", feature = "objc2-foundation"))]
1248        /// Images to detect in the scene.
1249        ///
1250        /// If set the session will attempt to detect the specified images. When an image is detected an ARImageAnchor will be added to the session.
1251        #[unsafe(method(detectionImages))]
1252        #[unsafe(method_family = none)]
1253        pub unsafe fn detectionImages(&self) -> Retained<NSSet<ARReferenceImage>>;
1254
1255        #[cfg(all(feature = "ARReferenceImage", feature = "objc2-foundation"))]
1256        /// Setter for [`detectionImages`][Self::detectionImages].
1257        #[unsafe(method(setDetectionImages:))]
1258        #[unsafe(method_family = none)]
1259        pub unsafe fn setDetectionImages(&self, detection_images: Option<&NSSet<ARReferenceImage>>);
1260
1261        /// Enables the estimation of a scale factor which may be used to correct the physical size of an image.
1262        ///
1263        /// If set to true ARKit will attempt to use the computed camera positions in order to compute the scale by which the given physical size
1264        /// differs from the estimated one. The information about the estimated scale can be found as the property estimatedScaleFactor on the ARImageAnchor.
1265        ///
1266        /// Note: When set to true the transform of a returned ARImageAnchor will use the estimated scale factor to correct the translation. Default value is NO.
1267        #[unsafe(method(automaticImageScaleEstimationEnabled))]
1268        #[unsafe(method_family = none)]
1269        pub unsafe fn automaticImageScaleEstimationEnabled(&self) -> bool;
1270
1271        /// Setter for [`automaticImageScaleEstimationEnabled`][Self::automaticImageScaleEstimationEnabled].
1272        #[unsafe(method(setAutomaticImageScaleEstimationEnabled:))]
1273        #[unsafe(method_family = none)]
1274        pub unsafe fn setAutomaticImageScaleEstimationEnabled(
1275            &self,
1276            automatic_image_scale_estimation_enabled: bool,
1277        );
1278
1279        /// Maximum number of images to track simultaneously.
1280        ///
1281        /// Setting the maximum number of tracked images will limit the number of images that can be tracked in a given frame.
1282        /// If more than the maximum is visible, only the images already being tracked will continue to track until tracking is lost or another image is removed.
1283        /// Images will continue to be detected regardless of images tracked. Default value is zero.
1284        #[unsafe(method(maximumNumberOfTrackedImages))]
1285        #[unsafe(method_family = none)]
1286        pub unsafe fn maximumNumberOfTrackedImages(&self) -> NSInteger;
1287
1288        /// Setter for [`maximumNumberOfTrackedImages`][Self::maximumNumberOfTrackedImages].
1289        #[unsafe(method(setMaximumNumberOfTrackedImages:))]
1290        #[unsafe(method_family = none)]
1291        pub unsafe fn setMaximumNumberOfTrackedImages(
1292            &self,
1293            maximum_number_of_tracked_images: NSInteger,
1294        );
1295
1296        #[cfg(all(feature = "ARReferenceObject", feature = "objc2-foundation"))]
1297        /// Objects to detect in the scene.
1298        ///
1299        /// If set the session will attempt to detect the specified objects. When an object is detected an ARObjectAnchor will be added to the session.
1300        #[unsafe(method(detectionObjects))]
1301        #[unsafe(method_family = none)]
1302        pub unsafe fn detectionObjects(&self) -> Retained<NSSet<ARReferenceObject>>;
1303
1304        #[cfg(all(feature = "ARReferenceObject", feature = "objc2-foundation"))]
1305        /// Setter for [`detectionObjects`][Self::detectionObjects].
1306        #[unsafe(method(setDetectionObjects:))]
1307        #[unsafe(method_family = none)]
1308        pub unsafe fn setDetectionObjects(&self, detection_objects: &NSSet<ARReferenceObject>);
1309
1310        /// Enable or disable app clip code tracking. Disabled by default. When enabled, detected app clip codes will be surfaced as an ARAppClipCodeAnchor.
1311        #[unsafe(method(appClipCodeTrackingEnabled))]
1312        #[unsafe(method_family = none)]
1313        pub unsafe fn appClipCodeTrackingEnabled(&self) -> bool;
1314
1315        /// Setter for [`appClipCodeTrackingEnabled`][Self::appClipCodeTrackingEnabled].
1316        #[unsafe(method(setAppClipCodeTrackingEnabled:))]
1317        #[unsafe(method_family = none)]
1318        pub unsafe fn setAppClipCodeTrackingEnabled(&self, app_clip_code_tracking_enabled: bool);
1319
        /// Indicates whether app clip code tracking can be enabled on this device.
        ///
        /// Class-level query (no `&self`): checks device capability rather than
        /// the state of a particular configuration instance.
        #[unsafe(method(supportsAppClipCodeTracking))]
        #[unsafe(method_family = none)]
        pub unsafe fn supportsAppClipCodeTracking() -> bool;
1324
        // Requires `block2` for the completion-handler block type and
        // `objc2-foundation` for `NSError`.
        #[cfg(all(feature = "block2", feature = "objc2-foundation"))]
        /// Determines the availability of geo tracking at the current location.
        ///
        ///
        /// This method will attempt to acquire a location fix on a background thread, then check availability.
        ///
        ///
        /// Parameter `completionHandler`: Completion handler that is called when availability has been determined. This handler is executed on an arbitrary serial queue. It takes the following parameters:
        /// isAvailable - True if geo tracking is available at the current location, otherwise false.
        /// error - An error that indicates why geo tracking is not available at the current location.
        ///
        /// NOTE(review): this is a class method (no `&self`); the error pointer
        /// handed to the block may be null when `isAvailable` is true — callers
        /// must null-check before dereferencing.
        #[unsafe(method(checkAvailabilityWithCompletionHandler:))]
        #[unsafe(method_family = none)]
        pub unsafe fn checkAvailabilityWithCompletionHandler(
            completion_handler: &block2::DynBlock<dyn Fn(Bool, *mut NSError)>,
        );
1340
        // Requires `objc2-core-location` in addition to `block2`/`objc2-foundation`
        // because the coordinate parameter is a `CLLocationCoordinate2D`.
        #[cfg(all(
            feature = "block2",
            feature = "objc2-core-location",
            feature = "objc2-foundation"
        ))]
        /// Determines the availability of geo tracking at the given location.
        ///
        /// Parameter `coordinate`: Location at which to check.
        ///
        /// Parameter `completionHandler`: Completion handler that is called when availability has been determined. This handler is executed on an arbitrary serial queue. It takes the following parameters:
        /// isAvailable - True if geo tracking is available at the given location, otherwise false.
        /// error - An error that indicates why geo tracking is not available at the given location.
        ///
        /// NOTE(review): class method (no `&self`). Unlike
        /// [`checkAvailabilityWithCompletionHandler`][Self::checkAvailabilityWithCompletionHandler],
        /// this variant checks an explicit coordinate instead of acquiring a
        /// location fix first.
        #[unsafe(method(checkAvailabilityAtCoordinate:completionHandler:))]
        #[unsafe(method_family = none)]
        pub unsafe fn checkAvailabilityAtCoordinate_completionHandler(
            coordinate: CLLocationCoordinate2D,
            completion_handler: &block2::DynBlock<dyn Fn(Bool, *mut NSError)>,
        );
1359
        /// Initializes an allocated instance; binds the Objective-C `init`
        /// selector with the `init` method family (consumes the allocation).
        #[unsafe(method(init))]
        #[unsafe(method_family = init)]
        pub unsafe fn init(this: Allocated<Self>) -> Retained<Self>;
1363
        /// Convenience constructor; binds the Objective-C `new` selector
        /// (equivalent to alloc + init) with the `new` method family.
        #[unsafe(method(new))]
        #[unsafe(method_family = new)]
        pub unsafe fn new() -> Retained<Self>;
1367    );
1368}