// objc2_ar_kit/generated/ARConfiguration.rs

1//! This file has been automatically generated by `objc2`'s `header-translator`.
2//! DO NOT EDIT
3use core::ffi::*;
4use core::ptr::NonNull;
5#[cfg(feature = "objc2")]
6use objc2::__framework_prelude::*;
7#[cfg(feature = "objc2-av-foundation")]
8use objc2_av_foundation::*;
9#[cfg(feature = "objc2-core-location")]
10use objc2_core_location::*;
11#[cfg(feature = "objc2-foundation")]
12use objc2_foundation::*;
13
14use crate::*;
15
/// Option set indicating semantic understanding types of the image frame.
///
/// See also [Apple's documentation](https://developer.apple.com/documentation/arkit/arframesemantics?language=objc)
// NS_OPTIONS
#[cfg(feature = "objc2")]
#[repr(transparent)]
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord)]
pub struct ARFrameSemantics(pub NSUInteger);
#[cfg(feature = "objc2")]
bitflags::bitflags! {
    impl ARFrameSemantics: NSUInteger {
        /// No semantic operation is run.
        #[doc(alias = "ARFrameSemanticNone")]
        const None = 0;
        /// Person segmentation.
        ///
        /// A pixel in the image frame that gets classified as person will have an intensity value equal to 'ARSegmentationClassPerson'.
        ///
        /// See: -[ARFrame segmentationBuffer]
        ///
        /// See: ARSegmentationClass
        #[doc(alias = "ARFrameSemanticPersonSegmentation")]
        const PersonSegmentation = 1<<0;
        /// Person segmentation with depth.
        ///
        /// A pixel in the image frame that gets classified as person will have an intensity value equal to 'ARSegmentationClassPerson'.
        /// Additionally, every pixel in the image frame that gets classified as person will also have a depth value.
        ///
        /// See: -[ARFrame estimatedDepthData]
        ///
        /// See: -[ARFrame segmentationBuffer]
        #[doc(alias = "ARFrameSemanticPersonSegmentationWithDepth")]
        // Composite flag: includes the PersonSegmentation bit (1<<0) in addition
        // to its own bit, mirroring the ObjC option-set definition.
        const PersonSegmentationWithDepth = (1<<1)|(1<<0);
        /// Body detection.
        ///
        /// Once activated an ARFrame will contain information about a detected body.
        ///
        /// See: -[ARFrame detectedBody]
        ///
        /// See: ARBody2D
        #[doc(alias = "ARFrameSemanticBodyDetection")]
        const BodyDetection = 1<<2;
        /// Scene Depth.
        ///
        /// Each capturedImage will have an associated scene depth data.
        ///
        /// See: - [ARFrame sceneDepth]
        #[doc(alias = "ARFrameSemanticSceneDepth")]
        const SceneDepth = 1<<3;
        /// Smoothed Scene Depth.
        ///
        /// Each capturedImage will have an associated scene depth data that is temporally smoothed.
        ///
        /// See: - [ARFrame smoothedSceneDepth]
        #[doc(alias = "ARFrameSemanticSmoothedSceneDepth")]
        const SmoothedSceneDepth = 1<<4;
    }
}

// SAFETY: ARFrameSemantics is a #[repr(transparent)] wrapper around NSUInteger,
// so its Objective-C type encoding is that of NSUInteger itself.
#[cfg(feature = "objc2")]
unsafe impl Encode for ARFrameSemantics {
    const ENCODING: Encoding = NSUInteger::ENCODING;
}

// SAFETY: A reference to ARFrameSemantics is encoded as a pointer to the value
// encoding defined above.
#[cfg(feature = "objc2")]
unsafe impl RefEncode for ARFrameSemantics {
    const ENCODING_REF: Encoding = Encoding::Pointer(&Self::ENCODING);
}
84
/// Enum constants for indicating the world alignment.
///
/// See also [Apple's documentation](https://developer.apple.com/documentation/arkit/arworldalignment?language=objc)
// NS_ENUM
// Modeled as a newtype over NSInteger (rather than a Rust enum) so that values
// outside the listed constants remain representable when crossing the FFI boundary.
#[cfg(feature = "objc2")]
#[repr(transparent)]
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord)]
pub struct ARWorldAlignment(pub NSInteger);
#[cfg(feature = "objc2")]
impl ARWorldAlignment {
    /// Aligns the world with gravity that is defined by vector (0, -1, 0).
    #[doc(alias = "ARWorldAlignmentGravity")]
    pub const Gravity: Self = Self(0);
    /// Aligns the world with gravity that is defined by the vector (0, -1, 0)
    /// and heading (w.r.t. True North) that is given by the vector (0, 0, -1).
    #[doc(alias = "ARWorldAlignmentGravityAndHeading")]
    pub const GravityAndHeading: Self = Self(1);
    /// Aligns the world with the camera’s orientation.
    #[doc(alias = "ARWorldAlignmentCamera")]
    pub const Camera: Self = Self(2);
}

// SAFETY: ARWorldAlignment is #[repr(transparent)] over NSInteger, so it shares
// NSInteger's Objective-C type encoding.
#[cfg(feature = "objc2")]
unsafe impl Encode for ARWorldAlignment {
    const ENCODING: Encoding = NSInteger::ENCODING;
}

// SAFETY: A reference to ARWorldAlignment is encoded as a pointer to the value
// encoding defined above.
#[cfg(feature = "objc2")]
unsafe impl RefEncode for ARWorldAlignment {
    const ENCODING_REF: Encoding = Encoding::Pointer(&Self::ENCODING);
}
116
/// Enum constants for indicating the mode of environment texturing to run.
///
/// See also [Apple's documentation](https://developer.apple.com/documentation/arkit/arenvironmenttexturing?language=objc)
// NS_ENUM
// Modeled as a newtype over NSInteger (rather than a Rust enum) so that values
// outside the listed constants remain representable when crossing the FFI boundary.
#[cfg(feature = "objc2")]
#[repr(transparent)]
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord)]
pub struct AREnvironmentTexturing(pub NSInteger);
#[cfg(feature = "objc2")]
impl AREnvironmentTexturing {
    /// No texture information is gathered.
    #[doc(alias = "AREnvironmentTexturingNone")]
    pub const None: Self = Self(0);
    /// Texture information is gathered for the environment.
    /// Environment textures will be generated for AREnvironmentProbes added to the session.
    #[doc(alias = "AREnvironmentTexturingManual")]
    pub const Manual: Self = Self(1);
    /// Texture information is gathered for the environment and probes automatically placed in the scene.
    #[doc(alias = "AREnvironmentTexturingAutomatic")]
    pub const Automatic: Self = Self(2);
}

// SAFETY: AREnvironmentTexturing is #[repr(transparent)] over NSInteger, so it
// shares NSInteger's Objective-C type encoding.
#[cfg(feature = "objc2")]
unsafe impl Encode for AREnvironmentTexturing {
    const ENCODING: Encoding = NSInteger::ENCODING;
}

// SAFETY: A reference to AREnvironmentTexturing is encoded as a pointer to the
// value encoding defined above.
#[cfg(feature = "objc2")]
unsafe impl RefEncode for AREnvironmentTexturing {
    const ENCODING_REF: Encoding = Encoding::Pointer(&Self::ENCODING);
}
148
/// Types of scene reconstruction.
///
/// See also [Apple's documentation](https://developer.apple.com/documentation/arkit/arscenereconstruction?language=objc)
// NS_OPTIONS
#[cfg(feature = "objc2")]
#[repr(transparent)]
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord)]
pub struct ARSceneReconstruction(pub NSUInteger);
#[cfg(feature = "objc2")]
bitflags::bitflags! {
    impl ARSceneReconstruction: NSUInteger {
        /// No scene reconstruction is run.
        #[doc(alias = "ARSceneReconstructionNone")]
        const None = 0;
        /// Scene reconstruction generates a mesh of the world
        #[doc(alias = "ARSceneReconstructionMesh")]
        const Mesh = 1<<0;
        /// Scene reconstruction generates a mesh of the world with classification for each face.
        #[doc(alias = "ARSceneReconstructionMeshWithClassification")]
        // Composite flag: includes the Mesh bit (1<<0) in addition to its own,
        // mirroring the ObjC option-set definition.
        const MeshWithClassification = (1<<1)|(1<<0);
    }
}

// SAFETY: ARSceneReconstruction is a #[repr(transparent)] wrapper around
// NSUInteger, so it shares NSUInteger's Objective-C type encoding.
#[cfg(feature = "objc2")]
unsafe impl Encode for ARSceneReconstruction {
    const ENCODING: Encoding = NSUInteger::ENCODING;
}

// SAFETY: A reference to ARSceneReconstruction is encoded as a pointer to the
// value encoding defined above.
#[cfg(feature = "objc2")]
unsafe impl RefEncode for ARSceneReconstruction {
    const ENCODING_REF: Encoding = Encoding::Pointer(&Self::ENCODING);
}
181
#[cfg(feature = "objc2")]
extern_class!(
    /// An object to describe and configure the Augmented Reality techniques to be used in an ARSession.
    ///
    /// See also [Apple's documentation](https://developer.apple.com/documentation/arkit/arconfiguration?language=objc)
    #[unsafe(super(NSObject))]
    #[derive(Debug, PartialEq, Eq, Hash)]
    #[cfg(feature = "objc2")]
    pub struct ARConfiguration;
);

// SAFETY: per the translated ARKit header, ARConfiguration conforms to
// NSCopying (`-copyWithZone:`).
#[cfg(all(feature = "objc2", feature = "objc2-foundation"))]
unsafe impl NSCopying for ARConfiguration {}

// Copying an ARConfiguration produces another ARConfiguration.
#[cfg(all(feature = "objc2", feature = "objc2-foundation"))]
unsafe impl CopyingHelper for ARConfiguration {
    type Result = Self;
}

// SAFETY: ARConfiguration inherits from NSObject (see `super(NSObject)` above)
// and therefore conforms to NSObjectProtocol.
#[cfg(feature = "objc2")]
unsafe impl NSObjectProtocol for ARConfiguration {}
203
// Auto-generated method bindings for the ARConfiguration base class. The
// selector in each `method(...)` attribute must match the Objective-C
// declaration exactly; do not edit by hand (see the file header).
#[cfg(feature = "objc2")]
impl ARConfiguration {
    extern_methods!(
        /// Determines whether this device supports the ARConfiguration.
        ///
        /// ObjC class method, exposed as an associated function (no `&self`).
        #[unsafe(method(isSupported))]
        #[unsafe(method_family = none)]
        pub unsafe fn isSupported() -> bool;

        #[cfg(all(feature = "ARVideoFormat", feature = "objc2-foundation"))]
        /// A list of supported video formats for this configuration and device.
        ///
        /// The first element in the list is the default format for session output.
        #[unsafe(method(supportedVideoFormats))]
        #[unsafe(method_family = none)]
        pub unsafe fn supportedVideoFormats() -> Retained<NSArray<ARVideoFormat>>;

        #[cfg(feature = "ARVideoFormat")]
        /// Video format of the session output.
        #[unsafe(method(videoFormat))]
        #[unsafe(method_family = none)]
        pub unsafe fn videoFormat(&self) -> Retained<ARVideoFormat>;

        #[cfg(feature = "ARVideoFormat")]
        /// Setter for [`videoFormat`][Self::videoFormat].
        #[unsafe(method(setVideoFormat:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setVideoFormat(&self, video_format: &ARVideoFormat);

        /// Determines how the coordinate system should be aligned with the world.
        ///
        /// The default is ARWorldAlignmentGravity.
        #[unsafe(method(worldAlignment))]
        #[unsafe(method_family = none)]
        pub unsafe fn worldAlignment(&self) -> ARWorldAlignment;

        /// Setter for [`worldAlignment`][Self::worldAlignment].
        #[unsafe(method(setWorldAlignment:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setWorldAlignment(&self, world_alignment: ARWorldAlignment);

        /// Enable or disable light estimation.
        ///
        /// Enabled by default.
        #[unsafe(method(isLightEstimationEnabled))]
        #[unsafe(method_family = none)]
        pub unsafe fn isLightEstimationEnabled(&self) -> bool;

        /// Setter for [`isLightEstimationEnabled`][Self::isLightEstimationEnabled].
        #[unsafe(method(setLightEstimationEnabled:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setLightEstimationEnabled(&self, light_estimation_enabled: bool);

        /// Determines whether to capture and provide audio data.
        ///
        /// Disabled by default.
        #[unsafe(method(providesAudioData))]
        #[unsafe(method_family = none)]
        pub unsafe fn providesAudioData(&self) -> bool;

        /// Setter for [`providesAudioData`][Self::providesAudioData].
        #[unsafe(method(setProvidesAudioData:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setProvidesAudioData(&self, provides_audio_data: bool);

        /// The type of semantic understanding to provide with each frame.
        ///
        ///
        /// Use the `supportsFrameSemantics` class method to check if the configuration type you intend to run supports the set of frame semantics. For example, when running a session with
        /// a configuration of type ARWorldTrackingConfiguration one would need to use `+[ ARWorldTrackingConfiguration supportsFrameSemantics:]` to perform said check.
        /// An exception is thrown if the option
        /// is not supported. Defaults to ARFrameSemanticNone.
        ///
        /// See: ARFrameSemantics
        ///
        /// See: +[ARConfiguration supportsFrameSemantics:]
        #[unsafe(method(frameSemantics))]
        #[unsafe(method_family = none)]
        pub unsafe fn frameSemantics(&self) -> ARFrameSemantics;

        /// Setter for [`frameSemantics`][Self::frameSemantics].
        #[unsafe(method(setFrameSemantics:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setFrameSemantics(&self, frame_semantics: ARFrameSemantics);

        /// Determines whether the type of frame semantics is supported by the device and ARConfiguration class.
        ///
        ///
        /// Semantic frame understanding is not supported on all devices. Use the `supportsFrameSemantics` class method to check if the configuration type you intend to run supports the
        /// set of frame semantics. For example, when running a session with a configuration of type ARWorldTrackingConfiguration one would need to use
        /// `+[ ARWorldTrackingConfiguration supportsFrameSemantics:]` to perform said check.
        ///
        /// See: ARFrameSemantics
        #[unsafe(method(supportsFrameSemantics:))]
        #[unsafe(method_family = none)]
        pub unsafe fn supportsFrameSemantics(frame_semantics: ARFrameSemantics) -> bool;

        #[cfg(feature = "objc2-av-foundation")]
        /// Returns a pointer to the capture device of the camera that's used for rendering, so developers can adjust capture settings.
        ///
        /// May return nil if it is not recommended to modify capture settings, for example if the primary camera is used for tracking.
        #[unsafe(method(configurableCaptureDeviceForPrimaryCamera))]
        #[unsafe(method_family = none)]
        pub unsafe fn configurableCaptureDeviceForPrimaryCamera(
        ) -> Option<Retained<AVCaptureDevice>>;

        #[cfg(feature = "ARVideoFormat")]
        /// Returns a video format using a 4K resolution from the list of supported video formats.
        ///
        /// May return nil if 4K is not supported for this configuration or device.
        #[unsafe(method(recommendedVideoFormatFor4KResolution))]
        #[unsafe(method_family = none)]
        pub unsafe fn recommendedVideoFormatFor4KResolution() -> Option<Retained<ARVideoFormat>>;

        #[cfg(feature = "ARVideoFormat")]
        /// Returns a recommended video format that supports capturing high resolution frames with a significantly higher resolution than the streaming camera resolution.
        ///
        /// Using this format may consume more power. Other video formats may support capturing high resolution frames as well, albeit at a lower quality or resolution.
        ///
        /// See: [ARSession captureHighResolutionFrameWithCompletion:]
        #[unsafe(method(recommendedVideoFormatForHighResolutionFrameCapturing))]
        #[unsafe(method_family = none)]
        pub unsafe fn recommendedVideoFormatForHighResolutionFrameCapturing(
        ) -> Option<Retained<ARVideoFormat>>;

        /// Whether HDR capturing is allowed if the current video format supports it. Defaults to
        /// `NO.`
        #[unsafe(method(videoHDRAllowed))]
        #[unsafe(method_family = none)]
        pub unsafe fn videoHDRAllowed(&self) -> bool;

        /// Setter for [`videoHDRAllowed`][Self::videoHDRAllowed].
        #[unsafe(method(setVideoHDRAllowed:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setVideoHDRAllowed(&self, video_hdr_allowed: bool);

        /// Unavailable
        ///
        /// NOTE(review): the header marks `-init` unavailable on the abstract
        /// base class; a concrete subclass configuration is presumably expected
        /// instead — confirm against the ARKit header.
        #[unsafe(method(init))]
        #[unsafe(method_family = init)]
        pub unsafe fn init(this: Allocated<Self>) -> Retained<Self>;

        #[unsafe(method(new))]
        #[unsafe(method_family = new)]
        pub unsafe fn new() -> Retained<Self>;
    );
}
349
#[cfg(feature = "objc2")]
extern_class!(
    /// A configuration for running world tracking.
    ///
    ///
    /// World tracking provides 6 degrees of freedom tracking of the device.
    /// By finding feature points in the scene, world tracking enables performing hit-tests against the frame.
    /// Tracking can no longer be resumed once the session is paused.
    ///
    /// See also [Apple's documentation](https://developer.apple.com/documentation/arkit/arworldtrackingconfiguration?language=objc)
    #[unsafe(super(ARConfiguration, NSObject))]
    #[derive(Debug, PartialEq, Eq, Hash)]
    #[cfg(feature = "objc2")]
    pub struct ARWorldTrackingConfiguration;
);

// SAFETY: inherited from ARConfiguration, which conforms to NSCopying.
#[cfg(all(feature = "objc2", feature = "objc2-foundation"))]
unsafe impl NSCopying for ARWorldTrackingConfiguration {}

// Copying an ARWorldTrackingConfiguration produces another ARWorldTrackingConfiguration.
#[cfg(all(feature = "objc2", feature = "objc2-foundation"))]
unsafe impl CopyingHelper for ARWorldTrackingConfiguration {
    type Result = Self;
}

// SAFETY: inherits from NSObject via ARConfiguration (see `super(...)` above).
#[cfg(feature = "objc2")]
unsafe impl NSObjectProtocol for ARWorldTrackingConfiguration {}
376
// Auto-generated method bindings for ARWorldTrackingConfiguration. The selector
// in each `method(...)` attribute must match the Objective-C declaration
// exactly; do not edit by hand (see the file header).
#[cfg(feature = "objc2")]
impl ARWorldTrackingConfiguration {
    extern_methods!(
        /// Enable or disable continuous auto focus.
        ///
        /// Enabled by default.
        #[unsafe(method(isAutoFocusEnabled))]
        #[unsafe(method_family = none)]
        pub unsafe fn isAutoFocusEnabled(&self) -> bool;

        /// Setter for [`isAutoFocusEnabled`][Self::isAutoFocusEnabled].
        #[unsafe(method(setAutoFocusEnabled:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setAutoFocusEnabled(&self, auto_focus_enabled: bool);

        /// The mode of environment texturing to run.
        ///
        /// If set, texture information will be accumulated and updated. Adding an AREnvironmentProbeAnchor to the session
        /// will get the current environment texture available from that probe's perspective which can be used for lighting
        /// virtual objects in the scene. Defaults to AREnvironmentTexturingNone.
        #[unsafe(method(environmentTexturing))]
        #[unsafe(method_family = none)]
        pub unsafe fn environmentTexturing(&self) -> AREnvironmentTexturing;

        /// Setter for [`environmentTexturing`][Self::environmentTexturing].
        #[unsafe(method(setEnvironmentTexturing:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setEnvironmentTexturing(&self, environment_texturing: AREnvironmentTexturing);

        /// Determines whether environment textures will be provided with high dynamic range. Enabled by default.
        #[unsafe(method(wantsHDREnvironmentTextures))]
        #[unsafe(method_family = none)]
        pub unsafe fn wantsHDREnvironmentTextures(&self) -> bool;

        /// Setter for [`wantsHDREnvironmentTextures`][Self::wantsHDREnvironmentTextures].
        #[unsafe(method(setWantsHDREnvironmentTextures:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setWantsHDREnvironmentTextures(&self, wants_hdr_environment_textures: bool);

        #[cfg(feature = "ARPlaneDetectionTypes")]
        /// Type of planes to detect in the scene.
        ///
        /// If set, new planes will continue to be detected and updated over time. Detected planes will be added to the session as
        /// ARPlaneAnchor objects. In the event that two planes are merged, the newer plane will be removed. Defaults to ARPlaneDetectionNone.
        #[unsafe(method(planeDetection))]
        #[unsafe(method_family = none)]
        pub unsafe fn planeDetection(&self) -> ARPlaneDetection;

        #[cfg(feature = "ARPlaneDetectionTypes")]
        /// Setter for [`planeDetection`][Self::planeDetection].
        #[unsafe(method(setPlaneDetection:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setPlaneDetection(&self, plane_detection: ARPlaneDetection);

        #[cfg(feature = "ARWorldMap")]
        /// The initial map of the physical space that world tracking will localize to and track.
        ///
        /// If set, the session will attempt to localize to the provided map with
        /// a limited tracking state until localization is successful or run is called again
        /// with a different (or no) initial map specified. Once localized, the map will be extended
        /// and can again be saved using the `getCurrentWorldMap` method on the session.
        #[unsafe(method(initialWorldMap))]
        #[unsafe(method_family = none)]
        pub unsafe fn initialWorldMap(&self) -> Option<Retained<ARWorldMap>>;

        #[cfg(feature = "ARWorldMap")]
        /// Setter for [`initialWorldMap`][Self::initialWorldMap].
        #[unsafe(method(setInitialWorldMap:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setInitialWorldMap(&self, initial_world_map: Option<&ARWorldMap>);

        #[cfg(all(feature = "ARReferenceImage", feature = "objc2-foundation"))]
        /// Images to detect in the scene.
        ///
        /// If set the session will attempt to detect the specified images. When an image is detected an ARImageAnchor will be added to the session.
        #[unsafe(method(detectionImages))]
        #[unsafe(method_family = none)]
        pub unsafe fn detectionImages(&self) -> Retained<NSSet<ARReferenceImage>>;

        #[cfg(all(feature = "ARReferenceImage", feature = "objc2-foundation"))]
        /// Setter for [`detectionImages`][Self::detectionImages].
        #[unsafe(method(setDetectionImages:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setDetectionImages(&self, detection_images: Option<&NSSet<ARReferenceImage>>);

        /// Enables the estimation of a scale factor which may be used to correct the physical size of an image.
        ///
        /// If set to true ARKit will attempt to use the computed camera positions in order to compute the scale by which the given physical size
        /// differs from the estimated one. The information about the estimated scale can be found as the property estimatedScaleFactor on the ARImageAnchor.
        ///
        /// Note: When set to true the transform of a returned ARImageAnchor will use the estimated scale factor to correct the translation. Default value is NO.
        #[unsafe(method(automaticImageScaleEstimationEnabled))]
        #[unsafe(method_family = none)]
        pub unsafe fn automaticImageScaleEstimationEnabled(&self) -> bool;

        /// Setter for [`automaticImageScaleEstimationEnabled`][Self::automaticImageScaleEstimationEnabled].
        #[unsafe(method(setAutomaticImageScaleEstimationEnabled:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setAutomaticImageScaleEstimationEnabled(
            &self,
            automatic_image_scale_estimation_enabled: bool,
        );

        /// Maximum number of images to track simultaneously.
        ///
        /// Setting the maximum number of tracked images will limit the number of images that can be tracked in a given frame.
        /// If more than the maximum is visible, only the images already being tracked will continue to track until tracking is lost or another image is removed.
        /// Images will continue to be detected regardless of images tracked. Default value is zero.
        #[unsafe(method(maximumNumberOfTrackedImages))]
        #[unsafe(method_family = none)]
        pub unsafe fn maximumNumberOfTrackedImages(&self) -> NSInteger;

        /// Setter for [`maximumNumberOfTrackedImages`][Self::maximumNumberOfTrackedImages].
        #[unsafe(method(setMaximumNumberOfTrackedImages:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setMaximumNumberOfTrackedImages(
            &self,
            maximum_number_of_tracked_images: NSInteger,
        );

        #[cfg(all(feature = "ARReferenceObject", feature = "objc2-foundation"))]
        /// Objects to detect in the scene.
        ///
        /// If set the session will attempt to detect the specified objects. When an object is detected an ARObjectAnchor will be added to the session.
        #[unsafe(method(detectionObjects))]
        #[unsafe(method_family = none)]
        pub unsafe fn detectionObjects(&self) -> Retained<NSSet<ARReferenceObject>>;

        #[cfg(all(feature = "ARReferenceObject", feature = "objc2-foundation"))]
        /// Setter for [`detectionObjects`][Self::detectionObjects].
        #[unsafe(method(setDetectionObjects:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setDetectionObjects(&self, detection_objects: &NSSet<ARReferenceObject>);

        /// Enable/disable a collaborative session. Disabled by default.
        ///
        ///
        /// When enabled, ARSession will output collaboration data for other participants using its delegate didOutputCollaborationData.
        /// It is the responsibility of the caller to send the data to each participant. When data is received by a participant, it
        /// should be passed to the ARSession by calling updateWithCollaborationData.
        #[unsafe(method(isCollaborationEnabled))]
        #[unsafe(method_family = none)]
        pub unsafe fn isCollaborationEnabled(&self) -> bool;

        /// Setter for [`isCollaborationEnabled`][Self::isCollaborationEnabled].
        #[unsafe(method(setCollaborationEnabled:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setCollaborationEnabled(&self, collaboration_enabled: bool);

        /// Indicates whether user face tracking using the front facing camera can be enabled on this device.
        ///
        /// ObjC class method, exposed as an associated function (no `&self`).
        #[unsafe(method(supportsUserFaceTracking))]
        #[unsafe(method_family = none)]
        pub unsafe fn supportsUserFaceTracking() -> bool;

        /// Enable or disable running Face Tracking using the front facing camera. Disabled by default.
        /// When enabled, ARSession detects faces (if visible in the front-facing camera image) and adds to its list of anchors,
        /// an ARFaceAnchor object representing each face.
        ///
        ///
        /// The transform of the ARFaceAnchor objects will be in the world coordinate space.
        ///
        /// See: ARFaceAnchor
        #[unsafe(method(userFaceTrackingEnabled))]
        #[unsafe(method_family = none)]
        pub unsafe fn userFaceTrackingEnabled(&self) -> bool;

        /// Setter for [`userFaceTrackingEnabled`][Self::userFaceTrackingEnabled].
        #[unsafe(method(setUserFaceTrackingEnabled:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setUserFaceTrackingEnabled(&self, user_face_tracking_enabled: bool);

        /// Enable or disable app clip code tracking. Disabled by default. When enabled, detected app clip codes will be surfaced as an ARAppClipCodeAnchor.
        #[unsafe(method(appClipCodeTrackingEnabled))]
        #[unsafe(method_family = none)]
        pub unsafe fn appClipCodeTrackingEnabled(&self) -> bool;

        /// Setter for [`appClipCodeTrackingEnabled`][Self::appClipCodeTrackingEnabled].
        #[unsafe(method(setAppClipCodeTrackingEnabled:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setAppClipCodeTrackingEnabled(&self, app_clip_code_tracking_enabled: bool);

        /// Indicates whether app clip code tracking can be enabled on this device.
        #[unsafe(method(supportsAppClipCodeTracking))]
        #[unsafe(method_family = none)]
        pub unsafe fn supportsAppClipCodeTracking() -> bool;

        /// Indicates whether the scene reconstruction type is supported for the configuration on this device.
        #[unsafe(method(supportsSceneReconstruction:))]
        #[unsafe(method_family = none)]
        pub unsafe fn supportsSceneReconstruction(
            scene_reconstruction: ARSceneReconstruction,
        ) -> bool;

        /// Type of scene reconstruction to run. Defaults to ARSceneReconstructionNone.
        ///
        /// See: ARMeshAnchor
        ///
        /// If set to a value other than ARSceneReconstructionNone, output of scene reconstruction will be added to the session as
        /// ARMeshAnchor objects.
        #[unsafe(method(sceneReconstruction))]
        #[unsafe(method_family = none)]
        pub unsafe fn sceneReconstruction(&self) -> ARSceneReconstruction;

        /// Setter for [`sceneReconstruction`][Self::sceneReconstruction].
        #[unsafe(method(setSceneReconstruction:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setSceneReconstruction(&self, scene_reconstruction: ARSceneReconstruction);

        #[unsafe(method(init))]
        #[unsafe(method_family = init)]
        pub unsafe fn init(this: Allocated<Self>) -> Retained<Self>;

        #[unsafe(method(new))]
        #[unsafe(method_family = new)]
        pub unsafe fn new() -> Retained<Self>;
    );
}
594
#[cfg(feature = "objc2")]
extern_class!(
    /// A configuration for running orientation tracking.
    ///
    ///
    /// Orientation tracking provides 3 degrees of freedom tracking of the device.
    ///
    /// See also [Apple's documentation](https://developer.apple.com/documentation/arkit/arorientationtrackingconfiguration?language=objc)
    #[unsafe(super(ARConfiguration, NSObject))]
    #[derive(Debug, PartialEq, Eq, Hash)]
    #[cfg(feature = "objc2")]
    pub struct AROrientationTrackingConfiguration;
);

// SAFETY: inherited from ARConfiguration, which conforms to NSCopying.
#[cfg(all(feature = "objc2", feature = "objc2-foundation"))]
unsafe impl NSCopying for AROrientationTrackingConfiguration {}

// Copying an AROrientationTrackingConfiguration produces another AROrientationTrackingConfiguration.
#[cfg(all(feature = "objc2", feature = "objc2-foundation"))]
unsafe impl CopyingHelper for AROrientationTrackingConfiguration {
    type Result = Self;
}

// SAFETY: inherits from NSObject via ARConfiguration (see `super(...)` above).
#[cfg(feature = "objc2")]
unsafe impl NSObjectProtocol for AROrientationTrackingConfiguration {}
619
// Auto-generated method bindings for AROrientationTrackingConfiguration. The
// selector in each `method(...)` attribute must match the Objective-C
// declaration exactly; do not edit by hand (see the file header).
#[cfg(feature = "objc2")]
impl AROrientationTrackingConfiguration {
    extern_methods!(
        /// Enable or disable continuous auto focus.
        ///
        /// Enabled by default.
        #[unsafe(method(isAutoFocusEnabled))]
        #[unsafe(method_family = none)]
        pub unsafe fn isAutoFocusEnabled(&self) -> bool;

        /// Setter for [`isAutoFocusEnabled`][Self::isAutoFocusEnabled].
        #[unsafe(method(setAutoFocusEnabled:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setAutoFocusEnabled(&self, auto_focus_enabled: bool);

        #[unsafe(method(init))]
        #[unsafe(method_family = init)]
        pub unsafe fn init(this: Allocated<Self>) -> Retained<Self>;

        #[unsafe(method(new))]
        #[unsafe(method_family = new)]
        pub unsafe fn new() -> Retained<Self>;
    );
}
644
#[cfg(feature = "objc2")]
extern_class!(
    /// A configuration for running face tracking.
    ///
    ///
    /// Face tracking uses the front facing camera to track the face in 3D providing details on the topology and expression of the face.
    /// A detected face will be added to the session as an ARFaceAnchor object which contains information about head pose, mesh, eye pose, and blend shape
    /// coefficients. If light estimation is enabled the detected face will be treated as a light probe and used to estimate the direction of incoming light.
    ///
    /// See also [Apple's documentation](https://developer.apple.com/documentation/arkit/arfacetrackingconfiguration?language=objc)
    #[unsafe(super(ARConfiguration, NSObject))]
    #[derive(Debug, PartialEq, Eq, Hash)]
    #[cfg(feature = "objc2")]
    pub struct ARFaceTrackingConfiguration;
);

// SAFETY: inherited from ARConfiguration, which conforms to NSCopying.
#[cfg(all(feature = "objc2", feature = "objc2-foundation"))]
unsafe impl NSCopying for ARFaceTrackingConfiguration {}

// Copying an ARFaceTrackingConfiguration produces another ARFaceTrackingConfiguration.
#[cfg(all(feature = "objc2", feature = "objc2-foundation"))]
unsafe impl CopyingHelper for ARFaceTrackingConfiguration {
    type Result = Self;
}

// SAFETY: inherits from NSObject via ARConfiguration (see `super(...)` above).
#[cfg(feature = "objc2")]
unsafe impl NSObjectProtocol for ARFaceTrackingConfiguration {}
671
#[cfg(feature = "objc2")]
impl ARFaceTrackingConfiguration {
    // Each declaration below maps one Objective-C selector to a Rust method;
    // getter/setter pairs mirror ObjC properties. All are `unsafe` because they
    // cross the FFI boundary via `objc_msgSend`.
    extern_methods!(
        /// Maximum number of faces which can be tracked simultaneously.
        // Class method (no `&self`): queries a device-wide capability.
        #[unsafe(method(supportedNumberOfTrackedFaces))]
        #[unsafe(method_family = none)]
        pub unsafe fn supportedNumberOfTrackedFaces() -> NSInteger;

        /// Maximum number of faces to track simultaneously.
        ///
        /// Setting the maximum number of tracked faces will limit the number of faces that can be tracked in a given frame.
        /// If more than the maximum is visible, only the faces already being tracked will continue to track until tracking is lost or another face is removed.
        /// Default value is one.
        #[unsafe(method(maximumNumberOfTrackedFaces))]
        #[unsafe(method_family = none)]
        pub unsafe fn maximumNumberOfTrackedFaces(&self) -> NSInteger;

        /// Setter for [`maximumNumberOfTrackedFaces`][Self::maximumNumberOfTrackedFaces].
        #[unsafe(method(setMaximumNumberOfTrackedFaces:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setMaximumNumberOfTrackedFaces(
            &self,
            maximum_number_of_tracked_faces: NSInteger,
        );

        /// Indicates whether world tracking can be enabled on this device.
        // Class method: capability check, independent of any instance.
        #[unsafe(method(supportsWorldTracking))]
        #[unsafe(method_family = none)]
        pub unsafe fn supportsWorldTracking() -> bool;

        /// Enable or disable World Tracking. Disabled by default.
        ///
        ///
        /// When enabled, ARSession uses the back facing camera to track the device's orientation and position in the world. The camera transform and the ARFaceAnchor transform will be in the world coordinate space.
        #[unsafe(method(isWorldTrackingEnabled))]
        #[unsafe(method_family = none)]
        pub unsafe fn isWorldTrackingEnabled(&self) -> bool;

        /// Setter for [`isWorldTrackingEnabled`][Self::isWorldTrackingEnabled].
        #[unsafe(method(setWorldTrackingEnabled:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setWorldTrackingEnabled(&self, world_tracking_enabled: bool);

        // Standard constructors: `init` consumes an allocation, `new` both
        // allocates and initializes (method families drive memory management).
        #[unsafe(method(init))]
        #[unsafe(method_family = init)]
        pub unsafe fn init(this: Allocated<Self>) -> Retained<Self>;

        #[unsafe(method(new))]
        #[unsafe(method_family = new)]
        pub unsafe fn new() -> Retained<Self>;
    );
}
724
#[cfg(feature = "objc2")]
extern_class!(
    /// A configuration for running image tracking.
    ///
    ///
    /// Image tracking provides 6 degrees of freedom tracking of known images. Four images may be tracked simultaneously.
    ///
    /// See also [Apple's documentation](https://developer.apple.com/documentation/arkit/arimagetrackingconfiguration?language=objc)
    // Binds the Objective-C class; superclass chain is ARConfiguration -> NSObject.
    #[unsafe(super(ARConfiguration, NSObject))]
    #[derive(Debug, PartialEq, Eq, Hash)]
    #[cfg(feature = "objc2")]
    pub struct ARImageTrackingConfiguration;
);
738
// Protocol conformances mirrored from the Objective-C class (see the
// equivalent impls for the other configuration classes in this file).
#[cfg(all(feature = "objc2", feature = "objc2-foundation"))]
unsafe impl NSCopying for ARImageTrackingConfiguration {}

#[cfg(all(feature = "objc2", feature = "objc2-foundation"))]
unsafe impl CopyingHelper for ARImageTrackingConfiguration {
    type Result = Self;
}

#[cfg(feature = "objc2")]
unsafe impl NSObjectProtocol for ARImageTrackingConfiguration {}
749
#[cfg(feature = "objc2")]
impl ARImageTrackingConfiguration {
    // Property accessors and constructors bridged from the ObjC class; each
    // `#[unsafe(method(...))]` names the selector that will be sent.
    extern_methods!(
        /// Enable or disable continuous auto focus.
        ///
        /// Enabled by default.
        #[unsafe(method(isAutoFocusEnabled))]
        #[unsafe(method_family = none)]
        pub unsafe fn isAutoFocusEnabled(&self) -> bool;

        /// Setter for [`isAutoFocusEnabled`][Self::isAutoFocusEnabled].
        #[unsafe(method(setAutoFocusEnabled:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setAutoFocusEnabled(&self, auto_focus_enabled: bool);

        #[cfg(all(feature = "ARReferenceImage", feature = "objc2-foundation"))]
        /// Images to track in the scene.
        // Getter returns a retained (owned) NSSet; the setter below takes a
        // borrowed set, which the ObjC side copies/retains as needed.
        #[unsafe(method(trackingImages))]
        #[unsafe(method_family = none)]
        pub unsafe fn trackingImages(&self) -> Retained<NSSet<ARReferenceImage>>;

        #[cfg(all(feature = "ARReferenceImage", feature = "objc2-foundation"))]
        /// Setter for [`trackingImages`][Self::trackingImages].
        #[unsafe(method(setTrackingImages:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setTrackingImages(&self, tracking_images: &NSSet<ARReferenceImage>);

        /// Maximum number of images to track simultaneously.
        ///
        /// Setting the maximum number of tracked images will limit the number of images that can be tracked in a given frame.
        /// If more than the maximum is visible, only the images already being tracked will continue to track until tracking is lost or another image is removed.
        /// Default value is one.
        #[unsafe(method(maximumNumberOfTrackedImages))]
        #[unsafe(method_family = none)]
        pub unsafe fn maximumNumberOfTrackedImages(&self) -> NSInteger;

        /// Setter for [`maximumNumberOfTrackedImages`][Self::maximumNumberOfTrackedImages].
        #[unsafe(method(setMaximumNumberOfTrackedImages:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setMaximumNumberOfTrackedImages(
            &self,
            maximum_number_of_tracked_images: NSInteger,
        );

        // Standard constructors (see note on ARFaceTrackingConfiguration).
        #[unsafe(method(init))]
        #[unsafe(method_family = init)]
        pub unsafe fn init(this: Allocated<Self>) -> Retained<Self>;

        #[unsafe(method(new))]
        #[unsafe(method_family = new)]
        pub unsafe fn new() -> Retained<Self>;
    );
}
803
#[cfg(feature = "objc2")]
extern_class!(
    /// A configuration for scanning objects.
    ///
    ///
    /// The object scanning configuration runs world tracking, capturing additional detail in order to create reference objects.
    /// Running object scanning will consume additional power in order to provide more detailed features.
    /// The createReferenceObject method can be called on the session to capture a scan of an object in the world.
    ///
    /// See also [Apple's documentation](https://developer.apple.com/documentation/arkit/arobjectscanningconfiguration?language=objc)
    // Binds the Objective-C class; superclass chain is ARConfiguration -> NSObject.
    #[unsafe(super(ARConfiguration, NSObject))]
    #[derive(Debug, PartialEq, Eq, Hash)]
    #[cfg(feature = "objc2")]
    pub struct ARObjectScanningConfiguration;
);
819
// Protocol conformances mirrored from the Objective-C class.
#[cfg(all(feature = "objc2", feature = "objc2-foundation"))]
unsafe impl NSCopying for ARObjectScanningConfiguration {}

#[cfg(all(feature = "objc2", feature = "objc2-foundation"))]
unsafe impl CopyingHelper for ARObjectScanningConfiguration {
    type Result = Self;
}

#[cfg(feature = "objc2")]
unsafe impl NSObjectProtocol for ARObjectScanningConfiguration {}
830
#[cfg(feature = "objc2")]
impl ARObjectScanningConfiguration {
    // Property accessors and constructors bridged from the ObjC class.
    extern_methods!(
        /// Enable or disable continuous auto focus.
        ///
        /// Enabled by default.
        #[unsafe(method(isAutoFocusEnabled))]
        #[unsafe(method_family = none)]
        pub unsafe fn isAutoFocusEnabled(&self) -> bool;

        /// Setter for [`isAutoFocusEnabled`][Self::isAutoFocusEnabled].
        #[unsafe(method(setAutoFocusEnabled:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setAutoFocusEnabled(&self, auto_focus_enabled: bool);

        // Only available when the ARPlaneDetectionTypes bindings are compiled in,
        // since ARPlaneDetection is declared in that module.
        #[cfg(feature = "ARPlaneDetectionTypes")]
        /// Type of planes to detect in the scene.
        ///
        /// If set, new planes will continue to be detected and updated over time. Detected planes will be added to the session as
        /// ARPlaneAnchor objects. In the event that two planes are merged, the newer plane will be removed. Defaults to ARPlaneDetectionNone.
        #[unsafe(method(planeDetection))]
        #[unsafe(method_family = none)]
        pub unsafe fn planeDetection(&self) -> ARPlaneDetection;

        #[cfg(feature = "ARPlaneDetectionTypes")]
        /// Setter for [`planeDetection`][Self::planeDetection].
        #[unsafe(method(setPlaneDetection:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setPlaneDetection(&self, plane_detection: ARPlaneDetection);

        // Standard constructors (see note on ARFaceTrackingConfiguration).
        #[unsafe(method(init))]
        #[unsafe(method_family = init)]
        pub unsafe fn init(this: Allocated<Self>) -> Retained<Self>;

        #[unsafe(method(new))]
        #[unsafe(method_family = new)]
        pub unsafe fn new() -> Retained<Self>;
    );
}
870
#[cfg(feature = "objc2")]
extern_class!(
    /// A configuration for running body tracking.
    ///
    ///
    /// Body tracking provides 6 degrees of freedom tracking of a detected body in the scene. By default, ARFrameSemanticBodyDetection will be
    /// enabled.
    ///
    /// See: ARBodyAnchor
    ///
    /// See: -[ARFrame detectedBody]
    ///
    /// See also [Apple's documentation](https://developer.apple.com/documentation/arkit/arbodytrackingconfiguration?language=objc)
    // Binds the Objective-C class; superclass chain is ARConfiguration -> NSObject.
    #[unsafe(super(ARConfiguration, NSObject))]
    #[derive(Debug, PartialEq, Eq, Hash)]
    #[cfg(feature = "objc2")]
    pub struct ARBodyTrackingConfiguration;
);
889
// Protocol conformances mirrored from the Objective-C class.
#[cfg(all(feature = "objc2", feature = "objc2-foundation"))]
unsafe impl NSCopying for ARBodyTrackingConfiguration {}

#[cfg(all(feature = "objc2", feature = "objc2-foundation"))]
unsafe impl CopyingHelper for ARBodyTrackingConfiguration {
    type Result = Self;
}

#[cfg(feature = "objc2")]
unsafe impl NSObjectProtocol for ARBodyTrackingConfiguration {}
900
#[cfg(feature = "objc2")]
impl ARBodyTrackingConfiguration {
    // Property accessors and constructors bridged from the ObjC class. Methods
    // gated on extra `cfg` features reference types declared in other generated
    // modules (ARWorldMap, ARPlaneDetectionTypes, ARReferenceImage).
    extern_methods!(
        /// Enable or disable continuous auto focus.
        ///
        /// Enabled by default.
        #[unsafe(method(isAutoFocusEnabled))]
        #[unsafe(method_family = none)]
        pub unsafe fn isAutoFocusEnabled(&self) -> bool;

        /// Setter for [`isAutoFocusEnabled`][Self::isAutoFocusEnabled].
        #[unsafe(method(setAutoFocusEnabled:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setAutoFocusEnabled(&self, auto_focus_enabled: bool);

        #[cfg(feature = "ARWorldMap")]
        /// The initial map of the physical space that world tracking will localize to and track.
        ///
        /// If set, the session will attempt to localize to the provided map with
        /// a limited tracking state until localization is successful or run is called again
        /// with a different (or no) initial map specified. Once localized, the map will be extended
        /// and can again be saved using the `getCurrentWorldMap` method on the session.
        // Nullable property: getter returns Option, setter accepts Option<&_>.
        #[unsafe(method(initialWorldMap))]
        #[unsafe(method_family = none)]
        pub unsafe fn initialWorldMap(&self) -> Option<Retained<ARWorldMap>>;

        #[cfg(feature = "ARWorldMap")]
        /// Setter for [`initialWorldMap`][Self::initialWorldMap].
        #[unsafe(method(setInitialWorldMap:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setInitialWorldMap(&self, initial_world_map: Option<&ARWorldMap>);

        /// The mode of environment texturing to run.
        ///
        /// If set, texture information will be accumulated and updated. Adding an AREnvironmentProbeAnchor to the session
        /// will get the current environment texture available from that probe's perspective which can be used for lighting
        /// virtual objects in the scene. Defaults to AREnvironmentTexturingNone.
        #[unsafe(method(environmentTexturing))]
        #[unsafe(method_family = none)]
        pub unsafe fn environmentTexturing(&self) -> AREnvironmentTexturing;

        /// Setter for [`environmentTexturing`][Self::environmentTexturing].
        #[unsafe(method(setEnvironmentTexturing:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setEnvironmentTexturing(&self, environment_texturing: AREnvironmentTexturing);

        /// Determines whether environment textures will be provided with high dynamic range. Enabled by default.
        #[unsafe(method(wantsHDREnvironmentTextures))]
        #[unsafe(method_family = none)]
        pub unsafe fn wantsHDREnvironmentTextures(&self) -> bool;

        /// Setter for [`wantsHDREnvironmentTextures`][Self::wantsHDREnvironmentTextures].
        #[unsafe(method(setWantsHDREnvironmentTextures:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setWantsHDREnvironmentTextures(&self, wants_hdr_environment_textures: bool);

        #[cfg(feature = "ARPlaneDetectionTypes")]
        /// Type of planes to detect in the scene.
        ///
        /// If set, new planes will continue to be detected and updated over time. Detected planes will be added to the session as
        /// ARPlaneAnchor objects. In the event that two planes are merged, the newer plane will be removed. Defaults to ARPlaneDetectionNone.
        #[unsafe(method(planeDetection))]
        #[unsafe(method_family = none)]
        pub unsafe fn planeDetection(&self) -> ARPlaneDetection;

        #[cfg(feature = "ARPlaneDetectionTypes")]
        /// Setter for [`planeDetection`][Self::planeDetection].
        #[unsafe(method(setPlaneDetection:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setPlaneDetection(&self, plane_detection: ARPlaneDetection);

        #[cfg(all(feature = "ARReferenceImage", feature = "objc2-foundation"))]
        /// Images to detect in the scene.
        ///
        /// If set the session will attempt to detect the specified images. When an image is detected an ARImageAnchor will be added to the session.
        #[unsafe(method(detectionImages))]
        #[unsafe(method_family = none)]
        pub unsafe fn detectionImages(&self) -> Retained<NSSet<ARReferenceImage>>;

        #[cfg(all(feature = "ARReferenceImage", feature = "objc2-foundation"))]
        /// Setter for [`detectionImages`][Self::detectionImages].
        #[unsafe(method(setDetectionImages:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setDetectionImages(&self, detection_images: &NSSet<ARReferenceImage>);

        /// Enables the estimation of a scale factor which may be used to correct the physical size of an image.
        ///
        /// If set to true ARKit will attempt to use the computed camera positions in order to compute the scale by which the given physical size
        /// differs from the estimated one. The information about the estimated scale can be found as the property estimatedScaleFactor on the ARImageAnchor.
        ///
        /// Note: When set to true the transform of a returned ARImageAnchor will use the estimated scale factor to correct the translation. Default value is NO.
        #[unsafe(method(automaticImageScaleEstimationEnabled))]
        #[unsafe(method_family = none)]
        pub unsafe fn automaticImageScaleEstimationEnabled(&self) -> bool;

        /// Setter for [`automaticImageScaleEstimationEnabled`][Self::automaticImageScaleEstimationEnabled].
        #[unsafe(method(setAutomaticImageScaleEstimationEnabled:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setAutomaticImageScaleEstimationEnabled(
            &self,
            automatic_image_scale_estimation_enabled: bool,
        );

        /// Enables the estimation of a scale factor which may be used to correct the physical size of a skeleton in 3D.
        ///
        /// If set to true ARKit will attempt to use the computed camera positions in order to compute the scale by which the given physical size
        /// differs from the default one. The information about the estimated scale can be found as the property estimatedScaleFactor on the ARBodyAnchor.
        ///
        /// Note: When set to true the transform of a returned ARBodyAnchor will use the estimated scale factor to correct the translation. Default value is NO.
        #[unsafe(method(automaticSkeletonScaleEstimationEnabled))]
        #[unsafe(method_family = none)]
        pub unsafe fn automaticSkeletonScaleEstimationEnabled(&self) -> bool;

        /// Setter for [`automaticSkeletonScaleEstimationEnabled`][Self::automaticSkeletonScaleEstimationEnabled].
        #[unsafe(method(setAutomaticSkeletonScaleEstimationEnabled:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setAutomaticSkeletonScaleEstimationEnabled(
            &self,
            automatic_skeleton_scale_estimation_enabled: bool,
        );

        /// Maximum number of images to track simultaneously.
        ///
        /// Setting the maximum number of tracked images will limit the number of images that can be tracked in a given frame.
        /// If more than the maximum is visible, only the images already being tracked will continue to track until tracking is lost or another image is removed.
        /// Images will continue to be detected regardless of images tracked. Default value is zero.
        #[unsafe(method(maximumNumberOfTrackedImages))]
        #[unsafe(method_family = none)]
        pub unsafe fn maximumNumberOfTrackedImages(&self) -> NSInteger;

        /// Setter for [`maximumNumberOfTrackedImages`][Self::maximumNumberOfTrackedImages].
        #[unsafe(method(setMaximumNumberOfTrackedImages:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setMaximumNumberOfTrackedImages(
            &self,
            maximum_number_of_tracked_images: NSInteger,
        );

        /// Enable or disable app clip code tracking. Disabled by default. When enabled, detected app clip codes will be surfaced as an ARAppClipCodeAnchor.
        #[unsafe(method(appClipCodeTrackingEnabled))]
        #[unsafe(method_family = none)]
        pub unsafe fn appClipCodeTrackingEnabled(&self) -> bool;

        /// Setter for [`appClipCodeTrackingEnabled`][Self::appClipCodeTrackingEnabled].
        #[unsafe(method(setAppClipCodeTrackingEnabled:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setAppClipCodeTrackingEnabled(&self, app_clip_code_tracking_enabled: bool);

        /// Indicates whether app clip code tracking can be enabled on this device.
        // Class method: capability check, independent of any instance.
        #[unsafe(method(supportsAppClipCodeTracking))]
        #[unsafe(method_family = none)]
        pub unsafe fn supportsAppClipCodeTracking() -> bool;

        // Standard constructors (see note on ARFaceTrackingConfiguration).
        #[unsafe(method(init))]
        #[unsafe(method_family = init)]
        pub unsafe fn init(this: Allocated<Self>) -> Retained<Self>;

        #[unsafe(method(new))]
        #[unsafe(method_family = new)]
        pub unsafe fn new() -> Retained<Self>;
    );
}
1063
#[cfg(feature = "objc2")]
extern_class!(
    /// A configuration for running positional tracking.
    ///
    ///
    /// Positional tracking provides 6 degrees of freedom tracking of the device by running the camera at lowest possible resolution and frame rate.
    ///
    /// See also [Apple's documentation](https://developer.apple.com/documentation/arkit/arpositionaltrackingconfiguration?language=objc)
    // Binds the Objective-C class; superclass chain is ARConfiguration -> NSObject.
    #[unsafe(super(ARConfiguration, NSObject))]
    #[derive(Debug, PartialEq, Eq, Hash)]
    #[cfg(feature = "objc2")]
    pub struct ARPositionalTrackingConfiguration;
);
1077
// Protocol conformances mirrored from the Objective-C class.
#[cfg(all(feature = "objc2", feature = "objc2-foundation"))]
unsafe impl NSCopying for ARPositionalTrackingConfiguration {}

#[cfg(all(feature = "objc2", feature = "objc2-foundation"))]
unsafe impl CopyingHelper for ARPositionalTrackingConfiguration {
    type Result = Self;
}

#[cfg(feature = "objc2")]
unsafe impl NSObjectProtocol for ARPositionalTrackingConfiguration {}
1088
#[cfg(feature = "objc2")]
impl ARPositionalTrackingConfiguration {
    // Property accessors and constructors bridged from the ObjC class.
    extern_methods!(
        #[cfg(feature = "ARPlaneDetectionTypes")]
        /// Type of planes to detect in the scene.
        ///
        /// If set, new planes will continue to be detected and updated over time. Detected planes will be added to the session as
        /// ARPlaneAnchor objects. In the event that two planes are merged, the newer plane will be removed. Defaults to ARPlaneDetectionNone.
        #[unsafe(method(planeDetection))]
        #[unsafe(method_family = none)]
        pub unsafe fn planeDetection(&self) -> ARPlaneDetection;

        #[cfg(feature = "ARPlaneDetectionTypes")]
        /// Setter for [`planeDetection`][Self::planeDetection].
        #[unsafe(method(setPlaneDetection:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setPlaneDetection(&self, plane_detection: ARPlaneDetection);

        #[cfg(feature = "ARWorldMap")]
        /// The initial map of the physical space that world tracking will localize to and track.
        ///
        /// If set, the session will attempt to localize to the provided map with
        /// a limited tracking state until localization is successful or run is called again
        /// with a different (or no) initial map specified. Once localized, the map will be extended
        /// and can again be saved using the `getCurrentWorldMap` method on the session.
        // Nullable property: getter returns Option, setter accepts Option<&_>.
        #[unsafe(method(initialWorldMap))]
        #[unsafe(method_family = none)]
        pub unsafe fn initialWorldMap(&self) -> Option<Retained<ARWorldMap>>;

        #[cfg(feature = "ARWorldMap")]
        /// Setter for [`initialWorldMap`][Self::initialWorldMap].
        #[unsafe(method(setInitialWorldMap:))]
        #[unsafe(method_family = none)]
        pub unsafe fn setInitialWorldMap(&self, initial_world_map: Option<&ARWorldMap>);

        // Standard constructors (see note on ARFaceTrackingConfiguration).
        #[unsafe(method(init))]
        #[unsafe(method_family = init)]
        pub unsafe fn init(this: Allocated<Self>) -> Retained<Self>;

        #[unsafe(method(new))]
        #[unsafe(method_family = new)]
        pub unsafe fn new() -> Retained<Self>;
    );
}
1133
#[cfg(feature = "objc2")]
extern_class!(
    /// A configuration for running geographical world tracking.
    ///
    ///
    /// It allows placing geo-referenced anchors (ARGeoAnchor) in the scene by running world tracking with location and compass.
    ///
    /// See also [Apple's documentation](https://developer.apple.com/documentation/arkit/argeotrackingconfiguration?language=objc)
    // Binds the Objective-C class; superclass chain is ARConfiguration -> NSObject.
    #[unsafe(super(ARConfiguration, NSObject))]
    #[derive(Debug, PartialEq, Eq, Hash)]
    #[cfg(feature = "objc2")]
    pub struct ARGeoTrackingConfiguration;
);
1147
// Protocol conformances mirrored from the Objective-C class.
#[cfg(all(feature = "objc2", feature = "objc2-foundation"))]
unsafe impl NSCopying for ARGeoTrackingConfiguration {}

#[cfg(all(feature = "objc2", feature = "objc2-foundation"))]
unsafe impl CopyingHelper for ARGeoTrackingConfiguration {
    type Result = Self;
}

#[cfg(feature = "objc2")]
unsafe impl NSObjectProtocol for ARGeoTrackingConfiguration {}
1158
1159#[cfg(feature = "objc2")]
1160impl ARGeoTrackingConfiguration {
1161    extern_methods!(
1162        /// Unavailable
1163        #[unsafe(method(worldAlignment))]
1164        #[unsafe(method_family = none)]
1165        pub unsafe fn worldAlignment(&self) -> ARWorldAlignment;
1166
1167        /// Setter for [`worldAlignment`][Self::worldAlignment].
1168        #[unsafe(method(setWorldAlignment:))]
1169        #[unsafe(method_family = none)]
1170        pub unsafe fn setWorldAlignment(&self, world_alignment: ARWorldAlignment);
1171
1172        /// The mode of environment texturing to run.
1173        ///
1174        /// If set, texture information will be accumulated and updated. Adding an AREnvironmentProbeAnchor to the session
1175        /// will get the current environment texture available from that probe's perspective which can be used for lighting
1176        /// virtual objects in the scene. Defaults to AREnvironmentTexturingNone.
1177        #[unsafe(method(environmentTexturing))]
1178        #[unsafe(method_family = none)]
1179        pub unsafe fn environmentTexturing(&self) -> AREnvironmentTexturing;
1180
1181        /// Setter for [`environmentTexturing`][Self::environmentTexturing].
1182        #[unsafe(method(setEnvironmentTexturing:))]
1183        #[unsafe(method_family = none)]
1184        pub unsafe fn setEnvironmentTexturing(&self, environment_texturing: AREnvironmentTexturing);
1185
1186        /// Determines whether environment textures will be provided with high dynamic range. Enabled by default.
1187        #[unsafe(method(wantsHDREnvironmentTextures))]
1188        #[unsafe(method_family = none)]
1189        pub unsafe fn wantsHDREnvironmentTextures(&self) -> bool;
1190
1191        /// Setter for [`wantsHDREnvironmentTextures`][Self::wantsHDREnvironmentTextures].
1192        #[unsafe(method(setWantsHDREnvironmentTextures:))]
1193        #[unsafe(method_family = none)]
1194        pub unsafe fn setWantsHDREnvironmentTextures(&self, wants_hdr_environment_textures: bool);
1195
1196        #[cfg(feature = "ARPlaneDetectionTypes")]
1197        /// Type of planes to detect in the scene.
1198        ///
1199        /// If set, new planes will continue to be detected and updated over time. Detected planes will be added to the session as
1200        /// ARPlaneAnchor objects. In the event that two planes are merged, the newer plane will be removed. Defaults to ARPlaneDetectionNone.
1201        #[unsafe(method(planeDetection))]
1202        #[unsafe(method_family = none)]
1203        pub unsafe fn planeDetection(&self) -> ARPlaneDetection;
1204
1205        #[cfg(feature = "ARPlaneDetectionTypes")]
1206        /// Setter for [`planeDetection`][Self::planeDetection].
1207        #[unsafe(method(setPlaneDetection:))]
1208        #[unsafe(method_family = none)]
1209        pub unsafe fn setPlaneDetection(&self, plane_detection: ARPlaneDetection);
1210
1211        #[cfg(all(feature = "ARReferenceImage", feature = "objc2-foundation"))]
1212        /// Images to detect in the scene.
1213        ///
1214        /// If set the session will attempt to detect the specified images. When an image is detected an ARImageAnchor will be added to the session.
1215        #[unsafe(method(detectionImages))]
1216        #[unsafe(method_family = none)]
1217        pub unsafe fn detectionImages(&self) -> Retained<NSSet<ARReferenceImage>>;
1218
1219        #[cfg(all(feature = "ARReferenceImage", feature = "objc2-foundation"))]
1220        /// Setter for [`detectionImages`][Self::detectionImages].
1221        #[unsafe(method(setDetectionImages:))]
1222        #[unsafe(method_family = none)]
1223        pub unsafe fn setDetectionImages(&self, detection_images: Option<&NSSet<ARReferenceImage>>);
1224
1225        /// Enables the estimation of a scale factor which may be used to correct the physical size of an image.
1226        ///
1227        /// If set to true ARKit will attempt to use the computed camera positions in order to compute the scale by which the given physical size
1228        /// differs from the estimated one. The information about the estimated scale can be found as the property estimatedScaleFactor on the ARImageAnchor.
1229        ///
1230        /// Note: When set to true the transform of a returned ARImageAnchor will use the estimated scale factor to correct the translation. Default value is NO.
1231        #[unsafe(method(automaticImageScaleEstimationEnabled))]
1232        #[unsafe(method_family = none)]
1233        pub unsafe fn automaticImageScaleEstimationEnabled(&self) -> bool;
1234
1235        /// Setter for [`automaticImageScaleEstimationEnabled`][Self::automaticImageScaleEstimationEnabled].
1236        #[unsafe(method(setAutomaticImageScaleEstimationEnabled:))]
1237        #[unsafe(method_family = none)]
1238        pub unsafe fn setAutomaticImageScaleEstimationEnabled(
1239            &self,
1240            automatic_image_scale_estimation_enabled: bool,
1241        );
1242
1243        /// Maximum number of images to track simultaneously.
1244        ///
1245        /// Setting the maximum number of tracked images will limit the number of images that can be tracked in a given frame.
1246        /// If more than the maximum is visible, only the images already being tracked will continue to track until tracking is lost or another image is removed.
1247        /// Images will continue to be detected regardless of images tracked. Default value is zero.
1248        #[unsafe(method(maximumNumberOfTrackedImages))]
1249        #[unsafe(method_family = none)]
1250        pub unsafe fn maximumNumberOfTrackedImages(&self) -> NSInteger;
1251
1252        /// Setter for [`maximumNumberOfTrackedImages`][Self::maximumNumberOfTrackedImages].
1253        #[unsafe(method(setMaximumNumberOfTrackedImages:))]
1254        #[unsafe(method_family = none)]
1255        pub unsafe fn setMaximumNumberOfTrackedImages(
1256            &self,
1257            maximum_number_of_tracked_images: NSInteger,
1258        );
1259
1260        #[cfg(all(feature = "ARReferenceObject", feature = "objc2-foundation"))]
1261        /// Objects to detect in the scene.
1262        ///
1263        /// If set the session will attempt to detect the specified objects. When an object is detected an ARObjectAnchor will be added to the session.
1264        #[unsafe(method(detectionObjects))]
1265        #[unsafe(method_family = none)]
1266        pub unsafe fn detectionObjects(&self) -> Retained<NSSet<ARReferenceObject>>;
1267
1268        #[cfg(all(feature = "ARReferenceObject", feature = "objc2-foundation"))]
1269        /// Setter for [`detectionObjects`][Self::detectionObjects].
1270        #[unsafe(method(setDetectionObjects:))]
1271        #[unsafe(method_family = none)]
1272        pub unsafe fn setDetectionObjects(&self, detection_objects: &NSSet<ARReferenceObject>);
1273
1274        /// Enable or disable app clip code tracking. Disabled by default. When enabled, detected app clip codes will be surfaced as an ARAppClipCodeAnchor.
1275        #[unsafe(method(appClipCodeTrackingEnabled))]
1276        #[unsafe(method_family = none)]
1277        pub unsafe fn appClipCodeTrackingEnabled(&self) -> bool;
1278
1279        /// Setter for [`appClipCodeTrackingEnabled`][Self::appClipCodeTrackingEnabled].
1280        #[unsafe(method(setAppClipCodeTrackingEnabled:))]
1281        #[unsafe(method_family = none)]
1282        pub unsafe fn setAppClipCodeTrackingEnabled(&self, app_clip_code_tracking_enabled: bool);
1283
1284        /// Indicates whether app clip code tracking can be enabled on this device.
1285        #[unsafe(method(supportsAppClipCodeTracking))]
1286        #[unsafe(method_family = none)]
1287        pub unsafe fn supportsAppClipCodeTracking() -> bool;
1288
        #[cfg(all(feature = "block2", feature = "objc2-foundation"))]
        /// Determines the availability of geo tracking at the current location.
        ///
        /// This method will attempt to acquire a location fix on a background thread, then check availability.
        ///
        /// Parameter `completionHandler`: Completion handler that is called when availability has been determined. This handler is executed on an arbitrary serial queue. It takes the following parameters:
        /// isAvailable - True if geo tracking is available at the current location, otherwise false.
        /// error - An error that indicates why geo tracking is not available at the current location.
        ///
        /// Associated function (no `self`): maps to an Objective-C class method.
        /// NOTE(review): the `*mut NSError` handler argument is presumably null
        /// when `isAvailable` is true — confirm before dereferencing unconditionally.
        #[unsafe(method(checkAvailabilityWithCompletionHandler:))]
        #[unsafe(method_family = none)]
        pub unsafe fn checkAvailabilityWithCompletionHandler(
            completion_handler: &block2::Block<dyn Fn(Bool, *mut NSError)>,
        );
1304
        #[cfg(all(
            feature = "block2",
            feature = "objc2-core-location",
            feature = "objc2-foundation"
        ))]
        /// Determines the availability of geo tracking at the given location.
        ///
        /// Parameter `coordinate`: Location at which to check.
        ///
        /// Parameter `completionHandler`: Completion handler that is called when availability has been determined. This handler is executed on an arbitrary serial queue. It takes the following parameters:
        /// isAvailable - True if geo tracking is available at the given location, otherwise false.
        /// error - An error that indicates why geo tracking is not available at the given location.
        ///
        /// Associated function (no `self`): maps to an Objective-C class method.
        /// NOTE(review): the `*mut NSError` handler argument is presumably null
        /// when `isAvailable` is true — confirm before dereferencing unconditionally.
        #[unsafe(method(checkAvailabilityAtCoordinate:completionHandler:))]
        #[unsafe(method_family = none)]
        pub unsafe fn checkAvailabilityAtCoordinate_completionHandler(
            coordinate: CLLocationCoordinate2D,
            completion_handler: &block2::Block<dyn Fn(Bool, *mut NSError)>,
        );
1323
        /// Initializes the allocated instance; maps to Objective-C `-init`
        /// (the `init` method family, hence `this: Allocated<Self>`).
        #[unsafe(method(init))]
        #[unsafe(method_family = init)]
        pub unsafe fn init(this: Allocated<Self>) -> Retained<Self>;
1327
        /// Convenience constructor; maps to Objective-C `+new`
        /// (allocates and initializes in one step, per the `new` method family).
        #[unsafe(method(new))]
        #[unsafe(method_family = new)]
        pub unsafe fn new() -> Retained<Self>;
1331    );
1332}