
//! # nv-runtime
//!
//! Pipeline orchestration and runtime for the NextVision video perception library.
//!
//! ## Conceptual model
//!
//! The runtime manages **feeds** — independent video streams, each running on a
//! dedicated OS thread. For every frame, a linear pipeline of user-defined
//! **[stages](nv_perception::Stage)** produces structured perception output.
//!
//! ```text
//! Media source → FrameQueue → [Stage 1 → Stage 2 → …] → OutputSink
//!                                 │                             │
//!                          TemporalStore               Broadcast channel
//!                          ViewState                   (optional subscribers)
//! ```
//!
//! The media backend (GStreamer, via `nv-media`) is an implementation
//! detail. Users interact with backend-agnostic types: [`SourceSpec`](nv_core::SourceSpec),
//! [`FeedConfig`], and [`OutputEnvelope`]. A custom backend can be injected via
//! [`RuntimeBuilder::ingress_factory`].
//!
//! ## Key types
//!
//! - **[`Runtime`]** — manages cross-feed concerns (thread pools, metrics, shutdown).
//! - **[`RuntimeBuilder`]** — builder for configuring and constructing a `Runtime`.
//! - **[`RuntimeHandle`]** — cloneable control handle (add/remove feeds, subscribe, shutdown).
//! - **[`FeedConfig`]** / **[`FeedConfigBuilder`]** — per-feed configuration.
//! - **[`FeedHandle`]** — handle to a running feed (metrics, diagnostics, queue telemetry, pause/resume).
//! - **[`QueueTelemetry`]** — source/sink queue depth and capacity snapshot.
//! - **[`OutputEnvelope`]** — structured, provenanced output for each processed frame.
//! - **[`OutputSink`]** — user-implementable trait for receiving outputs.
//! - **[`Provenance`]** — full audit trail of stage and view-system decisions.
//! - **[`BackpressurePolicy`]** — queue behavior configuration.
//! - **[`FeedDiagnostics`]** / **[`RuntimeDiagnostics`]** — consolidated diagnostic snapshots.
//!
//! ## PTZ / view-state handling
//!
//! Moving-camera feeds use `CameraMode::Observed` with a user-supplied
//! [`ViewStateProvider`](nv_view::ViewStateProvider). The runtime polls it
//! each frame, runs an [`EpochPolicy`](nv_view::EpochPolicy), and manages
//! view epochs, continuity degradation, and trajectory segmentation
//! automatically. Fixed cameras use `CameraMode::Fixed` and skip the
//! view-state machinery entirely.
//!
//! ## Out of scope
//!
//! The runtime does **not** include domain-specific event taxonomies,
//! alerting workflows, calibration semantics, or UI concerns. Those
//! belong in layers built on top of this library.
//!
//! ## Minimal usage
//!
//! ```rust,no_run
//! use nv_runtime::*;
//! use nv_core::*;
//!
//! # struct MyStage;
//! # impl nv_perception::Stage for MyStage {
//! #     fn id(&self) -> StageId { StageId("my_stage") }
//! #     fn process(&mut self, _ctx: &nv_perception::StageContext<'_>) -> Result<nv_perception::StageOutput, StageError> {
//! #         Ok(nv_perception::StageOutput::empty())
//! #     }
//! # }
//! struct MySink;
//! impl OutputSink for MySink {
//!     fn emit(&self, _output: SharedOutput) {}
//! }
//!
//! # fn example() -> Result<(), NvError> {
//! let runtime = Runtime::builder().build()?;
//! let _feed = runtime.add_feed(
//!     FeedConfig::builder()
//!         .source(SourceSpec::rtsp("rtsp://cam/stream"))
//!         .camera_mode(CameraMode::Fixed)
//!         .stages(vec![Box::new(MyStage)])
//!         .output_sink(Box::new(MySink))
//!         .build()?
//! )?;
//! // runtime.shutdown();
//! # Ok(())
//! # }
//! ```
//!
//! ## Batch inference across feeds
//!
//! Multiple feeds can share a single GPU-accelerated batch processor via
//! [`BatchHandle`]. Create a batch handle once, then reference it from
//! each feed's pipeline.
//!
//! ```rust,no_run
//! use nv_runtime::*;
//! use nv_core::*;
//! use nv_perception::batch::{BatchProcessor, BatchEntry};
//! use std::time::Duration;
//!
//! # struct MyDetector;
//! # impl BatchProcessor for MyDetector {
//! #     fn id(&self) -> StageId { StageId("detector") }
//! #     fn process(&mut self, items: &mut [BatchEntry]) -> Result<(), StageError> {
//! #         for item in items.iter_mut() { item.output = Some(nv_perception::StageOutput::empty()); }
//! #         Ok(())
//! #     }
//! # }
//! # struct MySink;
//! # impl OutputSink for MySink { fn emit(&self, _: SharedOutput) {} }
//!
//! # fn example() -> Result<(), NvError> {
//! let runtime = Runtime::builder().build()?;
//!
//! // Create a shared batch coordinator.
//! let batch = runtime.create_batch(
//!     Box::new(MyDetector),
//!     BatchConfig {
//!         max_batch_size: 8,
//!         max_latency: Duration::from_millis(50),
//!         queue_capacity: None,
//!         response_timeout: None,
//!         max_in_flight_per_feed: 1,
//!         startup_timeout: None,
//!     },
//! )?;
//!
//! // Build per-feed pipelines referencing the shared batch.
//! let pipeline = FeedPipeline::builder()
//!     .batch(batch.clone()).expect("single batch point")
//!     .build();
//!
//! let _feed = runtime.add_feed(
//!     FeedConfig::builder()
//!         .source(SourceSpec::rtsp("rtsp://cam/stream"))
//!         .camera_mode(CameraMode::Fixed)
//!         .feed_pipeline(pipeline)
//!         .output_sink(Box::new(MySink))
//!         .build()?
//! )?;
//! # Ok(())
//! # }
//! ```

141pub mod backpressure;
142pub mod batch;
143pub mod diagnostics;
144pub(crate) mod executor;
145pub mod feed;
146pub mod feed_handle;
147pub mod output;
148pub mod pipeline;
149pub mod provenance;
150pub(crate) mod queue;
151pub mod runtime;
152pub mod shutdown;
153pub(crate) mod worker;
154
155// Re-export key types at crate root.
156pub use backpressure::BackpressurePolicy;
157pub use batch::{BatchConfig, BatchHandle, BatchMetrics};
158pub use diagnostics::{
159    BatchDiagnostics, FeedDiagnostics, OutputLagStatus, RuntimeDiagnostics, ViewDiagnostics,
160    ViewStatus,
161};
162pub use feed::{FeedConfig, FeedConfigBuilder};
163pub use feed_handle::{DecodeStatus, FeedHandle, QueueTelemetry};
164pub use output::{
165    AdmissionSummary, FrameInclusion, OutputEnvelope, OutputSink, SharedOutput, SinkFactory,
166};
167pub use pipeline::{FeedPipeline, FeedPipelineBuilder, PipelineError};
168pub use provenance::{
169    Provenance, StageOutcomeCategory, StageProvenance, StageResult, ViewProvenance,
170};
171pub use runtime::{Runtime, RuntimeBuilder, RuntimeHandle};
172pub use shutdown::{RestartPolicy, RestartTrigger};
173
174// Re-export validation types from nv-perception for convenience.
175pub use nv_perception::{ValidationMode, ValidationWarning};
176
177// Re-export decode types from nv-media for convenience.
178pub use nv_media::{DecodeCapabilities, DecodePreference, discover_decode_capabilities};
179
180// Re-export post-decode hook types from nv-media for convenience.
181pub use nv_media::{DecodedStreamInfo, PostDecodeHook};
182
183// Re-export device residency type from nv-media for convenience.
184//
185// To enable the built-in CUDA-resident frame pipeline, activate the `cuda`
186// cargo feature on nv-runtime (which forwards to `nv-media/cuda`):
187//
188//   nv-runtime = { version = "0.1", features = ["cuda"] }
189//
190// Then set `FeedConfig::device_residency(DeviceResidency::Cuda)`. Without
191// the feature, requesting `DeviceResidency::Cuda` returns
192// `MediaError::Unsupported` at pipeline build time.
193//
194// For platform-specific providers (e.g., Jetson NVMM), use
195// `DeviceResidency::Provider(provider)` — this path does **not** require
196// the `cuda` feature.
197pub use nv_media::DeviceResidency;
198
199// Re-export GPU pipeline provider types for application-level wiring.
200pub use nv_media::{GpuPipelineProvider, SharedGpuProvider};
201
202// Re-export health types from nv-core for convenience.
203pub use nv_core::health::{DecodeOutcome, HealthEvent};
204
205// Re-export security policy types from nv-core for convenience.
206pub use nv_core::security::{CustomPipelinePolicy, RtspSecurityPolicy};