/// The Ingest Service handles the data ingestion into Nominal/Clickhouse.
pub trait IngestService {
    // Required methods

    /// Triggers an ingest job, allowing either creating a new dataset or
    /// uploading to an existing one. This endpoint is meant to supersede all
    /// other ingestion endpoints as their functionality gets migrated to
    /// this one.
    fn ingest(
        &self,
        auth_: BearerToken,
        trigger_ingest: IngestRequest,
    ) -> Result<IngestResponse, Error>;

    /// Deprecated form of the trigger-ingest call; see [`Self::trigger_ingest`].
    // NOTE(review): no description in the docs for this method — presumably
    // kept only for backward compatibility; confirm before removal.
    fn deprecated_trigger_ingest(
        &self,
        auth_: BearerToken,
        trigger_ingest: DeprecatedTriggerIngest,
    ) -> Result<TriggeredIngest, Error>;

    /// Triggers an ingest job for the given data source. The ingest job will
    /// be processed asynchronously.
    fn trigger_ingest(
        &self,
        auth_: BearerToken,
        trigger_ingest: TriggerIngest,
    ) -> Result<TriggeredIngest, Error>;

    /// Triggers an ingest job of a new file, allowing either creating a new
    /// dataset or uploading to an existing one.
    fn trigger_file_ingest(
        &self,
        auth_: BearerToken,
        trigger_ingest: TriggerFileIngest,
    ) -> Result<TriggeredIngest, Error>;

    /// Creates a run and ingests data sources to be added to the run.
    fn ingest_run(
        &self,
        auth_: BearerToken,
        request: IngestRunRequest,
    ) -> Result<IngestRunResponse, Error>;

    /// Ingests video data from a S3 Nominal upload bucket.
    fn ingest_video(
        &self,
        auth_: BearerToken,
        ingest_video: IngestVideoRequest,
    ) -> Result<IngestVideoResponse, Error>;

    /// Ingests data from mcap files in the S3 Nominal upload bucket.
    fn ingest_mcap(
        &self,
        auth_: BearerToken,
        // Renamed from `ingest_video` (copy-paste from `ingest_video` above);
        // trait method parameter names are not part of the ABI, so
        // implementors and callers are unaffected.
        ingest_mcap: IngestMcapRequest,
    ) -> Result<IngestMcapResponse, Error>;

    /// Re-ingests data from provided source datasets into either an existing
    /// target dataset, or a new one. Only supported for CSV and Parquet
    /// dataset files. Will only reingest dataset files and will drop
    /// streaming data from datasets.
    fn reingest_from_datasets(
        &self,
        auth_: BearerToken,
        request: ReingestDatasetsRequest,
    ) -> Result<ReingestDatasetsResponse, Error>;

    /// This is a best effort deletion of the file's data based on the
    /// ingestedAt timestamp. This is an irreversible action. Only v2 dataset
    /// file deletion is supported.
    ///
    /// !!!WARNING!!! It's possible that the file has overwritten points, in
    /// which case, those older points will not be recovered. Only use this
    /// endpoint if this is acceptable, the data across files are disjoint,
    /// or you're willing to re-ingest files to manually recover older points.
    fn delete_file(
        &self,
        auth_: BearerToken,
        dataset_rid: DatasetRid,
        file_id: Uuid,
    ) -> Result<(), Error>;
}
Expand description
The Ingest Service handles the data ingestion into Nominal/Clickhouse.
Required Methods§
Source
fn ingest(
&self,
auth_: BearerToken,
trigger_ingest: IngestRequest,
) -> Result<IngestResponse, Error>
fn ingest( &self, auth_: BearerToken, trigger_ingest: IngestRequest, ) -> Result<IngestResponse, Error>
Triggers an ingest job, allowing either creating a new dataset or uploading to an existing one. This endpoint is meant to supersede all other ingestion endpoints as their functionality gets migrated to this one.
fn deprecated_trigger_ingest( &self, auth_: BearerToken, trigger_ingest: DeprecatedTriggerIngest, ) -> Result<TriggeredIngest, Error>
Source
fn trigger_ingest(
&self,
auth_: BearerToken,
trigger_ingest: TriggerIngest,
) -> Result<TriggeredIngest, Error>
fn trigger_ingest( &self, auth_: BearerToken, trigger_ingest: TriggerIngest, ) -> Result<TriggeredIngest, Error>
Triggers an ingest job for the given data source. The ingest job will be processed asynchronously.
Source
fn trigger_file_ingest(
&self,
auth_: BearerToken,
trigger_ingest: TriggerFileIngest,
) -> Result<TriggeredIngest, Error>
fn trigger_file_ingest( &self, auth_: BearerToken, trigger_ingest: TriggerFileIngest, ) -> Result<TriggeredIngest, Error>
Triggers an ingest job of a new file, allowing either creating a new dataset or uploading to an existing one.
Source
fn ingest_run(
&self,
auth_: BearerToken,
request: IngestRunRequest,
) -> Result<IngestRunResponse, Error>
fn ingest_run( &self, auth_: BearerToken, request: IngestRunRequest, ) -> Result<IngestRunResponse, Error>
Creates a run and ingests data sources to be added to the run.
Source
fn ingest_video(
&self,
auth_: BearerToken,
ingest_video: IngestVideoRequest,
) -> Result<IngestVideoResponse, Error>
fn ingest_video( &self, auth_: BearerToken, ingest_video: IngestVideoRequest, ) -> Result<IngestVideoResponse, Error>
Ingests video data from a S3 Nominal upload bucket.
Source
fn ingest_mcap(
&self,
auth_: BearerToken,
ingest_video: IngestMcapRequest,
) -> Result<IngestMcapResponse, Error>
fn ingest_mcap( &self, auth_: BearerToken, ingest_video: IngestMcapRequest, ) -> Result<IngestMcapResponse, Error>
Ingests data from mcap files in the S3 Nominal upload bucket.
Source
fn reingest_from_datasets(
&self,
auth_: BearerToken,
request: ReingestDatasetsRequest,
) -> Result<ReingestDatasetsResponse, Error>
fn reingest_from_datasets( &self, auth_: BearerToken, request: ReingestDatasetsRequest, ) -> Result<ReingestDatasetsResponse, Error>
Re-ingests data from provided source datasets into either an existing target dataset, or a new one. Only supported for CSV and Parquet dataset files. Will only reingest dataset files and will drop streaming data from datasets.
Source
fn delete_file(
&self,
auth_: BearerToken,
dataset_rid: DatasetRid,
file_id: Uuid,
) -> Result<(), Error>
fn delete_file( &self, auth_: BearerToken, dataset_rid: DatasetRid, file_id: Uuid, ) -> Result<(), Error>
This is a best effort deletion of the file’s data based on the ingestedAt timestamp. This is an irreversible action. Only v2 dataset file deletion is supported. !!!WARNING!!! It’s possible that the file has overwritten points, in which case, those older points will not be recovered. Only use this endpoint if this is acceptable, the data across files are disjoint, or you’re willing to re-ingest files to manually recover older points.