//! nominal-api 0.1239.0
//!
//! API bindings for the Nominal platform.
//! See the Nominal platform documentation for details.
use conjure_http::endpoint;
/// The Series Cache service manages internal chunks of data cached by Nominal's backend.
///
/// Blocking (synchronous) client trait; the client implementation is generated by the
/// `#[conjure_http::conjure_client]` attribute macro. Generated code — do not edit by hand.
#[conjure_http::conjure_client(name = "SeriesCacheService")]
pub trait SeriesCacheService<
    // Generated type parameter for raw streaming response bodies. No endpoint in this
    // trait currently declares a binary/streaming response, so `I` is unused by the
    // visible methods; the macro requires it regardless.
    #[response_body]
    I: Iterator<
            Item = Result<conjure_http::private::Bytes, conjure_http::private::Error>,
        >,
> {
    /// Fetches "chunks" of series that are stored as Arrow files in S3.
    /// This endpoint is being deprecated for new series in favor of batchGetCachedSeries
    ///
    /// `POST /timeseries/series-cache/v1/logical-series/{logicalSeriesRid}/get-chunks`
    #[endpoint(
        method = POST,
        path = "/timeseries/series-cache/v1/logical-series/{logicalSeriesRid}/get-chunks",
        name = "getChunks",
        accept = conjure_http::client::StdResponseDeserializer
    )]
    fn get_chunks(
        &self,
        #[auth]
        auth_: &conjure_object::BearerToken,
        #[path(
            name = "logicalSeriesRid",
            encoder = conjure_http::client::conjure::PlainEncoder
        )]
        logical_series_rid: &super::super::super::super::objects::api::LogicalSeriesRid,
        #[body(serializer = conjure_http::client::StdRequestSerializer)]
        get_chunks_parameters: &super::super::super::super::objects::timeseries::seriescache::api::GetChunksParameters,
    ) -> Result<
        super::super::super::super::objects::timeseries::seriescache::api::GetChunksResponse,
        conjure_http::private::Error,
    >;
    /// Registers cached chunks for the given logical series.
    ///
    /// `POST /timeseries/series-cache/v1/logical-series/{logicalSeriesRid}/create-chunks`
    #[endpoint(
        method = POST,
        path = "/timeseries/series-cache/v1/logical-series/{logicalSeriesRid}/create-chunks",
        name = "createChunks",
        accept = conjure_http::client::StdResponseDeserializer
    )]
    fn create_chunks(
        &self,
        #[auth]
        auth_: &conjure_object::BearerToken,
        #[path(
            name = "logicalSeriesRid",
            encoder = conjure_http::client::conjure::PlainEncoder
        )]
        logical_series_rid: &super::super::super::super::objects::api::LogicalSeriesRid,
        #[body(serializer = conjure_http::client::StdRequestSerializer)]
        create_chunks_parameters: &super::super::super::super::objects::timeseries::seriescache::api::CreateChunksParameters,
    ) -> Result<
        super::super::super::super::objects::timeseries::seriescache::api::CreateChunksResponse,
        conjure_http::private::Error,
    >;
    /// Registers cached chunks without a per-series path parameter.
    ///
    /// `POST /timeseries/series-cache/v1/batch-create-chunks`
    ///
    /// NOTE(review): takes the same `CreateChunksParameters` body as `createChunks`;
    /// presumably the target series is identified inside the request body — confirm
    /// against the Conjure service definition.
    #[endpoint(
        method = POST,
        path = "/timeseries/series-cache/v1/batch-create-chunks",
        name = "batchCreateChunks",
        accept = conjure_http::client::StdResponseDeserializer
    )]
    fn batch_create_chunks(
        &self,
        #[auth]
        auth_: &conjure_object::BearerToken,
        #[body(serializer = conjure_http::client::StdRequestSerializer)]
        request: &super::super::super::super::objects::timeseries::seriescache::api::CreateChunksParameters,
    ) -> Result<
        super::super::super::super::objects::timeseries::seriescache::api::CreateChunksResponse,
        conjure_http::private::Error,
    >;
    /// Deletes the chunks that intersect the given time range. Does not delete the corresponding files from S3.
    ///
    /// `POST /timeseries/series-cache/v1/logical-series/{logicalSeriesRid}/delete-chunks`
    #[endpoint(
        method = POST,
        path = "/timeseries/series-cache/v1/logical-series/{logicalSeriesRid}/delete-chunks",
        name = "deleteChunks",
        accept = conjure_http::client::StdResponseDeserializer
    )]
    fn delete_chunks(
        &self,
        #[auth]
        auth_: &conjure_object::BearerToken,
        #[path(
            name = "logicalSeriesRid",
            encoder = conjure_http::client::conjure::PlainEncoder
        )]
        logical_series_rid: &super::super::super::super::objects::api::LogicalSeriesRid,
        #[body(serializer = conjure_http::client::StdRequestSerializer)]
        request: &super::super::super::super::objects::timeseries::seriescache::api::DeleteChunksParameters,
    ) -> Result<
        super::super::super::super::objects::timeseries::seriescache::api::DeleteChunksResponse,
        conjure_http::private::Error,
    >;
}
/// The Series Cache service manages internal chunks of data cached by Nominal's backend.
///
/// Async client trait mirroring `SeriesCacheService`; the client implementation is
/// generated by the `#[conjure_http::conjure_client]` attribute macro. Generated code —
/// do not edit by hand.
#[conjure_http::conjure_client(name = "SeriesCacheService")]
pub trait AsyncSeriesCacheService<
    // Generated type parameter for raw streaming response bodies (a `Stream` here rather
    // than an `Iterator`, matching the async variant). No visible endpoint declares a
    // streaming response, so `I` is unused by these methods; the macro requires it.
    #[response_body]
    I: conjure_http::private::Stream<
            Item = Result<conjure_http::private::Bytes, conjure_http::private::Error>,
        >,
> {
    /// Fetches "chunks" of series that are stored as Arrow files in S3.
    /// This endpoint is being deprecated for new series in favor of batchGetCachedSeries
    ///
    /// `POST /timeseries/series-cache/v1/logical-series/{logicalSeriesRid}/get-chunks`
    #[endpoint(
        method = POST,
        path = "/timeseries/series-cache/v1/logical-series/{logicalSeriesRid}/get-chunks",
        name = "getChunks",
        accept = conjure_http::client::StdResponseDeserializer
    )]
    async fn get_chunks(
        &self,
        #[auth]
        auth_: &conjure_object::BearerToken,
        #[path(
            name = "logicalSeriesRid",
            encoder = conjure_http::client::conjure::PlainEncoder
        )]
        logical_series_rid: &super::super::super::super::objects::api::LogicalSeriesRid,
        #[body(serializer = conjure_http::client::StdRequestSerializer)]
        get_chunks_parameters: &super::super::super::super::objects::timeseries::seriescache::api::GetChunksParameters,
    ) -> Result<
        super::super::super::super::objects::timeseries::seriescache::api::GetChunksResponse,
        conjure_http::private::Error,
    >;
    /// Registers cached chunks for the given logical series.
    ///
    /// `POST /timeseries/series-cache/v1/logical-series/{logicalSeriesRid}/create-chunks`
    #[endpoint(
        method = POST,
        path = "/timeseries/series-cache/v1/logical-series/{logicalSeriesRid}/create-chunks",
        name = "createChunks",
        accept = conjure_http::client::StdResponseDeserializer
    )]
    async fn create_chunks(
        &self,
        #[auth]
        auth_: &conjure_object::BearerToken,
        #[path(
            name = "logicalSeriesRid",
            encoder = conjure_http::client::conjure::PlainEncoder
        )]
        logical_series_rid: &super::super::super::super::objects::api::LogicalSeriesRid,
        #[body(serializer = conjure_http::client::StdRequestSerializer)]
        create_chunks_parameters: &super::super::super::super::objects::timeseries::seriescache::api::CreateChunksParameters,
    ) -> Result<
        super::super::super::super::objects::timeseries::seriescache::api::CreateChunksResponse,
        conjure_http::private::Error,
    >;
    /// Registers cached chunks without a per-series path parameter.
    ///
    /// `POST /timeseries/series-cache/v1/batch-create-chunks`
    ///
    /// NOTE(review): takes the same `CreateChunksParameters` body as `createChunks`;
    /// presumably the target series is identified inside the request body — confirm
    /// against the Conjure service definition.
    #[endpoint(
        method = POST,
        path = "/timeseries/series-cache/v1/batch-create-chunks",
        name = "batchCreateChunks",
        accept = conjure_http::client::StdResponseDeserializer
    )]
    async fn batch_create_chunks(
        &self,
        #[auth]
        auth_: &conjure_object::BearerToken,
        #[body(serializer = conjure_http::client::StdRequestSerializer)]
        request: &super::super::super::super::objects::timeseries::seriescache::api::CreateChunksParameters,
    ) -> Result<
        super::super::super::super::objects::timeseries::seriescache::api::CreateChunksResponse,
        conjure_http::private::Error,
    >;
    /// Deletes the chunks that intersect the given time range. Does not delete the corresponding files from S3.
    ///
    /// `POST /timeseries/series-cache/v1/logical-series/{logicalSeriesRid}/delete-chunks`
    #[endpoint(
        method = POST,
        path = "/timeseries/series-cache/v1/logical-series/{logicalSeriesRid}/delete-chunks",
        name = "deleteChunks",
        accept = conjure_http::client::StdResponseDeserializer
    )]
    async fn delete_chunks(
        &self,
        #[auth]
        auth_: &conjure_object::BearerToken,
        #[path(
            name = "logicalSeriesRid",
            encoder = conjure_http::client::conjure::PlainEncoder
        )]
        logical_series_rid: &super::super::super::super::objects::api::LogicalSeriesRid,
        #[body(serializer = conjure_http::client::StdRequestSerializer)]
        request: &super::super::super::super::objects::timeseries::seriescache::api::DeleteChunksParameters,
    ) -> Result<
        super::super::super::super::objects::timeseries::seriescache::api::DeleteChunksResponse,
        conjure_http::private::Error,
    >;
}
/// The Series Cache service manages internal chunks of data cached by Nominal's backend.
///
/// Async client trait generated with the `local` flag (note the `local` argument to the
/// macro below, which is the only difference from `AsyncSeriesCacheService`).
/// NOTE(review): in conjure-rust codegen, `local` appears to produce a client whose
/// futures need not be `Send` (for single-threaded executors) — confirm against the
/// `conjure_http` client macro documentation. Generated code — do not edit by hand.
#[conjure_http::conjure_client(name = "SeriesCacheService", local)]
pub trait LocalAsyncSeriesCacheService<
    // Generated type parameter for raw streaming response bodies. No visible endpoint
    // declares a streaming response, so `I` is unused by these methods; the macro
    // requires it regardless.
    #[response_body]
    I: conjure_http::private::Stream<
            Item = Result<conjure_http::private::Bytes, conjure_http::private::Error>,
        >,
> {
    /// Fetches "chunks" of series that are stored as Arrow files in S3.
    /// This endpoint is being deprecated for new series in favor of batchGetCachedSeries
    ///
    /// `POST /timeseries/series-cache/v1/logical-series/{logicalSeriesRid}/get-chunks`
    #[endpoint(
        method = POST,
        path = "/timeseries/series-cache/v1/logical-series/{logicalSeriesRid}/get-chunks",
        name = "getChunks",
        accept = conjure_http::client::StdResponseDeserializer
    )]
    async fn get_chunks(
        &self,
        #[auth]
        auth_: &conjure_object::BearerToken,
        #[path(
            name = "logicalSeriesRid",
            encoder = conjure_http::client::conjure::PlainEncoder
        )]
        logical_series_rid: &super::super::super::super::objects::api::LogicalSeriesRid,
        #[body(serializer = conjure_http::client::StdRequestSerializer)]
        get_chunks_parameters: &super::super::super::super::objects::timeseries::seriescache::api::GetChunksParameters,
    ) -> Result<
        super::super::super::super::objects::timeseries::seriescache::api::GetChunksResponse,
        conjure_http::private::Error,
    >;
    /// Registers cached chunks for the given logical series.
    ///
    /// `POST /timeseries/series-cache/v1/logical-series/{logicalSeriesRid}/create-chunks`
    #[endpoint(
        method = POST,
        path = "/timeseries/series-cache/v1/logical-series/{logicalSeriesRid}/create-chunks",
        name = "createChunks",
        accept = conjure_http::client::StdResponseDeserializer
    )]
    async fn create_chunks(
        &self,
        #[auth]
        auth_: &conjure_object::BearerToken,
        #[path(
            name = "logicalSeriesRid",
            encoder = conjure_http::client::conjure::PlainEncoder
        )]
        logical_series_rid: &super::super::super::super::objects::api::LogicalSeriesRid,
        #[body(serializer = conjure_http::client::StdRequestSerializer)]
        create_chunks_parameters: &super::super::super::super::objects::timeseries::seriescache::api::CreateChunksParameters,
    ) -> Result<
        super::super::super::super::objects::timeseries::seriescache::api::CreateChunksResponse,
        conjure_http::private::Error,
    >;
    /// Registers cached chunks without a per-series path parameter.
    ///
    /// `POST /timeseries/series-cache/v1/batch-create-chunks`
    ///
    /// NOTE(review): takes the same `CreateChunksParameters` body as `createChunks`;
    /// presumably the target series is identified inside the request body — confirm
    /// against the Conjure service definition.
    #[endpoint(
        method = POST,
        path = "/timeseries/series-cache/v1/batch-create-chunks",
        name = "batchCreateChunks",
        accept = conjure_http::client::StdResponseDeserializer
    )]
    async fn batch_create_chunks(
        &self,
        #[auth]
        auth_: &conjure_object::BearerToken,
        #[body(serializer = conjure_http::client::StdRequestSerializer)]
        request: &super::super::super::super::objects::timeseries::seriescache::api::CreateChunksParameters,
    ) -> Result<
        super::super::super::super::objects::timeseries::seriescache::api::CreateChunksResponse,
        conjure_http::private::Error,
    >;
    /// Deletes the chunks that intersect the given time range. Does not delete the corresponding files from S3.
    ///
    /// `POST /timeseries/series-cache/v1/logical-series/{logicalSeriesRid}/delete-chunks`
    #[endpoint(
        method = POST,
        path = "/timeseries/series-cache/v1/logical-series/{logicalSeriesRid}/delete-chunks",
        name = "deleteChunks",
        accept = conjure_http::client::StdResponseDeserializer
    )]
    async fn delete_chunks(
        &self,
        #[auth]
        auth_: &conjure_object::BearerToken,
        #[path(
            name = "logicalSeriesRid",
            encoder = conjure_http::client::conjure::PlainEncoder
        )]
        logical_series_rid: &super::super::super::super::objects::api::LogicalSeriesRid,
        #[body(serializer = conjure_http::client::StdRequestSerializer)]
        request: &super::super::super::super::objects::timeseries::seriescache::api::DeleteChunksParameters,
    ) -> Result<
        super::super::super::super::objects::timeseries::seriescache::api::DeleteChunksResponse,
        conjure_http::private::Error,
    >;
}