Struct LogsPipelinesAPI

Source
pub struct LogsPipelinesAPI { /* private fields */ }
Expand description

Pipelines and processors operate on incoming logs, parsing and transforming them into structured attributes for easier querying.

Notes:

Grok parsing rules may affect JSON output and require returned data to be configured before using in a request. For example, if you are using the data returned from a request for another request body, and have a parsing rule that uses a regex pattern like \s for spaces, you will need to configure all escaped spaces as %{space} to use in the body data.

Implementations§

Source§

impl LogsPipelinesAPI

Source

pub fn new() -> Self

Source

pub fn with_config(config: Configuration) -> Self

Examples found in repository?
examples/v1_logs-pipelines_ListLogsPipelines.rs (line 8)
6async fn main() {
7    let configuration = datadog::Configuration::new();
8    let api = LogsPipelinesAPI::with_config(configuration);
9    let resp = api.list_logs_pipelines().await;
10    if let Ok(value) = resp {
11        println!("{:#?}", value);
12    } else {
13        println!("{:#?}", resp.unwrap_err());
14    }
15}
More examples
Hide additional examples
examples/v1_logs-pipelines_GetLogsPipelineOrder.rs (line 8)
6async fn main() {
7    let configuration = datadog::Configuration::new();
8    let api = LogsPipelinesAPI::with_config(configuration);
9    let resp = api.get_logs_pipeline_order().await;
10    if let Ok(value) = resp {
11        println!("{:#?}", value);
12    } else {
13        println!("{:#?}", resp.unwrap_err());
14    }
15}
examples/v1_logs-pipelines_GetLogsPipeline.rs (line 8)
6async fn main() {
7    let configuration = datadog::Configuration::new();
8    let api = LogsPipelinesAPI::with_config(configuration);
9    let resp = api.get_logs_pipeline("pipeline_id".to_string()).await;
10    if let Ok(value) = resp {
11        println!("{:#?}", value);
12    } else {
13        println!("{:#?}", resp.unwrap_err());
14    }
15}
examples/v1_logs-pipelines_DeleteLogsPipeline.rs (line 8)
6async fn main() {
7    let configuration = datadog::Configuration::new();
8    let api = LogsPipelinesAPI::with_config(configuration);
9    let resp = api.delete_logs_pipeline("pipeline_id".to_string()).await;
10    if let Ok(value) = resp {
11        println!("{:#?}", value);
12    } else {
13        println!("{:#?}", resp.unwrap_err());
14    }
15}
examples/v1_logs-pipelines_UpdateLogsPipelineOrder.rs (line 14)
7async fn main() {
8    let body = LogsPipelinesOrder::new(vec![
9        "tags".to_string(),
10        "org_ids".to_string(),
11        "products".to_string(),
12    ]);
13    let configuration = datadog::Configuration::new();
14    let api = LogsPipelinesAPI::with_config(configuration);
15    let resp = api.update_logs_pipeline_order(body).await;
16    if let Ok(value) = resp {
17        println!("{:#?}", value);
18    } else {
19        println!("{:#?}", resp.unwrap_err());
20    }
21}
examples/v1_logs-pipelines_CreateLogsPipeline_2707101123.rs (line 22)
11async fn main() {
12    let body = LogsPipeline::new("testPipeline".to_string())
13        .filter(LogsFilter::new().query("source:python".to_string()))
14        .processors(vec![LogsProcessor::LogsSpanRemapper(Box::new(
15            LogsSpanRemapper::new(LogsSpanRemapperType::SPAN_ID_REMAPPER)
16                .is_enabled(true)
17                .name("test_filter".to_string())
18                .sources(vec!["dd.span_id".to_string()]),
19        ))])
20        .tags(vec![]);
21    let configuration = datadog::Configuration::new();
22    let api = LogsPipelinesAPI::with_config(configuration);
23    let resp = api.create_logs_pipeline(body).await;
24    if let Ok(value) = resp {
25        println!("{:#?}", value);
26    } else {
27        println!("{:#?}", resp.unwrap_err());
28    }
29}
Source

pub fn with_client_and_config( config: Configuration, client: ClientWithMiddleware, ) -> Self

Source

pub async fn create_logs_pipeline( &self, body: LogsPipeline, ) -> Result<LogsPipeline, Error<CreateLogsPipelineError>>

Create a pipeline in your organization.

Examples found in repository?
examples/v1_logs-pipelines_CreateLogsPipeline_2707101123.rs (line 23)
11async fn main() {
12    let body = LogsPipeline::new("testPipeline".to_string())
13        .filter(LogsFilter::new().query("source:python".to_string()))
14        .processors(vec![LogsProcessor::LogsSpanRemapper(Box::new(
15            LogsSpanRemapper::new(LogsSpanRemapperType::SPAN_ID_REMAPPER)
16                .is_enabled(true)
17                .name("test_filter".to_string())
18                .sources(vec!["dd.span_id".to_string()]),
19        ))])
20        .tags(vec![]);
21    let configuration = datadog::Configuration::new();
22    let api = LogsPipelinesAPI::with_config(configuration);
23    let resp = api.create_logs_pipeline(body).await;
24    if let Ok(value) = resp {
25        println!("{:#?}", value);
26    } else {
27        println!("{:#?}", resp.unwrap_err());
28    }
29}
More examples
Hide additional examples
examples/v1_logs-pipelines_CreateLogsPipeline.rs (line 38)
12async fn main() {
13    let body = LogsPipeline::new("".to_string())
14        .filter(LogsFilter::new().query("source:python".to_string()))
15        .processors(vec![LogsProcessor::LogsGrokParser(Box::new(
16            LogsGrokParser::new(
17                LogsGrokParserRules::new(
18                    r#"rule_name_1 foo
19rule_name_2 bar
20"#
21                    .to_string(),
22                )
23                .support_rules(
24                    r#"rule_name_1 foo
25rule_name_2 bar
26"#
27                    .to_string(),
28                ),
29                "message".to_string(),
30                LogsGrokParserType::GROK_PARSER,
31            )
32            .is_enabled(false)
33            .samples(vec![]),
34        ))])
35        .tags(vec![]);
36    let configuration = datadog::Configuration::new();
37    let api = LogsPipelinesAPI::with_config(configuration);
38    let resp = api.create_logs_pipeline(body).await;
39    if let Ok(value) = resp {
40        println!("{:#?}", value);
41    } else {
42        println!("{:#?}", resp.unwrap_err());
43    }
44}
examples/v1_logs-pipelines_CreateLogsPipeline_1271012410.rs (line 34)
14async fn main() {
15    let body = LogsPipeline::new("testPipelineArrayLength".to_string())
16        .filter(LogsFilter::new().query("source:python".to_string()))
17        .processors(vec![LogsProcessor::LogsArrayProcessor(Box::new(
18            LogsArrayProcessor::new(
19                LogsArrayProcessorOperation::LogsArrayProcessorOperationLength(Box::new(
20                    LogsArrayProcessorOperationLength::new(
21                        "tags".to_string(),
22                        "tagCount".to_string(),
23                        LogsArrayProcessorOperationLengthType::LENGTH,
24                    ),
25                )),
26                LogsArrayProcessorType::ARRAY_PROCESSOR,
27            )
28            .is_enabled(true)
29            .name("count_tags".to_string()),
30        ))])
31        .tags(vec![]);
32    let configuration = datadog::Configuration::new();
33    let api = LogsPipelinesAPI::with_config(configuration);
34    let resp = api.create_logs_pipeline(body).await;
35    if let Ok(value) = resp {
36        println!("{:#?}", value);
37    } else {
38        println!("{:#?}", resp.unwrap_err());
39    }
40}
examples/v1_logs-pipelines_CreateLogsPipeline_1248402480.rs (line 34)
14async fn main() {
15    let body = LogsPipeline::new("testPipelineArrayAppend".to_string())
16        .filter(LogsFilter::new().query("source:python".to_string()))
17        .processors(vec![LogsProcessor::LogsArrayProcessor(Box::new(
18            LogsArrayProcessor::new(
19                LogsArrayProcessorOperation::LogsArrayProcessorOperationAppend(Box::new(
20                    LogsArrayProcessorOperationAppend::new(
21                        "network.client.ip".to_string(),
22                        "sourceIps".to_string(),
23                        LogsArrayProcessorOperationAppendType::APPEND,
24                    ),
25                )),
26                LogsArrayProcessorType::ARRAY_PROCESSOR,
27            )
28            .is_enabled(true)
29            .name("append_ip_to_array".to_string()),
30        ))])
31        .tags(vec![]);
32    let configuration = datadog::Configuration::new();
33    let api = LogsPipelinesAPI::with_config(configuration);
34    let resp = api.create_logs_pipeline(body).await;
35    if let Ok(value) = resp {
36        println!("{:#?}", value);
37    } else {
38        println!("{:#?}", resp.unwrap_err());
39    }
40}
examples/v1_logs-pipelines_CreateLogsPipeline_3314493032.rs (line 36)
15async fn main() {
16    let body = LogsPipeline::new("testPipelineArrayAppendPreserve".to_string())
17        .filter(LogsFilter::new().query("source:python".to_string()))
18        .processors(vec![LogsProcessor::LogsArrayProcessor(Box::new(
19            LogsArrayProcessor::new(
20                LogsArrayProcessorOperation::LogsArrayProcessorOperationAppend(Box::new(
21                    LogsArrayProcessorOperationAppend::new(
22                        "network.client.ip".to_string(),
23                        "sourceIps".to_string(),
24                        LogsArrayProcessorOperationAppendType::APPEND,
25                    )
26                    .preserve_source(true),
27                )),
28                LogsArrayProcessorType::ARRAY_PROCESSOR,
29            )
30            .is_enabled(true)
31            .name("append_ip_and_keep_source".to_string()),
32        ))])
33        .tags(vec![]);
34    let configuration = datadog::Configuration::new();
35    let api = LogsPipelinesAPI::with_config(configuration);
36    let resp = api.create_logs_pipeline(body).await;
37    if let Ok(value) = resp {
38        println!("{:#?}", value);
39    } else {
40        println!("{:#?}", resp.unwrap_err());
41    }
42}
examples/v1_logs-pipelines_CreateLogsPipeline_3934594739.rs (line 36)
15async fn main() {
16    let body = LogsPipeline::new("testPipelineArrayAppendNoPreserve".to_string())
17        .filter(LogsFilter::new().query("source:python".to_string()))
18        .processors(vec![LogsProcessor::LogsArrayProcessor(Box::new(
19            LogsArrayProcessor::new(
20                LogsArrayProcessorOperation::LogsArrayProcessorOperationAppend(Box::new(
21                    LogsArrayProcessorOperationAppend::new(
22                        "network.client.ip".to_string(),
23                        "sourceIps".to_string(),
24                        LogsArrayProcessorOperationAppendType::APPEND,
25                    )
26                    .preserve_source(false),
27                )),
28                LogsArrayProcessorType::ARRAY_PROCESSOR,
29            )
30            .is_enabled(true)
31            .name("append_ip_and_remove_source".to_string()),
32        ))])
33        .tags(vec![]);
34    let configuration = datadog::Configuration::new();
35    let api = LogsPipelinesAPI::with_config(configuration);
36    let resp = api.create_logs_pipeline(body).await;
37    if let Ok(value) = resp {
38        println!("{:#?}", value);
39    } else {
40        println!("{:#?}", resp.unwrap_err());
41    }
42}
Source

pub async fn create_logs_pipeline_with_http_info( &self, body: LogsPipeline, ) -> Result<ResponseContent<LogsPipeline>, Error<CreateLogsPipelineError>>

Create a pipeline in your organization.

Source

pub async fn delete_logs_pipeline( &self, pipeline_id: String, ) -> Result<(), Error<DeleteLogsPipelineError>>

Delete a given pipeline from your organization. This endpoint takes no JSON arguments.

Examples found in repository?
examples/v1_logs-pipelines_DeleteLogsPipeline.rs (line 9)
6async fn main() {
7    let configuration = datadog::Configuration::new();
8    let api = LogsPipelinesAPI::with_config(configuration);
9    let resp = api.delete_logs_pipeline("pipeline_id".to_string()).await;
10    if let Ok(value) = resp {
11        println!("{:#?}", value);
12    } else {
13        println!("{:#?}", resp.unwrap_err());
14    }
15}
Source

pub async fn delete_logs_pipeline_with_http_info( &self, pipeline_id: String, ) -> Result<ResponseContent<()>, Error<DeleteLogsPipelineError>>

Delete a given pipeline from your organization. This endpoint takes no JSON arguments.

Source

pub async fn get_logs_pipeline( &self, pipeline_id: String, ) -> Result<LogsPipeline, Error<GetLogsPipelineError>>

Get a specific pipeline from your organization. This endpoint takes no JSON arguments.

Examples found in repository?
examples/v1_logs-pipelines_GetLogsPipeline.rs (line 9)
6async fn main() {
7    let configuration = datadog::Configuration::new();
8    let api = LogsPipelinesAPI::with_config(configuration);
9    let resp = api.get_logs_pipeline("pipeline_id".to_string()).await;
10    if let Ok(value) = resp {
11        println!("{:#?}", value);
12    } else {
13        println!("{:#?}", resp.unwrap_err());
14    }
15}
Source

pub async fn get_logs_pipeline_with_http_info( &self, pipeline_id: String, ) -> Result<ResponseContent<LogsPipeline>, Error<GetLogsPipelineError>>

Get a specific pipeline from your organization. This endpoint takes no JSON arguments.

Source

pub async fn get_logs_pipeline_order( &self, ) -> Result<LogsPipelinesOrder, Error<GetLogsPipelineOrderError>>

Get the current order of your pipelines. This endpoint takes no JSON arguments.

Examples found in repository?
examples/v1_logs-pipelines_GetLogsPipelineOrder.rs (line 9)
6async fn main() {
7    let configuration = datadog::Configuration::new();
8    let api = LogsPipelinesAPI::with_config(configuration);
9    let resp = api.get_logs_pipeline_order().await;
10    if let Ok(value) = resp {
11        println!("{:#?}", value);
12    } else {
13        println!("{:#?}", resp.unwrap_err());
14    }
15}
Source

pub async fn get_logs_pipeline_order_with_http_info( &self, ) -> Result<ResponseContent<LogsPipelinesOrder>, Error<GetLogsPipelineOrderError>>

Get the current order of your pipelines. This endpoint takes no JSON arguments.

Source

pub async fn list_logs_pipelines( &self, ) -> Result<Vec<LogsPipeline>, Error<ListLogsPipelinesError>>

Get all pipelines from your organization. This endpoint takes no JSON arguments.

Examples found in repository?
examples/v1_logs-pipelines_ListLogsPipelines.rs (line 9)
6async fn main() {
7    let configuration = datadog::Configuration::new();
8    let api = LogsPipelinesAPI::with_config(configuration);
9    let resp = api.list_logs_pipelines().await;
10    if let Ok(value) = resp {
11        println!("{:#?}", value);
12    } else {
13        println!("{:#?}", resp.unwrap_err());
14    }
15}
Source

pub async fn list_logs_pipelines_with_http_info( &self, ) -> Result<ResponseContent<Vec<LogsPipeline>>, Error<ListLogsPipelinesError>>

Get all pipelines from your organization. This endpoint takes no JSON arguments.

Source

pub async fn update_logs_pipeline( &self, pipeline_id: String, body: LogsPipeline, ) -> Result<LogsPipeline, Error<UpdateLogsPipelineError>>

Update a given pipeline configuration to change its processors or their order.

Note: Using this method updates your pipeline configuration by replacing your current configuration with the new one sent to your Datadog organization.

Examples found in repository?
examples/v1_logs-pipelines_UpdateLogsPipeline.rs (line 39)
12async fn main() {
13    let body = LogsPipeline::new("".to_string())
14        .filter(LogsFilter::new().query("source:python".to_string()))
15        .processors(vec![LogsProcessor::LogsGrokParser(Box::new(
16            LogsGrokParser::new(
17                LogsGrokParserRules::new(
18                    r#"rule_name_1 foo
19rule_name_2 bar
20"#
21                    .to_string(),
22                )
23                .support_rules(
24                    r#"rule_name_1 foo
25rule_name_2 bar
26"#
27                    .to_string(),
28                ),
29                "message".to_string(),
30                LogsGrokParserType::GROK_PARSER,
31            )
32            .is_enabled(false)
33            .samples(vec![]),
34        ))])
35        .tags(vec![]);
36    let configuration = datadog::Configuration::new();
37    let api = LogsPipelinesAPI::with_config(configuration);
38    let resp = api
39        .update_logs_pipeline("pipeline_id".to_string(), body)
40        .await;
41    if let Ok(value) = resp {
42        println!("{:#?}", value);
43    } else {
44        println!("{:#?}", resp.unwrap_err());
45    }
46}
Source

pub async fn update_logs_pipeline_with_http_info( &self, pipeline_id: String, body: LogsPipeline, ) -> Result<ResponseContent<LogsPipeline>, Error<UpdateLogsPipelineError>>

Update a given pipeline configuration to change its processors or their order.

Note: Using this method updates your pipeline configuration by replacing your current configuration with the new one sent to your Datadog organization.

Source

pub async fn update_logs_pipeline_order( &self, body: LogsPipelinesOrder, ) -> Result<LogsPipelinesOrder, Error<UpdateLogsPipelineOrderError>>

Update the order of your pipelines. Since logs are processed sequentially, reordering a pipeline may change the structure and content of the data processed by other pipelines and their processors.

Note: Using the PUT method updates your pipeline order by replacing your current order with the new one sent to your Datadog organization.

Examples found in repository?
examples/v1_logs-pipelines_UpdateLogsPipelineOrder.rs (line 15)
7async fn main() {
8    let body = LogsPipelinesOrder::new(vec![
9        "tags".to_string(),
10        "org_ids".to_string(),
11        "products".to_string(),
12    ]);
13    let configuration = datadog::Configuration::new();
14    let api = LogsPipelinesAPI::with_config(configuration);
15    let resp = api.update_logs_pipeline_order(body).await;
16    if let Ok(value) = resp {
17        println!("{:#?}", value);
18    } else {
19        println!("{:#?}", resp.unwrap_err());
20    }
21}
Source

pub async fn update_logs_pipeline_order_with_http_info( &self, body: LogsPipelinesOrder, ) -> Result<ResponseContent<LogsPipelinesOrder>, Error<UpdateLogsPipelineOrderError>>

Update the order of your pipelines. Since logs are processed sequentially, reordering a pipeline may change the structure and content of the data processed by other pipelines and their processors.

Note: Using the PUT method updates your pipeline order by replacing your current order with the new one sent to your Datadog organization.

Trait Implementations§

Source§

impl Clone for LogsPipelinesAPI

Source§

fn clone(&self) -> LogsPipelinesAPI

Returns a duplicate of the value. Read more
1.0.0 · Source§

fn clone_from(&mut self, source: &Self)

Performs copy-assignment from source. Read more
Source§

impl Debug for LogsPipelinesAPI

Source§

fn fmt(&self, f: &mut Formatter<'_>) -> Result

Formats the value using the given formatter. Read more
Source§

impl Default for LogsPipelinesAPI

Source§

fn default() -> Self

Returns the “default value” for a type. Read more

Auto Trait Implementations§

Blanket Implementations§

Source§

impl<T> Any for T
where T: 'static + ?Sized,

Source§

fn type_id(&self) -> TypeId

Gets the TypeId of self. Read more
Source§

impl<T> Borrow<T> for T
where T: ?Sized,

Source§

fn borrow(&self) -> &T

Immutably borrows from an owned value. Read more
Source§

impl<T> BorrowMut<T> for T
where T: ?Sized,

Source§

fn borrow_mut(&mut self) -> &mut T

Mutably borrows from an owned value. Read more
Source§

impl<T> CloneToUninit for T
where T: Clone,

Source§

unsafe fn clone_to_uninit(&self, dest: *mut u8)

🔬This is a nightly-only experimental API. (clone_to_uninit)
Performs copy-assignment from self to dest. Read more
Source§

impl<T> From<T> for T

Source§

fn from(t: T) -> T

Returns the argument unchanged.

Source§

impl<T> Instrument for T

Source§

fn instrument(self, span: Span) -> Instrumented<Self>

Instruments this type with the provided Span, returning an Instrumented wrapper. Read more
Source§

fn in_current_span(self) -> Instrumented<Self>

Instruments this type with the current Span, returning an Instrumented wrapper. Read more
Source§

impl<T, U> Into<U> for T
where U: From<T>,

Source§

fn into(self) -> U

Calls U::from(self).

That is, this conversion is whatever the implementation of From<T> for U chooses to do.

Source§

impl<T> ToOwned for T
where T: Clone,

Source§

type Owned = T

The resulting type after obtaining ownership.
Source§

fn to_owned(&self) -> T

Creates owned data from borrowed data, usually by cloning. Read more
Source§

fn clone_into(&self, target: &mut T)

Uses borrowed data to replace owned data, usually by cloning. Read more
Source§

impl<T, U> TryFrom<U> for T
where U: Into<T>,

Source§

type Error = Infallible

The type returned in the event of a conversion error.
Source§

fn try_from(value: U) -> Result<T, <T as TryFrom<U>>::Error>

Performs the conversion.
Source§

impl<T, U> TryInto<U> for T
where U: TryFrom<T>,

Source§

type Error = <U as TryFrom<T>>::Error

The type returned in the event of a conversion error.
Source§

fn try_into(self) -> Result<U, <U as TryFrom<T>>::Error>

Performs the conversion.
Source§

impl<V, T> VZip<V> for T
where V: MultiLane<T>,

Source§

fn vzip(self) -> V

Source§

impl<T> WithSubscriber for T

Source§

fn with_subscriber<S>(self, subscriber: S) -> WithDispatch<Self>
where S: Into<Dispatch>,

Attaches the provided Subscriber to this type, returning a WithDispatch wrapper. Read more
Source§

fn with_current_subscriber(self) -> WithDispatch<Self>

Attaches the current default Subscriber to this type, returning a WithDispatch wrapper. Read more
Source§

impl<T> ErasedDestructor for T
where T: 'static,