pub struct LogsPipelinesAPI { /* private fields */ }
Pipelines and processors operate on incoming logs, parsing and transforming them into structured attributes for easier querying.
- See the pipelines configuration page for a list of the pipelines and processors currently configured in the web UI.
- Additional API-related information about processors can be found in the processors documentation.
- For more information about pipelines, see the pipeline documentation.
Notes:
Grok parsing rules may affect JSON output and require returned data to be adjusted before it is used in a request. For example, if you use data returned from one request in the body of another request, and a parsing rule uses a regex pattern like \s for spaces, you will need to convert all escaped spaces to %{space} before using that data in the body.
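As a minimal sketch of that conversion (the helper name is illustrative, not part of this crate):

// Illustrative helper: rewrite escaped spaces in a Grok rule returned
// by the API so the rule can be reused in a request body.
fn normalize_grok_spaces(rule: &str) -> String {
    rule.replace("\\s", "%{space}")
}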
Implementations
impl LogsPipelinesAPI
pub fn new() -> Self
pub fn with_config(config: Configuration) -> Self
Examples found in repository
- examples/v1_logs-pipelines_CreateLogsPipeline.rs
- examples/v1_logs-pipelines_UpdateLogsPipeline.rs
- examples/v1_logs-pipelines_CreateLogsPipeline_1271012410.rs
- examples/v1_logs-pipelines_CreateLogsPipeline_1248402480.rs
- examples/v1_logs-pipelines_CreateLogsPipeline_3314493032.rs
- examples/v1_logs-pipelines_CreateLogsPipeline_3934594739.rs
- examples/v1_logs-pipelines_CreateLogsPipeline_1267211320.rs
pub fn with_client_and_config(
    config: Configuration,
    client: ClientWithMiddleware,
) -> Self
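No repository example is shown for this constructor; here is a minimal sketch, assuming the reqwest_middleware ClientBuilder API (add middleware with .with(...) as needed):

use datadog_api_client::datadog;
use datadog_api_client::datadogV1::api_logs_pipelines::LogsPipelinesAPI;
use reqwest_middleware::ClientBuilder;

fn build_api() -> LogsPipelinesAPI {
    // Wrap a plain reqwest client; insert middleware into the builder as needed.
    let client = ClientBuilder::new(reqwest::Client::new()).build();
    LogsPipelinesAPI::with_client_and_config(datadog::Configuration::new(), client)
}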
pub async fn create_logs_pipeline(
    &self,
    body: LogsPipeline,
) -> Result<LogsPipeline, Error<CreateLogsPipelineError>>
Create a pipeline in your organization.
Examples found in repository
use datadog_api_client::datadog;
use datadog_api_client::datadogV1::api_logs_pipelines::LogsPipelinesAPI;
use datadog_api_client::datadogV1::model::{
    LogsFilter, LogsPipeline, LogsProcessor, LogsSpanRemapper, LogsSpanRemapperType,
};

#[tokio::main]
async fn main() {
    let body = LogsPipeline::new("testPipeline".to_string())
        .filter(LogsFilter::new().query("source:python".to_string()))
        .processors(vec![LogsProcessor::LogsSpanRemapper(Box::new(
            LogsSpanRemapper::new(LogsSpanRemapperType::SPAN_ID_REMAPPER)
                .is_enabled(true)
                .name("test_filter".to_string())
                .sources(vec!["dd.span_id".to_string()]),
        ))])
        .tags(vec![]);
    let configuration = datadog::Configuration::new();
    let api = LogsPipelinesAPI::with_config(configuration);
    let resp = api.create_logs_pipeline(body).await;
    if let Ok(value) = resp {
        println!("{:#?}", value);
    } else {
        println!("{:#?}", resp.unwrap_err());
    }
}
More examples
use datadog_api_client::datadog;
use datadog_api_client::datadogV1::api_logs_pipelines::LogsPipelinesAPI;
use datadog_api_client::datadogV1::model::{
    LogsFilter, LogsGrokParser, LogsGrokParserRules, LogsGrokParserType, LogsPipeline,
    LogsProcessor,
};

#[tokio::main]
async fn main() {
    let body = LogsPipeline::new("".to_string())
        .filter(LogsFilter::new().query("source:python".to_string()))
        .processors(vec![LogsProcessor::LogsGrokParser(Box::new(
            LogsGrokParser::new(
                LogsGrokParserRules::new(
                    r#"rule_name_1 foo
rule_name_2 bar
"#
                    .to_string(),
                )
                .support_rules(
                    r#"rule_name_1 foo
rule_name_2 bar
"#
                    .to_string(),
                ),
                "message".to_string(),
                LogsGrokParserType::GROK_PARSER,
            )
            .is_enabled(false)
            .samples(vec![]),
        ))])
        .tags(vec![]);
    let configuration = datadog::Configuration::new();
    let api = LogsPipelinesAPI::with_config(configuration);
    let resp = api.create_logs_pipeline(body).await;
    if let Ok(value) = resp {
        println!("{:#?}", value);
    } else {
        println!("{:#?}", resp.unwrap_err());
    }
}
use datadog_api_client::datadog;
use datadog_api_client::datadogV1::api_logs_pipelines::LogsPipelinesAPI;
use datadog_api_client::datadogV1::model::{
    LogsArrayProcessor, LogsArrayProcessorOperation, LogsArrayProcessorOperationLength,
    LogsArrayProcessorOperationLengthType, LogsArrayProcessorType, LogsFilter, LogsPipeline,
    LogsProcessor,
};

#[tokio::main]
async fn main() {
    let body = LogsPipeline::new("testPipelineArrayLength".to_string())
        .filter(LogsFilter::new().query("source:python".to_string()))
        .processors(vec![LogsProcessor::LogsArrayProcessor(Box::new(
            LogsArrayProcessor::new(
                LogsArrayProcessorOperation::LogsArrayProcessorOperationLength(Box::new(
                    LogsArrayProcessorOperationLength::new(
                        "tags".to_string(),
                        "tagCount".to_string(),
                        LogsArrayProcessorOperationLengthType::LENGTH,
                    ),
                )),
                LogsArrayProcessorType::ARRAY_PROCESSOR,
            )
            .is_enabled(true)
            .name("count_tags".to_string()),
        ))])
        .tags(vec![]);
    let configuration = datadog::Configuration::new();
    let api = LogsPipelinesAPI::with_config(configuration);
    let resp = api.create_logs_pipeline(body).await;
    if let Ok(value) = resp {
        println!("{:#?}", value);
    } else {
        println!("{:#?}", resp.unwrap_err());
    }
}
use datadog_api_client::datadog;
use datadog_api_client::datadogV1::api_logs_pipelines::LogsPipelinesAPI;
use datadog_api_client::datadogV1::model::{
    LogsArrayProcessor, LogsArrayProcessorOperation, LogsArrayProcessorOperationAppend,
    LogsArrayProcessorOperationAppendType, LogsArrayProcessorType, LogsFilter, LogsPipeline,
    LogsProcessor,
};

#[tokio::main]
async fn main() {
    let body = LogsPipeline::new("testPipelineArrayAppend".to_string())
        .filter(LogsFilter::new().query("source:python".to_string()))
        .processors(vec![LogsProcessor::LogsArrayProcessor(Box::new(
            LogsArrayProcessor::new(
                LogsArrayProcessorOperation::LogsArrayProcessorOperationAppend(Box::new(
                    LogsArrayProcessorOperationAppend::new(
                        "network.client.ip".to_string(),
                        "sourceIps".to_string(),
                        LogsArrayProcessorOperationAppendType::APPEND,
                    ),
                )),
                LogsArrayProcessorType::ARRAY_PROCESSOR,
            )
            .is_enabled(true)
            .name("append_ip_to_array".to_string()),
        ))])
        .tags(vec![]);
    let configuration = datadog::Configuration::new();
    let api = LogsPipelinesAPI::with_config(configuration);
    let resp = api.create_logs_pipeline(body).await;
    if let Ok(value) = resp {
        println!("{:#?}", value);
    } else {
        println!("{:#?}", resp.unwrap_err());
    }
}
use datadog_api_client::datadog;
use datadog_api_client::datadogV1::api_logs_pipelines::LogsPipelinesAPI;
use datadog_api_client::datadogV1::model::{
    LogsArrayProcessor, LogsArrayProcessorOperation, LogsArrayProcessorOperationAppend,
    LogsArrayProcessorOperationAppendType, LogsArrayProcessorType, LogsFilter, LogsPipeline,
    LogsProcessor,
};

#[tokio::main]
async fn main() {
    let body = LogsPipeline::new("testPipelineArrayAppendPreserve".to_string())
        .filter(LogsFilter::new().query("source:python".to_string()))
        .processors(vec![LogsProcessor::LogsArrayProcessor(Box::new(
            LogsArrayProcessor::new(
                LogsArrayProcessorOperation::LogsArrayProcessorOperationAppend(Box::new(
                    LogsArrayProcessorOperationAppend::new(
                        "network.client.ip".to_string(),
                        "sourceIps".to_string(),
                        LogsArrayProcessorOperationAppendType::APPEND,
                    )
                    .preserve_source(true),
                )),
                LogsArrayProcessorType::ARRAY_PROCESSOR,
            )
            .is_enabled(true)
            .name("append_ip_and_keep_source".to_string()),
        ))])
        .tags(vec![]);
    let configuration = datadog::Configuration::new();
    let api = LogsPipelinesAPI::with_config(configuration);
    let resp = api.create_logs_pipeline(body).await;
    if let Ok(value) = resp {
        println!("{:#?}", value);
    } else {
        println!("{:#?}", resp.unwrap_err());
    }
}
use datadog_api_client::datadog;
use datadog_api_client::datadogV1::api_logs_pipelines::LogsPipelinesAPI;
use datadog_api_client::datadogV1::model::{
    LogsArrayProcessor, LogsArrayProcessorOperation, LogsArrayProcessorOperationAppend,
    LogsArrayProcessorOperationAppendType, LogsArrayProcessorType, LogsFilter, LogsPipeline,
    LogsProcessor,
};

#[tokio::main]
async fn main() {
    let body = LogsPipeline::new("testPipelineArrayAppendNoPreserve".to_string())
        .filter(LogsFilter::new().query("source:python".to_string()))
        .processors(vec![LogsProcessor::LogsArrayProcessor(Box::new(
            LogsArrayProcessor::new(
                LogsArrayProcessorOperation::LogsArrayProcessorOperationAppend(Box::new(
                    LogsArrayProcessorOperationAppend::new(
                        "network.client.ip".to_string(),
                        "sourceIps".to_string(),
                        LogsArrayProcessorOperationAppendType::APPEND,
                    )
                    .preserve_source(false),
                )),
                LogsArrayProcessorType::ARRAY_PROCESSOR,
            )
            .is_enabled(true)
            .name("append_ip_and_remove_source".to_string()),
        ))])
        .tags(vec![]);
    let configuration = datadog::Configuration::new();
    let api = LogsPipelinesAPI::with_config(configuration);
    let resp = api.create_logs_pipeline(body).await;
    if let Ok(value) = resp {
        println!("{:#?}", value);
    } else {
        println!("{:#?}", resp.unwrap_err());
    }
}
pub async fn create_logs_pipeline_with_http_info(
    &self,
    body: LogsPipeline,
) -> Result<ResponseContent<LogsPipeline>, Error<CreateLogsPipelineError>>
Create a pipeline in your organization.
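A minimal sketch of inspecting the raw response, assuming the ResponseContent wrapper exposes the HTTP status and the optional deserialized entity (field names are assumptions, not confirmed by this page):

// `api` and `body` built as in the create_logs_pipeline examples above.
let resp = api.create_logs_pipeline_with_http_info(body).await;
if let Ok(response) = resp {
    // Assumed fields: HTTP status code plus the deserialized pipeline.
    println!("status: {}", response.status);
    println!("{:#?}", response.entity);
}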
pub async fn delete_logs_pipeline(
    &self,
    pipeline_id: String,
) -> Result<(), Error<DeleteLogsPipelineError>>
Delete a given pipeline from your organization. This endpoint takes no JSON arguments.
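A minimal usage sketch ("pipeline_id" is a placeholder, with `api` built as in the examples above):

let resp = api.delete_logs_pipeline("pipeline_id".to_string()).await;
if let Err(err) = resp {
    println!("{:#?}", err);
}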
pub async fn delete_logs_pipeline_with_http_info(
    &self,
    pipeline_id: String,
) -> Result<ResponseContent<()>, Error<DeleteLogsPipelineError>>
Delete a given pipeline from your organization. This endpoint takes no JSON arguments.
pub async fn get_logs_pipeline(
    &self,
    pipeline_id: String,
) -> Result<LogsPipeline, Error<GetLogsPipelineError>>
Get a specific pipeline from your organization. This endpoint takes no JSON arguments.
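A minimal usage sketch ("pipeline_id" is a placeholder):

let resp = api.get_logs_pipeline("pipeline_id".to_string()).await;
if let Ok(pipeline) = resp {
    println!("{:#?}", pipeline);
}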
pub async fn get_logs_pipeline_with_http_info(
    &self,
    pipeline_id: String,
) -> Result<ResponseContent<LogsPipeline>, Error<GetLogsPipelineError>>
Get a specific pipeline from your organization. This endpoint takes no JSON arguments.
pub async fn get_logs_pipeline_order(
    &self,
) -> Result<LogsPipelinesOrder, Error<GetLogsPipelineOrderError>>
Get the current order of your pipelines. This endpoint takes no JSON arguments.
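A minimal usage sketch (the pipeline_ids field is assumed from the LogsPipelinesOrder constructor, which takes a Vec of pipeline IDs):

let resp = api.get_logs_pipeline_order().await;
if let Ok(order) = resp {
    // Assumed field: the ordered list of pipeline IDs.
    println!("{:#?}", order.pipeline_ids);
}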
pub async fn get_logs_pipeline_order_with_http_info(
    &self,
) -> Result<ResponseContent<LogsPipelinesOrder>, Error<GetLogsPipelineOrderError>>
Get the current order of your pipelines. This endpoint takes no JSON arguments.
pub async fn list_logs_pipelines(
    &self,
) -> Result<Vec<LogsPipeline>, Error<ListLogsPipelinesError>>
Get all pipelines from your organization. This endpoint takes no JSON arguments.
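A minimal usage sketch (the public name field is assumed from the LogsPipeline::new constructor, which takes the pipeline name):

let resp = api.list_logs_pipelines().await;
if let Ok(pipelines) = resp {
    for pipeline in pipelines {
        // Assumed field: the pipeline's display name.
        println!("{}", pipeline.name);
    }
}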
pub async fn list_logs_pipelines_with_http_info(
    &self,
) -> Result<ResponseContent<Vec<LogsPipeline>>, Error<ListLogsPipelinesError>>
Get all pipelines from your organization. This endpoint takes no JSON arguments.
pub async fn update_logs_pipeline(
    &self,
    pipeline_id: String,
    body: LogsPipeline,
) -> Result<LogsPipeline, Error<UpdateLogsPipelineError>>
Update a given pipeline configuration to change its processors or their order.
Note: Using this method updates your pipeline configuration by replacing your current configuration with the new one sent to your Datadog organization.
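Because the update replaces the whole configuration, a common pattern is read-modify-write; a minimal sketch ("pipeline_id" is a placeholder, and the public name field is an assumption based on the LogsPipeline constructor):

if let Ok(mut pipeline) = api.get_logs_pipeline("pipeline_id".to_string()).await {
    // Adjust the fetched configuration, then send the full object back.
    pipeline.name = "renamedPipeline".to_string();
    let resp = api
        .update_logs_pipeline("pipeline_id".to_string(), pipeline)
        .await;
    println!("{:#?}", resp);
}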
Examples found in repository
use datadog_api_client::datadog;
use datadog_api_client::datadogV1::api_logs_pipelines::LogsPipelinesAPI;
use datadog_api_client::datadogV1::model::{
    LogsFilter, LogsGrokParser, LogsGrokParserRules, LogsGrokParserType, LogsPipeline,
    LogsProcessor,
};

#[tokio::main]
async fn main() {
    let body = LogsPipeline::new("".to_string())
        .filter(LogsFilter::new().query("source:python".to_string()))
        .processors(vec![LogsProcessor::LogsGrokParser(Box::new(
            LogsGrokParser::new(
                LogsGrokParserRules::new(
                    r#"rule_name_1 foo
rule_name_2 bar
"#
                    .to_string(),
                )
                .support_rules(
                    r#"rule_name_1 foo
rule_name_2 bar
"#
                    .to_string(),
                ),
                "message".to_string(),
                LogsGrokParserType::GROK_PARSER,
            )
            .is_enabled(false)
            .samples(vec![]),
        ))])
        .tags(vec![]);
    let configuration = datadog::Configuration::new();
    let api = LogsPipelinesAPI::with_config(configuration);
    let resp = api
        .update_logs_pipeline("pipeline_id".to_string(), body)
        .await;
    if let Ok(value) = resp {
        println!("{:#?}", value);
    } else {
        println!("{:#?}", resp.unwrap_err());
    }
}
pub async fn update_logs_pipeline_with_http_info(
    &self,
    pipeline_id: String,
    body: LogsPipeline,
) -> Result<ResponseContent<LogsPipeline>, Error<UpdateLogsPipelineError>>
Update a given pipeline configuration to change its processors or their order.
Note: Using this method updates your pipeline configuration by replacing your current configuration with the new one sent to your Datadog organization.
pub async fn update_logs_pipeline_order(
    &self,
    body: LogsPipelinesOrder,
) -> Result<LogsPipelinesOrder, Error<UpdateLogsPipelineOrderError>>
Update the order of your pipelines. Since logs are processed sequentially, reordering a pipeline may change the structure and content of the data processed by other pipelines and their processors.
Note: Using the PUT method updates your pipeline order by replacing your current order with the new one sent to your Datadog organization.
Examples found in repository
use datadog_api_client::datadog;
use datadog_api_client::datadogV1::api_logs_pipelines::LogsPipelinesAPI;
use datadog_api_client::datadogV1::model::LogsPipelinesOrder;

#[tokio::main]
async fn main() {
    let body = LogsPipelinesOrder::new(vec![
        "tags".to_string(),
        "org_ids".to_string(),
        "products".to_string(),
    ]);
    let configuration = datadog::Configuration::new();
    let api = LogsPipelinesAPI::with_config(configuration);
    let resp = api.update_logs_pipeline_order(body).await;
    if let Ok(value) = resp {
        println!("{:#?}", value);
    } else {
        println!("{:#?}", resp.unwrap_err());
    }
}
pub async fn update_logs_pipeline_order_with_http_info(
    &self,
    body: LogsPipelinesOrder,
) -> Result<ResponseContent<LogsPipelinesOrder>, Error<UpdateLogsPipelineOrderError>>
Update the order of your pipelines. Since logs are processed sequentially, reordering a pipeline may change the structure and content of the data processed by other pipelines and their processors.
Note: Using the PUT method updates your pipeline order by replacing your current order with the new one sent to your Datadog organization.
Trait Implementations
impl Clone for LogsPipelinesAPI
fn clone(&self) -> LogsPipelinesAPI
fn clone_from(&mut self, source: &Self)
Performs copy-assignment from source.