pub struct LogsPipelinesAPI { /* private fields */ }Expand description
Pipelines and processors operate on incoming logs, parsing and transforming them into structured attributes for easier querying.
-
See the pipelines configuration page for a list of the pipelines and processors currently configured in the web UI.
-
Additional API-related information about processors can be found in the processors documentation.
-
For more information about Pipelines, see the pipeline documentation.
Notes:
These endpoints are only available for admin users. Make sure to use an application key created by an admin.
Grok parsing rules may affect JSON output and require
returned data to be configured before using it in a request.
For example, if you are using the data returned from a
request for another request body, and have a parsing rule
that uses a regex pattern like \s for spaces, you will
need to configure all escaped spaces as %{space} to use
in the body data.
Implementations§
source§impl LogsPipelinesAPI
impl LogsPipelinesAPI
pub fn new() -> Self
sourcepub fn with_config(config: Configuration) -> Self
pub fn with_config(config: Configuration) -> Self
Examples found in repository?
More examples
7 8 9 10 11 12 13 14 15 16 17 18 19 20 21
async fn main() {
let body = LogsPipelinesOrder::new(vec![
"tags".to_string(),
"org_ids".to_string(),
"products".to_string(),
]);
let configuration = datadog::Configuration::new();
let api = LogsPipelinesAPI::with_config(configuration);
let resp = api.update_logs_pipeline_order(body).await;
if let Ok(value) = resp {
println!("{:#?}", value);
} else {
println!("{:#?}", resp.unwrap_err());
}
}12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43
async fn main() {
let body = LogsPipeline::new("".to_string())
.filter(LogsFilter::new().query("source:python".to_string()))
.processors(vec![LogsProcessor::LogsGrokParser(Box::new(
LogsGrokParser::new(
LogsGrokParserRules::new(
r#"rule_name_1 foo
rule_name_2 bar
"#
.to_string(),
)
.support_rules(
r#"rule_name_1 foo
rule_name_2 bar
"#
.to_string(),
),
"message".to_string(),
LogsGrokParserType::GROK_PARSER,
)
.is_enabled(false)
.samples(vec![]),
))]);
let configuration = datadog::Configuration::new();
let api = LogsPipelinesAPI::with_config(configuration);
let resp = api.create_logs_pipeline(body).await;
if let Ok(value) = resp {
println!("{:#?}", value);
} else {
println!("{:#?}", resp.unwrap_err());
}
}pub fn with_client_and_config( config: Configuration, client: ClientWithMiddleware, ) -> Self
sourcepub async fn create_logs_pipeline(
&self,
body: LogsPipeline,
) -> Result<LogsPipeline, Error<CreateLogsPipelineError>>
pub async fn create_logs_pipeline( &self, body: LogsPipeline, ) -> Result<LogsPipeline, Error<CreateLogsPipelineError>>
Create a pipeline in your organization.
Examples found in repository?
12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43
async fn main() {
let body = LogsPipeline::new("".to_string())
.filter(LogsFilter::new().query("source:python".to_string()))
.processors(vec![LogsProcessor::LogsGrokParser(Box::new(
LogsGrokParser::new(
LogsGrokParserRules::new(
r#"rule_name_1 foo
rule_name_2 bar
"#
.to_string(),
)
.support_rules(
r#"rule_name_1 foo
rule_name_2 bar
"#
.to_string(),
),
"message".to_string(),
LogsGrokParserType::GROK_PARSER,
)
.is_enabled(false)
.samples(vec![]),
))]);
let configuration = datadog::Configuration::new();
let api = LogsPipelinesAPI::with_config(configuration);
let resp = api.create_logs_pipeline(body).await;
if let Ok(value) = resp {
println!("{:#?}", value);
} else {
println!("{:#?}", resp.unwrap_err());
}
}sourcepub async fn create_logs_pipeline_with_http_info(
&self,
body: LogsPipeline,
) -> Result<ResponseContent<LogsPipeline>, Error<CreateLogsPipelineError>>
pub async fn create_logs_pipeline_with_http_info( &self, body: LogsPipeline, ) -> Result<ResponseContent<LogsPipeline>, Error<CreateLogsPipelineError>>
Create a pipeline in your organization.
sourcepub async fn delete_logs_pipeline(
&self,
pipeline_id: String,
) -> Result<(), Error<DeleteLogsPipelineError>>
pub async fn delete_logs_pipeline( &self, pipeline_id: String, ) -> Result<(), Error<DeleteLogsPipelineError>>
Delete a given pipeline from your organization. This endpoint takes no JSON arguments.
sourcepub async fn delete_logs_pipeline_with_http_info(
&self,
pipeline_id: String,
) -> Result<ResponseContent<()>, Error<DeleteLogsPipelineError>>
pub async fn delete_logs_pipeline_with_http_info( &self, pipeline_id: String, ) -> Result<ResponseContent<()>, Error<DeleteLogsPipelineError>>
Delete a given pipeline from your organization. This endpoint takes no JSON arguments.
sourcepub async fn get_logs_pipeline(
&self,
pipeline_id: String,
) -> Result<LogsPipeline, Error<GetLogsPipelineError>>
pub async fn get_logs_pipeline( &self, pipeline_id: String, ) -> Result<LogsPipeline, Error<GetLogsPipelineError>>
Get a specific pipeline from your organization. This endpoint takes no JSON arguments.
sourcepub async fn get_logs_pipeline_with_http_info(
&self,
pipeline_id: String,
) -> Result<ResponseContent<LogsPipeline>, Error<GetLogsPipelineError>>
pub async fn get_logs_pipeline_with_http_info( &self, pipeline_id: String, ) -> Result<ResponseContent<LogsPipeline>, Error<GetLogsPipelineError>>
Get a specific pipeline from your organization. This endpoint takes no JSON arguments.
sourcepub async fn get_logs_pipeline_order(
&self,
) -> Result<LogsPipelinesOrder, Error<GetLogsPipelineOrderError>>
pub async fn get_logs_pipeline_order( &self, ) -> Result<LogsPipelinesOrder, Error<GetLogsPipelineOrderError>>
Get the current order of your pipelines. This endpoint takes no JSON arguments.
sourcepub async fn get_logs_pipeline_order_with_http_info(
&self,
) -> Result<ResponseContent<LogsPipelinesOrder>, Error<GetLogsPipelineOrderError>>
pub async fn get_logs_pipeline_order_with_http_info( &self, ) -> Result<ResponseContent<LogsPipelinesOrder>, Error<GetLogsPipelineOrderError>>
Get the current order of your pipelines. This endpoint takes no JSON arguments.
sourcepub async fn list_logs_pipelines(
&self,
) -> Result<Vec<LogsPipeline>, Error<ListLogsPipelinesError>>
pub async fn list_logs_pipelines( &self, ) -> Result<Vec<LogsPipeline>, Error<ListLogsPipelinesError>>
Get all pipelines from your organization. This endpoint takes no JSON arguments.
sourcepub async fn list_logs_pipelines_with_http_info(
&self,
) -> Result<ResponseContent<Vec<LogsPipeline>>, Error<ListLogsPipelinesError>>
pub async fn list_logs_pipelines_with_http_info( &self, ) -> Result<ResponseContent<Vec<LogsPipeline>>, Error<ListLogsPipelinesError>>
Get all pipelines from your organization. This endpoint takes no JSON arguments.
sourcepub async fn update_logs_pipeline(
&self,
pipeline_id: String,
body: LogsPipeline,
) -> Result<LogsPipeline, Error<UpdateLogsPipelineError>>
pub async fn update_logs_pipeline( &self, pipeline_id: String, body: LogsPipeline, ) -> Result<LogsPipeline, Error<UpdateLogsPipelineError>>
Update a given pipeline configuration to change its processors or their order.
Note: Using this method updates your pipeline configuration by replacing your current configuration with the new one sent to your Datadog organization.
Examples found in repository?
12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45
async fn main() {
let body = LogsPipeline::new("".to_string())
.filter(LogsFilter::new().query("source:python".to_string()))
.processors(vec![LogsProcessor::LogsGrokParser(Box::new(
LogsGrokParser::new(
LogsGrokParserRules::new(
r#"rule_name_1 foo
rule_name_2 bar
"#
.to_string(),
)
.support_rules(
r#"rule_name_1 foo
rule_name_2 bar
"#
.to_string(),
),
"message".to_string(),
LogsGrokParserType::GROK_PARSER,
)
.is_enabled(false)
.samples(vec![]),
))]);
let configuration = datadog::Configuration::new();
let api = LogsPipelinesAPI::with_config(configuration);
let resp = api
.update_logs_pipeline("pipeline_id".to_string(), body)
.await;
if let Ok(value) = resp {
println!("{:#?}", value);
} else {
println!("{:#?}", resp.unwrap_err());
}
}sourcepub async fn update_logs_pipeline_with_http_info(
&self,
pipeline_id: String,
body: LogsPipeline,
) -> Result<ResponseContent<LogsPipeline>, Error<UpdateLogsPipelineError>>
pub async fn update_logs_pipeline_with_http_info( &self, pipeline_id: String, body: LogsPipeline, ) -> Result<ResponseContent<LogsPipeline>, Error<UpdateLogsPipelineError>>
Update a given pipeline configuration to change its processors or their order.
Note: Using this method updates your pipeline configuration by replacing your current configuration with the new one sent to your Datadog organization.
sourcepub async fn update_logs_pipeline_order(
&self,
body: LogsPipelinesOrder,
) -> Result<LogsPipelinesOrder, Error<UpdateLogsPipelineOrderError>>
pub async fn update_logs_pipeline_order( &self, body: LogsPipelinesOrder, ) -> Result<LogsPipelinesOrder, Error<UpdateLogsPipelineOrderError>>
Update the order of your pipelines. Since logs are processed sequentially, reordering a pipeline may change the structure and content of the data processed by other pipelines and their processors.
Note: Using the PUT method updates your pipeline order by replacing your current order
with the new one sent to your Datadog organization.
Examples found in repository?
7 8 9 10 11 12 13 14 15 16 17 18 19 20 21
async fn main() {
let body = LogsPipelinesOrder::new(vec![
"tags".to_string(),
"org_ids".to_string(),
"products".to_string(),
]);
let configuration = datadog::Configuration::new();
let api = LogsPipelinesAPI::with_config(configuration);
let resp = api.update_logs_pipeline_order(body).await;
if let Ok(value) = resp {
println!("{:#?}", value);
} else {
println!("{:#?}", resp.unwrap_err());
}
}sourcepub async fn update_logs_pipeline_order_with_http_info(
&self,
body: LogsPipelinesOrder,
) -> Result<ResponseContent<LogsPipelinesOrder>, Error<UpdateLogsPipelineOrderError>>
pub async fn update_logs_pipeline_order_with_http_info( &self, body: LogsPipelinesOrder, ) -> Result<ResponseContent<LogsPipelinesOrder>, Error<UpdateLogsPipelineOrderError>>
Update the order of your pipelines. Since logs are processed sequentially, reordering a pipeline may change the structure and content of the data processed by other pipelines and their processors.
Note: Using the PUT method updates your pipeline order by replacing your current order
with the new one sent to your Datadog organization.
Trait Implementations§
source§impl Clone for LogsPipelinesAPI
impl Clone for LogsPipelinesAPI
source§fn clone(&self) -> LogsPipelinesAPI
fn clone(&self) -> LogsPipelinesAPI
1.0.0 · source§fn clone_from(&mut self, source: &Self)
fn clone_from(&mut self, source: &Self)
source. Read moresource§impl Debug for LogsPipelinesAPI
impl Debug for LogsPipelinesAPI
Auto Trait Implementations§
impl Freeze for LogsPipelinesAPI
impl !RefUnwindSafe for LogsPipelinesAPI
impl Send for LogsPipelinesAPI
impl Sync for LogsPipelinesAPI
impl Unpin for LogsPipelinesAPI
impl !UnwindSafe for LogsPipelinesAPI
Blanket Implementations§
source§impl<T> BorrowMut<T> for Twhere
T: ?Sized,
impl<T> BorrowMut<T> for Twhere
T: ?Sized,
source§fn borrow_mut(&mut self) -> &mut T
fn borrow_mut(&mut self) -> &mut T
source§impl<T> CloneToUninit for Twhere
T: Clone,
impl<T> CloneToUninit for Twhere
T: Clone,
source§unsafe fn clone_to_uninit(&self, dst: *mut T)
unsafe fn clone_to_uninit(&self, dst: *mut T)
clone_to_uninit)