pub struct LogsAPI { /* private fields */ }
Expand description
Search your logs and send them to your Datadog platform over HTTP. See the Log Management page for more information.
Implementations§
Source · impl LogsAPI
impl LogsAPI
pub fn new() -> Self
Source · pub fn with_config(config: Configuration) -> Self
pub fn with_config(config: Configuration) -> Self
Examples found in repository?
7async fn main() {
8 let configuration = datadog::Configuration::new();
9 let api = LogsAPI::with_config(configuration);
10 let resp = api
11 .list_logs_get(ListLogsGetOptionalParams::default())
12 .await;
13 if let Ok(value) = resp {
14 println!("{:#?}", value);
15 } else {
16 println!("{:#?}", resp.unwrap_err());
17 }
18}
More examples
9async fn main() {
10 let configuration = datadog::Configuration::new();
11 let api = LogsAPI::with_config(configuration);
12 let response = api.list_logs_get_with_pagination(ListLogsGetOptionalParams::default());
13 pin_mut!(response);
14 while let Some(resp) = response.next().await {
15 if let Ok(value) = resp {
16 println!("{:#?}", value);
17 } else {
18 println!("{:#?}", resp.unwrap_err());
19 }
20 }
21}
9async fn main() {
10 let configuration = datadog::Configuration::new();
11 let api = LogsAPI::with_config(configuration);
12 let response =
13 api.list_logs_get_with_pagination(ListLogsGetOptionalParams::default().page_limit(2));
14 pin_mut!(response);
15 while let Some(resp) = response.next().await {
16 if let Ok(value) = resp {
17 println!("{:#?}", value);
18 } else {
19 println!("{:#?}", resp.unwrap_err());
20 }
21 }
22}
8async fn main() {
9 let body = LogsAggregateRequest::new().filter(
10 LogsQueryFilter::new()
11 .from("now-15m".to_string())
12 .indexes(vec!["main".to_string()])
13 .query("*".to_string())
14 .to("now".to_string()),
15 );
16 let configuration = datadog::Configuration::new();
17 let api = LogsAPI::with_config(configuration);
18 let resp = api.aggregate_logs(body).await;
19 if let Ok(value) = resp {
20 println!("{:#?}", value);
21 } else {
22 println!("{:#?}", resp.unwrap_err());
23 }
24}
11async fn main() {
12 let body = LogsAggregateRequest::new()
13 .compute(vec![LogsCompute::new(LogsAggregationFunction::COUNT)
14 .interval("5m".to_string())
15 .type_(LogsComputeType::TIMESERIES)])
16 .filter(
17 LogsQueryFilter::new()
18 .from("now-15m".to_string())
19 .indexes(vec!["main".to_string()])
20 .query("*".to_string())
21 .to("now".to_string()),
22 );
23 let configuration = datadog::Configuration::new();
24 let api = LogsAPI::with_config(configuration);
25 let resp = api.aggregate_logs(body).await;
26 if let Ok(value) = resp {
27 println!("{:#?}", value);
28 } else {
29 println!("{:#?}", resp.unwrap_err());
30 }
31}
11async fn main() {
12 let body = vec![HTTPLogItem::new(
13 "2019-11-19T14:37:58,995 INFO [process.name][20081] Hello World".to_string(),
14 )
15 .ddsource("nginx".to_string())
16 .ddtags("env:staging,version:5.1".to_string())
17 .hostname("i-012345678".to_string())
18 .service("payment".to_string())
19 .additional_properties(BTreeMap::from([(
20 "status".to_string(),
21 Value::from("info"),
22 )]))];
23 let configuration = datadog::Configuration::new();
24 let api = LogsAPI::with_config(configuration);
25 let resp = api
26 .submit_log(body, SubmitLogOptionalParams::default())
27 .await;
28 if let Ok(value) = resp {
29 println!("{:#?}", value);
30 } else {
31 println!("{:#?}", resp.unwrap_err());
32 }
33}
- examples/v2_logs_SubmitLog_3496222707.rs
- examples/v2_logs_SubmitLog_904601870.rs
- examples/v2_logs_ListLogs_3400928236.rs
- examples/v2_logs_ListLogs_3138392594.rs
- examples/v2_logs_ListLogsGet_2034110533.rs
- examples/v2_logs_ListLogs.rs
- examples/v2_logs_AggregateLogs_2955613758.rs
- examples/v2_logs_ListLogs_534975433.rs
pub fn with_client_and_config( config: Configuration, client: ClientWithMiddleware, ) -> Self
Source · pub async fn aggregate_logs(
&self,
body: LogsAggregateRequest,
) -> Result<LogsAggregateResponse, Error<AggregateLogsError>>
pub async fn aggregate_logs( &self, body: LogsAggregateRequest, ) -> Result<LogsAggregateResponse, Error<AggregateLogsError>>
The API endpoint to aggregate events into buckets and compute metrics and timeseries.
Examples found in repository?
8async fn main() {
9 let body = LogsAggregateRequest::new().filter(
10 LogsQueryFilter::new()
11 .from("now-15m".to_string())
12 .indexes(vec!["main".to_string()])
13 .query("*".to_string())
14 .to("now".to_string()),
15 );
16 let configuration = datadog::Configuration::new();
17 let api = LogsAPI::with_config(configuration);
18 let resp = api.aggregate_logs(body).await;
19 if let Ok(value) = resp {
20 println!("{:#?}", value);
21 } else {
22 println!("{:#?}", resp.unwrap_err());
23 }
24}
More examples
11async fn main() {
12 let body = LogsAggregateRequest::new()
13 .compute(vec![LogsCompute::new(LogsAggregationFunction::COUNT)
14 .interval("5m".to_string())
15 .type_(LogsComputeType::TIMESERIES)])
16 .filter(
17 LogsQueryFilter::new()
18 .from("now-15m".to_string())
19 .indexes(vec!["main".to_string()])
20 .query("*".to_string())
21 .to("now".to_string()),
22 );
23 let configuration = datadog::Configuration::new();
24 let api = LogsAPI::with_config(configuration);
25 let resp = api.aggregate_logs(body).await;
26 if let Ok(value) = resp {
27 println!("{:#?}", value);
28 } else {
29 println!("{:#?}", resp.unwrap_err());
30 }
31}
16async fn main() {
17 let body = LogsAggregateRequest::new()
18 .compute(vec![LogsCompute::new(LogsAggregationFunction::COUNT)
19 .interval("5m".to_string())
20 .type_(LogsComputeType::TIMESERIES)])
21 .filter(
22 LogsQueryFilter::new()
23 .from("now-15m".to_string())
24 .indexes(vec!["main".to_string()])
25 .query("*".to_string())
26 .to("now".to_string()),
27 )
28 .group_by(vec![LogsGroupBy::new("host".to_string())
29 .missing(LogsGroupByMissing::LogsGroupByMissingString(
30 "miss".to_string(),
31 ))
32 .sort(
33 LogsAggregateSort::new()
34 .aggregation(LogsAggregationFunction::PERCENTILE_90)
35 .metric("@duration".to_string())
36 .order(LogsSortOrder::ASCENDING)
37 .type_(LogsAggregateSortType::MEASURE),
38 )]);
39 let configuration = datadog::Configuration::new();
40 let api = LogsAPI::with_config(configuration);
41 let resp = api.aggregate_logs(body).await;
42 if let Ok(value) = resp {
43 println!("{:#?}", value);
44 } else {
45 println!("{:#?}", resp.unwrap_err());
46 }
47}
Source · pub async fn aggregate_logs_with_http_info(
&self,
body: LogsAggregateRequest,
) -> Result<ResponseContent<LogsAggregateResponse>, Error<AggregateLogsError>>
pub async fn aggregate_logs_with_http_info( &self, body: LogsAggregateRequest, ) -> Result<ResponseContent<LogsAggregateResponse>, Error<AggregateLogsError>>
The API endpoint to aggregate events into buckets and compute metrics and timeseries.
Source · pub async fn list_logs(
&self,
params: ListLogsOptionalParams,
) -> Result<LogsListResponse, Error<ListLogsError>>
pub async fn list_logs( &self, params: ListLogsOptionalParams, ) -> Result<LogsListResponse, Error<ListLogsError>>
List endpoint returns logs that match a log search query. Results are paginated.
Use this endpoint to search and filter your logs.
If you are considering archiving logs for your organization, consider use of the Datadog archive capabilities instead of the log list API. See Datadog Logs Archive documentation.
Examples found in repository?
11async fn main() {
12 let body = LogsListRequest::new()
13 .filter(
14 LogsQueryFilter::new()
15 .from("2020-09-17T11:48:36+01:00".to_string())
16 .indexes(vec!["main".to_string()])
17 .query("datadog-agent".to_string())
18 .to("2020-09-17T12:48:36+01:00".to_string()),
19 )
20 .page(LogsListRequestPage::new().limit(5))
21 .sort(LogsSort::TIMESTAMP_ASCENDING);
22 let configuration = datadog::Configuration::new();
23 let api = LogsAPI::with_config(configuration);
24 let resp = api
25 .list_logs(ListLogsOptionalParams::default().body(body))
26 .await;
27 if let Ok(value) = resp {
28 println!("{:#?}", value);
29 } else {
30 println!("{:#?}", resp.unwrap_err());
31 }
32}
More examples
13async fn main() {
14 let body =
15 LogsListRequest::new()
16 .filter(
17 LogsQueryFilter::new()
18 .from("now-15m".to_string())
19 .indexes(vec!["main".to_string(), "web".to_string()])
20 .query("service:web* AND @http.status_code:[200 TO 299]".to_string())
21 .storage_tier(LogsStorageTier::INDEXES)
22 .to("now".to_string()),
23 )
24 .options(LogsQueryOptions::new().timezone("GMT".to_string()))
25 .page(
26 LogsListRequestPage::new()
27 .cursor(
28 "eyJzdGFydEF0IjoiQVFBQUFYS2tMS3pPbm40NGV3QUFBQUJCV0V0clRFdDZVbG8zY3pCRmNsbHJiVmxDWlEifQ==".to_string(),
29 )
30 .limit(25),
31 )
32 .sort(LogsSort::TIMESTAMP_ASCENDING);
33 let configuration = datadog::Configuration::new();
34 let api = LogsAPI::with_config(configuration);
35 let resp = api
36 .list_logs(ListLogsOptionalParams::default().body(body))
37 .await;
38 if let Ok(value) = resp {
39 println!("{:#?}", value);
40 } else {
41 println!("{:#?}", resp.unwrap_err());
42 }
43}
Source · pub fn list_logs_with_pagination(
&self,
params: ListLogsOptionalParams,
) -> impl Stream<Item = Result<Log, Error<ListLogsError>>> + '_
pub fn list_logs_with_pagination( &self, params: ListLogsOptionalParams, ) -> impl Stream<Item = Result<Log, Error<ListLogsError>>> + '_
Examples found in repository?
14async fn main() {
15 let body = LogsListRequest::new()
16 .filter(
17 LogsQueryFilter::new()
18 .from("now-15m".to_string())
19 .indexes(vec!["main".to_string()])
20 .to("now".to_string()),
21 )
22 .options(LogsQueryOptions::new().timezone("GMT".to_string()))
23 .page(LogsListRequestPage::new().limit(2))
24 .sort(LogsSort::TIMESTAMP_ASCENDING);
25 let configuration = datadog::Configuration::new();
26 let api = LogsAPI::with_config(configuration);
27 let response = api.list_logs_with_pagination(ListLogsOptionalParams::default().body(body));
28 pin_mut!(response);
29 while let Some(resp) = response.next().await {
30 if let Ok(value) = resp {
31 println!("{:#?}", value);
32 } else {
33 println!("{:#?}", resp.unwrap_err());
34 }
35 }
36}
More examples
15async fn main() {
16 let body =
17 LogsListRequest::new()
18 .filter(
19 LogsQueryFilter::new()
20 .from("now-15m".to_string())
21 .indexes(vec!["main".to_string(), "web".to_string()])
22 .query("service:web* AND @http.status_code:[200 TO 299]".to_string())
23 .storage_tier(LogsStorageTier::INDEXES)
24 .to("now".to_string()),
25 )
26 .options(LogsQueryOptions::new().timezone("GMT".to_string()))
27 .page(
28 LogsListRequestPage::new()
29 .cursor(
30 "eyJzdGFydEF0IjoiQVFBQUFYS2tMS3pPbm40NGV3QUFBQUJCV0V0clRFdDZVbG8zY3pCRmNsbHJiVmxDWlEifQ==".to_string(),
31 )
32 .limit(25),
33 )
34 .sort(LogsSort::TIMESTAMP_ASCENDING);
35 let configuration = datadog::Configuration::new();
36 let api = LogsAPI::with_config(configuration);
37 let response = api.list_logs_with_pagination(ListLogsOptionalParams::default().body(body));
38 pin_mut!(response);
39 while let Some(resp) = response.next().await {
40 if let Ok(value) = resp {
41 println!("{:#?}", value);
42 } else {
43 println!("{:#?}", resp.unwrap_err());
44 }
45 }
46}
Source · pub async fn list_logs_with_http_info(
&self,
params: ListLogsOptionalParams,
) -> Result<ResponseContent<LogsListResponse>, Error<ListLogsError>>
pub async fn list_logs_with_http_info( &self, params: ListLogsOptionalParams, ) -> Result<ResponseContent<LogsListResponse>, Error<ListLogsError>>
List endpoint returns logs that match a log search query. Results are paginated.
Use this endpoint to search and filter your logs.
If you are considering archiving logs for your organization, consider use of the Datadog archive capabilities instead of the log list API. See Datadog Logs Archive documentation.
Source · pub async fn list_logs_get(
&self,
params: ListLogsGetOptionalParams,
) -> Result<LogsListResponse, Error<ListLogsGetError>>
pub async fn list_logs_get( &self, params: ListLogsGetOptionalParams, ) -> Result<LogsListResponse, Error<ListLogsGetError>>
List endpoint returns logs that match a log search query. Results are paginated.
Use this endpoint to search and filter your logs.
If you are considering archiving logs for your organization, consider use of the Datadog archive capabilities instead of the log list API. See Datadog Logs Archive documentation.
Examples found in repository?
7async fn main() {
8 let configuration = datadog::Configuration::new();
9 let api = LogsAPI::with_config(configuration);
10 let resp = api
11 .list_logs_get(ListLogsGetOptionalParams::default())
12 .await;
13 if let Ok(value) = resp {
14 println!("{:#?}", value);
15 } else {
16 println!("{:#?}", resp.unwrap_err());
17 }
18}
More examples
8async fn main() {
9 let configuration = datadog::Configuration::new();
10 let api = LogsAPI::with_config(configuration);
11 let resp = api
12 .list_logs_get(
13 ListLogsGetOptionalParams::default()
14 .filter_query("datadog-agent".to_string())
15 .filter_indexes(vec!["main".to_string()])
16 .filter_from(
17 DateTime::parse_from_rfc3339("2020-09-17T11:48:36+01:00")
18 .expect("Failed to parse datetime")
19 .with_timezone(&Utc),
20 )
21 .filter_to(
22 DateTime::parse_from_rfc3339("2020-09-17T12:48:36+01:00")
23 .expect("Failed to parse datetime")
24 .with_timezone(&Utc),
25 )
26 .page_limit(5),
27 )
28 .await;
29 if let Ok(value) = resp {
30 println!("{:#?}", value);
31 } else {
32 println!("{:#?}", resp.unwrap_err());
33 }
34}
Source · pub fn list_logs_get_with_pagination(
&self,
params: ListLogsGetOptionalParams,
) -> impl Stream<Item = Result<Log, Error<ListLogsGetError>>> + '_
pub fn list_logs_get_with_pagination( &self, params: ListLogsGetOptionalParams, ) -> impl Stream<Item = Result<Log, Error<ListLogsGetError>>> + '_
Examples found in repository?
9async fn main() {
10 let configuration = datadog::Configuration::new();
11 let api = LogsAPI::with_config(configuration);
12 let response = api.list_logs_get_with_pagination(ListLogsGetOptionalParams::default());
13 pin_mut!(response);
14 while let Some(resp) = response.next().await {
15 if let Ok(value) = resp {
16 println!("{:#?}", value);
17 } else {
18 println!("{:#?}", resp.unwrap_err());
19 }
20 }
21}
More examples
9async fn main() {
10 let configuration = datadog::Configuration::new();
11 let api = LogsAPI::with_config(configuration);
12 let response =
13 api.list_logs_get_with_pagination(ListLogsGetOptionalParams::default().page_limit(2));
14 pin_mut!(response);
15 while let Some(resp) = response.next().await {
16 if let Ok(value) = resp {
17 println!("{:#?}", value);
18 } else {
19 println!("{:#?}", resp.unwrap_err());
20 }
21 }
22}
Source · pub async fn list_logs_get_with_http_info(
&self,
params: ListLogsGetOptionalParams,
) -> Result<ResponseContent<LogsListResponse>, Error<ListLogsGetError>>
pub async fn list_logs_get_with_http_info( &self, params: ListLogsGetOptionalParams, ) -> Result<ResponseContent<LogsListResponse>, Error<ListLogsGetError>>
List endpoint returns logs that match a log search query. Results are paginated.
Use this endpoint to search and filter your logs.
If you are considering archiving logs for your organization, consider use of the Datadog archive capabilities instead of the log list API. See Datadog Logs Archive documentation.
Source · pub async fn submit_log(
&self,
body: Vec<HTTPLogItem>,
params: SubmitLogOptionalParams,
) -> Result<BTreeMap<String, Value>, Error<SubmitLogError>>
pub async fn submit_log( &self, body: Vec<HTTPLogItem>, params: SubmitLogOptionalParams, ) -> Result<BTreeMap<String, Value>, Error<SubmitLogError>>
Send your logs to your Datadog platform over HTTP. Limits per HTTP request are:
- Maximum content size per payload (uncompressed): 5MB
- Maximum size for a single log: 1MB
- Maximum array size if sending multiple logs in an array: 1000 entries
Any log exceeding 1MB is accepted and truncated by Datadog:
- For a single log request, the API truncates the log at 1MB and returns a 2xx.
- For a multi-logs request, the API processes all logs, truncates only logs larger than 1MB, and returns a 2xx.
Datadog recommends sending your logs compressed.
Add the `Content-Encoding: gzip` header to the request when sending compressed logs.
Log events can be submitted with a timestamp that is up to 18 hours in the past.
The status codes answered by the HTTP API are:
- 202: Accepted: the request has been accepted for processing
- 400: Bad request (likely an issue in the payload formatting)
- 401: Unauthorized (likely a missing API Key)
- 403: Permission issue (likely using an invalid API Key)
- 408: Request Timeout, request should be retried after some time
- 413: Payload too large (batch is above 5MB uncompressed)
- 429: Too Many Requests, request should be retried after some time
- 500: Internal Server Error, the server encountered an unexpected condition that prevented it from fulfilling the request, request should be retried after some time
- 503: Service Unavailable, the server is not ready to handle the request probably because it is overloaded, request should be retried after some time
Examples found in repository?
11async fn main() {
12 let body = vec![HTTPLogItem::new(
13 "2019-11-19T14:37:58,995 INFO [process.name][20081] Hello World".to_string(),
14 )
15 .ddsource("nginx".to_string())
16 .ddtags("env:staging,version:5.1".to_string())
17 .hostname("i-012345678".to_string())
18 .service("payment".to_string())
19 .additional_properties(BTreeMap::from([(
20 "status".to_string(),
21 Value::from("info"),
22 )]))];
23 let configuration = datadog::Configuration::new();
24 let api = LogsAPI::with_config(configuration);
25 let resp = api
26 .submit_log(body, SubmitLogOptionalParams::default())
27 .await;
28 if let Ok(value) = resp {
29 println!("{:#?}", value);
30 } else {
31 println!("{:#?}", resp.unwrap_err());
32 }
33}
More examples
11async fn main() {
12 let body = vec![HTTPLogItem::new(
13 "2019-11-19T14:37:58,995 INFO [process.name][20081] Hello World".to_string(),
14 )
15 .ddsource("nginx".to_string())
16 .ddtags("env:staging,version:5.1".to_string())
17 .hostname("i-012345678".to_string())
18 .service("payment".to_string())
19 .additional_properties(BTreeMap::from([]))];
20 let configuration = datadog::Configuration::new();
21 let api = LogsAPI::with_config(configuration);
22 let resp = api
23 .submit_log(
24 body,
25 SubmitLogOptionalParams::default().content_encoding(ContentEncoding::GZIP),
26 )
27 .await;
28 if let Ok(value) = resp {
29 println!("{:#?}", value);
30 } else {
31 println!("{:#?}", resp.unwrap_err());
32 }
33}
11async fn main() {
12 let body = vec![HTTPLogItem::new(
13 "2019-11-19T14:37:58,995 INFO [process.name][20081] Hello World".to_string(),
14 )
15 .ddsource("nginx".to_string())
16 .ddtags("env:staging,version:5.1".to_string())
17 .hostname("i-012345678".to_string())
18 .service("payment".to_string())
19 .additional_properties(BTreeMap::from([]))];
20 let configuration = datadog::Configuration::new();
21 let api = LogsAPI::with_config(configuration);
22 let resp = api
23 .submit_log(
24 body,
25 SubmitLogOptionalParams::default().content_encoding(ContentEncoding::DEFLATE),
26 )
27 .await;
28 if let Ok(value) = resp {
29 println!("{:#?}", value);
30 } else {
31 println!("{:#?}", resp.unwrap_err());
32 }
33}
Source · pub async fn submit_log_with_http_info(
&self,
body: Vec<HTTPLogItem>,
params: SubmitLogOptionalParams,
) -> Result<ResponseContent<BTreeMap<String, Value>>, Error<SubmitLogError>>
pub async fn submit_log_with_http_info( &self, body: Vec<HTTPLogItem>, params: SubmitLogOptionalParams, ) -> Result<ResponseContent<BTreeMap<String, Value>>, Error<SubmitLogError>>
Send your logs to your Datadog platform over HTTP. Limits per HTTP request are:
- Maximum content size per payload (uncompressed): 5MB
- Maximum size for a single log: 1MB
- Maximum array size if sending multiple logs in an array: 1000 entries
Any log exceeding 1MB is accepted and truncated by Datadog:
- For a single log request, the API truncates the log at 1MB and returns a 2xx.
- For a multi-logs request, the API processes all logs, truncates only logs larger than 1MB, and returns a 2xx.
Datadog recommends sending your logs compressed.
Add the `Content-Encoding: gzip` header to the request when sending compressed logs.
Log events can be submitted with a timestamp that is up to 18 hours in the past.
The status codes answered by the HTTP API are:
- 202: Accepted: the request has been accepted for processing
- 400: Bad request (likely an issue in the payload formatting)
- 401: Unauthorized (likely a missing API Key)
- 403: Permission issue (likely using an invalid API Key)
- 408: Request Timeout, request should be retried after some time
- 413: Payload too large (batch is above 5MB uncompressed)
- 429: Too Many Requests, request should be retried after some time
- 500: Internal Server Error, the server encountered an unexpected condition that prevented it from fulfilling the request, request should be retried after some time
- 503: Service Unavailable, the server is not ready to handle the request probably because it is overloaded, request should be retried after some time