pub struct LogsAPI { /* private fields */ }
Search your logs and send them to your Datadog platform over HTTP. See the Log Management page for more information.
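The repository examples shown on this page start at async fn main() and omit their use statements and async runtime attribute. A minimal, self-contained sketch of the surrounding scaffolding; the module paths and the assumption that Configuration::new() reads credentials such as DD_API_KEY from the environment are inferred from the crate layout, not shown on this page:

// Presumed imports for the submit_log examples below; the exact module paths
// are assumptions and should be checked against the crate documentation.
use std::collections::BTreeMap;

use datadog_api_client::datadog;
use datadog_api_client::datadogV1::api_logs::{LogsAPI, SubmitLogOptionalParams};
use datadog_api_client::datadogV1::model::HTTPLogItem;

#[tokio::main]
async fn main() {
    // Configuration::new() is assumed to pick up credentials (e.g. DD_API_KEY)
    // from the environment, as in the repository examples.
    let configuration = datadog::Configuration::new();
    let api = LogsAPI::with_config(configuration);
    let resp = api
        .submit_log(
            vec![HTTPLogItem::new("Example-Log".to_string())],
            SubmitLogOptionalParams::default(),
        )
        .await;
    println!("{:#?}", resp);
}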
Implementations
impl LogsAPI
pub fn new() -> Self
pub fn with_config(config: Configuration) -> Self
Examples found in repository
async fn main() {
    let body = vec![HTTPLogItem::new("Example-Log".to_string())
        .ddtags("host:ExampleLog".to_string())
        .additional_properties(BTreeMap::from([]))];
    let configuration = datadog::Configuration::new();
    let api = LogsAPI::with_config(configuration);
    let resp = api
        .submit_log(body, SubmitLogOptionalParams::default())
        .await;
    if let Ok(value) = resp {
        println!("{:#?}", value);
    } else {
        println!("{:#?}", resp.unwrap_err());
    }
}
More examples
async fn main() {
    let body = vec![HTTPLogItem::new("Example-Log".to_string())
        .ddtags("host:ExampleLog".to_string())
        .additional_properties(BTreeMap::from([]))];
    let configuration = datadog::Configuration::new();
    let api = LogsAPI::with_config(configuration);
    let resp = api
        .submit_log(
            body,
            SubmitLogOptionalParams::default().content_encoding(ContentEncoding::GZIP),
        )
        .await;
    if let Ok(value) = resp {
        println!("{:#?}", value);
    } else {
        println!("{:#?}", resp.unwrap_err());
    }
}
async fn main() {
    let body = vec![HTTPLogItem::new("Example-Log".to_string())
        .ddtags("host:ExampleLog".to_string())
        .additional_properties(BTreeMap::from([]))];
    let configuration = datadog::Configuration::new();
    let api = LogsAPI::with_config(configuration);
    let resp = api
        .submit_log(
            body,
            SubmitLogOptionalParams::default().content_encoding(ContentEncoding::DEFLATE),
        )
        .await;
    if let Ok(value) = resp {
        println!("{:#?}", value);
    } else {
        println!("{:#?}", resp.unwrap_err());
    }
}
async fn main() {
    let body = LogsListRequest::new(LogsListRequestTime::new(
        DateTime::parse_from_rfc3339("2020-02-02T02:02:02.202000+00:00")
            .expect("Failed to parse datetime")
            .with_timezone(&Utc),
        DateTime::parse_from_rfc3339("2020-02-20T02:02:02.202000+00:00")
            .expect("Failed to parse datetime")
            .with_timezone(&Utc),
    ))
    .index("retention-3,retention-15".to_string())
    .query("service:web* AND @http.status_code:[200 TO 299]".to_string())
    .sort(LogsSort::TIME_ASCENDING);
    let configuration = datadog::Configuration::new();
    let api = LogsAPI::with_config(configuration);
    let resp = api.list_logs(body).await;
    if let Ok(value) = resp {
        println!("{:#?}", value);
    } else {
        println!("{:#?}", resp.unwrap_err());
    }
}
async fn main() {
    let body = LogsListRequest::new(
        LogsListRequestTime::new(
            DateTime::parse_from_rfc3339("2021-11-11T10:11:11+00:00")
                .expect("Failed to parse datetime")
                .with_timezone(&Utc),
            DateTime::parse_from_rfc3339("2021-11-11T11:11:11+00:00")
                .expect("Failed to parse datetime")
                .with_timezone(&Utc),
        )
        .timezone("Europe/Paris".to_string()),
    )
    .index("main".to_string())
    .query("host:Test*".to_string())
    .sort(LogsSort::TIME_ASCENDING);
    let configuration = datadog::Configuration::new();
    let api = LogsAPI::with_config(configuration);
    let resp = api.list_logs(body).await;
    if let Ok(value) = resp {
        println!("{:#?}", value);
    } else {
        println!("{:#?}", resp.unwrap_err());
    }
}
pub fn with_client_and_config(
    config: Configuration,
    client: ClientWithMiddleware,
) -> Self
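No repository example is shown for this constructor. A minimal sketch of passing a custom middleware client, assuming the reqwest_middleware and reqwest_retry crates and the module paths noted above; the retry policy is illustrative, not part of this crate:

use datadog_api_client::datadog;
use datadog_api_client::datadogV1::api_logs::LogsAPI;
use reqwest_middleware::ClientBuilder;
use reqwest_retry::{policies::ExponentialBackoff, RetryTransientMiddleware};

fn build_api() -> LogsAPI {
    // Wrap a plain reqwest client with retry middleware; any
    // reqwest_middleware::ClientWithMiddleware can be passed here.
    let retry_policy = ExponentialBackoff::builder().build_with_max_retries(3);
    let client = ClientBuilder::new(reqwest::Client::new())
        .with(RetryTransientMiddleware::new_with_policy(retry_policy))
        .build();
    LogsAPI::with_client_and_config(datadog::Configuration::new(), client)
}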
pub async fn list_logs(
    &self,
    body: LogsListRequest,
) -> Result<LogsListResponse, Error<ListLogsError>>
The List endpoint returns logs that match a log search query. Results are paginated.
If you are considering archiving logs for your organization, consider using the Datadog archive capabilities instead of the log list API. See the Datadog Logs Archive documentation.
Examples found in repository
async fn main() {
    let body = LogsListRequest::new(LogsListRequestTime::new(
        DateTime::parse_from_rfc3339("2020-02-02T02:02:02.202000+00:00")
            .expect("Failed to parse datetime")
            .with_timezone(&Utc),
        DateTime::parse_from_rfc3339("2020-02-20T02:02:02.202000+00:00")
            .expect("Failed to parse datetime")
            .with_timezone(&Utc),
    ))
    .index("retention-3,retention-15".to_string())
    .query("service:web* AND @http.status_code:[200 TO 299]".to_string())
    .sort(LogsSort::TIME_ASCENDING);
    let configuration = datadog::Configuration::new();
    let api = LogsAPI::with_config(configuration);
    let resp = api.list_logs(body).await;
    if let Ok(value) = resp {
        println!("{:#?}", value);
    } else {
        println!("{:#?}", resp.unwrap_err());
    }
}
More examples
async fn main() {
    let body = LogsListRequest::new(
        LogsListRequestTime::new(
            DateTime::parse_from_rfc3339("2021-11-11T10:11:11+00:00")
                .expect("Failed to parse datetime")
                .with_timezone(&Utc),
            DateTime::parse_from_rfc3339("2021-11-11T11:11:11+00:00")
                .expect("Failed to parse datetime")
                .with_timezone(&Utc),
        )
        .timezone("Europe/Paris".to_string()),
    )
    .index("main".to_string())
    .query("host:Test*".to_string())
    .sort(LogsSort::TIME_ASCENDING);
    let configuration = datadog::Configuration::new();
    let api = LogsAPI::with_config(configuration);
    let resp = api.list_logs(body).await;
    if let Ok(value) = resp {
        println!("{:#?}", value);
    } else {
        println!("{:#?}", resp.unwrap_err());
    }
}
pub async fn list_logs_with_http_info(
    &self,
    body: LogsListRequest,
) -> Result<ResponseContent<LogsListResponse>, Error<ListLogsError>>
The List endpoint returns logs that match a log search query. Results are paginated.
If you are considering archiving logs for your organization, consider using the Datadog archive capabilities instead of the log list API. See the Datadog Logs Archive documentation.
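No repository example is shown for this variant. A minimal sketch, reusing the request body from the list_logs examples above and Debug-printing the ResponseContent wrapper, which is assumed to implement Debug and to expose the raw HTTP response alongside the deserialized body; module paths are the same assumptions noted earlier:

use datadog_api_client::datadogV1::api_logs::LogsAPI;
use datadog_api_client::datadogV1::model::LogsListRequest;

// Call the *_with_http_info variant and Debug-print either the
// ResponseContent wrapper or the error.
async fn print_logs_with_http_info(api: &LogsAPI, body: LogsListRequest) {
    match api.list_logs_with_http_info(body).await {
        Ok(response_content) => println!("{:#?}", response_content),
        Err(error) => println!("{:#?}", error),
    }
}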
pub async fn submit_log(
    &self,
    body: Vec<HTTPLogItem>,
    params: SubmitLogOptionalParams,
) -> Result<BTreeMap<String, Value>, Error<SubmitLogError>>
Send your logs to your Datadog platform over HTTP. Limits per HTTP request are:
- Maximum content size per payload (uncompressed): 5MB
- Maximum size for a single log: 1MB
- Maximum array size if sending multiple logs in an array: 1000 entries
Any log exceeding 1MB is accepted and truncated by Datadog:
- For a single log request, the API truncates the log at 1MB and returns a 2xx.
- For a multi-log request, the API processes all logs, truncates only logs larger than 1MB, and returns a 2xx.
Datadog recommends sending your logs compressed. Add the Content-Encoding: gzip header to the request when sending compressed logs.
The status codes returned by the HTTP API are:
- 200: OK
- 400: Bad request (likely an issue in the payload formatting)
- 403: Permission issue (likely using an invalid API Key)
- 413: Payload too large (batch is above 5MB uncompressed)
- 5xx: Internal error; the request should be retried after some time (see the retry sketch below)
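A minimal retry sketch for the 5xx case, assuming a fixed number of attempts with a growing delay via tokio's sleep, that Value is serde_json::Value, and that HTTPLogItem implements Clone as the generated models typically do; a real client would inspect the returned Error to distinguish 5xx responses from 4xx ones before retrying:

use std::collections::BTreeMap;
use std::time::Duration;

use datadog_api_client::datadogV1::api_logs::{LogsAPI, SubmitLogOptionalParams};
use datadog_api_client::datadogV1::model::HTTPLogItem;
use serde_json::Value;

async fn submit_with_retry(
    api: &LogsAPI,
    body: Vec<HTTPLogItem>,
) -> Option<BTreeMap<String, Value>> {
    // Illustrative only: retry a few times with exponential backoff. In
    // practice, only retry when the error corresponds to a 5xx response.
    for attempt in 0..3u32 {
        match api
            .submit_log(body.clone(), SubmitLogOptionalParams::default())
            .await
        {
            Ok(value) => return Some(value),
            Err(error) => {
                println!("attempt {} failed: {:#?}", attempt, error);
                tokio::time::sleep(Duration::from_secs(2u64.pow(attempt))).await;
            }
        }
    }
    None
}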
Examples found in repository
async fn main() {
    let body = vec![HTTPLogItem::new("Example-Log".to_string())
        .ddtags("host:ExampleLog".to_string())
        .additional_properties(BTreeMap::from([]))];
    let configuration = datadog::Configuration::new();
    let api = LogsAPI::with_config(configuration);
    let resp = api
        .submit_log(body, SubmitLogOptionalParams::default())
        .await;
    if let Ok(value) = resp {
        println!("{:#?}", value);
    } else {
        println!("{:#?}", resp.unwrap_err());
    }
}
More examples
async fn main() {
    let body = vec![HTTPLogItem::new("Example-Log".to_string())
        .ddtags("host:ExampleLog".to_string())
        .additional_properties(BTreeMap::from([]))];
    let configuration = datadog::Configuration::new();
    let api = LogsAPI::with_config(configuration);
    let resp = api
        .submit_log(
            body,
            SubmitLogOptionalParams::default().content_encoding(ContentEncoding::GZIP),
        )
        .await;
    if let Ok(value) = resp {
        println!("{:#?}", value);
    } else {
        println!("{:#?}", resp.unwrap_err());
    }
}
async fn main() {
    let body = vec![HTTPLogItem::new("Example-Log".to_string())
        .ddtags("host:ExampleLog".to_string())
        .additional_properties(BTreeMap::from([]))];
    let configuration = datadog::Configuration::new();
    let api = LogsAPI::with_config(configuration);
    let resp = api
        .submit_log(
            body,
            SubmitLogOptionalParams::default().content_encoding(ContentEncoding::DEFLATE),
        )
        .await;
    if let Ok(value) = resp {
        println!("{:#?}", value);
    } else {
        println!("{:#?}", resp.unwrap_err());
    }
}
pub async fn submit_log_with_http_info(
    &self,
    body: Vec<HTTPLogItem>,
    params: SubmitLogOptionalParams,
) -> Result<ResponseContent<BTreeMap<String, Value>>, Error<SubmitLogError>>
Send your logs to your Datadog platform over HTTP. Limits per HTTP request are:
- Maximum content size per payload (uncompressed): 5MB
- Maximum size for a single log: 1MB
- Maximum array size if sending multiple logs in an array: 1000 entries
Any log exceeding 1MB is accepted and truncated by Datadog:
- For a single log request, the API truncates the log at 1MB and returns a 2xx.
- For a multi-log request, the API processes all logs, truncates only logs larger than 1MB, and returns a 2xx.
Datadog recommends sending your logs compressed. Add the Content-Encoding: gzip header to the request when sending compressed logs.
The status codes returned by the HTTP API are:
- 200: OK
- 400: Bad request (likely an issue in the payload formatting)
- 403: Permission issue (likely using an invalid API Key)
- 413: Payload too large (batch is above 5MB uncompressed)
- 5xx: Internal error; the request should be retried after some time
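No repository example is shown for this variant. A minimal sketch, Debug-printing the ResponseContent wrapper returned by the *_with_http_info call (assumed to implement Debug like the other response types); module paths are the same assumptions noted earlier:

use datadog_api_client::datadogV1::api_logs::{LogsAPI, SubmitLogOptionalParams};
use datadog_api_client::datadogV1::model::HTTPLogItem;

// Call the *_with_http_info variant and Debug-print either the
// ResponseContent wrapper or the error.
async fn print_submit_with_http_info(api: &LogsAPI, body: Vec<HTTPLogItem>) {
    match api
        .submit_log_with_http_info(body, SubmitLogOptionalParams::default())
        .await
    {
        Ok(response_content) => println!("{:#?}", response_content),
        Err(error) => println!("{:#?}", error),
    }
}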