pub struct LogsAPI { /* private fields */ }

Expand description
Search your logs and send them to your Datadog platform over HTTP. See the Log Management page for more information.
Implementations§
source§impl LogsAPI
impl LogsAPI
pub fn new() -> Self
source§
pub fn with_config(config: Configuration) -> Self
pub fn with_config(config: Configuration) -> Self
Examples found in repository?
Example source, lines 9–23:
async fn main() {
let body = vec![HTTPLogItem::new("Example-Log".to_string())
.ddtags("host:ExampleLog".to_string())
.additional_properties(BTreeMap::from([]))];
let configuration = datadog::Configuration::new();
let api = LogsAPI::with_config(configuration);
let resp = api
.submit_log(body, SubmitLogOptionalParams::default())
.await;
if let Ok(value) = resp {
println!("{:#?}", value);
} else {
println!("{:#?}", resp.unwrap_err());
}
}

More examples
Example source, lines 10–27:
async fn main() {
let body = vec![HTTPLogItem::new("Example-Log".to_string())
.ddtags("host:ExampleLog".to_string())
.additional_properties(BTreeMap::from([]))];
let configuration = datadog::Configuration::new();
let api = LogsAPI::with_config(configuration);
let resp = api
.submit_log(
body,
SubmitLogOptionalParams::default().content_encoding(ContentEncoding::GZIP),
)
.await;
if let Ok(value) = resp {
println!("{:#?}", value);
} else {
println!("{:#?}", resp.unwrap_err());
}
}

Example source, lines 11–28:
async fn main() {
let body = vec![HTTPLogItem::new("Example-Log".to_string())
.ddtags("host:ExampleLog".to_string())
.additional_properties(BTreeMap::from([]))];
let configuration = datadog::Configuration::new();
let api = LogsAPI::with_config(configuration);
let resp = api
.submit_log(
body,
SubmitLogOptionalParams::default().content_encoding(ContentEncoding::DEFLATE),
)
.await;
if let Ok(value) = resp {
println!("{:#?}", value);
} else {
println!("{:#?}", resp.unwrap_err());
}
}

Example source, lines 10–30:
async fn main() {
let body = LogsListRequest::new(LogsListRequestTime::new(
DateTime::parse_from_rfc3339("2020-02-02T02:02:02.202000+00:00")
.expect("Failed to parse datetime")
.with_timezone(&Utc),
DateTime::parse_from_rfc3339("2020-02-20T02:02:02.202000+00:00")
.expect("Failed to parse datetime")
.with_timezone(&Utc),
))
.index("retention-3,retention-15".to_string())
.query("service:web* AND @http.status_code:[200 TO 299]".to_string())
.sort(LogsSort::TIME_ASCENDING);
let configuration = datadog::Configuration::new();
let api = LogsAPI::with_config(configuration);
let resp = api.list_logs(body).await;
if let Ok(value) = resp {
println!("{:#?}", value);
} else {
println!("{:#?}", resp.unwrap_err());
}
}

Example source, lines 10–33:
async fn main() {
let body = LogsListRequest::new(
LogsListRequestTime::new(
DateTime::parse_from_rfc3339("2021-11-11T10:11:11+00:00")
.expect("Failed to parse datetime")
.with_timezone(&Utc),
DateTime::parse_from_rfc3339("2021-11-11T11:11:11+00:00")
.expect("Failed to parse datetime")
.with_timezone(&Utc),
)
.timezone("Europe/Paris".to_string()),
)
.index("main".to_string())
.query("host:Test*".to_string())
.sort(LogsSort::TIME_ASCENDING);
let configuration = datadog::Configuration::new();
let api = LogsAPI::with_config(configuration);
let resp = api.list_logs(body).await;
if let Ok(value) = resp {
println!("{:#?}", value);
} else {
println!("{:#?}", resp.unwrap_err());
}
}

pub fn with_client_and_config(
    config: Configuration,
    client: ClientWithMiddleware,
) -> Self
source§
pub async fn list_logs(
&self,
body: LogsListRequest,
) -> Result<LogsListResponse, Error<ListLogsError>>
pub async fn list_logs( &self, body: LogsListRequest, ) -> Result<LogsListResponse, Error<ListLogsError>>
List endpoint returns logs that match a log search query. Results are paginated.
If you are considering archiving logs for your organization, consider use of the Datadog archive capabilities instead of the log list API. See Datadog Logs Archive documentation.
Examples found in repository?
Example source, lines 10–30:
async fn main() {
let body = LogsListRequest::new(LogsListRequestTime::new(
DateTime::parse_from_rfc3339("2020-02-02T02:02:02.202000+00:00")
.expect("Failed to parse datetime")
.with_timezone(&Utc),
DateTime::parse_from_rfc3339("2020-02-20T02:02:02.202000+00:00")
.expect("Failed to parse datetime")
.with_timezone(&Utc),
))
.index("retention-3,retention-15".to_string())
.query("service:web* AND @http.status_code:[200 TO 299]".to_string())
.sort(LogsSort::TIME_ASCENDING);
let configuration = datadog::Configuration::new();
let api = LogsAPI::with_config(configuration);
let resp = api.list_logs(body).await;
if let Ok(value) = resp {
println!("{:#?}", value);
} else {
println!("{:#?}", resp.unwrap_err());
}
}

More examples
Example source, lines 10–33:
async fn main() {
let body = LogsListRequest::new(
LogsListRequestTime::new(
DateTime::parse_from_rfc3339("2021-11-11T10:11:11+00:00")
.expect("Failed to parse datetime")
.with_timezone(&Utc),
DateTime::parse_from_rfc3339("2021-11-11T11:11:11+00:00")
.expect("Failed to parse datetime")
.with_timezone(&Utc),
)
.timezone("Europe/Paris".to_string()),
)
.index("main".to_string())
.query("host:Test*".to_string())
.sort(LogsSort::TIME_ASCENDING);
let configuration = datadog::Configuration::new();
let api = LogsAPI::with_config(configuration);
let resp = api.list_logs(body).await;
if let Ok(value) = resp {
println!("{:#?}", value);
} else {
println!("{:#?}", resp.unwrap_err());
}
}

source§
pub async fn list_logs_with_http_info(
&self,
body: LogsListRequest,
) -> Result<ResponseContent<LogsListResponse>, Error<ListLogsError>>
pub async fn list_logs_with_http_info( &self, body: LogsListRequest, ) -> Result<ResponseContent<LogsListResponse>, Error<ListLogsError>>
List endpoint returns logs that match a log search query. Results are paginated.
If you are considering archiving logs for your organization, consider use of the Datadog archive capabilities instead of the log list API. See Datadog Logs Archive documentation.
source§
pub async fn submit_log(
&self,
body: Vec<HTTPLogItem>,
params: SubmitLogOptionalParams,
) -> Result<BTreeMap<String, Value>, Error<SubmitLogError>>
pub async fn submit_log( &self, body: Vec<HTTPLogItem>, params: SubmitLogOptionalParams, ) -> Result<BTreeMap<String, Value>, Error<SubmitLogError>>
Send your logs to your Datadog platform over HTTP. Limits per HTTP request are:
- Maximum content size per payload (uncompressed): 5MB
- Maximum size for a single log: 1MB
- Maximum array size if sending multiple logs in an array: 1000 entries
Any log exceeding 1MB is accepted and truncated by Datadog:
- For a single log request, the API truncates the log at 1MB and returns a 2xx.
- For a multi-logs request, the API processes all logs, truncates only logs larger than 1MB, and returns a 2xx.
Datadog recommends sending your logs compressed.
Add the Content-Encoding: gzip header to the request when sending compressed logs.
The status codes answered by the HTTP API are:
- 200: OK
- 400: Bad request (likely an issue in the payload formatting)
- 403: Permission issue (likely using an invalid API Key)
- 413: Payload too large (batch is above 5MB uncompressed)
- 5xx: Internal error, request should be retried after some time
Examples found in repository?
Example source, lines 9–23:
async fn main() {
let body = vec![HTTPLogItem::new("Example-Log".to_string())
.ddtags("host:ExampleLog".to_string())
.additional_properties(BTreeMap::from([]))];
let configuration = datadog::Configuration::new();
let api = LogsAPI::with_config(configuration);
let resp = api
.submit_log(body, SubmitLogOptionalParams::default())
.await;
if let Ok(value) = resp {
println!("{:#?}", value);
} else {
println!("{:#?}", resp.unwrap_err());
}
}

More examples
Example source, lines 10–27:
async fn main() {
let body = vec![HTTPLogItem::new("Example-Log".to_string())
.ddtags("host:ExampleLog".to_string())
.additional_properties(BTreeMap::from([]))];
let configuration = datadog::Configuration::new();
let api = LogsAPI::with_config(configuration);
let resp = api
.submit_log(
body,
SubmitLogOptionalParams::default().content_encoding(ContentEncoding::GZIP),
)
.await;
if let Ok(value) = resp {
println!("{:#?}", value);
} else {
println!("{:#?}", resp.unwrap_err());
}
}

Example source, lines 11–28:
async fn main() {
let body = vec![HTTPLogItem::new("Example-Log".to_string())
.ddtags("host:ExampleLog".to_string())
.additional_properties(BTreeMap::from([]))];
let configuration = datadog::Configuration::new();
let api = LogsAPI::with_config(configuration);
let resp = api
.submit_log(
body,
SubmitLogOptionalParams::default().content_encoding(ContentEncoding::DEFLATE),
)
.await;
if let Ok(value) = resp {
println!("{:#?}", value);
} else {
println!("{:#?}", resp.unwrap_err());
}
}

source§
pub async fn submit_log_with_http_info(
&self,
body: Vec<HTTPLogItem>,
params: SubmitLogOptionalParams,
) -> Result<ResponseContent<BTreeMap<String, Value>>, Error<SubmitLogError>>
pub async fn submit_log_with_http_info( &self, body: Vec<HTTPLogItem>, params: SubmitLogOptionalParams, ) -> Result<ResponseContent<BTreeMap<String, Value>>, Error<SubmitLogError>>
Send your logs to your Datadog platform over HTTP. Limits per HTTP request are:
- Maximum content size per payload (uncompressed): 5MB
- Maximum size for a single log: 1MB
- Maximum array size if sending multiple logs in an array: 1000 entries
Any log exceeding 1MB is accepted and truncated by Datadog:
- For a single log request, the API truncates the log at 1MB and returns a 2xx.
- For a multi-logs request, the API processes all logs, truncates only logs larger than 1MB, and returns a 2xx.
Datadog recommends sending your logs compressed.
Add the Content-Encoding: gzip header to the request when sending compressed logs.
The status codes answered by the HTTP API are:
- 200: OK
- 400: Bad request (likely an issue in the payload formatting)
- 403: Permission issue (likely using an invalid API Key)
- 413: Payload too large (batch is above 5MB uncompressed)
- 5xx: Internal error, request should be retried after some time
Trait Implementations§
Auto Trait Implementations§
impl Freeze for LogsAPI
impl !RefUnwindSafe for LogsAPI
impl Send for LogsAPI
impl Sync for LogsAPI
impl Unpin for LogsAPI
impl !UnwindSafe for LogsAPI
Blanket Implementations§
source§
impl<T> BorrowMut<T> for T
where
    T: ?Sized,
impl<T> BorrowMut<T> for T
where
    T: ?Sized,
source§
fn borrow_mut(&mut self) -> &mut T
fn borrow_mut(&mut self) -> &mut T
source§
impl<T> CloneToUninit for T
where
    T: Clone,
impl<T> CloneToUninit for T
where
    T: Clone,
source§
unsafe fn clone_to_uninit(&self, dst: *mut T)
unsafe fn clone_to_uninit(&self, dst: *mut T)
This is a nightly-only experimental API. (clone_to_uninit)