use serde::{Deserialize, Serialize};
use std::collections::HashMap;
/// Resource limits for a Salesforce org, as returned by the REST `limits` endpoint.
///
/// Field names serialize in PascalCase to match the Salesforce JSON payload.
/// Fields whose API key does not follow plain PascalCase conversion carry an
/// explicit `#[serde(rename)]` (e.g. `DataStorageMB`, `HourlyODataCallout`).
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
#[serde(rename_all = "PascalCase")]
pub struct OrgLimits {
pub daily_api_requests: LimitInfo,
pub daily_async_apex_executions: LimitInfo,
pub daily_batch_apex_executions: LimitInfo,
pub daily_durable_generic_streaming_api_events: LimitInfo,
pub daily_durable_streaming_api_events: LimitInfo,
pub daily_generic_streaming_api_events: LimitInfo,
pub daily_streaming_api_events: LimitInfo,
pub daily_workflow_emails: LimitInfo,
// "MB" would become "Mb" under PascalCase conversion, hence the rename.
#[serde(rename = "DataStorageMB")]
pub data_storage_mb: LimitInfo,
#[serde(rename = "FileStorageMB")]
pub file_storage_mb: LimitInfo,
pub hourly_async_report_runs: LimitInfo,
pub hourly_dashboard_refreshes: LimitInfo,
pub hourly_dashboard_results: LimitInfo,
pub hourly_dashboard_statuses: LimitInfo,
// "Id" in the API key does not match the "IdMapping" PascalCase split.
#[serde(rename = "HourlyLongTermIdMapping")]
pub hourly_long_term_id_mapping: LimitInfo,
pub hourly_managed_content_public_requests: LimitInfo,
// "OData" has a non-standard capitalization the rename preserves.
#[serde(rename = "HourlyODataCallout")]
pub hourly_o_data_callout: LimitInfo,
#[serde(rename = "HourlyShortTermIdMapping")]
pub hourly_short_term_id_mapping: LimitInfo,
pub hourly_time_based_workflow: LimitInfo,
pub mass_email: LimitInfo,
pub single_email: LimitInfo,
/// Any limit keys not modeled above are captured here verbatim, so new
/// limits added by Salesforce do not break deserialization.
#[serde(flatten)]
pub additional_limits: HashMap<String, LimitInfo>,
}
/// A single org limit: the maximum allowed, the amount remaining, and
/// (when the API reports it) the amount already used.
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
#[serde(rename_all = "PascalCase")]
pub struct LimitInfo {
// Maximum allowed value for this limit.
pub max: i64,
// Capacity still available.
pub remaining: i64,
// Consumed amount; not every endpoint reports it, so it is optional and
// omitted from serialized output when absent.
#[serde(skip_serializing_if = "Option::is_none")]
pub used: Option<i64>,
}
impl LimitInfo {
/// Creates a new `LimitInfo` from its raw parts.
#[must_use]
pub const fn new(max: i64, remaining: i64, used: Option<i64>) -> Self {
Self {
max,
remaining,
used,
}
}
/// Percentage of the limit consumed (`0.0`–`100.0` for well-formed data).
///
/// When `used` is not reported it is derived as `max - remaining`
/// (saturating, so extreme values cannot panic on overflow).
/// Returns `0.0` when `max` is zero to avoid division by zero.
#[must_use]
pub fn percentage_used(&self) -> f64 {
if self.max == 0 {
return 0.0;
}
let used = self
.used
.unwrap_or_else(|| self.max.saturating_sub(self.remaining));
(used as f64 / self.max as f64) * 100.0
}
/// Returns `true` when no capacity remains.
///
/// Uses `<= 0` rather than `== 0`: Salesforce can report a negative
/// `Remaining` when an org has overshot a limit, and the previous
/// strict-equality check silently returned `false` in that case.
#[must_use]
pub const fn is_at_limit(&self) -> bool {
self.remaining <= 0
}
/// Returns `true` when usage strictly exceeds `threshold` percent.
#[must_use]
pub fn is_above_threshold(&self, threshold: f64) -> bool {
self.percentage_used() > threshold
}
}
// Unit tests for `LimitInfo` and `OrgLimits`: construction, percentage math,
// threshold helpers, and serde (de)serialization against Salesforce-shaped JSON.
#[cfg(test)]
mod tests {
use super::*;
use crate::test_support::Must;
// `new` stores its arguments verbatim.
#[test]
fn test_limit_info_new() {
let limit = LimitInfo::new(1000, 250, Some(750));
assert_eq!(limit.max, 1000);
assert_eq!(limit.remaining, 250);
assert_eq!(limit.used, Some(750));
}
// An explicit `used` value drives the percentage directly.
#[test]
fn test_limit_info_percentage_used() {
let limit = LimitInfo::new(1000, 250, Some(750));
assert!((limit.percentage_used() - 75.0).abs() < f64::EPSILON);
}
// Without `used`, the percentage falls back to `max - remaining`.
#[test]
fn test_limit_info_percentage_used_without_explicit_used() {
let limit = LimitInfo::new(1000, 300, None);
assert!((limit.percentage_used() - 70.0).abs() < f64::EPSILON);
}
// A zero `max` must not divide by zero; the result is defined as 0%.
#[test]
fn test_limit_info_percentage_used_zero_max() {
let limit = LimitInfo::new(0, 0, Some(0));
assert!((limit.percentage_used() - 0.0).abs() < f64::EPSILON);
}
#[test]
fn test_limit_info_is_at_limit() {
let at_limit = LimitInfo::new(1000, 0, Some(1000));
assert!(at_limit.is_at_limit());
let not_at_limit = LimitInfo::new(1000, 100, Some(900));
assert!(!not_at_limit.is_at_limit());
}
// The threshold comparison is strictly greater-than: 90% used is not
// "above" a 90.0 threshold.
#[test]
fn test_limit_info_is_above_threshold() {
let limit = LimitInfo::new(1000, 100, Some(900));
assert!(limit.is_above_threshold(80.0));
assert!(limit.is_above_threshold(89.0));
assert!(!limit.is_above_threshold(90.0));
assert!(!limit.is_above_threshold(95.0));
}
// Extreme values exercise the saturating subtraction in percentage_used;
// the call must complete without panicking.
#[test]
fn test_no_panic_on_overflow() {
let limit = LimitInfo::new(i64::MIN, i64::MAX, None);
let _ = limit.percentage_used();
}
// Serialized keys are PascalCase per the serde rename_all attribute.
#[test]
fn test_limit_info_serialize() {
let limit = LimitInfo::new(15000, 14850, Some(150));
let json = serde_json::to_string(&limit).must();
assert!(json.contains("\"Max\":15000"));
assert!(json.contains("\"Remaining\":14850"));
assert!(json.contains("\"Used\":150"));
}
#[test]
fn test_limit_info_deserialize() {
let json = r#"{
"Max": 15000,
"Remaining": 14850,
"Used": 150
}"#;
let limit: LimitInfo = serde_json::from_str(json).must();
assert_eq!(limit.max, 15000);
assert_eq!(limit.remaining, 14850);
assert_eq!(limit.used, Some(150));
}
// "Used" is optional in the payload and deserializes to None when absent.
#[test]
fn test_limit_info_deserialize_without_used() {
let json = r#"{
"Max": 2000000,
"Remaining": 1999000
}"#;
let limit: LimitInfo = serde_json::from_str(json).must();
assert_eq!(limit.max, 2_000_000);
assert_eq!(limit.remaining, 1_999_000);
assert_eq!(limit.used, None);
}
// A full, realistic limits payload maps onto every named field.
#[test]
fn test_org_limits_deserialize() {
let json = r#"{
"DailyApiRequests": {
"Max": 15000,
"Remaining": 14850,
"Used": 150
},
"DailyAsyncApexExecutions": {
"Max": 250000,
"Remaining": 250000
},
"DailyBatchApexExecutions": {
"Max": 250000,
"Remaining": 250000
},
"DailyDurableGenericStreamingApiEvents": {
"Max": 10000,
"Remaining": 10000
},
"DailyDurableStreamingApiEvents": {
"Max": 10000,
"Remaining": 10000
},
"DailyGenericStreamingApiEvents": {
"Max": 10000,
"Remaining": 10000
},
"DailyStreamingApiEvents": {
"Max": 10000,
"Remaining": 10000
},
"DailyWorkflowEmails": {
"Max": 1000,
"Remaining": 1000
},
"DataStorageMB": {
"Max": 5120,
"Remaining": 4800
},
"FileStorageMB": {
"Max": 20480,
"Remaining": 20000
},
"HourlyAsyncReportRuns": {
"Max": 1200,
"Remaining": 1200
},
"HourlyDashboardRefreshes": {
"Max": 200,
"Remaining": 200
},
"HourlyDashboardResults": {
"Max": 5000,
"Remaining": 5000
},
"HourlyDashboardStatuses": {
"Max": 999999999,
"Remaining": 999999999
},
"HourlyLongTermIdMapping": {
"Max": 100000,
"Remaining": 100000
},
"HourlyManagedContentPublicRequests": {
"Max": 50000,
"Remaining": 50000
},
"HourlyODataCallout": {
"Max": 10000,
"Remaining": 10000
},
"HourlyShortTermIdMapping": {
"Max": 100000,
"Remaining": 100000
},
"HourlyTimeBasedWorkflow": {
"Max": 1000,
"Remaining": 1000
},
"MassEmail": {
"Max": 10,
"Remaining": 10
},
"SingleEmail": {
"Max": 15,
"Remaining": 15
}
}"#;
let limits: OrgLimits = serde_json::from_str(json).must();
assert_eq!(limits.daily_api_requests.max, 15000);
assert_eq!(limits.daily_api_requests.remaining, 14850);
assert_eq!(limits.daily_api_requests.used, Some(150));
assert_eq!(limits.data_storage_mb.max, 5120);
assert_eq!(limits.file_storage_mb.max, 20480);
}
// Unknown keys ("FutureLimit") land in the flattened additional_limits map.
#[test]
fn test_org_limits_with_additional_limits() {
let json = r#"{
"DailyApiRequests": {"Max": 15000, "Remaining": 14850},
"DailyAsyncApexExecutions": {"Max": 250000, "Remaining": 250000},
"DailyBatchApexExecutions": {"Max": 250000, "Remaining": 250000},
"DailyDurableGenericStreamingApiEvents": {"Max": 10000, "Remaining": 10000},
"DailyDurableStreamingApiEvents": {"Max": 10000, "Remaining": 10000},
"DailyGenericStreamingApiEvents": {"Max": 10000, "Remaining": 10000},
"DailyStreamingApiEvents": {"Max": 10000, "Remaining": 10000},
"DailyWorkflowEmails": {"Max": 1000, "Remaining": 1000},
"DataStorageMB": {"Max": 5120, "Remaining": 4800},
"FileStorageMB": {"Max": 20480, "Remaining": 20000},
"HourlyAsyncReportRuns": {"Max": 1200, "Remaining": 1200},
"HourlyDashboardRefreshes": {"Max": 200, "Remaining": 200},
"HourlyDashboardResults": {"Max": 5000, "Remaining": 5000},
"HourlyDashboardStatuses": {"Max": 999999999, "Remaining": 999999999},
"HourlyLongTermIdMapping": {"Max": 100000, "Remaining": 100000},
"HourlyManagedContentPublicRequests": {"Max": 50000, "Remaining": 50000},
"HourlyODataCallout": {"Max": 10000, "Remaining": 10000},
"HourlyShortTermIdMapping": {"Max": 100000, "Remaining": 100000},
"HourlyTimeBasedWorkflow": {"Max": 1000, "Remaining": 1000},
"MassEmail": {"Max": 10, "Remaining": 10},
"SingleEmail": {"Max": 15, "Remaining": 15},
"FutureLimit": {"Max": 999, "Remaining": 888}
}"#;
let limits: OrgLimits = serde_json::from_str(json).must();
assert!(limits.additional_limits.contains_key("FutureLimit"));
assert_eq!(limits.additional_limits["FutureLimit"].max, 999);
}
// Serialize -> deserialize must reproduce the original value exactly
// (relies on PartialEq covering every field plus the flattened map).
#[test]
fn test_org_limits_roundtrip() {
let original = OrgLimits {
daily_api_requests: LimitInfo::new(15000, 14850, Some(150)),
daily_async_apex_executions: LimitInfo::new(250_000, 250_000, None),
daily_batch_apex_executions: LimitInfo::new(250_000, 250_000, None),
daily_durable_generic_streaming_api_events: LimitInfo::new(10000, 10000, None),
daily_durable_streaming_api_events: LimitInfo::new(10000, 10000, None),
daily_generic_streaming_api_events: LimitInfo::new(10000, 10000, None),
daily_streaming_api_events: LimitInfo::new(10000, 10000, None),
daily_workflow_emails: LimitInfo::new(1000, 1000, None),
data_storage_mb: LimitInfo::new(5120, 4800, None),
file_storage_mb: LimitInfo::new(20480, 20000, None),
hourly_async_report_runs: LimitInfo::new(1200, 1200, None),
hourly_dashboard_refreshes: LimitInfo::new(200, 200, None),
hourly_dashboard_results: LimitInfo::new(5000, 5000, None),
hourly_dashboard_statuses: LimitInfo::new(999_999_999, 999_999_999, None),
hourly_long_term_id_mapping: LimitInfo::new(100_000, 100_000, None),
hourly_managed_content_public_requests: LimitInfo::new(50000, 50000, None),
hourly_o_data_callout: LimitInfo::new(10000, 10000, None),
hourly_short_term_id_mapping: LimitInfo::new(100_000, 100_000, None),
hourly_time_based_workflow: LimitInfo::new(1000, 1000, None),
mass_email: LimitInfo::new(10, 10, None),
single_email: LimitInfo::new(15, 15, None),
additional_limits: HashMap::new(),
};
let json = serde_json::to_string(&original).must();
let deserialized: OrgLimits = serde_json::from_str(&json).must();
assert_eq!(original, deserialized);
}
}
// Integration tests for the REST `limits` call against a wiremock server.
// Only compiled with the "mock" feature enabled.
//
// Fix: the 401/500 tests previously asserted `err.to_string().contains("")`,
// which is true for EVERY string and therefore asserted nothing; they now
// require a non-empty error message.
#[cfg(all(test, feature = "mock"))]
mod integration_tests {
use crate::client::builder;
use crate::config::ClientConfig;
use crate::test_support::{MockAuthenticator, Must, MustMsg};
use wiremock::matchers::{bearer_token, header, method, path};
use wiremock::{Mock, MockServer, ResponseTemplate};
// A complete, realistic limits payload covering every named OrgLimits field.
fn sample_limits_response() -> serde_json::Value {
serde_json::json!({
"DailyApiRequests": {
"Max": 15000,
"Remaining": 14850,
"Used": 150
},
"DailyAsyncApexExecutions": {
"Max": 250_000,
"Remaining": 250_000
},
"DailyBatchApexExecutions": {
"Max": 250_000,
"Remaining": 250_000
},
"DailyDurableGenericStreamingApiEvents": {
"Max": 10000,
"Remaining": 10000
},
"DailyDurableStreamingApiEvents": {
"Max": 10000,
"Remaining": 10000
},
"DailyGenericStreamingApiEvents": {
"Max": 10000,
"Remaining": 10000
},
"DailyStreamingApiEvents": {
"Max": 10000,
"Remaining": 10000
},
"DailyWorkflowEmails": {
"Max": 1000,
"Remaining": 995,
"Used": 5
},
"DataStorageMB": {
"Max": 5120,
"Remaining": 4800
},
"FileStorageMB": {
"Max": 20480,
"Remaining": 20000
},
"HourlyAsyncReportRuns": {
"Max": 1200,
"Remaining": 1200
},
"HourlyDashboardRefreshes": {
"Max": 200,
"Remaining": 200
},
"HourlyDashboardResults": {
"Max": 5000,
"Remaining": 5000
},
"HourlyDashboardStatuses": {
"Max": 999_999_999,
"Remaining": 999_999_999
},
"HourlyLongTermIdMapping": {
"Max": 100_000,
"Remaining": 100_000
},
"HourlyManagedContentPublicRequests": {
"Max": 50000,
"Remaining": 50000
},
"HourlyODataCallout": {
"Max": 10000,
"Remaining": 10000
},
"HourlyShortTermIdMapping": {
"Max": 100_000,
"Remaining": 100_000
},
"HourlyTimeBasedWorkflow": {
"Max": 1000,
"Remaining": 1000
},
"MassEmail": {
"Max": 10,
"Remaining": 10
},
"SingleEmail": {
"Max": 15,
"Remaining": 15
}
})
}
// Happy path: a 200 response with a bearer-token match deserializes fully.
#[tokio::test]
async fn test_limits_success() {
let mock_server = MockServer::start().await;
let auth = MockAuthenticator::new("test_token", &mock_server.uri());
Mock::given(method("GET"))
.and(path("/services/data/v60.0/limits"))
.and(bearer_token("test_token"))
.respond_with(ResponseTemplate::new(200).set_body_json(sample_limits_response()))
.mount(&mock_server)
.await;
let client = builder()
.authenticate(auth)
.build()
.await
.must_msg("Failed to build client");
let limits = client
.rest()
.limits()
.await
.must_msg("Failed to get limits");
assert_eq!(limits.daily_api_requests.max, 15000);
assert_eq!(limits.daily_api_requests.remaining, 14850);
assert_eq!(limits.daily_api_requests.used, Some(150));
assert_eq!(limits.daily_workflow_emails.used, Some(5));
assert_eq!(limits.data_storage_mb.max, 5120);
}
// ClientConfig.api_version must be reflected in the request path (v59.0).
#[tokio::test]
async fn test_limits_with_custom_api_version() {
let mock_server = MockServer::start().await;
let auth = MockAuthenticator::new("custom_token", &mock_server.uri());
Mock::given(method("GET"))
.and(path("/services/data/v59.0/limits"))
.and(bearer_token("custom_token"))
.respond_with(ResponseTemplate::new(200).set_body_json(sample_limits_response()))
.mount(&mock_server)
.await;
let config = ClientConfig {
api_version: "v59.0".into(),
..Default::default()
};
let client = builder()
.authenticate(auth)
.config(config)
.build()
.await
.must_msg("Failed to build client");
let limits = client
.rest()
.limits()
.await
.must_msg("Failed to get limits");
assert_eq!(limits.daily_api_requests.max, 15000);
}
// A 401 must surface as an Err with a descriptive (non-empty) message.
#[tokio::test]
async fn test_limits_unauthorized() {
let mock_server = MockServer::start().await;
let auth = MockAuthenticator::new("invalid_token", &mock_server.uri());
Mock::given(method("GET"))
.and(path("/services/data/v60.0/limits"))
.respond_with(ResponseTemplate::new(401))
.mount(&mock_server)
.await;
let client = builder()
.authenticate(auth)
.build()
.await
.must_msg("Failed to build client");
let result = client.rest().limits().await;
let Err(err) = result else {
panic!("Expected an error");
};
// `contains("")` is always true; require an actual error message instead.
assert!(
!err.to_string().is_empty(),
"expected a descriptive error message for HTTP 401"
);
}
// A 500 must also surface as an Err with a descriptive message.
#[tokio::test]
async fn test_limits_server_error() {
let mock_server = MockServer::start().await;
let auth = MockAuthenticator::new("test_token", &mock_server.uri());
Mock::given(method("GET"))
.and(path("/services/data/v60.0/limits"))
.respond_with(ResponseTemplate::new(500))
.mount(&mock_server)
.await;
let client = builder()
.authenticate(auth)
.build()
.await
.must_msg("Failed to build client");
let result = client.rest().limits().await;
let Err(err) = result else {
panic!("Expected an error");
};
// `contains("")` is always true; require an actual error message instead.
assert!(
!err.to_string().is_empty(),
"expected a descriptive error message for HTTP 500"
);
}
// The Authorization header must carry the bearer token exactly once.
#[tokio::test]
async fn test_limits_correct_headers() {
let mock_server = MockServer::start().await;
let auth = MockAuthenticator::new("header_test_token", &mock_server.uri());
Mock::given(method("GET"))
.and(path("/services/data/v60.0/limits"))
.and(header("Authorization", "Bearer header_test_token"))
.respond_with(ResponseTemplate::new(200).set_body_json(sample_limits_response()))
.expect(1)
.mount(&mock_server)
.await;
let client = builder()
.authenticate(auth)
.build()
.await
.must_msg("Failed to build client");
client
.rest()
.limits()
.await
.must_msg("Failed to get limits");
}
// A fully-consumed limit reports is_at_limit() and 100% usage.
#[tokio::test]
async fn test_limits_at_limit() {
let mock_server = MockServer::start().await;
let auth = MockAuthenticator::new("test_token", &mock_server.uri());
let at_limit_response = serde_json::json!({
"DailyApiRequests": {
"Max": 15000,
"Remaining": 0,
"Used": 15000
},
"DailyAsyncApexExecutions": {"Max": 250_000, "Remaining": 250_000},
"DailyBatchApexExecutions": {"Max": 250_000, "Remaining": 250_000},
"DailyDurableGenericStreamingApiEvents": {"Max": 10000, "Remaining": 10000},
"DailyDurableStreamingApiEvents": {"Max": 10000, "Remaining": 10000},
"DailyGenericStreamingApiEvents": {"Max": 10000, "Remaining": 10000},
"DailyStreamingApiEvents": {"Max": 10000, "Remaining": 10000},
"DailyWorkflowEmails": {"Max": 1000, "Remaining": 1000},
"DataStorageMB": {"Max": 5120, "Remaining": 4800},
"FileStorageMB": {"Max": 20480, "Remaining": 20000},
"HourlyAsyncReportRuns": {"Max": 1200, "Remaining": 1200},
"HourlyDashboardRefreshes": {"Max": 200, "Remaining": 200},
"HourlyDashboardResults": {"Max": 5000, "Remaining": 5000},
"HourlyDashboardStatuses": {"Max": 999_999_999, "Remaining": 999_999_999},
"HourlyLongTermIdMapping": {"Max": 100_000, "Remaining": 100_000},
"HourlyManagedContentPublicRequests": {"Max": 50000, "Remaining": 50000},
"HourlyODataCallout": {"Max": 10000, "Remaining": 10000},
"HourlyShortTermIdMapping": {"Max": 100_000, "Remaining": 100_000},
"HourlyTimeBasedWorkflow": {"Max": 1000, "Remaining": 1000},
"MassEmail": {"Max": 10, "Remaining": 10},
"SingleEmail": {"Max": 15, "Remaining": 15}
});
Mock::given(method("GET"))
.and(path("/services/data/v60.0/limits"))
.respond_with(ResponseTemplate::new(200).set_body_json(at_limit_response))
.mount(&mock_server)
.await;
let client = builder()
.authenticate(auth)
.build()
.await
.must_msg("Failed to build client");
let limits = client
.rest()
.limits()
.await
.must_msg("Failed to get limits");
assert!(limits.daily_api_requests.is_at_limit());
assert!((limits.daily_api_requests.percentage_used() - 100.0).abs() < f64::EPSILON);
}
// Unknown limit keys from a future API version land in additional_limits.
#[tokio::test]
async fn test_limits_with_additional_unknown_limits() {
let mock_server = MockServer::start().await;
let auth = MockAuthenticator::new("test_token", &mock_server.uri());
let mut response = sample_limits_response();
response.as_object_mut().must().insert(
"FutureNewLimit".to_string(),
serde_json::json!({"Max": 5000, "Remaining": 4500}),
);
Mock::given(method("GET"))
.and(path("/services/data/v60.0/limits"))
.respond_with(ResponseTemplate::new(200).set_body_json(response))
.mount(&mock_server)
.await;
let client = builder()
.authenticate(auth)
.build()
.await
.must_msg("Failed to build client");
let limits = client
.rest()
.limits()
.await
.must_msg("Failed to get limits");
assert!(limits.additional_limits.contains_key("FutureNewLimit"));
assert_eq!(limits.additional_limits["FutureNewLimit"].max, 5000);
}
// High usage (90%) trips the 80% threshold check on the deserialized data.
#[tokio::test]
async fn test_limits_threshold_warnings() {
let mock_server = MockServer::start().await;
let auth = MockAuthenticator::new("test_token", &mock_server.uri());
let high_usage_response = serde_json::json!({
"DailyApiRequests": {
"Max": 15000,
"Remaining": 1500,
"Used": 13500
},
"DailyAsyncApexExecutions": {"Max": 250_000, "Remaining": 250_000},
"DailyBatchApexExecutions": {"Max": 250_000, "Remaining": 250_000},
"DailyDurableGenericStreamingApiEvents": {"Max": 10000, "Remaining": 10000},
"DailyDurableStreamingApiEvents": {"Max": 10000, "Remaining": 10000},
"DailyGenericStreamingApiEvents": {"Max": 10000, "Remaining": 10000},
"DailyStreamingApiEvents": {"Max": 10000, "Remaining": 10000},
"DailyWorkflowEmails": {"Max": 1000, "Remaining": 1000},
"DataStorageMB": {"Max": 5120, "Remaining": 4800},
"FileStorageMB": {"Max": 20480, "Remaining": 20000},
"HourlyAsyncReportRuns": {"Max": 1200, "Remaining": 1200},
"HourlyDashboardRefreshes": {"Max": 200, "Remaining": 200},
"HourlyDashboardResults": {"Max": 5000, "Remaining": 5000},
"HourlyDashboardStatuses": {"Max": 999_999_999, "Remaining": 999_999_999},
"HourlyLongTermIdMapping": {"Max": 100_000, "Remaining": 100_000},
"HourlyManagedContentPublicRequests": {"Max": 50000, "Remaining": 50000},
"HourlyODataCallout": {"Max": 10000, "Remaining": 10000},
"HourlyShortTermIdMapping": {"Max": 100_000, "Remaining": 100_000},
"HourlyTimeBasedWorkflow": {"Max": 1000, "Remaining": 1000},
"MassEmail": {"Max": 10, "Remaining": 10},
"SingleEmail": {"Max": 15, "Remaining": 15}
});
Mock::given(method("GET"))
.and(path("/services/data/v60.0/limits"))
.respond_with(ResponseTemplate::new(200).set_body_json(high_usage_response))
.mount(&mock_server)
.await;
let client = builder()
.authenticate(auth)
.build()
.await
.must_msg("Failed to build client");
let limits = client
.rest()
.limits()
.await
.must_msg("Failed to get limits");
assert!(limits.daily_api_requests.is_above_threshold(80.0));
assert!((limits.daily_api_requests.percentage_used() - 90.0).abs() < f64::EPSILON);
}
// The client can call limits repeatedly; `.expect(3)` verifies each call
// actually hits the server (no unintended caching).
#[tokio::test]
async fn test_limits_multiple_calls() {
let mock_server = MockServer::start().await;
let auth = MockAuthenticator::new("test_token", &mock_server.uri());
Mock::given(method("GET"))
.and(path("/services/data/v60.0/limits"))
.respond_with(ResponseTemplate::new(200).set_body_json(sample_limits_response()))
.expect(3)
.mount(&mock_server)
.await;
let client = builder()
.authenticate(auth)
.build()
.await
.must_msg("Failed to build client");
for _ in 0..3 {
let limits = client
.rest()
.limits()
.await
.must_msg("Failed to get limits");
assert_eq!(limits.daily_api_requests.max, 15000);
}
}
// A cloned REST handler behaves identically to the original.
#[tokio::test]
async fn test_limits_cloned_handler() {
let mock_server = MockServer::start().await;
let auth = MockAuthenticator::new("test_token", &mock_server.uri());
Mock::given(method("GET"))
.and(path("/services/data/v60.0/limits"))
.respond_with(ResponseTemplate::new(200).set_body_json(sample_limits_response()))
.expect(2)
.mount(&mock_server)
.await;
let client = builder()
.authenticate(auth)
.build()
.await
.must_msg("Failed to build client");
let handler1 = client.rest();
let handler2 = handler1.clone();
let limits1 = handler1.limits().await.must_msg("Failed with handler1");
let limits2 = handler2.limits().await.must_msg("Failed with handler2");
assert_eq!(
limits1.daily_api_requests.max,
limits2.daily_api_requests.max
);
}
}