//! apalis_core/error.rs — error types for task execution and worker lifecycle.

1use std::{error::Error as StdError, time::Duration};
2use thiserror::Error;
3
/// Convenience type alias for a boxed, thread-safe error trait object.
pub type BoxDynError = Box<dyn StdError + 'static + Send + Sync>;
6/// Execution should be aborted
7/// This signifies that the task should not be retried
8#[derive(Error, Debug)]
9#[error("AbortError: {source}")]
10pub struct AbortError {
11    #[source]
12    source: BoxDynError,
13}
14impl AbortError {
15    /// Create a new abort error
16    pub fn new<E: Into<BoxDynError>>(err: E) -> Self {
17        AbortError { source: err.into() }
18    }
19}
20
21/// Execution should be retried after a specific duration
22/// This increases the attempts
23#[derive(Error, Debug)]
24#[error("RetryError: {source}")]
25pub struct RetryAfterError {
26    #[source]
27    source: BoxDynError,
28    duration: Duration,
29}
30
31impl RetryAfterError {
32    /// Create a new retry after error
33    pub fn new<E: Into<BoxDynError>>(err: E, duration: Duration) -> Self {
34        RetryAfterError {
35            source: err.into(),
36            duration,
37        }
38    }
39
40    /// Get the duration after which the task should be retried
41    pub fn get_duration(&self) -> Duration {
42        self.duration
43    }
44}
45
46/// Execution should be deferred, will be retried instantly
47#[derive(Error, Debug)]
48#[error("DeferredError: {source}")]
49pub struct DeferredError {
50    #[source]
51    source: BoxDynError,
52}
53
54/// Possible errors that can occur when running a worker.
55#[derive(Error, Debug)]
56pub enum WorkerError {
57    /// An error occurred while consuming the task stream.
58    #[error("Failed to consume task stream: {0}")]
59    StreamError(BoxDynError),
60    /// An error occurred in the worker's heartbeat.
61    #[error("Heartbeat error: {0}")]
62    HeartbeatError(BoxDynError),
63    /// An error occurred while trying to change the state of the worker.
64    #[error("Failed to handle the new state: {0}")]
65    StateError(WorkerStateError),
66    /// A worker that terminates when .stop was called
67    #[error("Worker stopped and gracefully exited")]
68    GracefulExit,
69    /// A worker panicked and the panic was caught.
70    #[error("Worker panicked: {0}")]
71    PanicError(String),
72    /// An error occurred while handling io
73    #[error("IO error: {0}")]
74    IoError(#[from] std::io::Error),
75}
76
/// Errors related to worker state transitions
/// (start / pause / resume / shutdown lifecycle).
#[derive(Error, Debug)]
pub enum WorkerStateError {
    /// The worker has not been started yet.
    #[error("Worker not started, did you forget to call worker.start()")]
    NotStarted,
    /// The worker was asked to start but is already running.
    #[error("Worker already started")]
    AlreadyStarted,
    /// The requested transition needs a running worker, but it is not running.
    #[error("Worker is not running")]
    NotRunning,
    /// A resume was requested but the worker is not paused.
    #[error("Worker is not paused")]
    NotPaused,
    /// No transitions are accepted because the worker is shutting down.
    #[error("Worker is shutting down")]
    ShuttingDown,
    /// A state string was supplied that does not name a known state.
    #[error("Worker provided with invalid state {0}")]
    InvalidState(String),
}
98}