// meilisearch_sdk/tasks.rs

use serde::{Deserialize, Deserializer, Serialize};
use std::time::Duration;
use time::OffsetDateTime;

use crate::{
    client::Client, client::SwapIndexes, errors::Error, errors::MeilisearchError, indexes::Index,
    request::HttpClient, settings::Settings, task_info::TaskInfo,
};

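/// The type of operation a [Task] performs, together with the operation's
/// optional `details` payload as returned by the Meilisearch tasks API.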
#[derive(Debug, Clone, Deserialize)]
#[serde(rename_all = "camelCase", tag = "type")]
pub enum TaskType {
    Customs,
    DocumentAdditionOrUpdate {
        details: Option<DocumentAdditionOrUpdate>,
    },
    DocumentDeletion {
        details: Option<DocumentDeletion>,
    },
    IndexCreation {
        details: Option<IndexCreation>,
    },
    IndexUpdate {
        details: Option<IndexUpdate>,
    },
    IndexDeletion {
        details: Option<IndexDeletion>,
    },
    SettingsUpdate {
        details: Box<Option<Settings>>,
    },
    DumpCreation {
        details: Option<DumpCreation>,
    },
    IndexSwap {
        details: Option<IndexSwap>,
    },
    TaskCancelation {
        details: Option<TaskCancelation>,
    },
    TaskDeletion {
        details: Option<TaskDeletion>,
    },
    SnapshotCreation {
        details: Option<SnapshotCreation>,
    },
}

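/// A page of [Task]s returned by the `GET /tasks` route, together with the
/// pagination cursor (`from`/`next`) and the `limit` that was applied.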
#[derive(Debug, Clone, Deserialize)]
pub struct TasksResults {
    pub results: Vec<Task>,
    pub total: u64,
    pub limit: u32,
    pub from: Option<u32>,
    pub next: Option<u32>,
}

#[derive(Debug, Clone, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct DocumentAdditionOrUpdate {
    pub indexed_documents: Option<usize>,
    pub received_documents: usize,
}

#[derive(Debug, Clone, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct DocumentDeletion {
    pub provided_ids: Option<usize>,
    pub deleted_documents: Option<usize>,
    pub original_filter: Option<String>,
}

#[derive(Debug, Clone, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct IndexCreation {
    pub primary_key: Option<String>,
}

#[derive(Debug, Clone, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct IndexUpdate {
    pub primary_key: Option<String>,
}

#[derive(Debug, Clone, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct IndexDeletion {
    pub deleted_documents: Option<usize>,
}

#[derive(Debug, Clone, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct SnapshotCreation {}

#[derive(Debug, Clone, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct DumpCreation {
    pub dump_uid: Option<String>,
}

#[derive(Debug, Clone, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct IndexSwap {
    pub swaps: Vec<SwapIndexes>,
}

#[derive(Debug, Clone, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct TaskCancelation {
    pub matched_tasks: usize,
    pub canceled_tasks: Option<usize>,
    pub original_filter: String,
}

#[derive(Debug, Clone, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct TaskDeletion {
    pub matched_tasks: usize,
    pub deleted_tasks: Option<usize>,
    pub original_filter: String,
}

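/// A [Task] that Meilisearch finished processing with an error; `task` holds
/// the same fields as a [SucceededTask], and `error` the [MeilisearchError] that occurred.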
#[derive(Deserialize, Debug, Clone)]
#[serde(rename_all = "camelCase")]
pub struct FailedTask {
    pub error: MeilisearchError,
    #[serde(flatten)]
    pub task: SucceededTask,
}

impl AsRef<u32> for FailedTask {
    fn as_ref(&self) -> &u32 {
        &self.task.uid
    }
}

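/// Deserializes a task `duration` field, which Meilisearch returns as an
/// ISO 8601 duration string (e.g. `"PT10.848957S"`), into a [`std::time::Duration`].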
fn deserialize_duration<'de, D>(deserializer: D) -> Result<Duration, D::Error>
where
    D: Deserializer<'de>,
{
    let s = String::deserialize(deserializer)?;
    let iso_duration = iso8601::duration(&s).map_err(serde::de::Error::custom)?;
    Ok(iso_duration.into())
}

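/// A [Task] that Meilisearch has fully processed, including its timing
/// information and the details of the operation in `update_type`.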
#[derive(Deserialize, Debug, Clone)]
#[serde(rename_all = "camelCase")]
pub struct SucceededTask {
    #[serde(deserialize_with = "deserialize_duration")]
    pub duration: Duration,
    #[serde(with = "time::serde::rfc3339")]
    pub enqueued_at: OffsetDateTime,
    #[serde(with = "time::serde::rfc3339")]
    pub started_at: OffsetDateTime,
    #[serde(with = "time::serde::rfc3339")]
    pub finished_at: OffsetDateTime,
    pub canceled_by: Option<usize>,
    pub index_uid: Option<String>,
    pub error: Option<MeilisearchError>,
    #[serde(flatten)]
    pub update_type: TaskType,
    pub uid: u32,
}

impl AsRef<u32> for SucceededTask {
    fn as_ref(&self) -> &u32 {
        &self.uid
    }
}

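/// A [Task] that has been registered by Meilisearch but not started yet.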
#[derive(Debug, Clone, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct EnqueuedTask {
    #[serde(with = "time::serde::rfc3339")]
    pub enqueued_at: OffsetDateTime,
    pub index_uid: Option<String>,
    #[serde(flatten)]
    pub update_type: TaskType,
    pub uid: u32,
}

impl AsRef<u32> for EnqueuedTask {
    fn as_ref(&self) -> &u32 {
        &self.uid
    }
}

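/// A [Task] that Meilisearch has started processing but not finished yet.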
#[derive(Debug, Clone, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct ProcessingTask {
    #[serde(with = "time::serde::rfc3339")]
    pub enqueued_at: OffsetDateTime,
    #[serde(with = "time::serde::rfc3339")]
    pub started_at: OffsetDateTime,
    pub index_uid: Option<String>,
    #[serde(flatten)]
    pub update_type: TaskType,
    pub uid: u32,
}

impl AsRef<u32> for ProcessingTask {
    fn as_ref(&self) -> &u32 {
        &self.uid
    }
}

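/// A task returned by the Meilisearch tasks API, tagged by its `status`:
/// `enqueued`, `processing`, `failed`, or `succeeded`.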
#[derive(Debug, Clone, Deserialize)]
#[serde(rename_all = "camelCase", tag = "status")]
pub enum Task {
    Enqueued {
        #[serde(flatten)]
        content: EnqueuedTask,
    },
    Processing {
        #[serde(flatten)]
        content: ProcessingTask,
    },
    Failed {
        #[serde(flatten)]
        content: FailedTask,
    },
    Succeeded {
        #[serde(flatten)]
        content: SucceededTask,
    },
}

impl Task {
    #[must_use]
    pub fn get_uid(&self) -> u32 {
        match self {
            Self::Enqueued { content } => *content.as_ref(),
            Self::Processing { content } => *content.as_ref(),
            Self::Failed { content } => *content.as_ref(),
            Self::Succeeded { content } => *content.as_ref(),
        }
    }

    /// Wait until Meilisearch processes a [Task], and get its status.
    ///
    /// `interval` = The frequency at which the server should be polled. **Default = 50ms**
    ///
    /// `timeout` = The maximum time to wait for processing to complete. **Default = 5000ms**
    ///
    /// If the waiting time exceeds `timeout`, an [`Error::Timeout`] is returned.
    ///
    /// See also [`Client::wait_for_task`] and [`Index::wait_for_task`].
    ///
    /// # Example
    ///
    /// ```
    /// # use meilisearch_sdk::{client::*, indexes::*, tasks::Task};
    /// # use serde::{Serialize, Deserialize};
    /// #
    /// # let MEILISEARCH_URL = option_env!("MEILISEARCH_URL").unwrap_or("http://localhost:7700");
    /// # let MEILISEARCH_API_KEY = option_env!("MEILISEARCH_API_KEY").unwrap_or("masterKey");
    /// #
    /// # #[derive(Debug, Serialize, Deserialize, PartialEq)]
    /// # struct Document {
    /// #    id: usize,
    /// #    value: String,
    /// #    kind: String,
    /// # }
    /// #
    /// #
    /// # tokio::runtime::Builder::new_current_thread().enable_all().build().unwrap().block_on(async {
    /// # let client = Client::new(MEILISEARCH_URL, Some(MEILISEARCH_API_KEY)).unwrap();
    /// let movies = client.index("movies_wait_for_completion");
    ///
    /// let status = movies.add_documents(&[
    ///     Document { id: 0, kind: "title".into(), value: "The Social Network".to_string() },
    ///     Document { id: 1, kind: "title".into(), value: "Harry Potter and the Sorcerer's Stone".to_string() },
    /// ], None)
    ///     .await
    ///     .unwrap()
    ///     .wait_for_completion(&client, None, None)
    ///     .await
    ///     .unwrap();
    ///
    /// assert!(matches!(status, Task::Succeeded { .. }));
    /// # movies.delete().await.unwrap().wait_for_completion(&client, None, None).await.unwrap();
    /// # });
    /// ```
    pub async fn wait_for_completion<Http: HttpClient>(
        self,
        client: &Client<Http>,
        interval: Option<Duration>,
        timeout: Option<Duration>,
    ) -> Result<Self, Error> {
        client.wait_for_task(self, interval, timeout).await
    }

    /// Extract the [Index] from a successful `IndexCreation` task.
    ///
    /// If the task failed or was not an `IndexCreation` task, the task itself is returned as the error.
    ///
    /// # Example
    ///
    /// ```
    /// # use meilisearch_sdk::{client::*, indexes::*};
    /// #
    /// # let MEILISEARCH_URL = option_env!("MEILISEARCH_URL").unwrap_or("http://localhost:7700");
    /// # let MEILISEARCH_API_KEY = option_env!("MEILISEARCH_API_KEY").unwrap_or("masterKey");
    /// #
    /// # tokio::runtime::Builder::new_current_thread().enable_all().build().unwrap().block_on(async {
    /// # // create the client
    /// # let client = Client::new(MEILISEARCH_URL, Some(MEILISEARCH_API_KEY)).unwrap();
    /// let task = client.create_index("try_make_index", None).await.unwrap();
    /// let index = client.wait_for_task(task, None, None).await.unwrap().try_make_index(&client).unwrap();
    ///
    /// // and safely access it
    /// assert_eq!(index.as_ref(), "try_make_index");
    /// # index.delete().await.unwrap().wait_for_completion(&client, None, None).await.unwrap();
    /// # });
    /// ```
    #[allow(clippy::result_large_err)] // Since `self` has been consumed, this is not an issue
    pub fn try_make_index<Http: HttpClient>(
        self,
        client: &Client<Http>,
    ) -> Result<Index<Http>, Self> {
        match self {
            Self::Succeeded {
                content:
                    SucceededTask {
                        index_uid,
                        update_type: TaskType::IndexCreation { .. },
                        ..
                    },
            } => Ok(client.index(index_uid.unwrap())),
            _ => Err(self),
        }
    }

    /// Unwrap the [`MeilisearchError`] from a [`Self::Failed`] [Task].
    ///
    /// Will panic if the task was not [`Self::Failed`].
    ///
    /// # Example
    ///
    /// ```
    /// # use meilisearch_sdk::{client::*, indexes::*, errors::ErrorCode};
    /// #
    /// # let MEILISEARCH_URL = option_env!("MEILISEARCH_URL").unwrap_or("http://localhost:7700");
    /// # let MEILISEARCH_API_KEY = option_env!("MEILISEARCH_API_KEY").unwrap_or("masterKey");
    /// #
    /// # tokio::runtime::Builder::new_current_thread().enable_all().build().unwrap().block_on(async {
    /// # let client = Client::new(MEILISEARCH_URL, Some(MEILISEARCH_API_KEY)).unwrap();
    /// let task = client.create_index("unwrap_failure", None).await.unwrap();
    /// let task = client
    ///     .create_index("unwrap_failure", None)
    ///     .await
    ///     .unwrap()
    ///     .wait_for_completion(&client, None, None)
    ///     .await
    ///     .unwrap();
    ///
    /// assert!(task.is_failure());
    ///
    /// let failure = task.unwrap_failure();
    ///
    /// assert_eq!(failure.error_code, ErrorCode::IndexAlreadyExists);
    /// # client.index("unwrap_failure").delete().await.unwrap().wait_for_completion(&client, None, None).await.unwrap();
    /// # });
    /// ```
    #[must_use]
    pub fn unwrap_failure(self) -> MeilisearchError {
        match self {
            Self::Failed {
                content: FailedTask { error, .. },
            } => error,
            _ => panic!("Called `unwrap_failure` on a non `Failed` task."),
        }
    }

    /// Returns `true` if the [Task] is [`Self::Failed`].
    ///
    /// # Example
    ///
    /// ```
    /// # use meilisearch_sdk::{client::*, indexes::*, errors::ErrorCode};
    /// #
    /// # let MEILISEARCH_URL = option_env!("MEILISEARCH_URL").unwrap_or("http://localhost:7700");
    /// # let MEILISEARCH_API_KEY = option_env!("MEILISEARCH_API_KEY").unwrap_or("masterKey");
    /// #
    /// # tokio::runtime::Builder::new_current_thread().enable_all().build().unwrap().block_on(async {
    /// # let client = Client::new(MEILISEARCH_URL, Some(MEILISEARCH_API_KEY)).unwrap();
    /// let task = client.create_index("is_failure", None).await.unwrap();
    /// // create an index with a conflicting uid
    /// let task = client
    ///     .create_index("is_failure", None)
    ///     .await
    ///     .unwrap()
    ///     .wait_for_completion(&client, None, None)
    ///     .await
    ///     .unwrap();
    ///
    /// assert!(task.is_failure());
    /// # client.index("is_failure").delete().await.unwrap().wait_for_completion(&client, None, None).await.unwrap();
    /// # });
    /// ```
    #[must_use]
    pub fn is_failure(&self) -> bool {
        matches!(self, Self::Failed { .. })
    }

    /// Returns `true` if the [Task] is [`Self::Succeeded`].
    ///
    /// # Example
    ///
    /// ```
    /// # use meilisearch_sdk::{client::*, indexes::*, errors::ErrorCode};
    /// #
    /// # let MEILISEARCH_URL = option_env!("MEILISEARCH_URL").unwrap_or("http://localhost:7700");
    /// # let MEILISEARCH_API_KEY = option_env!("MEILISEARCH_API_KEY").unwrap_or("masterKey");
    /// #
    /// # tokio::runtime::Builder::new_current_thread().enable_all().build().unwrap().block_on(async {
    /// # let client = Client::new(MEILISEARCH_URL, Some(MEILISEARCH_API_KEY)).unwrap();
    /// let task = client
    ///     .create_index("is_success", None)
    ///     .await
    ///     .unwrap()
    ///     .wait_for_completion(&client, None, None)
    ///     .await
    ///     .unwrap();
    ///
    /// assert!(task.is_success());
    /// # task.try_make_index(&client).unwrap().delete().await.unwrap().wait_for_completion(&client, None, None).await.unwrap();
    /// # });
    /// ```
    #[must_use]
    pub fn is_success(&self) -> bool {
        matches!(self, Self::Succeeded { .. })
    }

    /// Returns `true` if the [Task] is pending ([`Self::Enqueued`] or [`Self::Processing`]).
    ///
    /// # Example
    /// ```no_run
    /// # // This test is not run because it asserts an enqueued or processing status,
    /// # // and the task might already have been processed by the time `get_task` returns.
    /// # use meilisearch_sdk::{client::*, indexes::*, errors::ErrorCode};
    /// #
    /// # let MEILISEARCH_URL = option_env!("MEILISEARCH_URL").unwrap_or("http://localhost:7700");
    /// # let MEILISEARCH_API_KEY = option_env!("MEILISEARCH_API_KEY").unwrap_or("masterKey");
    /// #
    /// # tokio::runtime::Builder::new_current_thread().enable_all().build().unwrap().block_on(async {
    /// # let client = Client::new(MEILISEARCH_URL, Some(MEILISEARCH_API_KEY)).unwrap();
    /// let task_info = client
    ///     .create_index("is_pending", None)
    ///     .await
    ///     .unwrap();
    /// let task = client.get_task(task_info).await.unwrap();
    ///
    /// assert!(task.is_pending());
    /// # task.wait_for_completion(&client, None, None).await.unwrap().try_make_index(&client).unwrap().delete().await.unwrap().wait_for_completion(&client, None, None).await.unwrap();
    /// # });
    /// ```
    #[must_use]
    pub fn is_pending(&self) -> bool {
        matches!(self, Self::Enqueued { .. } | Self::Processing { .. })
    }
}

impl AsRef<u32> for Task {
    fn as_ref(&self) -> &u32 {
        match self {
            Self::Enqueued { content } => content.as_ref(),
            Self::Processing { content } => content.as_ref(),
            Self::Succeeded { content } => content.as_ref(),
            Self::Failed { content } => content.as_ref(),
        }
    }
}

#[derive(Debug, Serialize, Clone)]
pub struct TasksPaginationFilters {
    /// Maximum number of tasks to return.
    #[serde(skip_serializing_if = "Option::is_none")]
    limit: Option<u32>,
    /// The first task uid that should be returned.
    #[serde(skip_serializing_if = "Option::is_none")]
    from: Option<u32>,
}

#[derive(Debug, Serialize, Clone)]
pub struct TasksCancelFilters {}

#[derive(Debug, Serialize, Clone)]
pub struct TasksDeleteFilters {}

pub type TasksSearchQuery<'a, Http> = TasksQuery<'a, TasksPaginationFilters, Http>;
pub type TasksCancelQuery<'a, Http> = TasksQuery<'a, TasksCancelFilters, Http>;
pub type TasksDeleteQuery<'a, Http> = TasksQuery<'a, TasksDeleteFilters, Http>;

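/// Builder for filtering tasks when listing ([`TasksSearchQuery`]), canceling
/// ([`TasksCancelQuery`]), or deleting ([`TasksDeleteQuery`]) them.
///
/// A minimal sketch of listing tasks with this builder; the URL and API key below
/// are placeholders for a running Meilisearch instance:
///
/// ```no_run
/// # use meilisearch_sdk::{client::Client, tasks::TasksSearchQuery};
/// # tokio::runtime::Builder::new_current_thread().enable_all().build().unwrap().block_on(async {
/// let client = Client::new("http://localhost:7700", Some("masterKey")).unwrap();
///
/// // Only tasks that touched the `movies` index and already succeeded.
/// let tasks = TasksSearchQuery::new(&client)
///     .with_index_uids(["movies"])
///     .with_statuses(["succeeded"])
///     .with_limit(10)
///     .execute()
///     .await
///     .unwrap();
///
/// println!("{} matching tasks", tasks.total);
/// # });
/// ```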
#[derive(Debug, Serialize, Clone)]
#[serde(rename_all = "camelCase")]
pub struct TasksQuery<'a, T, Http: HttpClient> {
    #[serde(skip_serializing)]
    client: &'a Client<Http>,
    /// Only return tasks affecting these index uids.
    #[serde(skip_serializing_if = "Option::is_none")]
    index_uids: Option<Vec<&'a str>>,
    /// Only return tasks with these statuses.
    #[serde(skip_serializing_if = "Option::is_none")]
    statuses: Option<Vec<&'a str>>,
    /// Only return tasks with these [`TaskType`]s.
    #[serde(skip_serializing_if = "Option::is_none", rename = "types")]
    task_types: Option<Vec<&'a str>>,
    /// Uids of the tasks to retrieve.
    #[serde(skip_serializing_if = "Option::is_none")]
    uids: Option<Vec<&'a u32>>,
    /// Uids of the tasks that canceled other tasks.
    #[serde(skip_serializing_if = "Option::is_none")]
    canceled_by: Option<Vec<&'a u32>>,
    /// Only return tasks enqueued before this date.
    #[serde(
        skip_serializing_if = "Option::is_none",
        serialize_with = "time::serde::rfc3339::option::serialize"
    )]
    before_enqueued_at: Option<OffsetDateTime>,
    /// Only return tasks enqueued after this date.
    #[serde(
        skip_serializing_if = "Option::is_none",
        serialize_with = "time::serde::rfc3339::option::serialize"
    )]
    after_enqueued_at: Option<OffsetDateTime>,
    /// Only return tasks started before this date.
    #[serde(
        skip_serializing_if = "Option::is_none",
        serialize_with = "time::serde::rfc3339::option::serialize"
    )]
    before_started_at: Option<OffsetDateTime>,
    /// Only return tasks started after this date.
    #[serde(
        skip_serializing_if = "Option::is_none",
        serialize_with = "time::serde::rfc3339::option::serialize"
    )]
    after_started_at: Option<OffsetDateTime>,
    /// Only return tasks finished before this date.
    #[serde(
        skip_serializing_if = "Option::is_none",
        serialize_with = "time::serde::rfc3339::option::serialize"
    )]
    before_finished_at: Option<OffsetDateTime>,
    /// Only return tasks finished after this date.
    #[serde(
        skip_serializing_if = "Option::is_none",
        serialize_with = "time::serde::rfc3339::option::serialize"
    )]
    after_finished_at: Option<OffsetDateTime>,

    #[serde(flatten)]
    pagination: T,

    /// Whether to reverse the sort order of returned tasks.
    #[serde(skip_serializing_if = "Option::is_none")]
    reverse: Option<bool>,
}

#[allow(missing_docs)]
impl<'a, T, Http: HttpClient> TasksQuery<'a, T, Http> {
    pub fn with_index_uids<'b>(
        &'b mut self,
        index_uids: impl IntoIterator<Item = &'a str>,
    ) -> &'b mut TasksQuery<'a, T, Http> {
        self.index_uids = Some(index_uids.into_iter().collect());
        self
    }
    pub fn with_statuses<'b>(
        &'b mut self,
        statuses: impl IntoIterator<Item = &'a str>,
    ) -> &'b mut TasksQuery<'a, T, Http> {
        self.statuses = Some(statuses.into_iter().collect());
        self
    }
    pub fn with_types<'b>(
        &'b mut self,
        task_types: impl IntoIterator<Item = &'a str>,
    ) -> &'b mut TasksQuery<'a, T, Http> {
        self.task_types = Some(task_types.into_iter().collect());
        self
    }
    pub fn with_uids<'b>(
        &'b mut self,
        uids: impl IntoIterator<Item = &'a u32>,
    ) -> &'b mut TasksQuery<'a, T, Http> {
        self.uids = Some(uids.into_iter().collect());
        self
    }
    pub fn with_before_enqueued_at<'b>(
        &'b mut self,
        before_enqueued_at: &'a OffsetDateTime,
    ) -> &'b mut TasksQuery<'a, T, Http> {
        self.before_enqueued_at = Some(*before_enqueued_at);
        self
    }
    pub fn with_after_enqueued_at<'b>(
        &'b mut self,
        after_enqueued_at: &'a OffsetDateTime,
    ) -> &'b mut TasksQuery<'a, T, Http> {
        self.after_enqueued_at = Some(*after_enqueued_at);
        self
    }
    pub fn with_before_started_at<'b>(
        &'b mut self,
        before_started_at: &'a OffsetDateTime,
    ) -> &'b mut TasksQuery<'a, T, Http> {
        self.before_started_at = Some(*before_started_at);
        self
    }
    pub fn with_after_started_at<'b>(
        &'b mut self,
        after_started_at: &'a OffsetDateTime,
    ) -> &'b mut TasksQuery<'a, T, Http> {
        self.after_started_at = Some(*after_started_at);
        self
    }
    pub fn with_before_finished_at<'b>(
        &'b mut self,
        before_finished_at: &'a OffsetDateTime,
    ) -> &'b mut TasksQuery<'a, T, Http> {
        self.before_finished_at = Some(*before_finished_at);
        self
    }
    pub fn with_after_finished_at<'b>(
        &'b mut self,
        after_finished_at: &'a OffsetDateTime,
    ) -> &'b mut TasksQuery<'a, T, Http> {
        self.after_finished_at = Some(*after_finished_at);
        self
    }
    pub fn with_canceled_by<'b>(
        &'b mut self,
        task_uids: impl IntoIterator<Item = &'a u32>,
    ) -> &'b mut TasksQuery<'a, T, Http> {
        self.canceled_by = Some(task_uids.into_iter().collect());
        self
    }
    pub fn with_reverse<'b>(&'b mut self, reverse: bool) -> &'b mut TasksQuery<'a, T, Http> {
        self.reverse = Some(reverse);
        self
    }
}

impl<'a, Http: HttpClient> TasksQuery<'a, TasksCancelFilters, Http> {
    #[must_use]
    pub fn new(client: &'a Client<Http>) -> TasksQuery<'a, TasksCancelFilters, Http> {
        TasksQuery {
            client,
            index_uids: None,
            statuses: None,
            task_types: None,
            uids: None,
            canceled_by: None,
            before_enqueued_at: None,
            after_enqueued_at: None,
            before_started_at: None,
            after_started_at: None,
            before_finished_at: None,
            after_finished_at: None,
            reverse: None,
            pagination: TasksCancelFilters {},
        }
    }

    pub async fn execute(&'a self) -> Result<TaskInfo, Error> {
        self.client.cancel_tasks_with(self).await
    }
}

impl<'a, Http: HttpClient> TasksQuery<'a, TasksDeleteFilters, Http> {
    #[must_use]
    pub fn new(client: &'a Client<Http>) -> TasksQuery<'a, TasksDeleteFilters, Http> {
        TasksQuery {
            client,
            index_uids: None,
            statuses: None,
            task_types: None,
            uids: None,
            canceled_by: None,
            before_enqueued_at: None,
            after_enqueued_at: None,
            before_started_at: None,
            after_started_at: None,
            before_finished_at: None,
            after_finished_at: None,
            pagination: TasksDeleteFilters {},
            reverse: None,
        }
    }

    pub async fn execute(&'a self) -> Result<TaskInfo, Error> {
        self.client.delete_tasks_with(self).await
    }
}

impl<'a, Http: HttpClient> TasksQuery<'a, TasksPaginationFilters, Http> {
    #[must_use]
    pub fn new(client: &'a Client<Http>) -> TasksQuery<'a, TasksPaginationFilters, Http> {
        TasksQuery {
            client,
            index_uids: None,
            statuses: None,
            task_types: None,
            uids: None,
            canceled_by: None,
            before_enqueued_at: None,
            after_enqueued_at: None,
            before_started_at: None,
            after_started_at: None,
            before_finished_at: None,
            after_finished_at: None,
            pagination: TasksPaginationFilters {
                limit: None,
                from: None,
            },
            reverse: None,
        }
    }
    pub fn with_limit<'b>(
        &'b mut self,
        limit: u32,
    ) -> &'b mut TasksQuery<'a, TasksPaginationFilters, Http> {
        self.pagination.limit = Some(limit);
        self
    }
    pub fn with_from<'b>(
        &'b mut self,
        from: u32,
    ) -> &'b mut TasksQuery<'a, TasksPaginationFilters, Http> {
        self.pagination.from = Some(from);
        self
    }
    pub async fn execute(&'a self) -> Result<TasksResults, Error> {
        self.client.get_tasks_with(self).await
    }
}

#[cfg(test)]
mod test {
    use super::*;
    use crate::{
        client::*,
        errors::{ErrorCode, ErrorType},
    };
    use big_s::S;
    use meilisearch_test_macro::meilisearch_test;
    use serde::{Deserialize, Serialize};
    use std::time::Duration;

    #[derive(Debug, Serialize, Deserialize, PartialEq)]
    struct Document {
        id: usize,
        value: String,
        kind: String,
    }

    #[test]
    fn test_deserialize_task() {
        let datetime = OffsetDateTime::parse(
            "2022-02-03T13:02:38.369634Z",
            &time::format_description::well_known::Rfc3339,
        )
        .unwrap();

        let task: Task = serde_json::from_str(
            r#"
{
  "enqueuedAt": "2022-02-03T13:02:38.369634Z",
  "indexUid": "meili",
  "status": "enqueued",
  "type": "documentAdditionOrUpdate",
  "uid": 12
}"#,
        )
        .unwrap();

        assert!(matches!(
            task,
            Task::Enqueued {
                content: EnqueuedTask {
                    enqueued_at,
                    index_uid: Some(index_uid),
                    update_type: TaskType::DocumentAdditionOrUpdate { details: None },
                    uid: 12,
                }
            }
        if enqueued_at == datetime && index_uid == "meili"));

        let task: Task = serde_json::from_str(
            r#"
{
  "details": {
    "indexedDocuments": null,
    "receivedDocuments": 19547
  },
  "duration": null,
  "enqueuedAt": "2022-02-03T15:17:02.801341Z",
  "finishedAt": null,
  "indexUid": "meili",
  "startedAt": "2022-02-03T15:17:02.812338Z",
  "status": "processing",
  "type": "documentAdditionOrUpdate",
  "uid": 14
}"#,
        )
        .unwrap();

        assert!(matches!(
            task,
            Task::Processing {
                content: ProcessingTask {
                    started_at,
                    update_type: TaskType::DocumentAdditionOrUpdate {
                        details: Some(DocumentAdditionOrUpdate {
                            received_documents: 19547,
                            indexed_documents: None,
                        })
                    },
                    uid: 14,
                    ..
                }
            }
            if started_at == OffsetDateTime::parse(
                "2022-02-03T15:17:02.812338Z",
                &time::format_description::well_known::Rfc3339
            ).unwrap()
        ));

        let task: Task = serde_json::from_str(
            r#"
{
  "details": {
    "indexedDocuments": 19546,
    "receivedDocuments": 19547
  },
  "duration": "PT10.848957S",
  "enqueuedAt": "2022-02-03T15:17:02.801341Z",
  "finishedAt": "2022-02-03T15:17:13.661295Z",
  "indexUid": "meili",
  "startedAt": "2022-02-03T15:17:02.812338Z",
  "status": "succeeded",
  "type": "documentAdditionOrUpdate",
  "uid": 14
}"#,
        )
        .unwrap();

        assert!(matches!(
            task,
            Task::Succeeded {
                content: SucceededTask {
                    update_type: TaskType::DocumentAdditionOrUpdate {
                        details: Some(DocumentAdditionOrUpdate {
                            received_documents: 19547,
                            indexed_documents: Some(19546),
                        })
                    },
                    uid: 14,
                    duration,
                    ..
                }
            }
            if duration == Duration::from_millis(10_848)
        ));
    }

    #[meilisearch_test]
    async fn test_wait_for_task_with_args(client: Client, movies: Index) -> Result<(), Error> {
        let task = movies
            .add_documents(
                &[
                    Document {
                        id: 0,
                        kind: "title".into(),
                        value: S("The Social Network"),
                    },
                    Document {
                        id: 1,
                        kind: "title".into(),
                        value: S("Harry Potter and the Sorcerer's Stone"),
                    },
                ],
                None,
            )
            .await?
            .wait_for_completion(
                &client,
                Some(Duration::from_millis(1)),
                Some(Duration::from_millis(6000)),
            )
            .await?;

        assert!(matches!(task, Task::Succeeded { .. }));
        Ok(())
    }

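    // The following tests run against a local mockito server and only assert
    // that the query parameters are serialized into the expected request path;
    // they do not require a running Meilisearch instance.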
    #[meilisearch_test]
    async fn test_get_tasks_no_params() -> Result<(), Error> {
        let mut s = mockito::Server::new_async().await;
        let mock_server_url = s.url();
        let client = Client::new(mock_server_url, Some("masterKey")).unwrap();
        let path = "/tasks";

        let mock_res = s.mock("GET", path).with_status(200).create_async().await;
        let _ = client.get_tasks().await;
        mock_res.assert_async().await;

        Ok(())
    }

    #[meilisearch_test]
    async fn test_get_tasks_with_params() -> Result<(), Error> {
        let mut s = mockito::Server::new_async().await;
        let mock_server_url = s.url();
        let client = Client::new(mock_server_url, Some("masterKey")).unwrap();
        let path =
            "/tasks?indexUids=movies,test&statuses=equeued&types=documentDeletion&uids=1&limit=0&from=1&reverse=true";

        let mock_res = s.mock("GET", path).with_status(200).create_async().await;

        let mut query = TasksSearchQuery::new(&client);
        query
            .with_index_uids(["movies", "test"])
            .with_statuses(["equeued"])
            .with_types(["documentDeletion"])
            .with_from(1)
            .with_limit(0)
            .with_uids([&1])
            .with_reverse(true);

        let _ = client.get_tasks_with(&query).await;

        mock_res.assert_async().await;

        Ok(())
    }

    #[meilisearch_test]
    async fn test_get_tasks_with_date_params() -> Result<(), Error> {
        let mut s = mockito::Server::new_async().await;
        let mock_server_url = s.url();
        let client = Client::new(mock_server_url, Some("masterKey")).unwrap();
        let path = "/tasks?\
            beforeEnqueuedAt=2022-02-03T13%3A02%3A38.369634Z\
            &afterEnqueuedAt=2023-02-03T13%3A02%3A38.369634Z\
            &beforeStartedAt=2024-02-03T13%3A02%3A38.369634Z\
            &afterStartedAt=2025-02-03T13%3A02%3A38.369634Z\
            &beforeFinishedAt=2026-02-03T13%3A02%3A38.369634Z\
            &afterFinishedAt=2027-02-03T13%3A02%3A38.369634Z";

        let mock_res = s.mock("GET", path).with_status(200).create_async().await;

        let before_enqueued_at = OffsetDateTime::parse(
            "2022-02-03T13:02:38.369634Z",
            &time::format_description::well_known::Rfc3339,
        )
        .unwrap();
        let after_enqueued_at = OffsetDateTime::parse(
            "2023-02-03T13:02:38.369634Z",
            &time::format_description::well_known::Rfc3339,
        )
        .unwrap();
        let before_started_at = OffsetDateTime::parse(
            "2024-02-03T13:02:38.369634Z",
            &time::format_description::well_known::Rfc3339,
        )
        .unwrap();

        let after_started_at = OffsetDateTime::parse(
            "2025-02-03T13:02:38.369634Z",
            &time::format_description::well_known::Rfc3339,
        )
        .unwrap();

        let before_finished_at = OffsetDateTime::parse(
            "2026-02-03T13:02:38.369634Z",
            &time::format_description::well_known::Rfc3339,
        )
        .unwrap();

        let after_finished_at = OffsetDateTime::parse(
            "2027-02-03T13:02:38.369634Z",
            &time::format_description::well_known::Rfc3339,
        )
        .unwrap();

        let mut query = TasksSearchQuery::new(&client);
        query
            .with_before_enqueued_at(&before_enqueued_at)
            .with_after_enqueued_at(&after_enqueued_at)
            .with_before_started_at(&before_started_at)
            .with_after_started_at(&after_started_at)
            .with_before_finished_at(&before_finished_at)
            .with_after_finished_at(&after_finished_at);

        let _ = client.get_tasks_with(&query).await;

        mock_res.assert_async().await;

        Ok(())
    }

    #[meilisearch_test]
    async fn test_get_tasks_on_struct_with_params() -> Result<(), Error> {
        let mut s = mockito::Server::new_async().await;
        let mock_server_url = s.url();
        let client = Client::new(mock_server_url, Some("masterKey")).unwrap();
        let path =
            "/tasks?indexUids=movies,test&statuses=equeued&types=documentDeletion&canceledBy=9";

        let mock_res = s.mock("GET", path).with_status(200).create_async().await;

        let mut query = TasksSearchQuery::new(&client);
        let _ = query
            .with_index_uids(["movies", "test"])
            .with_statuses(["equeued"])
            .with_types(["documentDeletion"])
            .with_canceled_by([&9])
            .execute()
            .await;

        mock_res.assert_async().await;

        Ok(())
    }

    #[meilisearch_test]
    async fn test_get_tasks_with_nonexistent_index_uids(client: Client) -> Result<(), Error> {
        let mut query = TasksSearchQuery::new(&client);
        query.with_index_uids(["no_name"]);
        let tasks = client.get_tasks_with(&query).await.unwrap();

        assert_eq!(tasks.results.len(), 0);
        Ok(())
    }

    #[meilisearch_test]
    async fn test_get_tasks_with_execute(client: Client) -> Result<(), Error> {
        let tasks = TasksSearchQuery::new(&client)
            .with_index_uids(["no_name"])
            .execute()
            .await
            .unwrap();

        assert_eq!(tasks.results.len(), 0);
        Ok(())
    }

    #[meilisearch_test]
    async fn test_failing_task(client: Client, index: Index) -> Result<(), Error> {
        let task_info = client.create_index(index.uid, None).await.unwrap();
        let task = client.get_task(task_info).await?;
        let task = client.wait_for_task(task, None, None).await?;

        let error = task.unwrap_failure();
        assert_eq!(error.error_code, ErrorCode::IndexAlreadyExists);
        assert_eq!(error.error_type, ErrorType::InvalidRequest);
        Ok(())
    }

    #[meilisearch_test]
    async fn test_cancel_tasks_with_params() -> Result<(), Error> {
        let mut s = mockito::Server::new_async().await;
        let mock_server_url = s.url();
        let client = Client::new(mock_server_url, Some("masterKey")).unwrap();
        let path =
            "/tasks/cancel?indexUids=movies,test&statuses=equeued&types=documentDeletion&uids=1";

        let mock_res = s.mock("POST", path).with_status(200).create_async().await;

        let mut query = TasksCancelQuery::new(&client);
        query
            .with_index_uids(["movies", "test"])
            .with_statuses(["equeued"])
            .with_types(["documentDeletion"])
            .with_uids([&1]);

        let _ = client.cancel_tasks_with(&query).await;

        mock_res.assert_async().await;

        Ok(())
    }

    #[meilisearch_test]
    async fn test_cancel_tasks_with_params_execute() -> Result<(), Error> {
        let mut s = mockito::Server::new_async().await;
        let mock_server_url = s.url();
        let client = Client::new(mock_server_url, Some("masterKey")).unwrap();
        let path =
            "/tasks/cancel?indexUids=movies,test&statuses=equeued&types=documentDeletion&uids=1";

        let mock_res = s.mock("POST", path).with_status(200).create_async().await;

        let mut query = TasksCancelQuery::new(&client);
        let _ = query
            .with_index_uids(["movies", "test"])
            .with_statuses(["equeued"])
            .with_types(["documentDeletion"])
            .with_uids([&1])
            .execute()
            .await;

        mock_res.assert_async().await;

        Ok(())
    }

    #[meilisearch_test]
    async fn test_delete_tasks_with_params() -> Result<(), Error> {
        let mut s = mockito::Server::new_async().await;
        let mock_server_url = s.url();
        let client = Client::new(mock_server_url, Some("masterKey")).unwrap();
        let path = "/tasks?indexUids=movies,test&statuses=equeued&types=documentDeletion&uids=1";

        let mock_res = s.mock("DELETE", path).with_status(200).create_async().await;

        let mut query = TasksDeleteQuery::new(&client);
        query
            .with_index_uids(["movies", "test"])
            .with_statuses(["equeued"])
            .with_types(["documentDeletion"])
            .with_uids([&1]);

        let _ = client.delete_tasks_with(&query).await;

        mock_res.assert_async().await;

        Ok(())
    }

    #[meilisearch_test]
    async fn test_delete_tasks_with_params_execute() -> Result<(), Error> {
        let mut s = mockito::Server::new_async().await;
        let mock_server_url = s.url();
        let client = Client::new(mock_server_url, Some("masterKey")).unwrap();
        let path = "/tasks?indexUids=movies,test&statuses=equeued&types=documentDeletion&uids=1";

        let mock_res = s.mock("DELETE", path).with_status(200).create_async().await;

        let mut query = TasksDeleteQuery::new(&client);
        let _ = query
            .with_index_uids(["movies", "test"])
            .with_statuses(["equeued"])
            .with_types(["documentDeletion"])
            .with_uids([&1])
            .execute()
            .await;

        mock_res.assert_async().await;

        Ok(())
    }
}