From 3d195369be77043ff13c65ae12557ffb145a1c1d Mon Sep 17 00:00:00 2001
From: meili-bot <74670311+meili-bot@users.noreply.github.com>
Date: Mon, 7 Nov 2022 18:08:11 +0100
Subject: [PATCH 01/13] Update README.md

---
 README.md | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/README.md b/README.md
index bae2b403..1c7faf89 100644
--- a/README.md
+++ b/README.md
@@ -253,7 +253,7 @@ WARNING: `meilisearch-sdk` will panic if no Window is available (ex: Web extensi
 ## 🤖 Compatibility with Meilisearch
-This package only guarantees the compatibility with the [version v0.29.0 of Meilisearch](https://github.com/meilisearch/meilisearch/releases/tag/v0.29.0).
+This package only guarantees the compatibility with the [version v0.30.0 of Meilisearch](https://github.com/meilisearch/meilisearch/releases/tag/v0.30.0).
 ## ⚙️ Contributing

From 65c2527e9142e332e1793b93d7d8219730df0b39 Mon Sep 17 00:00:00 2001
From: meili-bot <74670311+meili-bot@users.noreply.github.com>
Date: Mon, 7 Nov 2022 18:08:11 +0100
Subject: [PATCH 02/13] Update README.tpl

---
 README.tpl | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/README.tpl b/README.tpl
index f8bfbb75..94b68add 100644
--- a/README.tpl
+++ b/README.tpl
@@ -108,7 +108,7 @@ WARNING: `meilisearch-sdk` will panic if no Window is available (ex: Web extensi
 ## 🤖 Compatibility with Meilisearch
-This package only guarantees the compatibility with the [version v0.29.0 of Meilisearch](https://github.com/meilisearch/meilisearch/releases/tag/v0.29.0).
+This package only guarantees the compatibility with the [version v0.30.0 of Meilisearch](https://github.com/meilisearch/meilisearch/releases/tag/v0.30.0).
 ## ⚙️ Contributing
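The two README patches above only move the guaranteed compatibility target to Meilisearch v0.30.0. A quick way to check what a running instance actually reports is the crate's existing version endpoint wrapper; this is a minimal sketch only, the URL and key are placeholders, and the `pkg_version` field name is an assumption about the returned `Version` struct:

```rust
use meilisearch_sdk::client::Client;

// Sketch: confirm the server this SDK build talks to is really v0.30.0.
// `get_version()` wraps GET /version; `pkg_version` is assumed to be the
// field carrying the server version string.
async fn check_compatibility() {
    let client = Client::new("http://localhost:7700", "masterKey");
    let version = client.get_version().await.unwrap();
    println!("Meilisearch reports version {}", version.pkg_version);
}
```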
From 1bb056cfdc03315e835c6e4aff0115b11e5558dd Mon Sep 17 00:00:00 2001
From: Charlotte Vermandel
Date: Wed, 9 Nov 2022 18:35:58 +0100
Subject: [PATCH 03/13] Add filters in tasksQuery

---
 src/tasks.rs | 22 ++++++++++++++++++++++
 1 file changed, 22 insertions(+)

diff --git a/src/tasks.rs b/src/tasks.rs
index aab72798..3c45f1ef 100644
--- a/src/tasks.rs
+++ b/src/tasks.rs
@@ -413,6 +413,28 @@ pub struct TasksQuery<'a> {
     // Types array to only retrieve the tasks with these [TaskType].
     #[serde(skip_serializing_if = "Option::is_none", rename = "type")]
     pub task_type: Option<Vec<&'a str>>,
+    // Uids of the tasks to retrieve
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub uid: Option<Vec<&'a str>>,
+    // Date to retrieve all tasks that were enqueued before it.
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub beforeEnqueuedAt: Option<OffsetDateTime>,
+    // Date to retrieve all tasks that were enqueued after it.
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub afterEnqueuedAt: Option<OffsetDateTime>,
+    // Date to retrieve all tasks that were started before it.
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub beforeStartedAt: Option<OffsetDateTime>,
+    // Date to retrieve all tasks that were started before it.
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub afterStatedAt: Option<OffsetDateTime>,
+    // Date to retrieve all tasks that were finished before it.
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub beforeFinishedAt: Option<OffsetDateTime>,
+    // Date to retrieve all tasks that were finished after it.
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub afterFinishedAt: Option<OffsetDateTime>,
+
     // Maximum number of tasks to return
     #[serde(skip_serializing_if = "Option::is_none")]
     pub limit: Option<u32>,

From cb8f4163e46e8027f443e5dd6626a166bb6590cf Mon Sep 17 00:00:00 2001
From: Charlotte Vermandel
Date: Thu, 10 Nov 2022 14:39:49 +0100
Subject: [PATCH 04/13] Add new error codes

---
 src/errors.rs | 3 +++
 1 file changed, 3 insertions(+)

diff --git a/src/errors.rs b/src/errors.rs
index cde83248..adf13730 100644
--- a/src/errors.rs
+++ b/src/errors.rs
@@ -156,6 +156,9 @@ pub enum ErrorCode {
     InvalidApiKeyIndexes,
     InvalidApiKeyExpiresAt,
     ApiKeyNotFound,
+    InvalidTaskUid,
+    InvalidTaskDate,
+    MissingTaskFilter,
     /// That's unexpected. Please open a GitHub issue after ensuring you are
     /// using the supported version of the Meilisearch server.
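Every filter field added in PATCH 03 carries `skip_serializing_if = "Option::is_none"`, which is what keeps unset filters out of the request entirely. A stand-alone sketch of that serde behaviour, using a hypothetical trimmed-down struct and serde_json purely to make the effect visible (the SDK itself turns the query into URL parameters):

```rust
use serde::Serialize;

// Hypothetical struct mirroring the attribute usage in the patch: a `None`
// field is skipped, not serialized as `null`.
#[derive(Serialize)]
struct Filters {
    #[serde(skip_serializing_if = "Option::is_none")]
    uid: Option<Vec<u32>>,
    #[serde(skip_serializing_if = "Option::is_none", rename = "type")]
    task_type: Option<Vec<String>>,
}

fn main() {
    let only_uid = Filters { uid: Some(vec![1, 2]), task_type: None };
    // Prints {"uid":[1,2]} — the `type` key is absent rather than null.
    println!("{}", serde_json::to_string(&only_uid).unwrap());
}
```

This sketch assumes serde (with the derive feature) and serde_json are available as dependencies.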
From e32f1b5ea0ff2210d8b3814b10a1ef85389c6e3a Mon Sep 17 00:00:00 2001
From: Charlotte Vermandel
Date: Thu, 10 Nov 2022 15:02:47 +0100
Subject: [PATCH 05/13] Add builder methods and tests

---
 src/tasks.rs | 169 ++++++++++++++++++++++++++++++++++++++++++++++-----
 1 file changed, 153 insertions(+), 16 deletions(-)

diff --git a/src/tasks.rs b/src/tasks.rs
index 3c45f1ef..230d32ca 100644
--- a/src/tasks.rs
+++ b/src/tasks.rs
@@ -415,26 +415,43 @@ pub struct TasksQuery<'a> {
     pub task_type: Option<Vec<&'a str>>,
     // Uids of the tasks to retrieve
     #[serde(skip_serializing_if = "Option::is_none")]
-    pub uid: Option<Vec<&'a str>>,
+    pub uid: Option<Vec<&'a u32>>,
     // Date to retrieve all tasks that were enqueued before it.
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub beforeEnqueuedAt: Option<OffsetDateTime>,
+    #[serde(
+        skip_serializing_if = "Option::is_none",
+        serialize_with = "time::serde::rfc3339::option::serialize"
+    )]
+    pub before_enqueued_at: Option<OffsetDateTime>,
     // Date to retrieve all tasks that were enqueued after it.
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub afterEnqueuedAt: Option<OffsetDateTime>,
+    #[serde(
+        skip_serializing_if = "Option::is_none",
+        serialize_with = "time::serde::rfc3339::option::serialize"
+    )]
+    pub after_enqueued_at: Option<OffsetDateTime>,
     // Date to retrieve all tasks that were started before it.
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub beforeStartedAt: Option<OffsetDateTime>,
+    #[serde(
+        skip_serializing_if = "Option::is_none",
+        serialize_with = "time::serde::rfc3339::option::serialize"
+    )]
+    pub before_started_at: Option<OffsetDateTime>,
     // Date to retrieve all tasks that were started before it.
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub afterStatedAt: Option<OffsetDateTime>,
+    #[serde(
+        skip_serializing_if = "Option::is_none",
+        serialize_with = "time::serde::rfc3339::option::serialize"
+    )]
+    pub after_started_at: Option<OffsetDateTime>,
     // Date to retrieve all tasks that were finished before it.
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub beforeFinishedAt: Option<OffsetDateTime>,
+    #[serde(
+        skip_serializing_if = "Option::is_none",
+        serialize_with = "time::serde::rfc3339::option::serialize"
+    )]
+    pub before_finished_at: Option<OffsetDateTime>,
     // Date to retrieve all tasks that were finished after it.
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub afterFinishedAt: Option<OffsetDateTime>,
-
+    #[serde(
+        skip_serializing_if = "Option::is_none",
+        serialize_with = "time::serde::rfc3339::option::serialize"
+    )]
+    pub after_finished_at: Option<OffsetDateTime>,
     // Maximum number of tasks to return
     #[serde(skip_serializing_if = "Option::is_none")]
     pub limit: Option<u32>,
@@ -453,6 +470,13 @@ impl<'a> TasksQuery<'a> {
             task_type: None,
             limit: None,
             from: None,
+            uid: None,
+            before_enqueued_at: None,
+            after_enqueued_at: None,
+            before_started_at: None,
+            after_started_at: None,
+            before_finished_at: None,
+            after_finished_at: None,
         }
     }
     pub fn with_index_uid<'b>(
@@ -476,6 +500,55 @@ impl<'a> TasksQuery<'a> {
         self.task_type = Some(task_type.into_iter().collect());
         self
     }
+    pub fn with_uid<'b>(
+        &'b mut self,
+        index_uid: impl IntoIterator<Item = &'a u32>,
+    ) -> &'b mut TasksQuery<'a> {
+        self.uid = Some(index_uid.into_iter().collect());
+        self
+    }
+    pub fn with_before_enqueued_at<'b>(
+        &'b mut self,
+        before_enqueued_at: &'a OffsetDateTime,
+    ) -> &'b mut TasksQuery<'a> {
+        self.before_enqueued_at = Some(before_enqueued_at.clone());
+        self
+    }
+    pub fn with_after_enqueued_at<'b>(
+        &'b mut self,
+        after_enqueued_at: &'a OffsetDateTime,
+    ) -> &'b mut TasksQuery<'a> {
+        self.after_enqueued_at = Some(after_enqueued_at.clone());
+        self
+    }
+    pub fn with_before_started_at<'b>(
+        &'b mut self,
+        before_started_at: &'a OffsetDateTime,
+    ) -> &'b mut TasksQuery<'a> {
+        self.before_started_at = Some(before_started_at.clone());
+        self
+    }
+    pub fn with_after_started_at<'b>(
+        &'b mut self,
+        after_started_at: &'a OffsetDateTime,
+    ) -> &'b mut TasksQuery<'a> {
+        self.after_started_at = Some(after_started_at.clone());
+        self
+    }
+    pub fn with_before_finished_at<'b>(
+        &'b mut self,
+        before_finished_at: &'a OffsetDateTime,
+    ) -> &'b mut TasksQuery<'a> {
+        self.before_finished_at = Some(before_finished_at.clone());
+        self
+    }
+    pub fn with_after_finished_at<'b>(
+        &'b mut self,
+        after_finished_at: &'a OffsetDateTime,
+    ) -> &'b mut TasksQuery<'a> {
+        self.after_finished_at = Some(after_finished_at.clone());
+        self
+    }
     pub fn with_limit<'b>(&'b mut self, limit: u32) -> &'b mut TasksQuery<'a> {
         self.limit = Some(limit);
         self
@@ -662,7 +735,7 @@ mod test {
         let mock_server_url = &mockito::server_url();
         let client = Client::new(mock_server_url, "masterKey");
         let path =
-            "/tasks?indexUid=movies,test&status=equeued&type=documentDeletion&limit=0&from=1";
+            "/tasks?indexUid=movies,test&status=equeued&type=documentDeletion&uid=1&limit=0&from=1";

         let mock_res = mock("GET", path).with_status(200).create();

         let mut query = TasksQuery::new(&client);
         query
             .with_index_uid(["movies", "test"])
             .with_status(["equeued"])
             .with_type(["documentDeletion"])
             .with_from(1)
-            .with_limit(0);
+            .with_limit(0)
+            .with_uid([&1]);

         let _ = client.get_tasks_with(&query).await;

         mock_res.assert();
         Ok(())
     }
+
+    #[meilisearch_test]
+    async fn test_get_tasks_with_date_params() -> Result<(), Error> {
+        let mock_server_url = &mockito::server_url();
+        let client = Client::new(mock_server_url, "masterKey");
+        let path = "/tasks?\
+            beforeEnqueuedAt=2022-02-03T13%3A02%3A38.369634Z\
+            &afterEnqueuedAt=2023-02-03T13%3A02%3A38.369634Z\
+            &beforeStartedAt=2024-02-03T13%3A02%3A38.369634Z\
+            &afterStartedAt=2025-02-03T13%3A02%3A38.369634Z\
+            &beforeFinishedAt=2026-02-03T13%3A02%3A38.369634Z\
+            &afterFinishedAt=2027-02-03T13%3A02%3A38.369634Z";
+
+        let mock_res = mock("GET", path).with_status(200).create();
+
+        let before_enqueued_at = OffsetDateTime::parse(
+            "2022-02-03T13:02:38.369634Z",
+            &::time::format_description::well_known::Rfc3339,
+        )
+        .unwrap();
+        let after_enqueued_at = OffsetDateTime::parse(
+            "2023-02-03T13:02:38.369634Z",
+            &::time::format_description::well_known::Rfc3339,
+        )
+        .unwrap();
+        let before_started_at = OffsetDateTime::parse(
+            "2024-02-03T13:02:38.369634Z",
+            &::time::format_description::well_known::Rfc3339,
+        )
+        .unwrap();
+
+        let after_started_at = OffsetDateTime::parse(
+            "2025-02-03T13:02:38.369634Z",
+            &::time::format_description::well_known::Rfc3339,
+        )
+        .unwrap();
+
+        let before_finished_at = OffsetDateTime::parse(
+            "2026-02-03T13:02:38.369634Z",
+            &::time::format_description::well_known::Rfc3339,
+        )
+        .unwrap();
+
+        let after_finished_at = OffsetDateTime::parse(
+            "2027-02-03T13:02:38.369634Z",
+            &::time::format_description::well_known::Rfc3339,
+        )
+        .unwrap();
+
+        let mut query = TasksQuery::new(&client);
+        query
+            .with_before_enqueued_at(&before_enqueued_at)
+            .with_after_enqueued_at(&after_enqueued_at)
+            .with_before_started_at(&before_started_at)
+            .with_after_started_at(&after_started_at)
+            .with_before_finished_at(&before_finished_at)
+            .with_after_finished_at(&after_finished_at);
+
+        let _ = client.get_tasks_with(&query).await;
+
+        mock_res.assert();
+        Ok(())
+    }
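PATCH 05 is the core of the series: snake_case field names, RFC 3339 serialization for the date filters, one `with_*` builder per filter, and mock-based tests. A hedged usage sketch against the API as it stands after this patch (builder names are still singular here and are pluralized later in the series; the client is assumed to be constructed elsewhere):

```rust
use meilisearch_sdk::{client::Client, errors::Error, tasks::TasksQuery};
use time::{Duration, OffsetDateTime};

// Sketch only: list tasks that failed in the last hour, using the new
// date-filter builders added in this patch.
async fn recent_failures(client: &Client) -> Result<usize, Error> {
    let one_hour_ago = OffsetDateTime::now_utc() - Duration::hours(1);

    let mut query = TasksQuery::new(client);
    query
        .with_status(["failed"])               // renamed to `with_statuses` later in the series
        .with_after_enqueued_at(&one_hour_ago) // takes a reference, stores the value
        .with_limit(10);

    let tasks = client.get_tasks_with(&query).await?;
    Ok(tasks.results.len())
}
```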
From 9e4f7374b144068724a2564018b29725b506659c Mon Sep 17 00:00:00 2001
From: Charlotte Vermandel
Date: Thu, 10 Nov 2022 15:32:55 +0100
Subject: [PATCH 06/13] Fix clippy suggestions

---
 src/tasks.rs | 12 ++++++------
 1 file changed, 6 insertions(+), 6 deletions(-)

diff --git a/src/tasks.rs b/src/tasks.rs
index 230d32ca..d840445a 100644
--- a/src/tasks.rs
+++ b/src/tasks.rs
@@ -511,42 +511,42 @@ impl<'a> TasksQuery<'a> {
         &'b mut self,
         before_enqueued_at: &'a OffsetDateTime,
     ) -> &'b mut TasksQuery<'a> {
-        self.before_enqueued_at = Some(before_enqueued_at.clone());
+        self.before_enqueued_at = Some(*before_enqueued_at);
         self
     }
     pub fn with_after_enqueued_at<'b>(
         &'b mut self,
         after_enqueued_at: &'a OffsetDateTime,
     ) -> &'b mut TasksQuery<'a> {
-        self.after_enqueued_at = Some(after_enqueued_at.clone());
+        self.after_enqueued_at = Some(*after_enqueued_at);
         self
     }
     pub fn with_before_started_at<'b>(
         &'b mut self,
         before_started_at: &'a OffsetDateTime,
     ) -> &'b mut TasksQuery<'a> {
-        self.before_started_at = Some(before_started_at.clone());
+        self.before_started_at = Some(*before_started_at);
         self
     }
     pub fn with_after_started_at<'b>(
         &'b mut self,
         after_started_at: &'a OffsetDateTime,
     ) -> &'b mut TasksQuery<'a> {
-        self.after_started_at = Some(after_started_at.clone());
+        self.after_started_at = Some(*after_started_at);
         self
     }
     pub fn with_before_finished_at<'b>(
         &'b mut self,
         before_finished_at: &'a OffsetDateTime,
     ) -> &'b mut TasksQuery<'a> {
-        self.before_finished_at = Some(before_finished_at.clone());
+        self.before_finished_at = Some(*before_finished_at);
         self
     }
     pub fn with_after_finished_at<'b>(
         &'b mut self,
         after_finished_at: &'a OffsetDateTime,
     ) -> &'b mut TasksQuery<'a> {
-        self.after_finished_at = Some(after_finished_at.clone());
+        self.after_finished_at = Some(*after_finished_at);
         self
     }
     pub fn with_limit<'b>(&'b mut self, limit: u32) -> &'b mut TasksQuery<'a> {

From 5b172e9b09c95077e0fa2af348eea72c243b8b9a Mon Sep 17 00:00:00 2001
From: cvermand <33010418+bidoubiwa@users.noreply.github.com>
Date: Thu, 10 Nov 2022 16:52:44 +0100
Subject: [PATCH 07/13] Update src/tasks.rs
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

Co-authored-by: Amélie
---
 src/tasks.rs | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/tasks.rs b/src/tasks.rs
index d840445a..78150990 100644
--- a/src/tasks.rs
+++ b/src/tasks.rs
@@ -434,7 +434,7 @@ pub struct TasksQuery<'a> {
         serialize_with = "time::serde::rfc3339::option::serialize"
     )]
     pub before_started_at: Option<OffsetDateTime>,
-    // Date to retrieve all tasks that were started before it.
+    // Date to retrieve all tasks that were started after it.
     #[serde(
         skip_serializing_if = "Option::is_none",
         serialize_with = "time::serde::rfc3339::option::serialize"
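PATCH 06 only swaps `.clone()` for a dereference: `OffsetDateTime` is `Copy`, so clippy's `clone_on_copy` lint flags the clone as noise. A minimal stand-alone illustration of the pattern (the function name is hypothetical):

```rust
use time::OffsetDateTime;

// `OffsetDateTime` implements `Copy`, so a plain dereference copies the value.
fn remember(ts: &OffsetDateTime) -> Option<OffsetDateTime> {
    // The form the patch switches to:
    Some(*ts)
    // The previous form, `Some(ts.clone())`, also compiles but triggers
    // the `clippy::clone_on_copy` lint.
}
```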
From beb9101135335c915a2c5cba7debb652d9cc59db Mon Sep 17 00:00:00 2001
From: Charlotte Vermandel
Date: Thu, 10 Nov 2022 18:22:22 +0100
Subject: [PATCH 08/13] Fix flacky tests

---
 src/client.rs  | 4 ++--
 src/indexes.rs | 3 ++-
 2 files changed, 4 insertions(+), 3 deletions(-)

diff --git a/src/client.rs b/src/client.rs
index 0fcacf21..394a1c88 100644
--- a/src/client.rs
+++ b/src/client.rs
@@ -930,7 +930,7 @@ mod tests {
     #[meilisearch_test]
     async fn test_get_tasks(client: Client) {
         let tasks = client.get_tasks().await.unwrap();
-        assert!(tasks.results.len() >= 2);
+        assert!(tasks.limit == 20);
     }
     #[meilisearch_test]
@@ -938,7 +938,7 @@ mod tests {
         let query = TasksQuery::new(&client);
         let tasks = client.get_tasks_with(&query).await.unwrap();
-        assert!(tasks.results.len() >= 2);
+        assert!(tasks.limit == 20);
     }
     #[meilisearch_test]
diff --git a/src/indexes.rs b/src/indexes.rs
index 28d8574c..eda661a2 100644
--- a/src/indexes.rs
+++ b/src/indexes.rs
@@ -436,7 +436,8 @@ impl Index {
     ///
     /// # futures::executor::block_on(async move {
     /// let client = Client::new(MEILISEARCH_URL, MEILISEARCH_API_KEY);
-    /// let movie_index = client.index("get_documents");
+    ///
+    /// let movie_index = client.index("get_documents_with");
     ///
     /// # movie_index.add_or_replace(&[Movie{name:String::from("Interstellar"), description:String::from("Interstellar chronicles the adventures of a group of explorers who make use of a newly discovered wormhole to surpass the limitations on human space travel and conquer the vast distances involved in an interstellar voyage.")}], Some("name")).await.unwrap().wait_for_completion(&client, None, None).await.unwrap();
     ///

From afa8d17feba5d24d98b294e0a0430b8d3e0cbff7 Mon Sep 17 00:00:00 2001
From: Charlotte Vermandel
Date: Fri, 11 Nov 2022 19:24:55 +0100
Subject: [PATCH 09/13] Rename index_uids and task types

---
 src/client.rs | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/client.rs b/src/client.rs
index 394a1c88..c307a8ad 100644
--- a/src/client.rs
+++ b/src/client.rs
@@ -744,7 +744,7 @@ impl Client {
     /// # let client = client::Client::new(MEILISEARCH_URL, MEILISEARCH_API_KEY);
     ///
     /// let mut query = tasks::TasksQuery::new(&client);
-    /// query.with_index_uid(["get_tasks_with"]);
+    /// query.with_index_uids(["get_tasks_with"]);
     /// let tasks = client.get_tasks_with(&query).await.unwrap();
     /// # });
     /// ```
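The test changes in PATCH 08 trade an assertion on how many tasks exist, which depends on whatever other tests have already run against the shared instance, for an assertion on the response's `limit` field, whose server-side default for `GET /tasks` is stable. Roughly the idea, as a sketch:

```rust
use meilisearch_sdk::client::Client;

// Sketch of the reasoning behind the flaky-test fix: assert on a value the
// server always echoes back (the default `limit` for GET /tasks) instead of
// on a result count that varies with global test state.
async fn stable_assertion(client: &Client) {
    let tasks = client.get_tasks().await.unwrap();
    assert_eq!(tasks.limit, 20);          // deterministic: server default
    // assert!(tasks.results.len() >= 2); // flaky: depends on earlier tests
}
```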
From 26d9289e3f63382408049f4910ce59c00c2ca42d Mon Sep 17 00:00:00 2001
From: Charlotte Vermandel
Date: Fri, 11 Nov 2022 19:33:45 +0100
Subject: [PATCH 10/13] Rename uid, status and types filters to plural

---
 src/indexes.rs |  6 +++---
 src/tasks.rs   | 66 +++++++++++++++++++++++++-------------------------
 2 files changed, 36 insertions(+), 36 deletions(-)

diff --git a/src/indexes.rs b/src/indexes.rs
index eda661a2..52d8e49a 100644
--- a/src/indexes.rs
+++ b/src/indexes.rs
@@ -903,7 +903,7 @@ impl Index {
     /// ```
     pub async fn get_tasks(&self) -> Result<TasksResults, Error> {
         let mut query = TasksQuery::new(&self.client);
-        query.with_index_uid([self.uid.as_str()]);
+        query.with_index_uids([self.uid.as_str()]);
         self.client.get_tasks_with(&query).await
     }
@@ -924,7 +924,7 @@ impl Index {
     /// # let index = client.create_index("get_tasks_with", None).await.unwrap().wait_for_completion(&client, None, None).await.unwrap().try_make_index(&client).unwrap();
     ///
     /// let mut query = TasksQuery::new(&client);
-    /// query.with_index_uid(["none_existant"]);
+    /// query.with_index_uids(["none_existant"]);
     /// let tasks = index.get_tasks_with(&query).await.unwrap();
     ///
     /// assert!(tasks.results.len() > 0);
@@ -936,7 +936,7 @@ impl Index {
         tasks_query: &TasksQuery<'_>,
     ) -> Result<TasksResults, Error> {
         let mut query = tasks_query.clone();
-        query.with_index_uid([self.uid.as_str()]);
+        query.with_index_uids([self.uid.as_str()]);
         self.client.get_tasks_with(&query).await
     }
diff --git a/src/tasks.rs b/src/tasks.rs
index 78150990..dea78951 100644
--- a/src/tasks.rs
+++ b/src/tasks.rs
@@ -406,16 +406,16 @@ pub struct TasksQuery<'a> {
     pub client: &'a Client,
     // Index uids array to only retrieve the tasks of the indexes.
     #[serde(skip_serializing_if = "Option::is_none")]
-    pub index_uid: Option<Vec<&'a str>>,
+    pub index_uids: Option<Vec<&'a str>>,
     // Statuses array to only retrieve the tasks with these statuses.
     #[serde(skip_serializing_if = "Option::is_none")]
-    pub status: Option<Vec<&'a str>>,
+    pub statuses: Option<Vec<&'a str>>,
     // Types array to only retrieve the tasks with these [TaskType].
-    #[serde(skip_serializing_if = "Option::is_none", rename = "type")]
-    pub task_type: Option<Vec<&'a str>>,
+    #[serde(skip_serializing_if = "Option::is_none", rename = "types")]
+    pub task_types: Option<Vec<&'a str>>,
     // Uids of the tasks to retrieve
     #[serde(skip_serializing_if = "Option::is_none")]
-    pub uid: Option<Vec<&'a u32>>,
+    pub uids: Option<Vec<&'a u32>>,
     // Date to retrieve all tasks that were enqueued before it.
     #[serde(
         skip_serializing_if = "Option::is_none",
@@ -465,12 +465,12 @@ impl<'a> TasksQuery<'a> {
     pub fn new(client: &'a Client) -> TasksQuery<'a> {
         TasksQuery {
             client,
-            index_uid: None,
-            status: None,
-            task_type: None,
+            index_uids: None,
+            statuses: None,
+            task_types: None,
             limit: None,
             from: None,
-            uid: None,
+            uids: None,
             before_enqueued_at: None,
             after_enqueued_at: None,
             before_started_at: None,
@@ -479,32 +479,32 @@ impl<'a> TasksQuery<'a> {
             after_finished_at: None,
         }
     }
-    pub fn with_index_uid<'b>(
+    pub fn with_index_uids<'b>(
         &'b mut self,
-        index_uid: impl IntoIterator<Item = &'a str>,
+        index_uids: impl IntoIterator<Item = &'a str>,
     ) -> &'b mut TasksQuery<'a> {
-        self.index_uid = Some(index_uid.into_iter().collect());
+        self.index_uids = Some(index_uids.into_iter().collect());
         self
     }
-    pub fn with_status<'b>(
+    pub fn with_statuses<'b>(
        &'b mut self,
-        status: impl IntoIterator<Item = &'a str>,
+        statuses: impl IntoIterator<Item = &'a str>,
    ) -> &'b mut TasksQuery<'a> {
-        self.status = Some(status.into_iter().collect());
+        self.statuses = Some(statuses.into_iter().collect());
         self
     }
-    pub fn with_type<'b>(
+    pub fn with_types<'b>(
         &'b mut self,
-        task_type: impl IntoIterator<Item = &'a str>,
+        task_types: impl IntoIterator<Item = &'a str>,
     ) -> &'b mut TasksQuery<'a> {
-        self.task_type = Some(task_type.into_iter().collect());
+        self.task_types = Some(task_types.into_iter().collect());
         self
     }
-    pub fn with_uid<'b>(
+    pub fn with_uids<'b>(
         &'b mut self,
-        index_uid: impl IntoIterator<Item = &'a u32>,
+        uids: impl IntoIterator<Item = &'a u32>,
     ) -> &'b mut TasksQuery<'a> {
-        self.uid = Some(index_uid.into_iter().collect());
+        self.uids = Some(uids.into_iter().collect());
         self
     }
     pub fn with_before_enqueued_at<'b>(
@@ -735,7 +735,7 @@ mod test {
         let mock_server_url = &mockito::server_url();
         let client = Client::new(mock_server_url, "masterKey");
         let path =
-            "/tasks?indexUid=movies,test&status=equeued&type=documentDeletion&uid=1&limit=0&from=1";
+            "/tasks?indexUids=movies,test&statuses=equeued&types=documentDeletion&uids=1&limit=0&from=1";

         let mock_res = mock("GET", path).with_status(200).create();

         let mut query = TasksQuery::new(&client);
         query
-            .with_index_uid(["movies", "test"])
-            .with_status(["equeued"])
-            .with_type(["documentDeletion"])
+            .with_index_uids(["movies", "test"])
+            .with_statuses(["equeued"])
+            .with_types(["documentDeletion"])
             .with_from(1)
             .with_limit(0)
-            .with_uid([&1]);
+            .with_uids([&1]);

         let _ = client.get_tasks_with(&query).await;

         mock_res.assert();
         Ok(())
     }
@@ -821,15 +821,15 @@ mod test {
     async fn test_get_tasks_on_struct_with_params() -> Result<(), Error> {
         let mock_server_url = &mockito::server_url();
         let client = Client::new(mock_server_url, "masterKey");
-        let path = "/tasks?indexUid=movies,test&status=equeued&type=documentDeletion";
+        let path = "/tasks?indexUids=movies,test&statuses=equeued&types=documentDeletion";

         let mock_res = mock("GET", path).with_status(200).create();

         let mut query = TasksQuery::new(&client);
         let _ = query
-            .with_index_uid(["movies", "test"])
-            .with_status(["equeued"])
-            .with_type(["documentDeletion"])
+            .with_index_uids(["movies", "test"])
+            .with_statuses(["equeued"])
+            .with_types(["documentDeletion"])
             .execute()
             .await;

         mock_res.assert();
         Ok(())
     }

     #[meilisearch_test]
-    async fn test_get_tasks_with_none_existant_index_uid(client: Client) -> Result<(), Error> {
+    async fn test_get_tasks_with_none_existant_index_uids(client: Client) -> Result<(), Error> {
         let mut query = TasksQuery::new(&client);
-        query.with_index_uid(["no_name"]);
+        query.with_index_uids(["no_name"]);
         let tasks = client.get_tasks_with(&query).await.unwrap();

         assert_eq!(tasks.results.len(), 0);
@@ -851,7 +851,7 @@ mod test {
     #[meilisearch_test]
     async fn test_get_tasks_with_execute(client: Client) -> Result<(), Error> {
         let tasks = TasksQuery::new(&client)
-            .with_index_uid(["no_name"])
+            .with_index_uids(["no_name"])
             .execute()
             .await
             .unwrap();

From ddb05960a48a718eceea92070d1ce211d87d9fe0 Mon Sep 17 00:00:00 2001
From: Charlotte Vermandel
Date: Fri, 11 Nov 2022 19:46:13 +0100
Subject: [PATCH 11/13] Add new error codes

---
 .code-samples.meilisearch.yaml | 2 +-
 src/errors.rs                  | 7 +++++--
 2 files changed, 6 insertions(+), 3 deletions(-)

diff --git a/.code-samples.meilisearch.yaml b/.code-samples.meilisearch.yaml
index d4ff9f35..6da9b3a7 100644
--- a/.code-samples.meilisearch.yaml
+++ b/.code-samples.meilisearch.yaml
@@ -117,7 +117,7 @@ get_all_tasks_filtering_1: |-
 get_all_tasks_filtering_2: |-
   let mut query = TasksQuery::new(&client)
     .with_status(["succeeded", "failed"])
-    .with_type(["documentAdditionOrUpdate"])
+    .with_types(["documentAdditionOrUpdate"])
     .execute()
     .await
     .unwrap();
diff --git a/src/errors.rs b/src/errors.rs
index adf13730..506c9ce5 100644
--- a/src/errors.rs
+++ b/src/errors.rs
@@ -156,6 +156,9 @@ pub enum ErrorCode {
     InvalidApiKeyIndexes,
     InvalidApiKeyExpiresAt,
     ApiKeyNotFound,
-    InvalidTaskUid,
-    InvalidTaskDate,
+    InvalidTaskTypesFilter,
+    InvalidTaskStatusesFilter,
+    InvalidTaskCanceledByFilter,
+    InvalidTaskUidsFilter,
+    InvalidTaskDateFilter,
     MissingTaskFilter,
     /// That's unexpected. Please open a GitHub issue after ensuring you are
From d3bb55b1d1cf3573ad9eeabaebfdd0aa881b6f85 Mon Sep 17 00:00:00 2001
From: Charlotte Vermandel
Date: Sun, 13 Nov 2022 23:16:14 +0100
Subject: [PATCH 12/13] Update error code to plural form

---
 src/errors.rs | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/errors.rs b/src/errors.rs
index 506c9ce5..40555289 100644
--- a/src/errors.rs
+++ b/src/errors.rs
@@ -161,7 +161,7 @@ pub enum ErrorCode {
     InvalidTaskCanceledByFilter,
     InvalidTaskUidsFilter,
     InvalidTaskDateFilter,
-    MissingTaskFilter,
+    MissingTaskFilters,
     /// That's unexpected. Please open a GitHub issue after ensuring you are
     /// using the supported version of the Meilisearch server.

From 05f1d5ed2e187435d8115cbf4c7e5f3365f912f6 Mon Sep 17 00:00:00 2001
From: Charlotte Vermandel
Date: Sun, 13 Nov 2022 23:39:20 +0100
Subject: [PATCH 13/13] Update code samples

---
 .code-samples.meilisearch.yaml | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/.code-samples.meilisearch.yaml b/.code-samples.meilisearch.yaml
index 6da9b3a7..637958d8 100644
--- a/.code-samples.meilisearch.yaml
+++ b/.code-samples.meilisearch.yaml
@@ -110,13 +110,13 @@ get_all_tasks_1: |-
     .unwrap();
 get_all_tasks_filtering_1: |-
   let mut query = TasksQuery::new(&client)
-    .with_index_uid(["movies"])
+    .with_index_uids(["movies"])
     .execute()
     .await
     .unwrap();
 get_all_tasks_filtering_2: |-
   let mut query = TasksQuery::new(&client)
-    .with_status(["succeeded", "failed"])
+    .with_statuses(["succeeded", "failed"])
     .with_types(["documentAdditionOrUpdate"])
     .execute()
     .await
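PATCH 12 settles the last error code on its plural name and PATCH 13 aligns the remaining code samples. A closing sketch matching on the final variant names from this series; the descriptions are illustrative wording only, not taken from the Meilisearch documentation:

```rust
use meilisearch_sdk::errors::ErrorCode;

// Final variant names after PATCH 11 and PATCH 12.
fn describe(code: &ErrorCode) -> &'static str {
    match code {
        ErrorCode::InvalidTaskTypesFilter => "`types` contains an unknown task type",
        ErrorCode::InvalidTaskStatusesFilter => "`statuses` contains an unknown task status",
        ErrorCode::InvalidTaskCanceledByFilter => "`canceledBy` is not a list of task uids",
        ErrorCode::InvalidTaskUidsFilter => "`uids` is not a list of task uids",
        ErrorCode::InvalidTaskDateFilter => "a date filter is not a valid RFC 3339 date",
        ErrorCode::MissingTaskFilters => "task cancelation or deletion needs at least one filter",
        _ => "unrelated error",
    }
}
```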