From 0ea2f68d988bd3a098aa87c79d01442caec2b75e Mon Sep 17 00:00:00 2001 From: Panagiotis Karatakis Date: Mon, 3 Oct 2022 17:51:54 +0300 Subject: [PATCH 01/11] WIP: Add cursor pagination * TODO: query before for more * TODO: query after for more --- derive/src/root_query.rs | 338 ++++++++++++++++++++++++++- examples/sqlite/Cargo.toml | 4 +- examples/sqlite/tests/query_tests.rs | 66 ++++++ generator/src/_Cargo.toml | 4 +- 4 files changed, 407 insertions(+), 5 deletions(-) diff --git a/derive/src/root_query.rs b/derive/src/root_query.rs index 6971f446..b95b3ef0 100644 --- a/derive/src/root_query.rs +++ b/derive/src/root_query.rs @@ -40,7 +40,7 @@ pub fn root_query_fn( .iter() .map(|path| { let name = format_ident!("{}", path.clone().into_iter().last().unwrap().to_string()); - + let name_cursor = format_ident!("{}_cursor", path.clone().into_iter().last().unwrap().to_string()); quote!{ pub async fn #name<'a>( @@ -76,7 +76,93 @@ pub fn root_query_fn( current: 1, } } + } + + pub async fn #name_cursor<'a>( + &self, + ctx: &async_graphql::Context<'a>, + filters: Option<#path::Filter>, + cursor: CursorPaginationInput, + order_by: Option<#path::OrderBy>, + ) -> async_graphql::types::connection::Connection { + use sea_orm::prelude::*; + use sea_orm::Iterable; + use itertools::Itertools; + use async_graphql::types::connection::CursorType; + + println!("cursor_filters: {:?}", filters); + + let db: &crate::DatabaseConnection = ctx.data::().unwrap(); + let stmt = #path::Entity::find() + .filter(#path::filter_recursive(filters)); + + let stmt = #path::order_by(stmt, order_by); + + let mut stmt = if #path::PrimaryKey::iter().len() == 1 { + let column = #path::PrimaryKey::iter().map(|variant| variant.into_column()).collect::>()[0]; + stmt.cursor_by(column) + } else if #path::PrimaryKey::iter().len() == 2 { + let columns = #path::PrimaryKey::iter().map(|variant| variant.into_column()).collect_tuple::<(#path::Column, #path::Column)>().unwrap(); + stmt.cursor_by(columns) + } else if #path::PrimaryKey::iter().len() == 3 { + let columns = #path::PrimaryKey::iter().map(|variant| variant.into_column()).collect_tuple::<(#path::Column, #path::Column, #path::Column)>().unwrap(); + stmt.cursor_by(columns) + } else { + panic!("seaography does not support cursors with size greater than 3") + }; + + if let Some(cursor_string) = cursor.cursor { + let values = CursorValues::decode_cursor(cursor_string.as_str()).unwrap(); + + if values.0.len() == 1 { + let value = values.0[0].clone(); + stmt.after(value); + } else if values.0.len() == 2 { + let values = values.0.into_iter().collect_tuple::<(sea_orm::Value, sea_orm::Value)>().unwrap(); + stmt.after(values); + } else if values.0.len() == 3 { + let values = values.0.into_iter().collect_tuple::<(sea_orm::Value, sea_orm::Value, sea_orm::Value)>().unwrap(); + stmt.after(values); + } else { + panic!("seaography does not support cursors values with size greater than 3"); + } + } + + let mut stmt = stmt.first(cursor.limit); + + let data = stmt + .all(db) + .await + .unwrap(); + + let edges: Vec> = data + .into_iter() + .map(|node| { + let values: Vec = #path::PrimaryKey::iter() + .map(|variant| { + node.get(variant.into_column()) + }) + .collect(); + + let cursor_string = CursorValues(values).encode_cursor(); + + async_graphql::types::connection::Edge::new(cursor_string, node) + }) + .collect(); + + let mut result = async_graphql::types::connection::Connection::< + String, + #path::Model, + async_graphql::types::connection::EmptyFields, + async_graphql::types::connection::EmptyFields 
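// The two `false` arguments below are placeholders for `has_previous_page` /
// `has_next_page` (see the TODOs). A later commit in this series replaces them
// with probe queries that re-run the same cursored select with
// `.before(first row's key)` and `.after(last row's key)` and check whether
// anything comes back.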
+ >::new( + false, // has_previous_page: TODO test with cursor "before" + false // has_next_page: TODO test with cursor "after" and last cursor + ); + result.edges.extend(edges); + + result } } }) @@ -89,6 +175,12 @@ pub fn root_query_fn( pub page: usize, } + #[derive(Debug, async_graphql::InputObject)] + pub struct CursorPaginationInput { + pub cursor: Option, + pub limit: u64, + } + #[derive(Debug, async_graphql::SimpleObject)] #(#names)* pub struct PaginatedResult { @@ -97,6 +189,250 @@ pub fn root_query_fn( pub current: usize, } + #[derive(Debug)] + pub enum DecodeMode { + Type, + Length, + ColonSkip, + Data, + } + + #[derive(Debug)] + pub struct CursorValues(pub Vec); + + impl async_graphql::types::connection::CursorType for CursorValues { + type Error = String; + + fn decode_cursor(s: &str) -> Result { + let chars = s.chars(); + + let mut values: Vec = vec![]; + + let mut type_indicator = String::new(); + let mut length_indicator = String::new(); + let mut data_buffer = String::new(); + let mut length = -1; + + let mut mode: DecodeMode = DecodeMode::Type; + for char in chars { + match mode { + DecodeMode::Type => { + if char.eq(&'[') { + mode = DecodeMode::Length; + } else if char.eq(&',') { + // SKIP + } else { + type_indicator.push(char); + } + }, + DecodeMode::Length => { + if char.eq(&']') { + mode = DecodeMode::ColonSkip; + length = length_indicator.parse::().unwrap(); + } else { + length_indicator.push(char); + } + }, + DecodeMode::ColonSkip => { + // skips ':' char + mode = DecodeMode::Data; + }, + DecodeMode::Data => { + if length > 0 { + data_buffer.push(char); + length -= 1; + } + + if length <= 0{ + let value: sea_orm::Value = match type_indicator.as_str() { + "TinyInt" => { + if length.eq(&-1) { + sea_orm::Value::TinyInt(None) + } else { + sea_orm::Value::TinyInt(Some(data_buffer.parse::().unwrap())) + } + }, + "SmallInt" => { + if length.eq(&-1) { + sea_orm::Value::SmallInt(None) + } else { + sea_orm::Value::SmallInt(Some(data_buffer.parse::().unwrap())) + } + }, + "Int" => { + if length.eq(&-1) { + sea_orm::Value::Int(None) + } else { + sea_orm::Value::Int(Some(data_buffer.parse::().unwrap())) + } + }, + "BigInt" => { + if length.eq(&-1) { + sea_orm::Value::BigInt(None) + } else { + sea_orm::Value::BigInt(Some(data_buffer.parse::().unwrap())) + } + }, + "TinyUnsigned" => { + if length.eq(&-1) { + sea_orm::Value::TinyUnsigned(None) + } else { + sea_orm::Value::TinyUnsigned(Some(data_buffer.parse::().unwrap())) + } + }, + "SmallUnsigned" => { + if length.eq(&-1) { + sea_orm::Value::SmallUnsigned(None) + } else { + sea_orm::Value::SmallUnsigned(Some(data_buffer.parse::().unwrap())) + } + }, + "Unsigned" => { + if length.eq(&-1) { + sea_orm::Value::Unsigned(None) + } else { + sea_orm::Value::Unsigned(Some(data_buffer.parse::().unwrap())) + } + }, + "BigUnsigned" => { + if length.eq(&-1) { + sea_orm::Value::BigUnsigned(None) + } else { + sea_orm::Value::BigUnsigned(Some(data_buffer.parse::().unwrap())) + } + }, + "String" => { + if length.eq(&-1) { + sea_orm::Value::String(None) + } else { + sea_orm::Value::String(Some(Box::new(data_buffer.parse::().unwrap()))) + } + }, + "Uuid" => { + if length.eq(&-1) { + sea_orm::Value::Uuid(None) + } else { + sea_orm::Value::Uuid(Some(Box::new(data_buffer.parse::().unwrap()))) + } + }, + _ => { + // FIXME: missing value types + panic!("cannot encode current type") + }, + }; + + values.push(value); + + type_indicator = String::new(); + length_indicator = String::new(); + data_buffer = String::new(); + length = -1; + + mode = 
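// Cursor wire format: `encode_cursor` (below) renders each primary-key value as
// `Type[length]:value` and joins the segments with commas; a length of -1 marks a
// NULL column. Concrete examples, the first two taken from the tests in this series:
//
//     "Int[4]:2823"               -> sea_orm::Value::Int(Some(2823))
//     "Int[3]:342"                -> sea_orm::Value::Int(Some(342))
//     "Int[-1]:"                  -> sea_orm::Value::Int(None)
//     "Int[1]:7,String[5]:hello"  -> a composite key (Int 7, then String "hello")
//
// The composite example is only illustrative; the example schemas paginate on
// single-column keys.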
DecodeMode::Type; + } + } + } + } + + Ok(Self(values)) + } + + fn encode_cursor(&self) -> String { + use itertools::Itertools; + + self.0.iter().map(|value| -> String { + match value { + sea_orm::Value::TinyInt(value) => { + if let Some(value) = value { + let value = value.to_string(); + format!("TinyInt[{}]:{}", value.len(), value) + } else { + format!("TinyInt[-1]:") + } + }, + sea_orm::Value::SmallInt(value) => { + if let Some(value) = value { + let value = value.to_string(); + format!("SmallInt[{}]:{}", value.len(), value) + } else { + format!("SmallInt[-1]:") + } + }, + sea_orm::Value::Int(value) => { + if let Some(value) = value { + let value = value.to_string(); + format!("Int[{}]:{}", value.len(), value) + } else { + format!("Int[-1]:") + } + }, + sea_orm::Value::BigInt(value) => { + if let Some(value) = value { + let value = value.to_string(); + format!("BigInt[{}]:{}", value.len(), value) + } else { + format!("BigInt[-1]:") + } + }, + sea_orm::Value::TinyUnsigned(value) => { + if let Some(value) = value { + let value = value.to_string(); + format!("TinyUnsigned[{}]:{}", value.len(), value) + } else { + format!("TinyUnsigned[-1]:") + } + }, + sea_orm::Value::SmallUnsigned(value) => { + if let Some(value) = value { + let value = value.to_string(); + format!("SmallUnsigned[{}]:{}", value.len(), value) + } else { + format!("SmallUnsigned[-1]:") + } + }, + sea_orm::Value::Unsigned(value) => { + if let Some(value) = value { + let value = value.to_string(); + format!("Unsigned[{}]:{}", value.len(), value) + } else { + format!("Unsigned[-1]:") + } + }, + sea_orm::Value::BigUnsigned(value) => { + if let Some(value) = value { + let value = value.to_string(); + format!("BigUnsigned[{}]:{}", value.len(), value) + } else { + format!("BigUnsigned[-1]:") + } + }, + sea_orm::Value::String(value) => { + if let Some(value) = value { + let value = value.as_ref(); + format!("String[{}]:{}", value.len(), value) + } else { + format!("String[-1]:") + } + }, + sea_orm::Value::Uuid(value) => { + if let Some(value) = value { + let value = value.as_ref().to_string(); + format!("Uuid[{}]:{}", value.len(), value) + } else { + format!("Uuid[-1]:") + } + }, + _ => { + // FIXME: missing value types + panic!("cannot + current type") + }, + } + }) + .join(",") + } + } + #[async_graphql::Object] impl #ident { #(#queries)* diff --git a/examples/sqlite/Cargo.toml b/examples/sqlite/Cargo.toml index d63771dd..a42ce9db 100644 --- a/examples/sqlite/Cargo.toml +++ b/examples/sqlite/Cargo.toml @@ -4,8 +4,8 @@ name = 'seaography-sqlite-example' version = '0.1.0' [dependencies] -async-graphql = { version = "4.0.10", features = ["decimal", "chrono", "dataloader"] } -async-graphql-poem = { version = "4.0.10" } +async-graphql = { version = "4.0.14", features = ["decimal", "chrono", "dataloader"] } +async-graphql-poem = { version = "4.0.14" } async-trait = { version = "0.1.53" } dotenv = "0.15.0" poem = { version = "1.3.29" } diff --git a/examples/sqlite/tests/query_tests.rs b/examples/sqlite/tests/query_tests.rs index 7e239d04..58250eef 100644 --- a/examples/sqlite/tests/query_tests.rs +++ b/examples/sqlite/tests/query_tests.rs @@ -195,3 +195,69 @@ async fn test_complex_filter_with_pagination() { "#, ) } + +#[tokio::test] +async fn test_cursor_pagination() { + let schema = get_schema().await; + + assert_eq( + schema + .execute( + r#" + { + tracksCursor(cursor:{limit: 4, cursor: "Int[4]:2822"}, filters:{milliseconds: { gt: 2573031}}) { + edges { + node { + trackId + name + milliseconds + } + cursor + } + } + } + "#, + ) + 
.await, + r#" + { + "tracksCursor": { + "edges": [ + { + "node": { + "trackId": 2823, + "name": "Collaborators", + "milliseconds": 2626626 + }, + "cursor": "Int[4]:2823" + }, + { + "node": { + "trackId": 2824, + "name": "Torn", + "milliseconds": 2631291 + }, + "cursor": "Int[4]:2824" + }, + { + "node": { + "trackId": 2826, + "name": "Hero", + "milliseconds": 2713755 + }, + "cursor": "Int[4]:2826" + }, + { + "node": { + "trackId": 2827, + "name": "Unfinished Business", + "milliseconds": 2622038 + }, + "cursor": "Int[4]:2827" + } + ] + } + } + "#, + ) +} diff --git a/generator/src/_Cargo.toml b/generator/src/_Cargo.toml index 94e0738d..268c74b2 100644 --- a/generator/src/_Cargo.toml +++ b/generator/src/_Cargo.toml @@ -4,8 +4,8 @@ name = '' version = '0.1.0' [dependencies] -async-graphql = { version = "4.0.10", features = ["decimal", "chrono", "dataloader"] } -async-graphql-poem = { version = "4.0.10" } +async-graphql = { version = "4.0.14", features = ["decimal", "chrono", "dataloader"] } +async-graphql-poem = { version = "4.0.14" } async-trait = { version = "0.1.53" } dotenv = "0.15.0" poem = { version = "1.3.29" } From a5960b559c1bcafadbbdf4edf544d48ef3b7fd88 Mon Sep 17 00:00:00 2001 From: Panagiotis Karatakis Date: Tue, 4 Oct 2022 13:15:33 +0300 Subject: [PATCH 02/11] Tidy code --- derive/src/root_query.rs | 297 ++++++++++++++++++++++----------------- 1 file changed, 167 insertions(+), 130 deletions(-) diff --git a/derive/src/root_query.rs b/derive/src/root_query.rs index b95b3ef0..042b42ce 100644 --- a/derive/src/root_query.rs +++ b/derive/src/root_query.rs @@ -1,5 +1,5 @@ use heck::ToUpperCamelCase; -use proc_macro2::TokenStream; +use proc_macro2::{Ident, TokenStream}; use quote::{format_ident, quote}; #[derive(Debug, Eq, PartialEq, bae::FromAttributes)] @@ -40,147 +40,88 @@ pub fn root_query_fn( .iter() .map(|path| { let name = format_ident!("{}", path.clone().into_iter().last().unwrap().to_string()); - let name_cursor = format_ident!("{}_cursor", path.clone().into_iter().last().unwrap().to_string()); - - quote!{ - pub async fn #name<'a>( - &self, - ctx: &async_graphql::Context<'a>, - filters: Option<#path::Filter>, - pagination: Option, - order_by: Option<#path::OrderBy>, - ) -> PaginatedResult<#path::Model> { - use sea_orm::prelude::*; - - let db: &crate::DatabaseConnection = ctx.data::().unwrap(); - let stmt = #path::Entity::find() - .filter(#path::filter_recursive(filters)); - - let stmt = #path::order_by(stmt, order_by); - - if let Some(pagination) = pagination { - let paginator = stmt.paginate(db, pagination.limit); - let data: Vec<#path::Model> = - paginator.fetch_page(pagination.page).await.unwrap(); - let pages = paginator.num_pages().await.unwrap(); - PaginatedResult { - data, - pages, - current: pagination.page, - } - } else { - let data: Vec<#path::Model> = stmt.all(db).await.unwrap(); - PaginatedResult { - data, - pages: 1, - current: 1, - } - } - } + let name_cursor = format_ident!( + "{}_cursor", + path.clone().into_iter().last().unwrap().to_string() + ); - pub async fn #name_cursor<'a>( - &self, - ctx: &async_graphql::Context<'a>, - filters: Option<#path::Filter>, - cursor: CursorPaginationInput, - order_by: Option<#path::OrderBy>, - ) -> async_graphql::types::connection::Connection { - use sea_orm::prelude::*; - use sea_orm::Iterable; - use itertools::Itertools; - use async_graphql::types::connection::CursorType; - - println!("cursor_filters: {:?}", filters); - - let db: &crate::DatabaseConnection = ctx.data::().unwrap(); - let stmt = #path::Entity::find() - 
.filter(#path::filter_recursive(filters)); - - let stmt = #path::order_by(stmt, order_by); - - let mut stmt = if #path::PrimaryKey::iter().len() == 1 { - let column = #path::PrimaryKey::iter().map(|variant| variant.into_column()).collect::>()[0]; - stmt.cursor_by(column) - } else if #path::PrimaryKey::iter().len() == 2 { - let columns = #path::PrimaryKey::iter().map(|variant| variant.into_column()).collect_tuple::<(#path::Column, #path::Column)>().unwrap(); - stmt.cursor_by(columns) - } else if #path::PrimaryKey::iter().len() == 3 { - let columns = #path::PrimaryKey::iter().map(|variant| variant.into_column()).collect_tuple::<(#path::Column, #path::Column, #path::Column)>().unwrap(); - stmt.cursor_by(columns) - } else { - panic!("seaography does not support cursors with size greater than 3") - }; - - if let Some(cursor_string) = cursor.cursor { - let values = CursorValues::decode_cursor(cursor_string.as_str()).unwrap(); - - if values.0.len() == 1 { - let value = values.0[0].clone(); - stmt.after(value); - } else if values.0.len() == 2 { - let values = values.0.into_iter().collect_tuple::<(sea_orm::Value, sea_orm::Value)>().unwrap(); - stmt.after(values); - } else if values.0.len() == 3 { - let values = values.0.into_iter().collect_tuple::<(sea_orm::Value, sea_orm::Value, sea_orm::Value)>().unwrap(); - stmt.after(values); - } else { - panic!("seaography does not support cursors values with size greater than 3"); - } - } + let basic_query = basic_query(&name, path); - let mut stmt = stmt.first(cursor.limit); + let cursor_query = cursor_query(&name_cursor, path); + + quote! { + #basic_query - let data = stmt - .all(db) - .await - .unwrap(); + #cursor_query + } + }) + .collect(); - let edges: Vec> = data - .into_iter() - .map(|node| { - let values: Vec = #path::PrimaryKey::iter() - .map(|variant| { - node.get(variant.into_column()) - }) - .collect(); + let basic_dependencies = basic_dependencies(names); - let cursor_string = CursorValues(values).encode_cursor(); + let cursor_dependencies = cursor_dependencies(); - async_graphql::types::connection::Edge::new(cursor_string, node) - }) - .collect(); + Ok(quote! { + #basic_dependencies - let mut result = async_graphql::types::connection::Connection::< - String, - #path::Model, - async_graphql::types::connection::EmptyFields, - async_graphql::types::connection::EmptyFields - >::new( - false, // has_previous_page: TODO test with cursor "before" - false // has_next_page: TODO test with cursor "after" and last cursor - ); + #cursor_dependencies - result.edges.extend(edges); + #[async_graphql::Object] + impl #ident { + #(#queries)* + } + }) +} - result +pub fn basic_query(name: &Ident, path: &TokenStream) -> TokenStream { + quote! 
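// The token stream below is the offset-style resolver from the previous commit,
// extracted into its own function (plus a debug println). For an entity module
// such as `customer` it answers queries like the one already used in the example
// tests:
//
//     {
//       customer(filters: {active: {eq: 0}}, pagination: {page: 2, limit: 3}) {
//         data { customerId }
//         pages
//         current
//       }
//     }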
{ + pub async fn #name<'a>( + &self, + ctx: &async_graphql::Context<'a>, + filters: Option<#path::Filter>, + pagination: Option, + order_by: Option<#path::OrderBy>, + ) -> PaginatedResult<#path::Model> { + use sea_orm::prelude::*; + + println!("filters: {:?}", filters); + + let db: &crate::DatabaseConnection = ctx.data::().unwrap(); + let stmt = #path::Entity::find() + .filter(#path::filter_recursive(filters)); + + let stmt = #path::order_by(stmt, order_by); + + if let Some(pagination) = pagination { + let paginator = stmt.paginate(db, pagination.limit); + let data: Vec<#path::Model> = + paginator.fetch_page(pagination.page).await.unwrap(); + let pages = paginator.num_pages().await.unwrap(); + PaginatedResult { + data, + pages, + current: pagination.page, + } + } else { + let data: Vec<#path::Model> = stmt.all(db).await.unwrap(); + PaginatedResult { + data, + pages: 1, + current: 1, } } - }) - .collect(); + } + } +} - Ok(quote! { +pub fn basic_dependencies(names: Vec) -> TokenStream { + quote! { #[derive(Debug, async_graphql::InputObject)] pub struct PaginationInput { pub limit: usize, pub page: usize, } - #[derive(Debug, async_graphql::InputObject)] - pub struct CursorPaginationInput { - pub cursor: Option, - pub limit: u64, - } - #[derive(Debug, async_graphql::SimpleObject)] #(#names)* pub struct PaginatedResult { @@ -188,6 +129,107 @@ pub fn root_query_fn( pub pages: usize, pub current: usize, } + } +} + +pub fn cursor_query(name: &Ident, path: &TokenStream) -> TokenStream { + quote! { + pub async fn #name<'a>( + &self, + ctx: &async_graphql::Context<'a>, + filters: Option<#path::Filter>, + cursor: CursorPaginationInput, + order_by: Option<#path::OrderBy>, + ) -> async_graphql::types::connection::Connection { + use sea_orm::prelude::*; + use sea_orm::Iterable; + use itertools::Itertools; + use async_graphql::types::connection::CursorType; + + println!("cursor_filters: {:?}", filters); + + let db: &crate::DatabaseConnection = ctx.data::().unwrap(); + let stmt = #path::Entity::find() + .filter(#path::filter_recursive(filters)); + + let stmt = #path::order_by(stmt, order_by); + + let mut stmt = if #path::PrimaryKey::iter().len() == 1 { + let column = #path::PrimaryKey::iter().map(|variant| variant.into_column()).collect::>()[0]; + stmt.cursor_by(column) + } else if #path::PrimaryKey::iter().len() == 2 { + let columns = #path::PrimaryKey::iter().map(|variant| variant.into_column()).collect_tuple::<(#path::Column, #path::Column)>().unwrap(); + stmt.cursor_by(columns) + } else if #path::PrimaryKey::iter().len() == 3 { + let columns = #path::PrimaryKey::iter().map(|variant| variant.into_column()).collect_tuple::<(#path::Column, #path::Column, #path::Column)>().unwrap(); + stmt.cursor_by(columns) + } else { + panic!("seaography does not support cursors with size greater than 3") + }; + + if let Some(cursor_string) = cursor.cursor { + let values = CursorValues::decode_cursor(cursor_string.as_str()).unwrap(); + + if values.0.len() == 1 { + let value = values.0[0].clone(); + stmt.after(value); + } else if values.0.len() == 2 { + let values = values.0.into_iter().collect_tuple::<(sea_orm::Value, sea_orm::Value)>().unwrap(); + stmt.after(values); + } else if values.0.len() == 3 { + let values = values.0.into_iter().collect_tuple::<(sea_orm::Value, sea_orm::Value, sea_orm::Value)>().unwrap(); + stmt.after(values); + } else { + panic!("seaography does not support cursors values with size greater than 3"); + } + } + + let mut stmt = stmt.first(cursor.limit); + + let data = stmt + .all(db) + .await + 
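// `stmt` is a sea_orm cursor over the primary-key column(s): `.first(cursor.limit)`
// caps the page size and, when a cursor string was supplied, the `.after(...)` call
// above makes the page start just past the decoded key. For example, in the sqlite
// tests `cursor: {limit: 5, cursor: "Int[4]:2823"}` returns the five matching tracks
// whose id follows 2823. Failures are simply unwrapped in this version.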
.unwrap(); + + let edges: Vec> = data + .into_iter() + .map(|node| { + let values: Vec = #path::PrimaryKey::iter() + .map(|variant| { + node.get(variant.into_column()) + }) + .collect(); + + let cursor_string = CursorValues(values).encode_cursor(); + + async_graphql::types::connection::Edge::new(cursor_string, node) + }) + .collect(); + + let mut result = async_graphql::types::connection::Connection::< + String, + #path::Model, + async_graphql::types::connection::EmptyFields, + async_graphql::types::connection::EmptyFields + >::new( + false, // has_previous_page: TODO test with cursor "before" + false // has_next_page: TODO test with cursor "after" and last cursor + ); + + result.edges.extend(edges); + + result + } + } +} + +pub fn cursor_dependencies() -> TokenStream { + quote! { + #[derive(Debug, async_graphql::InputObject)] + pub struct CursorPaginationInput { + pub cursor: Option, + pub limit: u64, + } #[derive(Debug)] pub enum DecodeMode { @@ -432,10 +474,5 @@ pub fn root_query_fn( .join(",") } } - - #[async_graphql::Object] - impl #ident { - #(#queries)* - } - }) + } } From ff4020eb68d11b195586b459db1cafe8903076e4 Mon Sep 17 00:00:00 2001 From: Panagiotis Karatakis Date: Tue, 4 Oct 2022 15:15:13 +0300 Subject: [PATCH 03/11] Fix has_previous_page and has_next_page --- derive/src/root_query.rs | 121 ++++++++++++++++++++++++++++++--------- 1 file changed, 93 insertions(+), 28 deletions(-) diff --git a/derive/src/root_query.rs b/derive/src/root_query.rs index 042b42ce..682860fa 100644 --- a/derive/src/root_query.rs +++ b/derive/src/root_query.rs @@ -154,43 +154,94 @@ pub fn cursor_query(name: &Ident, path: &TokenStream) -> TokenStream { let stmt = #path::order_by(stmt, order_by); - let mut stmt = if #path::PrimaryKey::iter().len() == 1 { - let column = #path::PrimaryKey::iter().map(|variant| variant.into_column()).collect::>()[0]; - stmt.cursor_by(column) - } else if #path::PrimaryKey::iter().len() == 2 { - let columns = #path::PrimaryKey::iter().map(|variant| variant.into_column()).collect_tuple::<(#path::Column, #path::Column)>().unwrap(); - stmt.cursor_by(columns) - } else if #path::PrimaryKey::iter().len() == 3 { - let columns = #path::PrimaryKey::iter().map(|variant| variant.into_column()).collect_tuple::<(#path::Column, #path::Column, #path::Column)>().unwrap(); - stmt.cursor_by(columns) - } else { - panic!("seaography does not support cursors with size greater than 3") - }; + let next_stmt = stmt.clone(); + let previous_stmt = stmt.clone(); + + fn apply_stmt_cursor_by(stmt: sea_orm::entity::prelude::Select<#path::Entity>) -> sea_orm::Cursor> { + if #path::PrimaryKey::iter().len() == 1 { + let column = #path::PrimaryKey::iter().map(|variant| variant.into_column()).collect::>()[0]; + stmt.cursor_by(column) + } else if #path::PrimaryKey::iter().len() == 2 { + let columns = #path::PrimaryKey::iter().map(|variant| variant.into_column()).collect_tuple::<(#path::Column, #path::Column)>().unwrap(); + stmt.cursor_by(columns) + } else if #path::PrimaryKey::iter().len() == 3 { + let columns = #path::PrimaryKey::iter().map(|variant| variant.into_column()).collect_tuple::<(#path::Column, #path::Column, #path::Column)>().unwrap(); + stmt.cursor_by(columns) + } else { + panic!("seaography does not support cursors with size greater than 3") + } + } + + let mut stmt = apply_stmt_cursor_by(stmt); if let Some(cursor_string) = cursor.cursor { let values = CursorValues::decode_cursor(cursor_string.as_str()).unwrap(); - if values.0.len() == 1 { - let value = values.0[0].clone(); - stmt.after(value); - 
} else if values.0.len() == 2 { - let values = values.0.into_iter().collect_tuple::<(sea_orm::Value, sea_orm::Value)>().unwrap(); - stmt.after(values); - } else if values.0.len() == 3 { - let values = values.0.into_iter().collect_tuple::<(sea_orm::Value, sea_orm::Value, sea_orm::Value)>().unwrap(); - stmt.after(values); - } else { - panic!("seaography does not support cursors values with size greater than 3"); - } - } + let cursor_values: sea_orm::sea_query::value::ValueTuple = map_cursor_values(values.0); - let mut stmt = stmt.first(cursor.limit); + stmt.after(cursor_values); + } let data = stmt + .first(cursor.limit) .all(db) .await .unwrap(); + let has_next_page: bool = { + let mut next_stmt = apply_stmt_cursor_by(next_stmt); + + let last_node = data.last(); + + if let Some(node) = last_node { + let values: Vec = #path::PrimaryKey::iter() + .map(|variant| { + node.get(variant.into_column()) + }) + .collect(); + + let values = map_cursor_values(values); + + let next_data = next_stmt + .first(cursor.limit) + .after(values) + .all(db) + .await + .unwrap(); + + next_data.len() != 0 + } else { + false + } + }; + + let has_previous_page: bool = { + let mut previous_stmt = apply_stmt_cursor_by(previous_stmt); + + let first_node = data.first(); + + if let Some(node) = first_node { + let values: Vec = #path::PrimaryKey::iter() + .map(|variant| { + node.get(variant.into_column()) + }) + .collect(); + + let values = map_cursor_values(values); + + let previous_data = previous_stmt + .first(cursor.limit) + .before(values) + .all(db) + .await + .unwrap(); + + previous_data.len() != 0 + } else { + false + } + }; + let edges: Vec> = data .into_iter() .map(|node| { @@ -212,8 +263,8 @@ pub fn cursor_query(name: &Ident, path: &TokenStream) -> TokenStream { async_graphql::types::connection::EmptyFields, async_graphql::types::connection::EmptyFields >::new( - false, // has_previous_page: TODO test with cursor "before" - false // has_next_page: TODO test with cursor "after" and last cursor + has_previous_page, + has_next_page ); result.edges.extend(edges); @@ -239,6 +290,20 @@ pub fn cursor_dependencies() -> TokenStream { Data, } + pub fn map_cursor_values(values: Vec) -> sea_orm::sea_query::value::ValueTuple { + use itertools::Itertools; + + if values.len() == 1 { + sea_orm::sea_query::value::ValueTuple::One(values[0].clone()) + } else if values.len() == 2 { + sea_orm::sea_query::value::ValueTuple::Two(values[0].clone(), values[1].clone()) + } else if values.len() == 3 { + sea_orm::sea_query::value::ValueTuple::Three(values[0].clone(), values[1].clone(), values[2].clone()) + } else { + panic!("seaography does not support cursors values with size greater than 3") + } + } + #[derive(Debug)] pub struct CursorValues(pub Vec); From 180f4a21cf5aaa6069db9d83782d1fa8d1fcbb79 Mon Sep 17 00:00:00 2001 From: Panagiotis Karatakis Date: Tue, 4 Oct 2022 21:41:03 +0300 Subject: [PATCH 04/11] Update tests and postgres example --- examples/mysql/tests/query_tests.rs | 231 +++++++++++++++++++++++++ examples/postgres/.env | 3 + examples/postgres/tests/query_tests.rs | 231 +++++++++++++++++++++++++ examples/sqlite/tests/query_tests.rs | 175 ++++++++++++++++++- 4 files changed, 637 insertions(+), 3 deletions(-) create mode 100644 examples/postgres/.env diff --git a/examples/mysql/tests/query_tests.rs b/examples/mysql/tests/query_tests.rs index b63cbf3b..ec8ae842 100644 --- a/examples/mysql/tests/query_tests.rs +++ b/examples/mysql/tests/query_tests.rs @@ -197,3 +197,234 @@ async fn test_complex_filter_with_pagination() { "#, ) 
} + +#[tokio::test] +async fn test_cursor_pagination() { + let schema = get_schema().await; + + assert_eq( + schema + .execute( + r#" + { + paymentCursor(filters: {amount: {gt: "11"}}, cursor: {limit: 5}) { + edges { + node { + paymentId + amount + customer { + firstName + } + } + } + pageInfo { + hasPreviousPage + hasNextPage + startCursor + endCursor + } + } + } + "#, + ) + .await, + r#" + { + "paymentCursor": { + "edges": [ + { + "node": { + "paymentId": 342, + "amount": "11.99", + "customer": { + "firstName": "KAREN" + } + } + }, + { + "node": { + "paymentId": 3146, + "amount": "11.99", + "customer": { + "firstName": "VICTORIA" + } + } + }, + { + "node": { + "paymentId": 5280, + "amount": "11.99", + "customer": { + "firstName": "VANESSA" + } + } + }, + { + "node": { + "paymentId": 5281, + "amount": "11.99", + "customer": { + "firstName": "ALMA" + } + } + }, + { + "node": { + "paymentId": 5550, + "amount": "11.99", + "customer": { + "firstName": "ROSEMARY" + } + } + } + ], + "pageInfo": { + "hasPreviousPage": false, + "hasNextPage": true, + "startCursor": "SmallUnsigned[3]:342", + "endCursor": "SmallUnsigned[4]:5550" + } + } + } + "#, + ) +} + +#[tokio::test] +async fn test_cursor_pagination_prev() { + let schema = get_schema().await; + + assert_eq( + schema + .execute( + r#" + { + paymentCursor(filters: {amount: {gt: "11"}}, cursor: {limit: 3, cursor: "SmallUnsigned[4]:5550"}) { + edges { + node { + paymentId + amount + customer { + firstName + } + } + } + pageInfo { + hasPreviousPage + hasNextPage + startCursor + endCursor + } + } + } + "#, + ) + .await, + r#" + { + "paymentCursor": { + "edges": [ + { + "node": { + "paymentId": 6409, + "amount": "11.99", + "customer": { + "firstName": "TANYA" + } + } + }, + { + "node": { + "paymentId": 8272, + "amount": "11.99", + "customer": { + "firstName": "RICHARD" + } + } + }, + { + "node": { + "paymentId": 9803, + "amount": "11.99", + "customer": { + "firstName": "NICHOLAS" + } + } + } + ], + "pageInfo": { + "hasPreviousPage": true, + "hasNextPage": true, + "startCursor": "SmallUnsigned[4]:6409", + "endCursor": "SmallUnsigned[4]:9803" + } + } + } + "#, + ) +} + +#[tokio::test] +async fn test_cursor_pagination_no_next() { + let schema = get_schema().await; + + assert_eq( + schema + .execute( + r#" + { + paymentCursor(filters: {amount: {gt: "11"}}, cursor: {limit: 3, cursor: "SmallUnsigned[4]:9803"}) { + edges { + node { + paymentId + amount + customer { + firstName + } + } + } + pageInfo { + hasPreviousPage + hasNextPage + startCursor + endCursor + } + } + } + "#, + ) + .await, + r#" + { + "paymentCursor": { + "edges": [ + { + "node": { + "paymentId": 15821, + "amount": "11.99", + "customer": { + "firstName": "KENT" + } + } + }, + { + "node": { + "paymentId": 15850, + "amount": "11.99", + "customer": { + "firstName": "TERRANCE" + } + } + } + ], + "pageInfo": { + "hasPreviousPage": true, + "hasNextPage": false, + "startCursor": "SmallUnsigned[5]:15821", + "endCursor": "SmallUnsigned[5]:15850" + } + } + } + "#, + ) +} diff --git a/examples/postgres/.env b/examples/postgres/.env new file mode 100644 index 00000000..869d217e --- /dev/null +++ b/examples/postgres/.env @@ -0,0 +1,3 @@ +DATABASE_URL="postgres://postgres:postgres@127.0.0.1/sakila?currentSchema=public" +# COMPLEXITY_LIMIT= +# DEPTH_LIMIT= \ No newline at end of file diff --git a/examples/postgres/tests/query_tests.rs b/examples/postgres/tests/query_tests.rs index 66e97488..43feeb38 100644 --- a/examples/postgres/tests/query_tests.rs +++ b/examples/postgres/tests/query_tests.rs @@ -197,3 
+197,234 @@ async fn test_complex_filter_with_pagination() { "#, ) } + +#[tokio::test] +async fn test_cursor_pagination() { + let schema = get_schema().await; + + assert_eq( + schema + .execute( + r#" + { + paymentCursor(filters: {amount: {gt: "11"}}, cursor: {limit: 5}) { + edges { + node { + paymentId + amount + customer { + firstName + } + } + } + pageInfo { + hasPreviousPage + hasNextPage + startCursor + endCursor + } + } + } + "#, + ) + .await, + r#" + { + "paymentCursor": { + "edges": [ + { + "node": { + "paymentId": 342, + "amount": "11.9900", + "customer": { + "firstName": "KAREN" + } + } + }, + { + "node": { + "paymentId": 3146, + "amount": "11.9900", + "customer": { + "firstName": "VICTORIA" + } + } + }, + { + "node": { + "paymentId": 5280, + "amount": "11.9900", + "customer": { + "firstName": "VANESSA" + } + } + }, + { + "node": { + "paymentId": 5281, + "amount": "11.9900", + "customer": { + "firstName": "ALMA" + } + } + }, + { + "node": { + "paymentId": 5550, + "amount": "11.9900", + "customer": { + "firstName": "ROSEMARY" + } + } + } + ], + "pageInfo": { + "hasPreviousPage": false, + "hasNextPage": true, + "startCursor": "Int[3]:342", + "endCursor": "Int[4]:5550" + } + } + } + "#, + ) +} + +#[tokio::test] +async fn test_cursor_pagination_prev() { + let schema = get_schema().await; + + assert_eq( + schema + .execute( + r#" + { + paymentCursor(filters: {amount: {gt: "11"}}, cursor: {limit: 3, cursor: "SmallUnsigned[4]:5550"}) { + edges { + node { + paymentId + amount + customer { + firstName + } + } + } + pageInfo { + hasPreviousPage + hasNextPage + startCursor + endCursor + } + } + } + "#, + ) + .await, + r#" + { + "paymentCursor": { + "edges": [ + { + "node": { + "paymentId": 6409, + "amount": "11.9900", + "customer": { + "firstName": "TANYA" + } + } + }, + { + "node": { + "paymentId": 8272, + "amount": "11.9900", + "customer": { + "firstName": "RICHARD" + } + } + }, + { + "node": { + "paymentId": 9803, + "amount": "11.9900", + "customer": { + "firstName": "NICHOLAS" + } + } + } + ], + "pageInfo": { + "hasPreviousPage": true, + "hasNextPage": true, + "startCursor": "Int[4]:6409", + "endCursor": "Int[4]:9803" + } + } + } + "#, + ) +} + +#[tokio::test] +async fn test_cursor_pagination_no_next() { + let schema = get_schema().await; + + assert_eq( + schema + .execute( + r#" + { + paymentCursor(filters: {amount: {gt: "11"}}, cursor: {limit: 3, cursor: "SmallUnsigned[4]:9803"}) { + edges { + node { + paymentId + amount + customer { + firstName + } + } + } + pageInfo { + hasPreviousPage + hasNextPage + startCursor + endCursor + } + } + } + "#, + ) + .await, + r#" + { + "paymentCursor": { + "edges": [ + { + "node": { + "paymentId": 15821, + "amount": "11.9900", + "customer": { + "firstName": "KENT" + } + } + }, + { + "node": { + "paymentId": 15850, + "amount": "11.9900", + "customer": { + "firstName": "TERRANCE" + } + } + } + ], + "pageInfo": { + "hasPreviousPage": true, + "hasNextPage": false, + "startCursor": "Int[5]:15821", + "endCursor": "Int[5]:15850" + } + } + } + "#, + ) +} diff --git a/examples/sqlite/tests/query_tests.rs b/examples/sqlite/tests/query_tests.rs index 58250eef..e538713f 100644 --- a/examples/sqlite/tests/query_tests.rs +++ b/examples/sqlite/tests/query_tests.rs @@ -205,7 +205,7 @@ async fn test_cursor_pagination() { .execute( r#" { - tracksCursor(cursor:{limit: 4, cursor: "Int[4]:2822"}, filters:{milliseconds: { gt: 2573031}}) { + tracksCursor(cursor:{limit: 5}, filters:{milliseconds: { gt: 2573031}}) { edges { node { trackId @@ -214,6 +214,12 @@ async fn 
test_cursor_pagination() { } cursor } + pageInfo { + hasPreviousPage + hasNextPage + startCursor + endCursor + } } } "#, @@ -223,6 +229,38 @@ async fn test_cursor_pagination() { { "tracksCursor": { "edges": [ + { + "node": { + "trackId": 2819, + "name": "Battlestar Galactica: The Story So Far", + "milliseconds": 2622250 + }, + "cursor": "Int[4]:2819" + }, + { + "node": { + "trackId": 2820, + "name": "Occupation / Precipice", + "milliseconds": 5286953 + }, + "cursor": "Int[4]:2820" + }, + { + "node": { + "trackId": 2821, + "name": "Exodus, Pt. 1", + "milliseconds": 2621708 + }, + "cursor": "Int[4]:2821" + }, + { + "node": { + "trackId": 2822, + "name": "Exodus, Pt. 2", + "milliseconds": 2618000 + }, + "cursor": "Int[4]:2822" + }, { "node": { "trackId": 2823, @@ -230,7 +268,53 @@ async fn test_cursor_pagination() { "milliseconds": 2626626 }, "cursor": "Int[4]:2823" - }, + } + ], + "pageInfo": { + "hasPreviousPage": false, + "hasNextPage": true, + "startCursor": "Int[4]:2819", + "endCursor": "Int[4]:2823" + } + } + } + "#, + ) +} + +#[tokio::test] +async fn test_cursor_pagination_prev() { + let schema = get_schema().await; + + assert_eq( + schema + .execute( + r#" + { + tracksCursor(cursor:{limit: 5, cursor: "Int[4]:2823"}, filters:{milliseconds: { gt: 2573031}}) { + edges { + node { + trackId + name + milliseconds + } + cursor + } + pageInfo { + hasPreviousPage + hasNextPage + startCursor + endCursor + } + } + } + "#, + ) + .await, + r#" + { + "tracksCursor": { + "edges": [ { "node": { "trackId": 2824, @@ -254,10 +338,95 @@ async fn test_cursor_pagination() { "milliseconds": 2622038 }, "cursor": "Int[4]:2827" + }, + { + "node": { + "trackId": 2828, + "name": "The Passage", + "milliseconds": 2623875 + }, + "cursor": "Int[4]:2828" + }, + { + "node": { + "trackId": 2829, + "name": "The Eye of Jupiter", + "milliseconds": 2618750 + }, + "cursor": "Int[4]:2829" } - ] + ], + "pageInfo": { + "hasPreviousPage": true, + "hasNextPage": true, + "startCursor": "Int[4]:2824", + "endCursor": "Int[4]:2829" + } } } "#, ) } + +#[tokio::test] +async fn test_cursor_pagination_no_next() { + let schema = get_schema().await; + + assert_eq( + schema + .execute( + r#" + { + tracksCursor(cursor:{limit: 5, cursor: "Int[4]:3361"}, filters:{milliseconds: { gt: 2573031}}) { + edges { + node { + trackId + name + milliseconds + } + cursor + } + pageInfo { + hasPreviousPage + hasNextPage + startCursor + endCursor + } + } + } + "#, + ) + .await, + r#" + { + "tracksCursor": { + "edges": [ + { + "node": { + "trackId": 3362, + "name": "There's No Place Like Home, Pt. 1", + "milliseconds": 2609526 + }, + "cursor": "Int[4]:3362" + }, + { + "node": { + "trackId": 3364, + "name": "There's No Place Like Home, Pt. 
3", + "milliseconds": 2582957 + }, + "cursor": "Int[4]:3364" + } + ], + "pageInfo": { + "hasPreviousPage": true, + "hasNextPage": false, + "startCursor": "Int[4]:3362", + "endCursor": "Int[4]:3364" + } + } + } + "#, + ) +} + From 233f23a10b1f95d33c491b786ed90f901bc4458c Mon Sep 17 00:00:00 2001 From: Panagiotis Karatakis Date: Sat, 8 Oct 2022 12:08:15 +0300 Subject: [PATCH 05/11] update to use crate libs --- derive/src/root_query.rs | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/derive/src/root_query.rs b/derive/src/root_query.rs index 682860fa..6829f0bc 100644 --- a/derive/src/root_query.rs +++ b/derive/src/root_query.rs @@ -143,7 +143,7 @@ pub fn cursor_query(name: &Ident, path: &TokenStream) -> TokenStream { ) -> async_graphql::types::connection::Connection { use sea_orm::prelude::*; use sea_orm::Iterable; - use itertools::Itertools; + use seaography::itertools::Itertools; use async_graphql::types::connection::CursorType; println!("cursor_filters: {:?}", filters); @@ -291,7 +291,7 @@ pub fn cursor_dependencies() -> TokenStream { } pub fn map_cursor_values(values: Vec) -> sea_orm::sea_query::value::ValueTuple { - use itertools::Itertools; + use seaography::itertools::Itertools; if values.len() == 1 { sea_orm::sea_query::value::ValueTuple::One(values[0].clone()) @@ -445,7 +445,7 @@ pub fn cursor_dependencies() -> TokenStream { } fn encode_cursor(&self) -> String { - use itertools::Itertools; + use seaography::itertools::Itertools; self.0.iter().map(|value| -> String { match value { From 26b07790c38fb9a722667d3579f00ea2400f6d68 Mon Sep 17 00:00:00 2001 From: Panagiotis Karatakis Date: Sat, 8 Oct 2022 12:13:57 +0300 Subject: [PATCH 06/11] Update sqlite tests with new database --- examples/sqlite/tests/query_tests.rs | 178 +++++++++++++-------------- 1 file changed, 87 insertions(+), 91 deletions(-) diff --git a/examples/sqlite/tests/query_tests.rs b/examples/sqlite/tests/query_tests.rs index e538713f..071f87e5 100644 --- a/examples/sqlite/tests/query_tests.rs +++ b/examples/sqlite/tests/query_tests.rs @@ -205,14 +205,15 @@ async fn test_cursor_pagination() { .execute( r#" { - tracksCursor(cursor:{limit: 5}, filters:{milliseconds: { gt: 2573031}}) { + paymentCursor(filters: {amount: {gt: "11"}}, cursor: {limit: 5}) { edges { node { - trackId - name - milliseconds + paymentId + amount + customer { + firstName + } } - cursor } pageInfo { hasPreviousPage @@ -227,54 +228,59 @@ async fn test_cursor_pagination() { .await, r#" { - "tracksCursor": { + "paymentCursor": { "edges": [ { "node": { - "trackId": 2819, - "name": "Battlestar Galactica: The Story So Far", - "milliseconds": 2622250 - }, - "cursor": "Int[4]:2819" + "paymentId": 342, + "amount": "11.99", + "customer": { + "firstName": "KAREN" + } + } }, { "node": { - "trackId": 2820, - "name": "Occupation / Precipice", - "milliseconds": 5286953 - }, - "cursor": "Int[4]:2820" + "paymentId": 3146, + "amount": "11.99", + "customer": { + "firstName": "VICTORIA" + } + } }, { "node": { - "trackId": 2821, - "name": "Exodus, Pt. 1", - "milliseconds": 2621708 - }, - "cursor": "Int[4]:2821" + "paymentId": 5280, + "amount": "11.99", + "customer": { + "firstName": "VANESSA" + } + } }, { "node": { - "trackId": 2822, - "name": "Exodus, Pt. 
2", - "milliseconds": 2618000 - }, - "cursor": "Int[4]:2822" + "paymentId": 5281, + "amount": "11.99", + "customer": { + "firstName": "ALMA" + } + } }, { "node": { - "trackId": 2823, - "name": "Collaborators", - "milliseconds": 2626626 - }, - "cursor": "Int[4]:2823" + "paymentId": 5550, + "amount": "11.99", + "customer": { + "firstName": "ROSEMARY" + } + } } ], "pageInfo": { "hasPreviousPage": false, "hasNextPage": true, - "startCursor": "Int[4]:2819", - "endCursor": "Int[4]:2823" + "startCursor": "Int[3]:342", + "endCursor": "Int[4]:5550" } } } @@ -291,14 +297,15 @@ async fn test_cursor_pagination_prev() { .execute( r#" { - tracksCursor(cursor:{limit: 5, cursor: "Int[4]:2823"}, filters:{milliseconds: { gt: 2573031}}) { + paymentCursor(filters: {amount: {gt: "11"}}, cursor: {limit: 3, cursor: "SmallUnsigned[4]:5550"}) { edges { node { - trackId - name - milliseconds + paymentId + amount + customer { + firstName + } } - cursor } pageInfo { hasPreviousPage @@ -313,54 +320,41 @@ async fn test_cursor_pagination_prev() { .await, r#" { - "tracksCursor": { + "paymentCursor": { "edges": [ { "node": { - "trackId": 2824, - "name": "Torn", - "milliseconds": 2631291 - }, - "cursor": "Int[4]:2824" - }, - { - "node": { - "trackId": 2826, - "name": "Hero", - "milliseconds": 2713755 - }, - "cursor": "Int[4]:2826" - }, - { - "node": { - "trackId": 2827, - "name": "Unfinished Business", - "milliseconds": 2622038 - }, - "cursor": "Int[4]:2827" + "paymentId": 6409, + "amount": "11.99", + "customer": { + "firstName": "TANYA" + } + } }, { "node": { - "trackId": 2828, - "name": "The Passage", - "milliseconds": 2623875 - }, - "cursor": "Int[4]:2828" + "paymentId": 8272, + "amount": "11.99", + "customer": { + "firstName": "RICHARD" + } + } }, { "node": { - "trackId": 2829, - "name": "The Eye of Jupiter", - "milliseconds": 2618750 - }, - "cursor": "Int[4]:2829" + "paymentId": 9803, + "amount": "11.99", + "customer": { + "firstName": "NICHOLAS" + } + } } ], "pageInfo": { "hasPreviousPage": true, "hasNextPage": true, - "startCursor": "Int[4]:2824", - "endCursor": "Int[4]:2829" + "startCursor": "Int[4]:6409", + "endCursor": "Int[4]:9803" } } } @@ -377,14 +371,15 @@ async fn test_cursor_pagination_no_next() { .execute( r#" { - tracksCursor(cursor:{limit: 5, cursor: "Int[4]:3361"}, filters:{milliseconds: { gt: 2573031}}) { + paymentCursor(filters: {amount: {gt: "11"}}, cursor: {limit: 3, cursor: "SmallUnsigned[4]:9803"}) { edges { node { - trackId - name - milliseconds + paymentId + amount + customer { + firstName + } } - cursor } pageInfo { hasPreviousPage @@ -399,34 +394,35 @@ async fn test_cursor_pagination_no_next() { .await, r#" { - "tracksCursor": { + "paymentCursor": { "edges": [ { "node": { - "trackId": 3362, - "name": "There's No Place Like Home, Pt. 1", - "milliseconds": 2609526 - }, - "cursor": "Int[4]:3362" + "paymentId": 15821, + "amount": "11.99", + "customer": { + "firstName": "KENT" + } + } }, { "node": { - "trackId": 3364, - "name": "There's No Place Like Home, Pt. 
3", - "milliseconds": 2582957 - }, - "cursor": "Int[4]:3364" + "paymentId": 15850, + "amount": "11.99", + "customer": { + "firstName": "TERRANCE" + } + } } ], "pageInfo": { "hasPreviousPage": true, "hasNextPage": false, - "startCursor": "Int[4]:3362", - "endCursor": "Int[4]:3364" + "startCursor": "Int[5]:15821", + "endCursor": "Int[5]:15850" } } } "#, ) } - From ea5ccda8306b94f54ec29c53bef6deb3e2ecdba7 Mon Sep 17 00:00:00 2001 From: Panagiotis Karatakis Date: Sun, 9 Oct 2022 19:27:50 +0300 Subject: [PATCH 07/11] Fix mysql tests --- examples/mysql/tests/query_tests.rs | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/examples/mysql/tests/query_tests.rs b/examples/mysql/tests/query_tests.rs index ec8ae842..c5efd417 100644 --- a/examples/mysql/tests/query_tests.rs +++ b/examples/mysql/tests/query_tests.rs @@ -281,8 +281,8 @@ async fn test_cursor_pagination() { "pageInfo": { "hasPreviousPage": false, "hasNextPage": true, - "startCursor": "SmallUnsigned[3]:342", - "endCursor": "SmallUnsigned[4]:5550" + "startCursor": "Int[3]:342", + "endCursor": "Int[4]:5550" } } } @@ -355,8 +355,8 @@ async fn test_cursor_pagination_prev() { "pageInfo": { "hasPreviousPage": true, "hasNextPage": true, - "startCursor": "SmallUnsigned[4]:6409", - "endCursor": "SmallUnsigned[4]:9803" + "startCursor": "Int[4]:6409", + "endCursor": "Int[4]:9803" } } } @@ -420,8 +420,8 @@ async fn test_cursor_pagination_no_next() { "pageInfo": { "hasPreviousPage": true, "hasNextPage": false, - "startCursor": "SmallUnsigned[5]:15821", - "endCursor": "SmallUnsigned[5]:15850" + "startCursor": "Int[5]:15821", + "endCursor": "Int[5]:15850" } } } From 58b274e544b812cab281f85a83b080e80c5ace44 Mon Sep 17 00:00:00 2001 From: Panagiotis Karatakis Date: Mon, 10 Oct 2022 22:33:03 +0300 Subject: [PATCH 08/11] Unify cursor and page pagination --- derive/src/root_query.rs | 348 ++++++++++++++++++--------------------- 1 file changed, 164 insertions(+), 184 deletions(-) diff --git a/derive/src/root_query.rs b/derive/src/root_query.rs index 6829f0bc..1ca92290 100644 --- a/derive/src/root_query.rs +++ b/derive/src/root_query.rs @@ -1,4 +1,3 @@ -use heck::ToUpperCamelCase; use proc_macro2::{Ident, TokenStream}; use quote::{format_ident, quote}; @@ -24,48 +23,24 @@ pub fn root_query_fn( }) .collect::, crate::error::Error>>()?; - let names: Vec = paths - .iter() - .map(|path| { - let name = path.clone().into_iter().last().unwrap().to_string(); - let name = format!("Paginated{}Result", name.to_upper_camel_case()); - - quote! { - #[graphql(concrete(name = #name, params(#path::Model)))] - } - }) - .collect(); - let queries: Vec = paths .iter() .map(|path| { let name = format_ident!("{}", path.clone().into_iter().last().unwrap().to_string()); - let name_cursor = format_ident!( - "{}_cursor", - path.clone().into_iter().last().unwrap().to_string() - ); let basic_query = basic_query(&name, path); - let cursor_query = cursor_query(&name_cursor, path); - quote! { #basic_query - - #cursor_query } }) .collect(); - let basic_dependencies = basic_dependencies(names); - - let cursor_dependencies = cursor_dependencies(); + let basic_dependencies = basic_dependencies(); Ok(quote! 
{ #basic_dependencies - #cursor_dependencies - #[async_graphql::Object] impl #ident { #(#queries)* @@ -79,74 +54,15 @@ pub fn basic_query(name: &Ident, path: &TokenStream) -> TokenStream { &self, ctx: &async_graphql::Context<'a>, filters: Option<#path::Filter>, - pagination: Option, - order_by: Option<#path::OrderBy>, - ) -> PaginatedResult<#path::Model> { - use sea_orm::prelude::*; - - println!("filters: {:?}", filters); - - let db: &crate::DatabaseConnection = ctx.data::().unwrap(); - let stmt = #path::Entity::find() - .filter(#path::filter_recursive(filters)); - - let stmt = #path::order_by(stmt, order_by); - - if let Some(pagination) = pagination { - let paginator = stmt.paginate(db, pagination.limit); - let data: Vec<#path::Model> = - paginator.fetch_page(pagination.page).await.unwrap(); - let pages = paginator.num_pages().await.unwrap(); - PaginatedResult { - data, - pages, - current: pagination.page, - } - } else { - let data: Vec<#path::Model> = stmt.all(db).await.unwrap(); - PaginatedResult { - data, - pages: 1, - current: 1, - } - } - } - } -} - -pub fn basic_dependencies(names: Vec) -> TokenStream { - quote! { - #[derive(Debug, async_graphql::InputObject)] - pub struct PaginationInput { - pub limit: usize, - pub page: usize, - } - - #[derive(Debug, async_graphql::SimpleObject)] - #(#names)* - pub struct PaginatedResult { - pub data: Vec, - pub pages: usize, - pub current: usize, - } - } -} - -pub fn cursor_query(name: &Ident, path: &TokenStream) -> TokenStream { - quote! { - pub async fn #name<'a>( - &self, - ctx: &async_graphql::Context<'a>, - filters: Option<#path::Filter>, - cursor: CursorPaginationInput, + pagination: Option, order_by: Option<#path::OrderBy>, - ) -> async_graphql::types::connection::Connection { + ) -> async_graphql::types::connection::Connection { use sea_orm::prelude::*; use sea_orm::Iterable; use seaography::itertools::Itertools; use async_graphql::types::connection::CursorType; - println!("cursor_filters: {:?}", filters); + println!("filters: {:?}", filters); let db: &crate::DatabaseConnection = ctx.data::().unwrap(); let stmt = #path::Entity::find() @@ -154,134 +70,198 @@ pub fn cursor_query(name: &Ident, path: &TokenStream) -> TokenStream { let stmt = #path::order_by(stmt, order_by); - let next_stmt = stmt.clone(); - let previous_stmt = stmt.clone(); - - fn apply_stmt_cursor_by(stmt: sea_orm::entity::prelude::Select<#path::Entity>) -> sea_orm::Cursor> { - if #path::PrimaryKey::iter().len() == 1 { - let column = #path::PrimaryKey::iter().map(|variant| variant.into_column()).collect::>()[0]; - stmt.cursor_by(column) - } else if #path::PrimaryKey::iter().len() == 2 { - let columns = #path::PrimaryKey::iter().map(|variant| variant.into_column()).collect_tuple::<(#path::Column, #path::Column)>().unwrap(); - stmt.cursor_by(columns) - } else if #path::PrimaryKey::iter().len() == 3 { - let columns = #path::PrimaryKey::iter().map(|variant| variant.into_column()).collect_tuple::<(#path::Column, #path::Column, #path::Column)>().unwrap(); - stmt.cursor_by(columns) - } else { - panic!("seaography does not support cursors with size greater than 3") - } - } + fn get_result( + data: Vec<#path::Model>, + has_previous_page: bool, + has_next_page: bool, + pages: Option, + current: Option + ) -> async_graphql::types::connection::Connection< + String, + #path::Model, + ExtraPaginationFields, + async_graphql::types::connection::EmptyFields + > { + let edges: Vec> = data + .into_iter() + .map(|node| { + let values: Vec = #path::PrimaryKey::iter() + .map(|variant| { + 
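// Each edge's cursor is derived from the row itself: every primary-key column is
// read off the model here and encoded into one segment of the cursor string, so a
// payment row with id 5550 becomes the edge cursor "Int[4]:5550" (as in the example
// tests), and a composite key would contribute one comma-separated segment per column.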
node.get(variant.into_column()) + }) + .collect(); + + let cursor_string = CursorValues(values).encode_cursor(); + + async_graphql::types::connection::Edge::new(cursor_string, node) + }) + .collect(); + + let mut result = async_graphql::types::connection::Connection::< + String, + #path::Model, + ExtraPaginationFields, + async_graphql::types::connection::EmptyFields + >::with_additional_fields( + has_previous_page, + has_next_page, + ExtraPaginationFields { + pages, + current + } + ); - let mut stmt = apply_stmt_cursor_by(stmt); + result.edges.extend(edges); - if let Some(cursor_string) = cursor.cursor { - let values = CursorValues::decode_cursor(cursor_string.as_str()).unwrap(); + result + } - let cursor_values: sea_orm::sea_query::value::ValueTuple = map_cursor_values(values.0); + if let Some(pagination) = pagination { - stmt.after(cursor_values); - } + match pagination { + Pagination::Pages(pagination) => { + let paginator = stmt.paginate(db, pagination.limit); + + let data: Vec<#path::Model> = paginator + .fetch_page(pagination.page) + .await + .unwrap(); + + let pages = paginator + .num_pages() + .await + .unwrap(); + + get_result(data, pagination.page != 1, pagination.page < pages, Some(pages), Some(pagination.page)) + }, + Pagination::Cursor(cursor) => { + let next_stmt = stmt.clone(); + let previous_stmt = stmt.clone(); + + fn apply_stmt_cursor_by(stmt: sea_orm::entity::prelude::Select<#path::Entity>) -> sea_orm::Cursor> { + if #path::PrimaryKey::iter().len() == 1 { + let column = #path::PrimaryKey::iter().map(|variant| variant.into_column()).collect::>()[0]; + stmt.cursor_by(column) + } else if #path::PrimaryKey::iter().len() == 2 { + let columns = #path::PrimaryKey::iter().map(|variant| variant.into_column()).collect_tuple::<(#path::Column, #path::Column)>().unwrap(); + stmt.cursor_by(columns) + } else if #path::PrimaryKey::iter().len() == 3 { + let columns = #path::PrimaryKey::iter().map(|variant| variant.into_column()).collect_tuple::<(#path::Column, #path::Column, #path::Column)>().unwrap(); + stmt.cursor_by(columns) + } else { + panic!("seaography does not support cursors with size greater than 3") + } + } - let data = stmt - .first(cursor.limit) - .all(db) - .await - .unwrap(); + let mut stmt = apply_stmt_cursor_by(stmt); - let has_next_page: bool = { - let mut next_stmt = apply_stmt_cursor_by(next_stmt); + if let Some(cursor_string) = cursor.cursor { + let values = CursorValues::decode_cursor(cursor_string.as_str()).unwrap(); - let last_node = data.last(); + let cursor_values: sea_orm::sea_query::value::ValueTuple = map_cursor_values(values.0); - if let Some(node) = last_node { - let values: Vec = #path::PrimaryKey::iter() - .map(|variant| { - node.get(variant.into_column()) - }) - .collect(); + stmt.after(cursor_values); + } - let values = map_cursor_values(values); + let data = stmt + .first(cursor.limit) + .all(db) + .await + .unwrap(); - let next_data = next_stmt - .first(cursor.limit) - .after(values) - .all(db) - .await - .unwrap(); + let has_next_page: bool = { + let mut next_stmt = apply_stmt_cursor_by(next_stmt); - next_data.len() != 0 - } else { - false - } - }; + let last_node = data.last(); - let has_previous_page: bool = { - let mut previous_stmt = apply_stmt_cursor_by(previous_stmt); + if let Some(node) = last_node { + let values: Vec = #path::PrimaryKey::iter() + .map(|variant| { + node.get(variant.into_column()) + }) + .collect(); - let first_node = data.first(); + let values = map_cursor_values(values); - if let Some(node) = first_node { - let values: Vec = 
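// Previous-page probe: encode the key of the first row on the current page, re-run
// the same cursored select with `.before(that key)` and the same limit, and report a
// previous page iff any rows come back; the `has_next_page` block above does the
// mirror-image check with `.after(...)` on the last row of the page.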
#path::PrimaryKey::iter() - .map(|variant| { - node.get(variant.into_column()) - }) - .collect(); + let next_data = next_stmt + .first(cursor.limit) + .after(values) + .all(db) + .await + .unwrap(); - let values = map_cursor_values(values); + next_data.len() != 0 + } else { + false + } + }; - let previous_data = previous_stmt - .first(cursor.limit) - .before(values) - .all(db) - .await - .unwrap(); + let has_previous_page: bool = { + let mut previous_stmt = apply_stmt_cursor_by(previous_stmt); - previous_data.len() != 0 - } else { - false - } - }; + let first_node = data.first(); - let edges: Vec> = data - .into_iter() - .map(|node| { - let values: Vec = #path::PrimaryKey::iter() - .map(|variant| { - node.get(variant.into_column()) - }) - .collect(); + if let Some(node) = first_node { + let values: Vec = #path::PrimaryKey::iter() + .map(|variant| { + node.get(variant.into_column()) + }) + .collect(); - let cursor_string = CursorValues(values).encode_cursor(); + let values = map_cursor_values(values); - async_graphql::types::connection::Edge::new(cursor_string, node) - }) - .collect(); + let previous_data = previous_stmt + .first(cursor.limit) + .before(values) + .all(db) + .await + .unwrap(); - let mut result = async_graphql::types::connection::Connection::< - String, - #path::Model, - async_graphql::types::connection::EmptyFields, - async_graphql::types::connection::EmptyFields - >::new( - has_previous_page, - has_next_page - ); + previous_data.len() != 0 + } else { + false + } + }; - result.edges.extend(edges); + get_result(data, has_previous_page, has_next_page, None, None) + } + } + } else { + let data: Vec<#path::Model> = stmt.all(db).await.unwrap(); - result + get_result(data, false, false, Some(1), Some(1)) + } } } } -pub fn cursor_dependencies() -> TokenStream { +pub fn basic_dependencies() -> TokenStream { quote! 
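// The block below emits the pagination types shared by every generated resolver:
// `PageInput` and `CursorInput`, the `Pagination` oneof that selects between them,
// and `ExtraPaginationFields` (`pages` / `current`), which the page-based branch
// populates and the cursor branch leaves as null. Both styles go through the same
// `pagination` argument; queries adapted from the updated tests:
//
//     customer(filters: {active: {eq: 0}}, pagination: {pages: {page: 2, limit: 3}}) {
//       nodes { customerId } pages current
//     }
//
//     payment(filters: {amount: {gt: "11"}}, pagination: {cursor: {limit: 5}}) {
//       edges { node { paymentId } } pageInfo { hasNextPage endCursor }
//     }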
{ #[derive(Debug, async_graphql::InputObject)] - pub struct CursorPaginationInput { + pub struct PageInput { + pub limit: usize, + pub page: usize, + } + + #[derive(Debug, async_graphql::InputObject)] + pub struct CursorInput { pub cursor: Option, pub limit: u64, } + #[derive(async_graphql::OneofObject)] + pub enum Pagination { + Pages(PageInput), + Cursor(CursorInput), + } + + + #[derive(async_graphql::SimpleObject)] + pub struct ExtraPaginationFields { + pub pages: Option, + pub current: Option, + } + #[derive(Debug)] pub enum DecodeMode { Type, @@ -540,4 +520,4 @@ pub fn cursor_dependencies() -> TokenStream { } } } -} +} \ No newline at end of file From a731afb441c63ee00ae28f5e587de2178c895c6b Mon Sep 17 00:00:00 2001 From: Panagiotis Karatakis Date: Mon, 10 Oct 2022 22:33:09 +0300 Subject: [PATCH 09/11] Fix tests --- examples/mysql/tests/query_tests.rs | 167 ++++++++++--------- examples/postgres/tests/query_tests.rs | 219 +++++++++++++------------ examples/sqlite/tests/query_tests.rs | 71 ++++---- 3 files changed, 251 insertions(+), 206 deletions(-) diff --git a/examples/mysql/tests/query_tests.rs b/examples/mysql/tests/query_tests.rs index c5efd417..7f1ae385 100644 --- a/examples/mysql/tests/query_tests.rs +++ b/examples/mysql/tests/query_tests.rs @@ -35,24 +35,24 @@ async fn test_simple_query() { schema .execute( r#" - { - store { - data { - storeId - staff { - firstName - lastName - } - } - } + { + store { + nodes { + storeId + staff { + firstName + lastName } - "#, + } + } + } + "#, ) .await, r#" { "store": { - "data": [ + "nodes": [ { "storeId": 1, "staff": { @@ -70,7 +70,7 @@ async fn test_simple_query() { ] } } - "#, + "#, ) } @@ -82,24 +82,24 @@ async fn test_simple_query_with_filter() { schema .execute( r#" - { - store(filters: {storeId:{eq: 1}}) { - data { - storeId - staff { - firstName - lastName - } + { + store(filters: {storeId:{eq: 1}}) { + nodes { + storeId + staff { + firstName + lastName } } - } - "#, + } + } + "#, ) .await, r#" { "store": { - "data": [ + "nodes": [ { "storeId": 1, "staff": { @@ -110,7 +110,7 @@ async fn test_simple_query_with_filter() { ] } } - "#, + "#, ) } @@ -122,37 +122,40 @@ async fn test_filter_with_pagination() { schema .execute( r#" - { - customer (filters:{active:{eq: 0}}, pagination:{page: 2, limit: 3}) { - data { - customerId + { + customer( + filters: { active: { eq: 0 } } + pagination: { pages: { page: 2, limit: 3 } } + ) { + nodes { + customerId + } + pages + current } - pages - current } - } - "#, + "#, ) .await, r#" - { - "customer": { - "data": [ - { - "customerId": 315 - }, - { - "customerId": 368 - }, - { - "customerId": 406 - } - ], - "pages": 5, - "current": 2 - } + { + "customer": { + "nodes": [ + { + "customerId": 315 + }, + { + "customerId": 368 + }, + { + "customerId": 406 + } + ], + "pages": 5, + "current": 2 } - "#, + } + "#, ) } @@ -165,8 +168,11 @@ async fn test_complex_filter_with_pagination() { .execute( r#" { - payment(filters:{amount: { gt: "11.1" }}, pagination: {limit: 2, page: 3}) { - data { + payment( + filters: { amount: { gt: "11.1" } } + pagination: { pages: { limit: 2, page: 3 } } + ) { + nodes { paymentId amount } @@ -174,27 +180,27 @@ async fn test_complex_filter_with_pagination() { current } } - "#, + "#, ) .await, r#" - { - "payment": { - "data": [ - { - "paymentId": 8272, - "amount": "11.99" - }, - { - "paymentId": 9803, - "amount": "11.99" - } - ], - "pages": 5, - "current": 3 - } + { + "payment": { + "nodes": [ + { + "paymentId": 8272, + "amount": "11.99" + }, + { + "paymentId": 9803, + "amount": 
"11.99" + } + ], + "pages": 5, + "current": 3 } - "#, + } + "#, ) } @@ -207,7 +213,10 @@ async fn test_cursor_pagination() { .execute( r#" { - paymentCursor(filters: {amount: {gt: "11"}}, cursor: {limit: 5}) { + payment( + filters: { amount: { gt: "11" } } + pagination: { cursor: { limit: 5 } } + ) { edges { node { paymentId @@ -230,7 +239,7 @@ async fn test_cursor_pagination() { .await, r#" { - "paymentCursor": { + "payment": { "edges": [ { "node": { @@ -299,7 +308,10 @@ async fn test_cursor_pagination_prev() { .execute( r#" { - paymentCursor(filters: {amount: {gt: "11"}}, cursor: {limit: 3, cursor: "SmallUnsigned[4]:5550"}) { + payment( + filters: { amount: { gt: "11" } } + pagination: { cursor: { limit: 3, cursor: "SmallUnsigned[4]:5550" } } + ) { edges { node { paymentId @@ -322,7 +334,7 @@ async fn test_cursor_pagination_prev() { .await, r#" { - "paymentCursor": { + "payment": { "edges": [ { "node": { @@ -373,7 +385,10 @@ async fn test_cursor_pagination_no_next() { .execute( r#" { - paymentCursor(filters: {amount: {gt: "11"}}, cursor: {limit: 3, cursor: "SmallUnsigned[4]:9803"}) { + payment( + filters: { amount: { gt: "11" } } + pagination: { cursor: { limit: 3, cursor: "SmallUnsigned[4]:9803" } } + ) { edges { node { paymentId @@ -396,7 +411,7 @@ async fn test_cursor_pagination_no_next() { .await, r#" { - "paymentCursor": { + "payment": { "edges": [ { "node": { @@ -427,4 +442,4 @@ async fn test_cursor_pagination_no_next() { } "#, ) -} +} \ No newline at end of file diff --git a/examples/postgres/tests/query_tests.rs b/examples/postgres/tests/query_tests.rs index 43feeb38..5c746f0a 100644 --- a/examples/postgres/tests/query_tests.rs +++ b/examples/postgres/tests/query_tests.rs @@ -3,7 +3,7 @@ use sea_orm::Database; use seaography_postgres_example::{OrmDataloader, QueryRoot}; pub async fn get_schema() -> Schema { - let database = Database::connect("postgres://sea:sea@127.0.0.1/sakila") + let database = Database::connect("postgres://postgres:postgres@127.0.0.1/sakila") .await .unwrap(); let orm_dataloader: DataLoader = DataLoader::new( @@ -35,42 +35,42 @@ async fn test_simple_query() { schema .execute( r#" - { - store { - data { - storeId - staff { - firstName - lastName - } + { + store { + nodes { + storeId + staff { + firstName + lastName } } } - "#, + } + "#, ) .await, r#" - { - "store": { - "data": [ - { - "storeId": 1, - "staff": { - "firstName": "Mike", - "lastName": "Hillyer" - } - }, - { - "storeId": 2, - "staff": { - "firstName": "Jon", - "lastName": "Stephens" - } + { + "store": { + "nodes": [ + { + "storeId": 1, + "staff": { + "firstName": "Mike", + "lastName": "Hillyer" } - ] - } + }, + { + "storeId": 2, + "staff": { + "firstName": "Jon", + "lastName": "Stephens" + } + } + ] } - "#, + } + "#, ) } @@ -82,35 +82,35 @@ async fn test_simple_query_with_filter() { schema .execute( r#" - { - store(filters: {storeId:{eq: 1}}) { - data { - storeId - staff { - firstName - lastName - } + { + store(filters: {storeId:{eq: 1}}) { + nodes { + storeId + staff { + firstName + lastName } } - } - "#, + } + } + "#, ) .await, r#" - { - "store": { - "data": [ - { - "storeId": 1, - "staff": { - "firstName": "Mike", - "lastName": "Hillyer" - } + { + "store": { + "nodes": [ + { + "storeId": 1, + "staff": { + "firstName": "Mike", + "lastName": "Hillyer" } - ] - } + } + ] } - "#, + } + "#, ) } @@ -122,37 +122,40 @@ async fn test_filter_with_pagination() { schema .execute( r#" - { - customer (filters:{active:{eq: 0}}, pagination:{page: 2, limit: 3}) { - data { - customerId + { + customer( + filters: { 
active: { eq: 0 } } + pagination: { pages: { page: 2, limit: 3 } } + ) { + nodes { + customerId + } + pages + current } - pages - current } - } - "#, + "#, ) .await, r#" - { - "customer": { - "data": [ - { - "customerId": 315 - }, - { - "customerId": 368 - }, - { - "customerId": 406 - } - ], - "pages": 5, - "current": 2 - } + { + "customer": { + "nodes": [ + { + "customerId": 315 + }, + { + "customerId": 368 + }, + { + "customerId": 406 + } + ], + "pages": 5, + "current": 2 } - "#, + } + "#, ) } @@ -165,8 +168,11 @@ async fn test_complex_filter_with_pagination() { .execute( r#" { - payment(filters:{amount: { gt: "11.1" }}, pagination: {limit: 2, page: 3}) { - data { + payment( + filters: { amount: { gt: "11.1" } } + pagination: { pages: { limit: 2, page: 3 } } + ) { + nodes { paymentId amount } @@ -174,27 +180,27 @@ async fn test_complex_filter_with_pagination() { current } } - "#, + "#, ) .await, r#" - { - "payment": { - "data": [ - { - "paymentId": 8272, - "amount": "11.9900" - }, - { - "paymentId": 9803, - "amount": "11.9900" - } - ], - "pages": 5, - "current": 3 + { + "payment": { + "nodes": [ + { + "paymentId": 8272, + "amount": "11.9900" + }, + { + "paymentId": 9803, + "amount": "11.9900" } - } - "#, + ], + "pages": 5, + "current": 3 + } + } + "#, ) } @@ -207,7 +213,10 @@ async fn test_cursor_pagination() { .execute( r#" { - paymentCursor(filters: {amount: {gt: "11"}}, cursor: {limit: 5}) { + payment( + filters: { amount: { gt: "11" } } + pagination: { cursor: { limit: 5 } } + ) { edges { node { paymentId @@ -230,7 +239,7 @@ async fn test_cursor_pagination() { .await, r#" { - "paymentCursor": { + "payment": { "edges": [ { "node": { @@ -299,7 +308,10 @@ async fn test_cursor_pagination_prev() { .execute( r#" { - paymentCursor(filters: {amount: {gt: "11"}}, cursor: {limit: 3, cursor: "SmallUnsigned[4]:5550"}) { + payment( + filters: { amount: { gt: "11" } } + pagination: { cursor: { limit: 3, cursor: "SmallUnsigned[4]:5550" } } + ) { edges { node { paymentId @@ -322,7 +334,7 @@ async fn test_cursor_pagination_prev() { .await, r#" { - "paymentCursor": { + "payment": { "edges": [ { "node": { @@ -373,7 +385,10 @@ async fn test_cursor_pagination_no_next() { .execute( r#" { - paymentCursor(filters: {amount: {gt: "11"}}, cursor: {limit: 3, cursor: "SmallUnsigned[4]:9803"}) { + payment( + filters: { amount: { gt: "11" } } + pagination: { cursor: { limit: 3, cursor: "SmallUnsigned[4]:9803" } } + ) { edges { node { paymentId @@ -396,7 +411,7 @@ async fn test_cursor_pagination_no_next() { .await, r#" { - "paymentCursor": { + "payment": { "edges": [ { "node": { @@ -427,4 +442,4 @@ async fn test_cursor_pagination_no_next() { } "#, ) -} +} \ No newline at end of file diff --git a/examples/sqlite/tests/query_tests.rs b/examples/sqlite/tests/query_tests.rs index 071f87e5..f205bd23 100644 --- a/examples/sqlite/tests/query_tests.rs +++ b/examples/sqlite/tests/query_tests.rs @@ -35,7 +35,7 @@ async fn test_simple_query() { r#" { store { - data { + nodes { storeId staff { firstName @@ -50,7 +50,7 @@ async fn test_simple_query() { r#" { "store": { - "data": [ + "nodes": [ { "storeId": 1, "staff": { @@ -82,7 +82,7 @@ async fn test_simple_query_with_filter() { r#" { store(filters: {storeId:{eq: 1}}) { - data { + nodes { storeId staff { firstName @@ -97,7 +97,7 @@ async fn test_simple_query_with_filter() { r#" { "store": { - "data": [ + "nodes": [ { "storeId": 1, "staff": { @@ -120,22 +120,25 @@ async fn test_filter_with_pagination() { schema .execute( r#" - { - customer (filters:{active:{eq: 0}}, 
pagination:{page: 2, limit: 3}) { - data { - customerId + { + customer( + filters: { active: { eq: 0 } } + pagination: { pages: { page: 2, limit: 3 } } + ) { + nodes { + customerId + } + pages + current + } } - pages - current - } - } "#, ) .await, r#" { "customer": { - "data": [ + "nodes": [ { "customerId": 315 }, @@ -162,23 +165,26 @@ async fn test_complex_filter_with_pagination() { schema .execute( r#" - { - payment(filters:{amount: { gt: "11.1" }}, pagination: {limit: 2, page: 3}) { - data { - paymentId - amount + { + payment( + filters: { amount: { gt: "11.1" } } + pagination: { pages: { limit: 2, page: 3 } } + ) { + nodes { + paymentId + amount + } + pages + current } - pages - current } - } "#, ) .await, r#" { "payment": { - "data": [ + "nodes": [ { "paymentId": 8272, "amount": "11.99" @@ -205,7 +211,10 @@ async fn test_cursor_pagination() { .execute( r#" { - paymentCursor(filters: {amount: {gt: "11"}}, cursor: {limit: 5}) { + payment( + filters: { amount: { gt: "11" } } + pagination: { cursor: { limit: 5 } } + ) { edges { node { paymentId @@ -228,7 +237,7 @@ async fn test_cursor_pagination() { .await, r#" { - "paymentCursor": { + "payment": { "edges": [ { "node": { @@ -297,7 +306,10 @@ async fn test_cursor_pagination_prev() { .execute( r#" { - paymentCursor(filters: {amount: {gt: "11"}}, cursor: {limit: 3, cursor: "SmallUnsigned[4]:5550"}) { + payment( + filters: { amount: { gt: "11" } } + pagination: { cursor: { limit: 3, cursor: "SmallUnsigned[4]:5550" } } + ) { edges { node { paymentId @@ -320,7 +332,7 @@ async fn test_cursor_pagination_prev() { .await, r#" { - "paymentCursor": { + "payment": { "edges": [ { "node": { @@ -371,7 +383,10 @@ async fn test_cursor_pagination_no_next() { .execute( r#" { - paymentCursor(filters: {amount: {gt: "11"}}, cursor: {limit: 3, cursor: "SmallUnsigned[4]:9803"}) { + payment( + filters: { amount: { gt: "11" } } + pagination: { cursor: { limit: 3, cursor: "SmallUnsigned[4]:9803" } } + ) { edges { node { paymentId @@ -394,7 +409,7 @@ async fn test_cursor_pagination_no_next() { .await, r#" { - "paymentCursor": { + "payment": { "edges": [ { "node": { From 8aa5c141744a5b3e68a85d6e8a4f4911e3be40a4 Mon Sep 17 00:00:00 2001 From: Panagiotis Karatakis Date: Tue, 11 Oct 2022 14:58:05 +0300 Subject: [PATCH 10/11] Fix format --- derive/src/root_query.rs | 2 +- examples/postgres/tests/query_tests.rs | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/derive/src/root_query.rs b/derive/src/root_query.rs index 1ca92290..21fd555e 100644 --- a/derive/src/root_query.rs +++ b/derive/src/root_query.rs @@ -520,4 +520,4 @@ pub fn basic_dependencies() -> TokenStream { } } } -} \ No newline at end of file +} diff --git a/examples/postgres/tests/query_tests.rs b/examples/postgres/tests/query_tests.rs index 5c746f0a..0629fae9 100644 --- a/examples/postgres/tests/query_tests.rs +++ b/examples/postgres/tests/query_tests.rs @@ -3,7 +3,7 @@ use sea_orm::Database; use seaography_postgres_example::{OrmDataloader, QueryRoot}; pub async fn get_schema() -> Schema { - let database = Database::connect("postgres://postgres:postgres@127.0.0.1/sakila") + let database = Database::connect("postgres://sea:sea@127.0.0.1/sakila") .await .unwrap(); let orm_dataloader: DataLoader = DataLoader::new( From 8bbc5fed37035192d0efe202ab4ecda04ce79cac Mon Sep 17 00:00:00 2001 From: Panagiotis Karatakis Date: Wed, 12 Oct 2022 21:58:40 +0300 Subject: [PATCH 11/11] Update Readme --- README.md | 35 ++++++++++++++++++++++++++++++----- 1 file changed, 30 insertions(+), 5 deletions(-) diff 
--git a/README.md b/README.md index 17ec6174..46721216 100644 --- a/README.md +++ b/README.md @@ -58,8 +58,8 @@ Go to http://localhost:8000/ and try out the following queries: ```graphql { - film(pagination: { limit: 10, page: 0 }, orderBy: { title: ASC }) { - data { + film(pagination: { pages: { limit: 10, page: 0 } }, orderBy: { title: ASC }) { + nodes { title description releaseYear @@ -79,7 +79,7 @@ Go to http://localhost:8000/ and try out the following queries: ```graphql { store(filters: { storeId: { eq: 1 } }) { - data { + nodes { storeId address { address @@ -98,8 +98,11 @@ Go to http://localhost:8000/ and try out the following queries: ```graphql { - customer(filters: { active: { eq: 0 } }, pagination: { page: 2, limit: 3 }) { - data { + customer( + filters: { active: { eq: 0 } } + pagination: { pages: { page: 2, limit: 3 } } + ) { + nodes { customerId lastName email @@ -110,6 +113,28 @@ Go to http://localhost:8000/ and try out the following queries: } ``` +### The query above using cursor pagination + +```graphql +{ + customer( + filters: { active: { eq: 0 } } + pagination: { cursor: { limit: 3, cursor: "Int[3]:271" } } + ) { + nodes { + customerId + lastName + email + } + pageInfo { + hasPreviousPage + hasNextPage + endCursor + } + } +} +``` + ### Postgres Setup the [sakila](https://github.com/SeaQL/seaography/blob/main/examples/postgres/sakila-schema.sql) sample database.
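
### Pagination inputs at a glance

For reference, the `Pagination` oneof that this series introduces in `derive/src/root_query.rs` maps onto GraphQL input types roughly like the sketch below. This is inferred from the Rust derive code (`PageInput`, `CursorInput`, `Pagination`), not copied from generated schema output, so the exact scalar types and nullability are assumptions:

```graphql
# Rough sketch of the pagination inputs generated by the derive macro.
# Scalar types are illustrative; the Rust side uses usize/u64 for limit and page.

input PageInput {
  limit: Int!
  page: Int!
}

input CursorInput {
  # Opaque cursor value, e.g. "Int[3]:271"; omit it to start from the first row.
  cursor: String
  limit: Int!
}

# Oneof input: supply exactly one of `pages` or `cursor`.
input Pagination {
  pages: PageInput
  cursor: CursorInput
}
```

With `pages`, the connection exposes the extra `pages` and `current` fields; with `cursor`, the Relay-style `edges` and `pageInfo` fields apply, as shown in the queries above and in `examples/*/tests/query_tests.rs`.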