diff --git a/.github/workflows/tests.yaml b/.github/workflows/tests.yaml index 48c79efb..0f3a137c 100644 --- a/.github/workflows/tests.yaml +++ b/.github/workflows/tests.yaml @@ -1,24 +1,24 @@ name: tests on: - pull_request: - paths-ignore: - - '**.md' - - '.github/ISSUE_TEMPLATE/**' - push: - paths-ignore: - - '**.md' - - '.github/ISSUE_TEMPLATE/**' - branches: - - main - - pr/**/ci + pull_request: + paths-ignore: + - "**.md" + - ".github/ISSUE_TEMPLATE/**" + push: + paths-ignore: + - "**.md" + - ".github/ISSUE_TEMPLATE/**" + branches: + - main + - pr/**/ci concurrency: - group: ${{ github.workflow }}-${{ github.head_ref || github.ref || github.run_id }} - cancel-in-progress: true + group: ${{ github.workflow }}-${{ github.head_ref || github.ref || github.run_id }} + cancel-in-progress: true env: - CARGO_TERM_COLOR: always + CARGO_TERM_COLOR: always jobs: check: @@ -88,21 +88,32 @@ jobs: - uses: actions/checkout@v2 - uses: actions-rs/toolchain@v1 with: - profile: minimal - toolchain: stable - override: true + profile: minimal + toolchain: stable + override: true + - name: Install sea-orm-cli + uses: baptiste0928/cargo-install@v2 + with: + crate: sea-orm-cli + version: '0.11.0' - name: Remove generated folder run: rm -rf ./examples/sqlite/src - name: Copy sample database run: cp ./examples/sqlite/sakila.db . - - uses: actions-rs/cargo@v1 + - name: Generate entities + run: sea-orm-cli generate entity -o examples/sqlite/src/entities -u sqlite://sakila.db + - name: Generate Seaography project + uses: actions-rs/cargo@v1 with: command: run args: > - --package seaography-cli -- - -f poem sqlite://sakila.db seaography-sqlite-example ./examples/sqlite + --package seaography-cli -- + ./examples/sqlite ./examples/sqlite/src/entities sqlite://sakila.db seaography-sqlite-example -f poem - name: Depends on local seaography run: sed -i '/^\[dependencies.seaography\]$/a \path = "..\/..\/"' ./examples/sqlite/Cargo.toml + - name: Build example + working-directory: ./examples/sqlite + run: cargo build - name: Integration tests working-directory: ./examples/sqlite run: cargo test @@ -120,21 +131,29 @@ jobs: profile: minimal toolchain: stable override: true - - name: Build example - working-directory: ./examples/sqlite - run: cargo build + - name: Install sea-orm-cli + uses: baptiste0928/cargo-install@v2 + with: + crate: sea-orm-cli + version: '0.11.0' - name: Remove generated folder run: rm -rf ./examples/sqlite/src - name: Copy sample database run: cp ./examples/sqlite/sakila.db . 
- - uses: actions-rs/cargo@v1 + - name: Generate entities + run: sea-orm-cli generate entity -o examples/sqlite/src/entities -u sqlite://sakila.db + - name: Generate Seaography project + uses: actions-rs/cargo@v1 with: command: run args: > - --package seaography-cli -- - -f actix sqlite://sakila.db seaography-sqlite-example ./examples/sqlite + --package seaography-cli -- + ./examples/sqlite ./examples/sqlite/src/entities sqlite://sakila.db seaography-sqlite-example -f actix - name: Depends on local seaography run: sed -i '/^\[dependencies.seaography\]$/a \path = "..\/..\/"' ./examples/sqlite/Cargo.toml + - name: Build example + working-directory: ./examples/sqlite + run: cargo build - name: Integration tests working-directory: ./examples/sqlite run: cargo test @@ -167,9 +186,11 @@ jobs: profile: minimal toolchain: stable override: true - - name: Build example - working-directory: ./examples/mysql - run: cargo build + - name: Install sea-orm-cli + uses: baptiste0928/cargo-install@v2 + with: + crate: sea-orm-cli + version: '0.11.0' - name: Remove generated folder run: rm -rf ./examples/mysql/src - name: Create DB @@ -182,14 +203,20 @@ jobs: - name: Import DB Data run: mysql -uroot -h 127.0.0.1 sakila < sakila-data.sql working-directory: ./examples/mysql - - uses: actions-rs/cargo@v1 + - name: Generate entities + run: sea-orm-cli generate entity -o ./examples/mysql/src/entities -u mysql://sea:sea@127.0.0.1/sakila + - name: Generate Seaography project + uses: actions-rs/cargo@v1 with: command: run args: > - --package seaography-cli -- - mysql://sea:sea@127.0.0.1/sakila seaography-mysql-example ./examples/mysql + --package seaography-cli -- + ./examples/mysql ./examples/mysql/src/entities mysql://sea:sea@127.0.0.1/sakila seaography-mysql-example - name: Depends on local seaography run: sed -i '/^\[dependencies.seaography\]$/a \path = "..\/..\/"' ./examples/mysql/Cargo.toml + - name: Build example + working-directory: ./examples/mysql + run: cargo build - name: Integration tests working-directory: ./examples/mysql run: cargo test @@ -221,9 +248,11 @@ jobs: profile: minimal toolchain: stable override: true - - name: Build example - working-directory: ./examples/postgres - run: cargo build + - name: Install sea-orm-cli + uses: baptiste0928/cargo-install@v2 + with: + crate: sea-orm-cli + version: '0.11.0' - name: Remove generated folder run: rm -rf ./examples/postgres/src - name: Create DB @@ -234,14 +263,22 @@ jobs: - name: Import DB Data run: psql -q postgres://sea:sea@localhost/sakila < sakila-data.sql working-directory: ./examples/postgres - - uses: actions-rs/cargo@v1 + - name: Generate entities + run: sea-orm-cli generate entity -o ./examples/postgres/src/entities -u postgres://sea:sea@127.0.0.1/sakila?currentSchema=public + - name: Generate Seaography project + uses: actions-rs/cargo@v1 with: command: run args: > - --package seaography-cli -- - postgres://sea:sea@127.0.0.1/sakila?currentSchema=public seaography-postgres-example ./examples/postgres + --package seaography-cli -- + ./examples/postgres ./examples/postgres/src/entities postgres://sea:sea@127.0.0.1/sakila?currentSchema=public seaography-postgres-example - name: Depends on local seaography run: sed -i '/^\[dependencies.seaography\]$/a \path = "..\/..\/"' ./examples/postgres/Cargo.toml + - name: Fix Nullable not implemented for Vec and tsvector + run: sed -i "25,27d" ./examples/postgres/src/entities/film.rs + - name: Build example + working-directory: ./examples/postgres + run: cargo build - name: Integration tests 
working-directory: ./examples/postgres run: cargo test diff --git a/CHANGELOG.md b/CHANGELOG.md index 801ce09e..f345a227 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -5,6 +5,31 @@ All notable changes to this project will be documented in this file. The format is based on [Keep a Changelog](http://keepachangelog.com/) and this project adheres to [Semantic Versioning](http://semver.org/). +## 1.0.0 - 2023-03-25 + +Introducing the functional API of Seaography. Warning: this version has breaking changes, but they were a necessary sacrifice to make the project easier to maintain. With this version we have support for field guards and field renames. + +### Breaking changes +* Dropped the derive API in favor of a functional API + + SeaORM is a dynamic ORM for Rust, which means we can inspect table and column properties at runtime. Recently async-graphql added support for dynamic creation of GraphQL nodes. Utilizing the dynamic nature of both libraries, the Derive API is no longer needed and we developed a functional API instead. Moreover, for the project to live long it needs to be maintainable (easy to maintain) and extensible (easy to extend), and the Derive API was fairly complex compared to a functional API. To make the migration easier, we updated the Seaography generator to emit code using the new API. + +* Decoupled sea-orm-cli from seaography-cli + + Because we no longer have to extend the output produced by sea-orm-cli, we decoupled that dependency from Seaography to make future versions easier to maintain. + +* Dataloader optimizations are not introduced yet + + The Dataloader optimizations will be added in future versions. + +* Some renames in Connection node fields and pagination filtering + +### New Features + +* Functional API +* Field renames +* Field guards + ## 0.3.0 - 2022-12-02 ### New Features diff --git a/Cargo.toml b/Cargo.toml index 0396809d..9f4b96ce 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -2,14 +2,12 @@ members = [ ".", "cli", - "derive", - "discoverer", "generator", ] [package] name = "seaography" -version = "0.3.0" +version = "1.0.0" edition = "2021" rust-version = "1.60" authors = ["Panagiotis Karatakis "] @@ -22,15 +20,19 @@ keywords = ["async", "graphql", "mysql", "postgres", "sqlite"] categories = ["database"] [dependencies] -async-graphql = { version = "4.0.12", default-features = false } -seaography-derive = { version = "^0.3.0", path = "./derive" } -sea-orm = { version = "^0.10", default-features = false } -itertools = { version = "0.10.3" } -heck = { version = "0.4.0" } +async-graphql = { version = "5.0.6", features = ["decimal", "chrono", "dataloader", "dynamic-schema"] } +sea-orm = { version = "0.11.0", default-features = false } +itertools = { version = "0.10.5" } +heck = { version = "0.4.1" } [features] default = [] -with-chrono = ["seaography-derive/with-chrono", "sea-orm/with-chrono", "async-graphql/chrono"] -with-decimal = ["seaography-derive/with-decimal", "sea-orm/with-rust_decimal", "async-graphql/decimal"] -with-json = ["seaography-derive/with-json", "sea-orm/with-json"] -with-uuid = ["seaography-derive/with-uuid", "sea-orm/with-uuid"] +with-json = ["sea-orm/with-json"] +with-chrono = ["sea-orm/with-chrono", "async-graphql/chrono"] +with-time = ["sea-orm/with-time", "async-graphql/time"] +with-uuid = ["sea-orm/with-uuid"] +with-decimal = ["sea-orm/with-rust_decimal", "async-graphql/decimal"] +with-bigdecimal = ["sea-orm/with-bigdecimal", "async-graphql/bigdecimal"] +# with-postgres-array = 
["sea-orm/postgres-array"] +# with-ipnetwork = ["sea-orm/with-ipnetwork"] +# with-mac_address = ["sea-orm/with-mac_address"] \ No newline at end of file diff --git a/README.md b/README.md index 3635c88d..6622f522 100644 --- a/README.md +++ b/README.md @@ -26,9 +26,11 @@ ## Features * Relational query (1-to-1, 1-to-N) -* Pagination on query's root entity -* Filter with operators (e.g. gt, lt, eq) +* Pagination for queries and relations (1-N) +* Filtering with operators (e.g. gt, lt, eq) * Order by any column +* Guard fields, queries or relations +* Rename fields (Right now there is no mutation, but it's on our plan!) @@ -37,6 +39,7 @@ ### Install ```sh +cargo install sea-orm-cli # used to generate entities cargo install seaography-cli ``` @@ -46,7 +49,8 @@ Setup the [sakila](https://github.com/SeaQL/seaography/blob/main/examples/mysql/ ```sh cd examples/mysql -seaography-cli mysql://user:pw@localhost/sakila seaography-mysql-example . +sea-orm-cli generate entity -o src/entities -u mysql://user:pw@127.0.0.1/sakila +seaography-cli ./ src/entities mysql://user:pw@127.0.0.1/sakila seaography-mysql-example cargo run ``` @@ -56,17 +60,15 @@ Go to http://localhost:8000/ and try out the following queries: ```graphql { - film(pagination: { pages: { limit: 10, page: 0 } }, orderBy: { title: ASC }) { + film(pagination: { page: { limit: 10, page: 0 } }, orderBy: { title: ASC }) { nodes { title description releaseYear - filmActor { + actor { nodes { - actor { - firstName - lastName - } + firstName + lastName } } } @@ -100,15 +102,17 @@ Go to http://localhost:8000/ and try out the following queries: { customer( filters: { active: { eq: 0 } } - pagination: { pages: { page: 2, limit: 3 } } + pagination: { page: { page: 2, limit: 3 } } ) { nodes { customerId lastName email } - pages - current + paginationInfo { + pages + current + } } } ``` @@ -155,14 +159,16 @@ Find all inactive customers, include their address, and their payments with amou payment( filters: { amount: { gt: "7" } } orderBy: { amount: ASC } - pagination: { pages: { limit: 1, page: 1 } } + pagination: { page: { limit: 1, page: 1 } } ) { nodes { paymentId amount } - pages - current + paginationInfo { + pages + current + } pageInfo { hasPreviousPage hasNextPage @@ -178,13 +184,29 @@ Find all inactive customers, include their address, and their payments with amou } ``` +### Filter using enumeration +```graphql +{ + film( + filters: { rating: { eq: NC17 } } + pagination: { page: { page: 1, limit: 5 } } + ) { + nodes { + filmId + rating + } + } +} +``` + ### Postgres Setup the [sakila](https://github.com/SeaQL/seaography/blob/main/examples/postgres/sakila-schema.sql) sample database. ```sh cd examples/postgres -seaography-cli postgres://user:pw@localhost/sakila seaography-postgres-example . +sea-orm-cli generate entity -o src/entities -u postgres://user:pw@localhost/sakila +seaography-cli ./ src/entities postgres://user:pw@localhost/sakila seaography-postgres-example cargo run ``` @@ -192,7 +214,8 @@ cargo run ```sh cd examples/sqlite -seaography-cli sqlite://sakila.db seaography-sqlite-example . 
+sea-orm-cli generate entity -o src/entities -u sqlite://sakila.db +seaography-cli ./ src/entities sqlite://sakila.db seaography-sqlite-example cargo run ``` diff --git a/build-tools/bump-version.sh b/build-tools/bump-version.sh index 275602de..6b893dc6 100644 --- a/build-tools/bump-version.sh +++ b/build-tools/bump-version.sh @@ -1,20 +1,6 @@ #!/bin/bash set -e -# Bump `seaography-derive` version -cd derive -sed -i 's/^version.*$/version = "'$1'"/' Cargo.toml -git commit -am "seaography-derive $1" -cd .. -sleep 1 - -# Bump `seaography-discoverer` version -cd discoverer -sed -i 's/^version.*$/version = "'$1'"/' Cargo.toml -git commit -am "seaography-discoverer $1" -cd .. -sleep 1 - # Bump `seaography-generator` version cd generator sed -i 's/^version.*$/version = "'$1'"/' Cargo.toml @@ -25,7 +11,6 @@ sleep 1 # Bump `seaography-cli` version cd cli sed -i 's/^version.*$/version = "'$1'"/' Cargo.toml -sed -i 's/^seaography-discoverer [^,]*,/seaography-discoverer = { version = "\^'$1'",/' Cargo.toml sed -i 's/^seaography-generator [^,]*,/seaography-generator = { version = "\^'$1'",/' Cargo.toml git commit -am "seaography-cli $1" cd .. @@ -33,7 +18,6 @@ sleep 1 # Bump `seaography` version sed -i 's/^version.*$/version = "'$1'"/' Cargo.toml -sed -i 's/^seaography-derive [^,]*,/seaography-derive = { version = "\^'$1'",/' Cargo.toml git commit -am "$1" sleep 1 diff --git a/build-tools/cargo-publish.sh b/build-tools/cargo-publish.sh index 074536b9..f1905f08 100644 --- a/build-tools/cargo-publish.sh +++ b/build-tools/cargo-publish.sh @@ -1,16 +1,6 @@ #!/bin/bash set -e -cd derive -cargo publish -cd .. -sleep 10 - -cd discoverer -cargo publish -cd .. -sleep 10 - cd generator cargo publish cd .. diff --git a/cli/Cargo.toml b/cli/Cargo.toml index d012ae1d..d87b5ffe 100644 --- a/cli/Cargo.toml +++ b/cli/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "seaography-cli" -version = "0.3.0" +version = "1.0.0" edition = "2021" rust-version = "1.60" authors = ["Panagiotis Karatakis "] @@ -14,7 +14,6 @@ categories = ["database"] [dependencies] async-std = { version = "1.12.0", features = [ "attributes", "tokio1" ] } -clap = { version = "3.2.20", features = ["derive"] } -seaography-generator = { version = "^0.3.0", path = "../generator" } -seaography-discoverer = { version = "^0.3.0", path = "../discoverer" } -url = "2.2.2" \ No newline at end of file +clap = { version = "4.2.1", features = ["derive"] } +seaography-generator = { version = "^1.0.0", path = "../generator" } +url = "2.3.1" \ No newline at end of file diff --git a/cli/src/main.rs b/cli/src/main.rs index ca548da9..f9c3d478 100644 --- a/cli/src/main.rs +++ b/cli/src/main.rs @@ -1,35 +1,32 @@ -use clap::{ArgEnum, Parser}; +use clap::{Parser, ValueEnum}; use seaography_generator::write_project; #[derive(clap::Parser)] #[clap(author, version, about, long_about = None)] pub struct Args { - #[clap(value_parser)] + /// project destination folder + pub destination: String, + + /// entities folder to depend on + pub entities: String, + + /// database URL to write it in .env pub database_url: String, - #[clap(value_parser)] + /// crate name for generated project pub crate_name: String, - #[clap(value_parser)] - pub destination: String, - - #[clap(short, long)] - pub expanded_format: Option, + /// web framework + #[clap(short, long, value_enum, default_value_t = WebFrameworkEnum::Poem)] + pub framework: WebFrameworkEnum, - #[clap(short, long)] + /// GraphQL depth limit + #[clap(long)] pub depth_limit: Option, - #[clap(short, long)] + /// GraphQL complexity limit + 
#[clap(long)] pub complexity_limit: Option, - - #[clap(short, long)] - pub ignore_tables: Option, - - #[clap(short, long)] - pub hidden_tables: Option, - - #[clap(short, long, arg_enum, value_parser, default_value = "poem")] - pub framework: WebFrameworkEnum, } /** @@ -90,58 +87,21 @@ pub fn parse_database_url(database_url: &str) -> Result "sqlx-sqlite", - seaography_discoverer::SqlVersion::Mysql => "sqlx-mysql", - seaography_discoverer::SqlVersion::Postgres => "sqlx-postgres", - }; - - let expanded_format = args.expanded_format.unwrap_or(false); - - let ignore_tables = args - .ignore_tables - .unwrap_or_else(|| "seaql_migrations".into()); - let ignore_tables: Vec<&str> = ignore_tables.split(',').collect(); - - let hidden_tables = args.hidden_tables.unwrap_or(true); - - let tables: std::collections::BTreeMap< - String, - seaography_discoverer::sea_schema::sea_query::TableCreateStatement, - > = tables - .into_iter() - .filter(|(key, _)| { - if hidden_tables { - !key.starts_with('_') - } else { - true - } - }) - .filter(|(key, _)| { - if !ignore_tables.is_empty() { - !ignore_tables.contains(&key.as_str()) - } else { - true - } - }) - .collect(); + let sql_library = &map_sql_version(&database_url); let db_url = database_url.as_str(); write_project( - &path, + &root_path, + &entities_path, db_url, &args.crate_name, - expanded_format, - tables, sql_library, args.framework.into(), args.depth_limit, @@ -151,7 +111,7 @@ async fn main() { .unwrap(); } -#[derive(ArgEnum, Debug, Clone, Copy, Eq, PartialEq)] +#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, ValueEnum)] pub enum WebFrameworkEnum { Actix, Poem, @@ -165,3 +125,12 @@ impl From for seaography_generator::WebFrameworkEnum { } } } + +fn map_sql_version(database_url: &url::Url) -> String { + match database_url.scheme() { + "mysql" => String::from("sqlx-mysql"), + "sqlite" => String::from("sqlx-sqlite"), + "postgres" | "postgresql" => String::from("sqlx-postgres"), + _ => unimplemented!("{} is not supported", database_url.scheme()), + } +} diff --git a/derive/Cargo.toml b/derive/Cargo.toml deleted file mode 100644 index 2a6d333a..00000000 --- a/derive/Cargo.toml +++ /dev/null @@ -1,30 +0,0 @@ -[package] -name = "seaography-derive" -version = "0.3.0" -edition = "2021" -rust-version = "1.60" -authors = ["Panagiotis Karatakis "] -description = "🧭 A GraphQL framework and code generator for SeaORM" -license = "MIT OR Apache-2.0" -homepage = "https://www.sea-ql.org/Seaography" -documentation = "https://docs.rs/seaography" -repository = "https://github.com/SeaQL/seaography" -keywords = ["async", "graphql", "mysql", "postgres", "sqlite"] -categories = ["database"] - -[lib] -proc-macro = true - -[dependencies] -quote = "1.0.21" -syn = { version = "1.0.99" } -proc-macro2 = "1.0.43" -bae = "0.1.7" -heck = "0.4.0" - -[features] -default = [] -with-chrono = [] -with-decimal = [] -with-json = [] -with-uuid = [] diff --git a/derive/README.md b/derive/README.md deleted file mode 100644 index 668ac52c..00000000 --- a/derive/README.md +++ /dev/null @@ -1,3 +0,0 @@ -# Seaography Derive - -This is a library of macros that convert SeaORM entities into Async GraphQL nodes. 
\ No newline at end of file diff --git a/derive/src/enumeration.rs b/derive/src/enumeration.rs deleted file mode 100644 index 75bd5f90..00000000 --- a/derive/src/enumeration.rs +++ /dev/null @@ -1,72 +0,0 @@ -use proc_macro2::TokenStream; -use quote::{format_ident, quote}; - -pub fn enum_filter_fn(ident: syn::Ident) -> TokenStream { - let name = format_ident!("{}EnumFilter", ident); - - quote! { - #[derive(Debug, Clone, async_graphql::InputObject)] - pub struct #name { - pub eq: Option<#ident>, - pub ne: Option<#ident>, - pub gt: Option<#ident>, - pub gte: Option<#ident>, - pub lt: Option<#ident>, - pub lte: Option<#ident>, - pub is_in: Option>, - pub is_not_in: Option>, - pub is_null: Option, - } - - impl seaography::FilterTrait for #name { - type Ty = #ident; - - fn eq(&self) -> Option { - self.eq.clone() - } - fn ne(&self) -> Option { - self.ne.clone() - } - fn gt(&self) -> Option { - self.gt.clone() - } - fn gte(&self) -> Option { - self.gte.clone() - } - fn lt(&self) -> Option { - self.lt.clone() - } - fn lte(&self) -> Option { - self.lte.clone() - } - fn is_in(&self) -> Option> { - self.is_in.clone() - } - fn is_not_in(&self) -> Option> { - self.is_not_in.clone() - } - fn is_null(&self) -> Option { - self.is_null - } - fn contains(&self) -> Option { - panic!("contains not supported for enumerations") - } - fn starts_with(&self) -> Option { - panic!("starts_with not supported for enumerations") - } - fn ends_with(&self) -> Option { - panic!("ends_with not supported for enumerations") - } - fn like(&self) -> Option { - panic!("like not supported for enumerations") - } - fn not_like(&self) -> Option { - panic!("not_like not supported for enumerations") - } - } - - impl seaography::FilterTypeTrait for #ident { - type Filter = #name; - } - } -} diff --git a/derive/src/error.rs b/derive/src/error.rs deleted file mode 100644 index d23dfaaa..00000000 --- a/derive/src/error.rs +++ /dev/null @@ -1,18 +0,0 @@ -#[derive(Debug)] -pub enum Error { - Internal(String), - Syn(syn::Error), - LexError(proc_macro2::LexError), -} - -impl From for Error { - fn from(err: syn::Error) -> Self { - Self::Syn(err) - } -} - -impl From for Error { - fn from(err: proc_macro2::LexError) -> Self { - Self::LexError(err) - } -} diff --git a/derive/src/filter.rs b/derive/src/filter.rs deleted file mode 100644 index 952219d0..00000000 --- a/derive/src/filter.rs +++ /dev/null @@ -1,269 +0,0 @@ -use heck::{ToSnakeCase, ToUpperCamelCase}; -use proc_macro2::TokenStream; -use quote::{format_ident, quote}; - -#[derive(Debug, Eq, PartialEq, bae::FromAttributes)] -pub struct SeaOrm { - table_name: Option, -} - -pub type IdentTypeTuple = (syn::Ident, syn::Type); - -// TODO skip ignored fields -pub fn filter_fn(item: syn::DataStruct, attrs: SeaOrm) -> Result { - let fields: Vec = item - .fields - .into_iter() - .map(|field| { - ( - field.ident.unwrap(), - remove_optional_from_type(field.ty).unwrap(), - ) - }) - .collect(); - - let filter_struct = filter_struct(&fields, &attrs)?; - - let recursive_filter_fn = recursive_filter_fn(&fields)?; - - let order_by_struct = order_by_struct(&fields, &attrs)?; - - let order_by_fn = order_by_fn(&fields)?; - - Ok(quote! { - #filter_struct - - #recursive_filter_fn - - #order_by_struct - - #order_by_fn - - impl seaography::EnhancedEntity for Entity { - type Entity = Entity; - type Filter = Filter; - type OrderBy = OrderBy; - } - }) -} - -pub fn filter_struct( - fields: &[IdentTypeTuple], - attrs: &SeaOrm, -) -> Result { - let fields: Vec = fields - .iter() - .map(|(ident, type_ident)| { - quote! 
{ - #ident: Option<<#type_ident as seaography::FilterTypeTrait>::Filter> - } - }) - .collect(); - - let entity_name = match &attrs.table_name { - Some(syn::Lit::Str(name)) => name, - _ => return Err(crate::error::Error::Internal("Invalid entity name".into())), - }; - - let filter_name = format!("{}Filter", entity_name.value().to_upper_camel_case()); - - // TODO enable when async graphql support name_type for input objects - // let type_name = quote!{ - // impl async_graphql::TypeName for Filter { - // fn type_name() -> ::std::borrow::Cow<'static, str> { - // use seaography::heck::ToUpperCamelCase; - - // let filter_name = format!("{}Filter", Entity::default().table_name().to_string().to_upper_camel_case()); - - // ::std::borrow::Cow::Owned(filter_name) - // } - // } - // } - - Ok(quote! { - #[derive(Debug, Clone, async_graphql::InputObject)] - #[graphql(name = #filter_name)] - pub struct Filter { - pub or: Option>>, - pub and: Option>>, - #(#fields),* - } - }) -} - -pub fn order_by_struct( - fields: &[IdentTypeTuple], - attrs: &SeaOrm, -) -> Result { - let fields: Vec = fields - .iter() - .map(|(ident, _)| { - quote! { - #ident: Option - } - }) - .collect(); - - let entity_name = match &attrs.table_name { - Some(syn::Lit::Str(name)) => name, - _ => return Err(crate::error::Error::Internal("Invalid entity name".into())), - }; - - let filter_name = format!("{}OrderBy", entity_name.value().to_upper_camel_case()); - - Ok(quote! { - #[derive(Debug, Clone, async_graphql::InputObject)] - #[graphql(name = #filter_name)] - pub struct OrderBy { - #(#fields),* - } - }) -} - -pub fn order_by_fn(fields: &[IdentTypeTuple]) -> Result { - let fields: Vec = fields - .iter() - .map(|(ident, _)| { - let column = format_ident!("{}", ident.to_string().to_upper_camel_case()); - - quote! { - let stmt = if let Some(order_by) = self.#ident { - match order_by { - seaography::OrderByEnum::Asc => stmt.order_by(Column::#column, sea_orm::query::Order::Asc), - seaography::OrderByEnum::Desc => stmt.order_by(Column::#column, sea_orm::query::Order::Desc), - } - } else { - stmt - }; - } - }) - .collect(); - - Ok(quote! 
{ - impl seaography::EntityOrderBy for OrderBy { - fn order_by(&self, stmt: sea_orm::Select) -> sea_orm::Select { - use sea_orm::QueryOrder; - - #(#fields)* - - stmt - } - } - }) -} - -pub fn recursive_filter_fn(fields: &[IdentTypeTuple]) -> Result { - let columns_filters: Vec = fields - .iter() - .map(|(ident, _)| { - let column_name = format_ident!("{}", ident.to_string().to_snake_case()); - - let column_enum_name = format_ident!("{}", ident.to_string().to_upper_camel_case()); - - quote!{ - if let Some(#column_name) = &self.#column_name { - if let Some(eq_value) = seaography::FilterTrait::eq(#column_name) { - condition = condition.add(Column::#column_enum_name.eq(eq_value)) - } - - if let Some(ne_value) = seaography::FilterTrait::ne(#column_name) { - condition = condition.add(Column::#column_enum_name.ne(ne_value)) - } - - if let Some(gt_value) = seaography::FilterTrait::gt(#column_name) { - condition = condition.add(Column::#column_enum_name.gt(gt_value)) - } - - if let Some(gte_value) = seaography::FilterTrait::gte(#column_name) { - condition = condition.add(Column::#column_enum_name.gte(gte_value)) - } - - if let Some(lt_value) = seaography::FilterTrait::lt(#column_name) { - condition = condition.add(Column::#column_enum_name.lt(lt_value)) - } - - if let Some(lte_value) = seaography::FilterTrait::lte(#column_name) { - condition = condition.add(Column::#column_enum_name.lte(lte_value)) - } - - if let Some(is_in_value) = seaography::FilterTrait::is_in(#column_name) { - condition = condition.add(Column::#column_enum_name.is_in(is_in_value)) - } - - if let Some(is_not_in_value) = seaography::FilterTrait::is_not_in(#column_name) { - condition = condition.add(Column::#column_enum_name.is_not_in(is_not_in_value)) - } - - if let Some(is_null_value) = seaography::FilterTrait::is_null(#column_name) { - if is_null_value { - condition = condition.add(Column::#column_enum_name.is_null()) - } - } - } - } - }) - .collect(); - - Ok(quote! 
{ - impl seaography::EntityFilter for Filter { - fn filter_condition(&self) -> sea_orm::Condition { - let mut condition = sea_orm::Condition::all(); - - if let Some(or_filters) = &self.or { - let or_condition = or_filters - .iter() - .fold( - sea_orm::Condition::any(), - |fold_condition, filter| fold_condition.add(filter.filter_condition()) - ); - condition = condition.add(or_condition); - } - - if let Some(and_filters) = &self.and { - let and_condition = and_filters - .iter() - .fold( - sea_orm::Condition::all(), - |fold_condition, filter| fold_condition.add(filter.filter_condition()) - ); - condition = condition.add(and_condition); - } - - #(#columns_filters)* - - condition - } - } - }) -} - -pub fn remove_optional_from_type(ty: syn::Type) -> Result { - fn path_is_option(path: &syn::Path) -> bool { - path.leading_colon.is_none() - && path.segments.len() == 1 - && path.segments.iter().next().unwrap().ident == "Option" - } - - let ty = match ty { - syn::Type::Path(type_path) - if type_path.qself.is_none() && path_is_option(&type_path.path) => - { - let type_params = &type_path.path.segments.first().unwrap().arguments; - let generic_arg = match type_params { - syn::PathArguments::AngleBracketed(params) => params.args.first().unwrap(), - _ => { - return Err(crate::error::Error::Internal( - "Cannot parse type brackets".into(), - )) - } - }; - match generic_arg { - syn::GenericArgument::Type(ty) => ty.to_owned(), - _ => return Err(crate::error::Error::Internal("Cannot parse type".into())), - } - } - _ => ty, - }; - - Ok(ty) -} diff --git a/derive/src/lib.rs b/derive/src/lib.rs deleted file mode 100644 index a4195fac..00000000 --- a/derive/src/lib.rs +++ /dev/null @@ -1,148 +0,0 @@ -use quote::ToTokens; -use syn::DeriveInput; - -mod enumeration; -mod error; -mod filter; -mod relation; -mod root_query; - -#[proc_macro_derive(Filter, attributes(sea_orm))] -pub fn derive_filter_fn(input: proc_macro::TokenStream) -> proc_macro::TokenStream { - let DeriveInput { - ident, data, attrs, .. - } = syn::parse_macro_input!(input as syn::DeriveInput); - - let item = match data { - syn::Data::Struct(item) => item, - _ => { - return quote::quote! { - compile_error!("Input not structure") - } - .into() - } - }; - - if ident.ne("Model") { - return quote::quote! { - compile_error!("Struct must be SeaOrm Model structure") - } - .into(); - } - - let attrs = filter::SeaOrm::from_attributes(&attrs).unwrap(); - - filter::filter_fn(item, attrs) - .unwrap_or_else(|err| { - let error = format!("{:?}", err); - - quote::quote! { - compile_error!(#error) - } - }) - .into() -} - -// TODO use attrs to skip relations -#[proc_macro_derive(RelationsCompact, attributes(sea_orm))] -pub fn derive_relations_compact_fn(input: proc_macro::TokenStream) -> proc_macro::TokenStream { - let DeriveInput { ident, data, .. } = syn::parse_macro_input!(input as syn::DeriveInput); - - let item = match data { - syn::Data::Enum(item) => item, - _ => return quote::quote! { compile_error!("Input not enumeration") }.into(), - }; - - if ident.ne("Relation") { - return quote::quote! { - compile_error!("Struct must be SeaOrm Relation enumeration") - } - .into(); - } - - let res = relation::compact_relation_fn(&item).unwrap_or_else(|err| { - let error = format!("{:?}", err); - - quote::quote! 
{ - compile_error!(#error) - } - }); - - res.into() -} - -#[proc_macro_attribute] -pub fn relation( - _attrs: proc_macro::TokenStream, - input: proc_macro::TokenStream, -) -> proc_macro::TokenStream { - let implementation = syn::parse_macro_input!(input as syn::Item); - - if !implementation - .to_token_stream() - .to_string() - .starts_with("impl RelationTrait") - { - return quote::quote! { - compile_error!("Macro should be applied on the implementation of RelationTrait trait") - } - .into(); - } - - let item = match implementation { - syn::Item::Impl(implementation) => implementation, - _ => return quote::quote! { - compile_error!("Macro should be applied on the implementation of RelationTrait trait") - } - .into(), - }; - - let res = relation::expanded_relation_fn(&item).unwrap_or_else(|err| { - let error = format!("{:?}", err); - - quote::quote! { - compile_error!(#error) - } - }); - - res.into() -} - -#[proc_macro_derive(QueryRoot, attributes(seaography))] -pub fn derive_root_query_fn(input: proc_macro::TokenStream) -> proc_macro::TokenStream { - let DeriveInput { - ident, data, attrs, .. - } = syn::parse_macro_input!(input as syn::DeriveInput); - - match data { - syn::Data::Struct(_) => (), - _ => return quote::quote! { compile_error!("Input not structure") }.into(), - }; - - let attrs: Vec = attrs - .into_iter() - .map(|attribute| root_query::Seaography::from_attributes(&[attribute]).unwrap()) - .collect(); - - let res = root_query::root_query_fn(&ident, &attrs).unwrap_or_else(|err| { - let error = format!("{:?}", err); - - quote::quote! { - compile_error!(#error) - } - }); - - res.into() -} - -#[proc_macro_derive(EnumFilter, attributes())] -pub fn derive_enum_filter_fn(input: proc_macro::TokenStream) -> proc_macro::TokenStream { - let DeriveInput { ident, data, .. } = syn::parse_macro_input!(input as syn::DeriveInput); - - let _ = match data { - syn::Data::Enum(enumeration) => enumeration, - _ => return quote::quote! 
{ compile_error!("Input not enumeration") }.into(), - }; - - enumeration::enum_filter_fn(ident).into() -} diff --git a/derive/src/relation.rs b/derive/src/relation.rs deleted file mode 100644 index 37ef11ef..00000000 --- a/derive/src/relation.rs +++ /dev/null @@ -1,441 +0,0 @@ -use heck::ToUpperCamelCase; -use proc_macro2::TokenStream; -use quote::{format_ident, quote, ToTokens}; - -#[derive(Debug, Eq, PartialEq, bae::FromAttributes)] -pub struct SeaOrm { - belongs_to: Option, - has_many: Option, - from: Option, - to: Option, - on_update: Option, - on_delete: Option, -} - -#[derive(Clone, Debug, Eq, PartialEq)] -pub struct RelationParams { - relation_name: String, - belongs_to: Option, - has_many: Option, - reverse: bool, -} - -pub fn compact_relation_fn(item: &syn::DataEnum) -> Result { - let relations_parameters: Vec = item - .variants - .iter() - .map(|variant| -> Result { - let attrs = SeaOrm::from_attributes(&variant.attrs)?; - - let belongs_to = match attrs.belongs_to { - Some(syn::Lit::Str(belongs_to)) => Some(belongs_to.value()), - _ => None, - }; - - let has_many = match attrs.has_many { - Some(syn::Lit::Str(has_many)) => Some(has_many.value()), - _ => None, - }; - - Ok(RelationParams { - relation_name: variant.ident.to_string(), - belongs_to, - has_many, - reverse: false, - }) - }) - .collect::, crate::error::Error>>()?; - - produce_relations(relations_parameters) -} - -#[derive(Debug)] -struct ExpandedParams { - variant: syn::Ident, - relation_type: syn::Ident, - related_type: syn::Path, -} - -impl syn::parse::Parse for ExpandedParams { - fn parse(input: syn::parse::ParseStream) -> syn::Result { - let variant_path = input.parse::()?; - - let variant = variant_path.segments[1].ident.clone(); - - input.parse::()?; - - let method_path = input.parse::()?; - let relation_type = method_path.segments[1].ident.clone(); - - let group; - syn::parenthesized!(group in input); - - let related_type: syn::Path = group.parse()?; - - // Used to purge remaining buffer - input.step(|cursor| { - let mut rest = *cursor; - - while let Some((_, next)) = rest.token_tree() { - rest = next; - } - - Ok(((), rest)) - })?; - - Ok(Self { - variant, - relation_type, - related_type, - }) - } -} - -pub fn expanded_relation_fn(item: &syn::ItemImpl) -> Result { - if item - .to_token_stream() - .to_string() - .contains("No RelationDef") - { - return Ok(quote! 
{ - #item - - #[async_graphql::ComplexObject] - impl Model { - } - }); - } - - let method_tokens = item.items[0].to_token_stream(); - let method_item: syn::ImplItemMethod = syn::parse2(method_tokens)?; - - let match_tokens = method_item.block.stmts[0].to_token_stream(); - let match_item: syn::ExprMatch = syn::parse2(match_tokens)?; - - let expanded_params: Vec = match_item - .arms - .iter() - .map(|arm| -> Result { - let params: ExpandedParams = - syn::parse_str(arm.to_token_stream().to_string().as_str())?; - - Ok(params) - }) - .collect::, crate::error::Error>>()?; - - let relations_parameters: Vec = expanded_params - .iter() - .map(|params| -> Result { - let belongs_to = if params.relation_type.to_string().eq("belongs_to") { - Some(params.related_type.to_token_stream().to_string()) - } else { - None - }; - - let has_many = if params.relation_type.to_string().ne("belongs_to") { - Some(params.related_type.to_token_stream().to_string()) - } else { - None - }; - - let relation_name = params.variant.to_string(); - - Ok(RelationParams { - relation_name, - belongs_to, - has_many, - reverse: false, - }) - }) - .collect::, crate::error::Error>>()?; - - produce_relations(relations_parameters) -} - -pub fn produce_relations( - relations_parameters: Vec, -) -> Result { - let relations_copy = relations_parameters.clone(); - - let reverse_self_references_parameters = relations_copy - .into_iter() - .filter(|rel_param| { - rel_param.belongs_to.eq(&Some("Entity".into())) - || rel_param.has_many.eq(&Some("Entity".into())) - }) - .map(|rel_param| RelationParams { - relation_name: rel_param.relation_name, - belongs_to: rel_param.has_many, - has_many: rel_param.belongs_to, - reverse: true, - }); - - let (loaders, functions): (Vec<_>, Vec<_>) = relations_parameters - .into_iter() - .chain(reverse_self_references_parameters) - .map(relation_fn) - .collect::, crate::error::Error>>()? - .into_iter() - .map(|(loader, func)| (loader, func)) - .unzip(); - - Ok(quote! { - #(#loaders)* - - #[async_graphql::ComplexObject] - impl Model { - #(#functions)* - } - }) -} - -pub fn relation_fn( - relations_parameters: RelationParams, -) -> Result<(TokenStream, TokenStream), crate::error::Error> { - let RelationParams { - relation_name, - belongs_to, - has_many, - reverse, - } = relations_parameters; - - let relation_ident = format_ident!("{}", relation_name.to_upper_camel_case()); - - let relation_name = if reverse { - format_ident!("{}Reverse", relation_name.to_upper_camel_case()) - } else { - format_ident!("{}", relation_name.to_upper_camel_case()) - }; - - let (reverse, column_type) = if reverse { - (quote! { true }, quote! { to_col }) - } else { - (quote! { false }, quote! { from_col }) - }; - - let target_path = if let Some(target_path) = &has_many { - target_path - } else if let Some(target_path) = &belongs_to { - target_path - } else { - return Err(crate::error::Error::Internal( - "Cannot map relation: neither one-many or many-one".into(), - )); - }; - - let path: TokenStream = if target_path.ne("Entity") { - target_path.as_str()[..target_path.len() - 8] - .parse() - .unwrap() - } else { - quote! { self } - }; - - let relation_enum = quote! {Relation::#relation_ident}; - let foreign_key_name = format_ident!("{}FK", relation_name).to_token_stream(); - - if has_many.is_some() && belongs_to.is_some() { - return Err(crate::error::Error::Internal( - "Cannot map relation: cannot be both one-many and many-one".into(), - )); - } - - let (global_scope, object_scope) = if has_many.is_some() { - ( - quote! 
{ - #[derive(Debug, Clone, PartialEq, Eq, Hash)] - pub struct #foreign_key_name(pub seaography::RelationKeyStruct<#path::Entity>); - - #[async_trait::async_trait] - impl async_graphql::dataloader::Loader<#foreign_key_name> for crate::OrmDataloader { - type Value = Vec<#path::Model>; - type Error = std::sync::Arc; - - async fn load( - &self, - keys: &[#foreign_key_name], - ) -> Result, Self::Error> { - let keys: Vec<_> = keys - .into_iter() - .map(|key| key.0.to_owned()) - .collect(); - - use seaography::itertools::Itertools; - - let data: std::collections::HashMap<#foreign_key_name, Self::Value> = seaography - ::fetch_relation_data::<#path::Entity>( - keys, - #relation_enum.def(), - #reverse, - &self.db, - ).await? - .into_iter() - .map(|(key, model)| (#foreign_key_name(key), model)) - .into_group_map(); - - Ok(data) - } - } - }, - quote! { - pub async fn #relation_name<'a>( - &self, - ctx: &async_graphql::Context<'a>, - filters: Option<#path::Filter>, - pagination: Option, - order_by: Option<#path::OrderBy>, - ) -> async_graphql::types::connection::Connection { - use seaography::heck::ToSnakeCase; - use ::std::str::FromStr; - - let data_loader = ctx - .data::>() - .unwrap(); - - let from_column: Column = Column::from_str( - #relation_enum - .def() - .#column_type - .to_string() - .to_snake_case() - .as_str() - ).unwrap(); - - let key = #foreign_key_name(seaography::RelationKeyStruct { - val: self.get(from_column), - filter: filters, - order_by, - }); - - let nodes: Vec<#path::Model> = data_loader - .load_one(key) - .await - .expect("cannot unwrap load_one") - .unwrap_or_else(|| vec![]); - - if let Some(pagination) = pagination { - return match pagination { - seaography::Pagination::Pages(pagination) => { - let nodes_size = nodes.len() as u64; - let skip_size: usize = (pagination.page * pagination.limit).try_into().unwrap(); - let take_size: usize = pagination.limit.try_into().unwrap(); - - let nodes = nodes - .into_iter() - .skip(skip_size) - .take(take_size) - .collect(); - - let has_previous_page = pagination.page * pagination.limit > 0 && nodes_size != 0; - let has_next_page = (nodes_size / pagination.limit) - pagination.page - 1 > 0; - let pages = nodes_size / pagination.limit; - let current = pagination.page; - - seaography::data_to_connection::<#path::Entity>( - nodes, - has_previous_page, - has_next_page, - Some(pages), - Some(current) - ) - }, - seaography::Pagination::Cursor(cursor) => { - // TODO fix cursor related query pagination - seaography::data_to_connection::<#path::Entity>( - nodes, - false, - false, - Some(1), - Some(1) - ) - } - } - } - - seaography::data_to_connection::<#path::Entity>( - nodes, - false, - false, - Some(1), - Some(1) - ) - } - }, - ) - } else if belongs_to.is_some() { - ( - quote! { - #[derive(Debug, Clone, PartialEq, Eq, Hash)] - pub struct #foreign_key_name(pub seaography::RelationKeyStruct<#path::Entity>); - - #[async_trait::async_trait] - impl async_graphql::dataloader::Loader<#foreign_key_name> for crate::OrmDataloader { - type Value = #path::Model; - type Error = std::sync::Arc; - - async fn load( - &self, - keys: &[#foreign_key_name], - ) -> Result, Self::Error> { - let keys: Vec<_> = keys - .into_iter() - .map(|key| key.0.to_owned()) - .collect(); - - let data: std::collections::HashMap<#foreign_key_name, Self::Value> = seaography - ::fetch_relation_data::<#path::Entity>( - keys, - #relation_enum.def(), - #reverse, - &self.db, - ).await? 
- .into_iter() - .map(|(key, model)| (#foreign_key_name(key), model)) - .collect(); - - - Ok(data) - } - } - }, - quote! { - pub async fn #relation_name<'a>( - &self, - ctx: &async_graphql::Context<'a>, - ) -> Option<#path::Model> { - use seaography::heck::ToSnakeCase; - use ::std::str::FromStr; - - let data_loader = ctx - .data::>() - .unwrap(); - - let from_column: Column = Column::from_str( - #relation_enum - .def() - .#column_type - .to_string() - .to_snake_case() - .as_str() - ).unwrap(); - - let key = #foreign_key_name(seaography::RelationKeyStruct { - val: self.get(from_column), - filter: None, - order_by: None, - }); - - let data: Option<_> = data_loader.load_one(key).await.unwrap(); - - data - } - }, - ) - } else { - return Err(crate::error::Error::Internal( - "Cannot map relation: neither one-many or many-one".into(), - )); - }; - - Ok((global_scope, object_scope)) -} diff --git a/derive/src/root_query.rs b/derive/src/root_query.rs deleted file mode 100644 index f4a03a85..00000000 --- a/derive/src/root_query.rs +++ /dev/null @@ -1,229 +0,0 @@ -use proc_macro2::{Ident, TokenStream}; -use quote::{format_ident, quote}; - -#[derive(Debug, Eq, PartialEq, bae::FromAttributes, Clone)] -pub struct Seaography { - entity: Option, - object_config: Option, -} - -pub fn root_query_fn( - ident: &syn::Ident, - attrs: &[Seaography], -) -> Result { - let paths = attrs - .iter() - .filter(|attribute| matches!(&attribute.entity, Some(_))) - .map( - |attribute| -> Result<(TokenStream, TokenStream), crate::error::Error> { - let entity_name = if let syn::Lit::Str(item) = attribute.entity.as_ref().unwrap() { - Ok(item.value().parse::()?) - } else { - Err(crate::error::Error::Internal( - "Unreachable parse of query entities".into(), - )) - }?; - - let config = if let Some(config) = &attribute.object_config { - quote! { - #[graphql(#config)] - } - } else { - quote! {} - }; - - Ok((entity_name, config)) - }, - ) - .collect::, crate::error::Error>>()?; - - let object_config = attrs - .iter() - .find(|attribute| matches!(attribute.object_config, Some(_))) - .map(|attribute| attribute.object_config.as_ref().unwrap()); - - let implement_macros = match object_config { - Some(object_config) => { - quote! { - #[async_graphql::Object(#object_config)] - } - } - _ => { - quote! { - #[async_graphql::Object] - } - } - }; - - let queries: Vec = paths - .iter() - .map(|(path, config)| { - let name = format_ident!("{}", path.clone().into_iter().last().unwrap().to_string()); - - let basic_query = basic_query(&name, path); - - quote! { - #config - #basic_query - } - }) - .collect(); - - Ok(quote! { - #implement_macros - impl #ident { - #(#queries)* - } - }) -} - -pub fn basic_query(name: &Ident, path: &TokenStream) -> TokenStream { - quote! 
{ - pub async fn #name<'a>( - &self, - ctx: &async_graphql::Context<'a>, - filters: Option<#path::Filter>, - pagination: Option, - order_by: Option<#path::OrderBy>, - ) -> async_graphql::types::connection::Connection { - use sea_orm::prelude::*; - use sea_orm::Iterable; - use seaography::itertools::Itertools; - use seaography::{EntityOrderBy, EntityFilter}; - use async_graphql::types::connection::CursorType; - - println!("filters: {:?}", filters); - - let db: &crate::DatabaseConnection = ctx.data::().unwrap(); - let stmt = #path::Entity::find(); - - let stmt: sea_orm::Select<#path::Entity> = if let Some(filters) = filters { - stmt.filter(filters.filter_condition()) - } else { - stmt - }; - - let stmt: sea_orm::Select<#path::Entity> = if let Some(order_by) = order_by { - order_by.order_by(stmt) - } else { - stmt - }; - - if let Some(pagination) = pagination { - - match pagination { - seaography::Pagination::Pages(pagination) => { - let paginator = stmt.paginate(db, pagination.limit); - - let data: Vec<#path::Model> = paginator - .fetch_page(pagination.page) - .await - .unwrap(); - - let pages = paginator - .num_pages() - .await - .unwrap(); - - seaography::data_to_connection::<#path::Entity>(data, pagination.page != 1, pagination.page < pages, Some(pages), Some(pagination.page)) - }, - seaography::Pagination::Cursor(cursor) => { - let next_stmt = stmt.clone(); - let previous_stmt = stmt.clone(); - - fn apply_stmt_cursor_by(stmt: sea_orm::entity::prelude::Select<#path::Entity>) -> sea_orm::Cursor> { - if #path::PrimaryKey::iter().len() == 1 { - let column = #path::PrimaryKey::iter().map(|variant| variant.into_column()).collect::>()[0]; - stmt.cursor_by(column) - } else if #path::PrimaryKey::iter().len() == 2 { - let columns = #path::PrimaryKey::iter().map(|variant| variant.into_column()).collect_tuple::<(#path::Column, #path::Column)>().unwrap(); - stmt.cursor_by(columns) - } else if #path::PrimaryKey::iter().len() == 3 { - let columns = #path::PrimaryKey::iter().map(|variant| variant.into_column()).collect_tuple::<(#path::Column, #path::Column, #path::Column)>().unwrap(); - stmt.cursor_by(columns) - } else { - panic!("seaography does not support cursors with size greater than 3") - } - } - - let mut stmt = apply_stmt_cursor_by(stmt); - - if let Some(cursor_string) = cursor.cursor { - let values = seaography::CursorValues::decode_cursor(cursor_string.as_str()).unwrap(); - - let cursor_values: sea_orm::sea_query::value::ValueTuple = seaography::map_cursor_values(values.0); - - stmt.after(cursor_values); - } - - let data = stmt - .first(cursor.limit) - .all(db) - .await - .unwrap(); - - let has_next_page: bool = { - let mut next_stmt = apply_stmt_cursor_by(next_stmt); - - let last_node = data.last(); - - if let Some(node) = last_node { - let values: Vec = #path::PrimaryKey::iter() - .map(|variant| { - node.get(variant.into_column()) - }) - .collect(); - - let values = seaography::map_cursor_values(values); - - let next_data = next_stmt - .first(cursor.limit) - .after(values) - .all(db) - .await - .unwrap(); - - next_data.len() != 0 - } else { - false - } - }; - - let has_previous_page: bool = { - let mut previous_stmt = apply_stmt_cursor_by(previous_stmt); - - let first_node = data.first(); - - if let Some(node) = first_node { - let values: Vec = #path::PrimaryKey::iter() - .map(|variant| { - node.get(variant.into_column()) - }) - .collect(); - - let values = seaography::map_cursor_values(values); - - let previous_data = previous_stmt - .first(cursor.limit) - .before(values) - .all(db) - 
.await - .unwrap(); - - previous_data.len() != 0 - } else { - false - } - }; - - seaography::data_to_connection::<#path::Entity>(data, has_previous_page, has_next_page, None, None) - } - } - } else { - let data: Vec<#path::Model> = stmt.all(db).await.unwrap(); - - seaography::data_to_connection::<#path::Entity>(data, false, false, Some(1), Some(1)) - } - } - } -} diff --git a/discoverer/Cargo.toml b/discoverer/Cargo.toml deleted file mode 100644 index 232cb127..00000000 --- a/discoverer/Cargo.toml +++ /dev/null @@ -1,21 +0,0 @@ -[package] -name = "seaography-discoverer" -version = "0.3.0" -edition = "2021" -rust-version = "1.60" -authors = ["Panagiotis Karatakis "] -description = "🧭 A GraphQL framework and code generator for SeaORM" -license = "MIT OR Apache-2.0" -homepage = "https://www.sea-ql.org/Seaography" -documentation = "https://docs.rs/seaography" -repository = "https://github.com/SeaQL/seaography" -keywords = ["async", "graphql", "mysql", "postgres", "sqlite"] -categories = ["database"] - -[dependencies] -async-std = { version = "1.12.0", features = [ "attributes", "tokio1" ] } -sea-schema = { version = "^0.9.4", default-features = false, features = ["sqlx-sqlite", "sqlx-mysql", "sqlx-postgres", "runtime-async-std-native-tls", "discovery", "writer" ] } -sqlx = { version = "^0.6.1", features = [ "sqlite", "mysql", "postgres", "runtime-async-std-native-tls", "all-types" ] } -itertools = "0.10.3" -heck = "0.4.0" -url = "2.2.2" \ No newline at end of file diff --git a/discoverer/README.md b/discoverer/README.md deleted file mode 100644 index 8cc8fc69..00000000 --- a/discoverer/README.md +++ /dev/null @@ -1,2 +0,0 @@ -This is a library responsible to convert connect to a database and map its schema into a vendor agnostic schema. -The agnostic schema is used to generate ORM entities and GraphQL nodes. 
\ No newline at end of file diff --git a/discoverer/src/error.rs b/discoverer/src/error.rs deleted file mode 100644 index 670ae7bd..00000000 --- a/discoverer/src/error.rs +++ /dev/null @@ -1,19 +0,0 @@ -#[derive(Debug)] -pub enum Error { - SqlxError(sqlx::Error), - Error(String), -} - -impl From for Error { - fn from(err: sqlx::Error) -> Self { - Self::SqlxError(err) - } -} - -impl From<&str> for Error { - fn from(err: &str) -> Self { - Self::Error(err.into()) - } -} - -pub type Result = std::result::Result; diff --git a/discoverer/src/lib.rs b/discoverer/src/lib.rs deleted file mode 100644 index ee5fcf29..00000000 --- a/discoverer/src/lib.rs +++ /dev/null @@ -1,45 +0,0 @@ -use sea_schema::sea_query::TableCreateStatement; -use std::collections::BTreeMap; - -#[derive(Debug, Clone, Eq, PartialEq, Hash)] -pub enum SqlVersion { - Sqlite, - Mysql, - Postgres, -} - -pub mod sqlite; -pub use sqlite::explore_sqlite; - -pub mod mysql; -pub use mysql::explore_mysql; - -pub mod postgres; -pub use postgres::explore_postgres; - -pub mod error; -pub use error::{Error, Result}; - -pub use sea_schema; - -pub type TablesHashMap = BTreeMap; - -pub async fn extract_database_metadata( - database_url: &url::Url, -) -> Result<(TablesHashMap, SqlVersion)> { - Ok(match database_url.scheme() { - "mysql" => ( - explore_mysql(database_url.as_ref()).await?, - SqlVersion::Mysql, - ), - "sqlite" => ( - explore_sqlite(database_url.as_ref()).await?, - SqlVersion::Sqlite, - ), - "postgres" | "postgresql" => ( - explore_postgres(database_url.as_ref()).await?, - SqlVersion::Postgres, - ), - _ => unimplemented!("{} is not supported", database_url.scheme()), - }) -} diff --git a/discoverer/src/mysql.rs b/discoverer/src/mysql.rs deleted file mode 100644 index 74a95d1d..00000000 --- a/discoverer/src/mysql.rs +++ /dev/null @@ -1,25 +0,0 @@ -use sea_schema::mysql::{def::TableDef, discovery::SchemaDiscovery}; -use sqlx::MySqlPool; - -use crate::{Result, TablesHashMap}; - -pub async fn explore_mysql(url: &str) -> Result { - let connection = MySqlPool::connect(url).await?; - - let schema = url - .split('/') - .last() - .ok_or("schema not found in database url")?; - - let schema_discovery = SchemaDiscovery::new(connection, schema); - - let schema = schema_discovery.discover().await; - - let tables: TablesHashMap = schema - .tables - .iter() - .map(|table: &TableDef| (table.info.name.clone(), table.write())) - .collect(); - - Ok(tables) -} diff --git a/discoverer/src/postgres.rs b/discoverer/src/postgres.rs deleted file mode 100644 index d0ddab70..00000000 --- a/discoverer/src/postgres.rs +++ /dev/null @@ -1,26 +0,0 @@ -use crate::{Result, TablesHashMap}; -use sea_schema::postgres::{def::TableDef, discovery::SchemaDiscovery}; -use sqlx::PgPool; - -pub async fn explore_postgres(uri: &str) -> Result { - let connection = PgPool::connect(uri).await?; - - let db_uri = url::Url::parse(uri).expect("Fail to parse database URL"); - - let schema = db_uri - .query_pairs() - .find(|(k, _)| k == "currentSchema") - .map_or("public".to_string(), |(_, v)| v.to_string()); - - let schema_discovery = SchemaDiscovery::new(connection, &schema); - - let schema = schema_discovery.discover().await; - - let tables: TablesHashMap = schema - .tables - .iter() - .map(|table: &TableDef| (table.info.name.clone(), table.write())) - .collect(); - - Ok(tables) -} diff --git a/discoverer/src/sqlite.rs b/discoverer/src/sqlite.rs deleted file mode 100644 index d90e3652..00000000 --- a/discoverer/src/sqlite.rs +++ /dev/null @@ -1,23 +0,0 @@ -use 
sea_schema::sqlite::{def::TableDef, discovery::SchemaDiscovery}; -use sqlx::SqlitePool; - -use crate::{Error, Result, TablesHashMap}; - -pub async fn explore_sqlite(url: &str) -> Result { - let connection = SqlitePool::connect(url).await?; - - let schema_discovery = SchemaDiscovery::new(connection); - - let schema = schema_discovery - .discover() - .await - .map_err(|_| Error::Error("SqliteDiscoveryError".into()))?; - - let tables: TablesHashMap = schema - .tables - .iter() - .map(|table: &TableDef| (table.name.clone(), table.write())) - .collect(); - - Ok(tables) -} diff --git a/examples/mysql/Cargo.toml b/examples/mysql/Cargo.toml index d3679086..88a05fbd 100644 --- a/examples/mysql/Cargo.toml +++ b/examples/mysql/Cargo.toml @@ -4,20 +4,20 @@ name = "seaography-mysql-example" version = "0.3.0" [dependencies] -poem = { version = "1.3.29" } -async-graphql = { version = "4.0.14", features = ["decimal", "chrono", "dataloader"] } -async-graphql-poem = { version = "4.0.14" } -async-trait = { version = "0.1.53" } +poem = { version = "1.3.55" } +async-graphql-poem = { version = "5.0.6" } +async-graphql = { version = "5.0.6", features = ["decimal", "chrono", "dataloader", "dynamic-schema"] } +async-trait = { version = "0.1.64" } dotenv = "0.15.0" -sea-orm = { version = "^0.10", features = ["sqlx-mysql", "runtime-async-std-native-tls"] } -tokio = { version = "1.17.0", features = ["macros", "rt-multi-thread"] } -tracing = { version = "0.1.34" } -tracing-subscriber = { version = "0.3.11" } +sea-orm = { version = "0.11.0", features = ["sqlx-mysql", "runtime-async-std-native-tls"] } +tokio = { version = "1.26.0", features = ["macros", "rt-multi-thread"] } +tracing = { version = "0.1.37" } +tracing-subscriber = { version = "0.3.16" } lazy_static = { version = "1.4.0" } [dependencies.seaography] path = "../../" # remove this line in your own project -version = "0.3.0" +version = "^1.0.0" # seaography version features = ["with-decimal", "with-chrono"] [dev-dependencies] diff --git a/examples/mysql/src/entities/actor.rs b/examples/mysql/src/entities/actor.rs index 3abe97b8..ff14665f 100644 --- a/examples/mysql/src/entities/actor.rs +++ b/examples/mysql/src/entities/actor.rs @@ -1,9 +1,9 @@ +//! `SeaORM` Entity. Generated by sea-orm-codegen 0.11.0 + use sea_orm::entity::prelude::*; -#[derive(Clone, Debug, PartialEq, DeriveEntityModel, async_graphql::SimpleObject)] +#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq)] #[sea_orm(table_name = "actor")] -#[graphql(complex)] -#[graphql(name = "Actor")] pub struct Model { #[sea_orm(primary_key)] pub actor_id: i32, @@ -12,243 +12,16 @@ pub struct Model { pub last_update: DateTimeUtc, } -#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation, seaography::macros::RelationsCompact)] -pub enum Relation { - #[sea_orm(has_many = "super::film_actor::Entity")] - FilmActor, -} +#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] +pub enum Relation {} -impl Related for Entity { +impl Related for Entity { fn to() -> RelationDef { - Relation::FilmActor.def() + super::film_actor::Relation::Film.def() } -} - -impl ActiveModelBehavior for ActiveModel {} - -// Recursive expansion of seaography::macros::Filter! 
macro -// ========================================================= - -#[derive(Debug, Clone, async_graphql::InputObject)] -#[graphql(name = "ActorFilter")] -pub struct Filter { - pub or: Option>>, - pub and: Option>>, - actor_id: Option<::Filter>, - first_name: Option<::Filter>, - last_name: Option<::Filter>, - last_update: Option<::Filter>, -} -impl seaography::EntityFilter for Filter { - fn filter_condition(&self) -> sea_orm::Condition { - let mut condition = sea_orm::Condition::all(); - if let Some(or_filters) = &self.or { - let or_condition = or_filters - .iter() - .fold(sea_orm::Condition::any(), |fold_condition, filter| { - fold_condition.add(filter.filter_condition()) - }); - condition = condition.add(or_condition); - } - if let Some(and_filters) = &self.and { - let and_condition = and_filters - .iter() - .fold(sea_orm::Condition::all(), |fold_condition, filter| { - fold_condition.add(filter.filter_condition()) - }); - condition = condition.add(and_condition); - } - if let Some(actor_id) = &self.actor_id { - if let Some(eq_value) = seaography::FilterTrait::eq(actor_id) { - condition = condition.add(Column::ActorId.eq(eq_value)) - } - if let Some(ne_value) = seaography::FilterTrait::ne(actor_id) { - condition = condition.add(Column::ActorId.ne(ne_value)) - } - if let Some(gt_value) = seaography::FilterTrait::gt(actor_id) { - condition = condition.add(Column::ActorId.gt(gt_value)) - } - if let Some(gte_value) = seaography::FilterTrait::gte(actor_id) { - condition = condition.add(Column::ActorId.gte(gte_value)) - } - if let Some(lt_value) = seaography::FilterTrait::lt(actor_id) { - condition = condition.add(Column::ActorId.lt(lt_value)) - } - if let Some(lte_value) = seaography::FilterTrait::lte(actor_id) { - condition = condition.add(Column::ActorId.lte(lte_value)) - } - if let Some(is_in_value) = seaography::FilterTrait::is_in(actor_id) { - condition = condition.add(Column::ActorId.is_in(is_in_value)) - } - if let Some(is_not_in_value) = seaography::FilterTrait::is_not_in(actor_id) { - condition = condition.add(Column::ActorId.is_not_in(is_not_in_value)) - } - if let Some(is_null_value) = seaography::FilterTrait::is_null(actor_id) { - if is_null_value { - condition = condition.add(Column::ActorId.is_null()) - } - } - } - if let Some(first_name) = &self.first_name { - if let Some(eq_value) = seaography::FilterTrait::eq(first_name) { - condition = condition.add(Column::FirstName.eq(eq_value)) - } - if let Some(ne_value) = seaography::FilterTrait::ne(first_name) { - condition = condition.add(Column::FirstName.ne(ne_value)) - } - if let Some(gt_value) = seaography::FilterTrait::gt(first_name) { - condition = condition.add(Column::FirstName.gt(gt_value)) - } - if let Some(gte_value) = seaography::FilterTrait::gte(first_name) { - condition = condition.add(Column::FirstName.gte(gte_value)) - } - if let Some(lt_value) = seaography::FilterTrait::lt(first_name) { - condition = condition.add(Column::FirstName.lt(lt_value)) - } - if let Some(lte_value) = seaography::FilterTrait::lte(first_name) { - condition = condition.add(Column::FirstName.lte(lte_value)) - } - if let Some(is_in_value) = seaography::FilterTrait::is_in(first_name) { - condition = condition.add(Column::FirstName.is_in(is_in_value)) - } - if let Some(is_not_in_value) = seaography::FilterTrait::is_not_in(first_name) { - condition = condition.add(Column::FirstName.is_not_in(is_not_in_value)) - } - if let Some(is_null_value) = seaography::FilterTrait::is_null(first_name) { - if is_null_value { - condition = 
condition.add(Column::FirstName.is_null()) - } - } - } - if let Some(last_name) = &self.last_name { - if let Some(eq_value) = seaography::FilterTrait::eq(last_name) { - condition = condition.add(Column::LastName.eq(eq_value)) - } - if let Some(ne_value) = seaography::FilterTrait::ne(last_name) { - condition = condition.add(Column::LastName.ne(ne_value)) - } - if let Some(gt_value) = seaography::FilterTrait::gt(last_name) { - condition = condition.add(Column::LastName.gt(gt_value)) - } - if let Some(gte_value) = seaography::FilterTrait::gte(last_name) { - condition = condition.add(Column::LastName.gte(gte_value)) - } - if let Some(lt_value) = seaography::FilterTrait::lt(last_name) { - condition = condition.add(Column::LastName.lt(lt_value)) - } - if let Some(lte_value) = seaography::FilterTrait::lte(last_name) { - condition = condition.add(Column::LastName.lte(lte_value)) - } - if let Some(is_in_value) = seaography::FilterTrait::is_in(last_name) { - condition = condition.add(Column::LastName.is_in(is_in_value)) - } - if let Some(is_not_in_value) = seaography::FilterTrait::is_not_in(last_name) { - condition = condition.add(Column::LastName.is_not_in(is_not_in_value)) - } - if let Some(is_null_value) = seaography::FilterTrait::is_null(last_name) { - if is_null_value { - condition = condition.add(Column::LastName.is_null()) - } - } - } - if let Some(last_update) = &self.last_update { - if let Some(eq_value) = seaography::FilterTrait::eq(last_update) { - condition = condition.add(Column::LastUpdate.eq(eq_value)) - } - if let Some(ne_value) = seaography::FilterTrait::ne(last_update) { - condition = condition.add(Column::LastUpdate.ne(ne_value)) - } - if let Some(gt_value) = seaography::FilterTrait::gt(last_update) { - condition = condition.add(Column::LastUpdate.gt(gt_value)) - } - if let Some(gte_value) = seaography::FilterTrait::gte(last_update) { - condition = condition.add(Column::LastUpdate.gte(gte_value)) - } - if let Some(lt_value) = seaography::FilterTrait::lt(last_update) { - condition = condition.add(Column::LastUpdate.lt(lt_value)) - } - if let Some(lte_value) = seaography::FilterTrait::lte(last_update) { - condition = condition.add(Column::LastUpdate.lte(lte_value)) - } - if let Some(is_in_value) = seaography::FilterTrait::is_in(last_update) { - condition = condition.add(Column::LastUpdate.is_in(is_in_value)) - } - if let Some(is_not_in_value) = seaography::FilterTrait::is_not_in(last_update) { - condition = condition.add(Column::LastUpdate.is_not_in(is_not_in_value)) - } - if let Some(is_null_value) = seaography::FilterTrait::is_null(last_update) { - if is_null_value { - condition = condition.add(Column::LastUpdate.is_null()) - } - } - } - condition + fn via() -> Option { + Some(super::film_actor::Relation::Actor.def().rev()) } } -#[derive(Debug, Clone, async_graphql::InputObject)] -#[graphql(name = "ActorOrderBy")] -pub struct OrderBy { - actor_id: Option, - first_name: Option, - last_name: Option, - last_update: Option, -} -impl seaography::EntityOrderBy for OrderBy { - fn order_by(&self, stmt: sea_orm::Select) -> sea_orm::Select { - use sea_orm::QueryOrder; - let stmt = if let Some(order_by) = self.actor_id { - match order_by { - seaography::OrderByEnum::Asc => { - stmt.order_by(Column::ActorId, sea_orm::query::Order::Asc) - } - seaography::OrderByEnum::Desc => { - stmt.order_by(Column::ActorId, sea_orm::query::Order::Desc) - } - } - } else { - stmt - }; - let stmt = if let Some(order_by) = self.first_name { - match order_by { - seaography::OrderByEnum::Asc => { - 
stmt.order_by(Column::FirstName, sea_orm::query::Order::Asc) - } - seaography::OrderByEnum::Desc => { - stmt.order_by(Column::FirstName, sea_orm::query::Order::Desc) - } - } - } else { - stmt - }; - let stmt = if let Some(order_by) = self.last_name { - match order_by { - seaography::OrderByEnum::Asc => { - stmt.order_by(Column::LastName, sea_orm::query::Order::Asc) - } - seaography::OrderByEnum::Desc => { - stmt.order_by(Column::LastName, sea_orm::query::Order::Desc) - } - } - } else { - stmt - }; - if let Some(order_by) = self.last_update { - match order_by { - seaography::OrderByEnum::Asc => { - stmt.order_by(Column::LastUpdate, sea_orm::query::Order::Asc) - } - seaography::OrderByEnum::Desc => { - stmt.order_by(Column::LastUpdate, sea_orm::query::Order::Desc) - } - } - } else { - stmt - } - } -} -impl seaography::EnhancedEntity for Entity { - type Entity = Entity; - type Filter = Filter; - type OrderBy = OrderBy; -} +impl ActiveModelBehavior for ActiveModel {} diff --git a/examples/mysql/src/entities/address.rs b/examples/mysql/src/entities/address.rs index 6306ff13..0a721e31 100644 --- a/examples/mysql/src/entities/address.rs +++ b/examples/mysql/src/entities/address.rs @@ -1,16 +1,9 @@ +//! `SeaORM` Entity. Generated by sea-orm-codegen 0.11.0 + use sea_orm::entity::prelude::*; -#[derive( - Clone, - Debug, - PartialEq, - DeriveEntityModel, - async_graphql::SimpleObject, - seaography::macros::Filter, -)] +#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq)] #[sea_orm(table_name = "address")] -#[graphql(complex)] -#[graphql(name = "Address")] pub struct Model { #[sea_orm(primary_key)] pub address_id: i32, @@ -24,7 +17,7 @@ pub struct Model { pub last_update: DateTimeUtc, } -#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation, seaography::macros::RelationsCompact)] +#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] pub enum Relation { #[sea_orm( belongs_to = "super::city::Entity", diff --git a/examples/mysql/src/entities/category.rs b/examples/mysql/src/entities/category.rs index 5338570b..6907b636 100644 --- a/examples/mysql/src/entities/category.rs +++ b/examples/mysql/src/entities/category.rs @@ -1,16 +1,9 @@ +//! `SeaORM` Entity. Generated by sea-orm-codegen 0.11.0 + use sea_orm::entity::prelude::*; -#[derive( - Clone, - Debug, - PartialEq, - DeriveEntityModel, - async_graphql::SimpleObject, - seaography::macros::Filter, -)] +#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq)] #[sea_orm(table_name = "category")] -#[graphql(complex)] -#[graphql(name = "Category")] pub struct Model { #[sea_orm(primary_key)] pub category_id: u8, @@ -18,15 +11,15 @@ pub struct Model { pub last_update: DateTimeUtc, } -#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation, seaography::macros::RelationsCompact)] -pub enum Relation { - #[sea_orm(has_many = "super::film_category::Entity")] - FilmCategory, -} +#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] +pub enum Relation {} -impl Related for Entity { +impl Related for Entity { fn to() -> RelationDef { - Relation::FilmCategory.def() + super::film_category::Relation::Film.def() + } + fn via() -> Option { + Some(super::film_category::Relation::Category.def().rev()) } } diff --git a/examples/mysql/src/entities/city.rs b/examples/mysql/src/entities/city.rs index 2f732bff..93449c1c 100644 --- a/examples/mysql/src/entities/city.rs +++ b/examples/mysql/src/entities/city.rs @@ -1,16 +1,9 @@ +//! `SeaORM` Entity. 
Generated by sea-orm-codegen 0.11.0 + use sea_orm::entity::prelude::*; -#[derive( - Clone, - Debug, - PartialEq, - DeriveEntityModel, - async_graphql::SimpleObject, - seaography::macros::Filter, -)] +#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq)] #[sea_orm(table_name = "city")] -#[graphql(complex)] -#[graphql(name = "City")] pub struct Model { #[sea_orm(primary_key)] pub city_id: i32, @@ -19,8 +12,10 @@ pub struct Model { pub last_update: DateTimeUtc, } -#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation, seaography::macros::RelationsCompact)] +#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] pub enum Relation { + #[sea_orm(has_many = "super::address::Entity")] + Address, #[sea_orm( belongs_to = "super::country::Entity", from = "Column::CountryId", @@ -29,19 +24,17 @@ pub enum Relation { on_delete = "Restrict" )] Country, - #[sea_orm(has_many = "super::address::Entity")] - Address, } -impl Related for Entity { +impl Related for Entity { fn to() -> RelationDef { - Relation::Country.def() + Relation::Address.def() } } -impl Related for Entity { +impl Related for Entity { fn to() -> RelationDef { - Relation::Address.def() + Relation::Country.def() } } diff --git a/examples/mysql/src/entities/country.rs b/examples/mysql/src/entities/country.rs index d69dd580..778e709c 100644 --- a/examples/mysql/src/entities/country.rs +++ b/examples/mysql/src/entities/country.rs @@ -1,16 +1,9 @@ +//! `SeaORM` Entity. Generated by sea-orm-codegen 0.11.0 + use sea_orm::entity::prelude::*; -#[derive( - Clone, - Debug, - PartialEq, - DeriveEntityModel, - async_graphql::SimpleObject, - seaography::macros::Filter, -)] +#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq)] #[sea_orm(table_name = "country")] -#[graphql(complex)] -#[graphql(name = "Country")] pub struct Model { #[sea_orm(primary_key)] pub country_id: i32, @@ -18,7 +11,7 @@ pub struct Model { pub last_update: DateTimeUtc, } -#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation, seaography::macros::RelationsCompact)] +#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] pub enum Relation { #[sea_orm(has_many = "super::city::Entity")] City, diff --git a/examples/mysql/src/entities/customer.rs b/examples/mysql/src/entities/customer.rs index f30a76ff..11485b30 100644 --- a/examples/mysql/src/entities/customer.rs +++ b/examples/mysql/src/entities/customer.rs @@ -1,16 +1,9 @@ +//! `SeaORM` Entity. 
Generated by sea-orm-codegen 0.11.0 + use sea_orm::entity::prelude::*; -#[derive( - Clone, - Debug, - PartialEq, - DeriveEntityModel, - async_graphql::SimpleObject, - seaography::macros::Filter, -)] +#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq)] #[sea_orm(table_name = "customer")] -#[graphql(complex)] -#[graphql(name = "Customer")] pub struct Model { #[sea_orm(primary_key)] pub customer_id: i32, @@ -24,7 +17,7 @@ pub struct Model { pub last_update: DateTimeUtc, } -#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation, seaography::macros::RelationsCompact)] +#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] pub enum Relation { #[sea_orm( belongs_to = "super::address::Entity", @@ -34,6 +27,10 @@ pub enum Relation { on_delete = "Restrict" )] Address, + #[sea_orm(has_many = "super::payment::Entity")] + Payment, + #[sea_orm(has_many = "super::rental::Entity")] + Rental, #[sea_orm( belongs_to = "super::store::Entity", from = "Column::StoreId", @@ -42,10 +39,6 @@ pub enum Relation { on_delete = "Restrict" )] Store, - #[sea_orm(has_many = "super::payment::Entity")] - Payment, - #[sea_orm(has_many = "super::rental::Entity")] - Rental, } impl Related for Entity { @@ -54,12 +47,6 @@ impl Related for Entity { } } -impl Related for Entity { - fn to() -> RelationDef { - Relation::Store.def() - } -} - impl Related for Entity { fn to() -> RelationDef { Relation::Payment.def() @@ -72,4 +59,10 @@ impl Related for Entity { } } +impl Related for Entity { + fn to() -> RelationDef { + Relation::Store.def() + } +} + impl ActiveModelBehavior for ActiveModel {} diff --git a/examples/mysql/src/entities/film.rs b/examples/mysql/src/entities/film.rs index 5d3b0536..11e568b2 100644 --- a/examples/mysql/src/entities/film.rs +++ b/examples/mysql/src/entities/film.rs @@ -1,17 +1,10 @@ +//! `SeaORM` Entity. 
Generated by sea-orm-codegen 0.11.0 + use super::sea_orm_active_enums::Rating; use sea_orm::entity::prelude::*; -#[derive( - Clone, - Debug, - PartialEq, - DeriveEntityModel, - async_graphql::SimpleObject, - seaography::macros::Filter, -)] +#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq)] #[sea_orm(table_name = "film")] -#[graphql(complex)] -#[graphql(name = "Film")] pub struct Model { #[sea_orm(primary_key)] pub film_id: i32, @@ -29,15 +22,17 @@ pub struct Model { pub replacement_cost: Decimal, pub rating: Option, #[sea_orm( - column_type = "Custom(\"SET ('Trailers', 'Commentaries', 'Deleted Scenes', 'Behind the Scenes')\".to_owned())", + column_type = "custom(\"SET ('Trailers', 'Commentaries', 'Deleted Scenes', 'Behind the Scenes')\")", nullable )] pub special_features: Option, pub last_update: DateTimeUtc, } -#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation, seaography::macros::RelationsCompact)] +#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] pub enum Relation { + #[sea_orm(has_many = "super::inventory::Entity")] + Inventory, #[sea_orm( belongs_to = "super::language::Entity", from = "Column::LanguageId", @@ -54,29 +49,29 @@ pub enum Relation { on_delete = "Restrict" )] Language1, - #[sea_orm(has_many = "super::film_actor::Entity")] - FilmActor, - #[sea_orm(has_many = "super::film_category::Entity")] - FilmCategory, - #[sea_orm(has_many = "super::inventory::Entity")] - Inventory, } -impl Related for Entity { +impl Related for Entity { fn to() -> RelationDef { - Relation::FilmActor.def() + Relation::Inventory.def() } } -impl Related for Entity { +impl Related for Entity { fn to() -> RelationDef { - Relation::FilmCategory.def() + super::film_actor::Relation::Actor.def() + } + fn via() -> Option { + Some(super::film_actor::Relation::Film.def().rev()) } } -impl Related for Entity { +impl Related for Entity { fn to() -> RelationDef { - Relation::Inventory.def() + super::film_category::Relation::Category.def() + } + fn via() -> Option { + Some(super::film_category::Relation::Film.def().rev()) } } diff --git a/examples/mysql/src/entities/film_actor.rs b/examples/mysql/src/entities/film_actor.rs index 80b69273..0f329afb 100644 --- a/examples/mysql/src/entities/film_actor.rs +++ b/examples/mysql/src/entities/film_actor.rs @@ -1,16 +1,9 @@ +//! `SeaORM` Entity. Generated by sea-orm-codegen 0.11.0 + use sea_orm::entity::prelude::*; -#[derive( - Clone, - Debug, - PartialEq, - DeriveEntityModel, - async_graphql::SimpleObject, - seaography::macros::Filter, -)] +#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq)] #[sea_orm(table_name = "film_actor")] -#[graphql(complex)] -#[graphql(name = "FilmActor")] pub struct Model { #[sea_orm(primary_key, auto_increment = false)] pub actor_id: i32, @@ -19,7 +12,7 @@ pub struct Model { pub last_update: DateTimeUtc, } -#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation, seaography::macros::RelationsCompact)] +#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] pub enum Relation { #[sea_orm( belongs_to = "super::actor::Entity", diff --git a/examples/mysql/src/entities/film_category.rs b/examples/mysql/src/entities/film_category.rs index 0c3e6307..09585943 100644 --- a/examples/mysql/src/entities/film_category.rs +++ b/examples/mysql/src/entities/film_category.rs @@ -1,16 +1,9 @@ +//! `SeaORM` Entity. 
Generated by sea-orm-codegen 0.11.0 + use sea_orm::entity::prelude::*; -#[derive( - Clone, - Debug, - PartialEq, - DeriveEntityModel, - async_graphql::SimpleObject, - seaography::macros::Filter, -)] +#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq)] #[sea_orm(table_name = "film_category")] -#[graphql(complex)] -#[graphql(name = "FilmCategory")] pub struct Model { #[sea_orm(primary_key, auto_increment = false)] pub film_id: i32, @@ -19,7 +12,7 @@ pub struct Model { pub last_update: DateTimeUtc, } -#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation, seaography::macros::RelationsCompact)] +#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] pub enum Relation { #[sea_orm( belongs_to = "super::category::Entity", diff --git a/examples/mysql/src/entities/film_text.rs b/examples/mysql/src/entities/film_text.rs index 6df9cd57..3898cfac 100644 --- a/examples/mysql/src/entities/film_text.rs +++ b/examples/mysql/src/entities/film_text.rs @@ -1,16 +1,9 @@ +//! `SeaORM` Entity. Generated by sea-orm-codegen 0.11.0 + use sea_orm::entity::prelude::*; -#[derive( - Clone, - Debug, - PartialEq, - DeriveEntityModel, - async_graphql::SimpleObject, - seaography::macros::Filter, -)] +#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq)] #[sea_orm(table_name = "film_text")] -#[graphql(complex)] -#[graphql(name = "FilmText")] pub struct Model { #[sea_orm(primary_key, auto_increment = false)] pub film_id: i16, @@ -19,7 +12,7 @@ pub struct Model { pub description: Option, } -#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation, seaography::macros::RelationsCompact)] +#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] pub enum Relation {} impl ActiveModelBehavior for ActiveModel {} diff --git a/examples/mysql/src/entities/inventory.rs b/examples/mysql/src/entities/inventory.rs index d5f94aed..330675dc 100644 --- a/examples/mysql/src/entities/inventory.rs +++ b/examples/mysql/src/entities/inventory.rs @@ -1,16 +1,9 @@ +//! `SeaORM` Entity. 
Generated by sea-orm-codegen 0.11.0 + use sea_orm::entity::prelude::*; -#[derive( - Clone, - Debug, - PartialEq, - DeriveEntityModel, - async_graphql::SimpleObject, - seaography::macros::Filter, -)] +#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq)] #[sea_orm(table_name = "inventory")] -#[graphql(complex)] -#[graphql(name = "Inventory")] pub struct Model { #[sea_orm(primary_key)] pub inventory_id: i32, @@ -19,7 +12,7 @@ pub struct Model { pub last_update: DateTimeUtc, } -#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation, seaography::macros::RelationsCompact)] +#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] pub enum Relation { #[sea_orm( belongs_to = "super::film::Entity", @@ -29,6 +22,8 @@ pub enum Relation { on_delete = "Restrict" )] Film, + #[sea_orm(has_many = "super::rental::Entity")] + Rental, #[sea_orm( belongs_to = "super::store::Entity", from = "Column::StoreId", @@ -37,8 +32,6 @@ pub enum Relation { on_delete = "Restrict" )] Store, - #[sea_orm(has_many = "super::rental::Entity")] - Rental, } impl Related for Entity { @@ -47,15 +40,15 @@ impl Related for Entity { } } -impl Related for Entity { +impl Related for Entity { fn to() -> RelationDef { - Relation::Store.def() + Relation::Rental.def() } } -impl Related for Entity { +impl Related for Entity { fn to() -> RelationDef { - Relation::Rental.def() + Relation::Store.def() } } diff --git a/examples/mysql/src/entities/language.rs b/examples/mysql/src/entities/language.rs index 0559dff7..e64290ae 100644 --- a/examples/mysql/src/entities/language.rs +++ b/examples/mysql/src/entities/language.rs @@ -1,16 +1,9 @@ +//! `SeaORM` Entity. Generated by sea-orm-codegen 0.11.0 + use sea_orm::entity::prelude::*; -#[derive( - Clone, - Debug, - PartialEq, - DeriveEntityModel, - async_graphql::SimpleObject, - seaography::macros::Filter, -)] +#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq)] #[sea_orm(table_name = "language")] -#[graphql(complex)] -#[graphql(name = "Language")] pub struct Model { #[sea_orm(primary_key)] pub language_id: i32, @@ -18,7 +11,7 @@ pub struct Model { pub last_update: DateTimeUtc, } -#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation, seaography::macros::RelationsCompact)] +#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] pub enum Relation {} impl ActiveModelBehavior for ActiveModel {} diff --git a/examples/mysql/src/entities/mod.rs b/examples/mysql/src/entities/mod.rs index 7dc23605..68e78f4e 100644 --- a/examples/mysql/src/entities/mod.rs +++ b/examples/mysql/src/entities/mod.rs @@ -1,3 +1,7 @@ +//! `SeaORM` Entity. Generated by sea-orm-codegen 0.11.0 + +pub mod prelude; + pub mod actor; pub mod address; pub mod category; @@ -11,7 +15,6 @@ pub mod film_text; pub mod inventory; pub mod language; pub mod payment; -pub mod prelude; pub mod rental; pub mod sea_orm_active_enums; pub mod staff; diff --git a/examples/mysql/src/entities/payment.rs b/examples/mysql/src/entities/payment.rs index 95ec193f..638f488a 100644 --- a/examples/mysql/src/entities/payment.rs +++ b/examples/mysql/src/entities/payment.rs @@ -1,16 +1,9 @@ +//! `SeaORM` Entity. 
Generated by sea-orm-codegen 0.11.0 + use sea_orm::entity::prelude::*; -#[derive( - Clone, - Debug, - PartialEq, - DeriveEntityModel, - async_graphql::SimpleObject, - seaography::macros::Filter, -)] +#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq)] #[sea_orm(table_name = "payment")] -#[graphql(complex)] -#[graphql(name = "Payment")] pub struct Model { #[sea_orm(primary_key)] pub payment_id: i32, @@ -23,7 +16,7 @@ pub struct Model { pub last_update: DateTimeUtc, } -#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation, seaography::macros::RelationsCompact)] +#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] pub enum Relation { #[sea_orm( belongs_to = "super::customer::Entity", diff --git a/examples/mysql/src/entities/prelude.rs b/examples/mysql/src/entities/prelude.rs index 07d114ee..8c1f2aac 100644 --- a/examples/mysql/src/entities/prelude.rs +++ b/examples/mysql/src/entities/prelude.rs @@ -1,3 +1,5 @@ +//! `SeaORM` Entity. Generated by sea-orm-codegen 0.11.0 + pub use super::actor::Entity as Actor; pub use super::address::Entity as Address; pub use super::category::Entity as Category; diff --git a/examples/mysql/src/entities/rental.rs b/examples/mysql/src/entities/rental.rs index 22cbf3f8..e377aa67 100644 --- a/examples/mysql/src/entities/rental.rs +++ b/examples/mysql/src/entities/rental.rs @@ -1,16 +1,9 @@ +//! `SeaORM` Entity. Generated by sea-orm-codegen 0.11.0 + use sea_orm::entity::prelude::*; -#[derive( - Clone, - Debug, - PartialEq, - DeriveEntityModel, - async_graphql::SimpleObject, - seaography::macros::Filter, -)] +#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq)] #[sea_orm(table_name = "rental")] -#[graphql(complex)] -#[graphql(name = "Rental")] pub struct Model { #[sea_orm(primary_key)] pub rental_id: i32, @@ -22,7 +15,7 @@ pub struct Model { pub last_update: DateTimeUtc, } -#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation, seaography::macros::RelationsCompact)] +#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] pub enum Relation { #[sea_orm( belongs_to = "super::customer::Entity", @@ -40,6 +33,8 @@ pub enum Relation { on_delete = "Restrict" )] Inventory, + #[sea_orm(has_many = "super::payment::Entity")] + Payment, #[sea_orm( belongs_to = "super::staff::Entity", from = "Column::StaffId", @@ -48,8 +43,6 @@ pub enum Relation { on_delete = "Restrict" )] Staff, - #[sea_orm(has_many = "super::payment::Entity")] - Payment, } impl Related for Entity { @@ -64,15 +57,15 @@ impl Related for Entity { } } -impl Related for Entity { +impl Related for Entity { fn to() -> RelationDef { - Relation::Staff.def() + Relation::Payment.def() } } -impl Related for Entity { +impl Related for Entity { fn to() -> RelationDef { - Relation::Payment.def() + Relation::Staff.def() } } diff --git a/examples/mysql/src/entities/sea_orm_active_enums.rs b/examples/mysql/src/entities/sea_orm_active_enums.rs index 61d92fd8..9c5c4e40 100644 --- a/examples/mysql/src/entities/sea_orm_active_enums.rs +++ b/examples/mysql/src/entities/sea_orm_active_enums.rs @@ -1,16 +1,8 @@ +//! `SeaORM` Entity. 
Generated by sea-orm-codegen 0.11.0 + use sea_orm::entity::prelude::*; -#[derive( - Debug, - Clone, - PartialEq, - EnumIter, - DeriveActiveEnum, - Eq, - Copy, - async_graphql::Enum, - seaography::macros::EnumFilter, -)] +#[derive(Debug, Clone, PartialEq, Eq, EnumIter, DeriveActiveEnum)] #[sea_orm(rs_type = "String", db_type = "Enum", enum_name = "rating")] pub enum Rating { #[sea_orm(string_value = "G")] diff --git a/examples/mysql/src/entities/staff.rs b/examples/mysql/src/entities/staff.rs index 8dec64ec..73572f09 100644 --- a/examples/mysql/src/entities/staff.rs +++ b/examples/mysql/src/entities/staff.rs @@ -1,16 +1,9 @@ +//! `SeaORM` Entity. Generated by sea-orm-codegen 0.11.0 + use sea_orm::entity::prelude::*; -#[derive( - Clone, - Debug, - PartialEq, - DeriveEntityModel, - async_graphql::SimpleObject, - seaography::macros::Filter, -)] +#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq)] #[sea_orm(table_name = "staff")] -#[graphql(complex)] -#[graphql(name = "Staff")] pub struct Model { #[sea_orm(primary_key)] pub staff_id: i32, @@ -27,7 +20,7 @@ pub struct Model { pub last_update: DateTimeUtc, } -#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation, seaography::macros::RelationsCompact)] +#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] pub enum Relation { #[sea_orm( belongs_to = "super::address::Entity", @@ -37,6 +30,18 @@ pub enum Relation { on_delete = "Restrict" )] Address, + #[sea_orm(has_many = "super::payment::Entity")] + Payment, + #[sea_orm(has_many = "super::rental::Entity")] + Rental, + #[sea_orm( + belongs_to = "Entity", + from = "Column::ReportsToId", + to = "Column::StaffId", + on_update = "Cascade", + on_delete = "Restrict" + )] + SelfRef, #[sea_orm( belongs_to = "super::store::Entity", from = "Column::StoreId", @@ -45,18 +50,6 @@ pub enum Relation { on_delete = "Restrict" )] Store, - #[sea_orm( - belongs_to = "Entity", - from = "Column::ReportsToId", - to = "Column::StaffId", - on_update = "Restrict", - on_delete = "Restrict" - )] - SelfRef, - #[sea_orm(has_many = "super::payment::Entity")] - Payment, - #[sea_orm(has_many = "super::rental::Entity")] - Rental, } impl Related for Entity { @@ -65,12 +58,6 @@ impl Related for Entity { } } -impl Related for Entity { - fn to() -> RelationDef { - Relation::Store.def() - } -} - impl Related for Entity { fn to() -> RelationDef { Relation::Payment.def() @@ -83,4 +70,10 @@ impl Related for Entity { } } +impl Related for Entity { + fn to() -> RelationDef { + Relation::Store.def() + } +} + impl ActiveModelBehavior for ActiveModel {} diff --git a/examples/mysql/src/entities/store.rs b/examples/mysql/src/entities/store.rs index f145ae99..abd0007e 100644 --- a/examples/mysql/src/entities/store.rs +++ b/examples/mysql/src/entities/store.rs @@ -1,16 +1,9 @@ +//! `SeaORM` Entity. 
Generated by sea-orm-codegen 0.11.0 + use sea_orm::entity::prelude::*; -#[derive( - Clone, - Debug, - PartialEq, - DeriveEntityModel, - async_graphql::SimpleObject, - seaography::macros::Filter, -)] +#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq)] #[sea_orm(table_name = "store")] -#[graphql(complex)] -#[graphql(name = "Store")] pub struct Model { #[sea_orm(primary_key)] pub store_id: i32, @@ -20,7 +13,7 @@ pub struct Model { pub last_update: DateTimeUtc, } -#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation, seaography::macros::RelationsCompact)] +#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] pub enum Relation { #[sea_orm( belongs_to = "super::address::Entity", @@ -30,6 +23,10 @@ pub enum Relation { on_delete = "Restrict" )] Address, + #[sea_orm(has_many = "super::customer::Entity")] + Customer, + #[sea_orm(has_many = "super::inventory::Entity")] + Inventory, #[sea_orm( belongs_to = "super::staff::Entity", from = "Column::ManagerStaffId", @@ -38,10 +35,6 @@ pub enum Relation { on_delete = "Restrict" )] Staff, - #[sea_orm(has_many = "super::customer::Entity")] - Customer, - #[sea_orm(has_many = "super::inventory::Entity")] - Inventory, } impl Related for Entity { @@ -50,12 +43,6 @@ impl Related for Entity { } } -impl Related for Entity { - fn to() -> RelationDef { - Relation::Staff.def() - } -} - impl Related for Entity { fn to() -> RelationDef { Relation::Customer.def() @@ -68,4 +55,10 @@ impl Related for Entity { } } +impl Related for Entity { + fn to() -> RelationDef { + Relation::Staff.def() + } +} + impl ActiveModelBehavior for ActiveModel {} diff --git a/examples/mysql/src/lib.rs b/examples/mysql/src/lib.rs index fc1e6274..770aa614 100644 --- a/examples/mysql/src/lib.rs +++ b/examples/mysql/src/lib.rs @@ -1,12 +1,8 @@ -#![recursion_limit = "1024"] - use sea_orm::prelude::*; pub mod entities; pub mod query_root; -pub use query_root::QueryRoot; - pub struct OrmDataloader { pub db: DatabaseConnection, } diff --git a/examples/mysql/src/main.rs b/examples/mysql/src/main.rs index ee6e574f..d5244148 100644 --- a/examples/mysql/src/main.rs +++ b/examples/mysql/src/main.rs @@ -1,7 +1,6 @@ use async_graphql::{ dataloader::DataLoader, http::{playground_source, GraphQLPlaygroundConfig}, - EmptyMutation, EmptySubscription, Schema, }; use async_graphql_poem::GraphQL; use dotenv::dotenv; @@ -27,7 +26,7 @@ lazy_static! 
{ #[handler] async fn graphql_playground() -> impl IntoResponse { - Html(playground_source(GraphQLPlaygroundConfig::new(&ENDPOINT))) + Html(playground_source(GraphQLPlaygroundConfig::new(&*ENDPOINT))) } #[tokio::main] @@ -46,16 +45,13 @@ async fn main() { }, tokio::spawn, ); - let mut schema = Schema::build(QueryRoot, EmptyMutation, EmptySubscription) - .data(database) - .data(orm_dataloader); - if let Some(depth) = *DEPTH_LIMIT { - schema = schema.limit_depth(depth); - } - if let Some(complexity) = *COMPLEXITY_LIMIT { - schema = schema.limit_complexity(complexity); - } - let schema = schema.finish(); + let schema = seaography_mysql_example::query_root::schema( + database, + orm_dataloader, + *DEPTH_LIMIT, + *COMPLEXITY_LIMIT, + ) + .unwrap(); let app = Route::new().at( &*ENDPOINT, get(graphql_playground).post(GraphQL::new(schema)), diff --git a/examples/mysql/src/query_root.rs b/examples/mysql/src/query_root.rs index 043b4ca0..d8700e8c 100644 --- a/examples/mysql/src/query_root.rs +++ b/examples/mysql/src/query_root.rs @@ -1,18 +1,253 @@ -#[derive(Debug, seaography::macros::QueryRoot)] -#[seaography(entity = "crate::entities::actor")] -#[seaography(entity = "crate::entities::address")] -#[seaography(entity = "crate::entities::category")] -#[seaography(entity = "crate::entities::city")] -#[seaography(entity = "crate::entities::country")] -#[seaography(entity = "crate::entities::customer")] -#[seaography(entity = "crate::entities::film")] -#[seaography(entity = "crate::entities::film_actor")] -#[seaography(entity = "crate::entities::film_category")] -#[seaography(entity = "crate::entities::film_text")] -#[seaography(entity = "crate::entities::inventory")] -#[seaography(entity = "crate::entities::language")] -#[seaography(entity = "crate::entities::payment")] -#[seaography(entity = "crate::entities::rental")] -#[seaography(entity = "crate::entities::staff")] -#[seaography(entity = "crate::entities::store")] -pub struct QueryRoot; +use crate::OrmDataloader; +use async_graphql::{dataloader::DataLoader, dynamic::*}; +use sea_orm::{DatabaseConnection, RelationTrait}; +use seaography::{ + Builder, BuilderContext, EntityObjectRelationBuilder, EntityObjectViaRelationBuilder, +}; + +lazy_static::lazy_static! 
{ static ref CONTEXT : BuilderContext = BuilderContext :: default () ; }
+
+pub fn schema(
+    database: DatabaseConnection,
+    orm_dataloader: DataLoader<OrmDataloader>,
+    depth: Option<usize>,
+    complexity: Option<usize>,
+) -> Result<Schema, SchemaError> {
+    let mut builder = Builder::new(&CONTEXT);
+    let entity_object_relation_builder = EntityObjectRelationBuilder { context: &CONTEXT };
+    let entity_object_via_relation_builder = EntityObjectViaRelationBuilder { context: &CONTEXT };
+    builder.register_entity::<crate::entities::film_actor::Entity>(vec![
+        entity_object_relation_builder
+            .get_relation::<crate::entities::film_actor::Entity, crate::entities::actor::Entity>(
+                "actor",
+                crate::entities::film_actor::Relation::Actor.def(),
+            ),
+        entity_object_relation_builder
+            .get_relation::<crate::entities::film_actor::Entity, crate::entities::film::Entity>(
+                "film",
+                crate::entities::film_actor::Relation::Film.def(),
+            ),
+    ]);
+    builder.register_entity::<crate::entities::rental::Entity>(vec![
+        entity_object_relation_builder
+            .get_relation::<crate::entities::rental::Entity, crate::entities::customer::Entity>(
+                "customer",
+                crate::entities::rental::Relation::Customer.def(),
+            ),
+        entity_object_relation_builder
+            .get_relation::<crate::entities::rental::Entity, crate::entities::inventory::Entity>(
+                "inventory",
+                crate::entities::rental::Relation::Inventory.def(),
+            ),
+        entity_object_relation_builder
+            .get_relation::<crate::entities::rental::Entity, crate::entities::payment::Entity>(
+                "payment",
+                crate::entities::rental::Relation::Payment.def(),
+            ),
+        entity_object_relation_builder
+            .get_relation::<crate::entities::rental::Entity, crate::entities::staff::Entity>(
+                "staff",
+                crate::entities::rental::Relation::Staff.def(),
+            ),
+    ]);
+    builder.register_entity::<crate::entities::category::Entity>(vec![
+        entity_object_via_relation_builder
+            .get_relation::<crate::entities::category::Entity, crate::entities::film::Entity>(
+                "film",
+            ),
+    ]);
+    builder.register_entity::<crate::entities::staff::Entity>(vec![
+        entity_object_relation_builder
+            .get_relation::<crate::entities::staff::Entity, crate::entities::address::Entity>(
+                "address",
+                crate::entities::staff::Relation::Address.def(),
+            ),
+        entity_object_relation_builder
+            .get_relation::<crate::entities::staff::Entity, crate::entities::payment::Entity>(
+                "payment",
+                crate::entities::staff::Relation::Payment.def(),
+            ),
+        entity_object_relation_builder
+            .get_relation::<crate::entities::staff::Entity, crate::entities::rental::Entity>(
+                "rental",
+                crate::entities::staff::Relation::Rental.def(),
+            ),
+        entity_object_relation_builder
+            .get_relation::<crate::entities::staff::Entity, crate::entities::staff::Entity>(
+                "selfRef",
+                crate::entities::staff::Relation::SelfRef.def(),
+            ),
+        entity_object_relation_builder
+            .get_relation::<crate::entities::staff::Entity, crate::entities::staff::Entity>(
+                "selfRefReverse",
+                crate::entities::staff::Relation::SelfRef.def().rev(),
+            ),
+        entity_object_relation_builder
+            .get_relation::<crate::entities::staff::Entity, crate::entities::store::Entity>(
+                "store",
+                crate::entities::staff::Relation::Store.def(),
+            ),
+    ]);
+    builder.register_entity::<crate::entities::country::Entity>(vec![
+        entity_object_relation_builder
+            .get_relation::<crate::entities::country::Entity, crate::entities::city::Entity>(
+                "city",
+                crate::entities::country::Relation::City.def(),
+            ),
+    ]);
+    builder.register_entity::<crate::entities::film::Entity>(vec![
+        entity_object_via_relation_builder
+            .get_relation::<crate::entities::film::Entity, crate::entities::actor::Entity>("actor"),
+        entity_object_via_relation_builder
+            .get_relation::<crate::entities::film::Entity, crate::entities::category::Entity>(
+                "category",
+            ),
+        entity_object_relation_builder
+            .get_relation::<crate::entities::film::Entity, crate::entities::inventory::Entity>(
+                "inventory",
+                crate::entities::film::Relation::Inventory.def(),
+            ),
+        entity_object_relation_builder
+            .get_relation::<crate::entities::film::Entity, crate::entities::language::Entity>(
+                "language1",
+                crate::entities::film::Relation::Language1.def(),
+            ),
+        entity_object_relation_builder
+            .get_relation::<crate::entities::film::Entity, crate::entities::language::Entity>(
+                "language2",
+                crate::entities::film::Relation::Language2.def(),
+            ),
+    ]);
+    builder.register_entity::<crate::entities::actor::Entity>(vec![
+        entity_object_via_relation_builder
+            .get_relation::<crate::entities::actor::Entity, crate::entities::film::Entity>("film"),
+    ]);
+    builder.register_entity::<crate::entities::language::Entity>(vec![]);
+    builder.register_entity::<crate::entities::city::Entity>(vec![
+        entity_object_relation_builder
+            .get_relation::<crate::entities::city::Entity, crate::entities::address::Entity>(
+                "address",
+                crate::entities::city::Relation::Address.def(),
+            ),
+        entity_object_relation_builder
+            .get_relation::<crate::entities::city::Entity, crate::entities::country::Entity>(
+                "country",
+                crate::entities::city::Relation::Country.def(),
+            ),
+    ]);
+    builder.register_entity::<crate::entities::inventory::Entity>(vec![
+        entity_object_relation_builder
+            .get_relation::<crate::entities::inventory::Entity, crate::entities::film::Entity>(
+                "film",
+                crate::entities::inventory::Relation::Film.def(),
+            ),
+        entity_object_relation_builder
+            .get_relation::<crate::entities::inventory::Entity, crate::entities::rental::Entity>(
+                "rental",
+                crate::entities::inventory::Relation::Rental.def(),
+            ),
+        entity_object_relation_builder
+            .get_relation::<crate::entities::inventory::Entity, crate::entities::store::Entity>(
+                "store",
+                crate::entities::inventory::Relation::Store.def(),
+            ),
+    ]);
+    builder.register_entity::<crate::entities::film_text::Entity>(vec![]);
+    builder . register_entity :: < crate :: entities :: film_category :: Entity > (vec ! [entity_object_relation_builder . get_relation :: < crate :: entities :: film_category :: Entity , crate :: entities :: category :: Entity > ("category" , crate :: entities :: film_category :: Relation :: Category . def ()) , entity_object_relation_builder . get_relation :: < crate :: entities :: film_category :: Entity , crate :: entities :: film :: Entity > ("film" , crate :: entities :: film_category :: Relation :: Film . def ())]) ;
+    builder.register_entity::<crate::entities::customer::Entity>(vec![
+        entity_object_relation_builder
+            .get_relation::<crate::entities::customer::Entity, crate::entities::address::Entity>(
+                "address",
+                crate::entities::customer::Relation::Address.def(),
+            ),
+        entity_object_relation_builder
+            .get_relation::<crate::entities::customer::Entity, crate::entities::payment::Entity>(
+                "payment",
+                crate::entities::customer::Relation::Payment.def(),
+            ),
+        entity_object_relation_builder
+            .get_relation::<crate::entities::customer::Entity, crate::entities::rental::Entity>(
+                "rental",
+                crate::entities::customer::Relation::Rental.def(),
+            ),
+        entity_object_relation_builder
+            .get_relation::<crate::entities::customer::Entity, crate::entities::store::Entity>(
+                "store",
+                crate::entities::customer::Relation::Store.def(),
+            ),
+    ]);
+    builder.register_entity::<crate::entities::store::Entity>(vec![
+        entity_object_relation_builder
+            .get_relation::<crate::entities::store::Entity, crate::entities::address::Entity>(
+                "address",
+                crate::entities::store::Relation::Address.def(),
+            ),
+        entity_object_relation_builder
+            .get_relation::<crate::entities::store::Entity, crate::entities::customer::Entity>(
+                "customer",
+                crate::entities::store::Relation::Customer.def(),
+            ),
+        entity_object_relation_builder
+            .get_relation::<crate::entities::store::Entity, crate::entities::inventory::Entity>(
+                "inventory",
+                crate::entities::store::Relation::Inventory.def(),
+            ),
+        entity_object_relation_builder
+            .get_relation::<crate::entities::store::Entity, crate::entities::staff::Entity>(
+                "staff",
+                crate::entities::store::Relation::Staff.def(),
+            ),
+    ]);
+    builder.register_entity::<crate::entities::payment::Entity>(vec![
+        entity_object_relation_builder
+            .get_relation::<crate::entities::payment::Entity, crate::entities::customer::Entity>(
+                "customer",
+                crate::entities::payment::Relation::Customer.def(),
+            ),
+        entity_object_relation_builder
+            .get_relation::<crate::entities::payment::Entity, crate::entities::rental::Entity>(
+                "rental",
+                crate::entities::payment::Relation::Rental.def(),
+            ),
+        entity_object_relation_builder
+            .get_relation::<crate::entities::payment::Entity, crate::entities::staff::Entity>(
+                "staff",
+                crate::entities::payment::Relation::Staff.def(),
+            ),
+    ]);
+    builder.register_entity::<crate::entities::address::Entity>(vec![
+        entity_object_relation_builder
+            .get_relation::<crate::entities::address::Entity, crate::entities::city::Entity>(
+                "city",
+                crate::entities::address::Relation::City.def(),
+            ),
+        entity_object_relation_builder
+            .get_relation::<crate::entities::address::Entity, crate::entities::customer::Entity>(
+                "customer",
+                crate::entities::address::Relation::Customer.def(),
+            ),
+        entity_object_relation_builder
+            .get_relation::<crate::entities::address::Entity, crate::entities::staff::Entity>(
+                "staff",
+                crate::entities::address::Relation::Staff.def(),
+            ),
+        entity_object_relation_builder
+            .get_relation::<crate::entities::address::Entity, crate::entities::store::Entity>(
+                "store",
+                crate::entities::address::Relation::Store.def(),
+            ),
+    ]);
+    builder.register_enumeration::<crate::entities::sea_orm_active_enums::Rating>();
+    let schema = builder.schema_builder();
+    let schema = if let Some(depth) = depth {
+        schema.limit_depth(depth)
+    } else {
+        schema
+    };
+    let schema = if let Some(complexity) = complexity {
+        schema.limit_complexity(complexity)
+    } else {
+        schema
+    };
+    schema.data(database).data(orm_dataloader).finish()
+}
diff --git a/examples/mysql/tests/query_tests.rs b/examples/mysql/tests/query_tests.rs
index 7cd3d5ef..d19fc994 100644
--- a/examples/mysql/tests/query_tests.rs
+++ b/examples/mysql/tests/query_tests.rs
@@ -1,8 +1,8 @@
-use async_graphql::{dataloader::DataLoader, EmptyMutation, EmptySubscription, Response, Schema};
+use async_graphql::{dataloader::DataLoader, dynamic::*, Response};
 use sea_orm::Database;
-use seaography_mysql_example::{OrmDataloader, QueryRoot};
+use seaography_mysql_example::OrmDataloader;
 
-pub async fn
get_schema() -> Schema { +pub async fn get_schema() -> Schema { let database = Database::connect("mysql://sea:sea@127.0.0.1/sakila") .await .unwrap(); @@ -12,11 +12,10 @@ pub async fn get_schema() -> Schema }, tokio::spawn, ); + let schema = + seaography_mysql_example::query_root::schema(database, orm_dataloader, None, None).unwrap(); - Schema::build(QueryRoot, EmptyMutation, EmptySubscription) - .data(database) - .data(orm_dataloader) - .finish() + schema } pub fn assert_eq(a: Response, b: &str) { @@ -34,42 +33,42 @@ async fn test_simple_query() { schema .execute( r#" - { - store { - nodes { - storeId - staff { - firstName - lastName + { + store { + nodes { + storeId + staff { + firstName + lastName + } + } + } } - } - } - } - "#, + "#, ) .await, r#" - { - "store": { - "nodes": [ - { - "storeId": 1, - "staff": { - "firstName": "Mike", - "lastName": "Hillyer" - } - }, - { - "storeId": 2, - "staff": { - "firstName": "Jon", - "lastName": "Stephens" - } + { + "store": { + "nodes": [ + { + "storeId": 1, + "staff": { + "firstName": "Mike", + "lastName": "Hillyer" + } + }, + { + "storeId": 2, + "staff": { + "firstName": "Jon", + "lastName": "Stephens" + } + } + ] } - ] } - } - "#, + "#, ) } @@ -81,35 +80,35 @@ async fn test_simple_query_with_filter() { schema .execute( r#" - { - store(filters: {storeId:{eq: 1}}) { - nodes { - storeId - staff { - firstName - lastName - } + { + store(filters: {storeId:{eq: 1}}) { + nodes { + storeId + staff { + firstName + lastName + } + } + } } - } - } - "#, + "#, ) .await, r#" - { - "store": { - "nodes": [ - { - "storeId": 1, - "staff": { - "firstName": "Mike", - "lastName": "Hillyer" - } + { + "store": { + "nodes": [ + { + "storeId": 1, + "staff": { + "firstName": "Mike", + "lastName": "Hillyer" + } + } + ] } - ] } - } - "#, + "#, ) } @@ -122,39 +121,43 @@ async fn test_filter_with_pagination() { .execute( r#" { - customer( - filters: { active: { eq: 0 } } - pagination: { pages: { page: 2, limit: 3 } } - ) { - nodes { - customerId + customer( + filters: { active: { eq: 0 } } + pagination: { page: { page: 2, limit: 3 } } + ) { + nodes { + customerId + } + paginationInfo { + pages + current + } } - pages - current } - } - "#, + "#, ) .await, r#" - { - "customer": { - "nodes": [ - { - "customerId": 315 - }, - { - "customerId": 368 - }, - { - "customerId": 406 + { + "customer": { + "nodes": [ + { + "customerId": 315 + }, + { + "customerId": 368 + }, + { + "customerId": 406 + } + ], + "paginationInfo": { + "pages": 5, + "current": 2 + } } - ], - "pages": 5, - "current": 2 } - } - "#, + "#, ) } @@ -167,39 +170,43 @@ async fn test_complex_filter_with_pagination() { .execute( r#" { - payment( - filters: { amount: { gt: "11.1" } } - pagination: { pages: { limit: 2, page: 3 } } - ) { - nodes { - paymentId - amount + payment( + filters: { amount: { gt: "11.1" } } + pagination: { page: { limit: 2, page: 3 } } + ) { + nodes { + paymentId + amount + } + paginationInfo { + pages + current + } } - pages - current - } } - "#, + "#, ) .await, r#" - { - "payment": { - "nodes": [ - { - "paymentId": 8272, - "amount": "11.99" - }, - { - "paymentId": 9803, - "amount": "11.99" + { + "payment": { + "nodes": [ + { + "paymentId": 8272, + "amount": "11.99" + }, + { + "paymentId": 9803, + "amount": "11.99" + } + ], + "paginationInfo": { + "pages": 5, + "current": 3 + } } - ], - "pages": 5, - "current": 3 } - } - "#, + "#, ) } @@ -212,89 +219,89 @@ async fn test_cursor_pagination() { .execute( r#" { - payment( - filters: { amount: { gt: "11" } } - pagination: { cursor: { limit: 5 } } - ) { 
- edges { - node { - paymentId - amount - customer { - firstName + payment( + filters: { amount: { gt: "11" } } + pagination: { cursor: { limit: 5 } } + ) { + edges { + node { + paymentId + amount + customer { + firstName + } + } + } + pageInfo { + hasPreviousPage + hasNextPage + startCursor + endCursor } - } - } - pageInfo { - hasPreviousPage - hasNextPage - startCursor - endCursor } - } } - "#, + "#, ) .await, r#" - { - "payment": { - "edges": [ - { - "node": { - "paymentId": 342, - "amount": "11.99", - "customer": { - "firstName": "KAREN" - } - } - }, - { - "node": { - "paymentId": 3146, - "amount": "11.99", - "customer": { - "firstName": "VICTORIA" - } - } - }, - { - "node": { - "paymentId": 5280, - "amount": "11.99", - "customer": { - "firstName": "VANESSA" - } - } - }, - { - "node": { - "paymentId": 5281, - "amount": "11.99", - "customer": { - "firstName": "ALMA" - } + { + "payment": { + "edges": [ + { + "node": { + "paymentId": 342, + "amount": "11.99", + "customer": { + "firstName": "KAREN" + } + } + }, + { + "node": { + "paymentId": 3146, + "amount": "11.99", + "customer": { + "firstName": "VICTORIA" + } + } + }, + { + "node": { + "paymentId": 5280, + "amount": "11.99", + "customer": { + "firstName": "VANESSA" + } + } + }, + { + "node": { + "paymentId": 5281, + "amount": "11.99", + "customer": { + "firstName": "ALMA" + } + } + }, + { + "node": { + "paymentId": 5550, + "amount": "11.99", + "customer": { + "firstName": "ROSEMARY" + } + } } - }, - { - "node": { - "paymentId": 5550, - "amount": "11.99", - "customer": { - "firstName": "ROSEMARY" - } + ], + "pageInfo": { + "hasPreviousPage": false, + "hasNextPage": true, + "startCursor": "Int[3]:342", + "endCursor": "Int[4]:5550" } - } - ], - "pageInfo": { - "hasPreviousPage": false, - "hasNextPage": true, - "startCursor": "Int[3]:342", - "endCursor": "Int[4]:5550" } - } - } - "#, + } + "#, ) } @@ -328,50 +335,50 @@ async fn test_cursor_pagination_prev() { } } } - "#, + "#, ) .await, r#" - { - "payment": { - "edges": [ - { - "node": { - "paymentId": 6409, - "amount": "11.99", - "customer": { - "firstName": "TANYA" - } - } - }, - { - "node": { - "paymentId": 8272, - "amount": "11.99", - "customer": { - "firstName": "RICHARD" - } + { + "payment": { + "edges": [ + { + "node": { + "paymentId": 6409, + "amount": "11.99", + "customer": { + "firstName": "TANYA" + } + } + }, + { + "node": { + "paymentId": 8272, + "amount": "11.99", + "customer": { + "firstName": "RICHARD" + } + } + }, + { + "node": { + "paymentId": 9803, + "amount": "11.99", + "customer": { + "firstName": "NICHOLAS" + } + } } - }, - { - "node": { - "paymentId": 9803, - "amount": "11.99", - "customer": { - "firstName": "NICHOLAS" - } + ], + "pageInfo": { + "hasPreviousPage": true, + "hasNextPage": true, + "startCursor": "Int[4]:6409", + "endCursor": "Int[4]:9803" } - } - ], - "pageInfo": { - "hasPreviousPage": true, - "hasNextPage": true, - "startCursor": "Int[4]:6409", - "endCursor": "Int[4]:9803" } - } - } - "#, + } + "#, ) } @@ -405,41 +412,41 @@ async fn test_cursor_pagination_no_next() { } } } - "#, + "#, ) .await, r#" - { - "payment": { - "edges": [ - { - "node": { - "paymentId": 15821, - "amount": "11.99", - "customer": { - "firstName": "KENT" - } + { + "payment": { + "edges": [ + { + "node": { + "paymentId": 15821, + "amount": "11.99", + "customer": { + "firstName": "KENT" + } + } + }, + { + "node": { + "paymentId": 15850, + "amount": "11.99", + "customer": { + "firstName": "TERRANCE" + } + } } - }, - { - "node": { - "paymentId": 15850, - "amount": "11.99", - "customer": 
{ - "firstName": "TERRANCE" - } + ], + "pageInfo": { + "hasPreviousPage": true, + "hasNextPage": false, + "startCursor": "Int[5]:15821", + "endCursor": "Int[5]:15850" } - } - ], - "pageInfo": { - "hasPreviousPage": true, - "hasNextPage": false, - "startCursor": "Int[5]:15821", - "endCursor": "Int[5]:15850" } - } - } - "#, + } + "#, ) } @@ -469,41 +476,41 @@ async fn test_self_ref() { } } } - "#, + "#, ) .await, r#" - { - "staff": { - "nodes": [ - { - "firstName": "Mike", - "reportsToId": null, - "selfRefReverse": { - "nodes": [ - { - "staffId": 2, - "firstName": "Jon" - } - ] - }, - "selfRef": null - }, - { - "firstName": "Jon", - "reportsToId": 1, - "selfRefReverse": { - "nodes": [] - }, - "selfRef": { - "staffId": 1, - "firstName": "Mike" - } + { + "staff": { + "nodes": [ + { + "firstName": "Mike", + "reportsToId": null, + "selfRefReverse": { + "nodes": [ + { + "staffId": 2, + "firstName": "Jon" + } + ] + }, + "selfRef": null + }, + { + "firstName": "Jon", + "reportsToId": 1, + "selfRefReverse": { + "nodes": [] + }, + "selfRef": { + "staffId": 1, + "firstName": "Mike" + } + } + ] } - ] } - } - "#, + "#, ) } @@ -516,31 +523,31 @@ async fn related_queries_filters() { .execute( r#" { - customer( - filters: { active: { eq: 0 } } - pagination: { cursor: { limit: 3, cursor: "Int[3]:271" } } - ) { - nodes { - customerId - lastName - email - address { - address - } - payment(filters: { amount: { gt: "8" } }, orderBy: { amount: DESC }) { - nodes { - paymentId + customer( + filters: { active: { eq: 0 } } + pagination: { cursor: { limit: 3, cursor: "Int[3]:271" } } + ) { + nodes { + customerId + lastName + email + address { + address + } + payment(filters: { amount: { gt: "8" } }, orderBy: { amount: DESC }) { + nodes { + paymentId + } } } + pageInfo { + hasPreviousPage + hasNextPage + endCursor + } } - pageInfo { - hasPreviousPage - hasNextPage - endCursor - } - } } - "#, + "#, ) .await, r#" @@ -639,14 +646,16 @@ async fn related_queries_pagination() { payment( filters: { amount: { gt: "7" } } orderBy: { amount: ASC } - pagination: { pages: { limit: 1, page: 1 } } + pagination: { page: { limit: 1, page: 1 } } ) { nodes { paymentId amount } - pages - current + paginationInfo { + pages + current + } pageInfo { hasPreviousPage hasNextPage @@ -681,8 +690,10 @@ async fn related_queries_pagination() { "amount": "9.99" } ], - "pages": 2, - "current": 1, + "paginationInfo": { + "pages": 2, + "current": 1 + }, "pageInfo": { "hasPreviousPage": true, "hasNextPage": false @@ -703,8 +714,10 @@ async fn related_queries_pagination() { "amount": "7.99" } ], - "pages": 6, - "current": 1, + "paginationInfo": { + "pages": 6, + "current": 1 + }, "pageInfo": { "hasPreviousPage": true, "hasNextPage": true @@ -725,8 +738,10 @@ async fn related_queries_pagination() { "amount": "7.99" } ], - "pages": 3, - "current": 1, + "paginationInfo": { + "pages": 3, + "current": 1 + }, "pageInfo": { "hasPreviousPage": true, "hasNextPage": true @@ -744,3 +759,56 @@ async fn related_queries_pagination() { "#, ) } + +#[tokio::test] +async fn enumeration_filter() { + let schema = get_schema().await; + + assert_eq( + schema + .execute( + r#" + { + film( + filters: { rating: { eq: NC17 } } + pagination: { page: { page: 1, limit: 5 } } + ) { + nodes { + filmId + rating + } + } + } + "#, + ) + .await, + r#" + { + "film": { + "nodes": [ + { + "filmId": 27, + "rating": "NC17" + }, + { + "filmId": 29, + "rating": "NC17" + }, + { + "filmId": 31, + "rating": "NC17" + }, + { + "filmId": 34, + "rating": "NC17" + }, + { + "filmId": 38, + "rating": 
"NC17" + } + ] + } + } + "#, + ) +} diff --git a/examples/postgres/Cargo.toml b/examples/postgres/Cargo.toml index 90e4d875..49f298e1 100644 --- a/examples/postgres/Cargo.toml +++ b/examples/postgres/Cargo.toml @@ -4,20 +4,20 @@ name = "seaography-postgres-example" version = "0.3.0" [dependencies] -async-graphql = { version = "4.0.10", features = ["decimal", "chrono", "dataloader"] } -async-graphql-poem = { version = "4.0.10" } -async-trait = { version = "0.1.53" } +poem = { version = "1.3.55" } +async-graphql-poem = { version = "5.0.6" } +async-graphql = { version = "5.0.6", features = ["decimal", "chrono", "dataloader", "dynamic-schema"] } +async-trait = { version = "0.1.64" } dotenv = "0.15.0" -poem = { version = "1.3.29" } -sea-orm = { version = "^0.10", features = ["sqlx-postgres", "runtime-async-std-native-tls"] } -tokio = { version = "1.17.0", features = ["macros", "rt-multi-thread"] } -tracing = { version = "0.1.34" } -tracing-subscriber = { version = "0.3.11" } +sea-orm = { version = "0.11.0", features = ["sqlx-postgres", "runtime-async-std-native-tls"] } +tokio = { version = "1.26.0", features = ["macros", "rt-multi-thread"] } +tracing = { version = "0.1.37" } +tracing-subscriber = { version = "0.3.16" } lazy_static = { version = "1.4.0" } [dependencies.seaography] +version = "^1.0.0" # seaography version path = "../../" # remove this line in your own project -version = "0.3.0" features = ["with-decimal", "with-chrono"] [dev-dependencies] diff --git a/examples/postgres/src/entities/actor.rs b/examples/postgres/src/entities/actor.rs index edac61be..ed2d47ad 100644 --- a/examples/postgres/src/entities/actor.rs +++ b/examples/postgres/src/entities/actor.rs @@ -1,16 +1,9 @@ +//! `SeaORM` Entity. Generated by sea-orm-codegen 0.11.0 + use sea_orm::entity::prelude::*; -#[derive( - Clone, - Debug, - PartialEq, - DeriveEntityModel, - async_graphql::SimpleObject, - seaography::macros::Filter, -)] +#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq)] #[sea_orm(table_name = "actor")] -#[graphql(complex)] -#[graphql(name = "Actor")] pub struct Model { #[sea_orm(primary_key)] pub actor_id: i32, @@ -19,15 +12,15 @@ pub struct Model { pub last_update: DateTime, } -#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation, seaography::macros::RelationsCompact)] -pub enum Relation { - #[sea_orm(has_many = "super::film_actor::Entity")] - FilmActor, -} +#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] +pub enum Relation {} -impl Related for Entity { +impl Related for Entity { fn to() -> RelationDef { - Relation::FilmActor.def() + super::film_actor::Relation::Film.def() + } + fn via() -> Option { + Some(super::film_actor::Relation::Actor.def().rev()) } } diff --git a/examples/postgres/src/entities/address.rs b/examples/postgres/src/entities/address.rs index daa5addd..5f8d67d5 100644 --- a/examples/postgres/src/entities/address.rs +++ b/examples/postgres/src/entities/address.rs @@ -1,16 +1,9 @@ +//! `SeaORM` Entity. 
Generated by sea-orm-codegen 0.11.0 + use sea_orm::entity::prelude::*; -#[derive( - Clone, - Debug, - PartialEq, - DeriveEntityModel, - async_graphql::SimpleObject, - seaography::macros::Filter, -)] +#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq)] #[sea_orm(table_name = "address")] -#[graphql(complex)] -#[graphql(name = "Address")] pub struct Model { #[sea_orm(primary_key)] pub address_id: i32, @@ -23,7 +16,7 @@ pub struct Model { pub last_update: DateTime, } -#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation, seaography::macros::RelationsCompact)] +#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] pub enum Relation { #[sea_orm( belongs_to = "super::city::Entity", diff --git a/examples/postgres/src/entities/category.rs b/examples/postgres/src/entities/category.rs index 460e95e0..8f5ba647 100644 --- a/examples/postgres/src/entities/category.rs +++ b/examples/postgres/src/entities/category.rs @@ -1,16 +1,9 @@ +//! `SeaORM` Entity. Generated by sea-orm-codegen 0.11.0 + use sea_orm::entity::prelude::*; -#[derive( - Clone, - Debug, - PartialEq, - DeriveEntityModel, - async_graphql::SimpleObject, - seaography::macros::Filter, -)] +#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq)] #[sea_orm(table_name = "category")] -#[graphql(complex)] -#[graphql(name = "Category")] pub struct Model { #[sea_orm(primary_key)] pub category_id: i32, @@ -18,15 +11,15 @@ pub struct Model { pub last_update: DateTime, } -#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation, seaography::macros::RelationsCompact)] -pub enum Relation { - #[sea_orm(has_many = "super::film_category::Entity")] - FilmCategory, -} +#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] +pub enum Relation {} -impl Related for Entity { +impl Related for Entity { fn to() -> RelationDef { - Relation::FilmCategory.def() + super::film_category::Relation::Film.def() + } + fn via() -> Option { + Some(super::film_category::Relation::Category.def().rev()) } } diff --git a/examples/postgres/src/entities/city.rs b/examples/postgres/src/entities/city.rs index b15af86c..724f0619 100644 --- a/examples/postgres/src/entities/city.rs +++ b/examples/postgres/src/entities/city.rs @@ -1,16 +1,9 @@ +//! `SeaORM` Entity. 
Generated by sea-orm-codegen 0.11.0 + use sea_orm::entity::prelude::*; -#[derive( - Clone, - Debug, - PartialEq, - DeriveEntityModel, - async_graphql::SimpleObject, - seaography::macros::Filter, -)] +#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq)] #[sea_orm(table_name = "city")] -#[graphql(complex)] -#[graphql(name = "City")] pub struct Model { #[sea_orm(primary_key)] pub city_id: i32, @@ -19,8 +12,10 @@ pub struct Model { pub last_update: DateTime, } -#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation, seaography::macros::RelationsCompact)] +#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] pub enum Relation { + #[sea_orm(has_many = "super::address::Entity")] + Address, #[sea_orm( belongs_to = "super::country::Entity", from = "Column::CountryId", @@ -29,19 +24,17 @@ pub enum Relation { on_delete = "Restrict" )] Country, - #[sea_orm(has_many = "super::address::Entity")] - Address, } -impl Related for Entity { +impl Related for Entity { fn to() -> RelationDef { - Relation::Country.def() + Relation::Address.def() } } -impl Related for Entity { +impl Related for Entity { fn to() -> RelationDef { - Relation::Address.def() + Relation::Country.def() } } diff --git a/examples/postgres/src/entities/country.rs b/examples/postgres/src/entities/country.rs index 5a0dbc74..2e5388a9 100644 --- a/examples/postgres/src/entities/country.rs +++ b/examples/postgres/src/entities/country.rs @@ -1,16 +1,9 @@ +//! `SeaORM` Entity. Generated by sea-orm-codegen 0.11.0 + use sea_orm::entity::prelude::*; -#[derive( - Clone, - Debug, - PartialEq, - DeriveEntityModel, - async_graphql::SimpleObject, - seaography::macros::Filter, -)] +#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq)] #[sea_orm(table_name = "country")] -#[graphql(complex)] -#[graphql(name = "Country")] pub struct Model { #[sea_orm(primary_key)] pub country_id: i32, @@ -18,7 +11,7 @@ pub struct Model { pub last_update: DateTime, } -#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation, seaography::macros::RelationsCompact)] +#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] pub enum Relation { #[sea_orm(has_many = "super::city::Entity")] City, diff --git a/examples/postgres/src/entities/customer.rs b/examples/postgres/src/entities/customer.rs index beafbd49..d9d41b6e 100644 --- a/examples/postgres/src/entities/customer.rs +++ b/examples/postgres/src/entities/customer.rs @@ -1,16 +1,9 @@ +//! `SeaORM` Entity. 
Generated by sea-orm-codegen 0.11.0 + use sea_orm::entity::prelude::*; -#[derive( - Clone, - Debug, - PartialEq, - DeriveEntityModel, - async_graphql::SimpleObject, - seaography::macros::Filter, -)] +#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq)] #[sea_orm(table_name = "customer")] -#[graphql(complex)] -#[graphql(name = "Customer")] pub struct Model { #[sea_orm(primary_key)] pub customer_id: i32, @@ -25,7 +18,7 @@ pub struct Model { pub active: Option, } -#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation, seaography::macros::RelationsCompact)] +#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] pub enum Relation { #[sea_orm( belongs_to = "super::address::Entity", @@ -35,6 +28,10 @@ pub enum Relation { on_delete = "Restrict" )] Address, + #[sea_orm(has_many = "super::payment::Entity")] + Payment, + #[sea_orm(has_many = "super::rental::Entity")] + Rental, #[sea_orm( belongs_to = "super::store::Entity", from = "Column::StoreId", @@ -43,10 +40,6 @@ pub enum Relation { on_delete = "Restrict" )] Store, - #[sea_orm(has_many = "super::payment::Entity")] - Payment, - #[sea_orm(has_many = "super::rental::Entity")] - Rental, } impl Related for Entity { @@ -55,12 +48,6 @@ impl Related for Entity { } } -impl Related for Entity { - fn to() -> RelationDef { - Relation::Store.def() - } -} - impl Related for Entity { fn to() -> RelationDef { Relation::Payment.def() @@ -73,4 +60,10 @@ impl Related for Entity { } } +impl Related for Entity { + fn to() -> RelationDef { + Relation::Store.def() + } +} + impl ActiveModelBehavior for ActiveModel {} diff --git a/examples/postgres/src/entities/film.rs b/examples/postgres/src/entities/film.rs index 44a5171a..248259ab 100644 --- a/examples/postgres/src/entities/film.rs +++ b/examples/postgres/src/entities/film.rs @@ -1,17 +1,10 @@ +//! `SeaORM` Entity. 
Generated by sea-orm-codegen 0.11.0 + use super::sea_orm_active_enums::MpaaRating; use sea_orm::entity::prelude::*; -#[derive( - Clone, - Debug, - PartialEq, - DeriveEntityModel, - async_graphql::SimpleObject, - seaography::macros::Filter, -)] +#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq)] #[sea_orm(table_name = "film")] -#[graphql(complex)] -#[graphql(name = "Film")] pub struct Model { #[sea_orm(primary_key)] pub film_id: i32, @@ -29,14 +22,15 @@ pub struct Model { pub replacement_cost: Decimal, pub rating: Option, pub last_update: DateTime, - #[sea_orm(column_type = "Custom(\"array\".to_owned())", nullable)] - pub special_features: Option, - #[sea_orm(column_type = "Custom(\"tsvector\".to_owned())")] - pub fulltext: String, +// pub special_features: Option>, +// #[sea_orm(column_type = "custom(\"tsvector\")")] +// pub fulltext: String, } -#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation, seaography::macros::RelationsCompact)] +#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] pub enum Relation { + #[sea_orm(has_many = "super::inventory::Entity")] + Inventory, #[sea_orm( belongs_to = "super::language::Entity", from = "Column::LanguageId", @@ -53,29 +47,29 @@ pub enum Relation { on_delete = "Restrict" )] Language1, - #[sea_orm(has_many = "super::film_actor::Entity")] - FilmActor, - #[sea_orm(has_many = "super::film_category::Entity")] - FilmCategory, - #[sea_orm(has_many = "super::inventory::Entity")] - Inventory, } -impl Related for Entity { +impl Related for Entity { fn to() -> RelationDef { - Relation::FilmActor.def() + Relation::Inventory.def() } } -impl Related for Entity { +impl Related for Entity { fn to() -> RelationDef { - Relation::FilmCategory.def() + super::film_actor::Relation::Actor.def() + } + fn via() -> Option { + Some(super::film_actor::Relation::Film.def().rev()) } } -impl Related for Entity { +impl Related for Entity { fn to() -> RelationDef { - Relation::Inventory.def() + super::film_category::Relation::Category.def() + } + fn via() -> Option { + Some(super::film_category::Relation::Film.def().rev()) } } diff --git a/examples/postgres/src/entities/film_actor.rs b/examples/postgres/src/entities/film_actor.rs index 1c3be9a4..882b6f3d 100644 --- a/examples/postgres/src/entities/film_actor.rs +++ b/examples/postgres/src/entities/film_actor.rs @@ -1,16 +1,9 @@ +//! `SeaORM` Entity. Generated by sea-orm-codegen 0.11.0 + use sea_orm::entity::prelude::*; -#[derive( - Clone, - Debug, - PartialEq, - DeriveEntityModel, - async_graphql::SimpleObject, - seaography::macros::Filter, -)] +#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq)] #[sea_orm(table_name = "film_actor")] -#[graphql(complex)] -#[graphql(name = "FilmActor")] pub struct Model { #[sea_orm(primary_key, auto_increment = false)] pub actor_id: i16, @@ -19,7 +12,7 @@ pub struct Model { pub last_update: DateTime, } -#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation, seaography::macros::RelationsCompact)] +#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] pub enum Relation { #[sea_orm( belongs_to = "super::actor::Entity", diff --git a/examples/postgres/src/entities/film_category.rs b/examples/postgres/src/entities/film_category.rs index 84f1bcce..1207a4a8 100644 --- a/examples/postgres/src/entities/film_category.rs +++ b/examples/postgres/src/entities/film_category.rs @@ -1,16 +1,9 @@ +//! `SeaORM` Entity. 
Generated by sea-orm-codegen 0.11.0 + use sea_orm::entity::prelude::*; -#[derive( - Clone, - Debug, - PartialEq, - DeriveEntityModel, - async_graphql::SimpleObject, - seaography::macros::Filter, -)] +#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq)] #[sea_orm(table_name = "film_category")] -#[graphql(complex)] -#[graphql(name = "FilmCategory")] pub struct Model { #[sea_orm(primary_key, auto_increment = false)] pub film_id: i16, @@ -19,7 +12,7 @@ pub struct Model { pub last_update: DateTime, } -#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation, seaography::macros::RelationsCompact)] +#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] pub enum Relation { #[sea_orm( belongs_to = "super::category::Entity", diff --git a/examples/postgres/src/entities/inventory.rs b/examples/postgres/src/entities/inventory.rs index 6b14465c..a5328d92 100644 --- a/examples/postgres/src/entities/inventory.rs +++ b/examples/postgres/src/entities/inventory.rs @@ -1,16 +1,9 @@ +//! `SeaORM` Entity. Generated by sea-orm-codegen 0.11.0 + use sea_orm::entity::prelude::*; -#[derive( - Clone, - Debug, - PartialEq, - DeriveEntityModel, - async_graphql::SimpleObject, - seaography::macros::Filter, -)] +#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq)] #[sea_orm(table_name = "inventory")] -#[graphql(complex)] -#[graphql(name = "Inventory")] pub struct Model { #[sea_orm(primary_key)] pub inventory_id: i32, @@ -19,7 +12,7 @@ pub struct Model { pub last_update: DateTime, } -#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation, seaography::macros::RelationsCompact)] +#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] pub enum Relation { #[sea_orm( belongs_to = "super::film::Entity", @@ -29,6 +22,8 @@ pub enum Relation { on_delete = "Restrict" )] Film, + #[sea_orm(has_many = "super::rental::Entity")] + Rental, #[sea_orm( belongs_to = "super::store::Entity", from = "Column::StoreId", @@ -37,8 +32,6 @@ pub enum Relation { on_delete = "Restrict" )] Store, - #[sea_orm(has_many = "super::rental::Entity")] - Rental, } impl Related for Entity { @@ -47,15 +40,15 @@ impl Related for Entity { } } -impl Related for Entity { +impl Related for Entity { fn to() -> RelationDef { - Relation::Store.def() + Relation::Rental.def() } } -impl Related for Entity { +impl Related for Entity { fn to() -> RelationDef { - Relation::Rental.def() + Relation::Store.def() } } diff --git a/examples/postgres/src/entities/language.rs b/examples/postgres/src/entities/language.rs index 947d4c3b..8dc7d276 100644 --- a/examples/postgres/src/entities/language.rs +++ b/examples/postgres/src/entities/language.rs @@ -1,16 +1,9 @@ +//! `SeaORM` Entity. 
Generated by sea-orm-codegen 0.11.0 + use sea_orm::entity::prelude::*; -#[derive( - Clone, - Debug, - PartialEq, - DeriveEntityModel, - async_graphql::SimpleObject, - seaography::macros::Filter, -)] +#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq)] #[sea_orm(table_name = "language")] -#[graphql(complex)] -#[graphql(name = "Language")] pub struct Model { #[sea_orm(primary_key)] pub language_id: i32, @@ -18,13 +11,7 @@ pub struct Model { pub last_update: DateTime, } -#[derive(Copy, Clone, Debug, EnumIter, seaography::macros::RelationsCompact)] +#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] pub enum Relation {} -impl RelationTrait for Relation { - fn def(&self) -> RelationDef { - panic!("No RelationDef") - } -} - impl ActiveModelBehavior for ActiveModel {} diff --git a/examples/postgres/src/entities/mod.rs b/examples/postgres/src/entities/mod.rs index 2cc28578..d7356287 100644 --- a/examples/postgres/src/entities/mod.rs +++ b/examples/postgres/src/entities/mod.rs @@ -1,3 +1,7 @@ +//! `SeaORM` Entity. Generated by sea-orm-codegen 0.11.0 + +pub mod prelude; + pub mod actor; pub mod address; pub mod category; @@ -10,7 +14,6 @@ pub mod film_category; pub mod inventory; pub mod language; pub mod payment; -pub mod prelude; pub mod rental; pub mod sea_orm_active_enums; pub mod staff; diff --git a/examples/postgres/src/entities/payment.rs b/examples/postgres/src/entities/payment.rs index a12613dc..a590c444 100644 --- a/examples/postgres/src/entities/payment.rs +++ b/examples/postgres/src/entities/payment.rs @@ -1,16 +1,9 @@ +//! `SeaORM` Entity. Generated by sea-orm-codegen 0.11.0 + use sea_orm::entity::prelude::*; -#[derive( - Clone, - Debug, - PartialEq, - DeriveEntityModel, - async_graphql::SimpleObject, - seaography::macros::Filter, -)] +#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq)] #[sea_orm(table_name = "payment")] -#[graphql(complex)] -#[graphql(name = "Payment")] pub struct Model { #[sea_orm(primary_key)] pub payment_id: i32, @@ -22,7 +15,7 @@ pub struct Model { pub payment_date: DateTime, } -#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation, seaography::macros::RelationsCompact)] +#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] pub enum Relation { #[sea_orm( belongs_to = "super::customer::Entity", diff --git a/examples/postgres/src/entities/prelude.rs b/examples/postgres/src/entities/prelude.rs index 27f39d53..0048a54c 100644 --- a/examples/postgres/src/entities/prelude.rs +++ b/examples/postgres/src/entities/prelude.rs @@ -1,3 +1,5 @@ +//! `SeaORM` Entity. Generated by sea-orm-codegen 0.11.0 + pub use super::actor::Entity as Actor; pub use super::address::Entity as Address; pub use super::category::Entity as Category; diff --git a/examples/postgres/src/entities/rental.rs b/examples/postgres/src/entities/rental.rs index 01ff90b0..642c5c43 100644 --- a/examples/postgres/src/entities/rental.rs +++ b/examples/postgres/src/entities/rental.rs @@ -1,16 +1,9 @@ +//! `SeaORM` Entity. 
Generated by sea-orm-codegen 0.11.0 + use sea_orm::entity::prelude::*; -#[derive( - Clone, - Debug, - PartialEq, - DeriveEntityModel, - async_graphql::SimpleObject, - seaography::macros::Filter, -)] +#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq)] #[sea_orm(table_name = "rental")] -#[graphql(complex)] -#[graphql(name = "Rental")] pub struct Model { #[sea_orm(primary_key)] pub rental_id: i32, @@ -22,7 +15,7 @@ pub struct Model { pub last_update: DateTime, } -#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation, seaography::macros::RelationsCompact)] +#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] pub enum Relation { #[sea_orm( belongs_to = "super::customer::Entity", @@ -40,6 +33,8 @@ pub enum Relation { on_delete = "Restrict" )] Inventory, + #[sea_orm(has_many = "super::payment::Entity")] + Payment, #[sea_orm( belongs_to = "super::staff::Entity", from = "Column::StaffId", @@ -48,8 +43,6 @@ pub enum Relation { on_delete = "Restrict" )] Staff, - #[sea_orm(has_many = "super::payment::Entity")] - Payment, } impl Related for Entity { @@ -64,15 +57,15 @@ impl Related for Entity { } } -impl Related for Entity { +impl Related for Entity { fn to() -> RelationDef { - Relation::Staff.def() + Relation::Payment.def() } } -impl Related for Entity { +impl Related for Entity { fn to() -> RelationDef { - Relation::Payment.def() + Relation::Staff.def() } } diff --git a/examples/postgres/src/entities/sea_orm_active_enums.rs b/examples/postgres/src/entities/sea_orm_active_enums.rs index c42c9acf..ee82cd4f 100644 --- a/examples/postgres/src/entities/sea_orm_active_enums.rs +++ b/examples/postgres/src/entities/sea_orm_active_enums.rs @@ -1,16 +1,8 @@ +//! `SeaORM` Entity. Generated by sea-orm-codegen 0.11.0 + use sea_orm::entity::prelude::*; -#[derive( - Debug, - Clone, - PartialEq, - EnumIter, - DeriveActiveEnum, - Eq, - Copy, - async_graphql::Enum, - seaography::macros::EnumFilter, -)] +#[derive(Debug, Clone, PartialEq, Eq, EnumIter, DeriveActiveEnum)] #[sea_orm(rs_type = "String", db_type = "Enum", enum_name = "mpaa_rating")] pub enum MpaaRating { #[sea_orm(string_value = "G")] diff --git a/examples/postgres/src/entities/staff.rs b/examples/postgres/src/entities/staff.rs index 4b6df08a..3f61badf 100644 --- a/examples/postgres/src/entities/staff.rs +++ b/examples/postgres/src/entities/staff.rs @@ -1,16 +1,9 @@ +//! `SeaORM` Entity. 
Generated by sea-orm-codegen 0.11.0 + use sea_orm::entity::prelude::*; -#[derive( - Clone, - Debug, - PartialEq, - DeriveEntityModel, - async_graphql::SimpleObject, - seaography::macros::Filter, -)] +#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq)] #[sea_orm(table_name = "staff")] -#[graphql(complex)] -#[graphql(name = "Staff")] pub struct Model { #[sea_orm(primary_key)] pub staff_id: i32, @@ -26,7 +19,7 @@ pub struct Model { pub picture: Option>, } -#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation, seaography::macros::RelationsCompact)] +#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] pub enum Relation { #[sea_orm( belongs_to = "super::address::Entity", @@ -36,6 +29,10 @@ pub enum Relation { on_delete = "Restrict" )] Address, + #[sea_orm(has_many = "super::payment::Entity")] + Payment, + #[sea_orm(has_many = "super::rental::Entity")] + Rental, #[sea_orm( belongs_to = "super::store::Entity", from = "Column::StoreId", @@ -44,10 +41,6 @@ pub enum Relation { on_delete = "NoAction" )] Store, - #[sea_orm(has_many = "super::payment::Entity")] - Payment, - #[sea_orm(has_many = "super::rental::Entity")] - Rental, } impl Related for Entity { @@ -56,12 +49,6 @@ impl Related for Entity { } } -impl Related for Entity { - fn to() -> RelationDef { - Relation::Store.def() - } -} - impl Related for Entity { fn to() -> RelationDef { Relation::Payment.def() @@ -74,4 +61,10 @@ impl Related for Entity { } } +impl Related for Entity { + fn to() -> RelationDef { + Relation::Store.def() + } +} + impl ActiveModelBehavior for ActiveModel {} diff --git a/examples/postgres/src/entities/store.rs b/examples/postgres/src/entities/store.rs index ce6371c7..6d799657 100644 --- a/examples/postgres/src/entities/store.rs +++ b/examples/postgres/src/entities/store.rs @@ -1,16 +1,9 @@ +//! `SeaORM` Entity. 
Generated by sea-orm-codegen 0.11.0 + use sea_orm::entity::prelude::*; -#[derive( - Clone, - Debug, - PartialEq, - DeriveEntityModel, - async_graphql::SimpleObject, - seaography::macros::Filter, -)] +#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq)] #[sea_orm(table_name = "store")] -#[graphql(complex)] -#[graphql(name = "Store")] pub struct Model { #[sea_orm(primary_key)] pub store_id: i32, @@ -19,7 +12,7 @@ pub struct Model { pub last_update: DateTime, } -#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation, seaography::macros::RelationsCompact)] +#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] pub enum Relation { #[sea_orm( belongs_to = "super::address::Entity", @@ -29,6 +22,10 @@ pub enum Relation { on_delete = "Restrict" )] Address, + #[sea_orm(has_many = "super::customer::Entity")] + Customer, + #[sea_orm(has_many = "super::inventory::Entity")] + Inventory, #[sea_orm( belongs_to = "super::staff::Entity", from = "Column::ManagerStaffId", @@ -37,10 +34,6 @@ pub enum Relation { on_delete = "Restrict" )] Staff, - #[sea_orm(has_many = "super::customer::Entity")] - Customer, - #[sea_orm(has_many = "super::inventory::Entity")] - Inventory, } impl Related for Entity { @@ -49,12 +42,6 @@ impl Related for Entity { } } -impl Related for Entity { - fn to() -> RelationDef { - Relation::Staff.def() - } -} - impl Related for Entity { fn to() -> RelationDef { Relation::Customer.def() @@ -67,4 +54,10 @@ impl Related for Entity { } } +impl Related for Entity { + fn to() -> RelationDef { + Relation::Staff.def() + } +} + impl ActiveModelBehavior for ActiveModel {} diff --git a/examples/postgres/src/lib.rs b/examples/postgres/src/lib.rs index fc1e6274..770aa614 100644 --- a/examples/postgres/src/lib.rs +++ b/examples/postgres/src/lib.rs @@ -1,12 +1,8 @@ -#![recursion_limit = "1024"] - use sea_orm::prelude::*; pub mod entities; pub mod query_root; -pub use query_root::QueryRoot; - pub struct OrmDataloader { pub db: DatabaseConnection, } diff --git a/examples/postgres/src/main.rs b/examples/postgres/src/main.rs index 25240d8d..16810b4e 100644 --- a/examples/postgres/src/main.rs +++ b/examples/postgres/src/main.rs @@ -1,7 +1,6 @@ use async_graphql::{ dataloader::DataLoader, http::{playground_source, GraphQLPlaygroundConfig}, - EmptyMutation, EmptySubscription, Schema, }; use async_graphql_poem::GraphQL; use dotenv::dotenv; @@ -27,7 +26,7 @@ lazy_static! 
{ #[handler] async fn graphql_playground() -> impl IntoResponse { - Html(playground_source(GraphQLPlaygroundConfig::new(&ENDPOINT))) + Html(playground_source(GraphQLPlaygroundConfig::new(&*ENDPOINT))) } #[tokio::main] @@ -37,7 +36,6 @@ async fn main() { .with_max_level(tracing::Level::INFO) .with_test_writer() .init(); - let database = Database::connect(&*DATABASE_URL) .await .expect("Fail to initialize database connection"); @@ -47,21 +45,17 @@ async fn main() { }, tokio::spawn, ); - let mut schema = Schema::build(QueryRoot, EmptyMutation, EmptySubscription) - .data(database) - .data(orm_dataloader); - if let Some(depth) = *DEPTH_LIMIT { - schema = schema.limit_depth(depth); - } - if let Some(complexity) = *COMPLEXITY_LIMIT { - schema = schema.limit_complexity(complexity); - } - let schema = schema.finish(); + let schema = seaography_postgres_example::query_root::schema( + database, + orm_dataloader, + *DEPTH_LIMIT, + *COMPLEXITY_LIMIT, + ) + .unwrap(); let app = Route::new().at( &*ENDPOINT, get(graphql_playground).post(GraphQL::new(schema)), ); - println!("Visit GraphQL Playground at http://{}", *URL); Server::new(TcpListener::bind(&*URL)) .run(app) diff --git a/examples/postgres/src/query_root.rs b/examples/postgres/src/query_root.rs index 710d911d..b9ab8302 100644 --- a/examples/postgres/src/query_root.rs +++ b/examples/postgres/src/query_root.rs @@ -1,17 +1,242 @@ -#[derive(Debug, seaography::macros::QueryRoot)] -#[seaography(entity = "crate::entities::actor")] -#[seaography(entity = "crate::entities::address")] -#[seaography(entity = "crate::entities::category")] -#[seaography(entity = "crate::entities::city")] -#[seaography(entity = "crate::entities::country")] -#[seaography(entity = "crate::entities::customer")] -#[seaography(entity = "crate::entities::film")] -#[seaography(entity = "crate::entities::film_actor")] -#[seaography(entity = "crate::entities::film_category")] -#[seaography(entity = "crate::entities::inventory")] -#[seaography(entity = "crate::entities::language")] -#[seaography(entity = "crate::entities::payment")] -#[seaography(entity = "crate::entities::rental")] -#[seaography(entity = "crate::entities::staff")] -#[seaography(entity = "crate::entities::store")] -pub struct QueryRoot; +use crate::OrmDataloader; +use async_graphql::{dataloader::DataLoader, dynamic::*}; +use sea_orm::{DatabaseConnection, RelationTrait}; +use seaography::{ + Builder, BuilderContext, EntityObjectRelationBuilder, EntityObjectViaRelationBuilder, +}; + +lazy_static::lazy_static! 
{ static ref CONTEXT: BuilderContext = BuilderContext::default(); }
+
+pub fn schema(
+    database: DatabaseConnection,
+    orm_dataloader: DataLoader<OrmDataloader>,
+    depth: Option<usize>,
+    complexity: Option<usize>,
+) -> Result<Schema, SchemaError> {
+    let mut builder = Builder::new(&CONTEXT);
+    let entity_object_relation_builder = EntityObjectRelationBuilder { context: &CONTEXT };
+    let entity_object_via_relation_builder = EntityObjectViaRelationBuilder { context: &CONTEXT };
+    builder.register_entity::<crate::entities::film_actor::Entity>(vec![
+        entity_object_relation_builder
+            .get_relation::<crate::entities::film_actor::Entity, crate::entities::actor::Entity>(
+                "actor",
+                crate::entities::film_actor::Relation::Actor.def(),
+            ),
+        entity_object_relation_builder
+            .get_relation::<crate::entities::film_actor::Entity, crate::entities::film::Entity>(
+                "film",
+                crate::entities::film_actor::Relation::Film.def(),
+            ),
+    ]);
+    builder.register_entity::<crate::entities::rental::Entity>(vec![
+        entity_object_relation_builder
+            .get_relation::<crate::entities::rental::Entity, crate::entities::customer::Entity>(
+                "customer",
+                crate::entities::rental::Relation::Customer.def(),
+            ),
+        entity_object_relation_builder
+            .get_relation::<crate::entities::rental::Entity, crate::entities::inventory::Entity>(
+                "inventory",
+                crate::entities::rental::Relation::Inventory.def(),
+            ),
+        entity_object_relation_builder
+            .get_relation::<crate::entities::rental::Entity, crate::entities::payment::Entity>(
+                "payment",
+                crate::entities::rental::Relation::Payment.def(),
+            ),
+        entity_object_relation_builder
+            .get_relation::<crate::entities::rental::Entity, crate::entities::staff::Entity>(
+                "staff",
+                crate::entities::rental::Relation::Staff.def(),
+            ),
+    ]);
+    builder.register_entity::<crate::entities::category::Entity>(vec![
+        entity_object_via_relation_builder
+            .get_relation::<crate::entities::category::Entity, crate::entities::film::Entity>(
+                "film",
+            ),
+    ]);
+    builder.register_entity::<crate::entities::staff::Entity>(vec![
+        entity_object_relation_builder
+            .get_relation::<crate::entities::staff::Entity, crate::entities::address::Entity>(
+                "address",
+                crate::entities::staff::Relation::Address.def(),
+            ),
+        entity_object_relation_builder
+            .get_relation::<crate::entities::staff::Entity, crate::entities::payment::Entity>(
+                "payment",
+                crate::entities::staff::Relation::Payment.def(),
+            ),
+        entity_object_relation_builder
+            .get_relation::<crate::entities::staff::Entity, crate::entities::rental::Entity>(
+                "rental",
+                crate::entities::staff::Relation::Rental.def(),
+            ),
+        entity_object_relation_builder
+            .get_relation::<crate::entities::staff::Entity, crate::entities::store::Entity>(
+                "store",
+                crate::entities::staff::Relation::Store.def(),
+            ),
+    ]);
+    builder.register_entity::<crate::entities::country::Entity>(vec![
+        entity_object_relation_builder
+            .get_relation::<crate::entities::country::Entity, crate::entities::city::Entity>(
+                "city",
+                crate::entities::country::Relation::City.def(),
+            ),
+    ]);
+    builder.register_entity::<crate::entities::film::Entity>(vec![
+        entity_object_via_relation_builder
+            .get_relation::<crate::entities::film::Entity, crate::entities::actor::Entity>("actor"),
+        entity_object_via_relation_builder
+            .get_relation::<crate::entities::film::Entity, crate::entities::category::Entity>(
+                "category",
+            ),
+        entity_object_relation_builder
+            .get_relation::<crate::entities::film::Entity, crate::entities::inventory::Entity>(
+                "inventory",
+                crate::entities::film::Relation::Inventory.def(),
+            ),
+        entity_object_relation_builder
+            .get_relation::<crate::entities::film::Entity, crate::entities::language::Entity>(
+                "language1",
+                crate::entities::film::Relation::Language1.def(),
+            ),
+        entity_object_relation_builder
+            .get_relation::<crate::entities::film::Entity, crate::entities::language::Entity>(
+                "language2",
+                crate::entities::film::Relation::Language2.def(),
+            ),
+    ]);
+    builder.register_entity::<crate::entities::actor::Entity>(vec![
+        entity_object_via_relation_builder
+            .get_relation::<crate::entities::actor::Entity, crate::entities::film::Entity>("film"),
+    ]);
+    builder.register_entity::<crate::entities::language::Entity>(vec![]);
+    builder.register_entity::<crate::entities::city::Entity>(vec![
+        entity_object_relation_builder
+            .get_relation::<crate::entities::city::Entity, crate::entities::address::Entity>(
+                "address",
+                crate::entities::city::Relation::Address.def(),
+            ),
+        entity_object_relation_builder
+            .get_relation::<crate::entities::city::Entity, crate::entities::country::Entity>(
+                "country",
+                crate::entities::city::Relation::Country.def(),
+            ),
+    ]);
+    builder.register_entity::<crate::entities::inventory::Entity>(vec![
+        entity_object_relation_builder
+            .get_relation::<crate::entities::inventory::Entity, crate::entities::film::Entity>(
+                "film",
+                crate::entities::inventory::Relation::Film.def(),
+            ),
+        entity_object_relation_builder
+            .get_relation::<crate::entities::inventory::Entity, crate::entities::rental::Entity>(
+                "rental",
+                crate::entities::inventory::Relation::Rental.def(),
+            ),
+        entity_object_relation_builder
+            .get_relation::<crate::entities::inventory::Entity, crate::entities::store::Entity>(
+                "store",
+                crate::entities::inventory::Relation::Store.def(),
+            ),
+    ]);
+    builder.register_entity::<crate::entities::film_category::Entity>(vec![entity_object_relation_builder.get_relation::<crate::entities::film_category::Entity, crate::entities::category::Entity>("category", crate::entities::film_category::Relation::Category.def()), entity_object_relation_builder.get_relation::<crate::entities::film_category::Entity, crate::entities::film::Entity>("film", crate::entities::film_category::Relation::Film.def())]);
+    builder.register_entity::<crate::entities::customer::Entity>(vec![
+        entity_object_relation_builder
+            .get_relation::<crate::entities::customer::Entity, crate::entities::address::Entity>(
+                "address",
+                crate::entities::customer::Relation::Address.def(),
+            ),
+        entity_object_relation_builder
+            .get_relation::<crate::entities::customer::Entity, crate::entities::payment::Entity>(
+                "payment",
+                crate::entities::customer::Relation::Payment.def(),
+            ),
+        entity_object_relation_builder
+            .get_relation::<crate::entities::customer::Entity, crate::entities::rental::Entity>(
+                "rental",
+                crate::entities::customer::Relation::Rental.def(),
+            ),
+        entity_object_relation_builder
+            .get_relation::<crate::entities::customer::Entity, crate::entities::store::Entity>(
+                "store",
+                crate::entities::customer::Relation::Store.def(),
+            ),
+    ]);
+    builder.register_entity::<crate::entities::store::Entity>(vec![
+        entity_object_relation_builder
+            .get_relation::<crate::entities::store::Entity, crate::entities::address::Entity>(
+                "address",
+                crate::entities::store::Relation::Address.def(),
+            ),
+        entity_object_relation_builder
+            .get_relation::<crate::entities::store::Entity, crate::entities::customer::Entity>(
+                "customer",
+                crate::entities::store::Relation::Customer.def(),
+            ),
+        entity_object_relation_builder
+            .get_relation::<crate::entities::store::Entity, crate::entities::inventory::Entity>(
+                "inventory",
+                crate::entities::store::Relation::Inventory.def(),
+            ),
+        entity_object_relation_builder
+            .get_relation::<crate::entities::store::Entity, crate::entities::staff::Entity>(
+                "staff",
+                crate::entities::store::Relation::Staff.def(),
+            ),
+    ]);
+    builder.register_entity::<crate::entities::payment::Entity>(vec![
+        entity_object_relation_builder
+            .get_relation::<crate::entities::payment::Entity, crate::entities::customer::Entity>(
+                "customer",
+                crate::entities::payment::Relation::Customer.def(),
+            ),
+        entity_object_relation_builder
+            .get_relation::<crate::entities::payment::Entity, crate::entities::rental::Entity>(
+                "rental",
+                crate::entities::payment::Relation::Rental.def(),
+            ),
+        entity_object_relation_builder
+            .get_relation::<crate::entities::payment::Entity, crate::entities::staff::Entity>(
+                "staff",
+                crate::entities::payment::Relation::Staff.def(),
+            ),
+    ]);
+    builder.register_entity::<crate::entities::address::Entity>(vec![
+        entity_object_relation_builder
+            .get_relation::<crate::entities::address::Entity, crate::entities::city::Entity>(
+                "city",
+                crate::entities::address::Relation::City.def(),
+            ),
+        entity_object_relation_builder
+            .get_relation::<crate::entities::address::Entity, crate::entities::customer::Entity>(
+                "customer",
+                crate::entities::address::Relation::Customer.def(),
+            ),
+        entity_object_relation_builder
+            .get_relation::<crate::entities::address::Entity, crate::entities::staff::Entity>(
+                "staff",
+                crate::entities::address::Relation::Staff.def(),
+            ),
+        entity_object_relation_builder
+            .get_relation::<crate::entities::address::Entity, crate::entities::store::Entity>(
+                "store",
+                crate::entities::address::Relation::Store.def(),
+            ),
+    ]);
+    builder.register_enumeration::<crate::entities::sea_orm_active_enums::MpaaRating>();
+    let schema = builder.schema_builder();
+    let schema = if let Some(depth) = depth {
+        schema.limit_depth(depth)
+    } else {
+        schema
+    };
+    let schema = if let Some(complexity) = complexity {
+        schema.limit_complexity(complexity)
+    } else {
+        schema
+    };
+    schema.data(database).data(orm_dataloader).finish()
+}
diff --git a/examples/postgres/tests/query_tests.rs b/examples/postgres/tests/query_tests.rs
index 54190817..f7ca2f79 100644
--- a/examples/postgres/tests/query_tests.rs
+++ b/examples/postgres/tests/query_tests.rs
@@ -1,8 +1,8 @@
-use async_graphql::{dataloader::DataLoader, EmptyMutation, EmptySubscription, Response, Schema};
+use async_graphql::{dataloader::DataLoader, dynamic::*, Response};
 use sea_orm::Database;
-use seaography_postgres_example::{OrmDataloader, QueryRoot};
+use seaography_postgres_example::OrmDataloader;
 
-pub async fn get_schema() -> Schema<QueryRoot, EmptyMutation, EmptySubscription> {
+pub async fn get_schema() -> Schema {
     let database = Database::connect("postgres://sea:sea@127.0.0.1/sakila")
         .await
         .unwrap();
@@ -12,11 +12,11 @@ pub async fn get_schema() -> Schema
         },
         tokio::spawn,
     );
+    let schema =
+
seaography_postgres_example::query_root::schema(database, orm_dataloader, None, None) + .unwrap(); - Schema::build(QueryRoot, EmptyMutation, EmptySubscription) - .data(database) - .data(orm_dataloader) - .finish() + schema } pub fn assert_eq(a: Response, b: &str) { @@ -34,41 +34,41 @@ async fn test_simple_query() { schema .execute( r#" - { - store { - nodes { - storeId - staff { - firstName - lastName + { + store { + nodes { + storeId + staff { + firstName + lastName + } + } + } } - } - } - } "#, ) .await, r#" - { + { "store": { - "nodes": [ + "nodes": [ { - "storeId": 1, - "staff": { + "storeId": 1, + "staff": { "firstName": "Mike", "lastName": "Hillyer" - } + } }, { - "storeId": 2, - "staff": { + "storeId": 2, + "staff": { "firstName": "Jon", "lastName": "Stephens" - } } - ] + } + ] } - } + } "#, ) } @@ -81,35 +81,35 @@ async fn test_simple_query_with_filter() { schema .execute( r#" - { - store(filters: {storeId:{eq: 1}}) { - nodes { - storeId - staff { - firstName - lastName + { + store(filters: {storeId:{eq: 1}}) { + nodes { + storeId + staff { + firstName + lastName + } + } } - } } - } - "#, + "#, ) .await, r#" - { - "store": { - "nodes": [ { - "storeId": 1, - "staff": { - "firstName": "Mike", - "lastName": "Hillyer" - } + "store": { + "nodes": [ + { + "storeId": 1, + "staff": { + "firstName": "Mike", + "lastName": "Hillyer" + } + } + ] + } } - ] - } - } - "#, + "#, ) } @@ -121,40 +121,44 @@ async fn test_filter_with_pagination() { schema .execute( r#" - { + { customer( filters: { active: { eq: 0 } } - pagination: { pages: { page: 2, limit: 3 } } + pagination: { page: { page: 2, limit: 3 } } ) { nodes { customerId } - pages - current + paginationInfo { + pages + current + } } } - "#, + "#, ) .await, r#" - { - "customer": { - "nodes": [ { - "customerId": 315 - }, - { - "customerId": 368 - }, - { - "customerId": 406 + "customer": { + "nodes": [ + { + "customerId": 315 + }, + { + "customerId": 368 + }, + { + "customerId": 406 + } + ], + "paginationInfo": { + "pages": 5, + "current": 2 + } + } } - ], - "pages": 5, - "current": 2 - } - } - "#, + "#, ) } @@ -166,40 +170,44 @@ async fn test_complex_filter_with_pagination() { schema .execute( r#" - { + { payment( filters: { amount: { gt: "11.1" } } - pagination: { pages: { limit: 2, page: 3 } } + pagination: { page: { limit: 2, page: 3 } } ) { nodes { paymentId amount } - pages - current + paginationInfo { + pages + current + } } - } - "#, + } + "#, ) .await, r#" - { - "payment": { - "nodes": [ - { - "paymentId": 8272, - "amount": "11.9900" - }, - { - "paymentId": 9803, - "amount": "11.9900" - } - ], - "pages": 5, - "current": 3 - } - } - "#, + { + "payment": { + "nodes": [ + { + "paymentId": 8272, + "amount": "11.9900" + }, + { + "paymentId": 9803, + "amount": "11.9900" + } + ], + "paginationInfo": { + "pages": 5, + "current": 3 + } + } + } + "#, ) } @@ -211,90 +219,90 @@ async fn test_cursor_pagination() { schema .execute( r#" - { + { payment( - filters: { amount: { gt: "11" } } - pagination: { cursor: { limit: 5 } } + filters: { amount: { gt: "11" } } + pagination: { cursor: { limit: 5 } } ) { - edges { + edges { node { - paymentId - amount - customer { + paymentId + amount + customer { firstName - } + } } - } - pageInfo { + } + pageInfo { hasPreviousPage hasNextPage startCursor endCursor - } + } } - } - "#, + } + "#, ) .await, r#" - { - "payment": { - "edges": [ - { - "node": { - "paymentId": 342, - "amount": "11.9900", - "customer": { - "firstName": "KAREN" - } - } - }, - { - "node": { - "paymentId": 3146, - "amount": "11.9900", - 
"customer": { - "firstName": "VICTORIA" - } - } - }, - { - "node": { - "paymentId": 5280, - "amount": "11.9900", - "customer": { - "firstName": "VANESSA" - } - } - }, - { - "node": { - "paymentId": 5281, - "amount": "11.9900", - "customer": { - "firstName": "ALMA" - } + { + "payment": { + "edges": [ + { + "node": { + "paymentId": 342, + "amount": "11.9900", + "customer": { + "firstName": "KAREN" + } + } + }, + { + "node": { + "paymentId": 3146, + "amount": "11.9900", + "customer": { + "firstName": "VICTORIA" + } + } + }, + { + "node": { + "paymentId": 5280, + "amount": "11.9900", + "customer": { + "firstName": "VANESSA" + } + } + }, + { + "node": { + "paymentId": 5281, + "amount": "11.9900", + "customer": { + "firstName": "ALMA" + } + } + }, + { + "node": { + "paymentId": 5550, + "amount": "11.9900", + "customer": { + "firstName": "ROSEMARY" + } + } + } + ], + "pageInfo": { + "hasPreviousPage": false, + "hasNextPage": true, + "startCursor": "Int[3]:342", + "endCursor": "Int[4]:5550" + } } - }, - { - "node": { - "paymentId": 5550, - "amount": "11.9900", - "customer": { - "firstName": "ROSEMARY" - } } - } - ], - "pageInfo": { - "hasPreviousPage": false, - "hasNextPage": true, - "startCursor": "Int[3]:342", - "endCursor": "Int[4]:5550" - } - } - } - "#, + "#, ) } @@ -306,207 +314,208 @@ async fn test_cursor_pagination_prev() { schema .execute( r#" - { - payment( - filters: { amount: { gt: "11" } } - pagination: { cursor: { limit: 3, cursor: "SmallUnsigned[4]:5550" } } - ) { - edges { - node { - paymentId - amount - customer { - firstName - } + { + payment( + filters: { amount: { gt: "11" } } + pagination: { cursor: { limit: 3, cursor: "SmallUnsigned[4]:5550" } } + ) { + edges { + node { + paymentId + amount + customer { + firstName } } - pageInfo { - hasPreviousPage - hasNextPage - startCursor - endCursor - } + } + pageInfo { + hasPreviousPage + hasNextPage + startCursor + endCursor } } - "#, + } + "#, ) .await, r#" - { - "payment": { - "edges": [ - { - "node": { - "paymentId": 6409, - "amount": "11.9900", - "customer": { - "firstName": "TANYA" - } - } - }, - { - "node": { - "paymentId": 8272, - "amount": "11.9900", - "customer": { - "firstName": "RICHARD" - } - } - }, - { - "node": { - "paymentId": 9803, - "amount": "11.9900", - "customer": { - "firstName": "NICHOLAS" - } - } - } - ], - "pageInfo": { - "hasPreviousPage": true, - "hasNextPage": true, - "startCursor": "Int[4]:6409", - "endCursor": "Int[4]:9803" - } - } - } - "#, - ) -} - -#[tokio::test] -async fn test_cursor_pagination_no_next() { - let schema = get_schema().await; - - assert_eq( - schema - .execute( - r#" { - payment( - filters: { amount: { gt: "11" } } - pagination: { cursor: { limit: 3, cursor: "SmallUnsigned[4]:9803" } } - ) { - edges { - node { - paymentId - amount - customer { - firstName + "payment": { + "edges": [ + { + "node": { + "paymentId": 6409, + "amount": "11.9900", + "customer": { + "firstName": "TANYA" + } + } + }, + { + "node": { + "paymentId": 8272, + "amount": "11.9900", + "customer": { + "firstName": "RICHARD" + } + } + }, + { + "node": { + "paymentId": 9803, + "amount": "11.9900", + "customer": { + "firstName": "NICHOLAS" + } } - } } - pageInfo { - hasPreviousPage - hasNextPage - startCursor - endCursor + ], + "pageInfo": { + "hasPreviousPage": true, + "hasNextPage": true, + "startCursor": "Int[4]:6409", + "endCursor": "Int[4]:9803" } - } - } - "#, - ) - .await, - r#" - { - "payment": { - "edges": [ - { - "node": { - "paymentId": 15821, - "amount": "11.9900", - "customer": { - "firstName": "KENT" - } } - 
}, - { - "node": { - "paymentId": 15850, - "amount": "11.9900", - "customer": { - "firstName": "TERRANCE" - } } - } - ], - "pageInfo": { - "hasPreviousPage": true, - "hasNextPage": false, - "startCursor": "Int[5]:15821", - "endCursor": "Int[5]:15850" - } - } - } - "#, + "#, ) } #[tokio::test] -async fn test_self_ref() { +async fn test_cursor_pagination_no_next() { let schema = get_schema().await; assert_eq( schema .execute( r#" - { - staff { - nodes { + { + payment( + filters: { amount: { gt: "11" } } + pagination: { cursor: { limit: 3, cursor: "SmallUnsigned[4]:9803" } } + ) { + edges { + node { + paymentId + amount + customer { firstName - reportsToId - selfRefReverse { - nodes { - staffId - firstName - } - } - selfRef { - staffId - firstName - } } } } - "#, + pageInfo { + hasPreviousPage + hasNextPage + startCursor + endCursor + } + } + } + "#, ) .await, r#" - { - "staff": { - "nodes": [ { - "firstName": "Mike", - "reportsToId": null, - "selfRefReverse": { - "nodes": [ - { - "staffId": 2, - "firstName": "Jon" - } - ] - }, - "selfRef": null - }, - { - "firstName": "Jon", - "reportsToId": 1, - "selfRefReverse": { - "nodes": [] - }, - "selfRef": { - "staffId": 1, - "firstName": "Mike" - } + "payment": { + "edges": [ + { + "node": { + "paymentId": 15821, + "amount": "11.9900", + "customer": { + "firstName": "KENT" + } + } + }, + { + "node": { + "paymentId": 15850, + "amount": "11.9900", + "customer": { + "firstName": "TERRANCE" + } + } + } + ], + "pageInfo": { + "hasPreviousPage": true, + "hasNextPage": false, + "startCursor": "Int[5]:15821", + "endCursor": "Int[5]:15850" + } } - ] - } - } - "#, + } + "#, ) } +// FIXME: add required info at database, see mysql +// #[tokio::test] +// async fn test_self_ref() { +// let schema = get_schema().await; + +// assert_eq( +// schema +// .execute( +// r#" +// { +// staff { +// nodes { +// firstName +// reportsToId +// selfRefReverse { +// nodes { +// staffId +// firstName +// } +// } +// selfRef { +// staffId +// firstName +// } +// } +// } +// } +// "#, +// ) +// .await, +// r#" +// { +// "staff": { +// "nodes": [ +// { +// "firstName": "Mike", +// "reportsToId": null, +// "selfRefReverse": { +// "nodes": [ +// { +// "staffId": 2, +// "firstName": "Jon" +// } +// ] +// }, +// "selfRef": null +// }, +// { +// "firstName": "Jon", +// "reportsToId": 1, +// "selfRefReverse": { +// "nodes": [] +// }, +// "selfRef": { +// "staffId": 1, +// "firstName": "Mike" +// } +// } +// ] +// } +// } +// "#, +// ) +// } + #[tokio::test] async fn related_queries_filters() { let schema = get_schema().await; @@ -515,7 +524,7 @@ async fn related_queries_filters() { schema .execute( r#" - { + { customer( filters: { active: { eq: 0 } } pagination: { cursor: { limit: 3, cursor: "Int[3]:271" } } @@ -539,80 +548,80 @@ async fn related_queries_filters() { endCursor } } - } - "#, + } + "#, ) .await, r#" - { - "customer": { - "nodes": [ - { - "customerId": 315, - "lastName": "GOODEN", - "email": "KENNETH.GOODEN@sakilacustomer.org", - "address": { - "address": "1542 Lubumbashi Boulevard" - }, - "payment": { - "nodes": [ - { - "paymentId": 8547 + { + "customer": { + "nodes": [ + { + "customerId": 315, + "lastName": "GOODEN", + "email": "KENNETH.GOODEN@sakilacustomer.org", + "address": { + "address": "1542 Lubumbashi Boulevard" }, - { - "paymentId": 8537 + "payment": { + "nodes": [ + { + "paymentId": 8547 + }, + { + "paymentId": 8537 + } + ] } - ] - } - }, - { - "customerId": 368, - "lastName": "ARCE", - "email": "HARRY.ARCE@sakilacustomer.org", - "address": { - "address": "1922 
Miraj Way" - }, - "payment": { - "nodes": [ - { - "paymentId": 9945 - }, - { - "paymentId": 9962 - }, - { - "paymentId": 9967 + }, + { + "customerId": 368, + "lastName": "ARCE", + "email": "HARRY.ARCE@sakilacustomer.org", + "address": { + "address": "1922 Miraj Way" }, - { - "paymentId": 9953 + "payment": { + "nodes": [ + { + "paymentId": 9945 + }, + { + "paymentId": 9953 + }, + { + "paymentId": 9962 + }, + { + "paymentId": 9967 + } + ] } - ] - } - }, - { - "customerId": 406, - "lastName": "RUNYON", - "email": "NATHAN.RUNYON@sakilacustomer.org", - "address": { - "address": "264 Bhimavaram Manor" - }, - "payment": { - "nodes": [ - { - "paymentId": 10998 + }, + { + "customerId": 406, + "lastName": "RUNYON", + "email": "NATHAN.RUNYON@sakilacustomer.org", + "address": { + "address": "264 Bhimavaram Manor" + }, + "payment": { + "nodes": [ + { + "paymentId": 10998 + } + ] } - ] + } + ], + "pageInfo": { + "hasPreviousPage": true, + "hasNextPage": true, + "endCursor": "Int[3]:406" } } - ], - "pageInfo": { - "hasPreviousPage": true, - "hasNextPage": true, - "endCursor": "Int[3]:406" } - } - } - "#, + "#, ) } @@ -624,122 +633,183 @@ async fn related_queries_pagination() { schema .execute( r#" - { - customer( - filters: { active: { eq: 0 } } - pagination: { cursor: { limit: 3, cursor: "Int[3]:271" } } - ) { - nodes { - customerId - lastName - email - address { - address + { + customer( + filters: { active: { eq: 0 } } + pagination: { cursor: { limit: 3, cursor: "Int[3]:271" } } + ) { + nodes { + customerId + lastName + email + address { + address + } + payment( + filters: { amount: { gt: "7" } } + orderBy: { amount: ASC } + pagination: { page: { limit: 1, page: 1 } } + ) { + nodes { + paymentId + amount } - payment( - filters: { amount: { gt: "7" } } - orderBy: { amount: ASC } - pagination: { pages: { limit: 1, page: 1 } } - ) { - nodes { - paymentId - amount - } + paginationInfo { pages current - pageInfo { - hasPreviousPage - hasNextPage - } } - } - pageInfo { - hasPreviousPage - hasNextPage - endCursor + pageInfo { + hasPreviousPage + hasNextPage + } } } + pageInfo { + hasPreviousPage + hasNextPage + endCursor + } } - "#, + } + "#, ) .await, r#" { - "customer": { - "nodes": [ - { - "customerId": 315, - "lastName": "GOODEN", - "email": "KENNETH.GOODEN@sakilacustomer.org", - "address": { - "address": "1542 Lubumbashi Boulevard" - }, - "payment": { - "nodes": [ - { - "paymentId": 8547, - "amount": "9.9900" + "customer": { + "nodes": [ + { + "customerId": 315, + "lastName": "GOODEN", + "email": "KENNETH.GOODEN@sakilacustomer.org", + "address": { + "address": "1542 Lubumbashi Boulevard" + }, + "payment": { + "nodes": [ + { + "paymentId": 8547, + "amount": "9.9900" + } + ], + "paginationInfo": { + "pages": 2, + "current": 1 + }, + "pageInfo": { + "hasPreviousPage": true, + "hasNextPage": false } - ], - "pages": 2, - "current": 1, - "pageInfo": { - "hasPreviousPage": true, - "hasNextPage": false } - } - }, - { - "customerId": 368, - "lastName": "ARCE", - "email": "HARRY.ARCE@sakilacustomer.org", - "address": { - "address": "1922 Miraj Way" }, - "payment": { - "nodes": [ - { - "paymentId": 9972, - "amount": "7.9900" + { + "customerId": 368, + "lastName": "ARCE", + "email": "HARRY.ARCE@sakilacustomer.org", + "address": { + "address": "1922 Miraj Way" + }, + "payment": { + "nodes": [ + { + "paymentId": 9972, + "amount": "7.9900" + } + ], + "paginationInfo": { + "pages": 6, + "current": 1 + }, + "pageInfo": { + "hasPreviousPage": true, + "hasNextPage": true } - ], - "pages": 6, - "current": 1, - "pageInfo": 
{ - "hasPreviousPage": true, - "hasNextPage": true } - } - }, - { - "customerId": 406, - "lastName": "RUNYON", - "email": "NATHAN.RUNYON@sakilacustomer.org", - "address": { - "address": "264 Bhimavaram Manor" }, - "payment": { - "nodes": [ - { - "paymentId": 10989, - "amount": "7.9900" + { + "customerId": 406, + "lastName": "RUNYON", + "email": "NATHAN.RUNYON@sakilacustomer.org", + "address": { + "address": "264 Bhimavaram Manor" + }, + "payment": { + "nodes": [ + { + "paymentId": 10989, + "amount": "7.9900" + } + ], + "paginationInfo": { + "pages": 3, + "current": 1 + }, + "pageInfo": { + "hasPreviousPage": true, + "hasNextPage": true } - ], - "pages": 3, - "current": 1, - "pageInfo": { - "hasPreviousPage": true, - "hasNextPage": true } } + ], + "pageInfo": { + "hasPreviousPage": true, + "hasNextPage": true, + "endCursor": "Int[3]:406" } - ], - "pageInfo": { - "hasPreviousPage": true, - "hasNextPage": true, - "endCursor": "Int[3]:406" } } + "#, + ) +} + +#[tokio::test] +async fn enumeration_filter() { + let schema = get_schema().await; + + assert_eq( + schema + .execute( + r#" + { + film( + filters: { rating: { eq: NC17 } } + pagination: { page: { page: 1, limit: 5 } } + ) { + nodes { + filmId + rating + } + } + } + "#, + ) + .await, + r#" + { + "film": { + "nodes": [ + { + "filmId": 27, + "rating": "NC17" + }, + { + "filmId": 29, + "rating": "NC17" + }, + { + "filmId": 31, + "rating": "NC17" + }, + { + "filmId": 34, + "rating": "NC17" + }, + { + "filmId": 38, + "rating": "NC17" + } + ] + } } "#, ) diff --git a/examples/sqlite/Cargo.toml b/examples/sqlite/Cargo.toml index dbbf9e1f..8e3098ef 100644 --- a/examples/sqlite/Cargo.toml +++ b/examples/sqlite/Cargo.toml @@ -4,20 +4,20 @@ name = "seaography-sqlite-example" version = "0.3.0" [dependencies] -poem = { version = "1.3.29" } -async-graphql = { version = "4.0.14", features = ["decimal", "chrono", "dataloader"] } -async-graphql-poem = { version = "4.0.14" } -async-trait = { version = "0.1.53" } +poem = { version = "1.3.55" } +async-graphql-poem = { version = "5.0.6" } +async-graphql = { version = "5.0.6", features = ["decimal", "chrono", "dataloader", "dynamic-schema"] } +async-trait = { version = "0.1.64" } dotenv = "0.15.0" -sea-orm = { version = "^0.10", features = ["sqlx-sqlite", "runtime-async-std-native-tls"] } -tokio = { version = "1.17.0", features = ["macros", "rt-multi-thread"] } -tracing = { version = "0.1.34" } -tracing-subscriber = { version = "0.3.11" } +sea-orm = { version = "0.11.0", features = ["sqlx-sqlite", "runtime-async-std-native-tls"] } +tokio = { version = "1.26.0", features = ["macros", "rt-multi-thread"] } +tracing = { version = "0.1.37" } +tracing-subscriber = { version = "0.3.16" } lazy_static = { version = "1.4.0" } [dependencies.seaography] path = "../../" # remove this line in your own project -version = "0.3.0" +version = "^1.0.0" # seaography version features = ["with-decimal", "with-chrono"] [dev-dependencies] diff --git a/examples/sqlite/src/entities/actor.rs b/examples/sqlite/src/entities/actor.rs index f4844a2d..ff14665f 100644 --- a/examples/sqlite/src/entities/actor.rs +++ b/examples/sqlite/src/entities/actor.rs @@ -1,16 +1,9 @@ +//! `SeaORM` Entity. 
Generated by sea-orm-codegen 0.11.0 + use sea_orm::entity::prelude::*; -#[derive( - Clone, - Debug, - PartialEq, - DeriveEntityModel, - async_graphql::SimpleObject, - seaography::macros::Filter, -)] +#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq)] #[sea_orm(table_name = "actor")] -#[graphql(complex)] -#[graphql(name = "Actor")] pub struct Model { #[sea_orm(primary_key)] pub actor_id: i32, @@ -19,15 +12,15 @@ pub struct Model { pub last_update: DateTimeUtc, } -#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation, seaography::macros::RelationsCompact)] -pub enum Relation { - #[sea_orm(has_many = "super::film_actor::Entity")] - FilmActor, -} +#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] +pub enum Relation {} -impl Related for Entity { +impl Related for Entity { fn to() -> RelationDef { - Relation::FilmActor.def() + super::film_actor::Relation::Film.def() + } + fn via() -> Option { + Some(super::film_actor::Relation::Actor.def().rev()) } } diff --git a/examples/sqlite/src/entities/address.rs b/examples/sqlite/src/entities/address.rs index e41a877a..b9483ed9 100644 --- a/examples/sqlite/src/entities/address.rs +++ b/examples/sqlite/src/entities/address.rs @@ -1,16 +1,9 @@ +//! `SeaORM` Entity. Generated by sea-orm-codegen 0.11.0 + use sea_orm::entity::prelude::*; -#[derive( - Clone, - Debug, - PartialEq, - DeriveEntityModel, - async_graphql::SimpleObject, - seaography::macros::Filter, -)] +#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq)] #[sea_orm(table_name = "address")] -#[graphql(complex)] -#[graphql(name = "Address")] pub struct Model { #[sea_orm(primary_key, auto_increment = false)] pub address_id: i32, @@ -23,7 +16,7 @@ pub struct Model { pub last_update: DateTimeUtc, } -#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation, seaography::macros::RelationsCompact)] +#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] pub enum Relation { #[sea_orm( belongs_to = "super::city::Entity", diff --git a/examples/sqlite/src/entities/category.rs b/examples/sqlite/src/entities/category.rs index 66ee88d3..0ec69eb5 100644 --- a/examples/sqlite/src/entities/category.rs +++ b/examples/sqlite/src/entities/category.rs @@ -1,16 +1,9 @@ +//! `SeaORM` Entity. Generated by sea-orm-codegen 0.11.0 + use sea_orm::entity::prelude::*; -#[derive( - Clone, - Debug, - PartialEq, - DeriveEntityModel, - async_graphql::SimpleObject, - seaography::macros::Filter, -)] +#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq)] #[sea_orm(table_name = "category")] -#[graphql(complex)] -#[graphql(name = "Category")] pub struct Model { #[sea_orm(primary_key, auto_increment = false)] pub category_id: i16, @@ -18,15 +11,15 @@ pub struct Model { pub last_update: DateTimeUtc, } -#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation, seaography::macros::RelationsCompact)] -pub enum Relation { - #[sea_orm(has_many = "super::film_category::Entity")] - FilmCategory, -} +#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] +pub enum Relation {} -impl Related for Entity { +impl Related for Entity { fn to() -> RelationDef { - Relation::FilmCategory.def() + super::film_category::Relation::Film.def() + } + fn via() -> Option { + Some(super::film_category::Relation::Category.def().rev()) } } diff --git a/examples/sqlite/src/entities/city.rs b/examples/sqlite/src/entities/city.rs index 15691770..0eb99e01 100644 --- a/examples/sqlite/src/entities/city.rs +++ b/examples/sqlite/src/entities/city.rs @@ -1,16 +1,9 @@ +//! `SeaORM` Entity. 
Generated by sea-orm-codegen 0.11.0 + use sea_orm::entity::prelude::*; -#[derive( - Clone, - Debug, - PartialEq, - DeriveEntityModel, - async_graphql::SimpleObject, - seaography::macros::Filter, -)] +#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq)] #[sea_orm(table_name = "city")] -#[graphql(complex)] -#[graphql(name = "City")] pub struct Model { #[sea_orm(primary_key, auto_increment = false)] pub city_id: i32, @@ -19,8 +12,10 @@ pub struct Model { pub last_update: DateTimeUtc, } -#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation, seaography::macros::RelationsCompact)] +#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] pub enum Relation { + #[sea_orm(has_many = "super::address::Entity")] + Address, #[sea_orm( belongs_to = "super::country::Entity", from = "Column::CountryId", @@ -29,19 +24,17 @@ pub enum Relation { on_delete = "NoAction" )] Country, - #[sea_orm(has_many = "super::address::Entity")] - Address, } -impl Related for Entity { +impl Related for Entity { fn to() -> RelationDef { - Relation::Country.def() + Relation::Address.def() } } -impl Related for Entity { +impl Related for Entity { fn to() -> RelationDef { - Relation::Address.def() + Relation::Country.def() } } diff --git a/examples/sqlite/src/entities/country.rs b/examples/sqlite/src/entities/country.rs index 8f2d16ba..2424fbcc 100644 --- a/examples/sqlite/src/entities/country.rs +++ b/examples/sqlite/src/entities/country.rs @@ -1,16 +1,9 @@ +//! `SeaORM` Entity. Generated by sea-orm-codegen 0.11.0 + use sea_orm::entity::prelude::*; -#[derive( - Clone, - Debug, - PartialEq, - DeriveEntityModel, - async_graphql::SimpleObject, - seaography::macros::Filter, -)] +#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq)] #[sea_orm(table_name = "country")] -#[graphql(complex)] -#[graphql(name = "Country")] pub struct Model { #[sea_orm(primary_key, auto_increment = false)] pub country_id: i16, @@ -18,7 +11,7 @@ pub struct Model { pub last_update: Option, } -#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation, seaography::macros::RelationsCompact)] +#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] pub enum Relation { #[sea_orm(has_many = "super::city::Entity")] City, diff --git a/examples/sqlite/src/entities/customer.rs b/examples/sqlite/src/entities/customer.rs index 2294dbc8..87ef6d62 100644 --- a/examples/sqlite/src/entities/customer.rs +++ b/examples/sqlite/src/entities/customer.rs @@ -1,16 +1,9 @@ +//! `SeaORM` Entity. 
Generated by sea-orm-codegen 0.11.0 + use sea_orm::entity::prelude::*; -#[derive( - Clone, - Debug, - PartialEq, - DeriveEntityModel, - async_graphql::SimpleObject, - seaography::macros::Filter, -)] +#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq)] #[sea_orm(table_name = "customer")] -#[graphql(complex)] -#[graphql(name = "Customer")] pub struct Model { #[sea_orm(primary_key, auto_increment = false)] pub customer_id: i32, @@ -24,7 +17,7 @@ pub struct Model { pub last_update: DateTimeUtc, } -#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation, seaography::macros::RelationsCompact)] +#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] pub enum Relation { #[sea_orm( belongs_to = "super::address::Entity", @@ -34,6 +27,10 @@ pub enum Relation { on_delete = "NoAction" )] Address, + #[sea_orm(has_many = "super::payment::Entity")] + Payment, + #[sea_orm(has_many = "super::rental::Entity")] + Rental, #[sea_orm( belongs_to = "super::store::Entity", from = "Column::StoreId", @@ -42,10 +39,6 @@ pub enum Relation { on_delete = "NoAction" )] Store, - #[sea_orm(has_many = "super::payment::Entity")] - Payment, - #[sea_orm(has_many = "super::rental::Entity")] - Rental, } impl Related for Entity { @@ -54,12 +47,6 @@ impl Related for Entity { } } -impl Related for Entity { - fn to() -> RelationDef { - Relation::Store.def() - } -} - impl Related for Entity { fn to() -> RelationDef { Relation::Payment.def() @@ -72,4 +59,10 @@ impl Related for Entity { } } +impl Related for Entity { + fn to() -> RelationDef { + Relation::Store.def() + } +} + impl ActiveModelBehavior for ActiveModel {} diff --git a/examples/sqlite/src/entities/film.rs b/examples/sqlite/src/entities/film.rs index 0703949a..7a773950 100644 --- a/examples/sqlite/src/entities/film.rs +++ b/examples/sqlite/src/entities/film.rs @@ -1,16 +1,9 @@ +//! `SeaORM` Entity. 
Generated by sea-orm-codegen 0.11.0 + use sea_orm::entity::prelude::*; -#[derive( - Clone, - Debug, - PartialEq, - DeriveEntityModel, - async_graphql::SimpleObject, - seaography::macros::Filter, -)] +#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq)] #[sea_orm(table_name = "film")] -#[graphql(complex)] -#[graphql(name = "Film")] pub struct Model { #[sea_orm(primary_key, auto_increment = false)] pub film_id: i32, @@ -30,8 +23,10 @@ pub struct Model { pub last_update: DateTimeUtc, } -#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation, seaography::macros::RelationsCompact)] +#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] pub enum Relation { + #[sea_orm(has_many = "super::inventory::Entity")] + Inventory, #[sea_orm( belongs_to = "super::language::Entity", from = "Column::OriginalLanguageId", @@ -48,29 +43,29 @@ pub enum Relation { on_delete = "NoAction" )] Language1, - #[sea_orm(has_many = "super::film_actor::Entity")] - FilmActor, - #[sea_orm(has_many = "super::film_category::Entity")] - FilmCategory, - #[sea_orm(has_many = "super::inventory::Entity")] - Inventory, } -impl Related for Entity { +impl Related for Entity { fn to() -> RelationDef { - Relation::FilmActor.def() + Relation::Inventory.def() } } -impl Related for Entity { +impl Related for Entity { fn to() -> RelationDef { - Relation::FilmCategory.def() + super::film_actor::Relation::Actor.def() + } + fn via() -> Option { + Some(super::film_actor::Relation::Film.def().rev()) } } -impl Related for Entity { +impl Related for Entity { fn to() -> RelationDef { - Relation::Inventory.def() + super::film_category::Relation::Category.def() + } + fn via() -> Option { + Some(super::film_category::Relation::Film.def().rev()) } } diff --git a/examples/sqlite/src/entities/film_actor.rs b/examples/sqlite/src/entities/film_actor.rs index 429587c6..aecce111 100644 --- a/examples/sqlite/src/entities/film_actor.rs +++ b/examples/sqlite/src/entities/film_actor.rs @@ -1,16 +1,9 @@ +//! `SeaORM` Entity. 
Generated by sea-orm-codegen 0.11.0 + use sea_orm::entity::prelude::*; -#[derive( - Clone, - Debug, - PartialEq, - DeriveEntityModel, - async_graphql::SimpleObject, - seaography::macros::Filter, -)] +#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq)] #[sea_orm(table_name = "film_actor")] -#[graphql(complex)] -#[graphql(name = "FilmActor")] pub struct Model { #[sea_orm(primary_key, auto_increment = false)] pub actor_id: i32, @@ -19,16 +12,8 @@ pub struct Model { pub last_update: DateTimeUtc, } -#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation, seaography::macros::RelationsCompact)] +#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] pub enum Relation { - #[sea_orm( - belongs_to = "super::film::Entity", - from = "Column::FilmId", - to = "super::film::Column::FilmId", - on_update = "Cascade", - on_delete = "NoAction" - )] - Film, #[sea_orm( belongs_to = "super::actor::Entity", from = "Column::ActorId", @@ -37,17 +22,25 @@ pub enum Relation { on_delete = "NoAction" )] Actor, + #[sea_orm( + belongs_to = "super::film::Entity", + from = "Column::FilmId", + to = "super::film::Column::FilmId", + on_update = "Cascade", + on_delete = "NoAction" + )] + Film, } -impl Related for Entity { +impl Related for Entity { fn to() -> RelationDef { - Relation::Film.def() + Relation::Actor.def() } } -impl Related for Entity { +impl Related for Entity { fn to() -> RelationDef { - Relation::Actor.def() + Relation::Film.def() } } diff --git a/examples/sqlite/src/entities/film_category.rs b/examples/sqlite/src/entities/film_category.rs index b61e4933..1cc94c7b 100644 --- a/examples/sqlite/src/entities/film_category.rs +++ b/examples/sqlite/src/entities/film_category.rs @@ -1,16 +1,9 @@ +//! `SeaORM` Entity. Generated by sea-orm-codegen 0.11.0 + use sea_orm::entity::prelude::*; -#[derive( - Clone, - Debug, - PartialEq, - DeriveEntityModel, - async_graphql::SimpleObject, - seaography::macros::Filter, -)] +#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq)] #[sea_orm(table_name = "film_category")] -#[graphql(complex)] -#[graphql(name = "FilmCategory")] pub struct Model { #[sea_orm(primary_key, auto_increment = false)] pub film_id: i32, @@ -19,7 +12,7 @@ pub struct Model { pub last_update: DateTimeUtc, } -#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation, seaography::macros::RelationsCompact)] +#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] pub enum Relation { #[sea_orm( belongs_to = "super::category::Entity", diff --git a/examples/sqlite/src/entities/film_text.rs b/examples/sqlite/src/entities/film_text.rs index 2f6bea1b..b822b0fb 100644 --- a/examples/sqlite/src/entities/film_text.rs +++ b/examples/sqlite/src/entities/film_text.rs @@ -1,16 +1,9 @@ +//! `SeaORM` Entity. 
Generated by sea-orm-codegen 0.11.0 + use sea_orm::entity::prelude::*; -#[derive( - Clone, - Debug, - PartialEq, - DeriveEntityModel, - async_graphql::SimpleObject, - seaography::macros::Filter, -)] +#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq)] #[sea_orm(table_name = "film_text")] -#[graphql(complex)] -#[graphql(name = "FilmText")] pub struct Model { #[sea_orm(primary_key, auto_increment = false)] pub film_id: i16, @@ -18,7 +11,7 @@ pub struct Model { pub description: Option, } -#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation, seaography::macros::RelationsCompact)] +#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] pub enum Relation {} impl ActiveModelBehavior for ActiveModel {} diff --git a/examples/sqlite/src/entities/inventory.rs b/examples/sqlite/src/entities/inventory.rs index 265983e1..48494f91 100644 --- a/examples/sqlite/src/entities/inventory.rs +++ b/examples/sqlite/src/entities/inventory.rs @@ -1,16 +1,9 @@ +//! `SeaORM` Entity. Generated by sea-orm-codegen 0.11.0 + use sea_orm::entity::prelude::*; -#[derive( - Clone, - Debug, - PartialEq, - DeriveEntityModel, - async_graphql::SimpleObject, - seaography::macros::Filter, -)] +#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq)] #[sea_orm(table_name = "inventory")] -#[graphql(complex)] -#[graphql(name = "Inventory")] pub struct Model { #[sea_orm(primary_key, auto_increment = false)] pub inventory_id: i32, @@ -19,7 +12,7 @@ pub struct Model { pub last_update: DateTimeUtc, } -#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation, seaography::macros::RelationsCompact)] +#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] pub enum Relation { #[sea_orm( belongs_to = "super::film::Entity", @@ -29,6 +22,8 @@ pub enum Relation { on_delete = "NoAction" )] Film, + #[sea_orm(has_many = "super::rental::Entity")] + Rental, #[sea_orm( belongs_to = "super::store::Entity", from = "Column::StoreId", @@ -37,8 +32,6 @@ pub enum Relation { on_delete = "NoAction" )] Store, - #[sea_orm(has_many = "super::rental::Entity")] - Rental, } impl Related for Entity { @@ -47,15 +40,15 @@ impl Related for Entity { } } -impl Related for Entity { +impl Related for Entity { fn to() -> RelationDef { - Relation::Store.def() + Relation::Rental.def() } } -impl Related for Entity { +impl Related for Entity { fn to() -> RelationDef { - Relation::Rental.def() + Relation::Store.def() } } diff --git a/examples/sqlite/src/entities/language.rs b/examples/sqlite/src/entities/language.rs index 094fd81a..d6538f96 100644 --- a/examples/sqlite/src/entities/language.rs +++ b/examples/sqlite/src/entities/language.rs @@ -1,16 +1,9 @@ +//! `SeaORM` Entity. 
Generated by sea-orm-codegen 0.11.0 + use sea_orm::entity::prelude::*; -#[derive( - Clone, - Debug, - PartialEq, - DeriveEntityModel, - async_graphql::SimpleObject, - seaography::macros::Filter, -)] +#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq)] #[sea_orm(table_name = "language")] -#[graphql(complex)] -#[graphql(name = "Language")] pub struct Model { #[sea_orm(primary_key, auto_increment = false)] pub language_id: i16, @@ -18,7 +11,7 @@ pub struct Model { pub last_update: DateTimeUtc, } -#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation, seaography::macros::RelationsCompact)] +#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] pub enum Relation {} impl ActiveModelBehavior for ActiveModel {} diff --git a/examples/sqlite/src/entities/mod.rs b/examples/sqlite/src/entities/mod.rs index d436c5ff..a2976910 100644 --- a/examples/sqlite/src/entities/mod.rs +++ b/examples/sqlite/src/entities/mod.rs @@ -1,3 +1,7 @@ +//! `SeaORM` Entity. Generated by sea-orm-codegen 0.11.0 + +pub mod prelude; + pub mod actor; pub mod address; pub mod category; @@ -11,7 +15,6 @@ pub mod film_text; pub mod inventory; pub mod language; pub mod payment; -pub mod prelude; pub mod rental; pub mod staff; pub mod store; diff --git a/examples/sqlite/src/entities/payment.rs b/examples/sqlite/src/entities/payment.rs index e1b6949e..7e1edfec 100644 --- a/examples/sqlite/src/entities/payment.rs +++ b/examples/sqlite/src/entities/payment.rs @@ -1,16 +1,9 @@ +//! `SeaORM` Entity. Generated by sea-orm-codegen 0.11.0 + use sea_orm::entity::prelude::*; -#[derive( - Clone, - Debug, - PartialEq, - DeriveEntityModel, - async_graphql::SimpleObject, - seaography::macros::Filter, -)] +#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq)] #[sea_orm(table_name = "payment")] -#[graphql(complex)] -#[graphql(name = "Payment")] pub struct Model { #[sea_orm(primary_key, auto_increment = false)] pub payment_id: i32, @@ -23,16 +16,8 @@ pub struct Model { pub last_update: DateTimeUtc, } -#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation, seaography::macros::RelationsCompact)] +#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] pub enum Relation { - #[sea_orm( - belongs_to = "super::staff::Entity", - from = "Column::StaffId", - to = "super::staff::Column::StaffId", - on_update = "NoAction", - on_delete = "NoAction" - )] - Staff, #[sea_orm( belongs_to = "super::customer::Entity", from = "Column::CustomerId", @@ -49,12 +34,14 @@ pub enum Relation { on_delete = "SetNull" )] Rental, -} - -impl Related for Entity { - fn to() -> RelationDef { - Relation::Staff.def() - } + #[sea_orm( + belongs_to = "super::staff::Entity", + from = "Column::StaffId", + to = "super::staff::Column::StaffId", + on_update = "NoAction", + on_delete = "NoAction" + )] + Staff, } impl Related for Entity { @@ -69,4 +56,10 @@ impl Related for Entity { } } +impl Related for Entity { + fn to() -> RelationDef { + Relation::Staff.def() + } +} + impl ActiveModelBehavior for ActiveModel {} diff --git a/examples/sqlite/src/entities/prelude.rs b/examples/sqlite/src/entities/prelude.rs index 07d114ee..8c1f2aac 100644 --- a/examples/sqlite/src/entities/prelude.rs +++ b/examples/sqlite/src/entities/prelude.rs @@ -1,3 +1,5 @@ +//! `SeaORM` Entity. 
Generated by sea-orm-codegen 0.11.0 + pub use super::actor::Entity as Actor; pub use super::address::Entity as Address; pub use super::category::Entity as Category; diff --git a/examples/sqlite/src/entities/rental.rs b/examples/sqlite/src/entities/rental.rs index 7917cd37..7e29307e 100644 --- a/examples/sqlite/src/entities/rental.rs +++ b/examples/sqlite/src/entities/rental.rs @@ -1,16 +1,9 @@ +//! `SeaORM` Entity. Generated by sea-orm-codegen 0.11.0 + use sea_orm::entity::prelude::*; -#[derive( - Clone, - Debug, - PartialEq, - DeriveEntityModel, - async_graphql::SimpleObject, - seaography::macros::Filter, -)] +#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq)] #[sea_orm(table_name = "rental")] -#[graphql(complex)] -#[graphql(name = "Rental")] pub struct Model { #[sea_orm(primary_key, auto_increment = false)] pub rental_id: i32, @@ -22,7 +15,7 @@ pub struct Model { pub last_update: DateTimeUtc, } -#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation, seaography::macros::RelationsCompact)] +#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] pub enum Relation { #[sea_orm( belongs_to = "super::customer::Entity", @@ -40,6 +33,8 @@ pub enum Relation { on_delete = "NoAction" )] Inventory, + #[sea_orm(has_many = "super::payment::Entity")] + Payment, #[sea_orm( belongs_to = "super::staff::Entity", from = "Column::StaffId", @@ -48,8 +43,6 @@ pub enum Relation { on_delete = "NoAction" )] Staff, - #[sea_orm(has_many = "super::payment::Entity")] - Payment, } impl Related for Entity { @@ -64,15 +57,15 @@ impl Related for Entity { } } -impl Related for Entity { +impl Related for Entity { fn to() -> RelationDef { - Relation::Staff.def() + Relation::Payment.def() } } -impl Related for Entity { +impl Related for Entity { fn to() -> RelationDef { - Relation::Payment.def() + Relation::Staff.def() } } diff --git a/examples/sqlite/src/entities/staff.rs b/examples/sqlite/src/entities/staff.rs index 80046439..c02cae2a 100644 --- a/examples/sqlite/src/entities/staff.rs +++ b/examples/sqlite/src/entities/staff.rs @@ -1,16 +1,9 @@ +//! `SeaORM` Entity. 
Generated by sea-orm-codegen 0.11.0 + use sea_orm::entity::prelude::*; -#[derive( - Clone, - Debug, - PartialEq, - DeriveEntityModel, - async_graphql::SimpleObject, - seaography::macros::Filter, -)] +#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq)] #[sea_orm(table_name = "staff")] -#[graphql(complex)] -#[graphql(name = "Staff")] pub struct Model { #[sea_orm(primary_key, auto_increment = false)] pub staff_id: i16, @@ -27,7 +20,7 @@ pub struct Model { pub last_update: DateTimeUtc, } -#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation, seaography::macros::RelationsCompact)] +#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] pub enum Relation { #[sea_orm( belongs_to = "super::address::Entity", @@ -37,6 +30,10 @@ pub enum Relation { on_delete = "NoAction" )] Address, + #[sea_orm(has_many = "super::payment::Entity")] + Payment, + #[sea_orm(has_many = "super::rental::Entity")] + Rental, #[sea_orm( belongs_to = "Entity", from = "Column::ReportsToId", @@ -53,10 +50,6 @@ pub enum Relation { on_delete = "NoAction" )] Store, - #[sea_orm(has_many = "super::payment::Entity")] - Payment, - #[sea_orm(has_many = "super::rental::Entity")] - Rental, } impl Related for Entity { @@ -65,12 +58,6 @@ impl Related for Entity { } } -impl Related for Entity { - fn to() -> RelationDef { - Relation::Store.def() - } -} - impl Related for Entity { fn to() -> RelationDef { Relation::Payment.def() @@ -83,4 +70,10 @@ impl Related for Entity { } } +impl Related for Entity { + fn to() -> RelationDef { + Relation::Store.def() + } +} + impl ActiveModelBehavior for ActiveModel {} diff --git a/examples/sqlite/src/entities/store.rs b/examples/sqlite/src/entities/store.rs index 01a92f99..6b2b36d6 100644 --- a/examples/sqlite/src/entities/store.rs +++ b/examples/sqlite/src/entities/store.rs @@ -1,16 +1,9 @@ +//! `SeaORM` Entity. 
Generated by sea-orm-codegen 0.11.0 + use sea_orm::entity::prelude::*; -#[derive( - Clone, - Debug, - PartialEq, - DeriveEntityModel, - async_graphql::SimpleObject, - seaography::macros::Filter, -)] +#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq)] #[sea_orm(table_name = "store")] -#[graphql(complex)] -#[graphql(name = "Store")] pub struct Model { #[sea_orm(primary_key, auto_increment = false)] pub store_id: i32, @@ -19,7 +12,7 @@ pub struct Model { pub last_update: DateTimeUtc, } -#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation, seaography::macros::RelationsCompact)] +#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] pub enum Relation { #[sea_orm( belongs_to = "super::address::Entity", @@ -29,6 +22,10 @@ pub enum Relation { on_delete = "NoAction" )] Address, + #[sea_orm(has_many = "super::customer::Entity")] + Customer, + #[sea_orm(has_many = "super::inventory::Entity")] + Inventory, #[sea_orm( belongs_to = "super::staff::Entity", from = "Column::ManagerStaffId", @@ -37,10 +34,6 @@ pub enum Relation { on_delete = "NoAction" )] Staff, - #[sea_orm(has_many = "super::customer::Entity")] - Customer, - #[sea_orm(has_many = "super::inventory::Entity")] - Inventory, } impl Related for Entity { @@ -49,12 +42,6 @@ impl Related for Entity { } } -impl Related for Entity { - fn to() -> RelationDef { - Relation::Staff.def() - } -} - impl Related for Entity { fn to() -> RelationDef { Relation::Customer.def() @@ -67,4 +54,10 @@ impl Related for Entity { } } +impl Related for Entity { + fn to() -> RelationDef { + Relation::Staff.def() + } +} + impl ActiveModelBehavior for ActiveModel {} diff --git a/examples/sqlite/src/lib.rs b/examples/sqlite/src/lib.rs index fc1e6274..770aa614 100644 --- a/examples/sqlite/src/lib.rs +++ b/examples/sqlite/src/lib.rs @@ -1,12 +1,8 @@ -#![recursion_limit = "1024"] - use sea_orm::prelude::*; pub mod entities; pub mod query_root; -pub use query_root::QueryRoot; - pub struct OrmDataloader { pub db: DatabaseConnection, } diff --git a/examples/sqlite/src/main.rs b/examples/sqlite/src/main.rs index 34fba3c1..c6052c08 100644 --- a/examples/sqlite/src/main.rs +++ b/examples/sqlite/src/main.rs @@ -1,7 +1,6 @@ use async_graphql::{ dataloader::DataLoader, http::{playground_source, GraphQLPlaygroundConfig}, - EmptyMutation, EmptySubscription, Schema, }; use async_graphql_poem::GraphQL; use dotenv::dotenv; @@ -27,7 +26,7 @@ lazy_static! 
{ #[handler] async fn graphql_playground() -> impl IntoResponse { - Html(playground_source(GraphQLPlaygroundConfig::new(&ENDPOINT))) + Html(playground_source(GraphQLPlaygroundConfig::new(&*ENDPOINT))) } #[tokio::main] @@ -46,16 +45,13 @@ async fn main() { }, tokio::spawn, ); - let mut schema = Schema::build(QueryRoot, EmptyMutation, EmptySubscription) - .data(database) - .data(orm_dataloader); - if let Some(depth) = *DEPTH_LIMIT { - schema = schema.limit_depth(depth); - } - if let Some(complexity) = *COMPLEXITY_LIMIT { - schema = schema.limit_complexity(complexity); - } - let schema = schema.finish(); + let schema = seaography_sqlite_example::query_root::schema( + database, + orm_dataloader, + *DEPTH_LIMIT, + *COMPLEXITY_LIMIT, + ) + .unwrap(); let app = Route::new().at( &*ENDPOINT, get(graphql_playground).post(GraphQL::new(schema)), diff --git a/examples/sqlite/src/query_root.rs b/examples/sqlite/src/query_root.rs index 043b4ca0..185c390c 100644 --- a/examples/sqlite/src/query_root.rs +++ b/examples/sqlite/src/query_root.rs @@ -1,18 +1,252 @@ -#[derive(Debug, seaography::macros::QueryRoot)] -#[seaography(entity = "crate::entities::actor")] -#[seaography(entity = "crate::entities::address")] -#[seaography(entity = "crate::entities::category")] -#[seaography(entity = "crate::entities::city")] -#[seaography(entity = "crate::entities::country")] -#[seaography(entity = "crate::entities::customer")] -#[seaography(entity = "crate::entities::film")] -#[seaography(entity = "crate::entities::film_actor")] -#[seaography(entity = "crate::entities::film_category")] -#[seaography(entity = "crate::entities::film_text")] -#[seaography(entity = "crate::entities::inventory")] -#[seaography(entity = "crate::entities::language")] -#[seaography(entity = "crate::entities::payment")] -#[seaography(entity = "crate::entities::rental")] -#[seaography(entity = "crate::entities::staff")] -#[seaography(entity = "crate::entities::store")] -pub struct QueryRoot; +use crate::OrmDataloader; +use async_graphql::{dataloader::DataLoader, dynamic::*}; +use sea_orm::{DatabaseConnection, RelationTrait}; +use seaography::{ + Builder, BuilderContext, EntityObjectRelationBuilder, EntityObjectViaRelationBuilder, +}; + +lazy_static::lazy_static! 
{ static ref CONTEXT : BuilderContext = BuilderContext :: default () ; } + +pub fn schema( + database: DatabaseConnection, + orm_dataloader: DataLoader, + depth: Option, + complexity: Option, +) -> Result { + let mut builder = Builder::new(&CONTEXT); + let entity_object_relation_builder = EntityObjectRelationBuilder { context: &CONTEXT }; + let entity_object_via_relation_builder = EntityObjectViaRelationBuilder { context: &CONTEXT }; + builder.register_entity::(vec![ + entity_object_relation_builder + .get_relation::( + "actor", + crate::entities::film_actor::Relation::Actor.def(), + ), + entity_object_relation_builder + .get_relation::( + "film", + crate::entities::film_actor::Relation::Film.def(), + ), + ]); + builder.register_entity::(vec![ + entity_object_relation_builder + .get_relation::( + "customer", + crate::entities::rental::Relation::Customer.def(), + ), + entity_object_relation_builder + .get_relation::( + "inventory", + crate::entities::rental::Relation::Inventory.def(), + ), + entity_object_relation_builder + .get_relation::( + "payment", + crate::entities::rental::Relation::Payment.def(), + ), + entity_object_relation_builder + .get_relation::( + "staff", + crate::entities::rental::Relation::Staff.def(), + ), + ]); + builder.register_entity::(vec![ + entity_object_via_relation_builder + .get_relation::( + "film", + ), + ]); + builder.register_entity::(vec![ + entity_object_relation_builder + .get_relation::( + "address", + crate::entities::staff::Relation::Address.def(), + ), + entity_object_relation_builder + .get_relation::( + "payment", + crate::entities::staff::Relation::Payment.def(), + ), + entity_object_relation_builder + .get_relation::( + "rental", + crate::entities::staff::Relation::Rental.def(), + ), + entity_object_relation_builder + .get_relation::( + "selfRef", + crate::entities::staff::Relation::SelfRef.def(), + ), + entity_object_relation_builder + .get_relation::( + "selfRefReverse", + crate::entities::staff::Relation::SelfRef.def().rev(), + ), + entity_object_relation_builder + .get_relation::( + "store", + crate::entities::staff::Relation::Store.def(), + ), + ]); + builder.register_entity::(vec![ + entity_object_relation_builder + .get_relation::( + "city", + crate::entities::country::Relation::City.def(), + ), + ]); + builder.register_entity::(vec![ + entity_object_via_relation_builder + .get_relation::("actor"), + entity_object_via_relation_builder + .get_relation::( + "category", + ), + entity_object_relation_builder + .get_relation::( + "inventory", + crate::entities::film::Relation::Inventory.def(), + ), + entity_object_relation_builder + .get_relation::( + "language1", + crate::entities::film::Relation::Language1.def(), + ), + entity_object_relation_builder + .get_relation::( + "language2", + crate::entities::film::Relation::Language2.def(), + ), + ]); + builder.register_entity::(vec![ + entity_object_via_relation_builder + .get_relation::("film"), + ]); + builder.register_entity::(vec![]); + builder.register_entity::(vec![ + entity_object_relation_builder + .get_relation::( + "address", + crate::entities::city::Relation::Address.def(), + ), + entity_object_relation_builder + .get_relation::( + "country", + crate::entities::city::Relation::Country.def(), + ), + ]); + builder.register_entity::(vec![ + entity_object_relation_builder + .get_relation::( + "film", + crate::entities::inventory::Relation::Film.def(), + ), + entity_object_relation_builder + .get_relation::( + "rental", + crate::entities::inventory::Relation::Rental.def(), + ), + 
entity_object_relation_builder + .get_relation::( + "store", + crate::entities::inventory::Relation::Store.def(), + ), + ]); + builder.register_entity::(vec![]); + builder . register_entity :: < crate :: entities :: film_category :: Entity > (vec ! [entity_object_relation_builder . get_relation :: < crate :: entities :: film_category :: Entity , crate :: entities :: category :: Entity > ("category" , crate :: entities :: film_category :: Relation :: Category . def ()) , entity_object_relation_builder . get_relation :: < crate :: entities :: film_category :: Entity , crate :: entities :: film :: Entity > ("film" , crate :: entities :: film_category :: Relation :: Film . def ())]) ; + builder.register_entity::(vec![ + entity_object_relation_builder + .get_relation::( + "address", + crate::entities::customer::Relation::Address.def(), + ), + entity_object_relation_builder + .get_relation::( + "payment", + crate::entities::customer::Relation::Payment.def(), + ), + entity_object_relation_builder + .get_relation::( + "rental", + crate::entities::customer::Relation::Rental.def(), + ), + entity_object_relation_builder + .get_relation::( + "store", + crate::entities::customer::Relation::Store.def(), + ), + ]); + builder.register_entity::(vec![ + entity_object_relation_builder + .get_relation::( + "address", + crate::entities::store::Relation::Address.def(), + ), + entity_object_relation_builder + .get_relation::( + "customer", + crate::entities::store::Relation::Customer.def(), + ), + entity_object_relation_builder + .get_relation::( + "inventory", + crate::entities::store::Relation::Inventory.def(), + ), + entity_object_relation_builder + .get_relation::( + "staff", + crate::entities::store::Relation::Staff.def(), + ), + ]); + builder.register_entity::(vec![ + entity_object_relation_builder + .get_relation::( + "customer", + crate::entities::payment::Relation::Customer.def(), + ), + entity_object_relation_builder + .get_relation::( + "rental", + crate::entities::payment::Relation::Rental.def(), + ), + entity_object_relation_builder + .get_relation::( + "staff", + crate::entities::payment::Relation::Staff.def(), + ), + ]); + builder.register_entity::(vec![ + entity_object_relation_builder + .get_relation::( + "city", + crate::entities::address::Relation::City.def(), + ), + entity_object_relation_builder + .get_relation::( + "customer", + crate::entities::address::Relation::Customer.def(), + ), + entity_object_relation_builder + .get_relation::( + "staff", + crate::entities::address::Relation::Staff.def(), + ), + entity_object_relation_builder + .get_relation::( + "store", + crate::entities::address::Relation::Store.def(), + ), + ]); + let schema = builder.schema_builder(); + let schema = if let Some(depth) = depth { + schema.limit_depth(depth) + } else { + schema + }; + let schema = if let Some(complexity) = complexity { + schema.limit_complexity(complexity) + } else { + schema + }; + schema.data(database).data(orm_dataloader).finish() +} diff --git a/examples/sqlite/tests/guard_tests.rs b/examples/sqlite/tests/guard_tests.rs new file mode 100644 index 00000000..6eed5132 --- /dev/null +++ b/examples/sqlite/tests/guard_tests.rs @@ -0,0 +1,391 @@ +use std::collections::BTreeMap; + +use async_graphql::{dataloader::DataLoader, dynamic::*, Response}; +use sea_orm::{Database, DatabaseConnection, RelationTrait}; +use seaography::{ + Builder, BuilderContext, EntityObjectRelationBuilder, EntityObjectViaRelationBuilder, FnGuard, + GuardsConfig, +}; +use seaography_sqlite_example::OrmDataloader; + 
+lazy_static::lazy_static! { + static ref CONTEXT : BuilderContext = { + let context = BuilderContext::default(); + let mut entity_guards: BTreeMap = BTreeMap::new(); + entity_guards.insert("FilmCategory".into(), Box::new(|_ctx| { + true + })); + let mut field_guards: BTreeMap = BTreeMap::new(); + field_guards.insert("Language.lastUpdate".into(), Box::new(|_ctx| { + true + })); + BuilderContext { + guards: GuardsConfig { + entity_guards, + field_guards, + }, + ..context + } + }; +} + +pub fn schema( + database: DatabaseConnection, + orm_dataloader: DataLoader, + depth: Option, + complexity: Option, +) -> Result { + let mut builder = Builder::new(&CONTEXT); + let entity_object_relation_builder = EntityObjectRelationBuilder { context: &CONTEXT }; + let entity_object_via_relation_builder = EntityObjectViaRelationBuilder { context: &CONTEXT }; + builder.register_entity::(vec![ + entity_object_relation_builder + .get_relation::( + "actor", + seaography_sqlite_example::entities::film_actor::Relation::Actor.def(), + ), + entity_object_relation_builder + .get_relation::( + "film", + seaography_sqlite_example::entities::film_actor::Relation::Film.def(), + ), + ]); + builder.register_entity::(vec![ + entity_object_relation_builder + .get_relation::( + "customer", + seaography_sqlite_example::entities::rental::Relation::Customer.def(), + ), + entity_object_relation_builder + .get_relation::( + "inventory", + seaography_sqlite_example::entities::rental::Relation::Inventory.def(), + ), + entity_object_relation_builder + .get_relation::( + "payment", + seaography_sqlite_example::entities::rental::Relation::Payment.def(), + ), + entity_object_relation_builder + .get_relation::( + "staff", + seaography_sqlite_example::entities::rental::Relation::Staff.def(), + ), + ]); + builder.register_entity::(vec![ + entity_object_via_relation_builder + .get_relation::( + "film", + ), + ]); + builder.register_entity::(vec![ + entity_object_relation_builder + .get_relation::( + "address", + seaography_sqlite_example::entities::staff::Relation::Address.def(), + ), + entity_object_relation_builder + .get_relation::( + "payment", + seaography_sqlite_example::entities::staff::Relation::Payment.def(), + ), + entity_object_relation_builder + .get_relation::( + "rental", + seaography_sqlite_example::entities::staff::Relation::Rental.def(), + ), + entity_object_relation_builder + .get_relation::( + "selfRef", + seaography_sqlite_example::entities::staff::Relation::SelfRef.def(), + ), + entity_object_relation_builder + .get_relation::( + "selfRefReverse", + seaography_sqlite_example::entities::staff::Relation::SelfRef.def().rev(), + ), + entity_object_relation_builder + .get_relation::( + "store", + seaography_sqlite_example::entities::staff::Relation::Store.def(), + ), + ]); + builder.register_entity::(vec![ + entity_object_relation_builder + .get_relation::( + "city", + seaography_sqlite_example::entities::country::Relation::City.def(), + ), + ]); + builder.register_entity::(vec![ + entity_object_via_relation_builder + .get_relation::("actor"), + entity_object_via_relation_builder + .get_relation::( + "category", + ), + entity_object_relation_builder + .get_relation::( + "inventory", + seaography_sqlite_example::entities::film::Relation::Inventory.def(), + ), + entity_object_relation_builder + .get_relation::( + "language1", + seaography_sqlite_example::entities::film::Relation::Language1.def(), + ), + entity_object_relation_builder + .get_relation::( + "language2", + 
seaography_sqlite_example::entities::film::Relation::Language2.def(), + ), + ]); + builder.register_entity::(vec![ + entity_object_via_relation_builder + .get_relation::("film"), + ]); + builder.register_entity::(vec![]); + builder.register_entity::(vec![ + entity_object_relation_builder + .get_relation::( + "address", + seaography_sqlite_example::entities::city::Relation::Address.def(), + ), + entity_object_relation_builder + .get_relation::( + "country", + seaography_sqlite_example::entities::city::Relation::Country.def(), + ), + ]); + builder.register_entity::(vec![ + entity_object_relation_builder + .get_relation::( + "film", + seaography_sqlite_example::entities::inventory::Relation::Film.def(), + ), + entity_object_relation_builder + .get_relation::( + "rental", + seaography_sqlite_example::entities::inventory::Relation::Rental.def(), + ), + entity_object_relation_builder + .get_relation::( + "store", + seaography_sqlite_example::entities::inventory::Relation::Store.def(), + ), + ]); + builder.register_entity::(vec![]); + builder . register_entity :: < seaography_sqlite_example:: entities :: film_category :: Entity > (vec ! [entity_object_relation_builder . get_relation :: < seaography_sqlite_example:: entities :: film_category :: Entity , seaography_sqlite_example:: entities :: category :: Entity > ("category" , seaography_sqlite_example:: entities :: film_category :: Relation :: Category . def ()) , entity_object_relation_builder . get_relation :: < seaography_sqlite_example:: entities :: film_category :: Entity , seaography_sqlite_example:: entities :: film :: Entity > ("film" , seaography_sqlite_example:: entities :: film_category :: Relation :: Film . def ())]) ; + builder.register_entity::(vec![ + entity_object_relation_builder + .get_relation::( + "address", + seaography_sqlite_example::entities::customer::Relation::Address.def(), + ), + entity_object_relation_builder + .get_relation::( + "payment", + seaography_sqlite_example::entities::customer::Relation::Payment.def(), + ), + entity_object_relation_builder + .get_relation::( + "rental", + seaography_sqlite_example::entities::customer::Relation::Rental.def(), + ), + entity_object_relation_builder + .get_relation::( + "store", + seaography_sqlite_example::entities::customer::Relation::Store.def(), + ), + ]); + builder.register_entity::(vec![ + entity_object_relation_builder + .get_relation::( + "address", + seaography_sqlite_example::entities::store::Relation::Address.def(), + ), + entity_object_relation_builder + .get_relation::( + "customer", + seaography_sqlite_example::entities::store::Relation::Customer.def(), + ), + entity_object_relation_builder + .get_relation::( + "inventory", + seaography_sqlite_example::entities::store::Relation::Inventory.def(), + ), + entity_object_relation_builder + .get_relation::( + "staff", + seaography_sqlite_example::entities::store::Relation::Staff.def(), + ), + ]); + builder.register_entity::(vec![ + entity_object_relation_builder + .get_relation::( + "customer", + seaography_sqlite_example::entities::payment::Relation::Customer.def(), + ), + entity_object_relation_builder + .get_relation::( + "rental", + seaography_sqlite_example::entities::payment::Relation::Rental.def(), + ), + entity_object_relation_builder + .get_relation::( + "staff", + seaography_sqlite_example::entities::payment::Relation::Staff.def(), + ), + ]); + builder.register_entity::(vec![ + entity_object_relation_builder + .get_relation::( + "city", + seaography_sqlite_example::entities::address::Relation::City.def(), + ), 
+ entity_object_relation_builder + .get_relation::( + "customer", + seaography_sqlite_example::entities::address::Relation::Customer.def(), + ), + entity_object_relation_builder + .get_relation::( + "staff", + seaography_sqlite_example::entities::address::Relation::Staff.def(), + ), + entity_object_relation_builder + .get_relation::( + "store", + seaography_sqlite_example::entities::address::Relation::Store.def(), + ), + ]); + let schema = builder.schema_builder(); + let schema = if let Some(depth) = depth { + schema.limit_depth(depth) + } else { + schema + }; + let schema = if let Some(complexity) = complexity { + schema.limit_complexity(complexity) + } else { + schema + }; + schema.data(database).data(orm_dataloader).finish() +} + +pub async fn get_schema() -> Schema { + let database = Database::connect("sqlite://sakila.db").await.unwrap(); + let orm_dataloader: DataLoader = DataLoader::new( + OrmDataloader { + db: database.clone(), + }, + tokio::spawn, + ); + let schema = schema(database, orm_dataloader, None, None).unwrap(); + + schema +} + +pub fn assert_eq(a: Response, b: &str) { + assert_eq!( + a.data.into_json().unwrap(), + serde_json::from_str::(b).unwrap() + ) +} + +#[tokio::test] +async fn entity_guard() { + let schema = get_schema().await; + + assert_eq( + schema + .execute( + r#" + { + language { + nodes { + languageId + name + } + } + } + "#, + ) + .await, + r#" + { + "language": { + "nodes": [ + { + "languageId": 1, + "name": "English" + }, + { + "languageId": 2, + "name": "Italian" + }, + { + "languageId": 3, + "name": "Japanese" + }, + { + "languageId": 4, + "name": "Mandarin" + }, + { + "languageId": 5, + "name": "French" + }, + { + "languageId": 6, + "name": "German" + } + ] + } + } + "#, + ); + + let response = schema + .execute( + r#" + { + filmCategory { + nodes { + filmId + } + } + } + "#, + ) + .await; + + assert_eq!(response.errors.len(), 1); + + assert_eq!(response.errors[0].message, "Entity guard triggered."); +} + +#[tokio::test] +async fn field_guard() { + let schema = get_schema().await; + + let response = schema + .execute( + r#" + { + language { + nodes { + languageId + name + lastUpdate + } + } + } + "#, + ) + .await; + + assert_eq!(response.errors.len(), 1); + + assert_eq!(response.errors[0].message, "Field guard triggered."); +} diff --git a/examples/sqlite/tests/query_tests.rs b/examples/sqlite/tests/query_tests.rs index 1a3e931a..1725a4ba 100644 --- a/examples/sqlite/tests/query_tests.rs +++ b/examples/sqlite/tests/query_tests.rs @@ -1,8 +1,8 @@ -use async_graphql::{dataloader::DataLoader, EmptyMutation, EmptySubscription, Response, Schema}; +use async_graphql::{dataloader::DataLoader, dynamic::*, Response}; use sea_orm::Database; -use seaography_sqlite_example::{OrmDataloader, QueryRoot}; +use seaography_sqlite_example::OrmDataloader; -pub async fn get_schema() -> Schema { +pub async fn get_schema() -> Schema { let database = Database::connect("sqlite://sakila.db").await.unwrap(); let orm_dataloader: DataLoader = DataLoader::new( OrmDataloader { @@ -10,11 +10,11 @@ pub async fn get_schema() -> Schema }, tokio::spawn, ); + let schema = + seaography_sqlite_example::query_root::schema(database, orm_dataloader, None, None) + .unwrap(); - Schema::build(QueryRoot, EmptyMutation, EmptySubscription) - .data(database) - .data(orm_dataloader) - .finish() + schema } pub fn assert_eq(a: Response, b: &str) { @@ -32,42 +32,42 @@ async fn test_simple_query() { schema .execute( r#" - { - store { - nodes { - storeId - staff { - firstName - lastName + { + store { + 
nodes { + storeId + staff { + firstName + lastName + } + } + } } - } - } - } - "#, + "#, ) .await, r#" - { - "store": { - "nodes": [ - { - "storeId": 1, - "staff": { - "firstName": "Mike", - "lastName": "Hillyer" - } - }, - { - "storeId": 2, - "staff": { - "firstName": "Jon", - "lastName": "Stephens" - } + { + "store": { + "nodes": [ + { + "storeId": 1, + "staff": { + "firstName": "Mike", + "lastName": "Hillyer" + } + }, + { + "storeId": 2, + "staff": { + "firstName": "Jon", + "lastName": "Stephens" + } + } + ] } - ] } - } - "#, + "#, ) } @@ -79,35 +79,35 @@ async fn test_simple_query_with_filter() { schema .execute( r#" - { - store(filters: {storeId:{eq: 1}}) { - nodes { - storeId - staff { - firstName - lastName - } + { + store(filters: {storeId:{eq: 1}}) { + nodes { + storeId + staff { + firstName + lastName + } + } + } } - } - } - "#, + "#, ) .await, r#" - { - "store": { - "nodes": [ - { - "storeId": 1, - "staff": { - "firstName": "Mike", - "lastName": "Hillyer" - } + { + "store": { + "nodes": [ + { + "storeId": 1, + "staff": { + "firstName": "Mike", + "lastName": "Hillyer" + } + } + ] } - ] } - } - "#, + "#, ) } @@ -120,39 +120,43 @@ async fn test_filter_with_pagination() { .execute( r#" { - customer( - filters: { active: { eq: 0 } } - pagination: { pages: { page: 2, limit: 3 } } - ) { - nodes { - customerId + customer( + filters: { active: { eq: 0 } } + pagination: { page: { page: 2, limit: 3 } } + ) { + nodes { + customerId + } + paginationInfo { + pages + current + } } - pages - current } - } - "#, + "#, ) .await, r#" - { - "customer": { - "nodes": [ - { - "customerId": 315 - }, - { - "customerId": 368 - }, - { - "customerId": 406 + { + "customer": { + "nodes": [ + { + "customerId": 315 + }, + { + "customerId": 368 + }, + { + "customerId": 406 + } + ], + "paginationInfo": { + "pages": 5, + "current": 2 + } } - ], - "pages": 5, - "current": 2 } - } - "#, + "#, ) } @@ -165,39 +169,43 @@ async fn test_complex_filter_with_pagination() { .execute( r#" { - payment( - filters: { amount: { gt: "11.1" } } - pagination: { pages: { limit: 2, page: 3 } } - ) { - nodes { - paymentId - amount + payment( + filters: { amount: { gt: "11.1" } } + pagination: { page: { limit: 2, page: 3 } } + ) { + nodes { + paymentId + amount + } + paginationInfo { + pages + current + } } - pages - current - } } - "#, + "#, ) .await, r#" - { - "payment": { - "nodes": [ - { - "paymentId": 8272, - "amount": "11.99" - }, - { - "paymentId": 9803, - "amount": "11.99" + { + "payment": { + "nodes": [ + { + "paymentId": 8272, + "amount": "11.99" + }, + { + "paymentId": 9803, + "amount": "11.99" + } + ], + "paginationInfo": { + "pages": 5, + "current": 3 + } } - ], - "pages": 5, - "current": 3 } - } - "#, + "#, ) } @@ -210,89 +218,89 @@ async fn test_cursor_pagination() { .execute( r#" { - payment( - filters: { amount: { gt: "11" } } - pagination: { cursor: { limit: 5 } } - ) { - edges { - node { - paymentId - amount - customer { - firstName + payment( + filters: { amount: { gt: "11" } } + pagination: { cursor: { limit: 5 } } + ) { + edges { + node { + paymentId + amount + customer { + firstName + } + } + } + pageInfo { + hasPreviousPage + hasNextPage + startCursor + endCursor } - } - } - pageInfo { - hasPreviousPage - hasNextPage - startCursor - endCursor } - } } - "#, + "#, ) .await, r#" - { - "payment": { - "edges": [ - { - "node": { - "paymentId": 342, - "amount": "11.99", - "customer": { - "firstName": "KAREN" - } - } - }, - { - "node": { - "paymentId": 3146, - "amount": "11.99", - "customer": { - "firstName": 
"VICTORIA" - } - } - }, - { - "node": { - "paymentId": 5280, - "amount": "11.99", - "customer": { - "firstName": "VANESSA" - } - } - }, - { - "node": { - "paymentId": 5281, - "amount": "11.99", - "customer": { - "firstName": "ALMA" - } + { + "payment": { + "edges": [ + { + "node": { + "paymentId": 342, + "amount": "11.99", + "customer": { + "firstName": "KAREN" + } + } + }, + { + "node": { + "paymentId": 3146, + "amount": "11.99", + "customer": { + "firstName": "VICTORIA" + } + } + }, + { + "node": { + "paymentId": 5280, + "amount": "11.99", + "customer": { + "firstName": "VANESSA" + } + } + }, + { + "node": { + "paymentId": 5281, + "amount": "11.99", + "customer": { + "firstName": "ALMA" + } + } + }, + { + "node": { + "paymentId": 5550, + "amount": "11.99", + "customer": { + "firstName": "ROSEMARY" + } + } } - }, - { - "node": { - "paymentId": 5550, - "amount": "11.99", - "customer": { - "firstName": "ROSEMARY" - } + ], + "pageInfo": { + "hasPreviousPage": false, + "hasNextPage": true, + "startCursor": "Int[3]:342", + "endCursor": "Int[4]:5550" } - } - ], - "pageInfo": { - "hasPreviousPage": false, - "hasNextPage": true, - "startCursor": "Int[3]:342", - "endCursor": "Int[4]:5550" } - } - } - "#, + } + "#, ) } @@ -326,50 +334,50 @@ async fn test_cursor_pagination_prev() { } } } - "#, + "#, ) .await, r#" - { - "payment": { - "edges": [ - { - "node": { - "paymentId": 6409, - "amount": "11.99", - "customer": { - "firstName": "TANYA" - } - } - }, - { - "node": { - "paymentId": 8272, - "amount": "11.99", - "customer": { - "firstName": "RICHARD" - } + { + "payment": { + "edges": [ + { + "node": { + "paymentId": 6409, + "amount": "11.99", + "customer": { + "firstName": "TANYA" + } + } + }, + { + "node": { + "paymentId": 8272, + "amount": "11.99", + "customer": { + "firstName": "RICHARD" + } + } + }, + { + "node": { + "paymentId": 9803, + "amount": "11.99", + "customer": { + "firstName": "NICHOLAS" + } + } } - }, - { - "node": { - "paymentId": 9803, - "amount": "11.99", - "customer": { - "firstName": "NICHOLAS" - } + ], + "pageInfo": { + "hasPreviousPage": true, + "hasNextPage": true, + "startCursor": "Int[4]:6409", + "endCursor": "Int[4]:9803" } - } - ], - "pageInfo": { - "hasPreviousPage": true, - "hasNextPage": true, - "startCursor": "Int[4]:6409", - "endCursor": "Int[4]:9803" } - } - } - "#, + } + "#, ) } @@ -403,41 +411,41 @@ async fn test_cursor_pagination_no_next() { } } } - "#, + "#, ) .await, r#" - { - "payment": { - "edges": [ - { - "node": { - "paymentId": 15821, - "amount": "11.99", - "customer": { - "firstName": "KENT" - } + { + "payment": { + "edges": [ + { + "node": { + "paymentId": 15821, + "amount": "11.99", + "customer": { + "firstName": "KENT" + } + } + }, + { + "node": { + "paymentId": 15850, + "amount": "11.99", + "customer": { + "firstName": "TERRANCE" + } + } } - }, - { - "node": { - "paymentId": 15850, - "amount": "11.99", - "customer": { - "firstName": "TERRANCE" - } + ], + "pageInfo": { + "hasPreviousPage": true, + "hasNextPage": false, + "startCursor": "Int[5]:15821", + "endCursor": "Int[5]:15850" } - } - ], - "pageInfo": { - "hasPreviousPage": true, - "hasNextPage": false, - "startCursor": "Int[5]:15821", - "endCursor": "Int[5]:15850" } - } - } - "#, + } + "#, ) } @@ -467,41 +475,41 @@ async fn test_self_ref() { } } } - "#, + "#, ) .await, r#" - { - "staff": { - "nodes": [ - { - "firstName": "Mike", - "reportsToId": null, - "selfRefReverse": { - "nodes": [ - { - "staffId": 2, - "firstName": "Jon" - } - ] - }, - "selfRef": null - }, - { - "firstName": "Jon", - 
"reportsToId": 1, - "selfRefReverse": { - "nodes": [] - }, - "selfRef": { - "staffId": 1, - "firstName": "Mike" - } + { + "staff": { + "nodes": [ + { + "firstName": "Mike", + "reportsToId": null, + "selfRefReverse": { + "nodes": [ + { + "staffId": 2, + "firstName": "Jon" + } + ] + }, + "selfRef": null + }, + { + "firstName": "Jon", + "reportsToId": 1, + "selfRefReverse": { + "nodes": [] + }, + "selfRef": { + "staffId": 1, + "firstName": "Mike" + } + } + ] } - ] } - } - "#, + "#, ) } @@ -514,31 +522,31 @@ async fn related_queries_filters() { .execute( r#" { - customer( - filters: { active: { eq: 0 } } - pagination: { cursor: { limit: 3, cursor: "Int[3]:271" } } - ) { - nodes { - customerId - lastName - email - address { - address - } - payment(filters: { amount: { gt: "8" } }, orderBy: { amount: DESC }) { - nodes { - paymentId + customer( + filters: { active: { eq: 0 } } + pagination: { cursor: { limit: 3, cursor: "Int[3]:271" } } + ) { + nodes { + customerId + lastName + email + address { + address + } + payment(filters: { amount: { gt: "8" } }, orderBy: { amount: DESC }) { + nodes { + paymentId + } } } + pageInfo { + hasPreviousPage + hasNextPage + endCursor + } } - pageInfo { - hasPreviousPage - hasNextPage - endCursor - } - } } - "#, + "#, ) .await, r#" @@ -637,14 +645,16 @@ async fn related_queries_pagination() { payment( filters: { amount: { gt: "7" } } orderBy: { amount: ASC } - pagination: { pages: { limit: 1, page: 1 } } + pagination: { page: { limit: 1, page: 1 } } ) { nodes { paymentId amount } - pages - current + paginationInfo { + pages + current + } pageInfo { hasPreviousPage hasNextPage @@ -679,8 +689,10 @@ async fn related_queries_pagination() { "amount": "9.99" } ], - "pages": 2, - "current": 1, + "paginationInfo": { + "pages": 2, + "current": 1 + }, "pageInfo": { "hasPreviousPage": true, "hasNextPage": false @@ -701,8 +713,10 @@ async fn related_queries_pagination() { "amount": "7.99" } ], - "pages": 6, - "current": 1, + "paginationInfo": { + "pages": 6, + "current": 1 + }, "pageInfo": { "hasPreviousPage": true, "hasNextPage": true @@ -723,8 +737,10 @@ async fn related_queries_pagination() { "amount": "7.99" } ], - "pages": 3, - "current": 1, + "paginationInfo": { + "pages": 3, + "current": 1 + }, "pageInfo": { "hasPreviousPage": true, "hasNextPage": true @@ -742,3 +758,4 @@ async fn related_queries_pagination() { "#, ) } + diff --git a/generator/Cargo.toml b/generator/Cargo.toml index f5c6b860..127efbb2 100644 --- a/generator/Cargo.toml +++ b/generator/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "seaography-generator" -version = "0.3.0" +version = "1.0.0" edition = "2021" rust-version = "1.60" authors = ["Panagiotis Karatakis "] @@ -13,10 +13,10 @@ keywords = ["async", "graphql", "mysql", "postgres", "sqlite"] categories = ["database"] [dependencies] -quote = "1.0.21" -proc-macro2 = "1.0.43" -syn = { version = "1.0.99", features = ["full"] } -heck = "0.4.0" -itertools = "0.10.3" -sea-orm-codegen = { version = "0.9.2"} -sea-query = { version = "0.26.3", default-features = false } \ No newline at end of file +quote = "1.0.23" +proc-macro2 = "1.0.51" +syn = { version = "1.0.109", features = ["full"] } +heck = "0.4.1" +itertools = "0.10.5" +sea-query = { version = "0.28.3", default-features = false } +thiserror = "1.0.38" \ No newline at end of file diff --git a/generator/src/error.rs b/generator/src/error.rs index a6e8f2a2..f6c67bfc 100644 --- a/generator/src/error.rs +++ b/generator/src/error.rs @@ -1,16 +1,13 @@ -#[derive(Debug)] +use thiserror::Error; + +#[derive(Error, 
Debug)] pub enum Error { + #[error("Code generator: {0}")] Error(String), - SeaOrmCodegenError(sea_orm_codegen::Error), + #[error("IO: {0}")] IoError(std::io::Error), } -impl From for Error { - fn from(err: sea_orm_codegen::Error) -> Self { - Self::SeaOrmCodegenError(err) - } -} - impl From for Error { fn from(err: std::io::Error) -> Self { Self::IoError(err) diff --git a/generator/src/inject_graphql.rs b/generator/src/inject_graphql.rs deleted file mode 100644 index 81064584..00000000 --- a/generator/src/inject_graphql.rs +++ /dev/null @@ -1,153 +0,0 @@ -use heck::ToUpperCamelCase; -use quote::{quote, ToTokens}; - -pub fn inject_graphql( - entities_hashmap: crate::sea_orm_codegen::EntityHashMap, - expanded_format: bool, -) -> crate::sea_orm_codegen::EntityHashMap { - let sea_orm_active_enums = entities_hashmap - .get("sea_orm_active_enums.rs") - .map(|tokens| { - let file_parsed: syn::File = syn::parse2(tokens.clone()).unwrap(); - - let items: Vec = file_parsed - .items - .into_iter() - .map(|item| -> syn::Item { - if let syn::Item::Enum(enumeration) = item { - let derive_attr: syn::Attribute = syn::parse_quote! { - #[derive(Debug, Clone, PartialEq, EnumIter, DeriveActiveEnum, Eq, Copy, async_graphql::Enum, seaography::macros::EnumFilter)] - }; - syn::Item::Enum( - syn::ItemEnum { - attrs: [vec![derive_attr], enumeration.attrs[1..].to_vec()].concat(), - ..enumeration - } - ) - } else { - item - } - }) - .collect(); - - let file_parsed = syn::File { - items, - ..file_parsed - }; - - file_parsed.to_token_stream() - }); - - let mut entities: crate::sea_orm_codegen::EntityHashMap = entities_hashmap - .into_iter() - .filter(|(name, _)| !name.eq("sea_orm_active_enums.rs")) - .map(|(name, entity)| { - let tree = syn::parse2::(entity).unwrap(); - - let tree = syn::File { - items: tree - .items - .into_iter() - .map(|item| match &item { - syn::Item::Struct(structure) if structure.ident.eq("Model") => { - let mut attributes = structure.attrs.clone(); - - let mut derives = attributes[0].tokens.to_string(); - derives.truncate(derives.len() - 1); - - attributes[0] = syn::Attribute { - tokens: format!( - "{}, async_graphql::SimpleObject, seaography::macros::Filter)", - derives - ) - .parse() - .unwrap(), - ..attributes[0].clone() - }; - - if expanded_format { - let entity_name = &name[0..name.len() - 3]; - - let table_name_attr: syn::Attribute = - syn::parse_quote! { #[sea_orm(table_name=#entity_name)] }; - - attributes.push(table_name_attr); - } - - { - let complex_graphql_attr: syn::Attribute = - syn::parse_quote! { #[graphql(complex)] }; - - attributes.push(complex_graphql_attr); - } - - { - let entity_name = &name[0..name.len() - 3]; - - let name = entity_name.to_upper_camel_case(); - - let complex_graphql_attr: syn::Attribute = - syn::parse_quote! 
{ #[graphql(name=#name)] }; - - attributes.push(complex_graphql_attr); - } - - syn::Item::Struct(syn::ItemStruct { - attrs: attributes, - ..structure.clone() - }) - } - syn::Item::Enum(enumeration) - if enumeration.ident.eq("Relation") && !expanded_format => - { - let mut attributes = enumeration.attrs.clone(); - - let mut derives = attributes[0].tokens.to_string(); - derives.truncate(derives.len() - 1); - - attributes[0] = syn::Attribute { - tokens: format!( - "{}, seaography::macros::RelationsCompact)", - derives - ) - .parse() - .unwrap(), - ..attributes[0].clone() - }; - - syn::Item::Enum(syn::ItemEnum { - attrs: attributes, - ..enumeration.clone() - }) - } - syn::Item::Impl(implementation) - if implementation - .to_token_stream() - .to_string() - .starts_with("impl RelationTrait") - && expanded_format => - { - let relation_macro_attr: syn::Attribute = - syn::parse_quote! { #[seaography::macros::relation] }; - - syn::Item::Impl(syn::ItemImpl { - attrs: vec![relation_macro_attr], - ..implementation.clone() - }) - } - _ => item, - }) - .collect(), - ..tree - }; - - (name, quote! { #tree }) - }) - .collect(); - - if let Some(sea_orm_active_enums) = sea_orm_active_enums { - entities.insert("sea_orm_active_enums.rs".into(), sea_orm_active_enums); - } - - entities -} diff --git a/generator/src/lib.rs b/generator/src/lib.rs index 7b61c8b6..030fc57e 100644 --- a/generator/src/lib.rs +++ b/generator/src/lib.rs @@ -1,9 +1,6 @@ -use std::path::Path; - pub mod error; pub use error::{Error, Result}; -pub mod inject_graphql; -pub mod sea_orm_codegen; +pub mod parser; pub mod templates; pub mod writer; @@ -15,46 +12,33 @@ pub enum WebFrameworkEnum { Poem, } -pub async fn write_project>( - path: &P, +pub async fn write_project, T: AsRef>( + root_path: &P, + entities_path: &T, db_url: &str, crate_name: &str, - expanded_format: bool, - tables: std::collections::BTreeMap, sql_library: &str, framework: WebFrameworkEnum, depth_limit: Option, complexity_limit: Option, ) -> Result<()> { - std::fs::create_dir_all(path.as_ref().join("src/entities"))?; - - writer::write_cargo_toml(path, crate_name, sql_library, framework)?; - - let src_path = &path.as_ref().join("src"); + writer::write_cargo_toml(root_path, crate_name, &sql_library, framework)?; - let entities_hashmap = - sea_orm_codegen::generate_entities(tables.values().cloned().collect(), expanded_format) - .unwrap(); + let src_path = &root_path.as_ref().join("src"); - let entities_hashmap = inject_graphql::inject_graphql(entities_hashmap, expanded_format); - - writer::write_query_root(src_path, &entities_hashmap).unwrap(); + writer::write_query_root(src_path, entities_path)?; writer::write_lib(src_path)?; match framework { - WebFrameworkEnum::Actix => { - crate::templates::actix::write_main(src_path, crate_name).unwrap() - } - WebFrameworkEnum::Poem => crate::templates::poem::write_main(src_path, crate_name).unwrap(), + WebFrameworkEnum::Actix => crate::templates::actix::write_main(src_path, crate_name)?, + WebFrameworkEnum::Poem => crate::templates::poem::write_main(src_path, crate_name)?, } - writer::write_env(&path.as_ref(), db_url, depth_limit, complexity_limit)?; - - sea_orm_codegen::write_entities(&src_path.join("entities"), entities_hashmap).unwrap(); + writer::write_env(&root_path.as_ref(), db_url, depth_limit, complexity_limit)?; std::process::Command::new("cargo") .arg("fmt") - .current_dir(path) + .current_dir(&root_path) .spawn()? 
.wait()?; diff --git a/generator/src/parser.rs b/generator/src/parser.rs new file mode 100644 index 00000000..9fdfb0bc --- /dev/null +++ b/generator/src/parser.rs @@ -0,0 +1,212 @@ +use std::collections::BTreeMap; + +use proc_macro2::TokenStream; +use quote::{quote, ToTokens}; + +use crate::writer::EntityDefinition; + +pub struct RelationDef { + pub target: TokenStream, + pub variant: TokenStream, + pub related: bool, + pub reverse: bool, + pub self_rel: bool, +} + +impl RelationDef { + fn related(target: TokenStream) -> Self { + Self { + target, + variant: quote! {}, + related: true, + reverse: false, + self_rel: false, + } + } +} + +pub fn parse_entity(file_name: String, file_content: String) -> EntityDefinition { + let name = &file_name[..file_name.len() - 3]; + let name: TokenStream = format!("crate::entities::{}", name).parse().unwrap(); + + let tree = syn::parse2::(file_content.parse().unwrap()).unwrap(); + + let relations: BTreeMap = + tree.items + .iter() + .fold(BTreeMap::new(), |mut acc, cur| match cur { + syn::Item::Impl(implementation) => { + if let Some((_bang, path, _for)) = &implementation.trait_ { + let path = path.to_token_stream().to_string(); + if path.starts_with("Related") { + let path: TokenStream = path[18..path.len() - 1].parse().unwrap(); + let path = quote! { crate::entities::#path }; + + let to_method = implementation + .items + .iter() + .find(|item| match item { + syn::ImplItem::Method(method) => method + .sig + .to_token_stream() + .to_string() + .starts_with("fn to ()"), + _ => false, + }) + .expect("We expect Related to have `to` method"); + + let via_method = implementation.items.iter().find(|item| match item { + syn::ImplItem::Method(method) => method + .sig + .to_token_stream() + .to_string() + .starts_with("fn via ()"), + _ => false, + }); + + let name: String = if let syn::ImplItem::Method(method) = to_method { + let ident = + (&method.block.stmts[0]).into_token_stream().to_string(); + let ident: String = ident + [12..ident.chars().position(|c| c == '.').unwrap() - 1] + .into(); + ident.split("::").last().unwrap().trim().into() + } else { + panic!("We expect to_method variable to be Method type") + }; + + if let Some(_) = via_method { + acc.insert(name, RelationDef::related(path)); + } + } + } + acc + } + syn::Item::Enum(enumeration) => { + if enumeration.ident.to_string().eq("Relation") { + enumeration.variants.iter().for_each(|variant| { + let name = variant.ident.to_string(); + let attr = variant.attrs.iter().find(|attr| { + attr.path + .get_ident() + .map(|i| i.to_string().eq("sea_orm")) + .unwrap_or_else(|| false) + }); + if let Some(attr) = attr { + let ident = quote::format_ident!("{}", name); + + let attributes_string = attr.tokens.to_string(); + let attributes_string = + &attributes_string[1..attributes_string.len() - 1]; + + let attributes = attributes_string.split(",").fold( + std::collections::BTreeMap::<&str, &str>::new(), + |mut acc, cur| { + let mut parts = cur.split("="); + if parts.clone().count() == 2 { + let key = parts + .next() + .expect("We expect to have first part") + .trim(); + let value = parts + .next() + .expect("We expect to have second part") + .trim(); + acc.insert(key, value); + } + + acc + }, + ); + + let belongs_to = attributes.get("belongs_to"); + let has_one = attributes.get("has_one"); + let has_many = attributes.get("has_many"); + + let target = if let Some(v) = belongs_to { + v + } else if let Some(v) = has_one { + v + } else if let Some(v) = has_many { + v + } else { + panic!("Invalid relation definition") + }; + 
+ let target = target.replace("super", "crate::entities"); + + let target: TokenStream = + target[1..target.len() - 1].parse().unwrap(); + + let self_belongs_to = + belongs_to.map_or_else(|| false, |v| v.eq(&"\"Entity\"")); + let self_has_one = + has_one.map_or_else(|| false, |v| v.eq(&"\"Entity\"")); + let self_has_many = + has_many.map_or_else(|| false, |v| v.eq(&"\"Entity\"")); + + if self_belongs_to || self_has_one || self_has_many { + let normal = RelationDef { + target: target.clone(), + variant: quote! { #ident }, + related: false, + reverse: false, + self_rel: true, + }; + acc.insert(name.clone(), normal); + + let reverse = RelationDef { + target, + variant: quote! { #ident }, + related: false, + reverse: true, + self_rel: true, + }; + acc.insert(format!("{}Reverse", name), reverse); + } else { + let normal = RelationDef { + target, + variant: quote! { #ident }, + related: false, + reverse: false, + self_rel: false, + }; + acc.insert(name, normal); + } + } + }); + acc + } else { + acc + } + } + _ => acc, + }); + + EntityDefinition { name, relations } +} + +pub struct EnumerationDefinition { + pub name: TokenStream, +} + +pub fn parse_enumerations(file_content: String) -> Vec { + let tree = syn::parse2::(file_content.parse().unwrap()).unwrap(); + + let items: Vec = tree + .items + .iter() + .filter(|item| match item { + syn::Item::Enum(_) => true, + _ => false, + }) + .map(|item| match item { + syn::Item::Enum(enumeration) => EnumerationDefinition { + name: enumeration.ident.to_token_stream(), + }, + _ => panic!("This is unreachable."), + }) + .collect(); + + items +} diff --git a/generator/src/sea_orm_codegen.rs b/generator/src/sea_orm_codegen.rs deleted file mode 100644 index ae722100..00000000 --- a/generator/src/sea_orm_codegen.rs +++ /dev/null @@ -1,45 +0,0 @@ -use crate::util::add_line_break; - -pub type EntityHashMap = std::collections::BTreeMap; - -pub fn generate_entities( - table_crate_stmts: Vec, - expanded_format: bool, -) -> crate::Result { - let entity_writer = sea_orm_codegen::EntityTransformer::transform(table_crate_stmts)?; - - let entity_writer_ctx = sea_orm_codegen::EntityWriterContext::new( - expanded_format, - sea_orm_codegen::WithSerde::None, - true, - sea_orm_codegen::DateTimeCrate::Chrono, - None, - ); - - let writer_output = entity_writer.generate(&entity_writer_ctx); - - let data: EntityHashMap = writer_output - .files - .iter() - .map(|output_file| { - ( - output_file.name.clone(), - output_file.content.parse().unwrap(), - ) - }) - .collect(); - - Ok(data) -} - -pub fn write_entities>( - path: &P, - entities_hashmap: EntityHashMap, -) -> crate::Result<()> { - for (name, content) in entities_hashmap.iter() { - let file_path = path.as_ref().join(name); - std::fs::write(file_path, add_line_break(content.clone()))?; - } - - Ok(()) -} diff --git a/generator/src/templates/actix.rs b/generator/src/templates/actix.rs index 8896b8e8..97d04cbc 100644 --- a/generator/src/templates/actix.rs +++ b/generator/src/templates/actix.rs @@ -12,7 +12,9 @@ pub fn generate_main(crate_name: &str) -> TokenStream { quote! 
{ use actix_web::{guard, web, web::Data, App, HttpResponse, HttpServer, Result}; use async_graphql::{ - dataloader::DataLoader, http::{playground_source, GraphQLPlaygroundConfig}, EmptyMutation, EmptySubscription, Schema, + dataloader::DataLoader, + http::{playground_source, GraphQLPlaygroundConfig}, + dynamic::*, }; use async_graphql_actix_web::{GraphQLRequest, GraphQLResponse}; use dotenv::dotenv; @@ -35,9 +37,7 @@ pub fn generate_main(crate_name: &str) -> TokenStream { }); } - type AppSchema = Schema; - - async fn index(schema: web::Data, req: GraphQLRequest) -> GraphQLResponse { + async fn index(schema: web::Data, req: GraphQLRequest) -> GraphQLResponse { schema.execute(req.into_inner()).await.into() } @@ -60,22 +60,15 @@ pub fn generate_main(crate_name: &str) -> TokenStream { let database = Database::connect(&*DATABASE_URL) .await .expect("Fail to initialize database connection"); + let orm_dataloader: DataLoader = DataLoader::new( OrmDataloader { db: database.clone(), }, tokio::spawn, ); - let mut schema = Schema::build(QueryRoot, EmptyMutation, EmptySubscription) - .data(database) - .data(orm_dataloader); - if let Some(depth) = *DEPTH_LIMIT { - schema = schema.limit_depth(depth); - } - if let Some(complexity) = *COMPLEXITY_LIMIT { - schema = schema.limit_complexity(complexity); - } - let schema = schema.finish(); + + let schema = #crate_name_token::query_root::schema(database, orm_dataloader, *DEPTH_LIMIT, *COMPLEXITY_LIMIT).unwrap(); println!("Visit GraphQL Playground at http://{}", *URL); diff --git a/generator/src/templates/actix_cargo.toml b/generator/src/templates/actix_cargo.toml index 5807f790..09d39831 100644 --- a/generator/src/templates/actix_cargo.toml +++ b/generator/src/templates/actix_cargo.toml @@ -1,18 +1,18 @@ [package] edition = "2021" name = "" -version = "0.1.0" +version = "0.3.0" [dependencies] -actix-web = { version = "4.0.1", default-features = false, features = ["macros"] } -async-graphql = { version = "4.0.14", features = ["decimal", "chrono", "dataloader"] } -async-graphql-actix-web = { version = "4.0.14" } -async-trait = { version = "0.1.53" } +actix-web = { version = "4.3.1", default-features = false, features = ["macros"] } +async-graphql-actix-web = { version = "5.0.6" } +async-graphql = { version = "5.0.6", features = ["decimal", "chrono", "dataloader", "dynamic-schema"] } +async-trait = { version = "0.1.64" } dotenv = "0.15.0" -sea-orm = { version = "^0.10", features = ["", "runtime-async-std-native-tls"] } -tokio = { version = "1.17.0", features = ["macros", "rt-multi-thread"] } -tracing = { version = "0.1.34" } -tracing-subscriber = { version = "0.3.11" } +sea-orm = { version = "0.11.0", features = ["", "runtime-async-std-native-tls"] } +tokio = { version = "1.26.0", features = ["macros", "rt-multi-thread"] } +tracing = { version = "0.1.37" } +tracing-subscriber = { version = "0.3.16" } lazy_static = { version = "1.4.0" } [dependencies.seaography] diff --git a/generator/src/templates/poem.rs b/generator/src/templates/poem.rs index 34f30896..2ad1857c 100644 --- a/generator/src/templates/poem.rs +++ b/generator/src/templates/poem.rs @@ -12,8 +12,7 @@ pub fn generate_main(crate_name: &str) -> TokenStream { quote! 
{ use async_graphql::{ dataloader::DataLoader, - http::{playground_source, GraphQLPlaygroundConfig}, - EmptyMutation, EmptySubscription, Schema, + http::{playground_source, GraphQLPlaygroundConfig} }; use async_graphql_poem::GraphQL; use dotenv::dotenv; @@ -49,7 +48,6 @@ pub fn generate_main(crate_name: &str) -> TokenStream { .with_max_level(tracing::Level::INFO) .with_test_writer() .init(); - let database = Database::connect(&*DATABASE_URL) .await .expect("Fail to initialize database connection"); @@ -59,21 +57,13 @@ pub fn generate_main(crate_name: &str) -> TokenStream { }, tokio::spawn, ); - let mut schema = Schema::build(QueryRoot, EmptyMutation, EmptySubscription) - .data(database) - .data(orm_dataloader); - if let Some(depth) = *DEPTH_LIMIT { - schema = schema.limit_depth(depth); - } - if let Some(complexity) = *COMPLEXITY_LIMIT { - schema = schema.limit_complexity(complexity); - } - let schema = schema.finish(); + + let schema = #crate_name_token::query_root::schema(database, orm_dataloader, *DEPTH_LIMIT, *COMPLEXITY_LIMIT).unwrap(); + let app = Route::new().at( &*ENDPOINT, get(graphql_playground).post(GraphQL::new(schema)), ); - println!("Visit GraphQL Playground at http://{}", *URL); Server::new(TcpListener::bind(&*URL)) .run(app) diff --git a/generator/src/templates/poem_cargo.toml b/generator/src/templates/poem_cargo.toml index ea24cc9c..0c5c38fa 100644 --- a/generator/src/templates/poem_cargo.toml +++ b/generator/src/templates/poem_cargo.toml @@ -1,18 +1,18 @@ [package] edition = "2021" name = "" -version = "0.2.0" +version = "0.3.0" [dependencies] -poem = { version = "1.3.29" } -async-graphql = { version = "4.0.14", features = ["decimal", "chrono", "dataloader"] } -async-graphql-poem = { version = "4.0.14" } -async-trait = { version = "0.1.53" } +poem = { version = "1.3.55" } +async-graphql-poem = { version = "5.0.6" } +async-graphql = { version = "5.0.6", features = ["decimal", "chrono", "dataloader", "dynamic-schema"] } +async-trait = { version = "0.1.64" } dotenv = "0.15.0" -sea-orm = { version = "^0.10", features = ["", "runtime-async-std-native-tls"] } -tokio = { version = "1.17.0", features = ["macros", "rt-multi-thread"] } -tracing = { version = "0.1.34" } -tracing-subscriber = { version = "0.3.11" } +sea-orm = { version = "0.11.0", features = ["", "runtime-async-std-native-tls"] } +tokio = { version = "1.26.0", features = ["macros", "rt-multi-thread"] } +tracing = { version = "0.1.37" } +tracing-subscriber = { version = "0.3.16" } lazy_static = { version = "1.4.0" } [dependencies.seaography] diff --git a/generator/src/writer.rs b/generator/src/writer.rs index 68cf5bb8..8a066321 100644 --- a/generator/src/writer.rs +++ b/generator/src/writer.rs @@ -1,44 +1,184 @@ +use std::collections::BTreeMap; +use std::path::Path; + +use heck::ToLowerCamelCase; use proc_macro2::TokenStream; use quote::quote; -use crate::{util::add_line_break, WebFrameworkEnum}; - -pub fn generate_query_root( - entities_hashmap: &crate::sea_orm_codegen::EntityHashMap, -) -> Result { - let items: Vec<_> = entities_hashmap - .keys() - .filter(|entity| { - entity.ne(&&"mod.rs".to_string()) - && entity.ne(&&"prelude.rs".to_string()) - && entity.ne(&&"sea_orm_active_enums.rs".to_string()) - }) - .map(|entity| { - let entity = &entity.as_str()[..entity.len() - 3]; - format!("crate::entities::{}", entity) +use crate::{ + parser::{parse_entity, parse_enumerations, RelationDef}, + util::add_line_break, + WebFrameworkEnum, +}; + +pub struct EntityDefinition { + pub name: TokenStream, + pub relations: BTreeMap, 
+} + +pub fn generate_query_root>(entities_path: &P) -> TokenStream { + let entities_paths = std::fs::read_dir(entities_path) + .unwrap() + .into_iter() + .filter(|r| r.is_ok()) + .map(|r| r.unwrap().path()) + .filter(|r| r.is_file()) + .filter(|r| { + let name = r.file_stem(); + + if let Some(v) = name { + !v.eq(std::ffi::OsStr::new("prelude")) + && !v.eq(std::ffi::OsStr::new("sea_orm_active_enums")) + && !v.eq(std::ffi::OsStr::new("mod")) + } else { + false + } + }); + + let entities: Vec = entities_paths + .map(|path| { + let file_name = path.file_name().unwrap().to_str().unwrap(); + let file_content = + std::fs::read_to_string(entities_path.as_ref().join(file_name)).unwrap(); + parse_entity(file_name.into(), file_content) }) .collect(); - Ok(quote! { - #[derive(Debug, seaography::macros::QueryRoot)] - #(#[seaography(entity = #items)])* - pub struct QueryRoot; - }) + let entities: Vec = entities.iter().map(|entity| { + let entity_path = &entity.name; + + let relations: Vec = entity.relations.iter().map(|(relationship_name, rel_def)| { + let variant = &rel_def.variant; + let target = &rel_def.target; + let relationship_name = relationship_name.to_lower_camel_case(); + + if rel_def.self_rel && rel_def.reverse { + quote!{ + entity_object_relation_builder.get_relation::<#entity_path::Entity, #entity_path::Entity>(#relationship_name, #entity_path::Relation::#variant.def().rev()) + } + } else if rel_def.related { + quote!{ + entity_object_via_relation_builder.get_relation::<#entity_path::Entity, #target>(#relationship_name) + } + } else if rel_def.self_rel { + quote!{ + entity_object_relation_builder.get_relation::<#entity_path::Entity, #entity_path::Entity>(#relationship_name, #entity_path::Relation::#variant.def()) + } + } else if rel_def.reverse { + quote!{ + entity_object_relation_builder.get_relation::<#target, #entity_path::Entity>(#relationship_name, #entity_path::Relation::#variant.def().rev()) + } + } else { + quote!{ + entity_object_relation_builder.get_relation::<#entity_path::Entity, #target>(#relationship_name, #entity_path::Relation::#variant.def()) + } + } + }).collect(); + + quote!{ + + builder.register_entity::<#entity_path::Entity>(vec![#(#relations),*]); + + } + }).collect(); + + let enumerations = std::fs::read_dir(entities_path) + .unwrap() + .into_iter() + .filter(|r| r.is_ok()) + .map(|r| r.unwrap().path()) + .find(|r| { + let name = r.file_stem(); + + if let Some(v) = name { + v.eq(std::ffi::OsStr::new("sea_orm_active_enums")) + } else { + false + } + }); + + let enumerations = match enumerations { + Some(_) => { + let file_content = + std::fs::read_to_string(entities_path.as_ref().join("sea_orm_active_enums.rs")) + .unwrap(); + + parse_enumerations(file_content) + } + None => vec![], + }; + + let enumerations = enumerations.iter().map(|definition| { + let name = &definition.name; + + quote! { + + builder.register_enumeration::(); + + } + }); + + quote! { + use crate::OrmDataloader; + use async_graphql::{dataloader::DataLoader, dynamic::*}; + use sea_orm::{DatabaseConnection, RelationTrait}; + use seaography::{ + Builder, BuilderContext, EntityObjectRelationBuilder, EntityObjectViaRelationBuilder, + }; + + lazy_static::lazy_static! 
{ + static ref CONTEXT: BuilderContext = BuilderContext::default(); + } + + pub fn schema( + database: DatabaseConnection, + orm_dataloader: DataLoader, + depth: Option, + complexity: Option, + ) -> Result { + let mut builder = Builder::new(&CONTEXT); + let entity_object_relation_builder = EntityObjectRelationBuilder { context: &CONTEXT }; + let entity_object_via_relation_builder = EntityObjectViaRelationBuilder { context: &CONTEXT }; + + #(#entities)* + + #(#enumerations)* + + let schema = builder.schema_builder(); + + let schema = if let Some(depth) = depth { + schema.limit_depth(depth) + } else { + schema + }; + + let schema = if let Some(complexity) = complexity { + schema.limit_complexity(complexity) + } else { + schema + }; + + schema.data(database).data(orm_dataloader).finish() + } + } } -pub fn write_query_root>( - path: &P, - entities_hashmap: &crate::sea_orm_codegen::EntityHashMap, +pub fn write_query_root, T: AsRef>( + src_path: &P, + entities_path: &T, ) -> Result<(), crate::error::Error> { - let tokens = generate_query_root(entities_hashmap)?; + let tokens = generate_query_root(entities_path); - let file_name = path.as_ref().join("query_root.rs"); + let file_name = src_path.as_ref().join("query_root.rs"); std::fs::write(file_name, add_line_break(tokens))?; Ok(()) } +/// +/// Used to generate project/Cargo.toml file content +/// pub fn write_cargo_toml>( path: &P, crate_name: &str, @@ -71,34 +211,29 @@ pub fn write_cargo_toml>( /// /// Used to generate project/src/lib.rs file content /// -pub fn generate_lib() -> TokenStream { - quote! { +pub fn write_lib>(path: &P) -> std::io::Result<()> { + let tokens = quote! { use sea_orm::prelude::*; pub mod entities; pub mod query_root; - pub use query_root::QueryRoot; - pub struct OrmDataloader { pub db: DatabaseConnection, } - } -} -pub fn write_lib>(path: &P) -> std::io::Result<()> { - let tokens = generate_lib(); + }; let file_name = path.as_ref().join("lib.rs"); - std::fs::write( - file_name, - format!("#![recursion_limit = \"1024\"]\n{}", add_line_break(tokens)), - )?; + std::fs::write(file_name, add_line_break(tokens))?; Ok(()) } +/// +/// Used to generate project/.env file content +/// pub fn write_env>( path: &P, db_url: &str, diff --git a/sakila.db b/sakila.db new file mode 100644 index 00000000..e511f197 Binary files /dev/null and b/sakila.db differ diff --git a/src/builder.rs b/src/builder.rs new file mode 100644 index 00000000..ecc69162 --- /dev/null +++ b/src/builder.rs @@ -0,0 +1,237 @@ +use async_graphql::dynamic::{Enum, Field, InputObject, Object, Schema, SchemaBuilder}; +use sea_orm::{ActiveEnum, EntityTrait}; +use std::collections::BTreeMap; + +use crate::{ + ActiveEnumBuilder, ActiveEnumFilterInputBuilder, BuilderContext, ConnectionObjectBuilder, + CursorInputBuilder, EdgeObjectBuilder, EntityObjectBuilder, EntityQueryFieldBuilder, + FilterInputBuilder, OffsetInputBuilder, OrderByEnumBuilder, OrderInputBuilder, + PageInfoObjectBuilder, PageInputBuilder, PaginationInfoObjectBuilder, PaginationInputBuilder, +}; + +/// The Builder is used to create the Schema for GraphQL +/// +/// You can populate it with the entities, enumerations of your choice +pub struct Builder { + pub entities: Vec, + pub edges: Vec, + pub connections: Vec, + pub filters: Vec, + pub orders: Vec, + pub enumerations: Vec, + pub queries: Vec, + pub relations: BTreeMap>, + pub context: &'static BuilderContext, +} + +impl Builder { + /// Used to create a new Builder from the given configuration context + pub fn new(context: &'static BuilderContext) -> 
Self { + Self { + entities: Vec::new(), + edges: Vec::new(), + connections: Vec::new(), + filters: Vec::new(), + orders: Vec::new(), + enumerations: Vec::new(), + queries: Vec::new(), + relations: BTreeMap::new(), + context, + } + } + + /// used to register a new entity to the Builder context + pub fn register_entity(&mut self, relations: Vec) + where + T: EntityTrait, + ::Model: Sync, + { + let entity_object_builder = EntityObjectBuilder { + context: self.context, + }; + + let edge_object_builder = EdgeObjectBuilder { + context: self.context, + }; + + let connection_object_builder = ConnectionObjectBuilder { + context: self.context, + }; + + let filter_input_builder = FilterInputBuilder { + context: self.context, + }; + + let order_input_builder = OrderInputBuilder { + context: self.context, + }; + + let entity_query_field_builder = EntityQueryFieldBuilder { + context: self.context, + }; + + let entity_object = entity_object_builder.to_object::(); + + let entity_object = relations + .into_iter() + .fold(entity_object, |entity_object, field| { + entity_object.field(field) + }); + + self.entities.extend(vec![entity_object]); + + let edge = edge_object_builder.to_object::(); + self.edges.extend(vec![edge]); + + let connection = connection_object_builder.to_object::(); + self.connections.extend(vec![connection]); + + let filter = filter_input_builder.to_object::(); + self.filters.extend(vec![filter]); + + let order = order_input_builder.to_object::(); + self.orders.extend(vec![order]); + + let query = entity_query_field_builder.to_field::(); + self.queries.extend(vec![query]); + } + + /// used to register a new enumeration to the builder context + pub fn register_enumeration(&mut self) + where + A: ActiveEnum, + { + let active_enum_builder = ActiveEnumBuilder { + context: self.context, + }; + let active_enum_filter_input_builder = ActiveEnumFilterInputBuilder { + context: self.context, + }; + + let enumeration = active_enum_builder.enumeration::(); + self.enumerations.extend(vec![enumeration]); + + let filter = active_enum_filter_input_builder.input_object::(); + self.filters.extend(vec![filter]); + } + + /// used to consume the builder context and generate a ready to be completed GraphQL schema + pub fn schema_builder(self) -> SchemaBuilder { + let query = Object::new("Query"); + + let query = self + .queries + .into_iter() + .fold(query, |query, field| query.field(field)); + + let schema = Schema::build(query.type_name(), None, None); + + let mut relations = self.relations; + + // register entities to schema + let schema = self + .entities + .into_iter() + // add related fields to entities + .map( + |entity: Object| match relations.remove(entity.type_name()) { + Some(fields) => fields + .into_iter() + .fold(entity, |entity, field| entity.field(field)), + None => entity, + }, + ) + .fold(schema, |schema, entity| schema.register(entity)); + + // register edges to schema + let schema = self + .edges + .into_iter() + .fold(schema, |schema, edge| schema.register(edge)); + + // register connections to schema + let schema = self + .connections + .into_iter() + .fold(schema, |schema, connection| schema.register(connection)); + + // register filters to schema + let schema = self + .filters + .into_iter() + .fold(schema, |schema, filter| schema.register(filter)); + + // register orders to schema + let schema = self + .orders + .into_iter() + .fold(schema, |schema, order| schema.register(order)); + + // register enumerations + let schema = self + .enumerations + .into_iter() + .fold(schema, |schema, 
enumeration| schema.register(enumeration)); + + let filter_input_builder = FilterInputBuilder { + context: self.context, + }; + + // register static filter types + schema + .register(filter_input_builder.string_filter()) + .register(filter_input_builder.integer_filter()) + .register(filter_input_builder.float_filter()) + .register(filter_input_builder.text_filter()) + .register(filter_input_builder.boolean_filter()) + .register( + OrderByEnumBuilder { + context: self.context, + } + .enumeration(), + ) + .register( + CursorInputBuilder { + context: self.context, + } + .input_object(), + ) + .register( + CursorInputBuilder { + context: self.context, + } + .input_object(), + ) + .register( + PageInputBuilder { + context: self.context, + } + .input_object(), + ) + .register( + OffsetInputBuilder { + context: self.context, + } + .input_object(), + ) + .register( + PaginationInputBuilder { + context: self.context, + } + .input_object(), + ) + .register( + PageInfoObjectBuilder { + context: self.context, + } + .to_object(), + ) + .register( + PaginationInfoObjectBuilder { + context: self.context, + } + .to_object(), + ) + .register(query) + } +} diff --git a/src/builder_context.rs b/src/builder_context.rs new file mode 100644 index 00000000..e5d7274d --- /dev/null +++ b/src/builder_context.rs @@ -0,0 +1,42 @@ +use crate::{ + ActiveEnumConfig, ActiveEnumFilterInputConfig, ConnectionObjectConfig, CursorInputConfig, + EdgeObjectConfig, EntityObjectConfig, EntityQueryFieldConfig, FilterInputConfig, + OffsetInputConfig, OrderByEnumConfig, OrderInputConfig, PageInfoObjectConfig, PageInputConfig, + PaginationInfoObjectConfig, PaginationInputConfig, +}; + +pub mod guards; +pub use guards::*; + +/// Used to hold the configuration for various aspects +/// related to our builder options. You can modify the +/// context to make the generated GraphQL nodes match +/// your needs. +#[derive(Default)] +pub struct BuilderContext { + pub order_by_enum: OrderByEnumConfig, + pub active_enum: ActiveEnumConfig, + + pub cursor_input: CursorInputConfig, + pub page_input: PageInputConfig, + pub offset_input: OffsetInputConfig, + pub pagination_input: PaginationInputConfig, + + pub order_input: OrderInputConfig, + + pub filter_input: FilterInputConfig, + pub active_enum_filter_input: ActiveEnumFilterInputConfig, + + pub page_info_object: PageInfoObjectConfig, + pub pagination_info_object: PaginationInfoObjectConfig, + pub edge_object: EdgeObjectConfig, + pub entity_object: EntityObjectConfig, + pub connection_object: ConnectionObjectConfig, + pub entity_query_field: EntityQueryFieldConfig, + + pub guards: GuardsConfig, + // guards functions + // is_skipped function + // naming function + // fields type overrides +} diff --git a/src/builder_context/guards.rs b/src/builder_context/guards.rs new file mode 100644 index 00000000..000a1d5b --- /dev/null +++ b/src/builder_context/guards.rs @@ -0,0 +1,16 @@ +use std::collections::BTreeMap; + +use async_graphql::dynamic::ResolverContext; + +/// Entities and Field guards configuration. +/// The guards are used to control access to entities or fields. 
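// The `GuardsConfig` defined just below maps entity and field identifiers to guard
// functions that run before a resolver executes. A minimal, hypothetical sketch of how
// it might be populated follows; the key format ("Type" / "Type.field"), the meaning of
// the returned bool, and the crate-root re-exports of `BuilderContext`, `GuardsConfig`,
// and `FnGuard` are assumptions for illustration, not something this diff confirms.
use std::collections::BTreeMap;

use async_graphql::dynamic::ResolverContext;
use seaography::{BuilderContext, FnGuard, GuardsConfig};

fn guarded_context() -> BuilderContext {
    BuilderContext {
        guards: GuardsConfig {
            // guard keyed by an entity name (key format assumed); returning `false`
            // is only a placeholder, the allow/deny semantics live in the resolvers
            entity_guards: BTreeMap::from([(
                "FilmText".to_string(),
                Box::new(|_ctx: &ResolverContext| false) as FnGuard,
            )]),
            // guard keyed by "Type.field" (key format assumed)
            field_guards: BTreeMap::from([(
                "Language.lastUpdate".to_string(),
                Box::new(|_ctx: &ResolverContext| false) as FnGuard,
            )]),
        },
        ..BuilderContext::default()
    }
}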
+#[derive(Default)]
+pub struct GuardsConfig {
+    /// entity guards are executed before accessing an entity
+    pub entity_guards: BTreeMap<String, FnGuard>,
+    /// field guards are executed before accessing an entity field
+    pub field_guards: BTreeMap<String, FnGuard>,
+}
+
+/// guards are functions that receive the application context
+pub type FnGuard = Box<dyn Fn(&ResolverContext) -> bool + Sync + Send>;
diff --git a/src/enumerations/active_enum.rs b/src/enumerations/active_enum.rs
new file mode 100644
index 00000000..163e3e14
--- /dev/null
+++ b/src/enumerations/active_enum.rs
@@ -0,0 +1,63 @@
+use async_graphql::dynamic::Enum;
+use heck::ToUpperCamelCase;
+use sea_orm::{ActiveEnum, DynIden, Value};
+
+use crate::BuilderContext;
+
+/// The configuration structure for ActiveEnumBuilder
+pub struct ActiveEnumConfig {
+    /// used to format enumeration name
+    pub type_name: crate::SimpleNamingFn,
+    /// used to format variant name
+    pub variant_name: crate::ComplexNamingFn,
+}
+
+impl std::default::Default for ActiveEnumConfig {
+    fn default() -> Self {
+        ActiveEnumConfig {
+            type_name: Box::new(|name: &str| -> String {
+                format!("{}Enum", name.to_upper_camel_case())
+            }),
+            variant_name: Box::new(|_enum_name: &str, variant: &str| -> String {
+                variant.to_upper_camel_case().to_ascii_uppercase()
+            }),
+        }
+    }
+}
+
+/// This builder is used to convert a SeaORM enumeration to GraphQL
+pub struct ActiveEnumBuilder {
+    pub context: &'static BuilderContext,
+}
+
+impl ActiveEnumBuilder {
+    /// used to format SeaORM enumeration name to GraphQL enumeration name
+    pub fn type_name<A: ActiveEnum>(&self) -> String {
+        let name = A::name().to_string();
+        self.context.active_enum.type_name.as_ref()(&name)
+    }
+
+    /// used to format enumeration Iden name to GraphQL enumeration name
+    pub fn type_name_from_iden(&self, name: &DynIden) -> String {
+        let name = name.to_string();
+        self.context.active_enum.type_name.as_ref()(&name)
+    }
+
+    /// used to format SeaORM variant name to GraphQL variant name
+    pub fn variant_name(&self, enum_name: &str, variant: &str) -> String {
+        self.context.active_enum.variant_name.as_ref()(enum_name, variant)
+    }
+
+    /// used to convert SeaORM enumeration to GraphQL enumeration
+    pub fn enumeration<A: ActiveEnum>(&self) -> Enum {
+        let enum_name = self.type_name::<A>();
+
+        A::values()
+            .into_iter()
+            .fold(Enum::new(&enum_name), |enumeration, variant| {
+                let variant: Value = variant.into();
+                let variant: String = variant.to_string();
+                enumeration.item(self.variant_name(&enum_name, &variant))
+            })
+    }
+}
diff --git a/src/enumerations/mod.rs b/src/enumerations/mod.rs
new file mode 100644
index 00000000..28e20c28
--- /dev/null
+++ b/src/enumerations/mod.rs
@@ -0,0 +1,5 @@
+pub mod order_by_enum;
+pub use order_by_enum::*;
+
+pub mod active_enum;
+pub use active_enum::*;
diff --git a/src/enumerations/order_by_enum.rs b/src/enumerations/order_by_enum.rs
new file mode 100644
index 00000000..7bc2c2c0
--- /dev/null
+++ b/src/enumerations/order_by_enum.rs
@@ -0,0 +1,57 @@
+use async_graphql::dynamic::{Enum, EnumItem};
+
+use crate::BuilderContext;
+
+/// The configuration structure for OrderByEnumBuilder
+pub struct OrderByEnumConfig {
+    /// the enumeration name
+    pub type_name: String,
+    /// the ASC variant name
+    pub asc_variant: String,
+    /// the DESC variant name
+    pub desc_variant: String,
+}
+
+impl std::default::Default for OrderByEnumConfig {
+    fn default() -> Self {
+        OrderByEnumConfig {
+            type_name: "OrderByEnum".into(),
+            asc_variant: "ASC".into(),
+            desc_variant: "DESC".into(),
+        }
+    }
+}
+
+/// The OrderBy enumeration is used for sorting entity fields
+pub struct
OrderByEnumBuilder { + pub context: &'static BuilderContext, +} + +impl OrderByEnumBuilder { + pub fn type_name(&self) -> String { + self.context.order_by_enum.type_name.clone() + } + + pub fn asc_variant(&self) -> String { + self.context.order_by_enum.asc_variant.clone() + } + + pub fn desc_variant(&self) -> String { + self.context.order_by_enum.desc_variant.clone() + } + + pub fn is_asc(&self, value: &str) -> bool { + self.context.order_by_enum.asc_variant.eq(value) + } + + pub fn is_desc(&self, value: &str) -> bool { + self.context.order_by_enum.desc_variant.eq(value) + } + + /// used to get the GraphQL enumeration config + pub fn enumeration(&self) -> Enum { + Enum::new(self.type_name()) + .item(EnumItem::new(self.asc_variant())) + .item(EnumItem::new(self.desc_variant())) + } +} diff --git a/src/inputs/active_enum_filter_input.rs b/src/inputs/active_enum_filter_input.rs new file mode 100644 index 00000000..5fc2ed4a --- /dev/null +++ b/src/inputs/active_enum_filter_input.rs @@ -0,0 +1,174 @@ +use async_graphql::dynamic::{InputObject, InputValue, ObjectAccessor, TypeRef}; +use heck::ToUpperCamelCase; +use sea_orm::{ActiveEnum, ColumnTrait, Condition, DynIden, Iden}; + +use crate::{ActiveEnumBuilder, BuilderContext}; + +/// The configuration structure for ActiveEnumFilterInputConfig +pub struct ActiveEnumFilterInputConfig { + /// used to format type_name + pub type_name: crate::SimpleNamingFn, +} + +impl std::default::Default for ActiveEnumFilterInputConfig { + fn default() -> Self { + ActiveEnumFilterInputConfig { + type_name: Box::new(|enum_name: &str| -> String { + format!("{}EnumFilterInput", enum_name.to_upper_camel_case()) + }), + } + } +} + +/// This builder produces a filter input for a SeaORM enumeration +pub struct ActiveEnumFilterInputBuilder { + pub context: &'static BuilderContext, +} + +impl ActiveEnumFilterInputBuilder { + /// used to get filter input name for SeaORM enumeration + pub fn type_name(&self) -> String { + let enum_name = A::name().to_string(); + self.context.active_enum_filter_input.type_name.as_ref()(&enum_name) + } + + /// used to get filter input name for SeaORM enumeration Iden + pub fn type_name_from_iden(&self, enum_name: &DynIden) -> String { + let enum_name = enum_name.to_string(); + self.context.active_enum_filter_input.type_name.as_ref()(&enum_name) + } + + /// used to get filter input object for SeaORM enumeration + pub fn input_object(&self) -> InputObject { + let active_enum_builder = ActiveEnumBuilder { + context: self.context, + }; + + let name = self.type_name::(); + + let enum_name = active_enum_builder.type_name::(); + + InputObject::new(name) + .field(InputValue::new("eq", TypeRef::named(&enum_name))) + .field(InputValue::new("ne", TypeRef::named(&enum_name))) + .field(InputValue::new("gt", TypeRef::named(&enum_name))) + .field(InputValue::new("gte", TypeRef::named(&enum_name))) + .field(InputValue::new("lt", TypeRef::named(&enum_name))) + .field(InputValue::new("lte", TypeRef::named(&enum_name))) + .field(InputValue::new("is_in", TypeRef::named_nn_list(&enum_name))) + .field(InputValue::new( + "is_not_in", + TypeRef::named_nn_list(&enum_name), + )) + .field(InputValue::new("is_null", TypeRef::named(TypeRef::BOOLEAN))) + } +} + +/// used to update the query condition with enumeration filters +pub fn prepare_enumeration_condition( + filter: &ObjectAccessor, + column: T, + variants: &[std::sync::Arc], + condition: Condition, +) -> Condition +where + T: ColumnTrait, +{ + let extract_variant = move |input: &str| -> String { + let variant = 
variants.iter().find(|variant| { + let variant = variant + .to_string() + .to_upper_camel_case() + .to_ascii_uppercase(); + variant.eq(input) + }); + variant.unwrap().to_string() + }; + + let condition = if let Some(data) = filter.get("eq") { + let data = data.enum_name().unwrap(); + condition.add(column.eq(extract_variant(data))) + } else { + condition + }; + + let condition = if let Some(data) = filter.get("ne") { + let data = data.enum_name().unwrap(); + condition.add(column.ne(extract_variant(data))) + } else { + condition + }; + + let condition = if let Some(data) = filter.get("gt") { + let data = data.enum_name().unwrap(); + condition.add(column.gt(extract_variant(data))) + } else { + condition + }; + + let condition = if let Some(data) = filter.get("gte") { + let data = data.enum_name().unwrap(); + condition.add(column.gte(extract_variant(data))) + } else { + condition + }; + + let condition = if let Some(data) = filter.get("lt") { + let data = data.enum_name().unwrap(); + condition.add(column.lt(extract_variant(data))) + } else { + condition + }; + + let condition = if let Some(data) = filter.get("lte") { + let data = data.enum_name().unwrap(); + condition.add(column.lte(extract_variant(data))) + } else { + condition + }; + + let condition = match filter.get("is_in") { + Some(data) => { + let data: Vec<_> = data + .list() + .unwrap() + .iter() + .map(|item| item.enum_name().unwrap().to_string()) + .map(|v| extract_variant(&v)) + .collect(); + + condition.add(column.is_in(data)) + } + None => condition, + }; + + let condition = match filter.get("is_not_in") { + Some(data) => { + let data: Vec<_> = data + .list() + .unwrap() + .iter() + .map(|item| item.enum_name().unwrap().to_string()) + .map(|v| extract_variant(&v)) + .collect(); + + condition.add(column.is_not_in(data)) + } + None => condition, + }; + + let condition = match filter.get("is_null") { + Some(data) => { + let data = data.boolean().unwrap(); + + if data { + condition.add(column.is_null()) + } else { + condition + } + } + None => condition, + }; + + condition +} diff --git a/src/inputs/cursor_input.rs b/src/inputs/cursor_input.rs new file mode 100644 index 00000000..e0a6c8c5 --- /dev/null +++ b/src/inputs/cursor_input.rs @@ -0,0 +1,69 @@ +use async_graphql::dynamic::{InputObject, InputValue, ObjectAccessor, TypeRef}; + +use crate::BuilderContext; + +/// used to hold information about cursor pagination +#[derive(Clone, Debug)] +pub struct CursorInput { + pub cursor: Option, + pub limit: u64, +} + +/// The configuration structure for CursorInputBuilder +pub struct CursorInputConfig { + /// name of the object + pub type_name: String, + /// name for 'cursor' field + pub cursor: String, + /// name for 'limit' field + pub limit: String, +} + +impl std::default::Default for CursorInputConfig { + fn default() -> Self { + Self { + type_name: "CursorInput".into(), + cursor: "cursor".into(), + limit: "limit".into(), + } + } +} + +/// This builder produces the cursor pagination options input object +pub struct CursorInputBuilder { + pub context: &'static BuilderContext, +} + +impl CursorInputBuilder { + /// used to get type name + pub fn type_name(&self) -> String { + self.context.cursor_input.type_name.clone() + } + + /// used to get cursor pagination options object + pub fn input_object(&self) -> InputObject { + InputObject::new(&self.context.cursor_input.type_name) + .field(InputValue::new( + &self.context.cursor_input.cursor, + TypeRef::named(TypeRef::STRING), + )) + .field(InputValue::new( + &self.context.cursor_input.limit, + 
TypeRef::named_nn(TypeRef::INT), + )) + } + + /// used to parse query input to cursor pagination options struct + pub fn parse_object(&self, object: &ObjectAccessor) -> CursorInput { + let limit = object + .get(&self.context.cursor_input.limit) + .unwrap() + .u64() + .unwrap(); + + let cursor = object.get(&self.context.cursor_input.cursor); + let cursor: Option = cursor.map(|cursor| cursor.string().unwrap().into()); + + CursorInput { cursor, limit } + } +} diff --git a/src/inputs/filter_input.rs b/src/inputs/filter_input.rs new file mode 100644 index 00000000..6eb77f6d --- /dev/null +++ b/src/inputs/filter_input.rs @@ -0,0 +1,1211 @@ +use async_graphql::dynamic::{InputObject, InputValue, ObjectAccessor, TypeRef}; +use sea_orm::{ColumnTrait, ColumnType, Condition, EntityTrait, Iterable, Value}; + +use crate::{ActiveEnumFilterInputBuilder, BuilderContext, EntityObjectBuilder}; + +/// The configuration structure for FilterInputBuilder +pub struct FilterInputConfig { + /// the filter input type name formatter function + pub type_name: crate::SimpleNamingFn, + /// filter input object name for string type + pub string_type: String, + /// filter input object name for integer type + pub integer_type: String, + /// filter input object name for float type + pub float_type: String, + /// filter input object name for text type + pub text_type: String, + /// filter input object name for boolean type + pub boolean_type: String, + /// filter input object name for id type + pub id_type: String, +} + +impl std::default::Default for FilterInputConfig { + fn default() -> Self { + FilterInputConfig { + type_name: Box::new(|object_name: &str| -> String { + format!("{}FilterInput", object_name) + }), + string_type: "StringFilterInput".into(), + integer_type: "IntegerFilterInput".into(), + float_type: "FloatFilterInput".into(), + text_type: "TextFilterInput".into(), + boolean_type: "BooleanFilterInput".into(), + id_type: "IdFilterInput".into(), + } + } +} + +/// This builder is used to produce the filter input object of a SeaORM entity +pub struct FilterInputBuilder { + pub context: &'static BuilderContext, +} + +impl FilterInputBuilder { + /// used to get the filter input object name + /// object_name is the name of the SeaORM Entity GraphQL object + pub fn type_name(&self, object_name: &str) -> String { + self.context.filter_input.type_name.as_ref()(object_name) + } + + /// used to produce the filter input object of a SeaORM entity + pub fn to_object(&self) -> InputObject + where + T: EntityTrait, + ::Model: Sync, + { + let active_enum_filter_input_builder = ActiveEnumFilterInputBuilder { + context: self.context, + }; + let entity_object_builder = EntityObjectBuilder { + context: self.context, + }; + let object_name = entity_object_builder.type_name::(); + + let name = self.type_name(&object_name); + + let object = T::Column::iter().fold(InputObject::new(&name), |object, column| { + let column_name = entity_object_builder.column_name::(column); + + let field = match column.def().get_column_type() { + ColumnType::Char(_) | ColumnType::String(_) | ColumnType::Text => { + Some(InputValue::new( + column_name, + TypeRef::named(&self.context.filter_input.string_type), + )) + } + ColumnType::TinyInteger + | ColumnType::SmallInteger + | ColumnType::Integer + | ColumnType::BigInteger + | ColumnType::TinyUnsigned + | ColumnType::SmallUnsigned + | ColumnType::Unsigned + | ColumnType::BigUnsigned => Some(InputValue::new( + column_name, + TypeRef::named(&self.context.filter_input.integer_type), + )), + ColumnType::Float | 
ColumnType::Double => Some(InputValue::new( + column_name, + TypeRef::named(&self.context.filter_input.float_type), + )), + ColumnType::Decimal(_) | ColumnType::Money(_) => Some(InputValue::new( + column_name, + TypeRef::named(&self.context.filter_input.text_type), + )), + ColumnType::DateTime + | ColumnType::Timestamp + | ColumnType::TimestampWithTimeZone + | ColumnType::Time + | ColumnType::Date => Some(InputValue::new( + column_name, + TypeRef::named(&self.context.filter_input.text_type), + )), + ColumnType::Year(_) => Some(InputValue::new( + column_name, + TypeRef::named(&self.context.filter_input.integer_type), + )), + ColumnType::Interval(_, _) => Some(InputValue::new( + column_name, + TypeRef::named(&self.context.filter_input.text_type), + )), + // FIXME: binary type + // ColumnType::Binary(_) | + // ColumnType::VarBinary(_) | + // ColumnType::Bit(_) | + // ColumnType::VarBit(_) => Some(InputValue::new( + // column_name, + // TypeRef::named(&self.context.filter_input.text_type), + // )), + ColumnType::Boolean => Some(InputValue::new( + column_name, + TypeRef::named(&self.context.filter_input.boolean_type), + )), + // FIXME: json type + // ColumnType::Json | ColumnType::JsonBinary => Some(InputValue::new( + // column_name, + // TypeRef::named(&self.context.filter_input.text_type), + // )), + ColumnType::Uuid => Some(InputValue::new( + column_name, + TypeRef::named(&self.context.filter_input.text_type), + )), + ColumnType::Enum { + name: enum_name, + variants: _, + } => Some(InputValue::new( + column_name, + TypeRef::named(active_enum_filter_input_builder.type_name_from_iden(enum_name)), + )), + // FIXME: cidr, inet, mac type + ColumnType::Cidr | ColumnType::Inet | ColumnType::MacAddr => Some(InputValue::new( + column_name, + TypeRef::named(&self.context.filter_input.text_type), + )), + // FIXME: support array types + // ColumnType::Array(_) => {} + // FIXME: support custom types + // ColumnType::Custom(iden) => {} + _ => None, + }; + + match field { + Some(field) => object.field(field), + None => object, + } + }); + + // FIXME: 'and' & 'or' should be configurable + object + .field(InputValue::new("and", TypeRef::named_nn_list(&name))) + .field(InputValue::new("or", TypeRef::named_nn_list(&name))) + } + + /// used to get the input object for string type fields + pub fn string_filter(&self) -> InputObject { + InputObject::new(&self.context.filter_input.string_type) + .field(InputValue::new("eq", TypeRef::named(TypeRef::STRING))) + .field(InputValue::new("ne", TypeRef::named(TypeRef::STRING))) + .field(InputValue::new("gt", TypeRef::named(TypeRef::STRING))) + .field(InputValue::new("gte", TypeRef::named(TypeRef::STRING))) + .field(InputValue::new("lt", TypeRef::named(TypeRef::STRING))) + .field(InputValue::new("lte", TypeRef::named(TypeRef::STRING))) + .field(InputValue::new( + "is_in", + TypeRef::named_nn_list(TypeRef::STRING), + )) + .field(InputValue::new( + "is_not_in", + TypeRef::named_nn_list(TypeRef::STRING), + )) + .field(InputValue::new("is_null", TypeRef::named(TypeRef::BOOLEAN))) + .field(InputValue::new( + "is_not_null", + TypeRef::named(TypeRef::BOOLEAN), + )) + .field(InputValue::new("contains", TypeRef::named(TypeRef::STRING))) + .field(InputValue::new( + "starts_with", + TypeRef::named(TypeRef::STRING), + )) + .field(InputValue::new( + "ends_with", + TypeRef::named(TypeRef::STRING), + )) + .field(InputValue::new("like", TypeRef::named(TypeRef::STRING))) + .field(InputValue::new("not_like", TypeRef::named(TypeRef::STRING))) + } + + /// used to get the input object for 
text type fields + pub fn text_filter(&self) -> InputObject { + InputObject::new(&self.context.filter_input.text_type) + .field(InputValue::new("eq", TypeRef::named(TypeRef::STRING))) + .field(InputValue::new("ne", TypeRef::named(TypeRef::STRING))) + .field(InputValue::new("gt", TypeRef::named(TypeRef::STRING))) + .field(InputValue::new("gte", TypeRef::named(TypeRef::STRING))) + .field(InputValue::new("lt", TypeRef::named(TypeRef::STRING))) + .field(InputValue::new("lte", TypeRef::named(TypeRef::STRING))) + .field(InputValue::new( + "is_in", + TypeRef::named_nn_list(TypeRef::STRING), + )) + .field(InputValue::new( + "is_not_in", + TypeRef::named_nn_list(TypeRef::STRING), + )) + .field(InputValue::new("is_null", TypeRef::named(TypeRef::BOOLEAN))) + .field(InputValue::new( + "is_not_null", + TypeRef::named(TypeRef::BOOLEAN), + )) + } + + /// used to get the input object for integer type fields + pub fn integer_filter(&self) -> InputObject { + InputObject::new(&self.context.filter_input.integer_type) + .field(InputValue::new("eq", TypeRef::named(TypeRef::INT))) + .field(InputValue::new("ne", TypeRef::named(TypeRef::INT))) + .field(InputValue::new("gt", TypeRef::named(TypeRef::INT))) + .field(InputValue::new("gte", TypeRef::named(TypeRef::INT))) + .field(InputValue::new("lt", TypeRef::named(TypeRef::INT))) + .field(InputValue::new("lte", TypeRef::named(TypeRef::INT))) + .field(InputValue::new( + "is_in", + TypeRef::named_nn_list(TypeRef::INT), + )) + .field(InputValue::new( + "is_not_in", + TypeRef::named_nn_list(TypeRef::INT), + )) + .field(InputValue::new("is_null", TypeRef::named(TypeRef::BOOLEAN))) + .field(InputValue::new( + "is_not_null", + TypeRef::named(TypeRef::BOOLEAN), + )) + } + + /// used to get the input object for float type fields + pub fn float_filter(&self) -> InputObject { + InputObject::new(&self.context.filter_input.float_type) + .field(InputValue::new("eq", TypeRef::named(TypeRef::FLOAT))) + .field(InputValue::new("ne", TypeRef::named(TypeRef::FLOAT))) + .field(InputValue::new("gt", TypeRef::named(TypeRef::FLOAT))) + .field(InputValue::new("gte", TypeRef::named(TypeRef::FLOAT))) + .field(InputValue::new("lt", TypeRef::named(TypeRef::FLOAT))) + .field(InputValue::new("lte", TypeRef::named(TypeRef::FLOAT))) + .field(InputValue::new( + "is_in", + TypeRef::named_nn_list(TypeRef::FLOAT), + )) + .field(InputValue::new( + "is_not_in", + TypeRef::named_nn_list(TypeRef::FLOAT), + )) + .field(InputValue::new("is_null", TypeRef::named(TypeRef::BOOLEAN))) + .field(InputValue::new( + "is_not_null", + TypeRef::named(TypeRef::BOOLEAN), + )) + } + + /// used to get the input object for boolean type fields + pub fn boolean_filter(&self) -> InputObject { + InputObject::new(&self.context.filter_input.boolean_type) + .field(InputValue::new("eq", TypeRef::named(TypeRef::BOOLEAN))) + .field(InputValue::new("ne", TypeRef::named(TypeRef::BOOLEAN))) + .field(InputValue::new("gt", TypeRef::named(TypeRef::BOOLEAN))) + .field(InputValue::new("gte", TypeRef::named(TypeRef::BOOLEAN))) + .field(InputValue::new("lt", TypeRef::named(TypeRef::BOOLEAN))) + .field(InputValue::new("lte", TypeRef::named(TypeRef::BOOLEAN))) + .field(InputValue::new( + "is_in", + TypeRef::named_nn_list(TypeRef::BOOLEAN), + )) + .field(InputValue::new( + "is_not_in", + TypeRef::named_nn_list(TypeRef::BOOLEAN), + )) + .field(InputValue::new("is_null", TypeRef::named(TypeRef::BOOLEAN))) + .field(InputValue::new( + "is_not_null", + TypeRef::named(TypeRef::BOOLEAN), + )) + } + + /// used to get the input object for id type fields + 
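// The prepare_* helpers that follow translate a parsed GraphQL filter object into a
// SeaORM `Condition`. As a rough, hypothetical sketch, a filter such as
// `{ title: { contains: "ACADEMY" }, description: { is_not_null: true } }` produces a
// condition roughly equivalent to the one built here; `film` stands in for a generated
// SeaORM entity module and is not part of this change.
use sea_orm::{ColumnTrait, Condition};

fn example_film_condition() -> Condition {
    // NOTE: `film` is a hypothetical generated entity module, used only for illustration.
    Condition::all()
        .add(film::Column::Title.contains("ACADEMY"))
        .add(film::Column::Description.is_not_null())
}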
pub fn id_filter(&self) -> InputObject { + InputObject::new(&self.context.filter_input.id_type) + .field(InputValue::new("eq", TypeRef::named(TypeRef::ID))) + .field(InputValue::new("ne", TypeRef::named(TypeRef::ID))) + .field(InputValue::new("gt", TypeRef::named(TypeRef::ID))) + .field(InputValue::new("gte", TypeRef::named(TypeRef::ID))) + .field(InputValue::new("lt", TypeRef::named(TypeRef::ID))) + .field(InputValue::new("lte", TypeRef::named(TypeRef::ID))) + .field(InputValue::new( + "is_in", + TypeRef::named_nn_list(TypeRef::ID), + )) + .field(InputValue::new( + "is_not_in", + TypeRef::named_nn_list(TypeRef::ID), + )) + .field(InputValue::new("is_null", TypeRef::named(TypeRef::BOOLEAN))) + .field(InputValue::new( + "is_not_null", + TypeRef::named(TypeRef::BOOLEAN), + )) + } +} + +/// used to update the query condition with string filters +pub fn prepare_string_condition( + filter: &ObjectAccessor, + column: T, + condition: Condition, +) -> Condition +where + T: ColumnTrait, +{ + let condition = match filter.get("eq") { + Some(data) => { + let data = data.string().unwrap(); + + condition.add(column.eq(data)) + } + None => condition, + }; + + let condition = match filter.get("ne") { + Some(data) => { + let data = data.string().unwrap(); + + condition.add(column.ne(data)) + } + None => condition, + }; + + let condition = match filter.get("gt") { + Some(data) => { + let data = data.string().unwrap(); + + condition.add(column.gt(data)) + } + None => condition, + }; + + let condition = match filter.get("gte") { + Some(data) => { + let data = data.string().unwrap(); + + condition.add(column.gte(data)) + } + None => condition, + }; + + let condition = match filter.get("lt") { + Some(data) => { + let data = data.string().unwrap(); + + condition.add(column.lt(data)) + } + None => condition, + }; + + let condition = match filter.get("lte") { + Some(data) => { + let data = data.string().unwrap(); + + condition.add(column.lte(data)) + } + None => condition, + }; + + let condition = match filter.get("is_in") { + Some(data) => { + let data = data.list().unwrap(); + let data: Vec = data + .iter() + .map(|v| v.string().unwrap().to_string()) + .collect(); + + condition.add(column.is_in(data)) + } + None => condition, + }; + + let condition = match filter.get("is_not_in") { + Some(data) => { + let data = data.list().unwrap(); + let data: Vec = data + .iter() + .map(|v| v.string().unwrap().to_string()) + .collect(); + + condition.add(column.is_not_in(data)) + } + None => condition, + }; + + let condition = match filter.get("is_null") { + Some(data) => { + let data = data.boolean().unwrap(); + + if data { + condition.add(column.is_null()) + } else { + condition + } + } + None => condition, + }; + + let condition = match filter.get("is_not_null") { + Some(data) => { + let data = data.boolean().unwrap(); + + if data { + condition.add(column.is_not_null()) + } else { + condition + } + } + None => condition, + }; + + let condition = match filter.get("contains") { + Some(data) => { + let data = data.string().unwrap(); + + condition.add(column.contains(data)) + } + None => condition, + }; + + let condition = match filter.get("starts_with") { + Some(data) => { + let data = data.string().unwrap(); + + condition.add(column.starts_with(data)) + } + None => condition, + }; + + let condition = match filter.get("ends_with") { + Some(data) => { + let data = data.string().unwrap(); + + condition.add(column.ends_with(data)) + } + None => condition, + }; + + let condition = match filter.get("like") { + Some(data) => { + 
let data = data.string().unwrap(); + + condition.add(column.like(data)) + } + None => condition, + }; + + let condition = match filter.get("not_like") { + Some(data) => { + let data = data.string().unwrap(); + + condition.add(column.not_like(data)) + } + None => condition, + }; + + condition +} + +/// used to update the query condition with text filters +pub fn prepare_text_condition( + filter: &ObjectAccessor, + column: T, + condition: Condition, +) -> Condition +where + T: ColumnTrait, +{ + let condition = match filter.get("eq") { + Some(data) => { + let data = data.string().unwrap(); + + condition.add(column.eq(data)) + } + None => condition, + }; + + let condition = match filter.get("ne") { + Some(data) => { + let data = data.string().unwrap(); + + condition.add(column.ne(data)) + } + None => condition, + }; + + let condition = match filter.get("gt") { + Some(data) => { + let data = data.string().unwrap(); + + condition.add(column.gt(data)) + } + None => condition, + }; + + let condition = match filter.get("gte") { + Some(data) => { + let data = data.string().unwrap(); + + condition.add(column.gte(data)) + } + None => condition, + }; + + let condition = match filter.get("lt") { + Some(data) => { + let data = data.string().unwrap(); + + condition.add(column.lt(data)) + } + None => condition, + }; + + let condition = match filter.get("lte") { + Some(data) => { + let data = data.string().unwrap(); + + condition.add(column.lte(data)) + } + None => condition, + }; + + let condition = match filter.get("is_in") { + Some(data) => { + let data = data.list().unwrap(); + let data: Vec = data + .iter() + .map(|v| v.string().unwrap().to_string()) + .collect(); + + condition.add(column.is_in(data)) + } + None => condition, + }; + + let condition = match filter.get("is_not_in") { + Some(data) => { + let data = data.list().unwrap(); + let data: Vec = data + .iter() + .map(|v| v.string().unwrap().to_string()) + .collect(); + + condition.add(column.is_not_in(data)) + } + None => condition, + }; + + let condition = match filter.get("is_null") { + Some(data) => { + let data = data.boolean().unwrap(); + + if data { + condition.add(column.is_null()) + } else { + condition + } + } + None => condition, + }; + + let condition = match filter.get("is_not_null") { + Some(data) => { + let data = data.boolean().unwrap(); + + if data { + condition.add(column.is_not_null()) + } else { + condition + } + } + None => condition, + }; + + condition +} + +/// used to update the query condition with custom parse filters +pub fn prepare_parsed_condition( + filter: &ObjectAccessor, + column: T, + parse: F, + condition: Condition, +) -> Condition +where + T: ColumnTrait, + Y: Into, + F: Fn(String) -> Y, +{ + let condition = match filter.get("eq") { + Some(data) => { + let data = data.string().unwrap().to_string(); + let data = parse(data); + + condition.add(column.eq(data)) + } + None => condition, + }; + + let condition = match filter.get("ne") { + Some(data) => { + let data = data.string().unwrap().to_string(); + let data = parse(data); + + condition.add(column.ne(data)) + } + None => condition, + }; + + let condition = match filter.get("gt") { + Some(data) => { + let data = data.string().unwrap().to_string(); + let data = parse(data); + + condition.add(column.gt(data)) + } + None => condition, + }; + + let condition = match filter.get("gte") { + Some(data) => { + let data = data.string().unwrap().to_string(); + let data = parse(data); + + condition.add(column.gte(data)) + } + None => condition, + }; + + let condition = 
match filter.get("lt") { + Some(data) => { + let data = data.string().unwrap().to_string(); + let data = parse(data); + + condition.add(column.lt(data)) + } + None => condition, + }; + + let condition = match filter.get("lte") { + Some(data) => { + let data = data.string().unwrap().to_string(); + let data = parse(data); + + condition.add(column.lte(data)) + } + None => condition, + }; + + let condition = match filter.get("is_in") { + Some(data) => { + let data: Vec<_> = data + .list() + .unwrap() + .iter() + .map(|item| item.string().unwrap().to_string()) + .map(&parse) + .collect(); + + condition.add(column.is_in(data)) + } + None => condition, + }; + + let condition = match filter.get("is_not_in") { + Some(data) => { + let data: Vec<_> = data + .list() + .unwrap() + .iter() + .map(|item| item.string().unwrap().to_string()) + .map(&parse) + .collect(); + + condition.add(column.is_not_in(data)) + } + None => condition, + }; + + let condition = match filter.get("is_null") { + Some(data) => { + let data = data.boolean().unwrap(); + + if data { + condition.add(column.is_null()) + } else { + condition + } + } + None => condition, + }; + + let condition = match filter.get("is_not_null") { + Some(data) => { + let data = data.boolean().unwrap(); + + if data { + condition.add(column.is_not_null()) + } else { + condition + } + } + None => condition, + }; + + condition +} + +/// used to update the query condition with integer filters +pub fn prepare_integer_condition( + filter: &ObjectAccessor, + column: T, + condition: Condition, +) -> Condition +where + T: ColumnTrait, +{ + let condition = match filter.get("eq") { + Some(data) => { + let data = data.i64().unwrap(); + + condition.add(column.eq(data)) + } + None => condition, + }; + + let condition = match filter.get("ne") { + Some(data) => { + let data = data.i64().unwrap(); + + condition.add(column.ne(data)) + } + None => condition, + }; + + let condition = match filter.get("gt") { + Some(data) => { + let data = data.i64().unwrap(); + + condition.add(column.gt(data)) + } + None => condition, + }; + + let condition = match filter.get("gte") { + Some(data) => { + let data = data.i64().unwrap(); + + condition.add(column.gte(data)) + } + None => condition, + }; + + let condition = match filter.get("lt") { + Some(data) => { + let data = data.i64().unwrap(); + + condition.add(column.lt(data)) + } + None => condition, + }; + + let condition = match filter.get("lte") { + Some(data) => { + let data = data.i64().unwrap(); + + condition.add(column.lte(data)) + } + None => condition, + }; + + let condition = match filter.get("is_in") { + Some(data) => { + let data: Vec = data + .list() + .unwrap() + .iter() + .map(|item| item.i64().unwrap()) + .collect(); + + condition.add(column.is_in(data)) + } + None => condition, + }; + + let condition = match filter.get("is_not_in") { + Some(data) => { + let data: Vec = data + .list() + .unwrap() + .iter() + .map(|item| item.i64().unwrap()) + .collect(); + + condition.add(column.is_not_in(data)) + } + None => condition, + }; + + let condition = match filter.get("is_null") { + Some(data) => { + let data = data.boolean().unwrap(); + + if data { + condition.add(column.is_null()) + } else { + condition + } + } + None => condition, + }; + + let condition = match filter.get("is_not_null") { + Some(data) => { + let data = data.boolean().unwrap(); + + if data { + condition.add(column.is_not_null()) + } else { + condition + } + } + None => condition, + }; + + condition +} + +/// used to update the query condition with unsigned 
filters +pub fn prepare_unsigned_condition( + filter: &ObjectAccessor, + column: T, + condition: Condition, +) -> Condition +where + T: ColumnTrait, +{ + let condition = match filter.get("eq") { + Some(data) => { + let data = data.u64().unwrap(); + + condition.add(column.eq(data)) + } + None => condition, + }; + + let condition = match filter.get("ne") { + Some(data) => { + let data = data.u64().unwrap(); + + condition.add(column.ne(data)) + } + None => condition, + }; + + let condition = match filter.get("gt") { + Some(data) => { + let data = data.u64().unwrap(); + + condition.add(column.gt(data)) + } + None => condition, + }; + + let condition = match filter.get("gte") { + Some(data) => { + let data = data.u64().unwrap(); + + condition.add(column.gte(data)) + } + None => condition, + }; + + let condition = match filter.get("lt") { + Some(data) => { + let data = data.u64().unwrap(); + + condition.add(column.lt(data)) + } + None => condition, + }; + + let condition = match filter.get("lte") { + Some(data) => { + let data = data.u64().unwrap(); + + condition.add(column.lte(data)) + } + None => condition, + }; + + let condition = match filter.get("is_in") { + Some(data) => { + let data: Vec = data + .list() + .unwrap() + .iter() + .map(|item| item.u64().unwrap()) + .collect(); + + condition.add(column.is_in(data)) + } + None => condition, + }; + + let condition = match filter.get("is_not_in") { + Some(data) => { + let data: Vec = data + .list() + .unwrap() + .iter() + .map(|item| item.u64().unwrap()) + .collect(); + + condition.add(column.is_not_in(data)) + } + None => condition, + }; + + let condition = match filter.get("is_null") { + Some(data) => { + let data = data.boolean().unwrap(); + + if data { + condition.add(column.is_null()) + } else { + condition + } + } + None => condition, + }; + + let condition = match filter.get("is_not_null") { + Some(data) => { + let data = data.boolean().unwrap(); + + if data { + condition.add(column.is_not_null()) + } else { + condition + } + } + None => condition, + }; + + condition +} + +/// used to update the query condition with float filters +pub fn prepare_float_condition( + filter: &ObjectAccessor, + column: T, + condition: Condition, +) -> Condition +where + T: ColumnTrait, +{ + let condition = match filter.get("eq") { + Some(data) => { + let data = data.f64().unwrap(); + + condition.add(column.eq(data)) + } + None => condition, + }; + + let condition = match filter.get("ne") { + Some(data) => { + let data = data.f64().unwrap(); + + condition.add(column.ne(data)) + } + None => condition, + }; + + let condition = match filter.get("gt") { + Some(data) => { + let data = data.f64().unwrap(); + + condition.add(column.gt(data)) + } + None => condition, + }; + + let condition = match filter.get("gte") { + Some(data) => { + let data = data.f64().unwrap(); + + condition.add(column.gte(data)) + } + None => condition, + }; + + let condition = match filter.get("lt") { + Some(data) => { + let data = data.f64().unwrap(); + + condition.add(column.lt(data)) + } + None => condition, + }; + + let condition = match filter.get("lte") { + Some(data) => { + let data = data.f64().unwrap(); + + condition.add(column.lte(data)) + } + None => condition, + }; + + let condition = match filter.get("is_in") { + Some(data) => { + let data: Vec = data + .list() + .unwrap() + .iter() + .map(|item| item.f64().unwrap()) + .collect(); + + condition.add(column.is_in(data)) + } + None => condition, + }; + + let condition = match filter.get("is_not_in") { + Some(data) => { + let data: 
Vec = data + .list() + .unwrap() + .iter() + .map(|item| item.f64().unwrap()) + .collect(); + + condition.add(column.is_not_in(data)) + } + None => condition, + }; + + let condition = match filter.get("is_null") { + Some(data) => { + let data = data.boolean().unwrap(); + + if data { + condition.add(column.is_null()) + } else { + condition + } + } + None => condition, + }; + + let condition = match filter.get("is_not_null") { + Some(data) => { + let data = data.boolean().unwrap(); + + if data { + condition.add(column.is_not_null()) + } else { + condition + } + } + None => condition, + }; + + condition +} + +/// used to update the query condition with boolean filters +pub fn prepare_boolean_condition( + filter: &ObjectAccessor, + column: T, + condition: Condition, +) -> Condition +where + T: ColumnTrait, +{ + let condition = match filter.get("eq") { + Some(data) => { + let data = data.boolean().unwrap(); + + condition.add(column.eq(data)) + } + None => condition, + }; + + let condition = match filter.get("ne") { + Some(data) => { + let data = data.boolean().unwrap(); + + condition.add(column.ne(data)) + } + None => condition, + }; + + let condition = match filter.get("gt") { + Some(data) => { + let data = data.boolean().unwrap(); + + condition.add(column.gt(data)) + } + None => condition, + }; + + let condition = match filter.get("gte") { + Some(data) => { + let data = data.boolean().unwrap(); + + condition.add(column.gte(data)) + } + None => condition, + }; + + let condition = match filter.get("lt") { + Some(data) => { + let data = data.boolean().unwrap(); + + condition.add(column.lt(data)) + } + None => condition, + }; + + let condition = match filter.get("lte") { + Some(data) => { + let data = data.boolean().unwrap(); + + condition.add(column.lte(data)) + } + None => condition, + }; + + let condition = match filter.get("is_in") { + Some(data) => { + let data: Vec = data + .list() + .unwrap() + .iter() + .map(|item| item.boolean().unwrap()) + .collect(); + + condition.add(column.is_in(data)) + } + None => condition, + }; + + let condition = match filter.get("is_not_in") { + Some(data) => { + let data: Vec = data + .list() + .unwrap() + .iter() + .map(|item| item.boolean().unwrap()) + .collect(); + + condition.add(column.is_not_in(data)) + } + None => condition, + }; + + let condition = match filter.get("is_null") { + Some(data) => { + let data = data.boolean().unwrap(); + + if data { + condition.add(column.is_null()) + } else { + condition + } + } + None => condition, + }; + + let condition = match filter.get("is_not_null") { + Some(data) => { + let data = data.boolean().unwrap(); + + if data { + condition.add(column.is_not_null()) + } else { + condition + } + } + None => condition, + }; + + condition +} diff --git a/src/inputs/mod.rs b/src/inputs/mod.rs new file mode 100644 index 00000000..91091b24 --- /dev/null +++ b/src/inputs/mod.rs @@ -0,0 +1,21 @@ +// INPUTS +pub mod cursor_input; +pub use cursor_input::*; + +pub mod page_input; +pub use page_input::*; + +pub mod offset_input; +pub use offset_input::*; + +pub mod pagination_input; +pub use pagination_input::*; + +pub mod order_input; +pub use order_input::*; + +pub mod filter_input; +pub use filter_input::*; + +pub mod active_enum_filter_input; +pub use active_enum_filter_input::*; diff --git a/src/inputs/offset_input.rs b/src/inputs/offset_input.rs new file mode 100644 index 00000000..69f39a82 --- /dev/null +++ b/src/inputs/offset_input.rs @@ -0,0 +1,71 @@ +use async_graphql::dynamic::{InputObject, InputValue, ObjectAccessor, 
TypeRef}; + +use crate::BuilderContext; + +/// used to hold information about offset pagination +#[derive(Clone, Debug)] +pub struct OffsetInput { + pub offset: u64, + pub limit: u64, +} + +/// The configuration structure for OffsetInputBuilder +pub struct OffsetInputConfig { + /// name of the object + pub type_name: String, + /// name for 'offset' field + pub offset: String, + /// name for 'limit' field + pub limit: String, +} + +impl std::default::Default for OffsetInputConfig { + fn default() -> Self { + Self { + type_name: "OffsetInput".into(), + offset: "offset".into(), + limit: "limit".into(), + } + } +} + +/// This builder produces the offset pagination options input object +pub struct OffsetInputBuilder { + pub context: &'static BuilderContext, +} + +impl OffsetInputBuilder { + /// used to get type name + pub fn type_name(&self) -> String { + self.context.offset_input.type_name.clone() + } + + /// used to get offset pagination options object + pub fn input_object(&self) -> InputObject { + InputObject::new(&self.context.offset_input.type_name) + .field(InputValue::new( + &self.context.offset_input.limit, + TypeRef::named_nn(TypeRef::INT), + )) + .field(InputValue::new( + &self.context.offset_input.offset, + TypeRef::named_nn(TypeRef::INT), + )) + } + + /// used to parse query input to offset pagination options struct + pub fn parse_object(&self, object: &ObjectAccessor) -> OffsetInput { + let offset = object + .get(&self.context.offset_input.offset) + .map_or_else(|| Ok(0), |v| v.u64()) + .unwrap(); + + let limit = object + .get(&self.context.offset_input.limit) + .unwrap() + .u64() + .unwrap(); + + OffsetInput { offset, limit } + } +} diff --git a/src/inputs/order_input.rs b/src/inputs/order_input.rs new file mode 100644 index 00000000..35a2b50b --- /dev/null +++ b/src/inputs/order_input.rs @@ -0,0 +1,53 @@ +use async_graphql::dynamic::{InputObject, InputValue, TypeRef}; +use sea_orm::{EntityTrait, Iterable}; + +use crate::{BuilderContext, EntityObjectBuilder}; + +/// The configuration structure for OrderInputBuilder +pub struct OrderInputConfig { + /// used to format OrderInput object name + pub type_name: crate::SimpleNamingFn, +} + +impl std::default::Default for OrderInputConfig { + fn default() -> Self { + OrderInputConfig { + type_name: Box::new(|object_name: &str| -> String { + format!("{}OrderInput", object_name) + }), + } + } +} + +/// This builder produces the OrderInput object of a SeaORM entity +pub struct OrderInputBuilder { + pub context: &'static BuilderContext, +} + +impl OrderInputBuilder { + /// used to get type name + pub fn type_name(&self, object_name: &str) -> String { + self.context.order_input.type_name.as_ref()(object_name) + } + + /// used to get the OrderInput object of a SeaORM entity + pub fn to_object(&self) -> InputObject + where + T: EntityTrait, + ::Model: Sync, + { + let entity_object_builder = EntityObjectBuilder { + context: self.context, + }; + + let object_name = entity_object_builder.type_name::(); + let name = self.type_name(&object_name); + + T::Column::iter().fold(InputObject::new(name), |object, column| { + object.field(InputValue::new( + entity_object_builder.column_name::(column), + TypeRef::named(&self.context.order_by_enum.type_name), + )) + }) + } +} diff --git a/src/inputs/page_input.rs b/src/inputs/page_input.rs new file mode 100644 index 00000000..1109ea44 --- /dev/null +++ b/src/inputs/page_input.rs @@ -0,0 +1,70 @@ +use async_graphql::dynamic::{InputObject, InputValue, ObjectAccessor, TypeRef}; + +use crate::BuilderContext; + 
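// All of the pagination option objects take their type and field names from the context,
// so the generated API can be renamed without touching the builders. A minimal sketch,
// assuming `BuilderContext` and `OffsetInputConfig` are re-exported from the crate root:
use seaography::{BuilderContext, OffsetInputConfig};

fn renamed_offset_context() -> BuilderContext {
    BuilderContext {
        // expose the offset pagination object as `SkipTakeInput { skip, take }`
        offset_input: OffsetInputConfig {
            type_name: "SkipTakeInput".into(),
            offset: "skip".into(),
            limit: "take".into(),
        },
        ..BuilderContext::default()
    }
}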
+/// used to hold information about page pagination +#[derive(Clone, Debug)] +pub struct PageInput { + pub page: u64, + pub limit: u64, +} + +/// The configuration structure for PageInputBuilder +pub struct PageInputConfig { + /// name of the object + pub type_name: String, + /// name for 'page' field + pub page: String, + /// name for 'limit' field + pub limit: String, +} + +impl std::default::Default for PageInputConfig { + fn default() -> Self { + PageInputConfig { + type_name: "PageInput".into(), + page: "page".into(), + limit: "limit".into(), + } + } +} + +/// This builder produces the page pagination options input object +pub struct PageInputBuilder { + pub context: &'static BuilderContext, +} + +impl PageInputBuilder { + /// used to get type name + pub fn type_name(&self) -> String { + self.context.page_input.type_name.clone() + } + + /// used to get page pagination options object + pub fn input_object(&self) -> InputObject { + InputObject::new(&self.context.page_input.type_name) + .field(InputValue::new( + &self.context.page_input.limit, + TypeRef::named_nn(TypeRef::INT), + )) + .field(InputValue::new( + &self.context.page_input.page, + TypeRef::named_nn(TypeRef::INT), + )) + } + + /// used to parse query input to page pagination options struct + pub fn parse_object(&self, object: &ObjectAccessor) -> PageInput { + let page = object + .get(&self.context.page_input.page) + .map_or_else(|| Ok(0), |v| v.u64()) + .unwrap_or(0); + let limit = object + .get(&self.context.page_input.limit) + .unwrap() + .u64() + .unwrap(); + + PageInput { page, limit } + } +} diff --git a/src/inputs/pagination_input.rs b/src/inputs/pagination_input.rs new file mode 100644 index 00000000..67f41231 --- /dev/null +++ b/src/inputs/pagination_input.rs @@ -0,0 +1,106 @@ +use async_graphql::dynamic::{InputObject, InputValue, ObjectAccessor, TypeRef}; + +use crate::{BuilderContext, CursorInputBuilder, OffsetInputBuilder, PageInputBuilder}; + +use super::{CursorInput, OffsetInput, PageInput}; + +/// used to hold information about which pagination +/// strategy will be applied on the query +#[derive(Clone, Debug)] +pub struct PaginationInput { + pub cursor: Option, + pub page: Option, + pub offset: Option, +} + +/// The configuration structure for PaginationInputBuilder +pub struct PaginationInputConfig { + /// name of the object + pub type_name: String, + /// name for 'cursor' field + pub cursor: String, + /// name for 'page' field + pub page: String, + /// name for 'offset' field + pub offset: String, +} + +impl std::default::Default for PaginationInputConfig { + fn default() -> Self { + PaginationInputConfig { + type_name: "PaginationInput".into(), + cursor: "cursor".into(), + page: "page".into(), + offset: "offset".into(), + } + } +} + +pub struct PaginationInputBuilder { + pub context: &'static BuilderContext, +} + +impl PaginationInputBuilder { + /// used to get type name + pub fn type_name(&self) -> String { + self.context.pagination_input.type_name.clone() + } + + /// used to get pagination input object + pub fn input_object(&self) -> InputObject { + InputObject::new(&self.context.pagination_input.type_name) + .field(InputValue::new( + &self.context.pagination_input.cursor, + TypeRef::named(&self.context.cursor_input.type_name), + )) + .field(InputValue::new( + &self.context.pagination_input.page, + TypeRef::named(&self.context.page_input.type_name), + )) + .field(InputValue::new( + &self.context.pagination_input.offset, + TypeRef::named(&self.context.offset_input.type_name), + )) + .oneof() + } + + /// used 
to parse query input to pagination information structure + pub fn parse_object(&self, object: &ObjectAccessor) -> PaginationInput { + let cursor_input_builder = CursorInputBuilder { + context: self.context, + }; + let page_input_builder = PageInputBuilder { + context: self.context, + }; + let offset_input_builder = OffsetInputBuilder { + context: self.context, + }; + + let cursor = if let Some(cursor) = object.get(&self.context.pagination_input.cursor) { + let object = cursor.object().unwrap(); + Some(cursor_input_builder.parse_object(&object)) + } else { + None + }; + + let page = if let Some(page) = object.get(&self.context.pagination_input.page) { + let object = page.object().unwrap(); + Some(page_input_builder.parse_object(&object)) + } else { + None + }; + + let offset = if let Some(offset) = object.get(&self.context.pagination_input.offset) { + let object = offset.object().unwrap(); + Some(offset_input_builder.parse_object(&object)) + } else { + None + }; + + PaginationInput { + cursor, + page, + offset, + } + } +} diff --git a/src/lib.rs b/src/lib.rs index d7298c8b..83ff7697 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -26,9 +26,11 @@ //! ## Features //! //! * Relational query (1-to-1, 1-to-N) -//! * Pagination on query's root entity -//! * Filter with operators (e.g. gt, lt, eq) +//! * Pagination for queries and relations (1-N) +//! * Filtering with operators (e.g. gt, lt, eq) //! * Order by any column +//! * Guard fields, queries or relations +//! * Rename fields //! //! (Right now there is no mutation, but it's on our plan!) //! @@ -37,6 +39,7 @@ //! ### Install //! //! ```sh +//! cargo install sea-orm-cli //! cargo install seaography-cli //! ``` //! @@ -46,7 +49,8 @@ //! //! ```sh //! cd examples/mysql -//! seaography-cli mysql://user:pw@localhost/sakila seaography-mysql-example . +//! sea-orm-cli generate entity -o src/entities -u mysql://user:pw@127.0.0.1/sakila +//! seaography-cli ./ src/entities mysql://user:pw@127.0.0.1/sakila seaography-mysql-example //! cargo run //! ``` //! @@ -56,17 +60,15 @@ //! //! ```graphql //! { -//! film(pagination: { pages: { limit: 10, page: 0 } }, orderBy: { title: ASC }) { +//! film(pagination: { page: { limit: 10, page: 0 } }, orderBy: { title: ASC }) { //! nodes { //! title //! description //! releaseYear -//! filmActor { +//! actor { //! nodes { -//! actor { -//! firstName -//! lastName -//! } +//! firstName +//! lastName //! } //! } //! } @@ -100,15 +102,17 @@ //! { //! customer( //! filters: { active: { eq: 0 } } -//! pagination: { pages: { page: 2, limit: 3 } } +//! pagination: { page: { page: 2, limit: 3 } } //! ) { //! nodes { //! customerId //! lastName //! email //! } -//! pages -//! current +//! paginationInfo { +//! pages +//! current +//! } //! } //! } //! ``` @@ -155,14 +159,16 @@ //! payment( //! filters: { amount: { gt: "7" } } //! orderBy: { amount: ASC } -//! pagination: { pages: { limit: 1, page: 1 } } +//! pagination: { page: { limit: 1, page: 1 } } //! ) { //! nodes { //! paymentId //! amount //! } -//! pages -//! current +//! paginationInfo { +//! pages +//! current +//! } //! pageInfo { //! hasPreviousPage //! hasNextPage @@ -178,13 +184,29 @@ //! } //! ``` //! +//! ### Filter using enumeration +//! ```graphql +//! { +//! film( +//! filters: { rating: { eq: NC17 } } +//! pagination: { page: { page: 1, limit: 5 } } +//! ) { +//! nodes { +//! filmId +//! rating +//! } +//! } +//! } +//! ``` +//! //! ### Postgres //! //! 
Setup the [sakila](https://github.com/SeaQL/seaography/blob/main/examples/postgres/sakila-schema.sql) sample database. //! //! ```sh //! cd examples/postgres -//! seaography-cli postgres://user:pw@localhost/sakila seaography-postgres-example . +//! sea-orm-cli generate entity -o src/entities -u postgres://user:pw@localhost/sakila +//! seaography-cli ./ src/entities postgres://user:pw@localhost/sakila seaography-postgres-example //! cargo run //! ``` //! @@ -192,7 +214,8 @@ //! //! ```sh //! cd examples/sqlite -//! seaography-cli sqlite://sakila.db seaography-sqlite-example . +//! sea-orm-cli generate entity -o src/entities -u sqlite://sakila.db +//! seaography-cli ./ src/entities sqlite://sakila.db seaography-sqlite-example //! cargo run //! ``` //! @@ -204,539 +227,27 @@ pub use heck; pub use itertools; -pub use seaography_derive as macros; - -pub mod type_filter; -pub use type_filter::{FilterTrait, FilterTypeTrait, TypeFilter}; - -#[derive(Debug, Clone, Copy, PartialEq, Eq, async_graphql::Enum)] -pub enum OrderByEnum { - Asc, - Desc, -} - -#[derive(Debug, async_graphql::InputObject)] -pub struct PageInput { - pub limit: u64, - pub page: u64, -} - -#[derive(Debug, async_graphql::InputObject)] -pub struct CursorInput { - pub cursor: Option, - pub limit: u64, -} - -#[derive(async_graphql::OneofObject)] -pub enum Pagination { - Pages(PageInput), - Cursor(CursorInput), -} - -#[derive(async_graphql::SimpleObject)] -pub struct ExtraPaginationFields { - pub pages: Option, - pub current: Option, -} - -#[derive(Debug)] -pub enum DecodeMode { - Type, - Length, - ColonSkip, - Data, -} - -pub fn map_cursor_values(values: Vec) -> sea_orm::sea_query::value::ValueTuple { - if values.len() == 1 { - sea_orm::sea_query::value::ValueTuple::One(values[0].clone()) - } else if values.len() == 2 { - sea_orm::sea_query::value::ValueTuple::Two(values[0].clone(), values[1].clone()) - } else if values.len() == 3 { - sea_orm::sea_query::value::ValueTuple::Three( - values[0].clone(), - values[1].clone(), - values[2].clone(), - ) - } else { - panic!("seaography does not support cursors values with size greater than 3") - } -} - -#[derive(Debug)] -pub struct CursorValues(pub Vec); - -impl async_graphql::types::connection::CursorType for CursorValues { - type Error = String; - - fn decode_cursor(s: &str) -> Result { - let chars = s.chars(); - - let mut values: Vec = vec![]; - - let mut type_indicator = String::new(); - let mut length_indicator = String::new(); - let mut data_buffer = String::new(); - let mut length = -1; - - let mut mode: DecodeMode = DecodeMode::Type; - for char in chars { - match mode { - DecodeMode::Type => { - if char.eq(&'[') { - mode = DecodeMode::Length; - } else if char.eq(&',') { - // SKIP - } else { - type_indicator.push(char); - } - } - DecodeMode::Length => { - if char.eq(&']') { - mode = DecodeMode::ColonSkip; - length = length_indicator.parse::().unwrap(); - } else { - length_indicator.push(char); - } - } - DecodeMode::ColonSkip => { - // skips ':' char - mode = DecodeMode::Data; - } - DecodeMode::Data => { - if length > 0 { - data_buffer.push(char); - length -= 1; - } - - if length <= 0 { - let value: sea_orm::Value = match type_indicator.as_str() { - "TinyInt" => { - if length.eq(&-1) { - sea_orm::Value::TinyInt(None) - } else { - sea_orm::Value::TinyInt(Some( - data_buffer.parse::().unwrap(), - )) - } - } - "SmallInt" => { - if length.eq(&-1) { - sea_orm::Value::SmallInt(None) - } else { - sea_orm::Value::SmallInt(Some( - data_buffer.parse::().unwrap(), - )) - } - } - "Int" => { - if 
length.eq(&-1) { - sea_orm::Value::Int(None) - } else { - sea_orm::Value::Int(Some(data_buffer.parse::().unwrap())) - } - } - "BigInt" => { - if length.eq(&-1) { - sea_orm::Value::BigInt(None) - } else { - sea_orm::Value::BigInt(Some( - data_buffer.parse::().unwrap(), - )) - } - } - "TinyUnsigned" => { - if length.eq(&-1) { - sea_orm::Value::TinyUnsigned(None) - } else { - sea_orm::Value::TinyUnsigned(Some( - data_buffer.parse::().unwrap(), - )) - } - } - "SmallUnsigned" => { - if length.eq(&-1) { - sea_orm::Value::SmallUnsigned(None) - } else { - sea_orm::Value::SmallUnsigned(Some( - data_buffer.parse::().unwrap(), - )) - } - } - "Unsigned" => { - if length.eq(&-1) { - sea_orm::Value::Unsigned(None) - } else { - sea_orm::Value::Unsigned(Some( - data_buffer.parse::().unwrap(), - )) - } - } - "BigUnsigned" => { - if length.eq(&-1) { - sea_orm::Value::BigUnsigned(None) - } else { - sea_orm::Value::BigUnsigned(Some( - data_buffer.parse::().unwrap(), - )) - } - } - "String" => { - if length.eq(&-1) { - sea_orm::Value::String(None) - } else { - sea_orm::Value::String(Some(Box::new( - data_buffer.parse::().unwrap(), - ))) - } - } - #[cfg(feature = "with-uuid")] - "Uuid" => { - if length.eq(&-1) { - sea_orm::Value::Uuid(None) - } else { - sea_orm::Value::Uuid(Some(Box::new( - data_buffer.parse::().unwrap(), - ))) - } - } - _ => { - // FIXME: missing value types - panic!("cannot encode current type") - } - }; - - values.push(value); - - type_indicator = String::new(); - length_indicator = String::new(); - data_buffer = String::new(); - length = -1; - - mode = DecodeMode::Type; - } - } - } - } - - Ok(Self(values)) - } - - fn encode_cursor(&self) -> String { - use itertools::Itertools; - - self.0 - .iter() - .map(|value| -> String { - match value { - sea_orm::Value::TinyInt(value) => { - if let Some(value) = value { - let value = value.to_string(); - format!("TinyInt[{}]:{}", value.len(), value) - } else { - "TinyInt[-1]:".into() - } - } - sea_orm::Value::SmallInt(value) => { - if let Some(value) = value { - let value = value.to_string(); - format!("SmallInt[{}]:{}", value.len(), value) - } else { - "SmallInt[-1]:".into() - } - } - sea_orm::Value::Int(value) => { - if let Some(value) = value { - let value = value.to_string(); - format!("Int[{}]:{}", value.len(), value) - } else { - "Int[-1]:".into() - } - } - sea_orm::Value::BigInt(value) => { - if let Some(value) = value { - let value = value.to_string(); - format!("BigInt[{}]:{}", value.len(), value) - } else { - "BigInt[-1]:".into() - } - } - sea_orm::Value::TinyUnsigned(value) => { - if let Some(value) = value { - let value = value.to_string(); - format!("TinyUnsigned[{}]:{}", value.len(), value) - } else { - "TinyUnsigned[-1]:".into() - } - } - sea_orm::Value::SmallUnsigned(value) => { - if let Some(value) = value { - let value = value.to_string(); - format!("SmallUnsigned[{}]:{}", value.len(), value) - } else { - "SmallUnsigned[-1]:".into() - } - } - sea_orm::Value::Unsigned(value) => { - if let Some(value) = value { - let value = value.to_string(); - format!("Unsigned[{}]:{}", value.len(), value) - } else { - "Unsigned[-1]:".into() - } - } - sea_orm::Value::BigUnsigned(value) => { - if let Some(value) = value { - let value = value.to_string(); - format!("BigUnsigned[{}]:{}", value.len(), value) - } else { - "BigUnsigned[-1]:".into() - } - } - sea_orm::Value::String(value) => { - if let Some(value) = value { - let value = value.as_ref(); - format!("String[{}]:{}", value.len(), value) - } else { - "String[-1]:".into() - } - } - #[cfg(feature = 
"with-uuid")] - sea_orm::Value::Uuid(value) => { - if let Some(value) = value { - let value = value.as_ref().to_string(); - format!("Uuid[{}]:{}", value.len(), value) - } else { - "Uuid[-1]:".into() - } - } - _ => { - // FIXME: missing value types - panic!( - "cannot - current type" - ) - } - } - }) - .join(",") - } -} - -#[derive(Debug, Clone)] -pub struct RelationKeyStruct { - pub val: sea_orm::Value, - pub filter: Option, - pub order_by: Option, -} - -impl PartialEq for RelationKeyStruct { - fn eq(&self, other: &Self) -> bool { - // TODO temporary hack to solve the following problem - // let v1 = TestFK(sea_orm::Value::TinyInt(Some(1))); - // let v2 = TestFK(sea_orm::Value::Int(Some(1))); - // println!("Result: {}", v1.eq(&v2)); - - fn split_at_nth_char(s: &str, p: char, n: usize) -> Option<(&str, &str)> { - s.match_indices(p) - .nth(n) - .map(|(index, _)| s.split_at(index)) - } - - let a = format!("{:?}", self.val); - let b = format!("{:?}", other.val); - - let a = split_at_nth_char(a.as_str(), '(', 1).map(|v| v.1); - let b = split_at_nth_char(b.as_str(), '(', 1).map(|v| v.1); - - a.eq(&b) - } -} - -impl Eq for RelationKeyStruct {} - -impl std::hash::Hash for RelationKeyStruct { - fn hash(&self, state: &mut H) { - // TODO this is a hack - - fn split_at_nth_char(s: &str, p: char, n: usize) -> Option<(&str, &str)> { - s.match_indices(p) - .nth(n) - .map(|(index, _)| s.split_at(index)) - } - - let a = format!("{:?}", self.val); - let a = split_at_nth_char(a.as_str(), '(', 1).map(|v| v.1); - - a.hash(state) - // TODO else do the following - // match self.0 { - // sea_orm::Value::TinyInt(int) => int.unwrap().hash(state), - // sea_orm::Value::SmallInt(int) => int.unwrap().hash(state), - // sea_orm::Value::Int(int) => int.unwrap().hash(state), - // sea_orm::Value::BigInt(int) => int.unwrap().hash(state), - // sea_orm::Value::TinyUnsigned(int) => int.unwrap().hash(state), - // sea_orm::Value::SmallUnsigned(int) => int.unwrap().hash(state), - // sea_orm::Value::Unsigned(int) => int.unwrap().hash(state), - // sea_orm::Value::BigUnsigned(int) => int.unwrap().hash(state), - // sea_orm::Value::String(str) => str.unwrap().hash(state), - // sea_orm::Value::Uuid(uuid) => uuid.unwrap().hash(state), - // _ => format!("{:?}", self.0).hash(state) - // } - } -} - -pub async fn fetch_relation_data( - keys: Vec>, - relation: sea_orm::RelationDef, - reverse: bool, - db: &sea_orm::DatabaseConnection, -) -> std::result::Result< - Vec<( - RelationKeyStruct, - ::Model, - )>, - sea_orm::error::DbErr, -> -where - Entity: sea_orm::EntityTrait + EnhancedEntity, - ::Err: core::fmt::Debug, -{ - use heck::ToSnakeCase; - use sea_orm::prelude::*; - - let filters = if !keys.is_empty() { - keys[0].clone().filter - } else { - None - }; - - let order_by = if !keys.is_empty() { - keys[0].clone().order_by - } else { - None - }; - - let keys: Vec = keys.into_iter().map(|key| key.val).collect(); - - // TODO support multiple columns - let to_column = if reverse { - ::from_str( - relation.from_col.to_string().to_snake_case().as_str(), - ) - .unwrap() - } else { - ::from_str( - relation.to_col.to_string().to_snake_case().as_str(), - ) - .unwrap() - }; - - let stmt = ::find(); - - let filter = sea_orm::Condition::all().add(to_column.is_in(keys)); - - let filter = if let Some(filters) = filters { - filter.add(filters.filter_condition()) - } else { - filter - }; - - let stmt = sea_orm::QueryFilter::filter(stmt, filter); - - let stmt = if let Some(order_by) = order_by { - order_by.order_by(stmt) - } else { - stmt - }; - - let data = 
stmt.all(db).await?.into_iter().map( - |model: ::Model| -> ( - RelationKeyStruct, - ::Model, - ) { - let key = RelationKeyStruct:: { - val: model.get(to_column), - filter: None, - order_by: None, - }; - - (key, model) - }, - ); - - Ok(data.collect()) -} - -pub trait EntityFilter { - fn filter_condition(&self) -> sea_orm::Condition; -} - -pub trait EntityOrderBy -where - Entity: sea_orm::EntityTrait, -{ - fn order_by(&self, stmt: sea_orm::Select) -> sea_orm::Select; -} - -pub trait EnhancedEntity { - type Entity: sea_orm::EntityTrait; - - type Filter: EntityFilter + Clone; - type OrderBy: EntityOrderBy + Clone; -} +pub mod inputs; +pub use inputs::*; -pub fn data_to_connection( - data: Vec, - has_previous_page: bool, - has_next_page: bool, - pages: Option, - current: Option, -) -> async_graphql::types::connection::Connection< - String, - T::Model, - ExtraPaginationFields, - async_graphql::types::connection::EmptyFields, -> -where - T: sea_orm::EntityTrait, - ::Model: async_graphql::OutputType, -{ - use async_graphql::connection::CursorType; - use sea_orm::{Iterable, ModelTrait, PrimaryKeyToColumn}; +pub mod outputs; +pub use outputs::*; - let edges: Vec< - async_graphql::types::connection::Edge< - String, - T::Model, - async_graphql::types::connection::EmptyFields, - >, - > = data - .into_iter() - .map(|node| { - let values: Vec = T::PrimaryKey::iter() - .map(|variant| node.get(variant.into_column())) - .collect(); +pub mod enumerations; +pub use enumerations::*; - let cursor_string = CursorValues(values).encode_cursor(); +pub mod utilities; +pub use utilities::*; - async_graphql::types::connection::Edge::new(cursor_string, node) - }) - .collect(); +pub mod query; +pub use query::*; - let mut result = async_graphql::types::connection::Connection::< - String, - T::Model, - ExtraPaginationFields, - async_graphql::types::connection::EmptyFields, - >::with_additional_fields( - has_previous_page, - has_next_page, - ExtraPaginationFields { pages, current }, - ); +pub mod builder_context; +pub use builder_context::*; - result.edges.extend(edges); +pub mod builder; +pub use builder::*; - result -} +pub type SimpleNamingFn = Box String + Sync + Send>; +pub type ComplexNamingFn = Box String + Sync + Send>; diff --git a/src/outputs/connection_object.rs b/src/outputs/connection_object.rs new file mode 100644 index 00000000..a01b16de --- /dev/null +++ b/src/outputs/connection_object.rs @@ -0,0 +1,136 @@ +use async_graphql::dynamic::{Field, FieldFuture, FieldValue, Object, TypeRef}; +use sea_orm::EntityTrait; + +use crate::{ + BuilderContext, Edge, EdgeObjectBuilder, EntityObjectBuilder, PageInfo, PaginationInfo, +}; + +/// used to represent a GraphQL Connection node for any Type +#[derive(Clone, Debug)] +pub struct Connection +where + T: EntityTrait, + ::Model: Sync, +{ + /// cursor pagination info + pub page_info: PageInfo, + + /// pagination info + pub pagination_info: Option, + + /// vector of data vector + pub edges: Vec>, +} + +/// The configuration structure for ConnectionObjectBuilder +pub struct ConnectionObjectConfig { + /// used to format the type name of the object + pub type_name: crate::SimpleNamingFn, + /// name for 'pageInfo' field + pub page_info: String, + /// name for 'paginationInfo' field + pub pagination_info: String, + /// name for 'edges' field + pub edges: String, + /// name for 'nodes' field + pub nodes: String, +} + +impl std::default::Default for ConnectionObjectConfig { + fn default() -> Self { + ConnectionObjectConfig { + type_name: Box::new(|object_name: &str| -> String { + 
format!("{}Connection", object_name) + }), + page_info: "pageInfo".into(), + pagination_info: "paginationInfo".into(), + edges: "edges".into(), + nodes: "nodes".into(), + } + } +} + +/// This builder produces the Connection object for a SeaORM entity +pub struct ConnectionObjectBuilder { + pub context: &'static BuilderContext, +} + +impl ConnectionObjectBuilder { + /// used to get type name + pub fn type_name(&self, object_name: &str) -> String { + self.context.connection_object.type_name.as_ref()(object_name) + } + + /// used to get the Connection object for a SeaORM entity + pub fn to_object(&self) -> Object + where + T: EntityTrait, + ::Model: Sync, + { + let edge_object_builder = EdgeObjectBuilder { + context: self.context, + }; + let entity_object_builder = EntityObjectBuilder { + context: self.context, + }; + let object_name = entity_object_builder.type_name::(); + let name = self.type_name(&object_name); + + Object::new(name) + .field(Field::new( + &self.context.connection_object.page_info, + TypeRef::named_nn(&self.context.page_info_object.type_name), + |ctx| { + FieldFuture::new(async move { + let connection = ctx.parent_value.try_downcast_ref::>()?; + Ok(Some(FieldValue::borrowed_any(&connection.page_info))) + }) + }, + )) + .field(Field::new( + &self.context.connection_object.pagination_info, + TypeRef::named(&self.context.pagination_info_object.type_name), + |ctx| { + FieldFuture::new(async move { + let connection = ctx.parent_value.try_downcast_ref::>()?; + if let Some(value) = connection + .pagination_info + .as_ref() + .map(|v| FieldValue::borrowed_any(v)) + { + Ok(Some(value)) + } else { + Ok(FieldValue::NONE) + } + }) + }, + )) + .field(Field::new( + &self.context.connection_object.nodes, + TypeRef::named_nn_list_nn(&object_name), + |ctx| { + FieldFuture::new(async move { + let connection = ctx.parent_value.try_downcast_ref::>()?; + Ok(Some(FieldValue::list(connection.edges.iter().map( + |edge: &Edge| FieldValue::borrowed_any(&edge.node), + )))) + }) + }, + )) + .field(Field::new( + &self.context.connection_object.edges, + TypeRef::named_nn_list_nn(edge_object_builder.type_name(&object_name)), + |ctx| { + FieldFuture::new(async move { + let connection = ctx.parent_value.try_downcast_ref::>()?; + Ok(Some(FieldValue::list( + connection + .edges + .iter() + .map(|edge: &Edge| FieldValue::borrowed_any(edge)), + ))) + }) + }, + )) + } +} diff --git a/src/outputs/edge_object.rs b/src/outputs/edge_object.rs new file mode 100644 index 00000000..358ac749 --- /dev/null +++ b/src/outputs/edge_object.rs @@ -0,0 +1,85 @@ +use async_graphql::dynamic::{Field, FieldFuture, FieldValue, Object, TypeRef}; +use async_graphql::Value; +use sea_orm::EntityTrait; + +use crate::{BuilderContext, EntityObjectBuilder}; +/// used to represent a data Edge for GraphQL pagination +#[derive(Clone, Debug)] +pub struct Edge +where + T: EntityTrait, + ::Model: Sync, +{ + /// cursor string + pub cursor: String, + + /// data + pub node: T::Model, +} + +/// The configuration structure for EdgeObjectBuilder +pub struct EdgeObjectConfig { + /// used to format the type name of the object + pub type_name: crate::SimpleNamingFn, + /// name for 'cursor' field + pub cursor: String, + /// name for 'node' field + pub node: String, +} + +impl std::default::Default for EdgeObjectConfig { + fn default() -> EdgeObjectConfig { + EdgeObjectConfig { + type_name: Box::new(|object_name: &str| -> String { format!("{}Edge", object_name) }), + cursor: "cursor".into(), + node: "node".into(), + } + } +} + +/// This builder produces 
the Node object for a SeaORM entity +pub struct EdgeObjectBuilder { + pub context: &'static BuilderContext, +} + +impl EdgeObjectBuilder { + /// used to get type name + pub fn type_name(&self, object_name: &str) -> String { + self.context.edge_object.type_name.as_ref()(object_name) + } + + /// used to get the Node object for a SeaORM entity + pub fn to_object(&self) -> Object + where + T: EntityTrait, + ::Model: Sync, + { + let entity_object_builder = EntityObjectBuilder { + context: self.context, + }; + let object_name = entity_object_builder.type_name::(); + let name = self.type_name(&object_name); + + Object::new(name) + .field(Field::new( + &self.context.edge_object.cursor, + TypeRef::named_nn(TypeRef::STRING), + |ctx| { + FieldFuture::new(async move { + let edge = ctx.parent_value.try_downcast_ref::>()?; + Ok(Some(Value::from(edge.cursor.as_str()))) + }) + }, + )) + .field(Field::new( + &self.context.edge_object.node, + TypeRef::named_nn(object_name), + |ctx| { + FieldFuture::new(async move { + let edge = ctx.parent_value.try_downcast_ref::>()?; + Ok(Some(FieldValue::borrowed_any(&edge.node))) + }) + }, + )) + } +} diff --git a/src/outputs/entity_object.rs b/src/outputs/entity_object.rs new file mode 100644 index 00000000..5e16d10c --- /dev/null +++ b/src/outputs/entity_object.rs @@ -0,0 +1,460 @@ +use async_graphql::dynamic::{Field, FieldFuture, Object, TypeRef}; +use async_graphql::{Error, Value}; +use heck::{ToLowerCamelCase, ToUpperCamelCase}; +use sea_orm::{ColumnTrait, ColumnType, EntityName, EntityTrait, IdenStatic, Iterable, ModelTrait}; + +/// The configuration structure for EntityObjectBuilder +pub struct EntityObjectConfig { + /// used to format the type name of the object + pub type_name: crate::SimpleNamingFn, + /// used to format the name for the query field of the object + pub query_entity_name: crate::SimpleNamingFn, + /// used to format the name of column fields + pub column_name: crate::ComplexNamingFn, +} + +impl std::default::Default for EntityObjectConfig { + fn default() -> Self { + Self { + type_name: Box::new(|entity_name: &str| -> String { + entity_name.to_upper_camel_case() + }), + query_entity_name: Box::new(|entity_name: &str| -> String { + entity_name.to_lower_camel_case() + }), + column_name: Box::new(|_entity_name: &str, column_name: &str| -> String { + column_name.to_lower_camel_case() + }), + } + } +} + +use crate::{ActiveEnumBuilder, BuilderContext}; + +/// This builder produces the GraphQL object of a SeaORM entity +pub struct EntityObjectBuilder { + pub context: &'static BuilderContext, +} + +impl EntityObjectBuilder { + /// used to get type name + pub fn type_name(&self) -> String + where + T: EntityTrait, + ::Model: Sync, + { + let name: String = ::table_name(&T::default()).into(); + self.context.entity_object.type_name.as_ref()(&name) + } + + /// used to get query field name of entity + pub fn query_entity_name(&self) -> String + where + T: EntityTrait, + ::Model: Sync, + { + let name: String = ::table_name(&T::default()).into(); + self.context.entity_object.query_entity_name.as_ref()(&name) + } + + /// used to get column field name of entity column + pub fn column_name(&self, column: T::Column) -> String + where + T: EntityTrait, + ::Model: Sync, + { + let entity_name = self.type_name::(); + let column_name: String = column.as_str().into(); + self.context.entity_object.column_name.as_ref()(&entity_name, &column_name) + } + + /// used to get the GraphQL object of a SeaORM entity + pub fn to_object(&self) -> Object + where + T: EntityTrait, + 
::Model: Sync, + { + let object_name = self.type_name::(); + let active_enum_builder = ActiveEnumBuilder { + context: self.context, + }; + + T::Column::iter().fold(Object::new(&object_name), |object, column: T::Column| { + let column_name = self.column_name::(column); + + let column_def = column.def(); + + // map column type to GraphQL type + let type_name: Option = match &column_def.get_column_type() { + ColumnType::Char(_) | ColumnType::String(_) | ColumnType::Text => { + Some(TypeRef::STRING.into()) + } + ColumnType::TinyInteger + | ColumnType::SmallInteger + | ColumnType::Integer + | ColumnType::BigInteger + | ColumnType::TinyUnsigned + | ColumnType::SmallUnsigned + | ColumnType::Unsigned + | ColumnType::BigUnsigned => Some(TypeRef::INT.into()), + ColumnType::Float | ColumnType::Double => Some(TypeRef::FLOAT.into()), + ColumnType::Decimal(_) => Some(TypeRef::STRING.into()), + ColumnType::DateTime + | ColumnType::Timestamp + | ColumnType::TimestampWithTimeZone + | ColumnType::Time + | ColumnType::Date => Some(TypeRef::STRING.into()), + ColumnType::Year(_) => Some(TypeRef::INT.into()), + ColumnType::Interval(_, _) => Some(TypeRef::STRING.into()), + ColumnType::Binary(_) + | ColumnType::VarBinary(_) + | ColumnType::VarBit(_) + | ColumnType::Bit(_) => Some(TypeRef::STRING.into()), + ColumnType::Boolean => Some(TypeRef::BOOLEAN.into()), + ColumnType::Money(_) => Some(TypeRef::STRING.into()), + // FIXME: json type + ColumnType::Json | ColumnType::JsonBinary => Some(TypeRef::STRING.into()), + ColumnType::Uuid => Some(TypeRef::STRING.into()), + // FIXME: research what type is behind the custom type + ColumnType::Custom(_) => Some(TypeRef::STRING.into()), + ColumnType::Enum { name, variants: _ } => { + Some(active_enum_builder.type_name_from_iden(name)) + } + // FIXME: array type + // ColumnType::Array(_) => Some(TypeRef::STRING.into()) + ColumnType::Cidr | ColumnType::Inet | ColumnType::MacAddr => { + Some(TypeRef::STRING.into()) + } + _ => None, + }; + + let type_name = if let Some(type_name) = type_name { + type_name + } else { + return object; + }; + + // map if field is nullable + let graphql_type = if column_def.is_null() { + TypeRef::named(type_name) + } else { + TypeRef::named_nn(type_name) + }; + + let is_enum = matches!( + column_def.get_column_type(), + ColumnType::Enum { + name: _, + variants: _ + } + ); + + let guard = self + .context + .guards + .field_guards + .get(&format!("{}.{}", &object_name, &column_name)); + + // convert SeaQL value to GraphQL value + let field = Field::new(column_name, graphql_type, move |ctx| { + let guard_flag = if let Some(guard) = guard { + (*guard)(&ctx) + } else { + false + }; + + if guard_flag { + return FieldFuture::new(async move { + if guard_flag { + Err(Error::new("Field guard triggered.")) + } else { + Ok(Some(Value::from(false))) + } + }); + } + + let object = ctx + .parent_value + .try_downcast_ref::() + .expect("Something went wrong when trying to downcast entity object."); + + match object.get(column) { + sea_orm::sea_query::Value::Bool(value) => FieldFuture::new(async move { + match value { + Some(value) => Ok(Some(Value::from(value.to_string()))), + None => Ok(None), + } + }), + sea_orm::sea_query::Value::TinyInt(value) => FieldFuture::new(async move { + match value { + Some(value) => Ok(Some(Value::from(value))), + None => Ok(None), + } + }), + sea_orm::sea_query::Value::SmallInt(value) => FieldFuture::new(async move { + match value { + Some(value) => Ok(Some(Value::from(value))), + None => Ok(None), + } + }), + 
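// The remaining arms below all follow the same conversion shape: a populated
// SeaORM value is wrapped into an `async_graphql::Value`, while a SQL NULL
// (`None`) comes back as a GraphQL null. Note that the wider integer widths
// (Int, BigInt and the unsigned variants) are still surfaced through the
// GraphQL `Int` scalar, matching the column-type mapping performed above.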
sea_orm::sea_query::Value::Int(value) => FieldFuture::new(async move { + match value { + Some(value) => Ok(Some(Value::from(value))), + None => Ok(None), + } + }), + sea_orm::sea_query::Value::BigInt(value) => FieldFuture::new(async move { + match value { + Some(value) => Ok(Some(Value::from(value))), + None => Ok(None), + } + }), + sea_orm::sea_query::Value::TinyUnsigned(value) => { + FieldFuture::new(async move { + match value { + Some(value) => Ok(Some(Value::from(value))), + None => Ok(None), + } + }) + } + sea_orm::sea_query::Value::SmallUnsigned(value) => { + FieldFuture::new(async move { + match value { + Some(value) => Ok(Some(Value::from(value))), + None => Ok(None), + } + }) + } + sea_orm::sea_query::Value::Unsigned(value) => FieldFuture::new(async move { + match value { + Some(value) => Ok(Some(Value::from(value))), + None => Ok(None), + } + }), + sea_orm::sea_query::Value::BigUnsigned(value) => FieldFuture::new(async move { + match value { + Some(value) => Ok(Some(Value::from(value))), + None => Ok(None), + } + }), + sea_orm::sea_query::Value::Float(value) => FieldFuture::new(async move { + match value { + Some(value) => Ok(Some(Value::from(value))), + None => Ok(None), + } + }), + sea_orm::sea_query::Value::Double(value) => FieldFuture::new(async move { + match value { + Some(value) => Ok(Some(Value::from(value))), + None => Ok(None), + } + }), + sea_orm::sea_query::Value::String(value) => FieldFuture::new(async move { + match value { + Some(value) => { + if is_enum { + Ok(Some(Value::from( + value.as_str().to_upper_camel_case().to_ascii_uppercase(), + ))) + } else { + Ok(Some(Value::from(value.as_str()))) + } + } + None => Ok(None), + } + }), + sea_orm::sea_query::Value::Char(value) => FieldFuture::new(async move { + match value { + Some(value) => Ok(Some(Value::from(value.to_string()))), + None => Ok(None), + } + }), + #[allow(clippy::box_collection)] + sea_orm::sea_query::Value::Bytes(value) => FieldFuture::new(async move { + match value { + Some(value) => Ok(Some(Value::from(String::from_utf8_lossy(&value)))), + None => Ok(None), + } + }), + #[cfg(feature = "with-json")] + #[cfg_attr(docsrs, doc(cfg(feature = "with-json")))] + sea_orm::sea_query::Value::Json(value) => FieldFuture::new(async move { + match value { + Some(value) => Ok(Some(Value::from(value.to_string()))), + None => Ok(None), + } + }), + + #[cfg(feature = "with-chrono")] + #[cfg_attr(docsrs, doc(cfg(feature = "with-chrono")))] + sea_orm::sea_query::Value::ChronoDate(value) => FieldFuture::new(async move { + match value { + Some(value) => Ok(Some(Value::from(value.to_string()))), + None => Ok(None), + } + }), + + #[cfg(feature = "with-chrono")] + #[cfg_attr(docsrs, doc(cfg(feature = "with-chrono")))] + sea_orm::sea_query::Value::ChronoTime(value) => FieldFuture::new(async move { + match value { + Some(value) => Ok(Some(Value::from(value.to_string()))), + None => Ok(None), + } + }), + + #[cfg(feature = "with-chrono")] + #[cfg_attr(docsrs, doc(cfg(feature = "with-chrono")))] + sea_orm::sea_query::Value::ChronoDateTime(value) => { + FieldFuture::new(async move { + match value { + Some(value) => Ok(Some(Value::from(value.to_string()))), + None => Ok(None), + } + }) + } + + #[cfg(feature = "with-chrono")] + #[cfg_attr(docsrs, doc(cfg(feature = "with-chrono")))] + sea_orm::sea_query::Value::ChronoDateTimeUtc(value) => { + FieldFuture::new(async move { + match value { + Some(value) => Ok(Some(Value::from(value.to_string()))), + None => Ok(None), + } + }) + } + + #[cfg(feature = "with-chrono")] + #[cfg_attr(docsrs, 
doc(cfg(feature = "with-chrono")))] + sea_orm::sea_query::Value::ChronoDateTimeLocal(value) => { + FieldFuture::new(async move { + match value { + Some(value) => Ok(Some(Value::from(value.to_string()))), + None => Ok(None), + } + }) + } + + #[cfg(feature = "with-chrono")] + #[cfg_attr(docsrs, doc(cfg(feature = "with-chrono")))] + sea_orm::sea_query::Value::ChronoDateTimeWithTimeZone(value) => { + FieldFuture::new(async move { + match value { + Some(value) => Ok(Some(Value::from(value.to_string()))), + None => Ok(None), + } + }) + } + + #[cfg(feature = "with-time")] + #[cfg_attr(docsrs, doc(cfg(feature = "with-time")))] + sea_orm::sea_query::Value::TimeDate(value) => FieldFuture::new(async move { + match value { + Some(value) => Ok(Some(Value::from(value.to_string()))), + None => Ok(None), + } + }), + + #[cfg(feature = "with-time")] + #[cfg_attr(docsrs, doc(cfg(feature = "with-time")))] + sea_orm::sea_query::Value::TimeTime(value) => FieldFuture::new(async move { + match value { + Some(value) => Ok(Some(Value::from(value.to_string()))), + None => Ok(None), + } + }), + + #[cfg(feature = "with-time")] + #[cfg_attr(docsrs, doc(cfg(feature = "with-time")))] + sea_orm::sea_query::Value::TimeDateTime(value) => { + FieldFuture::new(async move { + match value { + Some(value) => Ok(Some(Value::from(value.to_string()))), + None => Ok(None), + } + }) + } + + #[cfg(feature = "with-time")] + #[cfg_attr(docsrs, doc(cfg(feature = "with-time")))] + sea_orm::sea_query::Value::TimeDateTimeWithTimeZone(value) => { + FieldFuture::new(async move { + match value { + Some(value) => Ok(Some(Value::from(value.to_string()))), + None => Ok(None), + } + }) + } + + #[cfg(feature = "with-uuid")] + #[cfg_attr(docsrs, doc(cfg(feature = "with-uuid")))] + sea_orm::sea_query::Value::Uuid(value) => FieldFuture::new(async move { + match value { + Some(value) => Ok(Some(Value::from(value.to_string()))), + None => Ok(None), + } + }), + + #[cfg(feature = "with-decimal")] + #[cfg_attr(docsrs, doc(cfg(feature = "with-decimal")))] + sea_orm::sea_query::Value::Decimal(value) => FieldFuture::new(async move { + match value { + Some(value) => Ok(Some(Value::from(value.to_string()))), + None => Ok(None), + } + }), + + #[cfg(feature = "with-bigdecimal")] + #[cfg_attr(docsrs, doc(cfg(feature = "with-bigdecimal")))] + sea_orm::sea_query::Value::BigDecimal(value) => FieldFuture::new(async move { + match value { + Some(value) => Ok(Some(Value::from(value.to_string()))), + None => Ok(None), + } + }), + + #[cfg(feature = "postgres-array")] + #[cfg_attr(docsrs, doc(cfg(feature = "postgres-array")))] + sea_orm::sea_query::Value::Array(array_type, value) => { + FieldFuture::new(async move { + // FIXME: array type + match value { + Some(value) => Ok(Some(Value::from(value.to_string()))), + None => Ok(None), + } + }) + } + + #[cfg(feature = "with-ipnetwork")] + #[cfg_attr(docsrs, doc(cfg(feature = "with-ipnetwork")))] + sea_orm::sea_query::Value::IpNetwork(value) => { + FieldFuture::new(async move { + // FIXME: ipnet type + match value { + Some(value) => Ok(Some(Value::from(value.to_string()))), + None => Ok(None), + } + }) + } + + #[cfg(feature = "with-mac_address")] + #[cfg_attr(docsrs, doc(cfg(feature = "with-mac_address")))] + sea_orm::sea_query::Value::MacAddress(value) => { + FieldFuture::new(async move { + // FIXME: mac type + match value { + Some(value) => Ok(Some(Value::from(value.to_string()))), + None => Ok(None), + } + }) + } + #[allow(unreachable_patterns)] + _ => todo!(), + } + }); + + object.field(field) + }) + } +} diff --git 
a/src/outputs/mod.rs b/src/outputs/mod.rs new file mode 100644 index 00000000..e46117c9 --- /dev/null +++ b/src/outputs/mod.rs @@ -0,0 +1,14 @@ +pub mod page_info_object; +pub use page_info_object::*; + +pub mod pagination_info_object; +pub use pagination_info_object::*; + +pub mod edge_object; +pub use edge_object::*; + +pub mod connection_object; +pub use connection_object::*; + +pub mod entity_object; +pub use entity_object::*; diff --git a/src/outputs/page_info_object.rs b/src/outputs/page_info_object.rs new file mode 100644 index 00000000..02386c9b --- /dev/null +++ b/src/outputs/page_info_object.rs @@ -0,0 +1,108 @@ +use async_graphql::dynamic::{Field, FieldFuture, Object, TypeRef}; +use async_graphql::Value; + +use crate::BuilderContext; + +/// used to hold pages pagination info +#[derive(Clone, Debug)] +pub struct PageInfo { + pub has_previous_page: bool, + pub has_next_page: bool, + pub start_cursor: Option, + pub end_cursor: Option, +} + +/// The configuration structure for PageInfoObjectBuilder +pub struct PageInfoObjectConfig { + /// type name + pub type_name: String, + /// name for 'hasPreviousPage' field + pub has_previous_page: String, + /// name for 'hasNextPage' field + pub has_next_page: String, + /// name for 'startCursor' field + pub start_cursor: String, + /// name for 'endCursor' field + pub end_cursor: String, +} + +impl std::default::Default for PageInfoObjectConfig { + fn default() -> Self { + PageInfoObjectConfig { + type_name: "PageInfo".into(), + has_previous_page: "hasPreviousPage".into(), + has_next_page: "hasNextPage".into(), + start_cursor: "startCursor".into(), + end_cursor: "endCursor".into(), + } + } +} + +/// This builder produces the PageInfo object +/// that contains cursor pagination information +/// for a query +pub struct PageInfoObjectBuilder { + pub context: &'static BuilderContext, +} + +impl PageInfoObjectBuilder { + /// used to get type name + pub fn type_name(&self) -> String { + self.context.page_info_object.type_name.clone() + } + + /// used to get GraphQL object for PageInfo + pub fn to_object(&self) -> Object { + Object::new(&self.context.page_info_object.type_name) + .field(Field::new( + &self.context.page_info_object.has_previous_page, + TypeRef::named_nn(TypeRef::BOOLEAN), + |ctx| { + FieldFuture::new(async move { + let cursor_page_info = ctx.parent_value.try_downcast_ref::()?; + Ok(Some(Value::from(cursor_page_info.has_previous_page))) + }) + }, + )) + .field(Field::new( + &self.context.page_info_object.has_next_page, + TypeRef::named_nn(TypeRef::BOOLEAN), + |ctx| { + FieldFuture::new(async move { + let cursor_page_info = ctx.parent_value.try_downcast_ref::()?; + Ok(Some(Value::from(cursor_page_info.has_next_page))) + }) + }, + )) + .field(Field::new( + &self.context.page_info_object.start_cursor, + TypeRef::named(TypeRef::STRING), + |ctx| { + FieldFuture::new(async move { + let cursor_page_info = ctx.parent_value.try_downcast_ref::()?; + let value = cursor_page_info + .start_cursor + .as_ref() + .map(|v| Value::from(v.as_str())) + .or_else(|| Some(Value::Null)); + Ok(value) + }) + }, + )) + .field(Field::new( + &self.context.page_info_object.end_cursor, + TypeRef::named(TypeRef::STRING), + |ctx| { + FieldFuture::new(async move { + let cursor_page_info = ctx.parent_value.try_downcast_ref::()?; + let value = cursor_page_info + .end_cursor + .as_ref() + .map(|v| Value::from(v.as_str())) + .or_else(|| Some(Value::Null)); + Ok(value) + }) + }, + )) + } +} diff --git a/src/outputs/pagination_info_object.rs 
b/src/outputs/pagination_info_object.rs new file mode 100644 index 00000000..e69da5ac --- /dev/null +++ b/src/outputs/pagination_info_object.rs @@ -0,0 +1,102 @@ +use async_graphql::dynamic::{Field, FieldFuture, Object, TypeRef}; +use async_graphql::Value; + +use crate::BuilderContext; + +/// used to hold offset pagination info +#[derive(Clone, Debug)] +pub struct PaginationInfo { + pub pages: u64, + pub current: u64, + pub offset: u64, + pub total: u64, +} + +/// The configuration structure for PaginationInfoObjectBuilder +pub struct PaginationInfoObjectConfig { + /// type name + pub type_name: String, + /// name for 'pages' field + pub pages: String, + /// name for 'current' field + pub current: String, + /// name for 'offset' field + pub offset: String, + /// name for 'total' field + pub total: String, +} + +impl std::default::Default for PaginationInfoObjectConfig { + fn default() -> Self { + PaginationInfoObjectConfig { + type_name: "PaginationInfo".into(), + pages: "pages".into(), + current: "current".into(), + offset: "offset".into(), + total: "total".into(), + } + } +} + +/// This builder produces the PaginationInfo object +/// that contains page/offset pagination information +/// for a query +pub struct PaginationInfoObjectBuilder { + pub context: &'static BuilderContext, +} + +impl PaginationInfoObjectBuilder { + /// used to get type name + pub fn type_name(&self) -> String { + self.context.pagination_info_object.type_name.clone() + } + + /// used to get GraphQL object for PaginationInfo + pub fn to_object(&self) -> Object { + Object::new(&self.context.pagination_info_object.type_name) + .field(Field::new( + &self.context.pagination_info_object.pages, + TypeRef::named_nn(TypeRef::INT), + |ctx| { + FieldFuture::new(async move { + let pagination_page_info = + ctx.parent_value.try_downcast_ref::()?; + Ok(Some(Value::from(pagination_page_info.pages))) + }) + }, + )) + .field(Field::new( + &self.context.pagination_info_object.current, + TypeRef::named_nn(TypeRef::INT), + |ctx| { + FieldFuture::new(async move { + let pagination_page_info = + ctx.parent_value.try_downcast_ref::()?; + Ok(Some(Value::from(pagination_page_info.current))) + }) + }, + )) + .field(Field::new( + &self.context.pagination_info_object.offset, + TypeRef::named_nn(TypeRef::INT), + |ctx| { + FieldFuture::new(async move { + let pagination_page_info = + ctx.parent_value.try_downcast_ref::()?; + Ok(Some(Value::from(pagination_page_info.current))) + }) + }, + )) + .field(Field::new( + &self.context.pagination_info_object.total, + TypeRef::named_nn(TypeRef::INT), + |ctx| { + FieldFuture::new(async move { + let pagination_page_info = + ctx.parent_value.try_downcast_ref::()?; + Ok(Some(Value::from(pagination_page_info.current))) + }) + }, + )) + } +} diff --git a/src/query/entity_object_relation.rs b/src/query/entity_object_relation.rs new file mode 100644 index 00000000..ea701bb2 --- /dev/null +++ b/src/query/entity_object_relation.rs @@ -0,0 +1,162 @@ +use async_graphql::{ + dynamic::{Field, FieldFuture, FieldValue, InputValue, TypeRef}, + Error, +}; +use heck::ToSnakeCase; +use sea_orm::{ + ColumnTrait, Condition, DatabaseConnection, EntityTrait, Iden, ModelTrait, QueryFilter, + RelationDef, +}; + +use crate::{ + apply_order, apply_pagination, get_filter_conditions, BuilderContext, ConnectionObjectBuilder, + EntityObjectBuilder, FilterInputBuilder, OrderInputBuilder, +}; + +/// This builder produces a GraphQL field for an SeaORM entity relationship +/// that can be added to the entity object +pub struct 
EntityObjectRelationBuilder { + pub context: &'static BuilderContext, +} + +impl EntityObjectRelationBuilder { + /// used to get a GraphQL field for an SeaORM entity relationship + pub fn get_relation(&self, name: &str, relation_definition: RelationDef) -> Field + where + T: EntityTrait, + ::Model: Sync, + <::Column as std::str::FromStr>::Err: core::fmt::Debug, + R: EntityTrait, + ::Model: Sync, + <::Column as std::str::FromStr>::Err: core::fmt::Debug, + { + let context: &'static BuilderContext = self.context; + let entity_object_builder = EntityObjectBuilder { context }; + let connection_object_builder = ConnectionObjectBuilder { context }; + let filter_input_builder = FilterInputBuilder { context }; + let order_input_builder = OrderInputBuilder { context }; + + let object_name: String = entity_object_builder.type_name::(); + let guard = self.context.guards.entity_guards.get(&object_name); + + let from_col = ::from_str( + relation_definition + .from_col + .to_string() + .to_snake_case() + .as_str(), + ) + .unwrap(); + + let to_col = ::from_str( + relation_definition + .to_col + .to_string() + .to_snake_case() + .as_str(), + ) + .unwrap(); + + let field = match relation_definition.is_owner { + false => { + Field::new(name, TypeRef::named(&object_name), move |ctx| { + // FIXME: optimize with dataloader + FieldFuture::new(async move { + let guard_flag = if let Some(guard) = guard { + (*guard)(&ctx) + } else { + false + }; + + if guard_flag { + return Err(Error::new("Entity guard triggered.")); + } + + let parent: &T::Model = ctx + .parent_value + .try_downcast_ref::() + .expect("Parent should exist"); + + let stmt = R::find(); + + let filter = Condition::all().add(to_col.eq(parent.get(from_col))); + + let stmt = stmt.filter(filter); + + let db = ctx.data::()?; + + let data = stmt.one(db).await?; + + if let Some(data) = data { + Ok(Some(FieldValue::owned_any(data))) + } else { + Ok(None) + } + }) + }) + } + true => Field::new( + name, + TypeRef::named_nn(connection_object_builder.type_name(&object_name)), + move |ctx| { + let context: &'static BuilderContext = context; + FieldFuture::new(async move { + let guard_flag = if let Some(guard) = guard { + (*guard)(&ctx) + } else { + false + }; + + if guard_flag { + return Err(Error::new("Entity guard triggered.")); + } + + // FIXME: optimize union queries + // NOTE: each has unique query in order to apply pagination... 
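// Resolution of the owning (to-many) side, step by step:
//   1. downcast the resolver's parent value back to the source `T::Model`;
//   2. seed the condition with `to_col = parent.get(from_col)` so only rows
//      related to this parent are selected;
//   3. fold in the optional `filters`, `orderBy` and `pagination` arguments,
//      reusing the same helpers as a top-level entity query;
//   4. return the result as a `Connection<R>`, so nodes/edges/pageInfo are
//      available on the relation exactly as in the rustdoc examples above
//      (e.g. `film { nodes { actor(...) { nodes { firstName } } } }`).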
+ let parent: &T::Model = ctx + .parent_value + .try_downcast_ref::() + .expect("Parent should exist"); + + let stmt = R::find(); + + let condition = Condition::all().add(to_col.eq(parent.get(from_col))); + + let filters = ctx.args.get(&context.entity_query_field.filters); + let order_by = ctx.args.get(&context.entity_query_field.order_by); + let pagination = ctx.args.get(&context.entity_query_field.pagination); + + let base_condition = get_filter_conditions::(context, filters); + + let stmt = stmt.filter(condition.add(base_condition)); + let stmt = apply_order(context, stmt, order_by); + + let db = ctx.data::()?; + + let connection = + apply_pagination::(context, db, stmt, pagination).await?; + + Ok(Some(FieldValue::owned_any(connection))) + }) + }, + ), + }; + + match relation_definition.is_owner { + false => field, + true => field + .argument(InputValue::new( + &context.entity_query_field.filters, + TypeRef::named(filter_input_builder.type_name(&object_name)), + )) + .argument(InputValue::new( + &context.entity_query_field.order_by, + TypeRef::named(order_input_builder.type_name(&object_name)), + )) + .argument(InputValue::new( + &context.entity_query_field.pagination, + TypeRef::named(&context.pagination_input.type_name), + )), + } + } +} diff --git a/src/query/entity_object_via_relation.rs b/src/query/entity_object_via_relation.rs new file mode 100644 index 00000000..6cddcdba --- /dev/null +++ b/src/query/entity_object_via_relation.rs @@ -0,0 +1,175 @@ +use async_graphql::{ + dynamic::{Field, FieldFuture, FieldValue, InputValue, TypeRef}, + Error, +}; +use heck::ToSnakeCase; +use sea_orm::{ + ColumnTrait, Condition, DatabaseConnection, EntityTrait, Iden, ModelTrait, QueryFilter, Related, +}; + +use crate::{ + apply_order, apply_pagination, get_filter_conditions, BuilderContext, ConnectionObjectBuilder, + EntityObjectBuilder, FilterInputBuilder, OrderInputBuilder, +}; + +/// This builder produces a GraphQL field for an SeaORM entity related trait +/// that can be added to the entity object +pub struct EntityObjectViaRelationBuilder { + pub context: &'static BuilderContext, +} + +impl EntityObjectViaRelationBuilder { + /// used to get a GraphQL field for an SeaORM entity related trait + pub fn get_relation(&self, name: &str) -> Field + where + T: Related, + T: EntityTrait, + R: EntityTrait, + ::Model: Sync, + ::Model: Sync, + <::Column as std::str::FromStr>::Err: core::fmt::Debug, + <::Column as std::str::FromStr>::Err: core::fmt::Debug, + { + let context: &'static BuilderContext = self.context; + let to_relation_definition = >::to(); + let via_relation_definition = >::via().expect( + "We expect this function to be used with Related that has `via` method implemented!", + ); + + let entity_object_builder = EntityObjectBuilder { context }; + let connection_object_builder = ConnectionObjectBuilder { context }; + let filter_input_builder = FilterInputBuilder { context }; + let order_input_builder = OrderInputBuilder { context }; + + let object_name: String = entity_object_builder.type_name::(); + let guard = self.context.guards.entity_guards.get(&object_name); + + let from_col = ::from_str( + via_relation_definition + .from_col + .to_string() + .to_snake_case() + .as_str(), + ) + .unwrap(); + + let to_col = ::from_str( + to_relation_definition + .to_col + .to_string() + .to_snake_case() + .as_str(), + ) + .unwrap(); + + let field = match via_relation_definition.is_owner { + false => { + Field::new(name, TypeRef::named(&object_name), move |ctx| { + // FIXME: optimize by adding dataloader + 
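// To-one side of the `Related` pair: the entity guard is checked before any
// query is issued, then the statement is built with `find_related()` when a
// `via` junction (typically a junction table) is defined, or with a plain
// `R::find()` otherwise, constrained on the join columns, and at most one
// related row is handed back to the field.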
FieldFuture::new(async move { + let guard_flag = if let Some(guard) = guard { + (*guard)(&ctx) + } else { + false + }; + + if guard_flag { + return Err(Error::new("Entity guard triggered.")); + } + + let parent: &T::Model = ctx + .parent_value + .try_downcast_ref::() + .expect("Parent should exist"); + + let stmt = if >::via().is_some() { + >::find_related() + } else { + R::find() + }; + + let filter = Condition::all().add(to_col.eq(parent.get(from_col))); + + let stmt = stmt.filter(filter); + + let db = ctx.data::()?; + + let data = stmt.one(db).await?; + + if let Some(data) = data { + Ok(Some(FieldValue::owned_any(data))) + } else { + Ok(None) + } + }) + }) + } + true => Field::new( + name, + TypeRef::named_nn(connection_object_builder.type_name(&object_name)), + move |ctx| { + let context: &'static BuilderContext = context; + FieldFuture::new(async move { + let guard_flag = if let Some(guard) = guard { + (*guard)(&ctx) + } else { + false + }; + + if guard_flag { + return Err(Error::new("Entity guard triggered.")); + } + + // FIXME: optimize union queries + // NOTE: each has unique query in order to apply pagination... + let parent: &T::Model = ctx + .parent_value + .try_downcast_ref::() + .expect("Parent should exist"); + + let stmt = if >::via().is_some() { + >::find_related() + } else { + R::find() + }; + + let condition = Condition::all().add(to_col.eq(parent.get(from_col))); + + let filters = ctx.args.get(&context.entity_query_field.filters); + let order_by = ctx.args.get(&context.entity_query_field.order_by); + let pagination = ctx.args.get(&context.entity_query_field.pagination); + + let base_condition = get_filter_conditions::(context, filters); + + let stmt = stmt.filter(condition.add(base_condition)); + let stmt = apply_order(context, stmt, order_by); + + let db = ctx.data::()?; + + let connection = + apply_pagination::(context, db, stmt, pagination).await?; + + Ok(Some(FieldValue::owned_any(connection))) + }) + }, + ), + }; + + match via_relation_definition.is_owner { + false => field, + true => field + .argument(InputValue::new( + &context.entity_query_field.filters, + TypeRef::named(filter_input_builder.type_name(&object_name)), + )) + .argument(InputValue::new( + &context.entity_query_field.order_by, + TypeRef::named(order_input_builder.type_name(&object_name)), + )) + .argument(InputValue::new( + &context.entity_query_field.pagination, + TypeRef::named(&context.pagination_input.type_name), + )), + } + } +} diff --git a/src/query/entity_query_field.rs b/src/query/entity_query_field.rs new file mode 100644 index 00000000..31140428 --- /dev/null +++ b/src/query/entity_query_field.rs @@ -0,0 +1,130 @@ +use async_graphql::{ + dynamic::{Field, FieldFuture, FieldValue, InputValue, TypeRef}, + Error, +}; +use heck::ToLowerCamelCase; +use sea_orm::{DatabaseConnection, EntityTrait, QueryFilter}; + +use crate::{ + apply_order, apply_pagination, get_filter_conditions, BuilderContext, ConnectionObjectBuilder, + EntityObjectBuilder, FilterInputBuilder, OrderInputBuilder, PaginationInputBuilder, +}; + +/// The configuration structure for EntityQueryFieldBuilder +pub struct EntityQueryFieldConfig { + /// used to format entity field name + pub type_name: crate::SimpleNamingFn, + /// name for 'filters' field + pub filters: String, + /// name for 'orderBy' field + pub order_by: String, + /// name for 'pagination' field + pub pagination: String, +} + +impl std::default::Default for EntityQueryFieldConfig { + fn default() -> Self { + EntityQueryFieldConfig { + type_name: Box::new(|object_name: 
&str| -> String { + object_name.to_lower_camel_case() + }), + filters: "filters".into(), + order_by: "orderBy".into(), + pagination: "pagination".into(), + } + } +} + +/// This builder produces a field for the Query object that queries a SeaORM entity +pub struct EntityQueryFieldBuilder { + pub context: &'static BuilderContext, +} + +impl EntityQueryFieldBuilder { + /// used to get field name for a SeaORM entity + pub fn type_name(&self) -> String + where + T: EntityTrait, + ::Model: Sync, + { + let entity_object = EntityObjectBuilder { + context: self.context, + }; + let object_name = entity_object.type_name::(); + self.context.entity_query_field.type_name.as_ref()(&object_name) + } + + /// used to get the Query object field for a SeaORM entity + pub fn to_field(&self) -> Field + where + T: EntityTrait, + ::Model: Sync, + { + let connection_object_builder = ConnectionObjectBuilder { + context: self.context, + }; + let filter_input_builder = FilterInputBuilder { + context: self.context, + }; + let order_input_builder = OrderInputBuilder { + context: self.context, + }; + let pagination_input_builder = PaginationInputBuilder { + context: self.context, + }; + let entity_object = EntityObjectBuilder { + context: self.context, + }; + + let object_name = entity_object.type_name::(); + let type_name = connection_object_builder.type_name(&object_name); + + let guard = self.context.guards.entity_guards.get(&object_name); + + let context: &'static BuilderContext = self.context; + Field::new( + entity_object.query_entity_name::(), + TypeRef::named_nn(type_name), + move |ctx| { + let context: &'static BuilderContext = context; + FieldFuture::new(async move { + let guard_flag = if let Some(guard) = guard { + (*guard)(&ctx) + } else { + false + }; + + if guard_flag { + return Err(Error::new("Entity guard triggered.")); + } + + let filters = ctx.args.get(&context.entity_query_field.filters); + let order_by = ctx.args.get(&context.entity_query_field.order_by); + let pagination = ctx.args.get(&context.entity_query_field.pagination); + + let stmt = T::find(); + let stmt = stmt.filter(get_filter_conditions::(context, filters)); + let stmt = apply_order(context, stmt, order_by); + + let db = ctx.data::()?; + + let connection = apply_pagination::(context, db, stmt, pagination).await?; + + Ok(Some(FieldValue::owned_any(connection))) + }) + }, + ) + .argument(InputValue::new( + &self.context.entity_query_field.filters, + TypeRef::named(filter_input_builder.type_name(&object_name)), + )) + .argument(InputValue::new( + &self.context.entity_query_field.order_by, + TypeRef::named(order_input_builder.type_name(&object_name)), + )) + .argument(InputValue::new( + &self.context.entity_query_field.pagination, + TypeRef::named(pagination_input_builder.type_name()), + )) + } +} diff --git a/src/query/filtering.rs b/src/query/filtering.rs new file mode 100644 index 00000000..c91ee426 --- /dev/null +++ b/src/query/filtering.rs @@ -0,0 +1,172 @@ +use async_graphql::dynamic::{ObjectAccessor, ValueAccessor}; +use sea_orm::{ColumnTrait, ColumnType, Condition, EntityTrait, Iterable}; + +use crate::{ + prepare_enumeration_condition, prepare_float_condition, prepare_integer_condition, + prepare_string_condition, prepare_text_condition, prepare_unsigned_condition, BuilderContext, + EntityObjectBuilder, +}; + +/// utility function used to create the query filter condition +/// for a SeaORM entity using query filter inputs +pub fn get_filter_conditions( + context: &'static BuilderContext, + filters: Option, +) -> Condition +where + T: 
EntityTrait, + ::Model: Sync, +{ + if let Some(filters) = filters { + let filters = filters.object().unwrap(); + + recursive_prepare_condition::(context, filters) + } else { + Condition::all() + } +} + +/// used to prepare recursively the query filtering condition +pub fn recursive_prepare_condition( + context: &'static BuilderContext, + filters: ObjectAccessor, +) -> Condition +where + T: EntityTrait, + ::Model: Sync, +{ + let entity_object_builder = EntityObjectBuilder { context }; + + let condition = T::Column::iter().fold(Condition::all(), |condition, column: T::Column| { + let column_name = entity_object_builder.column_name::(column); + + let filter = filters.get(&column_name); + + if let Some(filter) = filter { + let filter = filter.object().unwrap(); + + // for more info on supported types read "filter_input.rs" + match column.def().get_column_type() { + ColumnType::Char(_) | ColumnType::String(_) | ColumnType::Text => { + prepare_string_condition(&filter, column, condition) + } + ColumnType::TinyInteger + | ColumnType::SmallInteger + | ColumnType::Integer + | ColumnType::BigInteger => prepare_integer_condition(&filter, column, condition), + ColumnType::TinyUnsigned + | ColumnType::SmallUnsigned + | ColumnType::Unsigned + | ColumnType::BigUnsigned => prepare_unsigned_condition(&filter, column, condition), + // FIXME: support f32 (different precision) + ColumnType::Float | ColumnType::Double => { + prepare_float_condition(&filter, column, condition) + } + // FIXME: research how to integrate big decimal + #[cfg(feature = "with-decimal")] + ColumnType::Decimal(_) | ColumnType::Money(_) => crate::prepare_parsed_condition( + &filter, + column, + |v| { + use std::str::FromStr; + + sea_orm::entity::prelude::Decimal::from_str(&v) + .expect("We expect value to be Decimal") + }, + condition, + ), + ColumnType::DateTime + | ColumnType::Timestamp + | ColumnType::TimestampWithTimeZone + | ColumnType::Time + | ColumnType::Date + | ColumnType::Interval(_, _) => prepare_text_condition(&filter, column, condition), + ColumnType::Year(_) => prepare_integer_condition(&filter, column, condition), + ColumnType::Boolean => crate::prepare_boolean_condition(&filter, column, condition), + #[cfg(feature = "with-uuid")] + ColumnType::Uuid => crate::prepare_parsed_condition( + &filter, + column, + |v| { + use std::str::FromStr; + + sea_orm::entity::prelude::Uuid::from_str(&v) + .expect("We expect value to be Uuid") + }, + condition, + ), + ColumnType::Enum { name: _, variants } => { + prepare_enumeration_condition(&filter, column, variants, condition) + } + // ColumnType::Binary => { + // FIXME: binary type + // }, + // ColumnType::VarBinary => { + // FIXME: binary type + // }, + // ColumnType::Bit => { + // FIXME: binary type + // }, + // ColumnType::VarBit => { + // FIXME: binary type + // }, + // ColumnType::Json => { + // FIXME: json type + // }, + // ColumnType::JsonBinary => { + // FIXME: json type + // }, + // ColumnType::Array(_) => { + // FIXME: array type + // }, + // ColumnType::Cidr => { + // FIXME: cidr type + // }, + // ColumnType::Inet => { + // FIXME: inet type + // }, + // ColumnType::MacAddr => { + // FIXME: mac type + // }, + // ColumnType::Custom(_) => { + // FIXME: custom type + // }, + _ => panic!("Type is not supported"), + } + } else { + condition + } + }); + + let condition = if let Some(and) = filters.get("and") { + let filters = and.list().unwrap(); + + condition.add( + filters + .iter() + .fold(Condition::all(), |condition, filters: ValueAccessor| { + let filters = 
diff --git a/src/query/mod.rs b/src/query/mod.rs
new file mode 100644
index 00000000..49969615
--- /dev/null
+++ b/src/query/mod.rs
@@ -0,0 +1,17 @@
+pub mod entity_query_field;
+pub use entity_query_field::*;
+
+pub mod ordering;
+pub use ordering::*;
+
+pub mod pagination;
+pub use pagination::*;
+
+pub mod filtering;
+pub use filtering::*;
+
+pub mod entity_object_relation;
+pub use entity_object_relation::*;
+
+pub mod entity_object_via_relation;
+pub use entity_object_via_relation::*;
diff --git a/src/query/ordering.rs b/src/query/ordering.rs
new file mode 100644
index 00000000..4f790bdb
--- /dev/null
+++ b/src/query/ordering.rs
@@ -0,0 +1,46 @@
+use async_graphql::dynamic::ValueAccessor;
+use sea_orm::{EntityTrait, Iterable, QueryOrder, Select};
+
+use crate::{BuilderContext, EntityObjectBuilder};
+
+/// used to parse order input object and apply it to statement
+pub fn apply_order<T>(
+    context: &'static BuilderContext,
+    stmt: Select<T>,
+    order_by: Option<ValueAccessor>,
+) -> Select<T>
+where
+    T: EntityTrait,
+    <T as EntityTrait>::Model: Sync,
+{
+    if let Some(order_by) = order_by {
+        let order_by = order_by.object().unwrap();
+
+        let entity_object = EntityObjectBuilder { context };
+
+        T::Column::iter().fold(stmt, |stmt, column: T::Column| {
+            let column_name = entity_object.column_name::<T>(column);
+
+            let order = order_by.get(&column_name);
+
+            if let Some(order) = order {
+                let order = order.enum_name().unwrap();
+
+                let asc_variant = &context.order_by_enum.asc_variant;
+                let desc_variant = &context.order_by_enum.desc_variant;
+
+                if order.eq(asc_variant) {
+                    stmt.order_by(column, sea_orm::Order::Asc)
+                } else if order.eq(desc_variant) {
+                    stmt.order_by(column, sea_orm::Order::Desc)
+                } else {
+                    panic!("Cannot map enumeration")
+                }
+            } else {
+                stmt
+            }
+        })
+    } else {
+        stmt
+    }
+}
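Editor's note (not part of the patch): an orderBy argument such as { lastName: ASC, actorId: DESC } makes apply_order chain the corresponding SeaORM calls, roughly as below; the entities::actor module is a hypothetical sea-orm-cli generated entity.

use sea_orm::{EntityTrait, Order, QueryOrder, Select};

use crate::entities::actor; // hypothetical generated entity module

fn equivalent_statement() -> Select<actor::Entity> {
    // columns are visited in T::Column::iter() order, so the resulting
    // ORDER BY follows the entity's column order, not the input order
    actor::Entity::find()
        .order_by(actor::Column::LastName, Order::Asc)
        .order_by(actor::Column::ActorId, Order::Desc)
}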
diff --git a/src/query/pagination.rs b/src/query/pagination.rs
new file mode 100644
index 00000000..592a1c01
--- /dev/null
+++ b/src/query/pagination.rs
@@ -0,0 +1,277 @@
+use async_graphql::dynamic::ValueAccessor;
+use itertools::Itertools;
+use sea_orm::{
+    ConnectionTrait, CursorTrait, DatabaseConnection, DbErr, EntityTrait, Iterable, ModelTrait,
+    PaginatorTrait, PrimaryKeyToColumn, QuerySelect, QueryTrait, Select,
+};
+
+use crate::{
+    decode_cursor, encode_cursor, map_cursor_values, BuilderContext, Connection, Edge, PageInfo,
+    PaginationInfo, PaginationInputBuilder,
+};
+
+/// used to parse pagination input object and apply it to statement
+pub async fn apply_pagination<T>(
+    context: &'static BuilderContext,
+    db: &DatabaseConnection,
+    stmt: Select<T>,
+    pagination: Option<ValueAccessor<'_>>,
+) -> Result<Connection<T>, sea_orm::error::DbErr>
+where
+    T: EntityTrait,
+    <T as EntityTrait>::Model: Sync,
+{
+    if let Some(pagination) = pagination {
+        let pagination = pagination.object().unwrap();
+        let pagination_input_builder = PaginationInputBuilder { context };
+
+        let pagination = pagination_input_builder.parse_object(&pagination);
+
+        if let Some(cursor_object) = pagination.cursor {
+            let next_stmt = stmt.clone();
+            let previous_stmt = stmt.clone();
+
+            fn apply_stmt_cursor_by<T>(
+                stmt: sea_orm::entity::prelude::Select<T>,
+            ) -> sea_orm::Cursor<sea_orm::SelectModel<T::Model>>
+            where
+                T: EntityTrait,
+                <T as EntityTrait>::Model: Sync,
+            {
+                let size = T::PrimaryKey::iter().fold(0, |acc, _| acc + 1);
+                if size == 1 {
+                    let column = T::PrimaryKey::iter()
+                        .map(|variant| variant.into_column())
+                        .collect::<Vec<T::Column>>()[0];
+                    stmt.cursor_by(column)
+                } else if size == 2 {
+                    let columns = T::PrimaryKey::iter()
+                        .map(|variant| variant.into_column())
+                        .collect_tuple::<(T::Column, T::Column)>()
+                        .unwrap();
+                    stmt.cursor_by(columns)
+                } else if size == 3 {
+                    let columns = T::PrimaryKey::iter()
+                        .map(|variant| variant.into_column())
+                        .collect_tuple::<(T::Column, T::Column, T::Column)>()
+                        .unwrap();
+                    stmt.cursor_by(columns)
+                } else {
+                    panic!("seaography does not support cursors with size greater than 3")
+                }
+            }
+
+            let mut stmt = apply_stmt_cursor_by(stmt);
+
+            if let Some(cursor) = cursor_object.cursor {
+                let values = decode_cursor(&cursor)?;
+
+                let cursor_values: sea_orm::sea_query::value::ValueTuple =
+                    map_cursor_values(values);
+
+                stmt.after(cursor_values);
+            }
+
+            let data = stmt.first(cursor_object.limit).all(db).await.unwrap();
+
+            let has_next_page: bool = {
+                let mut next_stmt = apply_stmt_cursor_by(next_stmt);
+
+                let last_node = data.last();
+
+                if let Some(node) = last_node {
+                    let values: Vec<sea_orm::Value> = T::PrimaryKey::iter()
+                        .map(|variant| node.get(variant.into_column()))
+                        .collect();
+
+                    let values = map_cursor_values(values);
+
+                    let next_data = next_stmt.first(1).after(values).all(db).await.unwrap();
+
+                    !next_data.is_empty()
+                } else {
+                    false
+                }
+            };
+
+            let has_previous_page: bool = {
+                let mut previous_stmt = apply_stmt_cursor_by(previous_stmt);
+
+                let first_node = data.first();
+
+                if let Some(node) = first_node {
+                    let values: Vec<sea_orm::Value> = T::PrimaryKey::iter()
+                        .map(|variant| node.get(variant.into_column()))
+                        .collect();
+
+                    let values = map_cursor_values(values);
+
+                    let previous_data =
+                        previous_stmt.first(1).before(values).all(db).await.unwrap();
+
+                    !previous_data.is_empty()
+                } else {
+                    false
+                }
+            };
+
+            let edges: Vec<Edge<T>> = data
+                .into_iter()
+                .map(|node| {
+                    let values: Vec<sea_orm::Value> = T::PrimaryKey::iter()
+                        .map(|variant| node.get(variant.into_column()))
+                        .collect();
+
+                    let cursor: String = encode_cursor(values);
+
+                    Edge { cursor, node }
+                })
+                .collect();
+
+            let start_cursor = edges.first().map(|edge| edge.cursor.clone());
+            let end_cursor = edges.last().map(|edge| edge.cursor.clone());
+
+            Ok(Connection {
+                edges,
+                page_info: PageInfo {
+                    has_previous_page,
+                    has_next_page,
+                    start_cursor,
+                    end_cursor,
+                },
+                pagination_info: None,
+            })
+        } else if let Some(page_object) = pagination.page {
+            let paginator = stmt.paginate(db, page_object.limit);
+
+            let pages = paginator.num_pages().await?;
+
+            let data = paginator.fetch_page(page_object.page).await?;
+
+            let edges: Vec<Edge<T>> = data
+                .into_iter()
+                .map(|node| {
+                    let values: Vec<sea_orm::Value> = T::PrimaryKey::iter()
+                        .map(|variant| node.get(variant.into_column()))
+                        .collect();
+
+                    let cursor: String = encode_cursor(values);
+
+                    Edge { cursor, node }
+                })
+                .collect();
+
+            let start_cursor = edges.first().map(|edge| edge.cursor.clone());
+            let end_cursor = edges.last().map(|edge| edge.cursor.clone());
+
+            Ok(Connection {
+                edges,
+                page_info: PageInfo {
+                    has_previous_page: page_object.page != 0,
+                    has_next_page: page_object.page + 1 != pages,
+                    start_cursor,
+                    end_cursor,
+                },
+                pagination_info: Some(PaginationInfo {
+                    pages,
+                    current: page_object.page,
+                    offset: page_object.page * page_object.limit,
+                    total: pages * page_object.limit,
+                }),
+            })
+        } else if let Some(offset_object) = pagination.offset {
+            let offset = offset_object.offset;
+            let limit = offset_object.limit;
+
+            let count_stmt = stmt.clone().as_query().to_owned();
+
+            let data = stmt.offset(offset).limit(limit).all(db).await?;
+
+            let edges: Vec<Edge<T>> = data
+                .into_iter()
+                .map(|node| {
+                    let values: Vec<sea_orm::Value> = T::PrimaryKey::iter()
+                        .map(|variant| node.get(variant.into_column()))
+                        .collect();
+
+                    let cursor: String = encode_cursor(values);
+
+                    Edge { cursor, node }
+                })
+                .collect();
+
+            let start_cursor = edges.first().map(|edge| edge.cursor.clone());
+            let end_cursor = edges.last().map(|edge| edge.cursor.clone());
+
+            let count_stmt = db.get_database_backend().build(
+                sea_orm::sea_query::SelectStatement::new()
+                    .expr(sea_orm::sea_query::Expr::cust("COUNT(*) AS num_items"))
+                    .from_subquery(count_stmt, sea_orm::sea_query::Alias::new("sub_query")),
+            );
+
+            let total = match db.query_one(count_stmt).await? {
+                Some(res) => match db.get_database_backend() {
+                    sea_orm::DbBackend::Postgres => res.try_get::<i64>("", "num_items")? as u64,
+                    _ => res.try_get::<i32>("", "num_items")? as u64,
+                },
+                None => 0,
+            };
+
+            Ok(Connection {
+                edges,
+                page_info: PageInfo {
+                    has_previous_page: offset != 0,
+                    has_next_page: offset * limit < total,
+                    start_cursor,
+                    end_cursor,
+                },
+                pagination_info: Some(PaginationInfo {
+                    current: f64::ceil(offset as f64 / limit as f64) as u64,
+                    pages: f64::ceil(total as f64 / limit as f64) as u64,
+                    total,
+                    offset,
+                }),
+            })
+        } else {
+            Err(DbErr::Type(
+                "Something is wrong with the pagination input".into(),
+            ))
+        }
+    } else {
+        let data = stmt.all(db).await?;
+
+        let edges: Vec<Edge<T>> = data
+            .into_iter()
+            .map(|node| {
+                let values: Vec<sea_orm::Value> = T::PrimaryKey::iter()
+                    .map(|variant| node.get(variant.into_column()))
+                    .collect();
+
+                let cursor: String = encode_cursor(values);
+
+                Edge { cursor, node }
+            })
+            .collect();
+
+        let start_cursor = edges.first().map(|edge| edge.cursor.clone());
+        let end_cursor = edges.last().map(|edge| edge.cursor.clone());
+
+        let total = edges.len() as u64;
+
+        Ok(Connection {
+            edges,
+            page_info: PageInfo {
+                has_previous_page: false,
+                has_next_page: false,
+                start_cursor,
+                end_cursor,
+            },
+            pagination_info: Some(PaginationInfo {
+                pages: 1,
+                current: 1,
+                offset: 0,
+                total,
+            }),
+        })
+    }
+}
diff --git a/src/type_filter.rs b/src/type_filter.rs
deleted file mode 100644
index 5a521a66..00000000
--- a/src/type_filter.rs
+++ /dev/null
@@ -1,269 +0,0 @@
-pub type BinaryVector = Vec<u8>;
-
-pub trait FilterTrait {
-    type Ty: async_graphql::InputType;
-
-    fn eq(&self) -> Option<Self::Ty>;
-    fn ne(&self) -> Option<Self::Ty>;
-    fn gt(&self) -> Option<Self::Ty>;
-    fn gte(&self) -> Option<Self::Ty>;
-    fn lt(&self) -> Option<Self::Ty>;
-    fn lte(&self) -> Option<Self::Ty>;
-    fn is_in(&self) -> Option<Vec<Self::Ty>>;
-    fn is_not_in(&self) -> Option<Vec<Self::Ty>>;
-    fn is_null(&self) -> Option<bool>;
-    fn contains(&self) -> Option<String>;
-    fn starts_with(&self) -> Option<String>;
-    fn ends_with(&self) -> Option<String>;
-    fn like(&self) -> Option<String>;
-    fn not_like(&self) -> Option<String>;
-}
-
-pub trait FilterTypeTrait {
-    type Filter: async_graphql::InputType + FilterTrait;
-}
-
-#[derive(Debug, Clone, async_graphql::InputObject)]
-#[graphql(concrete(name = "TinyIntegerFilter", params(i8)))]
-#[graphql(concrete(name = "SmallIntegerFilter", params(i16)))]
-#[graphql(concrete(name = "IntegerFilter", params(i32)))]
-#[graphql(concrete(name = "BigIntegerFilter", params(i64)))]
-#[graphql(concrete(name = "TinyUnsignedFilter", params(u8)))]
-#[graphql(concrete(name = "SmallUnsignedFilter", params(u16)))]
-#[graphql(concrete(name = "UnsignedFilter", params(u32)))]
-#[graphql(concrete(name = "BigUnsignedFilter", params(u64)))]
-#[graphql(concrete(name = "FloatFilter", params(f32)))]
-#[graphql(concrete(name = "DoubleFilter", params(f64)))]
-#[cfg_attr(
-    feature = "with-json",
-    graphql(concrete(name = "JsonFilter", params(sea_orm::prelude::Json)))
-)]
-// TODO #[graphql(concrete(name = "DateFilter", params()))]
-// TODO #[graphql(concrete(name = "TimeFilter", params()))]
-#[cfg_attr(
-    feature = "with-chrono",
-    graphql(concrete(name = "DateFilter", params(sea_orm::prelude::Date)))
-)]
-#[cfg_attr(
-    feature = "with-chrono",
-    graphql(concrete(name = "DateTimeFilter", params(sea_orm::prelude::DateTime)))
-)]
-#[cfg_attr(
-    feature = "with-chrono",
-    graphql(concrete(name = "DateTimeUtcFilter", params(sea_orm::prelude::DateTimeUtc)))
-)]
-#[cfg_attr(
-    feature = "with-chrono",
-    graphql(concrete(
-        name = "DateTimeWithTimeZoneFilter",
-        params(sea_orm::prelude::DateTimeWithTimeZone)
-    ))
-)]
-// TODO #[graphql(concrete(name = "TimestampFilter", params()))]
-// TODO #[graphql(concrete(name = "TimestampWithTimeZoneFilter", params()))]
-#[cfg_attr(
-    feature = "with-decimal",
-    graphql(concrete(name = "DecimalFilter", params(sea_orm::prelude::Decimal)))
-)]
-#[cfg_attr(
-    feature = "with-uuid",
-    graphql(concrete(name = "UuidFilter", params(sea_orm::prelude::Uuid)))
-)]
-#[graphql(concrete(name = "BinaryFilter", params(BinaryVector)))]
-#[graphql(concrete(name = "BooleanFilter", params(bool)))]
-pub struct TypeFilter<T>
-where
-    T: async_graphql::InputType,
-{
-    pub eq: Option<T>,
-    pub ne: Option<T>,
-    pub gt: Option<T>,
-    pub gte: Option<T>,
-    pub lt: Option<T>,
-    pub lte: Option<T>,
-    pub is_in: Option<Vec<T>>,
-    pub is_not_in: Option<Vec<T>>,
-    pub is_null: Option<bool>,
-}
-
-impl<T> FilterTrait for TypeFilter<T>
-where
-    T: async_graphql::InputType + Clone,
-{
-    type Ty = T;
-
-    fn eq(&self) -> Option<T> {
-        self.eq.clone()
-    }
-
-    fn ne(&self) -> Option<T> {
-        self.ne.clone()
-    }
-
-    fn gt(&self) -> Option<T> {
-        self.gt.clone()
-    }
-
-    fn gte(&self) -> Option<T> {
-        self.gte.clone()
-    }
-
-    fn lt(&self) -> Option<T> {
-        self.lt.clone()
-    }
-
-    fn lte(&self) -> Option<T> {
-        self.lte.clone()
-    }
-
-    fn is_in(&self) -> Option<Vec<T>> {
-        self.is_in.clone()
-    }
-
-    fn is_not_in(&self) -> Option<Vec<T>> {
-        self.is_not_in.clone()
-    }
-
-    fn is_null(&self) -> Option<bool> {
-        self.is_null
-    }
-
-    fn contains(&self) -> Option<String> {
-        panic!("FilterType does not support contains")
-    }
-
-    fn starts_with(&self) -> Option<String> {
-        panic!("FilterType does not support starts_with")
-    }
-
-    fn ends_with(&self) -> Option<String> {
-        panic!("FilterType does not support ends_with")
-    }
-
-    fn like(&self) -> Option<String> {
-        panic!("FilterType does not support like")
-    }
-
-    fn not_like(&self) -> Option<String> {
-        panic!("FilterType does not support not_like")
-    }
-}
-
-#[derive(Debug, Clone, async_graphql::InputObject)]
-pub struct StringFilter {
-    pub eq: Option<String>,
-    pub ne: Option<String>,
-    pub gt: Option<String>,
-    pub gte: Option<String>,
-    pub lt: Option<String>,
-    pub lte: Option<String>,
-    pub is_in: Option<Vec<String>>,
-    pub is_not_in: Option<Vec<String>>,
-    pub is_null: Option<bool>,
-    pub contains: Option<String>,
-    pub starts_with: Option<String>,
-    pub ends_with: Option<String>,
-    pub like: Option<String>,
-    pub not_like: Option<String>,
-}
-
-impl FilterTrait for StringFilter {
-    type Ty = String;
-
-    fn eq(&self) -> Option<String> {
-        self.eq.clone()
-    }
-
-    fn ne(&self) -> Option<String> {
-        self.ne.clone()
-    }
-
-    fn gt(&self) -> Option<String> {
-        self.gt.clone()
-    }
-
-    fn gte(&self) -> Option<String> {
-        self.gte.clone()
-    }
-
-    fn lt(&self) -> Option<String> {
-        self.lt.clone()
-    }
-
-    fn lte(&self) -> Option<String> {
-        self.lte.clone()
-    }
-
-    fn is_in(&self) -> Option<Vec<String>> {
-        self.is_in.clone()
-    }
-
-    fn is_not_in(&self) -> Option<Vec<String>> {
-        self.is_not_in.clone()
-    }
-
-    fn is_null(&self) -> Option<bool> {
-        self.is_null
-    }
-
-    fn contains(&self) -> Option<String> {
-        self.contains.clone()
-    }
-
-    fn starts_with(&self) -> Option<String> {
-        self.starts_with.clone()
-    }
-
-    fn ends_with(&self) -> Option<String> {
-        self.ends_with.clone()
-    }
-
-    fn like(&self) -> Option<String> {
-        self.like.clone()
-    }
-
-    fn not_like(&self) -> Option<String> {
-        self.not_like.clone()
-    }
-}
-
-macro_rules! impl_filter_type_trait {
-    ( $type: ty ) => {
-        impl FilterTypeTrait for $type {
-            type Filter = TypeFilter<$type>;
-        }
-    };
-    ( $type: ty, $filter: ty ) => {
-        impl FilterTypeTrait for $type {
-            type Filter = $filter;
-        }
-    };
-}
-
-impl_filter_type_trait!(i8);
-impl_filter_type_trait!(i16);
-impl_filter_type_trait!(i32);
-impl_filter_type_trait!(i64);
-impl_filter_type_trait!(u8);
-impl_filter_type_trait!(u16);
-impl_filter_type_trait!(u32);
-impl_filter_type_trait!(u64);
-impl_filter_type_trait!(f32);
-impl_filter_type_trait!(f64);
-#[cfg(feature = "with-json")]
-impl_filter_type_trait!(sea_orm::prelude::Json);
-#[cfg(feature = "with-chrono")]
-impl_filter_type_trait!(sea_orm::prelude::Date);
-#[cfg(feature = "with-chrono")]
-impl_filter_type_trait!(sea_orm::prelude::DateTime);
-#[cfg(feature = "with-chrono")]
-impl_filter_type_trait!(sea_orm::prelude::DateTimeUtc);
-#[cfg(feature = "with-chrono")]
-impl_filter_type_trait!(sea_orm::prelude::DateTimeWithTimeZone);
-#[cfg(feature = "with-decimal")]
-impl_filter_type_trait!(sea_orm::prelude::Decimal);
-#[cfg(feature = "with-uuid")]
-impl_filter_type_trait!(sea_orm::prelude::Uuid);
-impl_filter_type_trait!(BinaryVector);
-impl_filter_type_trait!(bool);
-impl_filter_type_trait!(String, StringFilter);
diff --git a/src/utilities.rs b/src/utilities.rs
new file mode 100644
index 00000000..647a8d35
--- /dev/null
+++ b/src/utilities.rs
@@ -0,0 +1,267 @@
+use itertools::Itertools;
+
+/// used to encode the primary key values of a SeaORM entity to a String
+pub fn encode_cursor(values: Vec<sea_orm::Value>) -> String {
+    values
+        .iter()
+        .map(|value| -> String {
+            match value {
+                sea_orm::Value::TinyInt(value) => {
+                    if let Some(value) = value {
+                        let value = value.to_string();
+                        format!("TinyInt[{}]:{}", value.len(), value)
+                    } else {
+                        "TinyInt[-1]:".into()
+                    }
+                }
+                sea_orm::Value::SmallInt(value) => {
+                    if let Some(value) = value {
+                        let value = value.to_string();
+                        format!("SmallInt[{}]:{}", value.len(), value)
+                    } else {
+                        "SmallInt[-1]:".into()
+                    }
+                }
+                sea_orm::Value::Int(value) => {
+                    if let Some(value) = value {
+                        let value = value.to_string();
+                        format!("Int[{}]:{}", value.len(), value)
+                    } else {
+                        "Int[-1]:".into()
+                    }
+                }
+                sea_orm::Value::BigInt(value) => {
+                    if let Some(value) = value {
+                        let value = value.to_string();
+                        format!("BigInt[{}]:{}", value.len(), value)
+                    } else {
+                        "BigInt[-1]:".into()
+                    }
+                }
+                sea_orm::Value::TinyUnsigned(value) => {
+                    if let Some(value) = value {
+                        let value = value.to_string();
+                        format!("TinyUnsigned[{}]:{}", value.len(), value)
+                    } else {
+                        "TinyUnsigned[-1]:".into()
+                    }
+                }
+                sea_orm::Value::SmallUnsigned(value) => {
+                    if let Some(value) = value {
+                        let value = value.to_string();
+                        format!("SmallUnsigned[{}]:{}", value.len(), value)
+                    } else {
+                        "SmallUnsigned[-1]:".into()
+                    }
+                }
+                sea_orm::Value::Unsigned(value) => {
+                    if let Some(value) = value {
+                        let value = value.to_string();
+                        format!("Unsigned[{}]:{}", value.len(), value)
+                    } else {
+                        "Unsigned[-1]:".into()
+                    }
+                }
+                sea_orm::Value::BigUnsigned(value) => {
+                    if let Some(value) = value {
+                        let value = value.to_string();
+                        format!("BigUnsigned[{}]:{}", value.len(), value)
+                    } else {
+                        "BigUnsigned[-1]:".into()
+                    }
+                }
+                sea_orm::Value::String(value) => {
+                    if let Some(value) = value {
+                        let value = value.as_ref();
+                        format!("String[{}]:{}", value.len(), value)
+                    } else {
+                        "String[-1]:".into()
+                    }
+                }
+                #[cfg(feature = "with-uuid")]
+                sea_orm::Value::Uuid(value) => {
+                    if let Some(value) = value {
+                        let value = value.as_ref().to_string();
+                        format!("Uuid[{}]:{}", value.len(), value)
+                    } else {
+                        "Uuid[-1]:".into()
+                    }
+                }
+                _ => {
+                    // FIXME: missing value types
+                    panic!("Cannot convert type to cursor")
+                }
+            }
+        })
+        .join(",")
+}
+
+#[derive(Debug)]
+pub enum DecodeMode {
+    Type,
+    Length,
+    ColonSkip,
+    Data,
+}
+
+pub fn map_cursor_values(values: Vec<sea_orm::Value>) -> sea_orm::sea_query::value::ValueTuple {
+    if values.len() == 1 {
+        sea_orm::sea_query::value::ValueTuple::One(values[0].clone())
+    } else if values.len() == 2 {
+        sea_orm::sea_query::value::ValueTuple::Two(values[0].clone(), values[1].clone())
+    } else if values.len() == 3 {
+        sea_orm::sea_query::value::ValueTuple::Three(
+            values[0].clone(),
+            values[1].clone(),
+            values[2].clone(),
+        )
+    } else {
+        panic!("seaography does not support cursors values with size greater than 3")
+    }
+}
+
+/// used to decode a String to a vector of SeaORM values
+pub fn decode_cursor(s: &str) -> Result<Vec<sea_orm::Value>, sea_orm::error::DbErr> {
+    let chars = s.chars();
+
+    let mut values: Vec<sea_orm::Value> = vec![];
+
+    let mut type_indicator = String::new();
+    let mut length_indicator = String::new();
+    let mut data_buffer = String::new();
+    let mut length = -1;
+
+    let mut mode: DecodeMode = DecodeMode::Type;
+    for char in chars {
+        match mode {
+            DecodeMode::Type => {
+                if char.eq(&'[') {
+                    mode = DecodeMode::Length;
+                } else if char.eq(&',') {
+                    // SKIP
+                } else {
+                    type_indicator.push(char);
+                }
+            }
+            DecodeMode::Length => {
+                if char.eq(&']') {
+                    mode = DecodeMode::ColonSkip;
+                    length = length_indicator.parse::<i64>().unwrap();
+                } else {
+                    length_indicator.push(char);
+                }
+            }
+            DecodeMode::ColonSkip => {
+                // skips ':' char
+                mode = DecodeMode::Data;
+            }
+            DecodeMode::Data => {
+                if length > 0 {
+                    data_buffer.push(char);
+                    length -= 1;
+                }
+
+                if length <= 0 {
+                    let value: sea_orm::Value = match type_indicator.as_str() {
+                        "TinyInt" => {
+                            if length.eq(&-1) {
+                                sea_orm::Value::TinyInt(None)
+                            } else {
+                                sea_orm::Value::TinyInt(Some(data_buffer.parse::<i8>().unwrap()))
+                            }
+                        }
+                        "SmallInt" => {
+                            if length.eq(&-1) {
+                                sea_orm::Value::SmallInt(None)
+                            } else {
+                                sea_orm::Value::SmallInt(Some(data_buffer.parse::<i16>().unwrap()))
+                            }
+                        }
+                        "Int" => {
+                            if length.eq(&-1) {
+                                sea_orm::Value::Int(None)
+                            } else {
+                                sea_orm::Value::Int(Some(data_buffer.parse::<i32>().unwrap()))
+                            }
+                        }
+                        "BigInt" => {
+                            if length.eq(&-1) {
+                                sea_orm::Value::BigInt(None)
+                            } else {
+                                sea_orm::Value::BigInt(Some(data_buffer.parse::<i64>().unwrap()))
+                            }
+                        }
+                        "TinyUnsigned" => {
+                            if length.eq(&-1) {
+                                sea_orm::Value::TinyUnsigned(None)
+                            } else {
+                                sea_orm::Value::TinyUnsigned(Some(
+                                    data_buffer.parse::<u8>().unwrap(),
+                                ))
+                            }
+                        }
+                        "SmallUnsigned" => {
+                            if length.eq(&-1) {
+                                sea_orm::Value::SmallUnsigned(None)
+                            } else {
+                                sea_orm::Value::SmallUnsigned(Some(
+                                    data_buffer.parse::<u16>().unwrap(),
+                                ))
+                            }
+                        }
+                        "Unsigned" => {
+                            if length.eq(&-1) {
+                                sea_orm::Value::Unsigned(None)
+                            } else {
+                                sea_orm::Value::Unsigned(Some(data_buffer.parse::<u32>().unwrap()))
+                            }
+                        }
+                        "BigUnsigned" => {
+                            if length.eq(&-1) {
+                                sea_orm::Value::BigUnsigned(None)
+                            } else {
+                                sea_orm::Value::BigUnsigned(Some(
+                                    data_buffer.parse::<u64>().unwrap(),
+                                ))
+                            }
+                        }
+                        "String" => {
+                            if length.eq(&-1) {
+                                sea_orm::Value::String(None)
+                            } else {
+                                sea_orm::Value::String(Some(Box::new(
+                                    data_buffer.parse::<String>().unwrap(),
+                                )))
+                            }
+                        }
+                        #[cfg(feature = "with-uuid")]
+                        "Uuid" => {
+                            if length.eq(&-1) {
+                                sea_orm::Value::Uuid(None)
+                            } else {
+                                sea_orm::Value::Uuid(Some(Box::new(
+                                    data_buffer.parse::<sea_orm::prelude::Uuid>().unwrap(),
+                                )))
+                            }
+                        }
+                        _ => {
+                            // FIXME: missing value types
+                            panic!("cannot encode current type")
+                        }
+                    };
+
+                    values.push(value);
+
+                    type_indicator = String::new();
+                    length_indicator = String::new();
+                    data_buffer = String::new();
+                    length = -1;
+
+                    mode = DecodeMode::Type;
+                }
+            }
+        }
+    }
+
+    Ok(values)
+}
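Editor's note (not part of the patch): encode_cursor and decode_cursor use a plain "Type[len]:data" encoding joined with commas, with length -1 marking a NULL key part. A minimal round-trip sketch for a composite (i32, i32) key, assuming the helpers are re-exported at the seaography crate root like the query modules:

use sea_orm::Value;
use seaography::{decode_cursor, encode_cursor};

fn cursor_round_trip() -> Result<(), sea_orm::DbErr> {
    // (film_id, actor_id) = (1, 42) encodes each part as "Int[len]:digits"
    let cursor = encode_cursor(vec![Value::Int(Some(1)), Value::Int(Some(42))]);
    assert_eq!(cursor, "Int[1]:1,Int[2]:42");

    // decoding restores the original SeaORM values
    let values = decode_cursor(&cursor)?;
    assert_eq!(values, vec![Value::Int(Some(1)), Value::Int(Some(42))]);
    Ok(())
}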