refac: rebase syn 2 changes
saiintbrisson committed Mar 12, 2024
1 parent c5357f1 commit 11eefa1
Showing 10 changed files with 249 additions and 283 deletions.
30 changes: 16 additions & 14 deletions Cargo.lock

Some generated files are not rendered by default.

61 changes: 49 additions & 12 deletions sqlx-macros-core/Cargo.toml
@@ -28,20 +28,52 @@ sqlite = ["sqlx-sqlite"]
# type integrations
json = ["sqlx-core/json", "sqlx-mysql?/json", "sqlx-sqlite?/json"]

bigdecimal = ["sqlx-core/bigdecimal", "sqlx-mysql?/bigdecimal", "sqlx-postgres?/bigdecimal"]
bigdecimal = [
"sqlx-core/bigdecimal",
"sqlx-mysql?/bigdecimal",
"sqlx-postgres?/bigdecimal",
]
bit-vec = ["sqlx-core/bit-vec", "sqlx-postgres?/bit-vec"]
chrono = ["sqlx-core/chrono", "sqlx-mysql?/chrono", "sqlx-postgres?/chrono", "sqlx-sqlite?/chrono"]
chrono = [
"sqlx-core/chrono",
"sqlx-mysql?/chrono",
"sqlx-postgres?/chrono",
"sqlx-sqlite?/chrono",
]
ipnetwork = ["sqlx-core/ipnetwork", "sqlx-postgres?/ipnetwork"]
mac_address = ["sqlx-core/mac_address", "sqlx-postgres?/mac_address"]
rust_decimal = ["sqlx-core/rust_decimal", "sqlx-mysql?/rust_decimal", "sqlx-postgres?/rust_decimal"]
time = ["sqlx-core/time", "sqlx-mysql?/time", "sqlx-postgres?/time", "sqlx-sqlite?/time"]
uuid = ["sqlx-core/uuid", "sqlx-mysql?/uuid", "sqlx-postgres?/uuid", "sqlx-sqlite?/uuid"]
rust_decimal = [
"sqlx-core/rust_decimal",
"sqlx-mysql?/rust_decimal",
"sqlx-postgres?/rust_decimal",
]
time = [
"sqlx-core/time",
"sqlx-mysql?/time",
"sqlx-postgres?/time",
"sqlx-sqlite?/time",
]
uuid = [
"sqlx-core/uuid",
"sqlx-mysql?/uuid",
"sqlx-postgres?/uuid",
"sqlx-sqlite?/uuid",
]

[dependencies]
sqlx-core = { workspace = true, features = ["offline"] }
sqlx-mysql = { workspace = true, features = ["offline", "migrate"], optional = true }
sqlx-postgres = { workspace = true, features = ["offline", "migrate"], optional = true }
sqlx-sqlite = { workspace = true, features = ["offline", "migrate"], optional = true }
sqlx-mysql = { workspace = true, features = [
"offline",
"migrate",
], optional = true }
sqlx-postgres = { workspace = true, features = [
"offline",
"migrate",
], optional = true }
sqlx-sqlite = { workspace = true, features = [
"offline",
"migrate",
], optional = true }

async-std = { workspace = true, optional = true }
tokio = { workspace = true, optional = true }
@@ -52,12 +84,17 @@ hex = { version = "0.4.3" }
heck = { version = "0.4", features = ["unicode"] }
either = "1.6.1"
once_cell = "1.9.0"
proc-macro2 = { version = "1.0.36", default-features = false }
proc-macro2 = { version = "1.0.79", default-features = false }
serde = { version = "1.0.132", features = ["derive"] }
serde_json = { version = "1.0.73" }
sha2 = { version = "0.10.0" }
syn = { version = "1.0.84", default-features = false, features = ["full", "derive", "parsing", "printing", "clone-impls"] }
syn = { version = "2.0.52", default-features = false, features = [
"full",
"derive",
"parsing",
"printing",
"clone-impls",
] }
tempfile = { version = "3.3.0" }
quote = { version = "1.0.14", default-features = false }
quote = { version = "1.0.26", default-features = false }
url = { version = "2.2.2", default-features = false }
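
For context, a minimal sketch (not part of this commit; names are illustrative) of what the trimmed syn 2 feature set above is used for in a derive macro: "derive"/"full" plus "parsing" let syn::parse2 build a DeriveInput from raw tokens, and "printing" lets syn nodes (such as split generics) be re-emitted through quote.

use proc_macro2::TokenStream;
use quote::quote;
use syn::DeriveInput;

// Hypothetical helper: operates on proc_macro2 tokens, so it needs neither
// syn's default features nor the "proc-macro" feature.
fn expand_derive(input: TokenStream) -> syn::Result<TokenStream> {
    // "parsing" + "derive": build a syntax tree from the raw tokens.
    let ast: DeriveInput = syn::parse2(input)?;
    let ident = &ast.ident;
    // "printing": split_for_impl and interpolating syn nodes into quote! output.
    let (impl_generics, ty_generics, where_clause) = ast.generics.split_for_impl();
    Ok(quote! {
        impl #impl_generics #ident #ty_generics #where_clause {
            pub fn derived() {}
        }
    })
}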

1 change: 1 addition & 0 deletions sqlx-macros-core/src/database/mod.rs
@@ -172,6 +172,7 @@ mod mysql;
mod sqlite;

mod fake_sqlx {
#[cfg(any(feature = "mysql", feature = "postgres", feature = "sqlite"))]
pub use sqlx_core::*;

(GitHub Actions annotation, SQLite Examples job — warning on line 176 of sqlx-macros-core/src/database/mod.rs: unused import: `sqlx_core::*`)
#[cfg(feature = "mysql")]
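The one change in this file gates the blanket re-export on the driver features, so it drops out of builds with no database feature enabled; the CI annotation above shows the import can still go unused in some configurations (here, the SQLite Examples job). A rough sketch of the pattern (simplified; the real module also re-exports the individual drivers):

mod fake_sqlx {
    // Only pull in sqlx_core when some driver feature is active; otherwise
    // the glob import is dead code and rustc warns about it.
    #[cfg(any(feature = "mysql", feature = "postgres", feature = "sqlite"))]
    pub use sqlx_core::*;

    #[cfg(feature = "mysql")]
    pub use sqlx_mysql as mysql;
}
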
172 changes: 68 additions & 104 deletions sqlx-macros-core/src/derives/attributes.rs
@@ -1,8 +1,8 @@
use proc_macro2::{Ident, Span, TokenStream};
use quote::quote;
use syn::{
punctuated::Punctuated, spanned::Spanned, token::Comma, Attribute, DeriveInput, Field, Lit,
Meta, MetaNameValue, NestedMeta, Type, Variant,
punctuated::Punctuated, token::Comma, Attribute, DeriveInput, Field, LitStr, Meta, Token, Type,
Variant,
};

macro_rules! assert_attribute {
@@ -77,82 +77,53 @@ pub fn parse_container_attributes(input: &[Attribute]) -> syn::Result<SqlxContainerAttributes> {
let mut no_pg_array = None;
let mut default = None;

for attr in input
.iter()
.filter(|a| a.path.is_ident("sqlx") || a.path.is_ident("repr"))
{
let meta = attr
.parse_meta()
.map_err(|e| syn::Error::new_spanned(attr, e))?;
match meta {
Meta::List(list) if list.path.is_ident("sqlx") => {
for value in list.nested.iter() {
match value {
NestedMeta::Meta(meta) => match meta {
Meta::Path(p) if p.is_ident("transparent") => {
try_set!(transparent, true, value)
}

Meta::Path(p) if p.is_ident("no_pg_array") => {
try_set!(no_pg_array, true, value);
}

Meta::NameValue(MetaNameValue {
path,
lit: Lit::Str(val),
..
}) if path.is_ident("rename_all") => {
let val = match &*val.value() {
"lowercase" => RenameAll::LowerCase,
"snake_case" => RenameAll::SnakeCase,
"UPPERCASE" => RenameAll::UpperCase,
"SCREAMING_SNAKE_CASE" => RenameAll::ScreamingSnakeCase,
"kebab-case" => RenameAll::KebabCase,
"camelCase" => RenameAll::CamelCase,
"PascalCase" => RenameAll::PascalCase,
_ => fail!(meta, "unexpected value for rename_all"),
};

try_set!(rename_all, val, value)
}

Meta::NameValue(MetaNameValue {
path,
lit: Lit::Str(val),
..
}) if path.is_ident("type_name") => {
try_set!(
type_name,
TypeName {
val: val.value(),
span: value.span(),
},
value
)
}

Meta::Path(p) if p.is_ident("default") => {
try_set!(default, true, value)
}

u => fail!(u, "unexpected attribute"),
},
u => fail!(u, "unexpected attribute"),
}
}
}
Meta::List(list) if list.path.is_ident("repr") => {
if list.nested.len() != 1 {
fail!(&list.nested, "expected one value")
}
match list.nested.first().unwrap() {
NestedMeta::Meta(Meta::Path(p)) if p.get_ident().is_some() => {
try_set!(repr, p.get_ident().unwrap().clone(), list);
}
u => fail!(u, "unexpected value"),
for attr in input {
if attr.path().is_ident("sqlx") {
attr.parse_nested_meta(|meta| {
if meta.path.is_ident("transparent") {
try_set!(transparent, true, attr);
} else if meta.path.is_ident("no_pg_array") {
try_set!(no_pg_array, true, attr);
} else if meta.path.is_ident("default") {
try_set!(default, true, attr);
} else if meta.path.is_ident("rename_all") {
meta.input.parse::<Token![=]>()?;
let lit: LitStr = meta.input.parse()?;

let val = match lit.value().as_str() {
"lowercase" => RenameAll::LowerCase,
"snake_case" => RenameAll::SnakeCase,
"UPPERCASE" => RenameAll::UpperCase,
"SCREAMING_SNAKE_CASE" => RenameAll::ScreamingSnakeCase,
"kebab-case" => RenameAll::KebabCase,
"camelCase" => RenameAll::CamelCase,
"PascalCase" => RenameAll::PascalCase,
_ => fail!(lit, "unexpected value for rename_all"),
};

try_set!(rename_all, val, lit)
} else if meta.path.is_ident("type_name") {
meta.input.parse::<Token![=]>()?;
let lit: LitStr = meta.input.parse()?;
let name = TypeName {
val: lit.value(),
span: lit.span(),
};

try_set!(type_name, name, lit)
} else {
fail!(meta.path, "unexpected attribute")
}

Ok(())
})?;
} else if attr.path().is_ident("repr") {
let list: Punctuated<Meta, Token![,]> =
attr.parse_args_with(<Punctuated<Meta, Token![,]>>::parse_terminated)?;

if let Some(path) = list.iter().find_map(|f| f.require_path_only().ok()) {
try_set!(repr, path.get_ident().unwrap().clone(), list);
}
_ => {}
}
}
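
The rewrite above replaces syn 1's Attribute::parse_meta / NestedMeta matching with syn 2's callback-based Attribute::parse_nested_meta. A self-contained sketch of that API, independent of sqlx (the attribute name and keys are made up for illustration):

use syn::{Attribute, LitStr};

// Hypothetical attribute: #[demo(flag, name = "value")]
fn parse_demo(attrs: &[Attribute]) -> syn::Result<(bool, Option<String>)> {
    let mut flag = false;
    let mut name = None;

    for attr in attrs.iter().filter(|a| a.path().is_ident("demo")) {
        attr.parse_nested_meta(|meta| {
            if meta.path.is_ident("flag") {
                // Bare path form: #[demo(flag)]
                flag = true;
            } else if meta.path.is_ident("name") {
                // Name-value form: #[demo(name = "...")]; `meta.value()?`
                // consumes the `=` and yields the value's parse stream.
                let lit: LitStr = meta.value()?.parse()?;
                name = Some(lit.value());
            } else {
                return Err(meta.error("unexpected attribute"));
            }
            Ok(())
        })?;
    }

    Ok((flag, name))
}

The commit consumes the `=` explicitly with meta.input.parse::<Token![=]>()? before parsing a LitStr; meta.value()? is the equivalent shorthand.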

@@ -174,35 +145,28 @@ pub fn parse_child_attributes(input: &[Attribute]) -> syn::Result<SqlxChildAttributes> {
let mut skip: bool = false;
let mut json = false;

for attr in input.iter().filter(|a| a.path.is_ident("sqlx")) {
let meta = attr
.parse_meta()
.map_err(|e| syn::Error::new_spanned(attr, e))?;

if let Meta::List(list) = meta {
for value in list.nested.iter() {
match value {
NestedMeta::Meta(meta) => match meta {
Meta::NameValue(MetaNameValue {
path,
lit: Lit::Str(val),
..
}) if path.is_ident("rename") => try_set!(rename, val.value(), value),
Meta::NameValue(MetaNameValue {
path,
lit: Lit::Str(val),
..
}) if path.is_ident("try_from") => try_set!(try_from, val.parse()?, value),
Meta::Path(path) if path.is_ident("default") => default = true,
Meta::Path(path) if path.is_ident("flatten") => flatten = true,
Meta::Path(path) if path.is_ident("skip") => skip = true,
Meta::Path(path) if path.is_ident("json") => json = true,
u => fail!(u, "unexpected attribute"),
},
u => fail!(u, "unexpected attribute"),
}
for attr in input.iter().filter(|a| a.path().is_ident("sqlx")) {
attr.parse_nested_meta(|meta| {
if meta.path.is_ident("rename") {
meta.input.parse::<Token![=]>()?;
let val: LitStr = meta.input.parse()?;
try_set!(rename, val.value(), val);
} else if meta.path.is_ident("try_from") {
meta.input.parse::<Token![=]>()?;
let val: LitStr = meta.input.parse()?;
try_set!(try_from, val.parse()?, val);
} else if meta.path.is_ident("default") {
default = true;
} else if meta.path.is_ident("flatten") {
flatten = true;
} else if meta.path.is_ident("skip") {
skip = true;
} else if meta.path.is_ident("json") {
json = true;
}
}

return Ok(());
})?;

if json && flatten {
fail!(
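For orientation, the container and field attributes parsed in this file are the ones users write on sqlx's derives; a representative (purely illustrative) input would be:

#[derive(sqlx::Type)]
#[sqlx(type_name = "account_status", rename_all = "snake_case")]
enum AccountStatus {
    Active,
    #[sqlx(rename = "inactive")]
    Disabled,
}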
