Update syn/quote/proc-macro2 to 1.0
parent 505c369f27
commit 7d162a8fb5

4 changed files with 45 additions and 35 deletions
schemars/Cargo.toml
@@ -2,14 +2,14 @@
 name = "schemars"
 description = "Generate JSON Schemas from Rust code"
 repository = "https://github.com/GREsau/schemars"
-version = "0.1.9"
+version = "0.1.10"
 authors = ["Graham Esau <gesau@hotmail.co.uk>"]
 edition = "2018"
 license = "MIT"
 keywords = ["rust", "json-schema", "serde"]

 [dependencies]
-schemars_derive = { version = "0.1.9", path = "../schemars_derive" }
+schemars_derive = { version = "0.1.10", path = "../schemars_derive" }
 serde = { version = "1.0", features = ["derive"] }
 serde_json = "1.0"
 chrono = { version = "0.4", default-features = false, optional = true }
schemars_derive/Cargo.toml
@@ -2,7 +2,7 @@
 name = "schemars_derive"
 description = "Macros for #[derive(JsonSchema)], for use with schemars"
 repository = "https://github.com/GREsau/schemars"
-version = "0.1.9"
+version = "0.1.10"
 authors = ["Graham Esau <gesau@hotmail.co.uk>"]
 edition = "2018"
 license = "MIT"
@@ -12,10 +12,10 @@ keywords = ["rust", "json-schema", "serde"]
 proc-macro = true

 [dependencies]
-proc-macro2 = "0.4"
-quote = "0.6.13"
-syn = { version = "0.15.44", features = ["extra-traits"] }
-serde_derive_internals = "0.24.1"
+proc-macro2 = "1.0"
+quote = "1.0"
+syn = { version = "1.0", features = ["extra-traits"] }
+serde_derive_internals = "0.25"

 [dev-dependencies]
 pretty_assertions = "0.6.1"
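
Note: syn, quote, and proc-macro2 reached 1.0 together and are meant to be used in lockstep, and serde_derive_internals 0.25 is the matching release built against syn 1.0. For orientation, a minimal derive entry point on the 1.0 stack looks roughly like this (a sketch with hypothetical names, not code from this commit):

    extern crate proc_macro;

    use proc_macro2::TokenStream;
    use quote::quote;
    use syn::{parse_macro_input, DeriveInput};

    // `MySchema` and `derive_my_schema` are illustrative names only.
    #[proc_macro_derive(MySchema)]
    pub fn derive_my_schema(input: proc_macro::TokenStream) -> proc_macro::TokenStream {
        let input = parse_macro_input!(input as DeriveInput);
        let name = &input.ident;
        let expanded: TokenStream = quote! {
            impl #name {
                // generated items would go here
            }
        };
        expanded.into()
    }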
schemars_derive/src/lib.rs
@@ -6,10 +6,10 @@ extern crate proc_macro;

 mod preprocess;

-use proc_macro2::{Span, TokenStream};
+use proc_macro2::TokenStream;
 use quote::ToTokens;
 use serde_derive_internals::ast::{Container, Data, Field, Style, Variant};
-use serde_derive_internals::attr::{self, Default as SerdeDefault, EnumTag};
+use serde_derive_internals::attr::{self, Default as SerdeDefault, TagType};
 use serde_derive_internals::{Ctxt, Derive};
 use syn::spanned::Spanned;

@@ -19,14 +19,15 @@ pub fn derive_json_schema(input: proc_macro::TokenStream) -> proc_macro::TokenStream

     preprocess::add_trait_bounds(&mut input.generics);
     if let Err(e) = preprocess::process_serde_attrs(&mut input) {
-        return compile_error(input.span(), e).into();
+        return compile_error(e).into();
     }

     let ctxt = Ctxt::new();
     let cont = Container::from_ast(&ctxt, &input, Derive::Deserialize);
     if let Err(e) = ctxt.check() {
-        return compile_error(input.span(), e).into();
+        return compile_error(e).into();
     }
+    let cont = cont.expect("from_ast set no errors on Ctxt, so should have returned Some");

     let schema = match cont.data {
         Data::Struct(Style::Unit, _) => schema_for_unit_struct(),
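
Note: in serde_derive_internals 0.25, Container::from_ast returns an Option that is None only when errors were recorded on the Ctxt, which is why the expect can safely follow the check. A self-contained sketch of the same sequence (the helper name is hypothetical):

    use serde_derive_internals::{ast::Container, Ctxt, Derive};

    fn container_ident(input: &syn::DeriveInput) -> Result<String, Vec<syn::Error>> {
        let ctxt = Ctxt::new();
        let cont = Container::from_ast(&ctxt, input, Derive::Deserialize);
        ctxt.check()?; // Err carries every error collected during traversal
        Ok(cont.expect("no errors recorded, so Some").ident.to_string())
    }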
@@ -56,10 +57,8 @@ pub fn derive_json_schema(input: proc_macro::TokenStream) -> proc_macro::TokenStream
         for tp in &type_params {
             schema_name_fmt.push_str(&format!("{{{}:.0}}", tp));
         }
-        let fmt_param_names = &type_params;
-        let type_params = &type_params;
         quote! {
-            format!(#schema_name_fmt #(,#fmt_param_names=#type_params::schema_name())*)
+            format!(#schema_name_fmt #(,#type_params=#type_params::schema_name())*)
         }
     };

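
Note: the dropped fmt_param_names binding appears to have existed only because quote 0.6 did not support reusing one interpolation variable within a single #(...)* repetition; quote 1.0 does. A minimal illustration of the 1.0 behaviour (hypothetical helper, assuming the crate's schema_name convention):

    use proc_macro2::{Ident, TokenStream};
    use quote::quote;

    fn schema_name_args(type_params: &[Ident]) -> TokenStream {
        // `type_params` appears twice per iteration; fine under quote 1.0.
        quote! {
            #(#type_params = #type_params::schema_name()),*
        }
    }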
@@ -90,9 +89,10 @@ fn wrap_schema_fields(schema_contents: TokenStream) -> TokenStream {
     }
 }

-fn compile_error(span: Span, message: String) -> TokenStream {
-    quote_spanned! {span=>
-        compile_error!(#message);
+fn compile_error(errors: Vec<syn::Error>) -> TokenStream {
+    let compile_errors = errors.iter().map(syn::Error::to_compile_error);
+    quote! {
+        #(#compile_errors)*
     }
 }

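
Note: the new helper leans on syn::Error::to_compile_error, which turns each collected error into a compile_error! invocation at that error's own span, so rustc points at the offending input rather than at the derive call site. A minimal sketch:

    use proc_macro2::{Span, TokenStream};

    fn example() -> TokenStream {
        let err = syn::Error::new(Span::call_site(), "unsupported attribute");
        err.to_compile_error() // expands to: compile_error!("unsupported attribute");
    }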
@@ -106,10 +106,10 @@ fn is_unit_variant(v: &Variant) -> bool {
 fn schema_for_enum(variants: &[Variant], cattrs: &attr::Container) -> TokenStream {
     let variants = variants.iter().filter(|v| !v.attrs.skip_deserializing());
     match cattrs.tag() {
-        EnumTag::External => schema_for_external_tagged_enum(variants, cattrs),
-        EnumTag::None => schema_for_untagged_enum(variants, cattrs),
-        EnumTag::Internal { tag } => schema_for_internal_tagged_enum(variants, cattrs, tag),
-        EnumTag::Adjacent { .. } => unimplemented!("Adjacent tagged enums not yet supported."),
+        TagType::External => schema_for_external_tagged_enum(variants, cattrs),
+        TagType::None => schema_for_untagged_enum(variants, cattrs),
+        TagType::Internal { tag } => schema_for_internal_tagged_enum(variants, cattrs, tag),
+        TagType::Adjacent { .. } => unimplemented!("Adjacent tagged enums not yet supported."),
     }
 }

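
Note: only the enum's name changes here (serde_derive_internals 0.25 renamed attr::EnumTag to attr::TagType); the four arms still correspond to serde's enum representations. Illustrative types, not from this commit:

    use serde::Serialize;

    #[derive(Serialize)]
    enum External { A { x: i32 } }   // serializes as {"A":{"x":1}}

    #[derive(Serialize)]
    #[serde(tag = "type")]
    enum Internal { A { x: i32 } }   // {"type":"A","x":1}

    #[derive(Serialize)]
    #[serde(tag = "t", content = "c")]
    enum Adjacent { A { x: i32 } }   // {"t":"A","c":{"x":1}}

    #[derive(Serialize)]
    #[serde(untagged)]
    enum Untagged { A { x: i32 } }   // {"x":1}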
@@ -329,7 +329,7 @@ fn without_last_element(path: Option<&syn::ExprPath>, last: &str) -> Option<syn::ExprPath>
             .path
             .segments
             .last()
-            .map(|p| p.value().ident == last)
+            .map(|p| p.ident == last)
             .unwrap_or(false) =>
         {
             let mut expr_path = expr_path.clone();
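
Note: in syn 0.15, Punctuated::last() returned an Option of a punctuated::Pair wrapper, so the element had to be unwrapped with .value(); syn 1.0 returns Option<&T> directly. A sketch of the new form (hypothetical helper):

    use syn::{punctuated::Punctuated, PathSegment, Token};

    fn last_segment_is(segments: &Punctuated<PathSegment, Token![::]>, name: &str) -> bool {
        segments
            .last()                       // syn 1.0: Option<&PathSegment>
            .map(|seg| seg.ident == name) // Ident compares directly against &str
            .unwrap_or(false)
    }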
schemars_derive/src/preprocess.rs
@@ -16,7 +16,7 @@ pub fn add_trait_bounds(generics: &mut Generics) {

 // If a struct/variant/field has any #[schemars] attributes, then rename them
 // to #[serde] so that serde_derive_internals will parse them for us.
-pub fn process_serde_attrs(input: &mut DeriveInput) -> Result<(), String> {
+pub fn process_serde_attrs(input: &mut DeriveInput) -> Result<(), Vec<syn::Error>> {
     let ctxt = Ctxt::new();
     process_attrs(&ctxt, &mut input.attrs);
     match input.data {
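
Note: the error type changes from an opaque String to Vec<syn::Error>, so each failure keeps its own span, and callers can thread the result straight into the compile_error helper in lib.rs. A hypothetical caller for illustration:

    fn renamed(mut input: syn::DeriveInput) -> Result<syn::DeriveInput, Vec<syn::Error>> {
        process_serde_attrs(&mut input)?; // each syn::Error carries its span
        Ok(input)
    }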
@@ -63,7 +63,6 @@ fn process_attrs(ctxt: &Ctxt, attrs: &mut Vec<Attribute>) {
         .flat_map(|attr| get_meta_items(&ctxt, attr))
         .flatten()
         .flat_map(|m| get_meta_ident(&ctxt, &m))
-        .map(|i| i.to_string())
         .collect();
     if schemars_meta_names.contains("with") {
         schemars_meta_names.insert("serialize_with".to_string());
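
Note: the .map(|i| i.to_string()) stage disappears because get_meta_ident now returns a String itself (see the next hunk). The surrounding .flat_map over a Result works because Result implements IntoIterator over its Ok value, so Err(()) entries are silently skipped:

    // self-contained illustration of the flat_map-over-Result trick
    fn kept_names() -> Vec<String> {
        let results: Vec<Result<String, ()>> = vec![Ok("with".into()), Err(())];
        results.into_iter().flat_map(|r| r).collect() // ["with"]
    }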
@@ -87,32 +86,43 @@ fn process_attrs(ctxt: &Ctxt, attrs: &mut Vec<Attribute>) {
     let parser = Attribute::parse_outer;
     match parser.parse2(new_serde_attr) {
         Ok(ref mut parsed) => attrs.append(parsed),
-        Err(e) => ctxt.error(e),
+        Err(e) => ctxt.error_spanned_by(to_tokens(attrs), e),
     }
 }

+fn to_tokens(attrs: &[Attribute]) -> impl ToTokens {
+    let mut tokens = proc_macro2::TokenStream::new();
+    for attr in attrs {
+        attr.to_tokens(&mut tokens);
+    }
+    tokens
+}
+
 fn get_meta_items(ctxt: &Ctxt, attr: &Attribute) -> Result<Vec<NestedMeta>, ()> {
     match attr.parse_meta() {
         Ok(Meta::List(meta)) => Ok(meta.nested.into_iter().collect()),
         Ok(_) => {
-            ctxt.error("expected #[schemars(...)] or #[serde(...)]");
+            ctxt.error_spanned_by(attr, "expected #[schemars(...)] or #[serde(...)]");
             Err(())
         }
         Err(err) => {
-            ctxt.error(err);
+            ctxt.error_spanned_by(attr, err);
             Err(())
         }
     }
 }

-fn get_meta_ident(ctxt: &Ctxt, meta: &NestedMeta) -> Result<Ident, ()> {
+fn get_meta_ident(ctxt: &Ctxt, meta: &NestedMeta) -> Result<String, ()> {
     match meta {
-        NestedMeta::Meta(m) => Ok(m.name()),
-        NestedMeta::Literal(lit) => {
-            ctxt.error(format!(
+        NestedMeta::Meta(m) => m.path().get_ident().map(|i| i.to_string()).ok_or(()),
+        NestedMeta::Lit(lit) => {
+            ctxt.error_spanned_by(
+                meta,
+                format!(
                     "unexpected literal in attribute: {}",
                     lit.into_token_stream()
-            ));
+                ),
+            );
             Err(())
         }
     }
 }
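
Note: two API shifts drive this hunk. serde_derive_internals 0.25 replaced Ctxt::error with error_spanned_by(tokens, message), and syn 1.0 replaced Meta::name() (which returned an Ident) with Meta::path(), whose &syn::Path may have several segments, hence the fallible get_ident(); NestedMeta::Literal was also renamed to NestedMeta::Lit. A minimal sketch of the new lookup (hypothetical helper):

    use syn::NestedMeta;

    fn meta_name(meta: &NestedMeta) -> Option<String> {
        match meta {
            NestedMeta::Meta(m) => m.path().get_ident().map(|i| i.to_string()),
            NestedMeta::Lit(_) => None, // literals have no name
        }
    }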
@@ -156,7 +166,7 @@ mod tests {
     };

     if let Err(e) = process_serde_attrs(&mut input) {
-        panic!("process_serde_attrs returned error: {}", e)
+        panic!("process_serde_attrs returned error: {}", e[0])
     };

     assert_eq!(input, expected);