Skip to content

Commit

Permalink
Add skip_serializing_if
Browse files · Browse the repository at this point in the history
Signed-off-by: Heinz N. Gies <[email protected]>
  • Branch information:
Licenser committed Oct 10, 2023
1 parent 9d0ea0e commit a82ddde
Show file tree
Hide file tree
Showing 4 changed files with 304 additions and 85 deletions.
50 changes: 35 additions & 15 deletions simd-json-derive-int/src/args.rs
Original file line number Diff line number Diff line change
@@ -1,24 +1,38 @@
use proc_macro2::{Ident, Literal};
use simd_json::prelude::*;
use syn::parse::{Parse, ParseStream};
use simd_json::OwnedValue;
use syn::{
parse::{Parse, ParseStream},
LitStr, Path,
};
use syn::{Attribute, Field, Token};

#[derive(Debug)]
#[derive(Debug, Default)]
pub(crate) struct FieldAttrs {
rename: Option<String>,
skip_serializing_if: Option<Path>,
}

impl Parse for FieldAttrs {
fn parse(input: ParseStream) -> syn::Result<Self> {
let mut rename = None;
let mut attrs = FieldAttrs::default();

while !input.is_empty() {
let attr: Ident = input.parse()?;
match attr.to_string().as_str() {
"rename" => {
let _eqal_token: Token![=] = input.parse()?;
let name: Literal = input.parse()?;
let name: LitStr = input.parse()?;

rename = Some(name.to_string().trim_matches('"').to_string());
attrs.rename = Some(name.value());
}
"skip_serializing_if" => {
let _eqal_token: Token![=] = input.parse()?;
let function: LitStr = input.parse()?;

let path: Path = function.parse()?;

attrs.skip_serializing_if = Some(path);
}
"borrow" => (),
other => {
Expand All @@ -32,7 +46,7 @@ impl Parse for FieldAttrs {
let _comma_token: Token![,] = input.parse()?;
}
}
Ok(FieldAttrs { rename })
Ok(attrs)
}
}

Expand Down Expand Up @@ -147,24 +161,30 @@ impl StructAttrs {
self.deny_unknown_fields
}

pub(crate) fn name(&self, field: &Field) -> Option<String> {
pub(crate) fn skip_serializing_if(&self, field: &Field) -> Option<Path> {
get_attr(&field.attrs, "simd_json")
.or_else(|| get_attr(&field.attrs, "serde"))
.map(field_attrs)
.and_then(|a| a.skip_serializing_if)
}
pub(crate) fn name(&self, field: &Field) -> String {
if let Some(attr) = get_attr(&field.attrs, "simd_json")
.map(field_attrs)
.and_then(|a| a.rename)
{
Some(format!("{}:", simd_json::OwnedValue::from(attr).encode()))
format!("{}:", OwnedValue::from(attr).encode())
} else if let Some(attr) = get_attr(&field.attrs, "serde")
.map(field_attrs)
.and_then(|a| a.rename)
{
Some(format!("{}:", simd_json::OwnedValue::from(attr).encode()))
format!("{}:", OwnedValue::from(attr).encode())
} else {
field.ident.as_ref().map(|ident| {
format!(
"{}:",
simd_json::OwnedValue::from(self.rename_all.apply(&ident.to_string())).encode()
)
})
let f = field
.ident
.as_ref()
.expect("Field is missing ident")
.to_string();
format!("{}:", OwnedValue::from(self.rename_all.apply(&f)).encode())
}
}
}
35 changes: 15 additions & 20 deletions simd-json-derive-int/src/deserialize.rs
Original file line number Diff line number Diff line change
Expand Up @@ -23,7 +23,6 @@ fn derive_named_struct(
let mut options = Vec::new();
let mut ids = Vec::new();
let mut opt_ids = Vec::new();
let mut id: u64 = 0;
let mut all_needed: u64 = 0;
let deny_unknown_fields: bool = attrs.deny_unknown_fields();
let params = &generics.params;
Expand All @@ -32,7 +31,7 @@ fn derive_named_struct(
Some(GenericParam::Lifetime(lifetime)) => (quote! { <#params> }, quote! { #lifetime }),
Some(_) => (quote! { <'input, #params> }, quote! { 'input }),
};
for f in &fields {
for (id, f) in fields.iter().enumerate() {
let mut is_option = false;
if let Type::Path(TypePath {
path: Path { segments, .. },
Expand All @@ -44,25 +43,21 @@ fn derive_named_struct(
}
}

if let Some((name, ident)) = attrs
.name(f)
.and_then(|name| Some((name, f.ident.as_ref()?.clone())))
{
let name = name.trim_matches(':').trim_matches('"').to_string();
let bit = 1 << id;
id += 1;
if is_option {
options.push(ident.clone());
opt_ids.push(bit);
getters.push(quote! { #ident.and_then(::std::convert::identity) })
} else {
all_needed |= bit;
getters.push(quote! { #ident.expect(concat!("failed to get field ", #name)) })
}
keys.push(name);
values.push(ident);
ids.push(bit);
let ident = f.ident.clone().expect("Missing ident");
let name = attrs.name(f);
let name = name.trim_matches(':').trim_matches('"').to_string();
let bit = 1 << id;
if is_option {
options.push(ident.clone());
opt_ids.push(bit);
getters.push(quote! { #ident.and_then(::std::convert::identity) })
} else {
all_needed |= bit;
getters.push(quote! { #ident.expect(concat!("failed to get field ", #name)) })
}
keys.push(name);
values.push(ident);
ids.push(bit);
}

let expanded = quote! {
Expand Down
206 changes: 156 additions & 50 deletions simd-json-derive-int/src/serialize.rs
Original file line number Diff line number Diff line change
Expand Up @@ -62,42 +62,91 @@ fn derive_named_struct(
) -> proc_macro::TokenStream {
let mut keys = Vec::new();
let mut values = Vec::new();
let mut skip_if = Vec::new();

for f in &fields {
if let Some((name, ident)) = attrs
.name(f)
.and_then(|name| Some((name, f.ident.as_ref()?.clone())))
{
keys.push(name);
values.push(ident);
}
let ident = f.ident.clone().expect("Missing ident");
let name = attrs.name(f);
keys.push(name);
values.push(ident);
skip_if.push(attrs.skip_serializing_if(f));
}
let expanded = if skip_if.iter().all(Option::is_none) {
if let Some((first, rest)) = keys.split_first_mut() {
*first = format!("{{{}", first);
for r in rest {
*r = format!(",{}", r);
}
};

if let Some((first, rest)) = keys.split_first_mut() {
*first = format!("{{{}", first);
for r in rest {
*r = format!(",{}", r);
quote! {
impl #generics simd_json_derive::Serialize for #ident #generics {
#[inline]
fn json_write<W>(&self, writer: &mut W) -> std::io::Result<()>
where
W: std::io::Write {
#(
writer.write_all(#keys.as_bytes())?;
self.#values.json_write(writer)?;
)*
writer.write_all(b"}")
}
}
}
};

let expanded = quote! {
impl #generics simd_json_derive::Serialize for #ident #generics {
#[inline]
fn json_write<W>(&self, writer: &mut W) -> std::io::Result<()>
where
W: std::io::Write {
#(
writer.write_all(#keys.as_bytes())?;
self.#values.json_write(writer)?;
)*
writer.write_all(b"}")
} else {
let writes = keys
.iter()
.zip(values.iter())
.zip(skip_if.iter())
.map(|((k, v), s)| {
if let Some(s) = s {
quote! {
if !#s(&self.#v) {
if has_written_key {
writer.write_all(b",")?;
}
has_written_key = true;
writer.write_all(#k.as_bytes())?;
self.#v.json_write(writer)?;
}
}
} else {
quote! {
if has_written_key {
writer.write_all(b",")?;
}
has_written_key = true;
writer.write_all(#k.as_bytes())?;
self.#v.json_write(writer)?;
}
}
})
.collect::<Vec<_>>();
quote! {
impl #generics simd_json_derive::Serialize for #ident #generics {
#[inline]
fn json_write<W>(&self, writer: &mut W) -> std::io::Result<()>
where
W: std::io::Write {
writer.write_all(b"{")?;
let mut has_written_key = false;
#(
#writes
)*
writer.write_all(b"}")
}
}
}
};
TokenStream::from(expanded)
}

fn derive_enum(ident: Ident, data: DataEnum, generics: Generics) -> TokenStream {
fn derive_enum(
attrs: StructAttrs,
ident: Ident,
data: DataEnum,
generics: Generics,
) -> TokenStream {
let mut body_elements = Vec::new();
let variants = data.variants;
let (simple, variants): (Vec<_>, Vec<_>) =
Expand Down Expand Up @@ -222,34 +271,81 @@ fn derive_enum(ident: Ident, data: DataEnum, generics: Generics) -> TokenStream
let mut named_bodies = Vec::new();
for v in named {
let named_ident = &v.ident;
let fields: Vec<_> = v
.fields
.iter()
.cloned()
.map(|f| f.ident.expect("no field ident"))
.collect();
let (first, rest) = fields.split_first().expect("zero fields");
let mut keys = Vec::new();
let mut values = Vec::new();
let mut skip_if = Vec::new();

let start = format!(
"{{{}:{{{}:",
simd_json::OwnedValue::from(v.ident.to_string()).encode(),
simd_json::OwnedValue::from(first.to_string()).encode()
);
for f in &v.fields {
let name = attrs.name(f);
let ident = f.ident.clone().expect("Missing ident");
keys.push(name);
values.push(ident);
skip_if.push(attrs.skip_serializing_if(f));
}
let variant_name = simd_json::OwnedValue::from(v.ident.to_string()).encode();

let rest_keys = rest
.iter()
.map(|f| format!(",{}:", simd_json::OwnedValue::from(f.to_string()).encode()));
named_bodies.push(if skip_if.iter().all(Option::is_none) {
let (first_key, rest_keys) = keys.split_first().expect("zero fields");
let (first_value, rest_values) = values.split_first().expect("zero fields");

named_bodies.push(quote! {
#ident::#named_ident{#(#fields),*} => {
writer.write_all(#start.as_bytes())?;
#first.json_write(writer)?;
#(
writer.write_all(#rest_keys.as_bytes())?;
#rest.json_write(writer)?;
let start = format!("{{{variant_name}:{{{first_key}",);
let rest_keys = rest_keys
.iter()
.map(|k| format!(",{k}"))
.collect::<Vec<_>>();

)*
writer.write_all(b"}}")
quote! {
#ident::#named_ident{#(#values),*} => {
writer.write_all(#start.as_bytes())?;
#first_value.json_write(writer)?;
#(
writer.write_all(#rest_keys.as_bytes())?;
#rest_values.json_write(writer)?;

)*
writer.write_all(b"}}")
}
}
} else {
let writes = keys
.iter()
.zip(values.iter())
.zip(skip_if.iter())
.map(|((k, v), s)| {
if let Some(s) = s {
quote! {

if !#s(#v) {
if has_written_key {
writer.write_all(b",")?;
}
has_written_key = true;
writer.write_all(#k.as_bytes())?;
#v.json_write(writer)?;
}
}
} else {
quote! {
if has_written_key {
writer.write_all(b",")?;
}
has_written_key = true;
writer.write_all(#k.as_bytes())?;
#v.json_write(writer)?;
}
}
})
.collect::<Vec<_>>();
let prefix = format!("{{{variant_name}:{{");
quote! {
#ident::#named_ident{#(#values),*} => {
writer.write_all(#prefix.as_bytes())?;
let mut has_written_key = false;
#(
#writes
)*
writer.write_all(b"}}")
}
}
});
}
Expand Down Expand Up @@ -318,8 +414,18 @@ pub(crate) fn derive(input: TokenStream) -> TokenStream {
ident,
data: Data::Enum(data),
generics,
attrs,
..
} => derive_enum(ident, data, generics),
} => {
let attrs = if let Some(attrs) = get_attr(&attrs, "simd_json") {
struct_attrs(attrs)
} else if let Some(attrs) = get_attr(&attrs, "serde") {
struct_attrs(attrs)
} else {
StructAttrs::default()
};
derive_enum(attrs, ident, data, generics)
}
_ => TokenStream::from(quote! {}),
}
}
Loading

0 comments on commit a82ddde

Please sign in to comment.