Fix the RPM build (reverting CI changes that will need to be un-reverted or made conditional) and vendor Rust dependencies to make builds much faster in any CI system.

Adam Ierymenko
2022-06-08 07:32:16 -04:00
parent 373ca30269
commit d5ca4e5f52
12611 changed files with 2898014 additions and 284 deletions
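For context on the vendoring half of this commit: `cargo vendor` copies the source of every dependency (each with a `.cargo-checksum.json`, like the one in the first file below) into a directory inside the repository, and a small Cargo config then redirects crates.io lookups to that directory so CI builds need no network access. The snippet below is a minimal sketch of such a config, assuming it lives in `.cargo/config.toml` and points at the `zeroidc/vendor/` tree added here; the exact file this project uses may differ.

```toml
# Hypothetical .cargo/config.toml sketch (not shown in this diff): replace the
# crates.io source with the checked-in vendor directory so `cargo build` (or
# `cargo build --offline`) resolves dependencies without touching the network.
[source.crates-io]
replace-with = "vendored-sources"

[source.vendored-sources]
directory = "zeroidc/vendor"
```

With a config like this in place, rebuilds reuse the committed sources, and the per-crate checksum files are how Cargo verifies the vendored code has not been modified.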


@@ -0,0 +1 @@
{"files":{"Cargo.toml":"7ace83e5997487c7eb23f82a2b0d65d2fa7d83f2f9024aa5bc047eb89a50ab92","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"23f18e03dc49df91622fe2a76176497404e46ced8a715d9d2b67a7446571cca3","README.md":"7049b88cb7366be0c8e593f3ffffa313803a5b382f35686c542b4a0da3b291f3","build.rs":"d0fc2047a792e90b95df7be8d1e75732b231b3a60cb481feebfca46b7edb1209","crates-io.md":"5c42406936cf9af6d4cd7fe0ac730c609e82fd3f15a54549518c72d0ded70c29","src/bound.rs":"268b4995a5d0a129dcbd6e32ef11f587bd271df3f6c4f7230ed54bc99f5ce871","src/de.rs":"c5a41016ce15f8176a2d7a8445ba06d2eb8de0863c1fea0dab51c395dd7dccff","src/dummy.rs":"ad78556876053e74be976e91032200666ffbeeb6f7e92f3a7a8463fea1f60ac5","src/fragment.rs":"5548ba65a53d90a296f60c1328a7a7fb040db467f59c2f5210b2fb320457145d","src/internals/ast.rs":"b019865eef92c1ddbb9029423ac22179f132dc655a51c09fb2a42f4aaef172fd","src/internals/attr.rs":"778074380c4e353b77e03aff9edf15fda9e15a0e7ec25cdfc51d79a26636ddef","src/internals/case.rs":"9492f0c5142d7b7e8cd39c86d13a855e5ce4489425adb2b96aed89e1b7851ac0","src/internals/check.rs":"11ea94257d2a2ee2276938a6beb4ae11b74c39225c1e342e6df1e7d2b2924496","src/internals/ctxt.rs":"6fa544ae52914498a62a395818ebdc1b36ac2fb5903c60afb741a864ad559f1c","src/internals/mod.rs":"f32138ff19d57eb00f88ba11f6b015efab2102657804f71ebbf386a3698dad91","src/internals/receiver.rs":"cd125ba4a3dd6250ed4737555c58627bffd630a536cd7223068eed7c10a170d8","src/internals/respan.rs":"899753859c58ce5f532a3ec4584796a52f13ed5a0533191e48c953ba5c1b52ff","src/internals/symbol.rs":"3c9ce461773b7df3bb64d82aa5a0d93052c3bb0e60209db6c0b5c10ee9cfc9cf","src/lib.rs":"e286e6ff8653bc06107ffb5f6cd6d94002b609aae133b6ef77c9ccb45e5cb08a","src/pretend.rs":"73fe121ced5804e77d37512bd2c7548be249dcab3eeb0bad59f82e64188f9ace","src/ser.rs":"0d99c841f6c7bc9751ab225fe42d1f8b7fe56e36903efcb4ff10bf6e35c390ba","src/try.rs":"b171b0088c23ebf4bfa07ba457881b41ac5e547d55dd16f737ea988d34badf61"},"package":"1f26faba0c3959972377d3b2d306ee9f71faee9714294e41bb777f83f88578be"}

zeroidc/vendor/serde_derive/Cargo.toml vendored Normal file (+61)

@@ -0,0 +1,61 @@
# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO
#
# When uploading crates to the registry Cargo will automatically
# "normalize" Cargo.toml files for maximal compatibility
# with all versions of Cargo and also rewrite `path` dependencies
# to registry (e.g., crates.io) dependencies.
#
# If you are reading this file be aware that the original Cargo.toml
# will likely look very different (and much more reasonable).
# See Cargo.toml.orig for the original contents.
[package]
rust-version = "1.31"
name = "serde_derive"
version = "1.0.137"
authors = [
"Erick Tryzelaar <erick.tryzelaar@gmail.com>",
"David Tolnay <dtolnay@gmail.com>",
]
include = [
"build.rs",
"src/**/*.rs",
"crates-io.md",
"README.md",
"LICENSE-APACHE",
"LICENSE-MIT",
]
description = "Macros 1.1 implementation of #[derive(Serialize, Deserialize)]"
homepage = "https://serde.rs"
documentation = "https://serde.rs/derive.html"
readme = "crates-io.md"
keywords = [
"serde",
"serialization",
"no_std",
]
license = "MIT OR Apache-2.0"
repository = "https://github.com/serde-rs/serde"
[package.metadata.docs.rs]
targets = ["x86_64-unknown-linux-gnu"]
[lib]
name = "serde_derive"
proc-macro = true
[dependencies.proc-macro2]
version = "1.0"
[dependencies.quote]
version = "1.0"
[dependencies.syn]
version = "1.0.90"
[dev-dependencies.serde]
version = "1.0"
[features]
default = []
deserialize_in_place = []


@@ -0,0 +1,201 @@
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "[]"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright [yyyy] [name of copyright owner]
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

zeroidc/vendor/serde_derive/LICENSE-MIT vendored Normal file (+23)

@@ -0,0 +1,23 @@
Permission is hereby granted, free of charge, to any
person obtaining a copy of this software and associated
documentation files (the "Software"), to deal in the
Software without restriction, including without
limitation the rights to use, copy, modify, merge,
publish, distribute, sublicense, and/or sell copies of
the Software, and to permit persons to whom the Software
is furnished to do so, subject to the following
conditions:
The above copyright notice and this permission notice
shall be included in all copies or substantial portions
of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF
ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.

zeroidc/vendor/serde_derive/README.md vendored Normal file (+114)

@@ -0,0 +1,114 @@
# Serde &emsp; [![Build Status]][actions] [![Latest Version]][crates.io] [![serde: rustc 1.13+]][Rust 1.13] [![serde_derive: rustc 1.31+]][Rust 1.31]
[Build Status]: https://img.shields.io/github/workflow/status/serde-rs/serde/CI/master
[actions]: https://github.com/serde-rs/serde/actions?query=branch%3Amaster
[Latest Version]: https://img.shields.io/crates/v/serde.svg
[crates.io]: https://crates.io/crates/serde
[serde: rustc 1.13+]: https://img.shields.io/badge/serde-rustc_1.13+-lightgray.svg
[serde_derive: rustc 1.31+]: https://img.shields.io/badge/serde_derive-rustc_1.31+-lightgray.svg
[Rust 1.13]: https://blog.rust-lang.org/2016/11/10/Rust-1.13.html
[Rust 1.31]: https://blog.rust-lang.org/2018/12/06/Rust-1.31-and-rust-2018.html
**Serde is a framework for *ser*ializing and *de*serializing Rust data structures efficiently and generically.**
---
You may be looking for:
- [An overview of Serde](https://serde.rs/)
- [Data formats supported by Serde](https://serde.rs/#data-formats)
- [Setting up `#[derive(Serialize, Deserialize)]`](https://serde.rs/derive.html)
- [Examples](https://serde.rs/examples.html)
- [API documentation](https://docs.serde.rs/serde/)
- [Release notes](https://github.com/serde-rs/serde/releases)
## Serde in action
<details>
<summary>
Click to show Cargo.toml.
<a href="https://play.rust-lang.org/?edition=2018&gist=72755f28f99afc95e01d63174b28c1f5" target="_blank">Run this code in the playground.</a>
</summary>
```toml
[dependencies]
# The core APIs, including the Serialize and Deserialize traits. Always
# required when using Serde. The "derive" feature is only required when
# using #[derive(Serialize, Deserialize)] to make Serde work with structs
# and enums defined in your crate.
serde = { version = "1.0", features = ["derive"] }
# Each data format lives in its own crate; the sample code below uses JSON
# but you may be using a different one.
serde_json = "1.0"
```
</details>
<p></p>
```rust
use serde::{Serialize, Deserialize};
#[derive(Serialize, Deserialize, Debug)]
struct Point {
x: i32,
y: i32,
}
fn main() {
let point = Point { x: 1, y: 2 };
// Convert the Point to a JSON string.
let serialized = serde_json::to_string(&point).unwrap();
// Prints serialized = {"x":1,"y":2}
println!("serialized = {}", serialized);
// Convert the JSON string back to a Point.
let deserialized: Point = serde_json::from_str(&serialized).unwrap();
// Prints deserialized = Point { x: 1, y: 2 }
println!("deserialized = {:?}", deserialized);
}
```
## Getting help
Serde is one of the most widely used Rust libraries so any place that Rustaceans
congregate will be able to help you out. For chat, consider trying the
[#rust-questions] or [#rust-beginners] channels of the unofficial community
Discord (invite: <https://discord.gg/rust-lang-community>), the [#rust-usage] or
[#beginners] channels of the official Rust Project Discord (invite:
<https://discord.gg/rust-lang>), or the [#general][zulip] stream in Zulip. For
asynchronous, consider the [\[rust\] tag on StackOverflow][stackoverflow], the
[/r/rust] subreddit which has a pinned weekly easy questions post, or the Rust
[Discourse forum][discourse]. It's acceptable to file a support issue in this
repo but they tend not to get as many eyes as any of the above and may get
closed without a response after some time.
[#rust-questions]: https://discord.com/channels/273534239310479360/274215136414400513
[#rust-beginners]: https://discord.com/channels/273534239310479360/273541522815713281
[#rust-usage]: https://discord.com/channels/442252698964721669/443150878111694848
[#beginners]: https://discord.com/channels/442252698964721669/448238009733742612
[zulip]: https://rust-lang.zulipchat.com/#narrow/stream/122651-general
[stackoverflow]: https://stackoverflow.com/questions/tagged/rust
[/r/rust]: https://www.reddit.com/r/rust
[discourse]: https://users.rust-lang.org
<br>
#### License
<sup>
Licensed under either of <a href="LICENSE-APACHE">Apache License, Version
2.0</a> or <a href="LICENSE-MIT">MIT license</a> at your option.
</sup>
<br>
<sub>
Unless you explicitly state otherwise, any contribution intentionally submitted
for inclusion in Serde by you, as defined in the Apache-2.0 license, shall be
dual licensed as above, without any additional terms or conditions.
</sub>

zeroidc/vendor/serde_derive/build.rs vendored Normal file (+36)

@@ -0,0 +1,36 @@
use std::env;
use std::process::Command;
use std::str;
// The rustc-cfg strings below are *not* public API. Please let us know by
// opening a GitHub issue if your build environment requires some way to enable
// these cfgs other than by executing our build script.
fn main() {
let minor = match rustc_minor_version() {
Some(minor) => minor,
None => return,
};
// Underscore const names stabilized in Rust 1.37:
// https://blog.rust-lang.org/2019/08/15/Rust-1.37.0.html#using-unnamed-const-items-for-macros
if minor >= 37 {
println!("cargo:rustc-cfg=underscore_consts");
}
// The ptr::addr_of! macro stabilized in Rust 1.51:
// https://blog.rust-lang.org/2021/03/25/Rust-1.51.0.html#stabilized-apis
if minor >= 51 {
println!("cargo:rustc-cfg=ptr_addr_of");
}
}
fn rustc_minor_version() -> Option<u32> {
let rustc = env::var_os("RUSTC")?;
let output = Command::new(rustc).arg("--version").output().ok()?;
let version = str::from_utf8(&output.stdout).ok()?;
let mut pieces = version.split('.');
if pieces.next() != Some("rustc 1") {
return None;
}
pieces.next()?.parse().ok()
}


@@ -0,0 +1,65 @@
<!-- Serde readme rendered on crates.io -->
**Serde is a framework for *ser*ializing and *de*serializing Rust data structures efficiently and generically.**
---
You may be looking for:
- [An overview of Serde](https://serde.rs/)
- [Data formats supported by Serde](https://serde.rs/#data-formats)
- [Setting up `#[derive(Serialize, Deserialize)]`](https://serde.rs/derive.html)
- [Examples](https://serde.rs/examples.html)
- [API documentation](https://docs.serde.rs/serde/)
- [Release notes](https://github.com/serde-rs/serde/releases)
## Serde in action
```rust
use serde::{Serialize, Deserialize};
#[derive(Serialize, Deserialize, Debug)]
struct Point {
x: i32,
y: i32,
}
fn main() {
let point = Point { x: 1, y: 2 };
// Convert the Point to a JSON string.
let serialized = serde_json::to_string(&point).unwrap();
// Prints serialized = {"x":1,"y":2}
println!("serialized = {}", serialized);
// Convert the JSON string back to a Point.
let deserialized: Point = serde_json::from_str(&serialized).unwrap();
// Prints deserialized = Point { x: 1, y: 2 }
println!("deserialized = {:?}", deserialized);
}
```
## Getting help
Serde is one of the most widely used Rust libraries so any place that Rustaceans
congregate will be able to help you out. For chat, consider trying the
[#rust-questions] or [#rust-beginners] channels of the unofficial community
Discord (invite: <https://discord.gg/rust-lang-community>), the [#rust-usage] or
[#beginners] channels of the official Rust Project Discord (invite:
<https://discord.gg/rust-lang>), or the [#general][zulip] stream in Zulip. For
asynchronous, consider the [\[rust\] tag on StackOverflow][stackoverflow], the
[/r/rust] subreddit which has a pinned weekly easy questions post, or the Rust
[Discourse forum][discourse]. It's acceptable to file a support issue in this
repo but they tend not to get as many eyes as any of the above and may get
closed without a response after some time.
[#rust-questions]: https://discord.com/channels/273534239310479360/274215136414400513
[#rust-beginners]: https://discord.com/channels/273534239310479360/273541522815713281
[#rust-usage]: https://discord.com/channels/442252698964721669/443150878111694848
[#beginners]: https://discord.com/channels/442252698964721669/448238009733742612
[zulip]: https://rust-lang.zulipchat.com/#narrow/stream/122651-general
[stackoverflow]: https://stackoverflow.com/questions/tagged/rust
[/r/rust]: https://www.reddit.com/r/rust
[discourse]: https://users.rust-lang.org

zeroidc/vendor/serde_derive/src/bound.rs vendored Normal file (+406)

@@ -0,0 +1,406 @@
use std::collections::HashSet;
use syn;
use syn::punctuated::{Pair, Punctuated};
use internals::ast::{Container, Data};
use internals::{attr, ungroup};
use proc_macro2::Span;
// Remove the default from every type parameter because in the generated impls
// they look like associated types: "error: associated type bindings are not
// allowed here".
pub fn without_defaults(generics: &syn::Generics) -> syn::Generics {
syn::Generics {
params: generics
.params
.iter()
.map(|param| match param {
syn::GenericParam::Type(param) => syn::GenericParam::Type(syn::TypeParam {
eq_token: None,
default: None,
..param.clone()
}),
_ => param.clone(),
})
.collect(),
..generics.clone()
}
}
pub fn with_where_predicates(
generics: &syn::Generics,
predicates: &[syn::WherePredicate],
) -> syn::Generics {
let mut generics = generics.clone();
generics
.make_where_clause()
.predicates
.extend(predicates.iter().cloned());
generics
}
pub fn with_where_predicates_from_fields(
cont: &Container,
generics: &syn::Generics,
from_field: fn(&attr::Field) -> Option<&[syn::WherePredicate]>,
) -> syn::Generics {
let predicates = cont
.data
.all_fields()
.filter_map(|field| from_field(&field.attrs))
.flat_map(<[syn::WherePredicate]>::to_vec);
let mut generics = generics.clone();
generics.make_where_clause().predicates.extend(predicates);
generics
}
pub fn with_where_predicates_from_variants(
cont: &Container,
generics: &syn::Generics,
from_variant: fn(&attr::Variant) -> Option<&[syn::WherePredicate]>,
) -> syn::Generics {
let variants = match &cont.data {
Data::Enum(variants) => variants,
Data::Struct(_, _) => {
return generics.clone();
}
};
let predicates = variants
.iter()
.filter_map(|variant| from_variant(&variant.attrs))
.flat_map(<[syn::WherePredicate]>::to_vec);
let mut generics = generics.clone();
generics.make_where_clause().predicates.extend(predicates);
generics
}
// Puts the given bound on any generic type parameters that are used in fields
// for which filter returns true.
//
// For example, the following struct needs the bound `A: Serialize, B:
// Serialize`.
//
// struct S<'b, A, B: 'b, C> {
// a: A,
// b: Option<&'b B>
// #[serde(skip_serializing)]
// c: C,
// }
pub fn with_bound(
cont: &Container,
generics: &syn::Generics,
filter: fn(&attr::Field, Option<&attr::Variant>) -> bool,
bound: &syn::Path,
) -> syn::Generics {
struct FindTyParams<'ast> {
// Set of all generic type parameters on the current struct (A, B, C in
// the example). Initialized up front.
all_type_params: HashSet<syn::Ident>,
// Set of generic type parameters used in fields for which filter
// returns true (A and B in the example). Filled in as the visitor sees
// them.
relevant_type_params: HashSet<syn::Ident>,
// Fields whose type is an associated type of one of the generic type
// parameters.
associated_type_usage: Vec<&'ast syn::TypePath>,
}
impl<'ast> FindTyParams<'ast> {
fn visit_field(&mut self, field: &'ast syn::Field) {
if let syn::Type::Path(ty) = ungroup(&field.ty) {
if let Some(Pair::Punctuated(t, _)) = ty.path.segments.pairs().next() {
if self.all_type_params.contains(&t.ident) {
self.associated_type_usage.push(ty);
}
}
}
self.visit_type(&field.ty);
}
fn visit_path(&mut self, path: &'ast syn::Path) {
if let Some(seg) = path.segments.last() {
if seg.ident == "PhantomData" {
// Hardcoded exception, because PhantomData<T> implements
// Serialize and Deserialize whether or not T implements it.
return;
}
}
if path.leading_colon.is_none() && path.segments.len() == 1 {
let id = &path.segments[0].ident;
if self.all_type_params.contains(id) {
self.relevant_type_params.insert(id.clone());
}
}
for segment in &path.segments {
self.visit_path_segment(segment);
}
}
// Everything below is simply traversing the syntax tree.
fn visit_type(&mut self, ty: &'ast syn::Type) {
match ty {
syn::Type::Array(ty) => self.visit_type(&ty.elem),
syn::Type::BareFn(ty) => {
for arg in &ty.inputs {
self.visit_type(&arg.ty);
}
self.visit_return_type(&ty.output);
}
syn::Type::Group(ty) => self.visit_type(&ty.elem),
syn::Type::ImplTrait(ty) => {
for bound in &ty.bounds {
self.visit_type_param_bound(bound);
}
}
syn::Type::Macro(ty) => self.visit_macro(&ty.mac),
syn::Type::Paren(ty) => self.visit_type(&ty.elem),
syn::Type::Path(ty) => {
if let Some(qself) = &ty.qself {
self.visit_type(&qself.ty);
}
self.visit_path(&ty.path);
}
syn::Type::Ptr(ty) => self.visit_type(&ty.elem),
syn::Type::Reference(ty) => self.visit_type(&ty.elem),
syn::Type::Slice(ty) => self.visit_type(&ty.elem),
syn::Type::TraitObject(ty) => {
for bound in &ty.bounds {
self.visit_type_param_bound(bound);
}
}
syn::Type::Tuple(ty) => {
for elem in &ty.elems {
self.visit_type(elem);
}
}
syn::Type::Infer(_) | syn::Type::Never(_) | syn::Type::Verbatim(_) => {}
#[cfg_attr(all(test, exhaustive), deny(non_exhaustive_omitted_patterns))]
_ => {}
}
}
fn visit_path_segment(&mut self, segment: &'ast syn::PathSegment) {
self.visit_path_arguments(&segment.arguments);
}
fn visit_path_arguments(&mut self, arguments: &'ast syn::PathArguments) {
match arguments {
syn::PathArguments::None => {}
syn::PathArguments::AngleBracketed(arguments) => {
for arg in &arguments.args {
match arg {
syn::GenericArgument::Type(arg) => self.visit_type(arg),
syn::GenericArgument::Binding(arg) => self.visit_type(&arg.ty),
syn::GenericArgument::Lifetime(_)
| syn::GenericArgument::Constraint(_)
| syn::GenericArgument::Const(_) => {}
}
}
}
syn::PathArguments::Parenthesized(arguments) => {
for argument in &arguments.inputs {
self.visit_type(argument);
}
self.visit_return_type(&arguments.output);
}
}
}
fn visit_return_type(&mut self, return_type: &'ast syn::ReturnType) {
match return_type {
syn::ReturnType::Default => {}
syn::ReturnType::Type(_, output) => self.visit_type(output),
}
}
fn visit_type_param_bound(&mut self, bound: &'ast syn::TypeParamBound) {
match bound {
syn::TypeParamBound::Trait(bound) => self.visit_path(&bound.path),
syn::TypeParamBound::Lifetime(_) => {}
}
}
// Type parameter should not be considered used by a macro path.
//
// struct TypeMacro<T> {
// mac: T!(),
// marker: PhantomData<T>,
// }
fn visit_macro(&mut self, _mac: &'ast syn::Macro) {}
}
let all_type_params = generics
.type_params()
.map(|param| param.ident.clone())
.collect();
let mut visitor = FindTyParams {
all_type_params,
relevant_type_params: HashSet::new(),
associated_type_usage: Vec::new(),
};
match &cont.data {
Data::Enum(variants) => {
for variant in variants.iter() {
let relevant_fields = variant
.fields
.iter()
.filter(|field| filter(&field.attrs, Some(&variant.attrs)));
for field in relevant_fields {
visitor.visit_field(field.original);
}
}
}
Data::Struct(_, fields) => {
for field in fields.iter().filter(|field| filter(&field.attrs, None)) {
visitor.visit_field(field.original);
}
}
}
let relevant_type_params = visitor.relevant_type_params;
let associated_type_usage = visitor.associated_type_usage;
let new_predicates = generics
.type_params()
.map(|param| param.ident.clone())
.filter(|id| relevant_type_params.contains(id))
.map(|id| syn::TypePath {
qself: None,
path: id.into(),
})
.chain(associated_type_usage.into_iter().cloned())
.map(|bounded_ty| {
syn::WherePredicate::Type(syn::PredicateType {
lifetimes: None,
// the type parameter that is being bounded e.g. T
bounded_ty: syn::Type::Path(bounded_ty),
colon_token: <Token![:]>::default(),
// the bound e.g. Serialize
bounds: vec![syn::TypeParamBound::Trait(syn::TraitBound {
paren_token: None,
modifier: syn::TraitBoundModifier::None,
lifetimes: None,
path: bound.clone(),
})]
.into_iter()
.collect(),
})
});
let mut generics = generics.clone();
generics
.make_where_clause()
.predicates
.extend(new_predicates);
generics
}
pub fn with_self_bound(
cont: &Container,
generics: &syn::Generics,
bound: &syn::Path,
) -> syn::Generics {
let mut generics = generics.clone();
generics
.make_where_clause()
.predicates
.push(syn::WherePredicate::Type(syn::PredicateType {
lifetimes: None,
// the type that is being bounded e.g. MyStruct<'a, T>
bounded_ty: type_of_item(cont),
colon_token: <Token![:]>::default(),
// the bound e.g. Default
bounds: vec![syn::TypeParamBound::Trait(syn::TraitBound {
paren_token: None,
modifier: syn::TraitBoundModifier::None,
lifetimes: None,
path: bound.clone(),
})]
.into_iter()
.collect(),
}));
generics
}
pub fn with_lifetime_bound(generics: &syn::Generics, lifetime: &str) -> syn::Generics {
let bound = syn::Lifetime::new(lifetime, Span::call_site());
let def = syn::LifetimeDef {
attrs: Vec::new(),
lifetime: bound.clone(),
colon_token: None,
bounds: Punctuated::new(),
};
let params = Some(syn::GenericParam::Lifetime(def))
.into_iter()
.chain(generics.params.iter().cloned().map(|mut param| {
match &mut param {
syn::GenericParam::Lifetime(param) => {
param.bounds.push(bound.clone());
}
syn::GenericParam::Type(param) => {
param
.bounds
.push(syn::TypeParamBound::Lifetime(bound.clone()));
}
syn::GenericParam::Const(_) => {}
}
param
}))
.collect();
syn::Generics {
params,
..generics.clone()
}
}
fn type_of_item(cont: &Container) -> syn::Type {
syn::Type::Path(syn::TypePath {
qself: None,
path: syn::Path {
leading_colon: None,
segments: vec![syn::PathSegment {
ident: cont.ident.clone(),
arguments: syn::PathArguments::AngleBracketed(
syn::AngleBracketedGenericArguments {
colon2_token: None,
lt_token: <Token![<]>::default(),
args: cont
.generics
.params
.iter()
.map(|param| match param {
syn::GenericParam::Type(param) => {
syn::GenericArgument::Type(syn::Type::Path(syn::TypePath {
qself: None,
path: param.ident.clone().into(),
}))
}
syn::GenericParam::Lifetime(param) => {
syn::GenericArgument::Lifetime(param.lifetime.clone())
}
syn::GenericParam::Const(_) => {
panic!("Serde does not support const generics yet");
}
})
.collect(),
gt_token: <Token![>]>::default(),
},
),
}]
.into_iter()
.collect(),
},
})
}

zeroidc/vendor/serde_derive/src/de.rs vendored Normal file (+3132)

File diff suppressed because it is too large.


@@ -0,0 +1,48 @@
use proc_macro2::{Ident, TokenStream};
use quote::format_ident;
use syn;
use try;
pub fn wrap_in_const(
serde_path: Option<&syn::Path>,
trait_: &str,
ty: &Ident,
code: TokenStream,
) -> TokenStream {
let try_replacement = try::replacement();
let dummy_const = if cfg!(underscore_consts) {
format_ident!("_")
} else {
format_ident!("_IMPL_{}_FOR_{}", trait_, unraw(ty))
};
let use_serde = match serde_path {
Some(path) => quote! {
use #path as _serde;
},
None => quote! {
#[allow(unused_extern_crates, clippy::useless_attribute)]
extern crate serde as _serde;
},
};
quote! {
#[doc(hidden)]
#[allow(non_upper_case_globals, unused_attributes, unused_qualifications)]
const #dummy_const: () = {
#use_serde
#try_replacement
#code
};
}
}
#[allow(deprecated)]
fn unraw(ident: &Ident) -> String {
// str::trim_start_matches was added in 1.30, trim_left_matches deprecated
// in 1.33. We currently support rustc back to 1.15 so we need to continue
// to use the deprecated one.
ident.to_string().trim_left_matches("r#").to_owned()
}


@@ -0,0 +1,74 @@
use proc_macro2::TokenStream;
use quote::ToTokens;
use syn::token;
pub enum Fragment {
/// Tokens that can be used as an expression.
Expr(TokenStream),
/// Tokens that can be used inside a block. The surrounding curly braces are
/// not part of these tokens.
Block(TokenStream),
}
macro_rules! quote_expr {
($($tt:tt)*) => {
$crate::fragment::Fragment::Expr(quote!($($tt)*))
}
}
macro_rules! quote_block {
($($tt:tt)*) => {
$crate::fragment::Fragment::Block(quote!($($tt)*))
}
}
/// Interpolate a fragment in place of an expression. This involves surrounding
/// Block fragments in curly braces.
pub struct Expr(pub Fragment);
impl ToTokens for Expr {
fn to_tokens(&self, out: &mut TokenStream) {
match &self.0 {
Fragment::Expr(expr) => expr.to_tokens(out),
Fragment::Block(block) => {
token::Brace::default().surround(out, |out| block.to_tokens(out));
}
}
}
}
/// Interpolate a fragment as the statements of a block.
pub struct Stmts(pub Fragment);
impl ToTokens for Stmts {
fn to_tokens(&self, out: &mut TokenStream) {
match &self.0 {
Fragment::Expr(expr) => expr.to_tokens(out),
Fragment::Block(block) => block.to_tokens(out),
}
}
}
/// Interpolate a fragment as the value part of a `match` expression. This
/// involves putting a comma after expressions and curly braces around blocks.
pub struct Match(pub Fragment);
impl ToTokens for Match {
fn to_tokens(&self, out: &mut TokenStream) {
match &self.0 {
Fragment::Expr(expr) => {
expr.to_tokens(out);
<Token![,]>::default().to_tokens(out);
}
Fragment::Block(block) => {
token::Brace::default().surround(out, |out| block.to_tokens(out));
}
}
}
}
impl AsRef<TokenStream> for Fragment {
fn as_ref(&self) -> &TokenStream {
match self {
Fragment::Expr(expr) => expr,
Fragment::Block(block) => block,
}
}
}


@@ -0,0 +1,202 @@
//! A Serde ast, parsed from the Syn ast and ready to generate Rust code.
use internals::attr;
use internals::check;
use internals::{Ctxt, Derive};
use syn;
use syn::punctuated::Punctuated;
/// A source data structure annotated with `#[derive(Serialize)]` and/or `#[derive(Deserialize)]`,
/// parsed into an internal representation.
pub struct Container<'a> {
/// The struct or enum name (without generics).
pub ident: syn::Ident,
/// Attributes on the structure, parsed for Serde.
pub attrs: attr::Container,
/// The contents of the struct or enum.
pub data: Data<'a>,
/// Any generics on the struct or enum.
pub generics: &'a syn::Generics,
/// Original input.
pub original: &'a syn::DeriveInput,
}
/// The fields of a struct or enum.
///
/// Analogous to `syn::Data`.
pub enum Data<'a> {
Enum(Vec<Variant<'a>>),
Struct(Style, Vec<Field<'a>>),
}
/// A variant of an enum.
pub struct Variant<'a> {
pub ident: syn::Ident,
pub attrs: attr::Variant,
pub style: Style,
pub fields: Vec<Field<'a>>,
pub original: &'a syn::Variant,
}
/// A field of a struct.
pub struct Field<'a> {
pub member: syn::Member,
pub attrs: attr::Field,
pub ty: &'a syn::Type,
pub original: &'a syn::Field,
}
#[derive(Copy, Clone)]
pub enum Style {
/// Named fields.
Struct,
/// Many unnamed fields.
Tuple,
/// One unnamed field.
Newtype,
/// No fields.
Unit,
}
impl<'a> Container<'a> {
/// Convert the raw Syn ast into a parsed container object, collecting errors in `cx`.
pub fn from_ast(
cx: &Ctxt,
item: &'a syn::DeriveInput,
derive: Derive,
) -> Option<Container<'a>> {
let mut attrs = attr::Container::from_ast(cx, item);
let mut data = match &item.data {
syn::Data::Enum(data) => Data::Enum(enum_from_ast(cx, &data.variants, attrs.default())),
syn::Data::Struct(data) => {
let (style, fields) = struct_from_ast(cx, &data.fields, None, attrs.default());
Data::Struct(style, fields)
}
syn::Data::Union(_) => {
cx.error_spanned_by(item, "Serde does not support derive for unions");
return None;
}
};
let mut has_flatten = false;
match &mut data {
Data::Enum(variants) => {
for variant in variants {
variant.attrs.rename_by_rules(attrs.rename_all_rules());
for field in &mut variant.fields {
if field.attrs.flatten() {
has_flatten = true;
}
field
.attrs
.rename_by_rules(variant.attrs.rename_all_rules());
}
}
}
Data::Struct(_, fields) => {
for field in fields {
if field.attrs.flatten() {
has_flatten = true;
}
field.attrs.rename_by_rules(attrs.rename_all_rules());
}
}
}
if has_flatten {
attrs.mark_has_flatten();
}
let mut item = Container {
ident: item.ident.clone(),
attrs,
data,
generics: &item.generics,
original: item,
};
check::check(cx, &mut item, derive);
Some(item)
}
}
impl<'a> Data<'a> {
pub fn all_fields(&'a self) -> Box<Iterator<Item = &'a Field<'a>> + 'a> {
match self {
Data::Enum(variants) => {
Box::new(variants.iter().flat_map(|variant| variant.fields.iter()))
}
Data::Struct(_, fields) => Box::new(fields.iter()),
}
}
pub fn has_getter(&self) -> bool {
self.all_fields().any(|f| f.attrs.getter().is_some())
}
}
fn enum_from_ast<'a>(
cx: &Ctxt,
variants: &'a Punctuated<syn::Variant, Token![,]>,
container_default: &attr::Default,
) -> Vec<Variant<'a>> {
variants
.iter()
.map(|variant| {
let attrs = attr::Variant::from_ast(cx, variant);
let (style, fields) =
struct_from_ast(cx, &variant.fields, Some(&attrs), container_default);
Variant {
ident: variant.ident.clone(),
attrs,
style,
fields,
original: variant,
}
})
.collect()
}
fn struct_from_ast<'a>(
cx: &Ctxt,
fields: &'a syn::Fields,
attrs: Option<&attr::Variant>,
container_default: &attr::Default,
) -> (Style, Vec<Field<'a>>) {
match fields {
syn::Fields::Named(fields) => (
Style::Struct,
fields_from_ast(cx, &fields.named, attrs, container_default),
),
syn::Fields::Unnamed(fields) if fields.unnamed.len() == 1 => (
Style::Newtype,
fields_from_ast(cx, &fields.unnamed, attrs, container_default),
),
syn::Fields::Unnamed(fields) => (
Style::Tuple,
fields_from_ast(cx, &fields.unnamed, attrs, container_default),
),
syn::Fields::Unit => (Style::Unit, Vec::new()),
}
}
fn fields_from_ast<'a>(
cx: &Ctxt,
fields: &'a Punctuated<syn::Field, Token![,]>,
attrs: Option<&attr::Variant>,
container_default: &attr::Default,
) -> Vec<Field<'a>> {
fields
.iter()
.enumerate()
.map(|(i, field)| Field {
member: match &field.ident {
Some(ident) => syn::Member::Named(ident.clone()),
None => syn::Member::Unnamed(i.into()),
},
attrs: attr::Field::from_ast(cx, i, field, attrs, container_default),
ty: &field.ty,
original: field,
})
.collect()
}

File diff suppressed because it is too large.


@@ -0,0 +1,197 @@
//! Code to convert the Rust-styled field/variant (e.g. `my_field`, `MyType`) to the
//! case of the source (e.g. `my-field`, `MY_FIELD`).
// See https://users.rust-lang.org/t/psa-dealing-with-warning-unused-import-std-ascii-asciiext-in-today-s-nightly/13726
#[allow(deprecated, unused_imports)]
use std::ascii::AsciiExt;
use std::fmt::{self, Debug, Display};
use self::RenameRule::*;
/// The different possible ways to change case of fields in a struct, or variants in an enum.
#[derive(Copy, Clone, PartialEq)]
pub enum RenameRule {
/// Don't apply a default rename rule.
None,
/// Rename direct children to "lowercase" style.
LowerCase,
/// Rename direct children to "UPPERCASE" style.
UpperCase,
/// Rename direct children to "PascalCase" style, as typically used for
/// enum variants.
PascalCase,
/// Rename direct children to "camelCase" style.
CamelCase,
/// Rename direct children to "snake_case" style, as commonly used for
/// fields.
SnakeCase,
/// Rename direct children to "SCREAMING_SNAKE_CASE" style, as commonly
/// used for constants.
ScreamingSnakeCase,
/// Rename direct children to "kebab-case" style.
KebabCase,
/// Rename direct children to "SCREAMING-KEBAB-CASE" style.
ScreamingKebabCase,
}
static RENAME_RULES: &[(&str, RenameRule)] = &[
("lowercase", LowerCase),
("UPPERCASE", UpperCase),
("PascalCase", PascalCase),
("camelCase", CamelCase),
("snake_case", SnakeCase),
("SCREAMING_SNAKE_CASE", ScreamingSnakeCase),
("kebab-case", KebabCase),
("SCREAMING-KEBAB-CASE", ScreamingKebabCase),
];
impl RenameRule {
pub fn from_str(rename_all_str: &str) -> Result<Self, ParseError> {
for (name, rule) in RENAME_RULES {
if rename_all_str == *name {
return Ok(*rule);
}
}
Err(ParseError {
unknown: rename_all_str,
})
}
/// Apply a renaming rule to an enum variant, returning the version expected in the source.
pub fn apply_to_variant(&self, variant: &str) -> String {
match *self {
None | PascalCase => variant.to_owned(),
LowerCase => variant.to_ascii_lowercase(),
UpperCase => variant.to_ascii_uppercase(),
CamelCase => variant[..1].to_ascii_lowercase() + &variant[1..],
SnakeCase => {
let mut snake = String::new();
for (i, ch) in variant.char_indices() {
if i > 0 && ch.is_uppercase() {
snake.push('_');
}
snake.push(ch.to_ascii_lowercase());
}
snake
}
ScreamingSnakeCase => SnakeCase.apply_to_variant(variant).to_ascii_uppercase(),
KebabCase => SnakeCase.apply_to_variant(variant).replace('_', "-"),
ScreamingKebabCase => ScreamingSnakeCase
.apply_to_variant(variant)
.replace('_', "-"),
}
}
/// Apply a renaming rule to a struct field, returning the version expected in the source.
pub fn apply_to_field(&self, field: &str) -> String {
match *self {
None | LowerCase | SnakeCase => field.to_owned(),
UpperCase => field.to_ascii_uppercase(),
PascalCase => {
let mut pascal = String::new();
let mut capitalize = true;
for ch in field.chars() {
if ch == '_' {
capitalize = true;
} else if capitalize {
pascal.push(ch.to_ascii_uppercase());
capitalize = false;
} else {
pascal.push(ch);
}
}
pascal
}
CamelCase => {
let pascal = PascalCase.apply_to_field(field);
pascal[..1].to_ascii_lowercase() + &pascal[1..]
}
ScreamingSnakeCase => field.to_ascii_uppercase(),
KebabCase => field.replace('_', "-"),
ScreamingKebabCase => ScreamingSnakeCase.apply_to_field(field).replace('_', "-"),
}
}
}
pub struct ParseError<'a> {
unknown: &'a str,
}
impl<'a> Display for ParseError<'a> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
f.write_str("unknown rename rule `rename_all = ")?;
Debug::fmt(self.unknown, f)?;
f.write_str("`, expected one of ")?;
for (i, (name, _rule)) in RENAME_RULES.iter().enumerate() {
if i > 0 {
f.write_str(", ")?;
}
Debug::fmt(name, f)?;
}
Ok(())
}
}
#[test]
fn rename_variants() {
for &(original, lower, upper, camel, snake, screaming, kebab, screaming_kebab) in &[
(
"Outcome", "outcome", "OUTCOME", "outcome", "outcome", "OUTCOME", "outcome", "OUTCOME",
),
(
"VeryTasty",
"verytasty",
"VERYTASTY",
"veryTasty",
"very_tasty",
"VERY_TASTY",
"very-tasty",
"VERY-TASTY",
),
("A", "a", "A", "a", "a", "A", "a", "A"),
("Z42", "z42", "Z42", "z42", "z42", "Z42", "z42", "Z42"),
] {
assert_eq!(None.apply_to_variant(original), original);
assert_eq!(LowerCase.apply_to_variant(original), lower);
assert_eq!(UpperCase.apply_to_variant(original), upper);
assert_eq!(PascalCase.apply_to_variant(original), original);
assert_eq!(CamelCase.apply_to_variant(original), camel);
assert_eq!(SnakeCase.apply_to_variant(original), snake);
assert_eq!(ScreamingSnakeCase.apply_to_variant(original), screaming);
assert_eq!(KebabCase.apply_to_variant(original), kebab);
assert_eq!(
ScreamingKebabCase.apply_to_variant(original),
screaming_kebab
);
}
}
#[test]
fn rename_fields() {
for &(original, upper, pascal, camel, screaming, kebab, screaming_kebab) in &[
(
"outcome", "OUTCOME", "Outcome", "outcome", "OUTCOME", "outcome", "OUTCOME",
),
(
"very_tasty",
"VERY_TASTY",
"VeryTasty",
"veryTasty",
"VERY_TASTY",
"very-tasty",
"VERY-TASTY",
),
("a", "A", "A", "a", "A", "a", "A"),
("z42", "Z42", "Z42", "z42", "Z42", "z42", "Z42"),
] {
assert_eq!(None.apply_to_field(original), original);
assert_eq!(UpperCase.apply_to_field(original), upper);
assert_eq!(PascalCase.apply_to_field(original), pascal);
assert_eq!(CamelCase.apply_to_field(original), camel);
assert_eq!(SnakeCase.apply_to_field(original), original);
assert_eq!(ScreamingSnakeCase.apply_to_field(original), screaming);
assert_eq!(KebabCase.apply_to_field(original), kebab);
assert_eq!(ScreamingKebabCase.apply_to_field(original), screaming_kebab);
}
}


@@ -0,0 +1,420 @@
use internals::ast::{Container, Data, Field, Style};
use internals::attr::{Identifier, TagType};
use internals::{ungroup, Ctxt, Derive};
use syn::{Member, Type};
/// Cross-cutting checks that require looking at more than a single attrs
/// object. Simpler checks should happen when parsing and building the attrs.
pub fn check(cx: &Ctxt, cont: &mut Container, derive: Derive) {
check_getter(cx, cont);
check_flatten(cx, cont);
check_identifier(cx, cont);
check_variant_skip_attrs(cx, cont);
check_internal_tag_field_name_conflict(cx, cont);
check_adjacent_tag_conflict(cx, cont);
check_transparent(cx, cont, derive);
check_from_and_try_from(cx, cont);
}
/// Getters are only allowed inside structs (not enums) with the `remote`
/// attribute.
fn check_getter(cx: &Ctxt, cont: &Container) {
match cont.data {
Data::Enum(_) => {
if cont.data.has_getter() {
cx.error_spanned_by(
cont.original,
"#[serde(getter = \"...\")] is not allowed in an enum",
);
}
}
Data::Struct(_, _) => {
if cont.data.has_getter() && cont.attrs.remote().is_none() {
cx.error_spanned_by(
cont.original,
"#[serde(getter = \"...\")] can only be used in structs that have #[serde(remote = \"...\")]",
);
}
}
}
}
/// Flattening has some restrictions we can test.
fn check_flatten(cx: &Ctxt, cont: &Container) {
match &cont.data {
Data::Enum(variants) => {
for variant in variants {
for field in &variant.fields {
check_flatten_field(cx, variant.style, field);
}
}
}
Data::Struct(style, fields) => {
for field in fields {
check_flatten_field(cx, *style, field);
}
}
}
}
fn check_flatten_field(cx: &Ctxt, style: Style, field: &Field) {
if !field.attrs.flatten() {
return;
}
match style {
Style::Tuple => {
cx.error_spanned_by(
field.original,
"#[serde(flatten)] cannot be used on tuple structs",
);
}
Style::Newtype => {
cx.error_spanned_by(
field.original,
"#[serde(flatten)] cannot be used on newtype structs",
);
}
_ => {}
}
}
/// The `other` attribute must be used at most once and it must be the last
/// variant of an enum.
///
/// Inside a `variant_identifier` all variants must be unit variants. Inside a
/// `field_identifier` all but possibly one variant must be unit variants. The
/// last variant may be a newtype variant which is an implicit "other" case.
fn check_identifier(cx: &Ctxt, cont: &Container) {
let variants = match &cont.data {
Data::Enum(variants) => variants,
Data::Struct(_, _) => {
return;
}
};
for (i, variant) in variants.iter().enumerate() {
match (
variant.style,
cont.attrs.identifier(),
variant.attrs.other(),
cont.attrs.tag(),
) {
// The `other` attribute may not be used in a variant_identifier.
(_, Identifier::Variant, true, _) => {
cx.error_spanned_by(
variant.original,
"#[serde(other)] may not be used on a variant identifier",
);
}
// Variant with `other` attribute cannot appear in untagged enum
(_, Identifier::No, true, &TagType::None) => {
cx.error_spanned_by(
variant.original,
"#[serde(other)] cannot appear on untagged enum",
);
}
// Variant with `other` attribute must be the last one.
(Style::Unit, Identifier::Field, true, _) | (Style::Unit, Identifier::No, true, _) => {
if i < variants.len() - 1 {
cx.error_spanned_by(
variant.original,
"#[serde(other)] must be on the last variant",
);
}
}
// Variant with `other` attribute must be a unit variant.
(_, Identifier::Field, true, _) | (_, Identifier::No, true, _) => {
cx.error_spanned_by(
variant.original,
"#[serde(other)] must be on a unit variant",
);
}
// Any sort of variant is allowed if this is not an identifier.
(_, Identifier::No, false, _) => {}
// Unit variant without `other` attribute is always fine.
(Style::Unit, _, false, _) => {}
// The last field is allowed to be a newtype catch-all.
(Style::Newtype, Identifier::Field, false, _) => {
if i < variants.len() - 1 {
cx.error_spanned_by(
variant.original,
format!("`{}` must be the last variant", variant.ident),
);
}
}
(_, Identifier::Field, false, _) => {
cx.error_spanned_by(
variant.original,
"#[serde(field_identifier)] may only contain unit variants",
);
}
(_, Identifier::Variant, false, _) => {
cx.error_spanned_by(
variant.original,
"#[serde(variant_identifier)] may only contain unit variants",
);
}
}
}
}
/// Skip-(de)serializing attributes are not allowed on variants marked
/// (de)serialize_with.
fn check_variant_skip_attrs(cx: &Ctxt, cont: &Container) {
let variants = match &cont.data {
Data::Enum(variants) => variants,
Data::Struct(_, _) => {
return;
}
};
for variant in variants.iter() {
if variant.attrs.serialize_with().is_some() {
if variant.attrs.skip_serializing() {
cx.error_spanned_by(
variant.original,
format!(
"variant `{}` cannot have both #[serde(serialize_with)] and #[serde(skip_serializing)]",
variant.ident
),
);
}
for field in &variant.fields {
let member = member_message(&field.member);
if field.attrs.skip_serializing() {
cx.error_spanned_by(
variant.original,
format!(
"variant `{}` cannot have both #[serde(serialize_with)] and a field {} marked with #[serde(skip_serializing)]",
variant.ident, member
),
);
}
if field.attrs.skip_serializing_if().is_some() {
cx.error_spanned_by(
variant.original,
format!(
"variant `{}` cannot have both #[serde(serialize_with)] and a field {} marked with #[serde(skip_serializing_if)]",
variant.ident, member
),
);
}
}
}
if variant.attrs.deserialize_with().is_some() {
if variant.attrs.skip_deserializing() {
cx.error_spanned_by(
variant.original,
format!(
"variant `{}` cannot have both #[serde(deserialize_with)] and #[serde(skip_deserializing)]",
variant.ident
),
);
}
for field in &variant.fields {
if field.attrs.skip_deserializing() {
let member = member_message(&field.member);
cx.error_spanned_by(
variant.original,
format!(
"variant `{}` cannot have both #[serde(deserialize_with)] and a field {} marked with #[serde(skip_deserializing)]",
variant.ident, member
),
);
}
}
}
}
}
/// The tag of an internally-tagged struct variant must not be
/// the same as either one of its fields, as this would result in
/// duplicate keys in the serialized output and/or ambiguity in
/// the to-be-deserialized input.
fn check_internal_tag_field_name_conflict(cx: &Ctxt, cont: &Container) {
let variants = match &cont.data {
Data::Enum(variants) => variants,
Data::Struct(_, _) => return,
};
let tag = match cont.attrs.tag() {
TagType::Internal { tag } => tag.as_str(),
TagType::External | TagType::Adjacent { .. } | TagType::None => return,
};
let diagnose_conflict = || {
cx.error_spanned_by(
cont.original,
format!("variant field name `{}` conflicts with internal tag", tag),
);
};
for variant in variants {
match variant.style {
Style::Struct => {
for field in &variant.fields {
let check_ser = !field.attrs.skip_serializing();
let check_de = !field.attrs.skip_deserializing();
let name = field.attrs.name();
let ser_name = name.serialize_name();
if check_ser && ser_name == tag {
diagnose_conflict();
return;
}
for de_name in field.attrs.aliases() {
if check_de && de_name == tag {
diagnose_conflict();
return;
}
}
}
}
Style::Unit | Style::Newtype | Style::Tuple => {}
}
}
}
/// In the case of adjacently-tagged enums, the type and the
/// contents tag must differ, for the same reason.
fn check_adjacent_tag_conflict(cx: &Ctxt, cont: &Container) {
let (type_tag, content_tag) = match cont.attrs.tag() {
TagType::Adjacent { tag, content } => (tag, content),
TagType::Internal { .. } | TagType::External | TagType::None => return,
};
if type_tag == content_tag {
cx.error_spanned_by(
cont.original,
format!(
"enum tags `{}` for type and content conflict with each other",
type_tag
),
);
}
}
/// Enums and unit structs cannot be transparent.
fn check_transparent(cx: &Ctxt, cont: &mut Container, derive: Derive) {
if !cont.attrs.transparent() {
return;
}
if cont.attrs.type_from().is_some() {
cx.error_spanned_by(
cont.original,
"#[serde(transparent)] is not allowed with #[serde(from = \"...\")]",
);
}
if cont.attrs.type_try_from().is_some() {
cx.error_spanned_by(
cont.original,
"#[serde(transparent)] is not allowed with #[serde(try_from = \"...\")]",
);
}
if cont.attrs.type_into().is_some() {
cx.error_spanned_by(
cont.original,
"#[serde(transparent)] is not allowed with #[serde(into = \"...\")]",
);
}
let fields = match &mut cont.data {
Data::Enum(_) => {
cx.error_spanned_by(
cont.original,
"#[serde(transparent)] is not allowed on an enum",
);
return;
}
Data::Struct(Style::Unit, _) => {
cx.error_spanned_by(
cont.original,
"#[serde(transparent)] is not allowed on a unit struct",
);
return;
}
Data::Struct(_, fields) => fields,
};
let mut transparent_field = None;
for field in fields {
if allow_transparent(field, derive) {
if transparent_field.is_some() {
cx.error_spanned_by(
cont.original,
"#[serde(transparent)] requires struct to have at most one transparent field",
);
return;
}
transparent_field = Some(field);
}
}
match transparent_field {
Some(transparent_field) => transparent_field.attrs.mark_transparent(),
None => match derive {
Derive::Serialize => {
cx.error_spanned_by(
cont.original,
"#[serde(transparent)] requires at least one field that is not skipped",
);
}
Derive::Deserialize => {
cx.error_spanned_by(
cont.original,
"#[serde(transparent)] requires at least one field that is neither skipped nor has a default",
);
}
},
}
}
fn member_message(member: &Member) -> String {
match member {
Member::Named(ident) => format!("`{}`", ident),
Member::Unnamed(i) => format!("#{}", i.index),
}
}
fn allow_transparent(field: &Field, derive: Derive) -> bool {
if let Type::Path(ty) = ungroup(field.ty) {
if let Some(seg) = ty.path.segments.last() {
if seg.ident == "PhantomData" {
return false;
}
}
}
match derive {
Derive::Serialize => !field.attrs.skip_serializing(),
Derive::Deserialize => !field.attrs.skip_deserializing() && field.attrs.default().is_none(),
}
}
fn check_from_and_try_from(cx: &Ctxt, cont: &mut Container) {
if cont.attrs.type_from().is_some() && cont.attrs.type_try_from().is_some() {
cx.error_spanned_by(
cont.original,
"#[serde(from = \"...\")] and #[serde(try_from = \"...\")] conflict with each other",
);
}
}


@@ -0,0 +1,62 @@
use quote::ToTokens;
use std::cell::RefCell;
use std::fmt::Display;
use std::thread;
use syn;
/// A type to collect errors together and format them.
///
/// Dropping this object will cause a panic. It must be consumed using `check`.
///
/// References can be shared since this type uses run-time exclusive mut checking.
#[derive(Default)]
pub struct Ctxt {
// The contents will be set to `None` during checking. This is so that checking can be
// enforced.
errors: RefCell<Option<Vec<syn::Error>>>,
}
impl Ctxt {
/// Create a new context object.
///
/// This object contains no errors, but will still trigger a panic if it is not `check`ed.
pub fn new() -> Self {
Ctxt {
errors: RefCell::new(Some(Vec::new())),
}
}
/// Add an error to the context object with a tokenizable object.
///
/// The object is used for spanning in error messages.
pub fn error_spanned_by<A: ToTokens, T: Display>(&self, obj: A, msg: T) {
self.errors
.borrow_mut()
.as_mut()
.unwrap()
// Curb monomorphization from generating too many identical methods.
.push(syn::Error::new_spanned(obj.into_token_stream(), msg));
}
/// Add one of Syn's parse errors.
pub fn syn_error(&self, err: syn::Error) {
self.errors.borrow_mut().as_mut().unwrap().push(err);
}
/// Consume this object, producing the collected errors if there are any.
pub fn check(self) -> Result<(), Vec<syn::Error>> {
let errors = self.errors.borrow_mut().take().unwrap();
match errors.len() {
0 => Ok(()),
_ => Err(errors),
}
}
}
impl Drop for Ctxt {
fn drop(&mut self) {
if !thread::panicking() && self.errors.borrow().is_some() {
panic!("forgot to check for errors");
}
}
}
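// A minimal usage sketch (illustrative, not part of the vendored file), using
// only the methods defined above:
//
//     let cx = Ctxt::new();
//     cx.error_spanned_by(&some_tokens, "something went wrong");
//     cx.syn_error(syn::Error::new(proc_macro2::Span::call_site(), "oops"));
//     match cx.check() {
//         Ok(()) => { /* no errors collected */ }
//         Err(errors) => { /* turn each error into compile_error! tokens */ }
//     }
//
// Dropping an unchecked `Ctxt` outside of an unwinding panic aborts with
// "forgot to check for errors".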

View File

@@ -0,0 +1,28 @@
pub mod ast;
pub mod attr;
mod ctxt;
pub use self::ctxt::Ctxt;
mod receiver;
pub use self::receiver::replace_receiver;
mod case;
mod check;
mod respan;
mod symbol;
use syn::Type;
#[derive(Copy, Clone)]
pub enum Derive {
Serialize,
Deserialize,
}
pub fn ungroup(mut ty: &Type) -> &Type {
while let Type::Group(group) = ty {
ty = &group.elem;
}
ty
}
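// Usage sketch (illustrative): types passed through macro metavariables can be
// wrapped in invisible `Type::Group` nodes; peeling them first lets callers
// match on the underlying variant, e.g.
//
//     if let Type::Path(ty) = ungroup(field_ty) { /* inspect ty.path */ }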

View File

@@ -0,0 +1,285 @@
use internals::respan::respan;
use proc_macro2::Span;
use quote::ToTokens;
use std::mem;
use syn::punctuated::Punctuated;
use syn::{
parse_quote, Data, DeriveInput, Expr, ExprPath, GenericArgument, GenericParam, Generics, Macro,
Path, PathArguments, QSelf, ReturnType, Type, TypeParamBound, TypePath, WherePredicate,
};
pub fn replace_receiver(input: &mut DeriveInput) {
let self_ty = {
let ident = &input.ident;
let ty_generics = input.generics.split_for_impl().1;
parse_quote!(#ident #ty_generics)
};
let mut visitor = ReplaceReceiver(&self_ty);
visitor.visit_generics_mut(&mut input.generics);
visitor.visit_data_mut(&mut input.data);
}
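// Illustrative sketch (assumed input, not part of the vendored source) of the
// rewrite this pass performs before code generation:
//
//     struct Node<T> { children: Vec<Self>, value: T }
//
// becomes, in effect,
//
//     struct Node<T> { children: Vec<Node<T>>, value: T }
//
// so field types can be quoted inside generated impls, where `Self` would
// otherwise refer to the wrong type. `Self::Assoc` paths are rewritten to the
// qualified form `<Node<T>>::Assoc` by `self_to_qself` below.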
struct ReplaceReceiver<'a>(&'a TypePath);
impl ReplaceReceiver<'_> {
fn self_ty(&self, span: Span) -> TypePath {
let tokens = self.0.to_token_stream();
let respanned = respan(tokens, span);
syn::parse2(respanned).unwrap()
}
fn self_to_qself(&self, qself: &mut Option<QSelf>, path: &mut Path) {
if path.leading_colon.is_some() || path.segments[0].ident != "Self" {
return;
}
if path.segments.len() == 1 {
self.self_to_expr_path(path);
return;
}
let span = path.segments[0].ident.span();
*qself = Some(QSelf {
lt_token: Token![<](span),
ty: Box::new(Type::Path(self.self_ty(span))),
position: 0,
as_token: None,
gt_token: Token![>](span),
});
path.leading_colon = Some(**path.segments.pairs().next().unwrap().punct().unwrap());
let segments = mem::replace(&mut path.segments, Punctuated::new());
path.segments = segments.into_pairs().skip(1).collect();
}
fn self_to_expr_path(&self, path: &mut Path) {
let self_ty = self.self_ty(path.segments[0].ident.span());
let variant = mem::replace(path, self_ty.path);
for segment in &mut path.segments {
if let PathArguments::AngleBracketed(bracketed) = &mut segment.arguments {
if bracketed.colon2_token.is_none() && !bracketed.args.is_empty() {
bracketed.colon2_token = Some(<Token![::]>::default());
}
}
}
if variant.segments.len() > 1 {
path.segments.push_punct(<Token![::]>::default());
path.segments.extend(variant.segments.into_pairs().skip(1));
}
}
}
impl ReplaceReceiver<'_> {
// `Self` -> `Receiver`
fn visit_type_mut(&mut self, ty: &mut Type) {
let span = if let Type::Path(node) = ty {
if node.qself.is_none() && node.path.is_ident("Self") {
node.path.segments[0].ident.span()
} else {
self.visit_type_path_mut(node);
return;
}
} else {
self.visit_type_mut_impl(ty);
return;
};
*ty = self.self_ty(span).into();
}
// `Self::Assoc` -> `<Receiver>::Assoc`
fn visit_type_path_mut(&mut self, ty: &mut TypePath) {
if ty.qself.is_none() {
self.self_to_qself(&mut ty.qself, &mut ty.path);
}
self.visit_type_path_mut_impl(ty);
}
// `Self::method` -> `<Receiver>::method`
fn visit_expr_path_mut(&mut self, expr: &mut ExprPath) {
if expr.qself.is_none() {
self.self_to_qself(&mut expr.qself, &mut expr.path);
}
self.visit_expr_path_mut_impl(expr);
}
// Everything below is simply traversing the syntax tree.
fn visit_type_mut_impl(&mut self, ty: &mut Type) {
match ty {
Type::Array(ty) => {
self.visit_type_mut(&mut ty.elem);
self.visit_expr_mut(&mut ty.len);
}
Type::BareFn(ty) => {
for arg in &mut ty.inputs {
self.visit_type_mut(&mut arg.ty);
}
self.visit_return_type_mut(&mut ty.output);
}
Type::Group(ty) => self.visit_type_mut(&mut ty.elem),
Type::ImplTrait(ty) => {
for bound in &mut ty.bounds {
self.visit_type_param_bound_mut(bound);
}
}
Type::Macro(ty) => self.visit_macro_mut(&mut ty.mac),
Type::Paren(ty) => self.visit_type_mut(&mut ty.elem),
Type::Path(ty) => {
if let Some(qself) = &mut ty.qself {
self.visit_type_mut(&mut qself.ty);
}
self.visit_path_mut(&mut ty.path);
}
Type::Ptr(ty) => self.visit_type_mut(&mut ty.elem),
Type::Reference(ty) => self.visit_type_mut(&mut ty.elem),
Type::Slice(ty) => self.visit_type_mut(&mut ty.elem),
Type::TraitObject(ty) => {
for bound in &mut ty.bounds {
self.visit_type_param_bound_mut(bound);
}
}
Type::Tuple(ty) => {
for elem in &mut ty.elems {
self.visit_type_mut(elem);
}
}
Type::Infer(_) | Type::Never(_) | Type::Verbatim(_) => {}
#[cfg_attr(all(test, exhaustive), deny(non_exhaustive_omitted_patterns))]
_ => {}
}
}
fn visit_type_path_mut_impl(&mut self, ty: &mut TypePath) {
if let Some(qself) = &mut ty.qself {
self.visit_type_mut(&mut qself.ty);
}
self.visit_path_mut(&mut ty.path);
}
fn visit_expr_path_mut_impl(&mut self, expr: &mut ExprPath) {
if let Some(qself) = &mut expr.qself {
self.visit_type_mut(&mut qself.ty);
}
self.visit_path_mut(&mut expr.path);
}
fn visit_path_mut(&mut self, path: &mut Path) {
for segment in &mut path.segments {
self.visit_path_arguments_mut(&mut segment.arguments);
}
}
fn visit_path_arguments_mut(&mut self, arguments: &mut PathArguments) {
match arguments {
PathArguments::None => {}
PathArguments::AngleBracketed(arguments) => {
for arg in &mut arguments.args {
match arg {
GenericArgument::Type(arg) => self.visit_type_mut(arg),
GenericArgument::Binding(arg) => self.visit_type_mut(&mut arg.ty),
GenericArgument::Lifetime(_)
| GenericArgument::Constraint(_)
| GenericArgument::Const(_) => {}
}
}
}
PathArguments::Parenthesized(arguments) => {
for argument in &mut arguments.inputs {
self.visit_type_mut(argument);
}
self.visit_return_type_mut(&mut arguments.output);
}
}
}
fn visit_return_type_mut(&mut self, return_type: &mut ReturnType) {
match return_type {
ReturnType::Default => {}
ReturnType::Type(_, output) => self.visit_type_mut(output),
}
}
fn visit_type_param_bound_mut(&mut self, bound: &mut TypeParamBound) {
match bound {
TypeParamBound::Trait(bound) => self.visit_path_mut(&mut bound.path),
TypeParamBound::Lifetime(_) => {}
}
}
fn visit_generics_mut(&mut self, generics: &mut Generics) {
for param in &mut generics.params {
match param {
GenericParam::Type(param) => {
for bound in &mut param.bounds {
self.visit_type_param_bound_mut(bound);
}
}
GenericParam::Lifetime(_) | GenericParam::Const(_) => {}
}
}
if let Some(where_clause) = &mut generics.where_clause {
for predicate in &mut where_clause.predicates {
match predicate {
WherePredicate::Type(predicate) => {
self.visit_type_mut(&mut predicate.bounded_ty);
for bound in &mut predicate.bounds {
self.visit_type_param_bound_mut(bound);
}
}
WherePredicate::Lifetime(_) | WherePredicate::Eq(_) => {}
}
}
}
}
fn visit_data_mut(&mut self, data: &mut Data) {
match data {
Data::Struct(data) => {
for field in &mut data.fields {
self.visit_type_mut(&mut field.ty);
}
}
Data::Enum(data) => {
for variant in &mut data.variants {
for field in &mut variant.fields {
self.visit_type_mut(&mut field.ty);
}
}
}
Data::Union(_) => {}
}
}
fn visit_expr_mut(&mut self, expr: &mut Expr) {
match expr {
Expr::Binary(expr) => {
self.visit_expr_mut(&mut expr.left);
self.visit_expr_mut(&mut expr.right);
}
Expr::Call(expr) => {
self.visit_expr_mut(&mut expr.func);
for arg in &mut expr.args {
self.visit_expr_mut(arg);
}
}
Expr::Cast(expr) => {
self.visit_expr_mut(&mut expr.expr);
self.visit_type_mut(&mut expr.ty);
}
Expr::Field(expr) => self.visit_expr_mut(&mut expr.base),
Expr::Index(expr) => {
self.visit_expr_mut(&mut expr.expr);
self.visit_expr_mut(&mut expr.index);
}
Expr::Paren(expr) => self.visit_expr_mut(&mut expr.expr),
Expr::Path(expr) => self.visit_expr_path_mut(expr),
Expr::Unary(expr) => self.visit_expr_mut(&mut expr.expr),
_ => {}
}
}
fn visit_macro_mut(&mut self, _mac: &mut Macro) {}
}

View File

@@ -0,0 +1,16 @@
use proc_macro2::{Group, Span, TokenStream, TokenTree};
pub(crate) fn respan(stream: TokenStream, span: Span) -> TokenStream {
stream
.into_iter()
.map(|token| respan_token(token, span))
.collect()
}
fn respan_token(mut token: TokenTree, span: Span) -> TokenTree {
if let TokenTree::Group(g) = &mut token {
*g = Group::new(g.delimiter(), respan(g.stream(), span));
}
token.set_span(span);
token
}
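// Usage sketch (illustrative): `receiver.rs` calls this so the substituted
// receiver type carries the span of the original `Self` token, keeping
// compiler diagnostics pointed at the user's code:
//
//     let respanned = respan(self_ty.to_token_stream(), original_span);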

View File

@@ -0,0 +1,68 @@
use std::fmt::{self, Display};
use syn::{Ident, Path};
#[derive(Copy, Clone)]
pub struct Symbol(&'static str);
pub const ALIAS: Symbol = Symbol("alias");
pub const BORROW: Symbol = Symbol("borrow");
pub const BOUND: Symbol = Symbol("bound");
pub const CONTENT: Symbol = Symbol("content");
pub const CRATE: Symbol = Symbol("crate");
pub const DEFAULT: Symbol = Symbol("default");
pub const DENY_UNKNOWN_FIELDS: Symbol = Symbol("deny_unknown_fields");
pub const DESERIALIZE: Symbol = Symbol("deserialize");
pub const DESERIALIZE_WITH: Symbol = Symbol("deserialize_with");
pub const FIELD_IDENTIFIER: Symbol = Symbol("field_identifier");
pub const FLATTEN: Symbol = Symbol("flatten");
pub const FROM: Symbol = Symbol("from");
pub const GETTER: Symbol = Symbol("getter");
pub const INTO: Symbol = Symbol("into");
pub const OTHER: Symbol = Symbol("other");
pub const REMOTE: Symbol = Symbol("remote");
pub const RENAME: Symbol = Symbol("rename");
pub const RENAME_ALL: Symbol = Symbol("rename_all");
pub const SERDE: Symbol = Symbol("serde");
pub const SERIALIZE: Symbol = Symbol("serialize");
pub const SERIALIZE_WITH: Symbol = Symbol("serialize_with");
pub const SKIP: Symbol = Symbol("skip");
pub const SKIP_DESERIALIZING: Symbol = Symbol("skip_deserializing");
pub const SKIP_SERIALIZING: Symbol = Symbol("skip_serializing");
pub const SKIP_SERIALIZING_IF: Symbol = Symbol("skip_serializing_if");
pub const TAG: Symbol = Symbol("tag");
pub const TRANSPARENT: Symbol = Symbol("transparent");
pub const TRY_FROM: Symbol = Symbol("try_from");
pub const UNTAGGED: Symbol = Symbol("untagged");
pub const VARIANT_IDENTIFIER: Symbol = Symbol("variant_identifier");
pub const WITH: Symbol = Symbol("with");
pub const EXPECTING: Symbol = Symbol("expecting");
impl PartialEq<Symbol> for Ident {
fn eq(&self, word: &Symbol) -> bool {
self == word.0
}
}
impl<'a> PartialEq<Symbol> for &'a Ident {
fn eq(&self, word: &Symbol) -> bool {
*self == word.0
}
}
impl PartialEq<Symbol> for Path {
fn eq(&self, word: &Symbol) -> bool {
self.is_ident(word.0)
}
}
impl<'a> PartialEq<Symbol> for &'a Path {
fn eq(&self, word: &Symbol) -> bool {
self.is_ident(word.0)
}
}
impl Display for Symbol {
fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
formatter.write_str(self.0)
}
}
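// Usage sketch (illustrative; `meta_path` is an assumed local name): the
// attribute-parsing code can compare a parsed `syn::Path` or `Ident` against
// these constants directly, via the `PartialEq<Symbol>` impls above:
//
//     if meta_path == RENAME { /* handle #[serde(rename = "...")] */ }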

108
zeroidc/vendor/serde_derive/src/lib.rs vendored Normal file
View File

@@ -0,0 +1,108 @@
//! This crate provides Serde's two derive macros.
//!
//! ```edition2018
//! # use serde_derive::{Serialize, Deserialize};
//! #
//! #[derive(Serialize, Deserialize)]
//! # struct S;
//! #
//! # fn main() {}
//! ```
//!
//! Please refer to [https://serde.rs/derive.html] for how to set this up.
//!
//! [https://serde.rs/derive.html]: https://serde.rs/derive.html
#![doc(html_root_url = "https://docs.rs/serde_derive/1.0.137")]
#![allow(unknown_lints, bare_trait_objects)]
// Ignored clippy lints
#![allow(
// clippy false positive: https://github.com/rust-lang/rust-clippy/issues/7054
clippy::branches_sharing_code,
clippy::cognitive_complexity,
// clippy bug: https://github.com/rust-lang/rust-clippy/issues/7575
clippy::collapsible_match,
clippy::enum_variant_names,
// clippy bug: https://github.com/rust-lang/rust-clippy/issues/6797
clippy::manual_map,
clippy::match_like_matches_macro,
clippy::needless_pass_by_value,
clippy::too_many_arguments,
clippy::trivially_copy_pass_by_ref,
clippy::used_underscore_binding,
clippy::wildcard_in_or_patterns,
// clippy bug: https://github.com/rust-lang/rust-clippy/issues/5704
clippy::unnested_or_patterns,
)]
// Ignored clippy_pedantic lints
#![allow(
clippy::cast_possible_truncation,
clippy::checked_conversions,
clippy::doc_markdown,
clippy::enum_glob_use,
clippy::indexing_slicing,
clippy::items_after_statements,
clippy::let_underscore_drop,
clippy::manual_assert,
clippy::map_err_ignore,
clippy::match_same_arms,
// clippy bug: https://github.com/rust-lang/rust-clippy/issues/6984
clippy::match_wildcard_for_single_variants,
clippy::module_name_repetitions,
clippy::must_use_candidate,
clippy::option_if_let_else,
clippy::similar_names,
clippy::single_match_else,
clippy::struct_excessive_bools,
clippy::too_many_lines,
clippy::unseparated_literal_suffix,
clippy::unused_self,
clippy::use_self,
clippy::wildcard_imports
)]
#![cfg_attr(all(test, exhaustive), feature(non_exhaustive_omitted_patterns_lint))]
#[macro_use]
extern crate quote;
#[macro_use]
extern crate syn;
extern crate proc_macro;
extern crate proc_macro2;
mod internals;
use proc_macro::TokenStream;
use syn::DeriveInput;
#[macro_use]
mod bound;
#[macro_use]
mod fragment;
mod de;
mod dummy;
mod pretend;
mod ser;
mod try;
#[proc_macro_derive(Serialize, attributes(serde))]
pub fn derive_serialize(input: TokenStream) -> TokenStream {
let mut input = parse_macro_input!(input as DeriveInput);
ser::expand_derive_serialize(&mut input)
.unwrap_or_else(to_compile_errors)
.into()
}
#[proc_macro_derive(Deserialize, attributes(serde))]
pub fn derive_deserialize(input: TokenStream) -> TokenStream {
let mut input = parse_macro_input!(input as DeriveInput);
de::expand_derive_deserialize(&mut input)
.unwrap_or_else(to_compile_errors)
.into()
}
fn to_compile_errors(errors: Vec<syn::Error>) -> proc_macro2::TokenStream {
let compile_errors = errors.iter().map(syn::Error::to_compile_error);
quote!(#(#compile_errors)*)
}
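// For reference (illustrative): each collected `syn::Error` expands to a
// `compile_error!` invocation spanned at the offending tokens, so a failing
// derive surfaces as an ordinary compiler diagnostic, roughly like
//
//     compile_error!("#[serde(transparent)] is not allowed on an enum");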

View File

@@ -0,0 +1,201 @@
use proc_macro2::TokenStream;
use quote::format_ident;
use internals::ast::{Container, Data, Field, Style, Variant};
// Suppress dead_code warnings that would otherwise appear when using a remote
// derive. Other than this pretend code, a struct annotated with remote derive
// never has its fields referenced and an enum annotated with remote derive
// never has its variants constructed.
//
// warning: field is never used: `i`
// --> src/main.rs:4:20
// |
// 4 | struct StructDef { i: i32 }
// | ^^^^^^
//
// warning: variant is never constructed: `V`
// --> src/main.rs:8:16
// |
// 8 | enum EnumDef { V }
// | ^
//
pub fn pretend_used(cont: &Container, is_packed: bool) -> TokenStream {
let pretend_fields = pretend_fields_used(cont, is_packed);
let pretend_variants = pretend_variants_used(cont);
quote! {
#pretend_fields
#pretend_variants
}
}
// For structs with named fields, expands to:
//
// match None::<&T> {
// Some(T { a: __v0, b: __v1 }) => {}
// _ => {}
// }
//
// For packed structs on sufficiently new rustc, expands to:
//
// match None::<&T> {
// Some(__v @ T { a: _, b: _ }) => {
// let _ = addr_of!(__v.a);
// let _ = addr_of!(__v.b);
// }
// _ => {}
// }
//
// For packed structs on older rustc, we assume Sized and !Drop, and expand to:
//
// match None::<T> {
// Some(T { a: __v0, b: __v1 }) => {}
// _ => {}
// }
//
// For enums, expands to the following but only including struct variants:
//
// match None::<&T> {
// Some(T::A { a: __v0 }) => {}
// Some(T::B { b: __v0 }) => {}
// _ => {}
// }
//
fn pretend_fields_used(cont: &Container, is_packed: bool) -> TokenStream {
match &cont.data {
Data::Enum(variants) => pretend_fields_used_enum(cont, variants),
Data::Struct(Style::Struct, fields) => {
if is_packed {
pretend_fields_used_struct_packed(cont, fields)
} else {
pretend_fields_used_struct(cont, fields)
}
}
Data::Struct(_, _) => quote!(),
}
}
fn pretend_fields_used_struct(cont: &Container, fields: &[Field]) -> TokenStream {
let type_ident = &cont.ident;
let (_, ty_generics, _) = cont.generics.split_for_impl();
let members = fields.iter().map(|field| &field.member);
let placeholders = (0usize..).map(|i| format_ident!("__v{}", i));
quote! {
match _serde::__private::None::<&#type_ident #ty_generics> {
_serde::__private::Some(#type_ident { #(#members: #placeholders),* }) => {}
_ => {}
}
}
}
fn pretend_fields_used_struct_packed(cont: &Container, fields: &[Field]) -> TokenStream {
let type_ident = &cont.ident;
let (_, ty_generics, _) = cont.generics.split_for_impl();
let members = fields.iter().map(|field| &field.member).collect::<Vec<_>>();
#[cfg(ptr_addr_of)]
{
quote! {
match _serde::__private::None::<&#type_ident #ty_generics> {
_serde::__private::Some(__v @ #type_ident { #(#members: _),* }) => {
#(
let _ = _serde::__private::ptr::addr_of!(__v.#members);
)*
}
_ => {}
}
}
}
#[cfg(not(ptr_addr_of))]
{
let placeholders = (0usize..).map(|i| format_ident!("__v{}", i));
quote! {
match _serde::__private::None::<#type_ident #ty_generics> {
_serde::__private::Some(#type_ident { #(#members: #placeholders),* }) => {}
_ => {}
}
}
}
}
fn pretend_fields_used_enum(cont: &Container, variants: &[Variant]) -> TokenStream {
let type_ident = &cont.ident;
let (_, ty_generics, _) = cont.generics.split_for_impl();
let patterns = variants
.iter()
.filter_map(|variant| match variant.style {
Style::Struct => {
let variant_ident = &variant.ident;
let members = variant.fields.iter().map(|field| &field.member);
let placeholders = (0usize..).map(|i| format_ident!("__v{}", i));
Some(quote!(#type_ident::#variant_ident { #(#members: #placeholders),* }))
}
_ => None,
})
.collect::<Vec<_>>();
quote! {
match _serde::__private::None::<&#type_ident #ty_generics> {
#(
_serde::__private::Some(#patterns) => {}
)*
_ => {}
}
}
}
// Expands to one of these per enum variant:
//
// match None {
// Some((__v0, __v1,)) => {
// let _ = E::V { a: __v0, b: __v1 };
// }
// _ => {}
// }
//
fn pretend_variants_used(cont: &Container) -> TokenStream {
let variants = match &cont.data {
Data::Enum(variants) => variants,
Data::Struct(_, _) => {
return quote!();
}
};
let type_ident = &cont.ident;
let (_, ty_generics, _) = cont.generics.split_for_impl();
let turbofish = ty_generics.as_turbofish();
let cases = variants.iter().map(|variant| {
let variant_ident = &variant.ident;
let placeholders = &(0..variant.fields.len())
.map(|i| format_ident!("__v{}", i))
.collect::<Vec<_>>();
let pat = match variant.style {
Style::Struct => {
let members = variant.fields.iter().map(|field| &field.member);
quote!({ #(#members: #placeholders),* })
}
Style::Tuple | Style::Newtype => quote!(( #(#placeholders),* )),
Style::Unit => quote!(),
};
quote! {
match _serde::__private::None {
_serde::__private::Some((#(#placeholders,)*)) => {
let _ = #type_ident::#variant_ident #turbofish #pat;
}
_ => {}
}
}
});
quote!(#(#cases)*)
}

1338
zeroidc/vendor/serde_derive/src/ser.rs vendored Normal file

File diff suppressed because it is too large

24
zeroidc/vendor/serde_derive/src/try.rs vendored Normal file
View File

@@ -0,0 +1,24 @@
use proc_macro2::{Punct, Spacing, TokenStream};
// None of our generated code requires the `From::from` error conversion
// performed by the standard library's `try!` macro. With this simplified macro
// we see a significant improvement in type checking and borrow checking time of
// the generated code and a slight improvement in binary size.
pub fn replacement() -> TokenStream {
// Cannot pass `$expr` to `quote!` prior to Rust 1.17.0 so interpolate it.
let dollar = Punct::new('$', Spacing::Alone);
quote! {
#[allow(unused_macros)]
macro_rules! try {
(#dollar __expr:expr) => {
match #dollar __expr {
_serde::__private::Ok(__val) => __val,
_serde::__private::Err(__err) => {
return _serde::__private::Err(__err);
}
}
}
}
}
}
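// Usage sketch (illustrative): the generated (de)serialization code invokes
// this local `try!` in place of the standard library macro, e.g.
//
//     let mut __serde_state = try!(_serde::Serializer::serialize_struct(
//         __serializer, "S", 1));
//
// which expands to a plain `match` with no `From::from` conversion of the
// error type.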