Initial vendor packages
Signed-off-by: Valentin Popov <valentin@popov.link>
This commit is contained in:
1
vendor/serde_derive/.cargo-checksum.json
vendored
Normal file
1
vendor/serde_derive/.cargo-checksum.json
vendored
Normal file
@ -0,0 +1 @@
|
||||
{"files":{"Cargo.toml":"292f527e949377d3d23759746419c447bb8e93603f48d933ef1af34a0bf8b666","LICENSE-APACHE":"62c7a1e35f56406896d7aa7ca52d0cc0d272ac022b5d2796e7d6905db8a3636a","LICENSE-MIT":"23f18e03dc49df91622fe2a76176497404e46ced8a715d9d2b67a7446571cca3","README.md":"c3ece10a36d19b4e857a770eaf74a2164d220f55fa11947065a3898c1697ecef","crates-io.md":"56e988ac4944c45f5bf5051e3827892ed8fb817853d99d9df1fff6621108e270","src/bound.rs":"6c5c20785ac95af9480f8d0de35a7e844cc36a16012f6468db148acd03cb15c2","src/de.rs":"c221ab2b94a5d80dccff74a37f3448b3d695656552b452595dc289c73b12fb2b","src/dummy.rs":"9533dfee23f20d92ea75734c739022820c2787ded0d54f459feacdeb770ec912","src/fragment.rs":"6757cb4c3131d4300f093572efc273c4ab5a20e3e1efb54a311dcfa52d0bd6eb","src/internals/ast.rs":"7dc997e4090033bbd1d0bdd870e8bb87b096b7f66cfd02047f6b85ebdd569b12","src/internals/attr.rs":"6584c0a02de0d17993877303f3cc2c1bccf235257632220421f98082d82d387a","src/internals/case.rs":"10c8dda2b32d8c6c6b63cf09cdc63d02375af7e95ecefe8fecb34f93b65191bb","src/internals/check.rs":"d842eb9912fd29311060b67f3bc62c438eb7b5d86093355acb4de7eee02a0ef8","src/internals/ctxt.rs":"83a4e6fbe0e439d578478883594407e03f2f340541be479bdf0b04a202633a37","src/internals/mod.rs":"ed021ca635c18132a0e5c3d90f21b7f65def0a61e946421a30200b5b9ab6ad43","src/internals/receiver.rs":"fe8a480669511b5edcfe71f5dd290cf72ccec54c9016ec85f2ac59dce538077f","src/internals/respan.rs":"899753859c58ce5f532a3ec4584796a52f13ed5a0533191e48c953ba5c1b52ff","src/internals/symbol.rs":"d619e88caa3c7a09b03014257f2b349ee922290062d9b97b4dd19d0e64532690","src/lib.rs":"3fc5a148c35cda8a27bc0a65da41a991d63f8ea0e0d607336082dd0f528750ef","src/pretend.rs":"7facc10a5b805564dd95735ae11118ec17ca6adcc49a59764e7c920e27b9fc4a","src/ser.rs":"e3341471cea9d7e2fb4043e5d1746862beb9a4e25196170879eeac529d460920","src/this.rs":"87818dc80cbb521b51938a653d09daf10aafc220bb10425948de82ad670fcb85"},"package":"46fe8f8603d81ba86327b23a2e9cdf49e1255fb94a4c5f297f6ee0547178ea2c"}
|
59
vendor/serde_derive/Cargo.toml
vendored
Normal file
59
vendor/serde_derive/Cargo.toml
vendored
Normal file
@ -0,0 +1,59 @@
|
||||
# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO
|
||||
#
|
||||
# When uploading crates to the registry Cargo will automatically
|
||||
# "normalize" Cargo.toml files for maximal compatibility
|
||||
# with all versions of Cargo and also rewrite `path` dependencies
|
||||
# to registry (e.g., crates.io) dependencies.
|
||||
#
|
||||
# If you are reading this file be aware that the original Cargo.toml
|
||||
# will likely look very different (and much more reasonable).
|
||||
# See Cargo.toml.orig for the original contents.
|
||||
|
||||
[package]
|
||||
rust-version = "1.56"
|
||||
name = "serde_derive"
|
||||
version = "1.0.195"
|
||||
authors = [
|
||||
"Erick Tryzelaar <erick.tryzelaar@gmail.com>",
|
||||
"David Tolnay <dtolnay@gmail.com>",
|
||||
]
|
||||
description = "Macros 1.1 implementation of #[derive(Serialize, Deserialize)]"
|
||||
homepage = "https://serde.rs"
|
||||
documentation = "https://serde.rs/derive.html"
|
||||
readme = "crates-io.md"
|
||||
keywords = [
|
||||
"serde",
|
||||
"serialization",
|
||||
"no_std",
|
||||
"derive",
|
||||
]
|
||||
categories = [
|
||||
"no-std",
|
||||
"no-std::no-alloc",
|
||||
]
|
||||
license = "MIT OR Apache-2.0"
|
||||
repository = "https://github.com/serde-rs/serde"
|
||||
|
||||
[package.metadata.docs.rs]
|
||||
rustdoc-args = ["--generate-link-to-definition"]
|
||||
targets = ["x86_64-unknown-linux-gnu"]
|
||||
|
||||
[lib]
|
||||
name = "serde_derive"
|
||||
proc-macro = true
|
||||
|
||||
[dependencies.proc-macro2]
|
||||
version = "1.0.74"
|
||||
|
||||
[dependencies.quote]
|
||||
version = "1.0.35"
|
||||
|
||||
[dependencies.syn]
|
||||
version = "2.0.46"
|
||||
|
||||
[dev-dependencies.serde]
|
||||
version = "1"
|
||||
|
||||
[features]
|
||||
default = []
|
||||
deserialize_in_place = []
|
176
vendor/serde_derive/LICENSE-APACHE
vendored
Normal file
176
vendor/serde_derive/LICENSE-APACHE
vendored
Normal file
@ -0,0 +1,176 @@
|
||||
Apache License
|
||||
Version 2.0, January 2004
|
||||
http://www.apache.org/licenses/
|
||||
|
||||
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
|
||||
|
||||
1. Definitions.
|
||||
|
||||
"License" shall mean the terms and conditions for use, reproduction,
|
||||
and distribution as defined by Sections 1 through 9 of this document.
|
||||
|
||||
"Licensor" shall mean the copyright owner or entity authorized by
|
||||
the copyright owner that is granting the License.
|
||||
|
||||
"Legal Entity" shall mean the union of the acting entity and all
|
||||
other entities that control, are controlled by, or are under common
|
||||
control with that entity. For the purposes of this definition,
|
||||
"control" means (i) the power, direct or indirect, to cause the
|
||||
direction or management of such entity, whether by contract or
|
||||
otherwise, or (ii) ownership of fifty percent (50%) or more of the
|
||||
outstanding shares, or (iii) beneficial ownership of such entity.
|
||||
|
||||
"You" (or "Your") shall mean an individual or Legal Entity
|
||||
exercising permissions granted by this License.
|
||||
|
||||
"Source" form shall mean the preferred form for making modifications,
|
||||
including but not limited to software source code, documentation
|
||||
source, and configuration files.
|
||||
|
||||
"Object" form shall mean any form resulting from mechanical
|
||||
transformation or translation of a Source form, including but
|
||||
not limited to compiled object code, generated documentation,
|
||||
and conversions to other media types.
|
||||
|
||||
"Work" shall mean the work of authorship, whether in Source or
|
||||
Object form, made available under the License, as indicated by a
|
||||
copyright notice that is included in or attached to the work
|
||||
(an example is provided in the Appendix below).
|
||||
|
||||
"Derivative Works" shall mean any work, whether in Source or Object
|
||||
form, that is based on (or derived from) the Work and for which the
|
||||
editorial revisions, annotations, elaborations, or other modifications
|
||||
represent, as a whole, an original work of authorship. For the purposes
|
||||
of this License, Derivative Works shall not include works that remain
|
||||
separable from, or merely link (or bind by name) to the interfaces of,
|
||||
the Work and Derivative Works thereof.
|
||||
|
||||
"Contribution" shall mean any work of authorship, including
|
||||
the original version of the Work and any modifications or additions
|
||||
to that Work or Derivative Works thereof, that is intentionally
|
||||
submitted to Licensor for inclusion in the Work by the copyright owner
|
||||
or by an individual or Legal Entity authorized to submit on behalf of
|
||||
the copyright owner. For the purposes of this definition, "submitted"
|
||||
means any form of electronic, verbal, or written communication sent
|
||||
to the Licensor or its representatives, including but not limited to
|
||||
communication on electronic mailing lists, source code control systems,
|
||||
and issue tracking systems that are managed by, or on behalf of, the
|
||||
Licensor for the purpose of discussing and improving the Work, but
|
||||
excluding communication that is conspicuously marked or otherwise
|
||||
designated in writing by the copyright owner as "Not a Contribution."
|
||||
|
||||
"Contributor" shall mean Licensor and any individual or Legal Entity
|
||||
on behalf of whom a Contribution has been received by Licensor and
|
||||
subsequently incorporated within the Work.
|
||||
|
||||
2. Grant of Copyright License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
copyright license to reproduce, prepare Derivative Works of,
|
||||
publicly display, publicly perform, sublicense, and distribute the
|
||||
Work and such Derivative Works in Source or Object form.
|
||||
|
||||
3. Grant of Patent License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
(except as stated in this section) patent license to make, have made,
|
||||
use, offer to sell, sell, import, and otherwise transfer the Work,
|
||||
where such license applies only to those patent claims licensable
|
||||
by such Contributor that are necessarily infringed by their
|
||||
Contribution(s) alone or by combination of their Contribution(s)
|
||||
with the Work to which such Contribution(s) was submitted. If You
|
||||
institute patent litigation against any entity (including a
|
||||
cross-claim or counterclaim in a lawsuit) alleging that the Work
|
||||
or a Contribution incorporated within the Work constitutes direct
|
||||
or contributory patent infringement, then any patent licenses
|
||||
granted to You under this License for that Work shall terminate
|
||||
as of the date such litigation is filed.
|
||||
|
||||
4. Redistribution. You may reproduce and distribute copies of the
|
||||
Work or Derivative Works thereof in any medium, with or without
|
||||
modifications, and in Source or Object form, provided that You
|
||||
meet the following conditions:
|
||||
|
||||
(a) You must give any other recipients of the Work or
|
||||
Derivative Works a copy of this License; and
|
||||
|
||||
(b) You must cause any modified files to carry prominent notices
|
||||
stating that You changed the files; and
|
||||
|
||||
(c) You must retain, in the Source form of any Derivative Works
|
||||
that You distribute, all copyright, patent, trademark, and
|
||||
attribution notices from the Source form of the Work,
|
||||
excluding those notices that do not pertain to any part of
|
||||
the Derivative Works; and
|
||||
|
||||
(d) If the Work includes a "NOTICE" text file as part of its
|
||||
distribution, then any Derivative Works that You distribute must
|
||||
include a readable copy of the attribution notices contained
|
||||
within such NOTICE file, excluding those notices that do not
|
||||
pertain to any part of the Derivative Works, in at least one
|
||||
of the following places: within a NOTICE text file distributed
|
||||
as part of the Derivative Works; within the Source form or
|
||||
documentation, if provided along with the Derivative Works; or,
|
||||
within a display generated by the Derivative Works, if and
|
||||
wherever such third-party notices normally appear. The contents
|
||||
of the NOTICE file are for informational purposes only and
|
||||
do not modify the License. You may add Your own attribution
|
||||
notices within Derivative Works that You distribute, alongside
|
||||
or as an addendum to the NOTICE text from the Work, provided
|
||||
that such additional attribution notices cannot be construed
|
||||
as modifying the License.
|
||||
|
||||
You may add Your own copyright statement to Your modifications and
|
||||
may provide additional or different license terms and conditions
|
||||
for use, reproduction, or distribution of Your modifications, or
|
||||
for any such Derivative Works as a whole, provided Your use,
|
||||
reproduction, and distribution of the Work otherwise complies with
|
||||
the conditions stated in this License.
|
||||
|
||||
5. Submission of Contributions. Unless You explicitly state otherwise,
|
||||
any Contribution intentionally submitted for inclusion in the Work
|
||||
by You to the Licensor shall be under the terms and conditions of
|
||||
this License, without any additional terms or conditions.
|
||||
Notwithstanding the above, nothing herein shall supersede or modify
|
||||
the terms of any separate license agreement you may have executed
|
||||
with Licensor regarding such Contributions.
|
||||
|
||||
6. Trademarks. This License does not grant permission to use the trade
|
||||
names, trademarks, service marks, or product names of the Licensor,
|
||||
except as required for reasonable and customary use in describing the
|
||||
origin of the Work and reproducing the content of the NOTICE file.
|
||||
|
||||
7. Disclaimer of Warranty. Unless required by applicable law or
|
||||
agreed to in writing, Licensor provides the Work (and each
|
||||
Contributor provides its Contributions) on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||
implied, including, without limitation, any warranties or conditions
|
||||
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
|
||||
PARTICULAR PURPOSE. You are solely responsible for determining the
|
||||
appropriateness of using or redistributing the Work and assume any
|
||||
risks associated with Your exercise of permissions under this License.
|
||||
|
||||
8. Limitation of Liability. In no event and under no legal theory,
|
||||
whether in tort (including negligence), contract, or otherwise,
|
||||
unless required by applicable law (such as deliberate and grossly
|
||||
negligent acts) or agreed to in writing, shall any Contributor be
|
||||
liable to You for damages, including any direct, indirect, special,
|
||||
incidental, or consequential damages of any character arising as a
|
||||
result of this License or out of the use or inability to use the
|
||||
Work (including but not limited to damages for loss of goodwill,
|
||||
work stoppage, computer failure or malfunction, or any and all
|
||||
other commercial damages or losses), even if such Contributor
|
||||
has been advised of the possibility of such damages.
|
||||
|
||||
9. Accepting Warranty or Additional Liability. While redistributing
|
||||
the Work or Derivative Works thereof, You may choose to offer,
|
||||
and charge a fee for, acceptance of support, warranty, indemnity,
|
||||
or other liability obligations and/or rights consistent with this
|
||||
License. However, in accepting such obligations, You may act only
|
||||
on Your own behalf and on Your sole responsibility, not on behalf
|
||||
of any other Contributor, and only if You agree to indemnify,
|
||||
defend, and hold each Contributor harmless for any liability
|
||||
incurred by, or claims asserted against, such Contributor by reason
|
||||
of your accepting any such warranty or additional liability.
|
||||
|
||||
END OF TERMS AND CONDITIONS
|
23
vendor/serde_derive/LICENSE-MIT
vendored
Normal file
23
vendor/serde_derive/LICENSE-MIT
vendored
Normal file
@ -0,0 +1,23 @@
|
||||
Permission is hereby granted, free of charge, to any
|
||||
person obtaining a copy of this software and associated
|
||||
documentation files (the "Software"), to deal in the
|
||||
Software without restriction, including without
|
||||
limitation the rights to use, copy, modify, merge,
|
||||
publish, distribute, sublicense, and/or sell copies of
|
||||
the Software, and to permit persons to whom the Software
|
||||
is furnished to do so, subject to the following
|
||||
conditions:
|
||||
|
||||
The above copyright notice and this permission notice
|
||||
shall be included in all copies or substantial portions
|
||||
of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF
|
||||
ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
|
||||
TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
|
||||
PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
|
||||
SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
|
||||
CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
|
||||
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
|
||||
IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
|
||||
DEALINGS IN THE SOFTWARE.
|
114
vendor/serde_derive/README.md
vendored
Normal file
114
vendor/serde_derive/README.md
vendored
Normal file
@ -0,0 +1,114 @@
|
||||
# Serde   [![Build Status]][actions] [![Latest Version]][crates.io] [![serde: rustc 1.31+]][Rust 1.31] [![serde_derive: rustc 1.56+]][Rust 1.56]
|
||||
|
||||
[Build Status]: https://img.shields.io/github/actions/workflow/status/serde-rs/serde/ci.yml?branch=master
|
||||
[actions]: https://github.com/serde-rs/serde/actions?query=branch%3Amaster
|
||||
[Latest Version]: https://img.shields.io/crates/v/serde.svg
|
||||
[crates.io]: https://crates.io/crates/serde
|
||||
[serde: rustc 1.31+]: https://img.shields.io/badge/serde-rustc_1.31+-lightgray.svg
|
||||
[serde_derive: rustc 1.56+]: https://img.shields.io/badge/serde_derive-rustc_1.56+-lightgray.svg
|
||||
[Rust 1.31]: https://blog.rust-lang.org/2018/12/06/Rust-1.31-and-rust-2018.html
|
||||
[Rust 1.56]: https://blog.rust-lang.org/2021/10/21/Rust-1.56.0.html
|
||||
|
||||
**Serde is a framework for *ser*ializing and *de*serializing Rust data structures efficiently and generically.**
|
||||
|
||||
---
|
||||
|
||||
You may be looking for:
|
||||
|
||||
- [An overview of Serde](https://serde.rs/)
|
||||
- [Data formats supported by Serde](https://serde.rs/#data-formats)
|
||||
- [Setting up `#[derive(Serialize, Deserialize)]`](https://serde.rs/derive.html)
|
||||
- [Examples](https://serde.rs/examples.html)
|
||||
- [API documentation](https://docs.rs/serde)
|
||||
- [Release notes](https://github.com/serde-rs/serde/releases)
|
||||
|
||||
## Serde in action
|
||||
|
||||
<details>
|
||||
<summary>
|
||||
Click to show Cargo.toml.
|
||||
<a href="https://play.rust-lang.org/?edition=2018&gist=72755f28f99afc95e01d63174b28c1f5" target="_blank">Run this code in the playground.</a>
|
||||
</summary>
|
||||
|
||||
```toml
|
||||
[dependencies]
|
||||
|
||||
# The core APIs, including the Serialize and Deserialize traits. Always
|
||||
# required when using Serde. The "derive" feature is only required when
|
||||
# using #[derive(Serialize, Deserialize)] to make Serde work with structs
|
||||
# and enums defined in your crate.
|
||||
serde = { version = "1.0", features = ["derive"] }
|
||||
|
||||
# Each data format lives in its own crate; the sample code below uses JSON
|
||||
# but you may be using a different one.
|
||||
serde_json = "1.0"
|
||||
```
|
||||
|
||||
</details>
|
||||
<p></p>
|
||||
|
||||
```rust
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug)]
|
||||
struct Point {
|
||||
x: i32,
|
||||
y: i32,
|
||||
}
|
||||
|
||||
fn main() {
|
||||
let point = Point { x: 1, y: 2 };
|
||||
|
||||
// Convert the Point to a JSON string.
|
||||
let serialized = serde_json::to_string(&point).unwrap();
|
||||
|
||||
// Prints serialized = {"x":1,"y":2}
|
||||
println!("serialized = {}", serialized);
|
||||
|
||||
// Convert the JSON string back to a Point.
|
||||
let deserialized: Point = serde_json::from_str(&serialized).unwrap();
|
||||
|
||||
// Prints deserialized = Point { x: 1, y: 2 }
|
||||
println!("deserialized = {:?}", deserialized);
|
||||
}
|
||||
```
|
||||
|
||||
## Getting help
|
||||
|
||||
Serde is one of the most widely used Rust libraries so any place that Rustaceans
|
||||
congregate will be able to help you out. For chat, consider trying the
|
||||
[#rust-questions] or [#rust-beginners] channels of the unofficial community
|
||||
Discord (invite: <https://discord.gg/rust-lang-community>), the [#rust-usage] or
|
||||
[#beginners] channels of the official Rust Project Discord (invite:
|
||||
<https://discord.gg/rust-lang>), or the [#general][zulip] stream in Zulip. For
|
||||
asynchronous, consider the [\[rust\] tag on StackOverflow][stackoverflow], the
|
||||
[/r/rust] subreddit which has a pinned weekly easy questions post, or the Rust
|
||||
[Discourse forum][discourse]. It's acceptable to file a support issue in this
|
||||
repo but they tend not to get as many eyes as any of the above and may get
|
||||
closed without a response after some time.
|
||||
|
||||
[#rust-questions]: https://discord.com/channels/273534239310479360/274215136414400513
|
||||
[#rust-beginners]: https://discord.com/channels/273534239310479360/273541522815713281
|
||||
[#rust-usage]: https://discord.com/channels/442252698964721669/443150878111694848
|
||||
[#beginners]: https://discord.com/channels/442252698964721669/448238009733742612
|
||||
[zulip]: https://rust-lang.zulipchat.com/#narrow/stream/122651-general
|
||||
[stackoverflow]: https://stackoverflow.com/questions/tagged/rust
|
||||
[/r/rust]: https://www.reddit.com/r/rust
|
||||
[discourse]: https://users.rust-lang.org
|
||||
|
||||
<br>
|
||||
|
||||
#### License
|
||||
|
||||
<sup>
|
||||
Licensed under either of <a href="LICENSE-APACHE">Apache License, Version
|
||||
2.0</a> or <a href="LICENSE-MIT">MIT license</a> at your option.
|
||||
</sup>
|
||||
|
||||
<br>
|
||||
|
||||
<sub>
|
||||
Unless you explicitly state otherwise, any contribution intentionally submitted
|
||||
for inclusion in Serde by you, as defined in the Apache-2.0 license, shall be
|
||||
dual licensed as above, without any additional terms or conditions.
|
||||
</sub>
|
65
vendor/serde_derive/crates-io.md
vendored
Normal file
65
vendor/serde_derive/crates-io.md
vendored
Normal file
@ -0,0 +1,65 @@
|
||||
<!-- Serde readme rendered on crates.io -->
|
||||
|
||||
**Serde is a framework for *ser*ializing and *de*serializing Rust data structures efficiently and generically.**
|
||||
|
||||
---
|
||||
|
||||
You may be looking for:
|
||||
|
||||
- [An overview of Serde](https://serde.rs/)
|
||||
- [Data formats supported by Serde](https://serde.rs/#data-formats)
|
||||
- [Setting up `#[derive(Serialize, Deserialize)]`](https://serde.rs/derive.html)
|
||||
- [Examples](https://serde.rs/examples.html)
|
||||
- [API documentation](https://docs.rs/serde)
|
||||
- [Release notes](https://github.com/serde-rs/serde/releases)
|
||||
|
||||
## Serde in action
|
||||
|
||||
```rust
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug)]
|
||||
struct Point {
|
||||
x: i32,
|
||||
y: i32,
|
||||
}
|
||||
|
||||
fn main() {
|
||||
let point = Point { x: 1, y: 2 };
|
||||
|
||||
// Convert the Point to a JSON string.
|
||||
let serialized = serde_json::to_string(&point).unwrap();
|
||||
|
||||
// Prints serialized = {"x":1,"y":2}
|
||||
println!("serialized = {}", serialized);
|
||||
|
||||
// Convert the JSON string back to a Point.
|
||||
let deserialized: Point = serde_json::from_str(&serialized).unwrap();
|
||||
|
||||
// Prints deserialized = Point { x: 1, y: 2 }
|
||||
println!("deserialized = {:?}", deserialized);
|
||||
}
|
||||
```
|
||||
|
||||
## Getting help
|
||||
|
||||
Serde is one of the most widely used Rust libraries so any place that Rustaceans
|
||||
congregate will be able to help you out. For chat, consider trying the
|
||||
[#rust-questions] or [#rust-beginners] channels of the unofficial community
|
||||
Discord (invite: <https://discord.gg/rust-lang-community>, the [#rust-usage] or
|
||||
[#beginners] channels of the official Rust Project Discord (invite:
|
||||
<https://discord.gg/rust-lang>), or the [#general][zulip] stream in Zulip. For
|
||||
asynchronous, consider the [\[rust\] tag on StackOverflow][stackoverflow], the
|
||||
[/r/rust] subreddit which has a pinned weekly easy questions post, or the Rust
|
||||
[Discourse forum][discourse]. It's acceptable to file a support issue in this
|
||||
repo but they tend not to get as many eyes as any of the above and may get
|
||||
closed without a response after some time.
|
||||
|
||||
[#rust-questions]: https://discord.com/channels/273534239310479360/274215136414400513
|
||||
[#rust-beginners]: https://discord.com/channels/273534239310479360/273541522815713281
|
||||
[#rust-usage]: https://discord.com/channels/442252698964721669/443150878111694848
|
||||
[#beginners]: https://discord.com/channels/442252698964721669/448238009733742612
|
||||
[zulip]: https://rust-lang.zulipchat.com/#narrow/stream/122651-general
|
||||
[stackoverflow]: https://stackoverflow.com/questions/tagged/rust
|
||||
[/r/rust]: https://www.reddit.com/r/rust
|
||||
[discourse]: https://users.rust-lang.org
|
408
vendor/serde_derive/src/bound.rs
vendored
Normal file
408
vendor/serde_derive/src/bound.rs
vendored
Normal file
@ -0,0 +1,408 @@
|
||||
use crate::internals::ast::{Container, Data};
|
||||
use crate::internals::{attr, ungroup};
|
||||
use proc_macro2::Span;
|
||||
use std::collections::HashSet;
|
||||
use syn::punctuated::{Pair, Punctuated};
|
||||
use syn::Token;
|
||||
|
||||
// Remove the default from every type parameter because in the generated impls
|
||||
// they look like associated types: "error: associated type bindings are not
|
||||
// allowed here".
|
||||
pub fn without_defaults(generics: &syn::Generics) -> syn::Generics {
|
||||
syn::Generics {
|
||||
params: generics
|
||||
.params
|
||||
.iter()
|
||||
.map(|param| match param {
|
||||
syn::GenericParam::Type(param) => syn::GenericParam::Type(syn::TypeParam {
|
||||
eq_token: None,
|
||||
default: None,
|
||||
..param.clone()
|
||||
}),
|
||||
_ => param.clone(),
|
||||
})
|
||||
.collect(),
|
||||
..generics.clone()
|
||||
}
|
||||
}
|
||||
|
||||
pub fn with_where_predicates(
|
||||
generics: &syn::Generics,
|
||||
predicates: &[syn::WherePredicate],
|
||||
) -> syn::Generics {
|
||||
let mut generics = generics.clone();
|
||||
generics
|
||||
.make_where_clause()
|
||||
.predicates
|
||||
.extend(predicates.iter().cloned());
|
||||
generics
|
||||
}
|
||||
|
||||
pub fn with_where_predicates_from_fields(
|
||||
cont: &Container,
|
||||
generics: &syn::Generics,
|
||||
from_field: fn(&attr::Field) -> Option<&[syn::WherePredicate]>,
|
||||
) -> syn::Generics {
|
||||
let predicates = cont
|
||||
.data
|
||||
.all_fields()
|
||||
.filter_map(|field| from_field(&field.attrs))
|
||||
.flat_map(<[syn::WherePredicate]>::to_vec);
|
||||
|
||||
let mut generics = generics.clone();
|
||||
generics.make_where_clause().predicates.extend(predicates);
|
||||
generics
|
||||
}
|
||||
|
||||
pub fn with_where_predicates_from_variants(
|
||||
cont: &Container,
|
||||
generics: &syn::Generics,
|
||||
from_variant: fn(&attr::Variant) -> Option<&[syn::WherePredicate]>,
|
||||
) -> syn::Generics {
|
||||
let variants = match &cont.data {
|
||||
Data::Enum(variants) => variants,
|
||||
Data::Struct(_, _) => {
|
||||
return generics.clone();
|
||||
}
|
||||
};
|
||||
|
||||
let predicates = variants
|
||||
.iter()
|
||||
.filter_map(|variant| from_variant(&variant.attrs))
|
||||
.flat_map(<[syn::WherePredicate]>::to_vec);
|
||||
|
||||
let mut generics = generics.clone();
|
||||
generics.make_where_clause().predicates.extend(predicates);
|
||||
generics
|
||||
}
|
||||
|
||||
// Puts the given bound on any generic type parameters that are used in fields
|
||||
// for which filter returns true.
|
||||
//
|
||||
// For example, the following struct needs the bound `A: Serialize, B:
|
||||
// Serialize`.
|
||||
//
|
||||
// struct S<'b, A, B: 'b, C> {
|
||||
// a: A,
|
||||
// b: Option<&'b B>
|
||||
// #[serde(skip_serializing)]
|
||||
// c: C,
|
||||
// }
|
||||
pub fn with_bound(
|
||||
cont: &Container,
|
||||
generics: &syn::Generics,
|
||||
filter: fn(&attr::Field, Option<&attr::Variant>) -> bool,
|
||||
bound: &syn::Path,
|
||||
) -> syn::Generics {
|
||||
struct FindTyParams<'ast> {
|
||||
// Set of all generic type parameters on the current struct (A, B, C in
|
||||
// the example). Initialized up front.
|
||||
all_type_params: HashSet<syn::Ident>,
|
||||
|
||||
// Set of generic type parameters used in fields for which filter
|
||||
// returns true (A and B in the example). Filled in as the visitor sees
|
||||
// them.
|
||||
relevant_type_params: HashSet<syn::Ident>,
|
||||
|
||||
// Fields whose type is an associated type of one of the generic type
|
||||
// parameters.
|
||||
associated_type_usage: Vec<&'ast syn::TypePath>,
|
||||
}
|
||||
|
||||
impl<'ast> FindTyParams<'ast> {
|
||||
fn visit_field(&mut self, field: &'ast syn::Field) {
|
||||
if let syn::Type::Path(ty) = ungroup(&field.ty) {
|
||||
if let Some(Pair::Punctuated(t, _)) = ty.path.segments.pairs().next() {
|
||||
if self.all_type_params.contains(&t.ident) {
|
||||
self.associated_type_usage.push(ty);
|
||||
}
|
||||
}
|
||||
}
|
||||
self.visit_type(&field.ty);
|
||||
}
|
||||
|
||||
fn visit_path(&mut self, path: &'ast syn::Path) {
|
||||
if let Some(seg) = path.segments.last() {
|
||||
if seg.ident == "PhantomData" {
|
||||
// Hardcoded exception, because PhantomData<T> implements
|
||||
// Serialize and Deserialize whether or not T implements it.
|
||||
return;
|
||||
}
|
||||
}
|
||||
if path.leading_colon.is_none() && path.segments.len() == 1 {
|
||||
let id = &path.segments[0].ident;
|
||||
if self.all_type_params.contains(id) {
|
||||
self.relevant_type_params.insert(id.clone());
|
||||
}
|
||||
}
|
||||
for segment in &path.segments {
|
||||
self.visit_path_segment(segment);
|
||||
}
|
||||
}
|
||||
|
||||
// Everything below is simply traversing the syntax tree.
|
||||
|
||||
fn visit_type(&mut self, ty: &'ast syn::Type) {
|
||||
match ty {
|
||||
#![cfg_attr(all(test, exhaustive), deny(non_exhaustive_omitted_patterns))]
|
||||
syn::Type::Array(ty) => self.visit_type(&ty.elem),
|
||||
syn::Type::BareFn(ty) => {
|
||||
for arg in &ty.inputs {
|
||||
self.visit_type(&arg.ty);
|
||||
}
|
||||
self.visit_return_type(&ty.output);
|
||||
}
|
||||
syn::Type::Group(ty) => self.visit_type(&ty.elem),
|
||||
syn::Type::ImplTrait(ty) => {
|
||||
for bound in &ty.bounds {
|
||||
self.visit_type_param_bound(bound);
|
||||
}
|
||||
}
|
||||
syn::Type::Macro(ty) => self.visit_macro(&ty.mac),
|
||||
syn::Type::Paren(ty) => self.visit_type(&ty.elem),
|
||||
syn::Type::Path(ty) => {
|
||||
if let Some(qself) = &ty.qself {
|
||||
self.visit_type(&qself.ty);
|
||||
}
|
||||
self.visit_path(&ty.path);
|
||||
}
|
||||
syn::Type::Ptr(ty) => self.visit_type(&ty.elem),
|
||||
syn::Type::Reference(ty) => self.visit_type(&ty.elem),
|
||||
syn::Type::Slice(ty) => self.visit_type(&ty.elem),
|
||||
syn::Type::TraitObject(ty) => {
|
||||
for bound in &ty.bounds {
|
||||
self.visit_type_param_bound(bound);
|
||||
}
|
||||
}
|
||||
syn::Type::Tuple(ty) => {
|
||||
for elem in &ty.elems {
|
||||
self.visit_type(elem);
|
||||
}
|
||||
}
|
||||
|
||||
syn::Type::Infer(_) | syn::Type::Never(_) | syn::Type::Verbatim(_) => {}
|
||||
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
|
||||
fn visit_path_segment(&mut self, segment: &'ast syn::PathSegment) {
|
||||
self.visit_path_arguments(&segment.arguments);
|
||||
}
|
||||
|
||||
fn visit_path_arguments(&mut self, arguments: &'ast syn::PathArguments) {
|
||||
match arguments {
|
||||
syn::PathArguments::None => {}
|
||||
syn::PathArguments::AngleBracketed(arguments) => {
|
||||
for arg in &arguments.args {
|
||||
match arg {
|
||||
#![cfg_attr(all(test, exhaustive), deny(non_exhaustive_omitted_patterns))]
|
||||
syn::GenericArgument::Type(arg) => self.visit_type(arg),
|
||||
syn::GenericArgument::AssocType(arg) => self.visit_type(&arg.ty),
|
||||
syn::GenericArgument::Lifetime(_)
|
||||
| syn::GenericArgument::Const(_)
|
||||
| syn::GenericArgument::AssocConst(_)
|
||||
| syn::GenericArgument::Constraint(_) => {}
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
}
|
||||
syn::PathArguments::Parenthesized(arguments) => {
|
||||
for argument in &arguments.inputs {
|
||||
self.visit_type(argument);
|
||||
}
|
||||
self.visit_return_type(&arguments.output);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn visit_return_type(&mut self, return_type: &'ast syn::ReturnType) {
|
||||
match return_type {
|
||||
syn::ReturnType::Default => {}
|
||||
syn::ReturnType::Type(_, output) => self.visit_type(output),
|
||||
}
|
||||
}
|
||||
|
||||
fn visit_type_param_bound(&mut self, bound: &'ast syn::TypeParamBound) {
|
||||
match bound {
|
||||
#![cfg_attr(all(test, exhaustive), deny(non_exhaustive_omitted_patterns))]
|
||||
syn::TypeParamBound::Trait(bound) => self.visit_path(&bound.path),
|
||||
syn::TypeParamBound::Lifetime(_) | syn::TypeParamBound::Verbatim(_) => {}
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
|
||||
// Type parameter should not be considered used by a macro path.
|
||||
//
|
||||
// struct TypeMacro<T> {
|
||||
// mac: T!(),
|
||||
// marker: PhantomData<T>,
|
||||
// }
|
||||
fn visit_macro(&mut self, _mac: &'ast syn::Macro) {}
|
||||
}
|
||||
|
||||
let all_type_params = generics
|
||||
.type_params()
|
||||
.map(|param| param.ident.clone())
|
||||
.collect();
|
||||
|
||||
let mut visitor = FindTyParams {
|
||||
all_type_params,
|
||||
relevant_type_params: HashSet::new(),
|
||||
associated_type_usage: Vec::new(),
|
||||
};
|
||||
match &cont.data {
|
||||
Data::Enum(variants) => {
|
||||
for variant in variants {
|
||||
let relevant_fields = variant
|
||||
.fields
|
||||
.iter()
|
||||
.filter(|field| filter(&field.attrs, Some(&variant.attrs)));
|
||||
for field in relevant_fields {
|
||||
visitor.visit_field(field.original);
|
||||
}
|
||||
}
|
||||
}
|
||||
Data::Struct(_, fields) => {
|
||||
for field in fields.iter().filter(|field| filter(&field.attrs, None)) {
|
||||
visitor.visit_field(field.original);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
let relevant_type_params = visitor.relevant_type_params;
|
||||
let associated_type_usage = visitor.associated_type_usage;
|
||||
let new_predicates = generics
|
||||
.type_params()
|
||||
.map(|param| param.ident.clone())
|
||||
.filter(|id| relevant_type_params.contains(id))
|
||||
.map(|id| syn::TypePath {
|
||||
qself: None,
|
||||
path: id.into(),
|
||||
})
|
||||
.chain(associated_type_usage.into_iter().cloned())
|
||||
.map(|bounded_ty| {
|
||||
syn::WherePredicate::Type(syn::PredicateType {
|
||||
lifetimes: None,
|
||||
// the type parameter that is being bounded e.g. T
|
||||
bounded_ty: syn::Type::Path(bounded_ty),
|
||||
colon_token: <Token![:]>::default(),
|
||||
// the bound e.g. Serialize
|
||||
bounds: vec![syn::TypeParamBound::Trait(syn::TraitBound {
|
||||
paren_token: None,
|
||||
modifier: syn::TraitBoundModifier::None,
|
||||
lifetimes: None,
|
||||
path: bound.clone(),
|
||||
})]
|
||||
.into_iter()
|
||||
.collect(),
|
||||
})
|
||||
});
|
||||
|
||||
let mut generics = generics.clone();
|
||||
generics
|
||||
.make_where_clause()
|
||||
.predicates
|
||||
.extend(new_predicates);
|
||||
generics
|
||||
}
|
||||
|
||||
pub fn with_self_bound(
|
||||
cont: &Container,
|
||||
generics: &syn::Generics,
|
||||
bound: &syn::Path,
|
||||
) -> syn::Generics {
|
||||
let mut generics = generics.clone();
|
||||
generics
|
||||
.make_where_clause()
|
||||
.predicates
|
||||
.push(syn::WherePredicate::Type(syn::PredicateType {
|
||||
lifetimes: None,
|
||||
// the type that is being bounded e.g. MyStruct<'a, T>
|
||||
bounded_ty: type_of_item(cont),
|
||||
colon_token: <Token![:]>::default(),
|
||||
// the bound e.g. Default
|
||||
bounds: vec![syn::TypeParamBound::Trait(syn::TraitBound {
|
||||
paren_token: None,
|
||||
modifier: syn::TraitBoundModifier::None,
|
||||
lifetimes: None,
|
||||
path: bound.clone(),
|
||||
})]
|
||||
.into_iter()
|
||||
.collect(),
|
||||
}));
|
||||
generics
|
||||
}
|
||||
|
||||
pub fn with_lifetime_bound(generics: &syn::Generics, lifetime: &str) -> syn::Generics {
|
||||
let bound = syn::Lifetime::new(lifetime, Span::call_site());
|
||||
let def = syn::LifetimeParam {
|
||||
attrs: Vec::new(),
|
||||
lifetime: bound.clone(),
|
||||
colon_token: None,
|
||||
bounds: Punctuated::new(),
|
||||
};
|
||||
|
||||
let params = Some(syn::GenericParam::Lifetime(def))
|
||||
.into_iter()
|
||||
.chain(generics.params.iter().cloned().map(|mut param| {
|
||||
match &mut param {
|
||||
syn::GenericParam::Lifetime(param) => {
|
||||
param.bounds.push(bound.clone());
|
||||
}
|
||||
syn::GenericParam::Type(param) => {
|
||||
param
|
||||
.bounds
|
||||
.push(syn::TypeParamBound::Lifetime(bound.clone()));
|
||||
}
|
||||
syn::GenericParam::Const(_) => {}
|
||||
}
|
||||
param
|
||||
}))
|
||||
.collect();
|
||||
|
||||
syn::Generics {
|
||||
params,
|
||||
..generics.clone()
|
||||
}
|
||||
}
|
||||
|
||||
fn type_of_item(cont: &Container) -> syn::Type {
|
||||
syn::Type::Path(syn::TypePath {
|
||||
qself: None,
|
||||
path: syn::Path {
|
||||
leading_colon: None,
|
||||
segments: vec![syn::PathSegment {
|
||||
ident: cont.ident.clone(),
|
||||
arguments: syn::PathArguments::AngleBracketed(
|
||||
syn::AngleBracketedGenericArguments {
|
||||
colon2_token: None,
|
||||
lt_token: <Token![<]>::default(),
|
||||
args: cont
|
||||
.generics
|
||||
.params
|
||||
.iter()
|
||||
.map(|param| match param {
|
||||
syn::GenericParam::Type(param) => {
|
||||
syn::GenericArgument::Type(syn::Type::Path(syn::TypePath {
|
||||
qself: None,
|
||||
path: param.ident.clone().into(),
|
||||
}))
|
||||
}
|
||||
syn::GenericParam::Lifetime(param) => {
|
||||
syn::GenericArgument::Lifetime(param.lifetime.clone())
|
||||
}
|
||||
syn::GenericParam::Const(_) => {
|
||||
panic!("Serde does not support const generics yet");
|
||||
}
|
||||
})
|
||||
.collect(),
|
||||
gt_token: <Token![>]>::default(),
|
||||
},
|
||||
),
|
||||
}]
|
||||
.into_iter()
|
||||
.collect(),
|
||||
},
|
||||
})
|
||||
}
|
3148
vendor/serde_derive/src/de.rs
vendored
Normal file
3148
vendor/serde_derive/src/de.rs
vendored
Normal file
File diff suppressed because it is too large
Load Diff
23
vendor/serde_derive/src/dummy.rs
vendored
Normal file
23
vendor/serde_derive/src/dummy.rs
vendored
Normal file
@ -0,0 +1,23 @@
|
||||
use proc_macro2::TokenStream;
|
||||
use quote::quote;
|
||||
|
||||
pub fn wrap_in_const(serde_path: Option<&syn::Path>, code: TokenStream) -> TokenStream {
|
||||
let use_serde = match serde_path {
|
||||
Some(path) => quote! {
|
||||
use #path as _serde;
|
||||
},
|
||||
None => quote! {
|
||||
#[allow(unused_extern_crates, clippy::useless_attribute)]
|
||||
extern crate serde as _serde;
|
||||
},
|
||||
};
|
||||
|
||||
quote! {
|
||||
#[doc(hidden)]
|
||||
#[allow(non_upper_case_globals, unused_attributes, unused_qualifications)]
|
||||
const _: () = {
|
||||
#use_serde
|
||||
#code
|
||||
};
|
||||
}
|
||||
}
|
74
vendor/serde_derive/src/fragment.rs
vendored
Normal file
74
vendor/serde_derive/src/fragment.rs
vendored
Normal file
@ -0,0 +1,74 @@
|
||||
use proc_macro2::TokenStream;
|
||||
use quote::ToTokens;
|
||||
use syn::{token, Token};
|
||||
|
||||
pub enum Fragment {
|
||||
/// Tokens that can be used as an expression.
|
||||
Expr(TokenStream),
|
||||
/// Tokens that can be used inside a block. The surrounding curly braces are
|
||||
/// not part of these tokens.
|
||||
Block(TokenStream),
|
||||
}
|
||||
|
||||
macro_rules! quote_expr {
|
||||
($($tt:tt)*) => {
|
||||
$crate::fragment::Fragment::Expr(quote!($($tt)*))
|
||||
}
|
||||
}
|
||||
|
||||
macro_rules! quote_block {
|
||||
($($tt:tt)*) => {
|
||||
$crate::fragment::Fragment::Block(quote!($($tt)*))
|
||||
}
|
||||
}
|
||||
|
||||
/// Interpolate a fragment in place of an expression. This involves surrounding
|
||||
/// Block fragments in curly braces.
|
||||
pub struct Expr(pub Fragment);
|
||||
impl ToTokens for Expr {
|
||||
fn to_tokens(&self, out: &mut TokenStream) {
|
||||
match &self.0 {
|
||||
Fragment::Expr(expr) => expr.to_tokens(out),
|
||||
Fragment::Block(block) => {
|
||||
token::Brace::default().surround(out, |out| block.to_tokens(out));
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Interpolate a fragment as the statements of a block.
|
||||
pub struct Stmts(pub Fragment);
|
||||
impl ToTokens for Stmts {
|
||||
fn to_tokens(&self, out: &mut TokenStream) {
|
||||
match &self.0 {
|
||||
Fragment::Expr(expr) => expr.to_tokens(out),
|
||||
Fragment::Block(block) => block.to_tokens(out),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Interpolate a fragment as the value part of a `match` expression. This
|
||||
/// involves putting a comma after expressions and curly braces around blocks.
|
||||
pub struct Match(pub Fragment);
|
||||
impl ToTokens for Match {
|
||||
fn to_tokens(&self, out: &mut TokenStream) {
|
||||
match &self.0 {
|
||||
Fragment::Expr(expr) => {
|
||||
expr.to_tokens(out);
|
||||
<Token![,]>::default().to_tokens(out);
|
||||
}
|
||||
Fragment::Block(block) => {
|
||||
token::Brace::default().surround(out, |out| block.to_tokens(out));
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl AsRef<TokenStream> for Fragment {
|
||||
fn as_ref(&self) -> &TokenStream {
|
||||
match self {
|
||||
Fragment::Expr(expr) => expr,
|
||||
Fragment::Block(block) => block,
|
||||
}
|
||||
}
|
||||
}
|
216
vendor/serde_derive/src/internals/ast.rs
vendored
Normal file
216
vendor/serde_derive/src/internals/ast.rs
vendored
Normal file
@ -0,0 +1,216 @@
|
||||
//! A Serde ast, parsed from the Syn ast and ready to generate Rust code.
|
||||
|
||||
use crate::internals::{attr, check, Ctxt, Derive};
|
||||
use syn::punctuated::Punctuated;
|
||||
use syn::Token;
|
||||
|
||||
/// A source data structure annotated with `#[derive(Serialize)]` and/or `#[derive(Deserialize)]`,
|
||||
/// parsed into an internal representation.
|
||||
pub struct Container<'a> {
|
||||
/// The struct or enum name (without generics).
|
||||
pub ident: syn::Ident,
|
||||
/// Attributes on the structure, parsed for Serde.
|
||||
pub attrs: attr::Container,
|
||||
/// The contents of the struct or enum.
|
||||
pub data: Data<'a>,
|
||||
/// Any generics on the struct or enum.
|
||||
pub generics: &'a syn::Generics,
|
||||
/// Original input.
|
||||
pub original: &'a syn::DeriveInput,
|
||||
}
|
||||
|
||||
/// The fields of a struct or enum.
|
||||
///
|
||||
/// Analogous to `syn::Data`.
|
||||
pub enum Data<'a> {
|
||||
Enum(Vec<Variant<'a>>),
|
||||
Struct(Style, Vec<Field<'a>>),
|
||||
}
|
||||
|
||||
/// A variant of an enum.
|
||||
pub struct Variant<'a> {
|
||||
pub ident: syn::Ident,
|
||||
pub attrs: attr::Variant,
|
||||
pub style: Style,
|
||||
pub fields: Vec<Field<'a>>,
|
||||
pub original: &'a syn::Variant,
|
||||
}
|
||||
|
||||
/// A field of a struct.
|
||||
pub struct Field<'a> {
|
||||
pub member: syn::Member,
|
||||
pub attrs: attr::Field,
|
||||
pub ty: &'a syn::Type,
|
||||
pub original: &'a syn::Field,
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone)]
|
||||
pub enum Style {
|
||||
/// Named fields.
|
||||
Struct,
|
||||
/// Many unnamed fields.
|
||||
Tuple,
|
||||
/// One unnamed field.
|
||||
Newtype,
|
||||
/// No fields.
|
||||
Unit,
|
||||
}
|
||||
|
||||
impl<'a> Container<'a> {
|
||||
/// Convert the raw Syn ast into a parsed container object, collecting errors in `cx`.
|
||||
pub fn from_ast(
|
||||
cx: &Ctxt,
|
||||
item: &'a syn::DeriveInput,
|
||||
derive: Derive,
|
||||
) -> Option<Container<'a>> {
|
||||
let mut attrs = attr::Container::from_ast(cx, item);
|
||||
|
||||
let mut data = match &item.data {
|
||||
syn::Data::Enum(data) => Data::Enum(enum_from_ast(cx, &data.variants, attrs.default())),
|
||||
syn::Data::Struct(data) => {
|
||||
let (style, fields) = struct_from_ast(cx, &data.fields, None, attrs.default());
|
||||
Data::Struct(style, fields)
|
||||
}
|
||||
syn::Data::Union(_) => {
|
||||
cx.error_spanned_by(item, "Serde does not support derive for unions");
|
||||
return None;
|
||||
}
|
||||
};
|
||||
|
||||
let mut has_flatten = false;
|
||||
match &mut data {
|
||||
Data::Enum(variants) => {
|
||||
for variant in variants {
|
||||
variant.attrs.rename_by_rules(attrs.rename_all_rules());
|
||||
for field in &mut variant.fields {
|
||||
if field.attrs.flatten() {
|
||||
has_flatten = true;
|
||||
}
|
||||
field.attrs.rename_by_rules(
|
||||
variant
|
||||
.attrs
|
||||
.rename_all_rules()
|
||||
.or(attrs.rename_all_fields_rules()),
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
Data::Struct(_, fields) => {
|
||||
for field in fields {
|
||||
if field.attrs.flatten() {
|
||||
has_flatten = true;
|
||||
}
|
||||
field.attrs.rename_by_rules(attrs.rename_all_rules());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if has_flatten {
|
||||
attrs.mark_has_flatten();
|
||||
}
|
||||
|
||||
let mut item = Container {
|
||||
ident: item.ident.clone(),
|
||||
attrs,
|
||||
data,
|
||||
generics: &item.generics,
|
||||
original: item,
|
||||
};
|
||||
check::check(cx, &mut item, derive);
|
||||
Some(item)
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> Data<'a> {
|
||||
pub fn all_fields(&'a self) -> Box<dyn Iterator<Item = &'a Field<'a>> + 'a> {
|
||||
match self {
|
||||
Data::Enum(variants) => {
|
||||
Box::new(variants.iter().flat_map(|variant| variant.fields.iter()))
|
||||
}
|
||||
Data::Struct(_, fields) => Box::new(fields.iter()),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn has_getter(&self) -> bool {
|
||||
self.all_fields().any(|f| f.attrs.getter().is_some())
|
||||
}
|
||||
}
|
||||
|
||||
fn enum_from_ast<'a>(
|
||||
cx: &Ctxt,
|
||||
variants: &'a Punctuated<syn::Variant, Token![,]>,
|
||||
container_default: &attr::Default,
|
||||
) -> Vec<Variant<'a>> {
|
||||
let variants: Vec<Variant> = variants
|
||||
.iter()
|
||||
.map(|variant| {
|
||||
let attrs = attr::Variant::from_ast(cx, variant);
|
||||
let (style, fields) =
|
||||
struct_from_ast(cx, &variant.fields, Some(&attrs), container_default);
|
||||
Variant {
|
||||
ident: variant.ident.clone(),
|
||||
attrs,
|
||||
style,
|
||||
fields,
|
||||
original: variant,
|
||||
}
|
||||
})
|
||||
.collect();
|
||||
|
||||
let index_of_last_tagged_variant = variants
|
||||
.iter()
|
||||
.rposition(|variant| !variant.attrs.untagged());
|
||||
if let Some(index_of_last_tagged_variant) = index_of_last_tagged_variant {
|
||||
for variant in &variants[..index_of_last_tagged_variant] {
|
||||
if variant.attrs.untagged() {
|
||||
cx.error_spanned_by(&variant.ident, "all variants with the #[serde(untagged)] attribute must be placed at the end of the enum");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
variants
|
||||
}
|
||||
|
||||
fn struct_from_ast<'a>(
|
||||
cx: &Ctxt,
|
||||
fields: &'a syn::Fields,
|
||||
attrs: Option<&attr::Variant>,
|
||||
container_default: &attr::Default,
|
||||
) -> (Style, Vec<Field<'a>>) {
|
||||
match fields {
|
||||
syn::Fields::Named(fields) => (
|
||||
Style::Struct,
|
||||
fields_from_ast(cx, &fields.named, attrs, container_default),
|
||||
),
|
||||
syn::Fields::Unnamed(fields) if fields.unnamed.len() == 1 => (
|
||||
Style::Newtype,
|
||||
fields_from_ast(cx, &fields.unnamed, attrs, container_default),
|
||||
),
|
||||
syn::Fields::Unnamed(fields) => (
|
||||
Style::Tuple,
|
||||
fields_from_ast(cx, &fields.unnamed, attrs, container_default),
|
||||
),
|
||||
syn::Fields::Unit => (Style::Unit, Vec::new()),
|
||||
}
|
||||
}
|
||||
|
||||
fn fields_from_ast<'a>(
|
||||
cx: &Ctxt,
|
||||
fields: &'a Punctuated<syn::Field, Token![,]>,
|
||||
attrs: Option<&attr::Variant>,
|
||||
container_default: &attr::Default,
|
||||
) -> Vec<Field<'a>> {
|
||||
fields
|
||||
.iter()
|
||||
.enumerate()
|
||||
.map(|(i, field)| Field {
|
||||
member: match &field.ident {
|
||||
Some(ident) => syn::Member::Named(ident.clone()),
|
||||
None => syn::Member::Unnamed(i.into()),
|
||||
},
|
||||
attrs: attr::Field::from_ast(cx, i, field, attrs, container_default),
|
||||
ty: &field.ty,
|
||||
original: field,
|
||||
})
|
||||
.collect()
|
||||
}
|
1881
vendor/serde_derive/src/internals/attr.rs
vendored
Normal file
1881
vendor/serde_derive/src/internals/attr.rs
vendored
Normal file
File diff suppressed because it is too large
Load Diff
200
vendor/serde_derive/src/internals/case.rs
vendored
Normal file
200
vendor/serde_derive/src/internals/case.rs
vendored
Normal file
@ -0,0 +1,200 @@
|
||||
//! Code to convert the Rust-styled field/variant (e.g. `my_field`, `MyType`) to the
|
||||
//! case of the source (e.g. `my-field`, `MY_FIELD`).
|
||||
|
||||
use self::RenameRule::*;
|
||||
use std::fmt::{self, Debug, Display};
|
||||
|
||||
/// The different possible ways to change case of fields in a struct, or variants in an enum.
|
||||
#[derive(Copy, Clone, PartialEq)]
|
||||
pub enum RenameRule {
|
||||
/// Don't apply a default rename rule.
|
||||
None,
|
||||
/// Rename direct children to "lowercase" style.
|
||||
LowerCase,
|
||||
/// Rename direct children to "UPPERCASE" style.
|
||||
UpperCase,
|
||||
/// Rename direct children to "PascalCase" style, as typically used for
|
||||
/// enum variants.
|
||||
PascalCase,
|
||||
/// Rename direct children to "camelCase" style.
|
||||
CamelCase,
|
||||
/// Rename direct children to "snake_case" style, as commonly used for
|
||||
/// fields.
|
||||
SnakeCase,
|
||||
/// Rename direct children to "SCREAMING_SNAKE_CASE" style, as commonly
|
||||
/// used for constants.
|
||||
ScreamingSnakeCase,
|
||||
/// Rename direct children to "kebab-case" style.
|
||||
KebabCase,
|
||||
/// Rename direct children to "SCREAMING-KEBAB-CASE" style.
|
||||
ScreamingKebabCase,
|
||||
}
|
||||
|
||||
static RENAME_RULES: &[(&str, RenameRule)] = &[
|
||||
("lowercase", LowerCase),
|
||||
("UPPERCASE", UpperCase),
|
||||
("PascalCase", PascalCase),
|
||||
("camelCase", CamelCase),
|
||||
("snake_case", SnakeCase),
|
||||
("SCREAMING_SNAKE_CASE", ScreamingSnakeCase),
|
||||
("kebab-case", KebabCase),
|
||||
("SCREAMING-KEBAB-CASE", ScreamingKebabCase),
|
||||
];
|
||||
|
||||
impl RenameRule {
|
||||
pub fn from_str(rename_all_str: &str) -> Result<Self, ParseError> {
|
||||
for (name, rule) in RENAME_RULES {
|
||||
if rename_all_str == *name {
|
||||
return Ok(*rule);
|
||||
}
|
||||
}
|
||||
Err(ParseError {
|
||||
unknown: rename_all_str,
|
||||
})
|
||||
}
|
||||
|
||||
/// Apply a renaming rule to an enum variant, returning the version expected in the source.
|
||||
pub fn apply_to_variant(self, variant: &str) -> String {
|
||||
match self {
|
||||
None | PascalCase => variant.to_owned(),
|
||||
LowerCase => variant.to_ascii_lowercase(),
|
||||
UpperCase => variant.to_ascii_uppercase(),
|
||||
CamelCase => variant[..1].to_ascii_lowercase() + &variant[1..],
|
||||
SnakeCase => {
|
||||
let mut snake = String::new();
|
||||
for (i, ch) in variant.char_indices() {
|
||||
if i > 0 && ch.is_uppercase() {
|
||||
snake.push('_');
|
||||
}
|
||||
snake.push(ch.to_ascii_lowercase());
|
||||
}
|
||||
snake
|
||||
}
|
||||
ScreamingSnakeCase => SnakeCase.apply_to_variant(variant).to_ascii_uppercase(),
|
||||
KebabCase => SnakeCase.apply_to_variant(variant).replace('_', "-"),
|
||||
ScreamingKebabCase => ScreamingSnakeCase
|
||||
.apply_to_variant(variant)
|
||||
.replace('_', "-"),
|
||||
}
|
||||
}
|
||||
|
||||
/// Apply a renaming rule to a struct field, returning the version expected in the source.
|
||||
pub fn apply_to_field(self, field: &str) -> String {
|
||||
match self {
|
||||
None | LowerCase | SnakeCase => field.to_owned(),
|
||||
UpperCase => field.to_ascii_uppercase(),
|
||||
PascalCase => {
|
||||
let mut pascal = String::new();
|
||||
let mut capitalize = true;
|
||||
for ch in field.chars() {
|
||||
if ch == '_' {
|
||||
capitalize = true;
|
||||
} else if capitalize {
|
||||
pascal.push(ch.to_ascii_uppercase());
|
||||
capitalize = false;
|
||||
} else {
|
||||
pascal.push(ch);
|
||||
}
|
||||
}
|
||||
pascal
|
||||
}
|
||||
CamelCase => {
|
||||
let pascal = PascalCase.apply_to_field(field);
|
||||
pascal[..1].to_ascii_lowercase() + &pascal[1..]
|
||||
}
|
||||
ScreamingSnakeCase => field.to_ascii_uppercase(),
|
||||
KebabCase => field.replace('_', "-"),
|
||||
ScreamingKebabCase => ScreamingSnakeCase.apply_to_field(field).replace('_', "-"),
|
||||
}
|
||||
}
|
||||
|
||||
/// Returns the `RenameRule` if it is not `None`, `rule_b` otherwise.
|
||||
pub fn or(self, rule_b: Self) -> Self {
|
||||
match self {
|
||||
None => rule_b,
|
||||
_ => self,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub struct ParseError<'a> {
|
||||
unknown: &'a str,
|
||||
}
|
||||
|
||||
impl<'a> Display for ParseError<'a> {
|
||||
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
||||
f.write_str("unknown rename rule `rename_all = ")?;
|
||||
Debug::fmt(self.unknown, f)?;
|
||||
f.write_str("`, expected one of ")?;
|
||||
for (i, (name, _rule)) in RENAME_RULES.iter().enumerate() {
|
||||
if i > 0 {
|
||||
f.write_str(", ")?;
|
||||
}
|
||||
Debug::fmt(name, f)?;
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn rename_variants() {
|
||||
for &(original, lower, upper, camel, snake, screaming, kebab, screaming_kebab) in &[
|
||||
(
|
||||
"Outcome", "outcome", "OUTCOME", "outcome", "outcome", "OUTCOME", "outcome", "OUTCOME",
|
||||
),
|
||||
(
|
||||
"VeryTasty",
|
||||
"verytasty",
|
||||
"VERYTASTY",
|
||||
"veryTasty",
|
||||
"very_tasty",
|
||||
"VERY_TASTY",
|
||||
"very-tasty",
|
||||
"VERY-TASTY",
|
||||
),
|
||||
("A", "a", "A", "a", "a", "A", "a", "A"),
|
||||
("Z42", "z42", "Z42", "z42", "z42", "Z42", "z42", "Z42"),
|
||||
] {
|
||||
assert_eq!(None.apply_to_variant(original), original);
|
||||
assert_eq!(LowerCase.apply_to_variant(original), lower);
|
||||
assert_eq!(UpperCase.apply_to_variant(original), upper);
|
||||
assert_eq!(PascalCase.apply_to_variant(original), original);
|
||||
assert_eq!(CamelCase.apply_to_variant(original), camel);
|
||||
assert_eq!(SnakeCase.apply_to_variant(original), snake);
|
||||
assert_eq!(ScreamingSnakeCase.apply_to_variant(original), screaming);
|
||||
assert_eq!(KebabCase.apply_to_variant(original), kebab);
|
||||
assert_eq!(
|
||||
ScreamingKebabCase.apply_to_variant(original),
|
||||
screaming_kebab
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn rename_fields() {
|
||||
for &(original, upper, pascal, camel, screaming, kebab, screaming_kebab) in &[
|
||||
(
|
||||
"outcome", "OUTCOME", "Outcome", "outcome", "OUTCOME", "outcome", "OUTCOME",
|
||||
),
|
||||
(
|
||||
"very_tasty",
|
||||
"VERY_TASTY",
|
||||
"VeryTasty",
|
||||
"veryTasty",
|
||||
"VERY_TASTY",
|
||||
"very-tasty",
|
||||
"VERY-TASTY",
|
||||
),
|
||||
("a", "A", "A", "a", "A", "a", "A"),
|
||||
("z42", "Z42", "Z42", "z42", "Z42", "z42", "Z42"),
|
||||
] {
|
||||
assert_eq!(None.apply_to_field(original), original);
|
||||
assert_eq!(UpperCase.apply_to_field(original), upper);
|
||||
assert_eq!(PascalCase.apply_to_field(original), pascal);
|
||||
assert_eq!(CamelCase.apply_to_field(original), camel);
|
||||
assert_eq!(SnakeCase.apply_to_field(original), original);
|
||||
assert_eq!(ScreamingSnakeCase.apply_to_field(original), screaming);
|
||||
assert_eq!(KebabCase.apply_to_field(original), kebab);
|
||||
assert_eq!(ScreamingKebabCase.apply_to_field(original), screaming_kebab);
|
||||
}
|
||||
}
|
477
vendor/serde_derive/src/internals/check.rs
vendored
Normal file
477
vendor/serde_derive/src/internals/check.rs
vendored
Normal file
@ -0,0 +1,477 @@
use crate::internals::ast::{Container, Data, Field, Style};
use crate::internals::attr::{Default, Identifier, TagType};
use crate::internals::{ungroup, Ctxt, Derive};
use syn::{Member, Type};

// Cross-cutting checks that require looking at more than a single attrs object.
// Simpler checks should happen when parsing and building the attrs.
pub fn check(cx: &Ctxt, cont: &mut Container, derive: Derive) {
    check_default_on_tuple(cx, cont);
    check_remote_generic(cx, cont);
    check_getter(cx, cont);
    check_flatten(cx, cont);
    check_identifier(cx, cont);
    check_variant_skip_attrs(cx, cont);
    check_internal_tag_field_name_conflict(cx, cont);
    check_adjacent_tag_conflict(cx, cont);
    check_transparent(cx, cont, derive);
    check_from_and_try_from(cx, cont);
}

// If some field of a tuple struct is marked #[serde(default)] then all fields
// after it must also be marked with that attribute, or the struct must have a
// container-level serde(default) attribute. A field's default value is only
// used for tuple fields if the sequence is exhausted at that point; that means
// all subsequent fields will fail to deserialize if they don't have their own
// default.
fn check_default_on_tuple(cx: &Ctxt, cont: &Container) {
    if let Default::None = cont.attrs.default() {
        if let Data::Struct(Style::Tuple, fields) = &cont.data {
            let mut first_default_index = None;
            for (i, field) in fields.iter().enumerate() {
                // Skipped fields automatically get the #[serde(default)]
                // attribute. We are interested only in non-skipped fields here.
                if field.attrs.skip_deserializing() {
                    continue;
                }
                if let Default::None = field.attrs.default() {
                    if let Some(first) = first_default_index {
                        cx.error_spanned_by(
                            field.ty,
                            format!("field must have #[serde(default)] because previous field {} has #[serde(default)]", first),
                        );
                    }
                    continue;
                }
                if first_default_index.is_none() {
                    first_default_index = Some(i);
                }
            }
        }
    }
}

// Remote derive definition type must have either all of the generics of the
// remote type:
//
//     #[serde(remote = "Generic")]
//     struct Generic<T> {…}
//
// or none of them, i.e. defining impls for one concrete instantiation of the
// remote type only:
//
//     #[serde(remote = "Generic<T>")]
//     struct ConcreteDef {…}
//
fn check_remote_generic(cx: &Ctxt, cont: &Container) {
    if let Some(remote) = cont.attrs.remote() {
        let local_has_generic = !cont.generics.params.is_empty();
        let remote_has_generic = !remote.segments.last().unwrap().arguments.is_none();
        if local_has_generic && remote_has_generic {
            cx.error_spanned_by(remote, "remove generic parameters from this path");
        }
    }
}

// Getters are only allowed inside structs (not enums) with the `remote`
// attribute.
fn check_getter(cx: &Ctxt, cont: &Container) {
    match cont.data {
        Data::Enum(_) => {
            if cont.data.has_getter() {
                cx.error_spanned_by(
                    cont.original,
                    "#[serde(getter = \"...\")] is not allowed in an enum",
                );
            }
        }
        Data::Struct(_, _) => {
            if cont.data.has_getter() && cont.attrs.remote().is_none() {
                cx.error_spanned_by(
                    cont.original,
                    "#[serde(getter = \"...\")] can only be used in structs that have #[serde(remote = \"...\")]",
                );
            }
        }
    }
}

// Flattening has some restrictions we can test.
fn check_flatten(cx: &Ctxt, cont: &Container) {
    match &cont.data {
        Data::Enum(variants) => {
            for variant in variants {
                for field in &variant.fields {
                    check_flatten_field(cx, variant.style, field);
                }
            }
        }
        Data::Struct(style, fields) => {
            for field in fields {
                check_flatten_field(cx, *style, field);
            }
        }
    }
}

fn check_flatten_field(cx: &Ctxt, style: Style, field: &Field) {
    if !field.attrs.flatten() {
        return;
    }
    match style {
        Style::Tuple => {
            cx.error_spanned_by(
                field.original,
                "#[serde(flatten)] cannot be used on tuple structs",
            );
        }
        Style::Newtype => {
            cx.error_spanned_by(
                field.original,
                "#[serde(flatten)] cannot be used on newtype structs",
            );
        }
        _ => {}
    }
}

// The `other` attribute must be used at most once and it must be the last
// variant of an enum.
//
// Inside a `variant_identifier` all variants must be unit variants. Inside a
// `field_identifier` all but possibly one variant must be unit variants. The
// last variant may be a newtype variant which is an implicit "other" case.
fn check_identifier(cx: &Ctxt, cont: &Container) {
    let variants = match &cont.data {
        Data::Enum(variants) => variants,
        Data::Struct(_, _) => return,
    };

    for (i, variant) in variants.iter().enumerate() {
        match (
            variant.style,
            cont.attrs.identifier(),
            variant.attrs.other(),
            cont.attrs.tag(),
        ) {
            // The `other` attribute may not be used in a variant_identifier.
            (_, Identifier::Variant, true, _) => {
                cx.error_spanned_by(
                    variant.original,
                    "#[serde(other)] may not be used on a variant identifier",
                );
            }

            // Variant with `other` attribute cannot appear in untagged enum
            (_, Identifier::No, true, &TagType::None) => {
                cx.error_spanned_by(
                    variant.original,
                    "#[serde(other)] cannot appear on untagged enum",
                );
            }

            // Variant with `other` attribute must be the last one.
            (Style::Unit, Identifier::Field, true, _) | (Style::Unit, Identifier::No, true, _) => {
                if i < variants.len() - 1 {
                    cx.error_spanned_by(
                        variant.original,
                        "#[serde(other)] must be on the last variant",
                    );
                }
            }

            // Variant with `other` attribute must be a unit variant.
            (_, Identifier::Field, true, _) | (_, Identifier::No, true, _) => {
                cx.error_spanned_by(
                    variant.original,
                    "#[serde(other)] must be on a unit variant",
                );
            }

            // Any sort of variant is allowed if this is not an identifier.
            (_, Identifier::No, false, _) => {}

            // Unit variant without `other` attribute is always fine.
            (Style::Unit, _, false, _) => {}

            // The last field is allowed to be a newtype catch-all.
            (Style::Newtype, Identifier::Field, false, _) => {
                if i < variants.len() - 1 {
                    cx.error_spanned_by(
                        variant.original,
                        format!("`{}` must be the last variant", variant.ident),
                    );
                }
            }

            (_, Identifier::Field, false, _) => {
                cx.error_spanned_by(
                    variant.original,
                    "#[serde(field_identifier)] may only contain unit variants",
                );
            }

            (_, Identifier::Variant, false, _) => {
                cx.error_spanned_by(
                    variant.original,
                    "#[serde(variant_identifier)] may only contain unit variants",
                );
            }
        }
    }
}

// Skip-(de)serializing attributes are not allowed on variants marked
// (de)serialize_with.
fn check_variant_skip_attrs(cx: &Ctxt, cont: &Container) {
    let variants = match &cont.data {
        Data::Enum(variants) => variants,
        Data::Struct(_, _) => return,
    };

    for variant in variants {
        if variant.attrs.serialize_with().is_some() {
            if variant.attrs.skip_serializing() {
                cx.error_spanned_by(
                    variant.original,
                    format!(
                        "variant `{}` cannot have both #[serde(serialize_with)] and #[serde(skip_serializing)]",
                        variant.ident
                    ),
                );
            }

            for field in &variant.fields {
                let member = member_message(&field.member);

                if field.attrs.skip_serializing() {
                    cx.error_spanned_by(
                        variant.original,
                        format!(
                            "variant `{}` cannot have both #[serde(serialize_with)] and a field {} marked with #[serde(skip_serializing)]",
                            variant.ident, member
                        ),
                    );
                }

                if field.attrs.skip_serializing_if().is_some() {
                    cx.error_spanned_by(
                        variant.original,
                        format!(
                            "variant `{}` cannot have both #[serde(serialize_with)] and a field {} marked with #[serde(skip_serializing_if)]",
                            variant.ident, member
                        ),
                    );
                }
            }
        }

        if variant.attrs.deserialize_with().is_some() {
            if variant.attrs.skip_deserializing() {
                cx.error_spanned_by(
                    variant.original,
                    format!(
                        "variant `{}` cannot have both #[serde(deserialize_with)] and #[serde(skip_deserializing)]",
                        variant.ident
                    ),
                );
            }

            for field in &variant.fields {
                if field.attrs.skip_deserializing() {
                    let member = member_message(&field.member);

                    cx.error_spanned_by(
                        variant.original,
                        format!(
                            "variant `{}` cannot have both #[serde(deserialize_with)] and a field {} marked with #[serde(skip_deserializing)]",
                            variant.ident, member
                        ),
                    );
                }
            }
        }
    }
}

// The tag of an internally-tagged struct variant must not be the same as either
// one of its fields, as this would result in duplicate keys in the serialized
// output and/or ambiguity in the to-be-deserialized input.
fn check_internal_tag_field_name_conflict(cx: &Ctxt, cont: &Container) {
    let variants = match &cont.data {
        Data::Enum(variants) => variants,
        Data::Struct(_, _) => return,
    };

    let tag = match cont.attrs.tag() {
        TagType::Internal { tag } => tag.as_str(),
        TagType::External | TagType::Adjacent { .. } | TagType::None => return,
    };

    let diagnose_conflict = || {
        cx.error_spanned_by(
            cont.original,
            format!("variant field name `{}` conflicts with internal tag", tag),
        );
    };

    for variant in variants {
        match variant.style {
            Style::Struct => {
                if variant.attrs.untagged() {
                    continue;
                }
                for field in &variant.fields {
                    let check_ser =
                        !(field.attrs.skip_serializing() || variant.attrs.skip_serializing());
                    let check_de =
                        !(field.attrs.skip_deserializing() || variant.attrs.skip_deserializing());
                    let name = field.attrs.name();
                    let ser_name = name.serialize_name();

                    if check_ser && ser_name == tag {
                        diagnose_conflict();
                        return;
                    }

                    for de_name in field.attrs.aliases() {
                        if check_de && de_name == tag {
                            diagnose_conflict();
                            return;
                        }
                    }
                }
            }
            Style::Unit | Style::Newtype | Style::Tuple => {}
        }
    }
}

// In the case of adjacently-tagged enums, the type and the contents tag must
// differ, for the same reason.
fn check_adjacent_tag_conflict(cx: &Ctxt, cont: &Container) {
    let (type_tag, content_tag) = match cont.attrs.tag() {
        TagType::Adjacent { tag, content } => (tag, content),
        TagType::Internal { .. } | TagType::External | TagType::None => return,
    };

    if type_tag == content_tag {
        cx.error_spanned_by(
            cont.original,
            format!(
                "enum tags `{}` for type and content conflict with each other",
                type_tag
            ),
        );
    }
}

// Enums and unit structs cannot be transparent.
fn check_transparent(cx: &Ctxt, cont: &mut Container, derive: Derive) {
    if !cont.attrs.transparent() {
        return;
    }

    if cont.attrs.type_from().is_some() {
        cx.error_spanned_by(
            cont.original,
            "#[serde(transparent)] is not allowed with #[serde(from = \"...\")]",
        );
    }

    if cont.attrs.type_try_from().is_some() {
        cx.error_spanned_by(
            cont.original,
            "#[serde(transparent)] is not allowed with #[serde(try_from = \"...\")]",
        );
    }

    if cont.attrs.type_into().is_some() {
        cx.error_spanned_by(
            cont.original,
            "#[serde(transparent)] is not allowed with #[serde(into = \"...\")]",
        );
    }

    let fields = match &mut cont.data {
        Data::Enum(_) => {
            cx.error_spanned_by(
                cont.original,
                "#[serde(transparent)] is not allowed on an enum",
            );
            return;
        }
        Data::Struct(Style::Unit, _) => {
            cx.error_spanned_by(
                cont.original,
                "#[serde(transparent)] is not allowed on a unit struct",
            );
            return;
        }
        Data::Struct(_, fields) => fields,
    };

    let mut transparent_field = None;

    for field in fields {
        if allow_transparent(field, derive) {
            if transparent_field.is_some() {
                cx.error_spanned_by(
                    cont.original,
                    "#[serde(transparent)] requires struct to have at most one transparent field",
                );
                return;
            }
            transparent_field = Some(field);
        }
    }

    match transparent_field {
        Some(transparent_field) => transparent_field.attrs.mark_transparent(),
        None => match derive {
            Derive::Serialize => {
                cx.error_spanned_by(
                    cont.original,
                    "#[serde(transparent)] requires at least one field that is not skipped",
                );
            }
            Derive::Deserialize => {
                cx.error_spanned_by(
                    cont.original,
                    "#[serde(transparent)] requires at least one field that is neither skipped nor has a default",
                );
            }
        },
    }
}

fn member_message(member: &Member) -> String {
    match member {
        Member::Named(ident) => format!("`{}`", ident),
        Member::Unnamed(i) => format!("#{}", i.index),
    }
}

fn allow_transparent(field: &Field, derive: Derive) -> bool {
    if let Type::Path(ty) = ungroup(field.ty) {
        if let Some(seg) = ty.path.segments.last() {
            if seg.ident == "PhantomData" {
                return false;
            }
        }
    }

    match derive {
        Derive::Serialize => !field.attrs.skip_serializing(),
        Derive::Deserialize => !field.attrs.skip_deserializing() && field.attrs.default().is_none(),
    }
}

fn check_from_and_try_from(cx: &Ctxt, cont: &mut Container) {
    if cont.attrs.type_from().is_some() && cont.attrs.type_try_from().is_some() {
        cx.error_spanned_by(
            cont.original,
            "#[serde(from = \"...\")] and #[serde(try_from = \"...\")] conflict with each other",
        );
    }
}
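For illustration, a user-crate sketch of an adjacently tagged enum that these checks accept because the type and content keys differ; assumes serde, serde_derive, and serde_json as dependencies (not part of the vendored sources):

use serde_derive::{Deserialize, Serialize};

#[derive(Serialize, Deserialize)]
#[serde(tag = "type", content = "value")] // using the same string for both would be rejected
enum Shape {
    Circle { radius: f64 },
    Point,
}

fn main() {
    let json = serde_json::to_string(&Shape::Circle { radius: 1.0 }).unwrap();
    assert_eq!(json, r#"{"type":"Circle","value":{"radius":1.0}}"#);
}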
68
vendor/serde_derive/src/internals/ctxt.rs
vendored
Normal file
@ -0,0 +1,68 @@
use quote::ToTokens;
use std::cell::RefCell;
use std::fmt::Display;
use std::thread;

/// A type to collect errors together and format them.
///
/// Dropping this object will cause a panic. It must be consumed using `check`.
///
/// References can be shared since this type uses run-time exclusive mut checking.
#[derive(Default)]
pub struct Ctxt {
    // The contents will be set to `None` during checking. This is so that checking can be
    // enforced.
    errors: RefCell<Option<Vec<syn::Error>>>,
}

impl Ctxt {
    /// Create a new context object.
    ///
    /// This object contains no errors, but will still trigger a panic if it is not `check`ed.
    pub fn new() -> Self {
        Ctxt {
            errors: RefCell::new(Some(Vec::new())),
        }
    }

    /// Add an error to the context object with a tokenizable object.
    ///
    /// The object is used for spanning in error messages.
    pub fn error_spanned_by<A: ToTokens, T: Display>(&self, obj: A, msg: T) {
        self.errors
            .borrow_mut()
            .as_mut()
            .unwrap()
            // Curb monomorphization from generating too many identical methods.
            .push(syn::Error::new_spanned(obj.into_token_stream(), msg));
    }

    /// Add one of Syn's parse errors.
    pub fn syn_error(&self, err: syn::Error) {
        self.errors.borrow_mut().as_mut().unwrap().push(err);
    }

    /// Consume this object, producing a formatted error string if there are errors.
    pub fn check(self) -> syn::Result<()> {
        let mut errors = self.errors.borrow_mut().take().unwrap().into_iter();

        let mut combined = match errors.next() {
            Some(first) => first,
            None => return Ok(()),
        };

        for rest in errors {
            combined.combine(rest);
        }

        Err(combined)
    }
}

impl Drop for Ctxt {
    fn drop(&mut self) {
        if !thread::panicking() && self.errors.borrow().is_some() {
            panic!("forgot to check for errors");
        }
    }
}
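A standalone sketch of the accumulate-then-combine pattern that Ctxt::check implements, assuming syn and proc-macro2 as dependencies (not the vendored type itself):

use proc_macro2::Span;

fn combine_errors(errors: Vec<syn::Error>) -> syn::Result<()> {
    let mut iter = errors.into_iter();
    let mut combined = match iter.next() {
        Some(first) => first,
        None => return Ok(()),
    };
    for rest in iter {
        combined.combine(rest);
    }
    Err(combined)
}

fn main() {
    let errs = vec![
        syn::Error::new(Span::call_site(), "first problem"),
        syn::Error::new(Span::call_site(), "second problem"),
    ];
    // All collected messages survive into the rendered compile_error! output.
    let rendered = combine_errors(errs).unwrap_err().to_compile_error().to_string();
    assert!(rendered.contains("first problem") && rendered.contains("second problem"));
}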
27
vendor/serde_derive/src/internals/mod.rs
vendored
Normal file
@ -0,0 +1,27 @@
pub mod ast;
pub mod attr;

mod case;
mod check;
mod ctxt;
mod receiver;
mod respan;
mod symbol;

use syn::Type;

pub use self::ctxt::Ctxt;
pub use self::receiver::replace_receiver;

#[derive(Copy, Clone)]
pub enum Derive {
    Serialize,
    Deserialize,
}

pub fn ungroup(mut ty: &Type) -> &Type {
    while let Type::Group(group) = ty {
        ty = &group.elem;
    }
    ty
}
292
vendor/serde_derive/src/internals/receiver.rs
vendored
Normal file
@ -0,0 +1,292 @@
use crate::internals::respan::respan;
use proc_macro2::Span;
use quote::ToTokens;
use std::mem;
use syn::punctuated::Punctuated;
use syn::{
    parse_quote, Data, DeriveInput, Expr, ExprPath, GenericArgument, GenericParam, Generics, Macro,
    Path, PathArguments, QSelf, ReturnType, Token, Type, TypeParamBound, TypePath, WherePredicate,
};

pub fn replace_receiver(input: &mut DeriveInput) {
    let self_ty = {
        let ident = &input.ident;
        let ty_generics = input.generics.split_for_impl().1;
        parse_quote!(#ident #ty_generics)
    };
    let mut visitor = ReplaceReceiver(&self_ty);
    visitor.visit_generics_mut(&mut input.generics);
    visitor.visit_data_mut(&mut input.data);
}

struct ReplaceReceiver<'a>(&'a TypePath);

impl ReplaceReceiver<'_> {
    fn self_ty(&self, span: Span) -> TypePath {
        let tokens = self.0.to_token_stream();
        let respanned = respan(tokens, span);
        syn::parse2(respanned).unwrap()
    }

    fn self_to_qself(&self, qself: &mut Option<QSelf>, path: &mut Path) {
        if path.leading_colon.is_some() || path.segments[0].ident != "Self" {
            return;
        }

        if path.segments.len() == 1 {
            self.self_to_expr_path(path);
            return;
        }

        let span = path.segments[0].ident.span();
        *qself = Some(QSelf {
            lt_token: Token![<](span),
            ty: Box::new(Type::Path(self.self_ty(span))),
            position: 0,
            as_token: None,
            gt_token: Token![>](span),
        });

        path.leading_colon = Some(**path.segments.pairs().next().unwrap().punct().unwrap());

        let segments = mem::replace(&mut path.segments, Punctuated::new());
        path.segments = segments.into_pairs().skip(1).collect();
    }

    fn self_to_expr_path(&self, path: &mut Path) {
        let self_ty = self.self_ty(path.segments[0].ident.span());
        let variant = mem::replace(path, self_ty.path);
        for segment in &mut path.segments {
            if let PathArguments::AngleBracketed(bracketed) = &mut segment.arguments {
                if bracketed.colon2_token.is_none() && !bracketed.args.is_empty() {
                    bracketed.colon2_token = Some(<Token![::]>::default());
                }
            }
        }
        if variant.segments.len() > 1 {
            path.segments.push_punct(<Token![::]>::default());
            path.segments.extend(variant.segments.into_pairs().skip(1));
        }
    }
}

impl ReplaceReceiver<'_> {
    // `Self` -> `Receiver`
    fn visit_type_mut(&mut self, ty: &mut Type) {
        let span = if let Type::Path(node) = ty {
            if node.qself.is_none() && node.path.is_ident("Self") {
                node.path.segments[0].ident.span()
            } else {
                self.visit_type_path_mut(node);
                return;
            }
        } else {
            self.visit_type_mut_impl(ty);
            return;
        };
        *ty = self.self_ty(span).into();
    }

    // `Self::Assoc` -> `<Receiver>::Assoc`
    fn visit_type_path_mut(&mut self, ty: &mut TypePath) {
        if ty.qself.is_none() {
            self.self_to_qself(&mut ty.qself, &mut ty.path);
        }
        self.visit_type_path_mut_impl(ty);
    }

    // `Self::method` -> `<Receiver>::method`
    fn visit_expr_path_mut(&mut self, expr: &mut ExprPath) {
        if expr.qself.is_none() {
            self.self_to_qself(&mut expr.qself, &mut expr.path);
        }
        self.visit_expr_path_mut_impl(expr);
    }

    // Everything below is simply traversing the syntax tree.

    fn visit_type_mut_impl(&mut self, ty: &mut Type) {
        match ty {
            #![cfg_attr(all(test, exhaustive), deny(non_exhaustive_omitted_patterns))]
            Type::Array(ty) => {
                self.visit_type_mut(&mut ty.elem);
                self.visit_expr_mut(&mut ty.len);
            }
            Type::BareFn(ty) => {
                for arg in &mut ty.inputs {
                    self.visit_type_mut(&mut arg.ty);
                }
                self.visit_return_type_mut(&mut ty.output);
            }
            Type::Group(ty) => self.visit_type_mut(&mut ty.elem),
            Type::ImplTrait(ty) => {
                for bound in &mut ty.bounds {
                    self.visit_type_param_bound_mut(bound);
                }
            }
            Type::Macro(ty) => self.visit_macro_mut(&mut ty.mac),
            Type::Paren(ty) => self.visit_type_mut(&mut ty.elem),
            Type::Path(ty) => {
                if let Some(qself) = &mut ty.qself {
                    self.visit_type_mut(&mut qself.ty);
                }
                self.visit_path_mut(&mut ty.path);
            }
            Type::Ptr(ty) => self.visit_type_mut(&mut ty.elem),
            Type::Reference(ty) => self.visit_type_mut(&mut ty.elem),
            Type::Slice(ty) => self.visit_type_mut(&mut ty.elem),
            Type::TraitObject(ty) => {
                for bound in &mut ty.bounds {
                    self.visit_type_param_bound_mut(bound);
                }
            }
            Type::Tuple(ty) => {
                for elem in &mut ty.elems {
                    self.visit_type_mut(elem);
                }
            }

            Type::Infer(_) | Type::Never(_) | Type::Verbatim(_) => {}

            _ => {}
        }
    }

    fn visit_type_path_mut_impl(&mut self, ty: &mut TypePath) {
        if let Some(qself) = &mut ty.qself {
            self.visit_type_mut(&mut qself.ty);
        }
        self.visit_path_mut(&mut ty.path);
    }

    fn visit_expr_path_mut_impl(&mut self, expr: &mut ExprPath) {
        if let Some(qself) = &mut expr.qself {
            self.visit_type_mut(&mut qself.ty);
        }
        self.visit_path_mut(&mut expr.path);
    }

    fn visit_path_mut(&mut self, path: &mut Path) {
        for segment in &mut path.segments {
            self.visit_path_arguments_mut(&mut segment.arguments);
        }
    }

    fn visit_path_arguments_mut(&mut self, arguments: &mut PathArguments) {
        match arguments {
            PathArguments::None => {}
            PathArguments::AngleBracketed(arguments) => {
                for arg in &mut arguments.args {
                    match arg {
                        #![cfg_attr(all(test, exhaustive), deny(non_exhaustive_omitted_patterns))]
                        GenericArgument::Type(arg) => self.visit_type_mut(arg),
                        GenericArgument::AssocType(arg) => self.visit_type_mut(&mut arg.ty),
                        GenericArgument::Lifetime(_)
                        | GenericArgument::Const(_)
                        | GenericArgument::AssocConst(_)
                        | GenericArgument::Constraint(_) => {}
                        _ => {}
                    }
                }
            }
            PathArguments::Parenthesized(arguments) => {
                for argument in &mut arguments.inputs {
                    self.visit_type_mut(argument);
                }
                self.visit_return_type_mut(&mut arguments.output);
            }
        }
    }

    fn visit_return_type_mut(&mut self, return_type: &mut ReturnType) {
        match return_type {
            ReturnType::Default => {}
            ReturnType::Type(_, output) => self.visit_type_mut(output),
        }
    }

    fn visit_type_param_bound_mut(&mut self, bound: &mut TypeParamBound) {
        match bound {
            #![cfg_attr(all(test, exhaustive), deny(non_exhaustive_omitted_patterns))]
            TypeParamBound::Trait(bound) => self.visit_path_mut(&mut bound.path),
            TypeParamBound::Lifetime(_) | TypeParamBound::Verbatim(_) => {}
            _ => {}
        }
    }

    fn visit_generics_mut(&mut self, generics: &mut Generics) {
        for param in &mut generics.params {
            match param {
                GenericParam::Type(param) => {
                    for bound in &mut param.bounds {
                        self.visit_type_param_bound_mut(bound);
                    }
                }
                GenericParam::Lifetime(_) | GenericParam::Const(_) => {}
            }
        }
        if let Some(where_clause) = &mut generics.where_clause {
            for predicate in &mut where_clause.predicates {
                match predicate {
                    #![cfg_attr(all(test, exhaustive), deny(non_exhaustive_omitted_patterns))]
                    WherePredicate::Type(predicate) => {
                        self.visit_type_mut(&mut predicate.bounded_ty);
                        for bound in &mut predicate.bounds {
                            self.visit_type_param_bound_mut(bound);
                        }
                    }
                    WherePredicate::Lifetime(_) => {}
                    _ => {}
                }
            }
        }
    }

    fn visit_data_mut(&mut self, data: &mut Data) {
        match data {
            Data::Struct(data) => {
                for field in &mut data.fields {
                    self.visit_type_mut(&mut field.ty);
                }
            }
            Data::Enum(data) => {
                for variant in &mut data.variants {
                    for field in &mut variant.fields {
                        self.visit_type_mut(&mut field.ty);
                    }
                }
            }
            Data::Union(_) => {}
        }
    }

    fn visit_expr_mut(&mut self, expr: &mut Expr) {
        match expr {
            Expr::Binary(expr) => {
                self.visit_expr_mut(&mut expr.left);
                self.visit_expr_mut(&mut expr.right);
            }
            Expr::Call(expr) => {
                self.visit_expr_mut(&mut expr.func);
                for arg in &mut expr.args {
                    self.visit_expr_mut(arg);
                }
            }
            Expr::Cast(expr) => {
                self.visit_expr_mut(&mut expr.expr);
                self.visit_type_mut(&mut expr.ty);
            }
            Expr::Field(expr) => self.visit_expr_mut(&mut expr.base),
            Expr::Index(expr) => {
                self.visit_expr_mut(&mut expr.expr);
                self.visit_expr_mut(&mut expr.index);
            }
            Expr::Paren(expr) => self.visit_expr_mut(&mut expr.expr),
            Expr::Path(expr) => self.visit_expr_path_mut(expr),
            Expr::Unary(expr) => self.visit_expr_mut(&mut expr.expr),
            _ => {}
        }
    }

    fn visit_macro_mut(&mut self, _mac: &mut Macro) {}
}
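A user-crate sketch of why this rewrite exists: field types may mention Self, which the generated impls cannot keep verbatim, so the derive rewrites it to the concrete receiver type. Assumes serde, serde_derive, and serde_json as dependencies (not part of the vendored sources):

use serde_derive::{Deserialize, Serialize};

#[derive(Serialize, Deserialize)]
struct Node {
    value: u32,
    next: Option<Box<Self>>, // treated as Option<Box<Node>> inside the generated code
}

fn main() {
    let list = Node {
        value: 1,
        next: Some(Box::new(Node { value: 2, next: None })),
    };
    let json = serde_json::to_string(&list).unwrap();
    assert_eq!(json, r#"{"value":1,"next":{"value":2,"next":null}}"#);
}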
16
vendor/serde_derive/src/internals/respan.rs
vendored
Normal file
@ -0,0 +1,16 @@
use proc_macro2::{Group, Span, TokenStream, TokenTree};

pub(crate) fn respan(stream: TokenStream, span: Span) -> TokenStream {
    stream
        .into_iter()
        .map(|token| respan_token(token, span))
        .collect()
}

fn respan_token(mut token: TokenTree, span: Span) -> TokenTree {
    if let TokenTree::Group(g) = &mut token {
        *g = Group::new(g.delimiter(), respan(g.stream(), span));
    }
    token.set_span(span);
    token
}
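The same respanning technique as a standalone sketch that can run outside the crate, assuming proc-macro2 and quote as dependencies (not the vendored module):

use proc_macro2::{Group, Span, TokenStream, TokenTree};
use quote::quote;

fn respan(stream: TokenStream, span: Span) -> TokenStream {
    stream
        .into_iter()
        .map(|mut token| {
            // Groups carry their own inner stream, so recurse before respanning.
            if let TokenTree::Group(g) = &mut token {
                *g = Group::new(g.delimiter(), respan(g.stream(), span));
            }
            token.set_span(span);
            token
        })
        .collect()
}

fn main() {
    let tokens = quote!(fn answer() -> u32 { 42 });
    // Every token, including those inside the braces, now reports call_site().
    let respanned = respan(tokens, Span::call_site());
    println!("{}", respanned);
}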
71
vendor/serde_derive/src/internals/symbol.rs
vendored
Normal file
@ -0,0 +1,71 @@
use std::fmt::{self, Display};
use syn::{Ident, Path};

#[derive(Copy, Clone)]
pub struct Symbol(&'static str);

pub const ALIAS: Symbol = Symbol("alias");
pub const BORROW: Symbol = Symbol("borrow");
pub const BOUND: Symbol = Symbol("bound");
pub const CONTENT: Symbol = Symbol("content");
pub const CRATE: Symbol = Symbol("crate");
pub const DEFAULT: Symbol = Symbol("default");
pub const DENY_UNKNOWN_FIELDS: Symbol = Symbol("deny_unknown_fields");
pub const DESERIALIZE: Symbol = Symbol("deserialize");
pub const DESERIALIZE_WITH: Symbol = Symbol("deserialize_with");
pub const EXPECTING: Symbol = Symbol("expecting");
pub const FIELD_IDENTIFIER: Symbol = Symbol("field_identifier");
pub const FLATTEN: Symbol = Symbol("flatten");
pub const FROM: Symbol = Symbol("from");
pub const GETTER: Symbol = Symbol("getter");
pub const INTO: Symbol = Symbol("into");
pub const NON_EXHAUSTIVE: Symbol = Symbol("non_exhaustive");
pub const OTHER: Symbol = Symbol("other");
pub const REMOTE: Symbol = Symbol("remote");
pub const RENAME: Symbol = Symbol("rename");
pub const RENAME_ALL: Symbol = Symbol("rename_all");
pub const RENAME_ALL_FIELDS: Symbol = Symbol("rename_all_fields");
pub const REPR: Symbol = Symbol("repr");
pub const SERDE: Symbol = Symbol("serde");
pub const SERIALIZE: Symbol = Symbol("serialize");
pub const SERIALIZE_WITH: Symbol = Symbol("serialize_with");
pub const SKIP: Symbol = Symbol("skip");
pub const SKIP_DESERIALIZING: Symbol = Symbol("skip_deserializing");
pub const SKIP_SERIALIZING: Symbol = Symbol("skip_serializing");
pub const SKIP_SERIALIZING_IF: Symbol = Symbol("skip_serializing_if");
pub const TAG: Symbol = Symbol("tag");
pub const TRANSPARENT: Symbol = Symbol("transparent");
pub const TRY_FROM: Symbol = Symbol("try_from");
pub const UNTAGGED: Symbol = Symbol("untagged");
pub const VARIANT_IDENTIFIER: Symbol = Symbol("variant_identifier");
pub const WITH: Symbol = Symbol("with");

impl PartialEq<Symbol> for Ident {
    fn eq(&self, word: &Symbol) -> bool {
        self == word.0
    }
}

impl<'a> PartialEq<Symbol> for &'a Ident {
    fn eq(&self, word: &Symbol) -> bool {
        *self == word.0
    }
}

impl PartialEq<Symbol> for Path {
    fn eq(&self, word: &Symbol) -> bool {
        self.is_ident(word.0)
    }
}

impl<'a> PartialEq<Symbol> for &'a Path {
    fn eq(&self, word: &Symbol) -> bool {
        self.is_ident(word.0)
    }
}

impl Display for Symbol {
    fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
        formatter.write_str(self.0)
    }
}
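A sketch of the comparison these PartialEq impls wrap, written with plain is_ident against a parsed input; assumes syn with default features (not the vendored code):

use syn::{parse_quote, DeriveInput};

fn main() {
    let input: DeriveInput = parse_quote! {
        #[serde(rename_all = "camelCase")]
        struct S;
    };
    // Path::is_ident is what Symbol's PartialEq impls delegate to, so inside the
    // crate `attr.path() == SERDE` behaves like this check.
    assert!(input.attrs.iter().any(|attr| attr.path().is_ident("serde")));
}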
101
vendor/serde_derive/src/lib.rs
vendored
Normal file
@ -0,0 +1,101 @@
//! This crate provides Serde's two derive macros.
//!
//! ```edition2021
//! # use serde_derive::{Deserialize, Serialize};
//! #
//! #[derive(Serialize, Deserialize)]
//! # struct S;
//! #
//! # fn main() {}
//! ```
//!
//! Please refer to [https://serde.rs/derive.html] for how to set this up.
//!
//! [https://serde.rs/derive.html]: https://serde.rs/derive.html

#![doc(html_root_url = "https://docs.rs/serde_derive/1.0.195")]
// Ignored clippy lints
#![allow(
    // clippy false positive: https://github.com/rust-lang/rust-clippy/issues/7054
    clippy::branches_sharing_code,
    clippy::cognitive_complexity,
    // clippy bug: https://github.com/rust-lang/rust-clippy/issues/7575
    clippy::collapsible_match,
    clippy::derive_partial_eq_without_eq,
    clippy::enum_variant_names,
    // clippy bug: https://github.com/rust-lang/rust-clippy/issues/6797
    clippy::manual_map,
    clippy::match_like_matches_macro,
    clippy::needless_pass_by_value,
    clippy::too_many_arguments,
    clippy::trivially_copy_pass_by_ref,
    clippy::used_underscore_binding,
    clippy::wildcard_in_or_patterns,
    // clippy bug: https://github.com/rust-lang/rust-clippy/issues/5704
    clippy::unnested_or_patterns,
)]
// Ignored clippy_pedantic lints
#![allow(
    clippy::cast_possible_truncation,
    clippy::checked_conversions,
    clippy::doc_markdown,
    clippy::enum_glob_use,
    clippy::indexing_slicing,
    clippy::items_after_statements,
    clippy::let_underscore_untyped,
    clippy::manual_assert,
    clippy::map_err_ignore,
    clippy::match_same_arms,
    // clippy bug: https://github.com/rust-lang/rust-clippy/issues/6984
    clippy::match_wildcard_for_single_variants,
    clippy::module_name_repetitions,
    clippy::must_use_candidate,
    clippy::similar_names,
    clippy::single_match_else,
    clippy::struct_excessive_bools,
    clippy::too_many_lines,
    clippy::unseparated_literal_suffix,
    clippy::unused_self,
    clippy::use_self,
    clippy::wildcard_imports
)]
#![cfg_attr(all(test, exhaustive), feature(non_exhaustive_omitted_patterns_lint))]

extern crate proc_macro2;
extern crate quote;
extern crate syn;

extern crate proc_macro;

mod internals;

use proc_macro::TokenStream;
use syn::parse_macro_input;
use syn::DeriveInput;

#[macro_use]
mod bound;
#[macro_use]
mod fragment;

mod de;
mod dummy;
mod pretend;
mod ser;
mod this;

#[proc_macro_derive(Serialize, attributes(serde))]
pub fn derive_serialize(input: TokenStream) -> TokenStream {
    let mut input = parse_macro_input!(input as DeriveInput);
    ser::expand_derive_serialize(&mut input)
        .unwrap_or_else(syn::Error::into_compile_error)
        .into()
}

#[proc_macro_derive(Deserialize, attributes(serde))]
pub fn derive_deserialize(input: TokenStream) -> TokenStream {
    let mut input = parse_macro_input!(input as DeriveInput);
    de::expand_derive_deserialize(&mut input)
        .unwrap_or_else(syn::Error::into_compile_error)
        .into()
}
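A minimal user-crate sketch of what these two entry points provide, assuming serde, serde_derive, and serde_json as dependencies:

use serde_derive::{Deserialize, Serialize};

#[derive(Serialize, Deserialize, Debug, PartialEq)]
struct Point {
    x: i32,
    y: i32,
}

fn main() {
    let p = Point { x: 1, y: 2 };
    let json = serde_json::to_string(&p).unwrap();
    assert_eq!(json, r#"{"x":1,"y":2}"#);
    assert_eq!(serde_json::from_str::<Point>(&json).unwrap(), p);
}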
185
vendor/serde_derive/src/pretend.rs
vendored
Normal file
@ -0,0 +1,185 @@
use crate::internals::ast::{Container, Data, Field, Style, Variant};
use proc_macro2::TokenStream;
use quote::{format_ident, quote};

// Suppress dead_code warnings that would otherwise appear when using a remote
// derive. Other than this pretend code, a struct annotated with remote derive
// never has its fields referenced and an enum annotated with remote derive
// never has its variants constructed.
//
//     warning: field is never used: `i`
//      --> src/main.rs:4:20
//       |
//     4 |     struct StructDef { i: i32 }
//       |                        ^^^^^^
//
//     warning: variant is never constructed: `V`
//      --> src/main.rs:8:16
//       |
//     8 |     enum EnumDef { V }
//       |                    ^
//
pub fn pretend_used(cont: &Container, is_packed: bool) -> TokenStream {
    let pretend_fields = pretend_fields_used(cont, is_packed);
    let pretend_variants = pretend_variants_used(cont);

    quote! {
        #pretend_fields
        #pretend_variants
    }
}

// For structs with named fields, expands to:
//
//     match None::<&T> {
//         Some(T { a: __v0, b: __v1 }) => {}
//         _ => {}
//     }
//
// For packed structs on sufficiently new rustc, expands to:
//
//     match None::<&T> {
//         Some(__v @ T { a: _, b: _ }) => {
//             let _ = addr_of!(__v.a);
//             let _ = addr_of!(__v.b);
//         }
//         _ => {}
//     }
//
// For packed structs on older rustc, we assume Sized and !Drop, and expand to:
//
//     match None::<T> {
//         Some(T { a: __v0, b: __v1 }) => {}
//         _ => {}
//     }
//
// For enums, expands to the following but only including struct variants:
//
//     match None::<&T> {
//         Some(T::A { a: __v0 }) => {}
//         Some(T::B { b: __v0 }) => {}
//         _ => {}
//     }
//
fn pretend_fields_used(cont: &Container, is_packed: bool) -> TokenStream {
    match &cont.data {
        Data::Enum(variants) => pretend_fields_used_enum(cont, variants),
        Data::Struct(Style::Struct | Style::Tuple | Style::Newtype, fields) => {
            if is_packed {
                pretend_fields_used_struct_packed(cont, fields)
            } else {
                pretend_fields_used_struct(cont, fields)
            }
        }
        Data::Struct(Style::Unit, _) => quote!(),
    }
}

fn pretend_fields_used_struct(cont: &Container, fields: &[Field]) -> TokenStream {
    let type_ident = &cont.ident;
    let (_, ty_generics, _) = cont.generics.split_for_impl();

    let members = fields.iter().map(|field| &field.member);
    let placeholders = (0usize..).map(|i| format_ident!("__v{}", i));

    quote! {
        match _serde::__private::None::<&#type_ident #ty_generics> {
            _serde::__private::Some(#type_ident { #(#members: #placeholders),* }) => {}
            _ => {}
        }
    }
}

fn pretend_fields_used_struct_packed(cont: &Container, fields: &[Field]) -> TokenStream {
    let type_ident = &cont.ident;
    let (_, ty_generics, _) = cont.generics.split_for_impl();

    let members = fields.iter().map(|field| &field.member).collect::<Vec<_>>();

    quote! {
        match _serde::__private::None::<&#type_ident #ty_generics> {
            _serde::__private::Some(__v @ #type_ident { #(#members: _),* }) => {
                #(
                    let _ = _serde::__private::ptr::addr_of!(__v.#members);
                )*
            }
            _ => {}
        }
    }
}

fn pretend_fields_used_enum(cont: &Container, variants: &[Variant]) -> TokenStream {
    let type_ident = &cont.ident;
    let (_, ty_generics, _) = cont.generics.split_for_impl();

    let patterns = variants
        .iter()
        .filter_map(|variant| match variant.style {
            Style::Struct | Style::Tuple | Style::Newtype => {
                let variant_ident = &variant.ident;
                let members = variant.fields.iter().map(|field| &field.member);
                let placeholders = (0usize..).map(|i| format_ident!("__v{}", i));
                Some(quote!(#type_ident::#variant_ident { #(#members: #placeholders),* }))
            }
            Style::Unit => None,
        })
        .collect::<Vec<_>>();

    quote! {
        match _serde::__private::None::<&#type_ident #ty_generics> {
            #(
                _serde::__private::Some(#patterns) => {}
            )*
            _ => {}
        }
    }
}

// Expands to one of these per enum variant:
//
//     match None {
//         Some((__v0, __v1,)) => {
//             let _ = E::V { a: __v0, b: __v1 };
//         }
//         _ => {}
//     }
//
fn pretend_variants_used(cont: &Container) -> TokenStream {
    let variants = match &cont.data {
        Data::Enum(variants) => variants,
        Data::Struct(_, _) => {
            return quote!();
        }
    };

    let type_ident = &cont.ident;
    let (_, ty_generics, _) = cont.generics.split_for_impl();
    let turbofish = ty_generics.as_turbofish();

    let cases = variants.iter().map(|variant| {
        let variant_ident = &variant.ident;
        let placeholders = &(0..variant.fields.len())
            .map(|i| format_ident!("__v{}", i))
            .collect::<Vec<_>>();

        let pat = match variant.style {
            Style::Struct => {
                let members = variant.fields.iter().map(|field| &field.member);
                quote!({ #(#members: #placeholders),* })
            }
            Style::Tuple | Style::Newtype => quote!(( #(#placeholders),* )),
            Style::Unit => quote!(),
        };

        quote! {
            match _serde::__private::None {
                _serde::__private::Some((#(#placeholders,)*)) => {
                    let _ = #type_ident::#variant_ident #turbofish #pat;
                }
                _ => {}
            }
        }
    });

    quote!(#(#cases)*)
}
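A user-crate sketch of the remote-derive pattern whose dead_code warnings this module suppresses, following the Duration example documented at https://serde.rs/remote-derive.html; assumes serde, serde_derive, and serde_json as dependencies:

use serde_derive::{Deserialize, Serialize};
use std::time::Duration;

// Mirror of the remote type; its fields only describe the data layout and are
// never read directly, which is exactly what the pretend code keeps warning-free.
#[derive(Serialize, Deserialize)]
#[serde(remote = "Duration")]
struct DurationDef {
    #[serde(getter = "Duration::as_secs")]
    secs: u64,
    #[serde(getter = "Duration::subsec_nanos")]
    nanos: u32,
}

// Deserialization constructs the remote type through this conversion.
impl From<DurationDef> for Duration {
    fn from(def: DurationDef) -> Duration {
        Duration::new(def.secs, def.nanos)
    }
}

#[derive(Serialize, Deserialize)]
struct Process {
    #[serde(with = "DurationDef")]
    uptime: Duration,
}

fn main() {
    let json = serde_json::to_string(&Process {
        uptime: Duration::new(5, 0),
    })
    .unwrap();
    assert_eq!(json, r#"{"uptime":{"secs":5,"nanos":0}}"#);
}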
1359
vendor/serde_derive/src/ser.rs
vendored
Normal file
File diff suppressed because it is too large
32
vendor/serde_derive/src/this.rs
vendored
Normal file
@ -0,0 +1,32 @@
use crate::internals::ast::Container;
use syn::{Path, PathArguments, Token};

pub fn this_type(cont: &Container) -> Path {
    if let Some(remote) = cont.attrs.remote() {
        let mut this = remote.clone();
        for segment in &mut this.segments {
            if let PathArguments::AngleBracketed(arguments) = &mut segment.arguments {
                arguments.colon2_token = None;
            }
        }
        this
    } else {
        Path::from(cont.ident.clone())
    }
}

pub fn this_value(cont: &Container) -> Path {
    if let Some(remote) = cont.attrs.remote() {
        let mut this = remote.clone();
        for segment in &mut this.segments {
            if let PathArguments::AngleBracketed(arguments) = &mut segment.arguments {
                if arguments.colon2_token.is_none() {
                    arguments.colon2_token = Some(<Token![::]>::default());
                }
            }
        }
        this
    } else {
        Path::from(cont.ident.clone())
    }
}
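A standalone sketch of the type-position versus value-position distinction this module encodes, that is Generic<T> versus the turbofish Generic::<T>, assuming syn and quote as dependencies (not the vendored code):

use quote::ToTokens;
use syn::{parse_quote, Path, PathArguments};

fn main() {
    // In type position the remote path is written Generic<T>; constructing or
    // matching values needs the turbofish form, produced by inserting colon2_token.
    let mut path: Path = parse_quote!(Generic<T>);
    for segment in &mut path.segments {
        if let PathArguments::AngleBracketed(arguments) = &mut segment.arguments {
            if arguments.colon2_token.is_none() {
                arguments.colon2_token = Some(<syn::Token![::]>::default());
            }
        }
    }
    // Prints the turbofish form, e.g. Generic :: < T >
    println!("{}", path.to_token_stream());
}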