Initial vendor packages
Signed-off-by: Valentin Popov <valentin@popov.link>
This commit is contained in:
1
vendor/syn/.cargo-checksum.json
vendored
Normal file
1
vendor/syn/.cargo-checksum.json
vendored
Normal file
File diff suppressed because one or more lines are too long
147
vendor/syn/Cargo.toml
vendored
Normal file
147
vendor/syn/Cargo.toml
vendored
Normal file
@@ -0,0 +1,147 @@
|
||||
# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO
|
||||
#
|
||||
# When uploading crates to the registry Cargo will automatically
|
||||
# "normalize" Cargo.toml files for maximal compatibility
|
||||
# with all versions of Cargo and also rewrite `path` dependencies
|
||||
# to registry (e.g., crates.io) dependencies.
|
||||
#
|
||||
# If you are reading this file be aware that the original Cargo.toml
|
||||
# will likely look very different (and much more reasonable).
|
||||
# See Cargo.toml.orig for the original contents.
|
||||
|
||||
[package]
|
||||
edition = "2021"
|
||||
rust-version = "1.56"
|
||||
name = "syn"
|
||||
version = "2.0.48"
|
||||
authors = ["David Tolnay <dtolnay@gmail.com>"]
|
||||
include = [
|
||||
"/benches/**",
|
||||
"/Cargo.toml",
|
||||
"/LICENSE-APACHE",
|
||||
"/LICENSE-MIT",
|
||||
"/README.md",
|
||||
"/src/**",
|
||||
"/tests/**",
|
||||
]
|
||||
description = "Parser for Rust source code"
|
||||
documentation = "https://docs.rs/syn"
|
||||
readme = "README.md"
|
||||
keywords = [
|
||||
"macros",
|
||||
"syn",
|
||||
]
|
||||
categories = [
|
||||
"development-tools::procedural-macro-helpers",
|
||||
"parser-implementations",
|
||||
]
|
||||
license = "MIT OR Apache-2.0"
|
||||
repository = "https://github.com/dtolnay/syn"
|
||||
|
||||
[package.metadata.docs.rs]
|
||||
all-features = true
|
||||
rustdoc-args = [
|
||||
"--cfg",
|
||||
"doc_cfg",
|
||||
"--generate-link-to-definition",
|
||||
]
|
||||
targets = ["x86_64-unknown-linux-gnu"]
|
||||
|
||||
[package.metadata.playground]
|
||||
features = [
|
||||
"full",
|
||||
"visit",
|
||||
"visit-mut",
|
||||
"fold",
|
||||
"extra-traits",
|
||||
]
|
||||
|
||||
[lib]
|
||||
doc-scrape-examples = false
|
||||
|
||||
[[bench]]
|
||||
name = "rust"
|
||||
harness = false
|
||||
required-features = [
|
||||
"full",
|
||||
"parsing",
|
||||
]
|
||||
|
||||
[[bench]]
|
||||
name = "file"
|
||||
required-features = [
|
||||
"full",
|
||||
"parsing",
|
||||
]
|
||||
|
||||
[dependencies.proc-macro2]
|
||||
version = "1.0.75"
|
||||
default-features = false
|
||||
|
||||
[dependencies.quote]
|
||||
version = "1.0.35"
|
||||
optional = true
|
||||
default-features = false
|
||||
|
||||
[dependencies.unicode-ident]
|
||||
version = "1"
|
||||
|
||||
[dev-dependencies.anyhow]
|
||||
version = "1"
|
||||
|
||||
[dev-dependencies.automod]
|
||||
version = "1"
|
||||
|
||||
[dev-dependencies.flate2]
|
||||
version = "1"
|
||||
|
||||
[dev-dependencies.insta]
|
||||
version = "1"
|
||||
|
||||
[dev-dependencies.rayon]
|
||||
version = "1"
|
||||
|
||||
[dev-dependencies.ref-cast]
|
||||
version = "1"
|
||||
|
||||
[dev-dependencies.reqwest]
|
||||
version = "0.11"
|
||||
features = ["blocking"]
|
||||
|
||||
[dev-dependencies.rustversion]
|
||||
version = "1"
|
||||
|
||||
[dev-dependencies.syn-test-suite]
|
||||
version = "0"
|
||||
|
||||
[dev-dependencies.tar]
|
||||
version = "0.4.16"
|
||||
|
||||
[dev-dependencies.termcolor]
|
||||
version = "1"
|
||||
|
||||
[dev-dependencies.walkdir]
|
||||
version = "2.3.2"
|
||||
|
||||
[features]
|
||||
clone-impls = []
|
||||
default = [
|
||||
"derive",
|
||||
"parsing",
|
||||
"printing",
|
||||
"clone-impls",
|
||||
"proc-macro",
|
||||
]
|
||||
derive = []
|
||||
extra-traits = []
|
||||
fold = []
|
||||
full = []
|
||||
parsing = []
|
||||
printing = ["quote"]
|
||||
proc-macro = [
|
||||
"proc-macro2/proc-macro",
|
||||
"quote/proc-macro",
|
||||
]
|
||||
test = ["syn-test-suite/all-features"]
|
||||
visit = []
|
||||
visit-mut = []
|
||||
176
vendor/syn/LICENSE-APACHE
vendored
Normal file
176
vendor/syn/LICENSE-APACHE
vendored
Normal file
@@ -0,0 +1,176 @@
|
||||
Apache License
|
||||
Version 2.0, January 2004
|
||||
http://www.apache.org/licenses/
|
||||
|
||||
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
|
||||
|
||||
1. Definitions.
|
||||
|
||||
"License" shall mean the terms and conditions for use, reproduction,
|
||||
and distribution as defined by Sections 1 through 9 of this document.
|
||||
|
||||
"Licensor" shall mean the copyright owner or entity authorized by
|
||||
the copyright owner that is granting the License.
|
||||
|
||||
"Legal Entity" shall mean the union of the acting entity and all
|
||||
other entities that control, are controlled by, or are under common
|
||||
control with that entity. For the purposes of this definition,
|
||||
"control" means (i) the power, direct or indirect, to cause the
|
||||
direction or management of such entity, whether by contract or
|
||||
otherwise, or (ii) ownership of fifty percent (50%) or more of the
|
||||
outstanding shares, or (iii) beneficial ownership of such entity.
|
||||
|
||||
"You" (or "Your") shall mean an individual or Legal Entity
|
||||
exercising permissions granted by this License.
|
||||
|
||||
"Source" form shall mean the preferred form for making modifications,
|
||||
including but not limited to software source code, documentation
|
||||
source, and configuration files.
|
||||
|
||||
"Object" form shall mean any form resulting from mechanical
|
||||
transformation or translation of a Source form, including but
|
||||
not limited to compiled object code, generated documentation,
|
||||
and conversions to other media types.
|
||||
|
||||
"Work" shall mean the work of authorship, whether in Source or
|
||||
Object form, made available under the License, as indicated by a
|
||||
copyright notice that is included in or attached to the work
|
||||
(an example is provided in the Appendix below).
|
||||
|
||||
"Derivative Works" shall mean any work, whether in Source or Object
|
||||
form, that is based on (or derived from) the Work and for which the
|
||||
editorial revisions, annotations, elaborations, or other modifications
|
||||
represent, as a whole, an original work of authorship. For the purposes
|
||||
of this License, Derivative Works shall not include works that remain
|
||||
separable from, or merely link (or bind by name) to the interfaces of,
|
||||
the Work and Derivative Works thereof.
|
||||
|
||||
"Contribution" shall mean any work of authorship, including
|
||||
the original version of the Work and any modifications or additions
|
||||
to that Work or Derivative Works thereof, that is intentionally
|
||||
submitted to Licensor for inclusion in the Work by the copyright owner
|
||||
or by an individual or Legal Entity authorized to submit on behalf of
|
||||
the copyright owner. For the purposes of this definition, "submitted"
|
||||
means any form of electronic, verbal, or written communication sent
|
||||
to the Licensor or its representatives, including but not limited to
|
||||
communication on electronic mailing lists, source code control systems,
|
||||
and issue tracking systems that are managed by, or on behalf of, the
|
||||
Licensor for the purpose of discussing and improving the Work, but
|
||||
excluding communication that is conspicuously marked or otherwise
|
||||
designated in writing by the copyright owner as "Not a Contribution."
|
||||
|
||||
"Contributor" shall mean Licensor and any individual or Legal Entity
|
||||
on behalf of whom a Contribution has been received by Licensor and
|
||||
subsequently incorporated within the Work.
|
||||
|
||||
2. Grant of Copyright License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
copyright license to reproduce, prepare Derivative Works of,
|
||||
publicly display, publicly perform, sublicense, and distribute the
|
||||
Work and such Derivative Works in Source or Object form.
|
||||
|
||||
3. Grant of Patent License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
(except as stated in this section) patent license to make, have made,
|
||||
use, offer to sell, sell, import, and otherwise transfer the Work,
|
||||
where such license applies only to those patent claims licensable
|
||||
by such Contributor that are necessarily infringed by their
|
||||
Contribution(s) alone or by combination of their Contribution(s)
|
||||
with the Work to which such Contribution(s) was submitted. If You
|
||||
institute patent litigation against any entity (including a
|
||||
cross-claim or counterclaim in a lawsuit) alleging that the Work
|
||||
or a Contribution incorporated within the Work constitutes direct
|
||||
or contributory patent infringement, then any patent licenses
|
||||
granted to You under this License for that Work shall terminate
|
||||
as of the date such litigation is filed.
|
||||
|
||||
4. Redistribution. You may reproduce and distribute copies of the
|
||||
Work or Derivative Works thereof in any medium, with or without
|
||||
modifications, and in Source or Object form, provided that You
|
||||
meet the following conditions:
|
||||
|
||||
(a) You must give any other recipients of the Work or
|
||||
Derivative Works a copy of this License; and
|
||||
|
||||
(b) You must cause any modified files to carry prominent notices
|
||||
stating that You changed the files; and
|
||||
|
||||
(c) You must retain, in the Source form of any Derivative Works
|
||||
that You distribute, all copyright, patent, trademark, and
|
||||
attribution notices from the Source form of the Work,
|
||||
excluding those notices that do not pertain to any part of
|
||||
the Derivative Works; and
|
||||
|
||||
(d) If the Work includes a "NOTICE" text file as part of its
|
||||
distribution, then any Derivative Works that You distribute must
|
||||
include a readable copy of the attribution notices contained
|
||||
within such NOTICE file, excluding those notices that do not
|
||||
pertain to any part of the Derivative Works, in at least one
|
||||
of the following places: within a NOTICE text file distributed
|
||||
as part of the Derivative Works; within the Source form or
|
||||
documentation, if provided along with the Derivative Works; or,
|
||||
within a display generated by the Derivative Works, if and
|
||||
wherever such third-party notices normally appear. The contents
|
||||
of the NOTICE file are for informational purposes only and
|
||||
do not modify the License. You may add Your own attribution
|
||||
notices within Derivative Works that You distribute, alongside
|
||||
or as an addendum to the NOTICE text from the Work, provided
|
||||
that such additional attribution notices cannot be construed
|
||||
as modifying the License.
|
||||
|
||||
You may add Your own copyright statement to Your modifications and
|
||||
may provide additional or different license terms and conditions
|
||||
for use, reproduction, or distribution of Your modifications, or
|
||||
for any such Derivative Works as a whole, provided Your use,
|
||||
reproduction, and distribution of the Work otherwise complies with
|
||||
the conditions stated in this License.
|
||||
|
||||
5. Submission of Contributions. Unless You explicitly state otherwise,
|
||||
any Contribution intentionally submitted for inclusion in the Work
|
||||
by You to the Licensor shall be under the terms and conditions of
|
||||
this License, without any additional terms or conditions.
|
||||
Notwithstanding the above, nothing herein shall supersede or modify
|
||||
the terms of any separate license agreement you may have executed
|
||||
with Licensor regarding such Contributions.
|
||||
|
||||
6. Trademarks. This License does not grant permission to use the trade
|
||||
names, trademarks, service marks, or product names of the Licensor,
|
||||
except as required for reasonable and customary use in describing the
|
||||
origin of the Work and reproducing the content of the NOTICE file.
|
||||
|
||||
7. Disclaimer of Warranty. Unless required by applicable law or
|
||||
agreed to in writing, Licensor provides the Work (and each
|
||||
Contributor provides its Contributions) on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||
implied, including, without limitation, any warranties or conditions
|
||||
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
|
||||
PARTICULAR PURPOSE. You are solely responsible for determining the
|
||||
appropriateness of using or redistributing the Work and assume any
|
||||
risks associated with Your exercise of permissions under this License.
|
||||
|
||||
8. Limitation of Liability. In no event and under no legal theory,
|
||||
whether in tort (including negligence), contract, or otherwise,
|
||||
unless required by applicable law (such as deliberate and grossly
|
||||
negligent acts) or agreed to in writing, shall any Contributor be
|
||||
liable to You for damages, including any direct, indirect, special,
|
||||
incidental, or consequential damages of any character arising as a
|
||||
result of this License or out of the use or inability to use the
|
||||
Work (including but not limited to damages for loss of goodwill,
|
||||
work stoppage, computer failure or malfunction, or any and all
|
||||
other commercial damages or losses), even if such Contributor
|
||||
has been advised of the possibility of such damages.
|
||||
|
||||
9. Accepting Warranty or Additional Liability. While redistributing
|
||||
the Work or Derivative Works thereof, You may choose to offer,
|
||||
and charge a fee for, acceptance of support, warranty, indemnity,
|
||||
or other liability obligations and/or rights consistent with this
|
||||
License. However, in accepting such obligations, You may act only
|
||||
on Your own behalf and on Your sole responsibility, not on behalf
|
||||
of any other Contributor, and only if You agree to indemnify,
|
||||
defend, and hold each Contributor harmless for any liability
|
||||
incurred by, or claims asserted against, such Contributor by reason
|
||||
of your accepting any such warranty or additional liability.
|
||||
|
||||
END OF TERMS AND CONDITIONS
|
||||
23
vendor/syn/LICENSE-MIT
vendored
Normal file
23
vendor/syn/LICENSE-MIT
vendored
Normal file
@@ -0,0 +1,23 @@
|
||||
Permission is hereby granted, free of charge, to any
|
||||
person obtaining a copy of this software and associated
|
||||
documentation files (the "Software"), to deal in the
|
||||
Software without restriction, including without
|
||||
limitation the rights to use, copy, modify, merge,
|
||||
publish, distribute, sublicense, and/or sell copies of
|
||||
the Software, and to permit persons to whom the Software
|
||||
is furnished to do so, subject to the following
|
||||
conditions:
|
||||
|
||||
The above copyright notice and this permission notice
|
||||
shall be included in all copies or substantial portions
|
||||
of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF
|
||||
ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
|
||||
TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
|
||||
PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
|
||||
SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
|
||||
CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
|
||||
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
|
||||
IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
|
||||
DEALINGS IN THE SOFTWARE.
|
||||
284
vendor/syn/README.md
vendored
Normal file
284
vendor/syn/README.md
vendored
Normal file
@@ -0,0 +1,284 @@
|
||||
Parser for Rust source code
|
||||
===========================
|
||||
|
||||
[<img alt="github" src="https://img.shields.io/badge/github-dtolnay/syn-8da0cb?style=for-the-badge&labelColor=555555&logo=github" height="20">](https://github.com/dtolnay/syn)
|
||||
[<img alt="crates.io" src="https://img.shields.io/crates/v/syn.svg?style=for-the-badge&color=fc8d62&logo=rust" height="20">](https://crates.io/crates/syn)
|
||||
[<img alt="docs.rs" src="https://img.shields.io/badge/docs.rs-syn-66c2a5?style=for-the-badge&labelColor=555555&logo=docs.rs" height="20">](https://docs.rs/syn)
|
||||
[<img alt="build status" src="https://img.shields.io/github/actions/workflow/status/dtolnay/syn/ci.yml?branch=master&style=for-the-badge" height="20">](https://github.com/dtolnay/syn/actions?query=branch%3Amaster)
|
||||
|
||||
Syn is a parsing library for parsing a stream of Rust tokens into a syntax tree
|
||||
of Rust source code.
|
||||
|
||||
Currently this library is geared toward use in Rust procedural macros, but
|
||||
contains some APIs that may be useful more generally.
|
||||
|
||||
- **Data structures** — Syn provides a complete syntax tree that can represent
|
||||
any valid Rust source code. The syntax tree is rooted at [`syn::File`] which
|
||||
represents a full source file, but there are other entry points that may be
|
||||
useful to procedural macros including [`syn::Item`], [`syn::Expr`] and
|
||||
[`syn::Type`].
|
||||
|
||||
- **Derives** — Of particular interest to derive macros is [`syn::DeriveInput`]
|
||||
which is any of the three legal input items to a derive macro. An example
|
||||
below shows using this type in a library that can derive implementations of a
|
||||
user-defined trait.
|
||||
|
||||
- **Parsing** — Parsing in Syn is built around [parser functions] with the
|
||||
signature `fn(ParseStream) -> Result<T>`. Every syntax tree node defined by
|
||||
Syn is individually parsable and may be used as a building block for custom
|
||||
syntaxes, or you may dream up your own brand new syntax without involving any
|
||||
of our syntax tree types.
|
||||
|
||||
- **Location information** — Every token parsed by Syn is associated with a
|
||||
`Span` that tracks line and column information back to the source of that
|
||||
token. These spans allow a procedural macro to display detailed error messages
|
||||
pointing to all the right places in the user's code. There is an example of
|
||||
this below.
|
||||
|
||||
- **Feature flags** — Functionality is aggressively feature gated so your
|
||||
procedural macros enable only what they need, and do not pay in compile time
|
||||
for all the rest.
|
||||
|
||||
[`syn::File`]: https://docs.rs/syn/2.0/syn/struct.File.html
|
||||
[`syn::Item`]: https://docs.rs/syn/2.0/syn/enum.Item.html
|
||||
[`syn::Expr`]: https://docs.rs/syn/2.0/syn/enum.Expr.html
|
||||
[`syn::Type`]: https://docs.rs/syn/2.0/syn/enum.Type.html
|
||||
[`syn::DeriveInput`]: https://docs.rs/syn/2.0/syn/struct.DeriveInput.html
|
||||
[parser functions]: https://docs.rs/syn/2.0/syn/parse/index.html
|
||||
|
||||
*Version requirement: Syn supports rustc 1.56 and up.*
|
||||
|
||||
[*Release notes*](https://github.com/dtolnay/syn/releases)
|
||||
|
||||
<br>
|
||||
|
||||
## Resources
|
||||
|
||||
The best way to learn about procedural macros is by writing some. Consider
|
||||
working through [this procedural macro workshop][workshop] to get familiar with
|
||||
the different types of procedural macros. The workshop contains relevant links
|
||||
into the Syn documentation as you work through each project.
|
||||
|
||||
[workshop]: https://github.com/dtolnay/proc-macro-workshop
|
||||
|
||||
<br>
|
||||
|
||||
## Example of a derive macro
|
||||
|
||||
The canonical derive macro using Syn looks like this. We write an ordinary Rust
|
||||
function tagged with a `proc_macro_derive` attribute and the name of the trait
|
||||
we are deriving. Any time that derive appears in the user's code, the Rust
|
||||
compiler passes their data structure as tokens into our macro. We get to execute
|
||||
arbitrary Rust code to figure out what to do with those tokens, then hand some
|
||||
tokens back to the compiler to compile into the user's crate.
|
||||
|
||||
[`TokenStream`]: https://doc.rust-lang.org/proc_macro/struct.TokenStream.html
|
||||
|
||||
```toml
|
||||
[dependencies]
|
||||
syn = "2.0"
|
||||
quote = "1.0"
|
||||
|
||||
[lib]
|
||||
proc-macro = true
|
||||
```
|
||||
|
||||
```rust
|
||||
use proc_macro::TokenStream;
|
||||
use quote::quote;
|
||||
use syn::{parse_macro_input, DeriveInput};
|
||||
|
||||
#[proc_macro_derive(MyMacro)]
|
||||
pub fn my_macro(input: TokenStream) -> TokenStream {
|
||||
// Parse the input tokens into a syntax tree
|
||||
let input = parse_macro_input!(input as DeriveInput);
|
||||
|
||||
// Build the output, possibly using quasi-quotation
|
||||
let expanded = quote! {
|
||||
// ...
|
||||
};
|
||||
|
||||
// Hand the output tokens back to the compiler
|
||||
TokenStream::from(expanded)
|
||||
}
|
||||
```
|
||||
|
||||
The [`heapsize`] example directory shows a complete working implementation of a
|
||||
derive macro. The example derives a `HeapSize` trait which computes an estimate
|
||||
of the amount of heap memory owned by a value.
|
||||
|
||||
[`heapsize`]: examples/heapsize
|
||||
|
||||
```rust
|
||||
pub trait HeapSize {
|
||||
/// Total number of bytes of heap memory owned by `self`.
|
||||
fn heap_size_of_children(&self) -> usize;
|
||||
}
|
||||
```
|
||||
|
||||
The derive macro allows users to write `#[derive(HeapSize)]` on data structures
|
||||
in their program.
|
||||
|
||||
```rust
|
||||
#[derive(HeapSize)]
|
||||
struct Demo<'a, T: ?Sized> {
|
||||
a: Box<T>,
|
||||
b: u8,
|
||||
c: &'a str,
|
||||
d: String,
|
||||
}
|
||||
```
|
||||
|
||||
<br>
|
||||
|
||||
## Spans and error reporting
|
||||
|
||||
The token-based procedural macro API provides great control over where the
|
||||
compiler's error messages are displayed in user code. Consider the error the
|
||||
user sees if one of their field types does not implement `HeapSize`.
|
||||
|
||||
```rust
|
||||
#[derive(HeapSize)]
|
||||
struct Broken {
|
||||
ok: String,
|
||||
bad: std::thread::Thread,
|
||||
}
|
||||
```
|
||||
|
||||
By tracking span information all the way through the expansion of a procedural
|
||||
macro as shown in the `heapsize` example, token-based macros in Syn are able to
|
||||
trigger errors that directly pinpoint the source of the problem.
|
||||
|
||||
```console
|
||||
error[E0277]: the trait bound `std::thread::Thread: HeapSize` is not satisfied
|
||||
--> src/main.rs:7:5
|
||||
|
|
||||
7 | bad: std::thread::Thread,
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^ the trait `HeapSize` is not implemented for `std::thread::Thread`
|
||||
```
|
||||
|
||||
<br>
|
||||
|
||||
## Parsing a custom syntax
|
||||
|
||||
The [`lazy-static`] example directory shows the implementation of a
|
||||
`functionlike!(...)` procedural macro in which the input tokens are parsed using
|
||||
Syn's parsing API.
|
||||
|
||||
[`lazy-static`]: examples/lazy-static
|
||||
|
||||
The example reimplements the popular `lazy_static` crate from crates.io as a
|
||||
procedural macro.
|
||||
|
||||
```rust
|
||||
lazy_static! {
|
||||
static ref USERNAME: Regex = Regex::new("^[a-z0-9_-]{3,16}$").unwrap();
|
||||
}
|
||||
```
|
||||
|
||||
The implementation shows how to trigger custom warnings and error messages on
|
||||
the macro input.
|
||||
|
||||
```console
|
||||
warning: come on, pick a more creative name
|
||||
--> src/main.rs:10:16
|
||||
|
|
||||
10 | static ref FOO: String = "lazy_static".to_owned();
|
||||
| ^^^
|
||||
```
|
||||
|
||||
<br>
|
||||
|
||||
## Testing
|
||||
|
||||
When testing macros, we often care not just that the macro can be used
|
||||
successfully but also that when the macro is provided with invalid input it
|
||||
produces maximally helpful error messages. Consider using the [`trybuild`] crate
|
||||
to write tests for errors that are emitted by your macro or errors detected by
|
||||
the Rust compiler in the expanded code following misuse of the macro. Such tests
|
||||
help avoid regressions from later refactors that mistakenly make an error no
|
||||
longer trigger or be less helpful than it used to be.
|
||||
|
||||
[`trybuild`]: https://github.com/dtolnay/trybuild
|
||||
|
||||
<br>
|
||||
|
||||
## Debugging
|
||||
|
||||
When developing a procedural macro it can be helpful to look at what the
|
||||
generated code looks like. Use `cargo rustc -- -Zunstable-options
|
||||
--pretty=expanded` or the [`cargo expand`] subcommand.
|
||||
|
||||
[`cargo expand`]: https://github.com/dtolnay/cargo-expand
|
||||
|
||||
To show the expanded code for some crate that uses your procedural macro, run
|
||||
`cargo expand` from that crate. To show the expanded code for one of your own
|
||||
test cases, run `cargo expand --test the_test_case` where the last argument is
|
||||
the name of the test file without the `.rs` extension.
|
||||
|
||||
This write-up by Brandon W Maister discusses debugging in more detail:
|
||||
[Debugging Rust's new Custom Derive system][debugging].
|
||||
|
||||
[debugging]: https://quodlibetor.github.io/posts/debugging-rusts-new-custom-derive-system/
|
||||
|
||||
<br>
|
||||
|
||||
## Optional features
|
||||
|
||||
Syn puts a lot of functionality behind optional features in order to optimize
|
||||
compile time for the most common use cases. The following features are
|
||||
available.
|
||||
|
||||
- **`derive`** *(enabled by default)* — Data structures for representing the
|
||||
possible input to a derive macro, including structs and enums and types.
|
||||
- **`full`** — Data structures for representing the syntax tree of all valid
|
||||
Rust source code, including items and expressions.
|
||||
- **`parsing`** *(enabled by default)* — Ability to parse input tokens into a
|
||||
syntax tree node of a chosen type.
|
||||
- **`printing`** *(enabled by default)* — Ability to print a syntax tree node as
|
||||
tokens of Rust source code.
|
||||
- **`visit`** — Trait for traversing a syntax tree.
|
||||
- **`visit-mut`** — Trait for traversing and mutating in place a syntax tree.
|
||||
- **`fold`** — Trait for transforming an owned syntax tree.
|
||||
- **`clone-impls`** *(enabled by default)* — Clone impls for all syntax tree
|
||||
types.
|
||||
- **`extra-traits`** — Debug, Eq, PartialEq, Hash impls for all syntax tree
|
||||
types.
|
||||
- **`proc-macro`** *(enabled by default)* — Runtime dependency on the dynamic
|
||||
library libproc_macro from rustc toolchain.
|
||||
|
||||
<br>
|
||||
|
||||
## Proc macro shim
|
||||
|
||||
Syn operates on the token representation provided by the [proc-macro2] crate
|
||||
from crates.io rather than using the compiler's built in proc-macro crate
|
||||
directly. This enables code using Syn to execute outside of the context of a
|
||||
procedural macro, such as in unit tests or build.rs, and we avoid needing
|
||||
incompatible ecosystems for proc macros vs non-macro use cases.
|
||||
|
||||
In general all of your code should be written against proc-macro2 rather than
|
||||
proc-macro. The one exception is in the signatures of procedural macro entry
|
||||
points, which are required by the language to use `proc_macro::TokenStream`.
|
||||
|
||||
The proc-macro2 crate will automatically detect and use the compiler's data
|
||||
structures when a procedural macro is active.
|
||||
|
||||
[proc-macro2]: https://docs.rs/proc-macro2/1.0/proc_macro2/
|
||||
|
||||
<br>
|
||||
|
||||
#### License
|
||||
|
||||
<sup>
|
||||
Licensed under either of <a href="LICENSE-APACHE">Apache License, Version
|
||||
2.0</a> or <a href="LICENSE-MIT">MIT license</a> at your option.
|
||||
</sup>
|
||||
|
||||
<br>
|
||||
|
||||
<sub>
|
||||
Unless you explicitly state otherwise, any contribution intentionally submitted
|
||||
for inclusion in this crate by you, as defined in the Apache-2.0 license, shall
|
||||
be dual licensed as above, without any additional terms or conditions.
|
||||
</sub>
|
||||
57
vendor/syn/benches/file.rs
vendored
Normal file
57
vendor/syn/benches/file.rs
vendored
Normal file
@@ -0,0 +1,57 @@
|
||||
// $ cargo bench --features full,test --bench file
|
||||
|
||||
#![feature(rustc_private, test)]
|
||||
#![recursion_limit = "1024"]
|
||||
#![allow(
|
||||
clippy::items_after_statements,
|
||||
clippy::manual_let_else,
|
||||
clippy::match_like_matches_macro,
|
||||
clippy::missing_panics_doc,
|
||||
clippy::must_use_candidate,
|
||||
clippy::uninlined_format_args
|
||||
)]
|
||||
|
||||
extern crate test;
|
||||
|
||||
#[macro_use]
|
||||
#[path = "../tests/macros/mod.rs"]
|
||||
mod macros;
|
||||
|
||||
#[allow(dead_code)]
|
||||
#[path = "../tests/repo/mod.rs"]
|
||||
mod repo;
|
||||
|
||||
use proc_macro2::{Span, TokenStream};
|
||||
use std::fs;
|
||||
use std::str::FromStr;
|
||||
use syn::parse::{ParseStream, Parser};
|
||||
use test::Bencher;
|
||||
|
||||
const FILE: &str = "tests/rust/library/core/src/str/mod.rs";
|
||||
|
||||
fn get_tokens() -> TokenStream {
|
||||
repo::clone_rust();
|
||||
let content = fs::read_to_string(FILE).unwrap();
|
||||
TokenStream::from_str(&content).unwrap()
|
||||
}
|
||||
|
||||
#[bench]
|
||||
fn baseline(b: &mut Bencher) {
|
||||
let tokens = get_tokens();
|
||||
b.iter(|| drop(tokens.clone()));
|
||||
}
|
||||
|
||||
#[bench]
|
||||
fn create_token_buffer(b: &mut Bencher) {
|
||||
let tokens = get_tokens();
|
||||
fn immediate_fail(_input: ParseStream) -> syn::Result<()> {
|
||||
Err(syn::Error::new(Span::call_site(), ""))
|
||||
}
|
||||
b.iter(|| immediate_fail.parse2(tokens.clone()));
|
||||
}
|
||||
|
||||
#[bench]
|
||||
fn parse_file(b: &mut Bencher) {
|
||||
let tokens = get_tokens();
|
||||
b.iter(|| syn::parse2::<syn::File>(tokens.clone()));
|
||||
}
|
||||
182
vendor/syn/benches/rust.rs
vendored
Normal file
182
vendor/syn/benches/rust.rs
vendored
Normal file
@@ -0,0 +1,182 @@
|
||||
// $ cargo bench --features full,test --bench rust
|
||||
//
|
||||
// Syn only, useful for profiling:
|
||||
// $ RUSTFLAGS='--cfg syn_only' cargo build --release --features full,test --bench rust
|
||||
|
||||
#![cfg_attr(not(syn_only), feature(rustc_private))]
|
||||
#![recursion_limit = "1024"]
|
||||
#![allow(
|
||||
clippy::arc_with_non_send_sync,
|
||||
clippy::cast_lossless,
|
||||
clippy::let_underscore_untyped,
|
||||
clippy::manual_let_else,
|
||||
clippy::match_like_matches_macro,
|
||||
clippy::uninlined_format_args,
|
||||
clippy::unnecessary_wraps
|
||||
)]
|
||||
|
||||
#[macro_use]
|
||||
#[path = "../tests/macros/mod.rs"]
|
||||
mod macros;
|
||||
|
||||
#[allow(dead_code)]
|
||||
#[path = "../tests/repo/mod.rs"]
|
||||
mod repo;
|
||||
|
||||
use std::fs;
|
||||
use std::time::{Duration, Instant};
|
||||
|
||||
#[cfg(not(syn_only))]
|
||||
mod tokenstream_parse {
|
||||
use proc_macro2::TokenStream;
|
||||
use std::str::FromStr;
|
||||
|
||||
pub fn bench(content: &str) -> Result<(), ()> {
|
||||
TokenStream::from_str(content).map(drop).map_err(drop)
|
||||
}
|
||||
}
|
||||
|
||||
mod syn_parse {
|
||||
pub fn bench(content: &str) -> Result<(), ()> {
|
||||
syn::parse_file(content).map(drop).map_err(drop)
|
||||
}
|
||||
}
|
||||
|
||||
// Benchmarks rustc's own parser on the same corpus, for comparison against
// syn. Requires a nightly toolchain (see the rustc_private crate attribute).
#[cfg(not(syn_only))]
mod librustc_parse {
    extern crate rustc_data_structures;
    extern crate rustc_driver;
    extern crate rustc_error_messages;
    extern crate rustc_errors;
    extern crate rustc_parse;
    extern crate rustc_session;
    extern crate rustc_span;

    use rustc_data_structures::sync::Lrc;
    use rustc_error_messages::FluentBundle;
    use rustc_errors::{emitter::Emitter, translation::Translate, DiagCtxt, Diagnostic};
    use rustc_session::parse::ParseSess;
    use rustc_span::source_map::{FilePathMapping, SourceMap};
    use rustc_span::{edition::Edition, FileName};

    /// Parse `content` as a whole crate with rustc's parser, swallowing all
    /// diagnostics. Returns `Err(())` if the source fails to parse.
    pub fn bench(content: &str) -> Result<(), ()> {
        // Emitter that discards every diagnostic so benchmark output stays
        // clean even on intentionally-failing inputs.
        struct SilentEmitter;

        impl Emitter for SilentEmitter {
            fn emit_diagnostic(&mut self, _diag: &Diagnostic) {}
            fn source_map(&self) -> Option<&Lrc<SourceMap>> {
                None
            }
        }

        impl Translate for SilentEmitter {
            fn fluent_bundle(&self) -> Option<&Lrc<FluentBundle>> {
                None
            }
            fn fallback_fluent_bundle(&self) -> &FluentBundle {
                // Unreachable in practice: emit_diagnostic drops diagnostics
                // before any translation is attempted.
                panic!("silent emitter attempted to translate a diagnostic");
            }
        }

        // rustc's span interner is per-session thread-local state; it must be
        // set up before any parsing can happen.
        rustc_span::create_session_if_not_set_then(Edition::Edition2018, |_| {
            let source_map = Lrc::new(SourceMap::new(FilePathMapping::empty()));
            let emitter = Box::new(SilentEmitter);
            let handler = DiagCtxt::with_emitter(emitter);
            let sess = ParseSess::with_dcx(handler, source_map);
            if let Err(diagnostic) = rustc_parse::parse_crate_from_source_str(
                FileName::Custom("bench".to_owned()),
                content.to_owned(),
                &sess,
            ) {
                // Cancel so the diagnostic is not counted as an error when
                // dropped.
                diagnostic.cancel();
                return Err(());
            };
            Ok(())
        })
    }
}
|
||||
|
||||
#[cfg(not(syn_only))]
mod read_from_disk {
    /// Baseline codepath: performs no parsing, so timing it through `exec`
    /// measures pure disk-read overhead.
    pub fn bench(content: &str) -> Result<(), ()> {
        // Deliberately ignore the content; success is unconditional.
        let _unused = content;
        Ok(())
    }
}
|
||||
|
||||
/// Run `codepath` over every Rust source file in the vendored rustc checkout
/// (the compiler/ and library/ trees) and return the total wall-clock time.
///
/// The final assertion panics if any file fails to process, so a parser
/// regression shows up as a hard failure rather than as skewed timing.
fn exec(mut codepath: impl FnMut(&str) -> Result<(), ()>) -> Duration {
    let begin = Instant::now();
    let mut success = 0;
    let mut total = 0;

    ["tests/rust/compiler", "tests/rust/library"]
        .iter()
        .flat_map(|dir| {
            walkdir::WalkDir::new(dir)
                .into_iter()
                // Prune directories the test harness excludes from the corpus.
                .filter_entry(repo::base_dir_filter)
        })
        .for_each(|entry| {
            let entry = entry.unwrap();
            let path = entry.path();
            if path.is_dir() {
                return;
            }
            let content = fs::read_to_string(path).unwrap();
            let ok = codepath(&content).is_ok();
            success += ok as usize;
            total += 1;
            if !ok {
                // Name the offending file so failures are diagnosable.
                eprintln!("FAIL {}", path.display());
            }
        });

    assert_eq!(success, total);
    begin.elapsed()
}
|
||||
|
||||
fn main() {
    // Ensure the rust-lang/rust sources used as parser input are checked out.
    repo::clone_rust();

    // Expands to an array of (name, bench-fn) pairs, forwarding any cfg
    // attributes so that `--cfg syn_only` builds contain only the syn path.
    macro_rules! testcases {
        ($($(#[$cfg:meta])* $name:ident,)*) => {
            [
                $(
                    $(#[$cfg])*
                    (stringify!($name), $name::bench as fn(&str) -> Result<(), ()>),
                )*
            ]
        };
    }

    // Print corpus statistics once, up front (skipped under syn_only).
    #[cfg(not(syn_only))]
    {
        let mut lines = 0;
        let mut files = 0;
        exec(|content| {
            lines += content.lines().count();
            files += 1;
            Ok(())
        });
        eprintln!("\n{} lines in {} files", lines, files);
    }

    // Time each registered codepath over the full corpus.
    for (name, f) in testcases!(
        #[cfg(not(syn_only))]
        read_from_disk,
        #[cfg(not(syn_only))]
        tokenstream_parse,
        syn_parse,
        #[cfg(not(syn_only))]
        librustc_parse,
    ) {
        eprint!("{:20}", format!("{}:", name));
        let elapsed = exec(f);
        eprintln!(
            "elapsed={}.{:03}s",
            elapsed.as_secs(),
            elapsed.subsec_millis(),
        );
    }
    eprintln!();
}
|
||||
776
vendor/syn/src/attr.rs
vendored
Normal file
776
vendor/syn/src/attr.rs
vendored
Normal file
@@ -0,0 +1,776 @@
|
||||
use super::*;
|
||||
use proc_macro2::TokenStream;
|
||||
use std::iter;
|
||||
use std::slice;
|
||||
|
||||
#[cfg(feature = "parsing")]
|
||||
use crate::meta::{self, ParseNestedMeta};
|
||||
#[cfg(feature = "parsing")]
|
||||
use crate::parse::{Parse, ParseStream, Parser, Result};
|
||||
|
||||
ast_struct! {
|
||||
/// An attribute, like `#[repr(transparent)]`.
|
||||
///
|
||||
/// <br>
|
||||
///
|
||||
/// # Syntax
|
||||
///
|
||||
/// Rust has six types of attributes.
|
||||
///
|
||||
/// - Outer attributes like `#[repr(transparent)]`. These appear outside or
|
||||
/// in front of the item they describe.
|
||||
///
|
||||
/// - Inner attributes like `#![feature(proc_macro)]`. These appear inside
|
||||
/// of the item they describe, usually a module.
|
||||
///
|
||||
/// - Outer one-line doc comments like `/// Example`.
|
||||
///
|
||||
/// - Inner one-line doc comments like `//! Please file an issue`.
|
||||
///
|
||||
/// - Outer documentation blocks `/** Example */`.
|
||||
///
|
||||
/// - Inner documentation blocks `/*! Please file an issue */`.
|
||||
///
|
||||
/// The `style` field of type `AttrStyle` distinguishes whether an attribute
|
||||
/// is outer or inner.
|
||||
///
|
||||
/// Every attribute has a `path` that indicates the intended interpretation
|
||||
/// of the rest of the attribute's contents. The path and the optional
|
||||
/// additional contents are represented together in the `meta` field of the
|
||||
/// attribute in three possible varieties:
|
||||
///
|
||||
/// - Meta::Path — attributes whose information content conveys just a
|
||||
/// path, for example the `#[test]` attribute.
|
||||
///
|
||||
/// - Meta::List — attributes that carry arbitrary tokens after the
|
||||
/// path, surrounded by a delimiter (parenthesis, bracket, or brace). For
|
||||
/// example `#[derive(Copy)]` or `#[precondition(x < 5)]`.
|
||||
///
|
||||
/// - Meta::NameValue — attributes with an `=` sign after the path,
|
||||
/// followed by a Rust expression. For example `#[path =
|
||||
/// "sys/windows.rs"]`.
|
||||
///
|
||||
/// All doc comments are represented in the NameValue style with a path of
|
||||
/// "doc", as this is how they are processed by the compiler and by
|
||||
/// `macro_rules!` macros.
|
||||
///
|
||||
/// ```text
|
||||
/// #[derive(Copy, Clone)]
|
||||
/// ~~~~~~Path
|
||||
/// ^^^^^^^^^^^^^^^^^^^Meta::List
|
||||
///
|
||||
/// #[path = "sys/windows.rs"]
|
||||
/// ~~~~Path
|
||||
/// ^^^^^^^^^^^^^^^^^^^^^^^Meta::NameValue
|
||||
///
|
||||
/// #[test]
|
||||
/// ^^^^Meta::Path
|
||||
/// ```
|
||||
///
|
||||
/// <br>
|
||||
///
|
||||
/// # Parsing from tokens to Attribute
|
||||
///
|
||||
/// This type does not implement the [`Parse`] trait and thus cannot be
|
||||
/// parsed directly by [`ParseStream::parse`]. Instead use
|
||||
/// [`ParseStream::call`] with one of the two parser functions
|
||||
/// [`Attribute::parse_outer`] or [`Attribute::parse_inner`] depending on
|
||||
/// which you intend to parse.
|
||||
///
|
||||
/// [`Parse`]: parse::Parse
|
||||
/// [`ParseStream::parse`]: parse::ParseBuffer::parse
|
||||
/// [`ParseStream::call`]: parse::ParseBuffer::call
|
||||
///
|
||||
/// ```
|
||||
/// use syn::{Attribute, Ident, Result, Token};
|
||||
/// use syn::parse::{Parse, ParseStream};
|
||||
///
|
||||
/// // Parses a unit struct with attributes.
|
||||
/// //
|
||||
/// // #[path = "s.tmpl"]
|
||||
/// // struct S;
|
||||
/// struct UnitStruct {
|
||||
/// attrs: Vec<Attribute>,
|
||||
/// struct_token: Token![struct],
|
||||
/// name: Ident,
|
||||
/// semi_token: Token![;],
|
||||
/// }
|
||||
///
|
||||
/// impl Parse for UnitStruct {
|
||||
/// fn parse(input: ParseStream) -> Result<Self> {
|
||||
/// Ok(UnitStruct {
|
||||
/// attrs: input.call(Attribute::parse_outer)?,
|
||||
/// struct_token: input.parse()?,
|
||||
/// name: input.parse()?,
|
||||
/// semi_token: input.parse()?,
|
||||
/// })
|
||||
/// }
|
||||
/// }
|
||||
/// ```
|
||||
///
|
||||
/// <p><br></p>
|
||||
///
|
||||
/// # Parsing from Attribute to structured arguments
|
||||
///
|
||||
/// The grammar of attributes in Rust is very flexible, which makes the
|
||||
/// syntax tree not that useful on its own. In particular, arguments of the
|
||||
/// `Meta::List` variety of attribute are held in an arbitrary `tokens:
|
||||
/// TokenStream`. Macros are expected to check the `path` of the attribute,
|
||||
/// decide whether they recognize it, and then parse the remaining tokens
|
||||
/// according to whatever grammar they wish to require for that kind of
|
||||
/// attribute. Use [`parse_args()`] to parse those tokens into the expected
|
||||
/// data structure.
|
||||
///
|
||||
/// [`parse_args()`]: Attribute::parse_args
|
||||
///
|
||||
/// <p><br></p>
|
||||
///
|
||||
/// # Doc comments
|
||||
///
|
||||
/// The compiler transforms doc comments, such as `/// comment` and `/*!
|
||||
/// comment */`, into attributes before macros are expanded. Each comment is
|
||||
/// expanded into an attribute of the form `#[doc = r"comment"]`.
|
||||
///
|
||||
/// As an example, the following `mod` items are expanded identically:
|
||||
///
|
||||
/// ```
|
||||
/// # use syn::{ItemMod, parse_quote};
|
||||
/// let doc: ItemMod = parse_quote! {
|
||||
/// /// Single line doc comments
|
||||
/// /// We write so many!
|
||||
/// /**
|
||||
/// * Multi-line comments...
|
||||
/// * May span many lines
|
||||
/// */
|
||||
/// mod example {
|
||||
/// //! Of course, they can be inner too
|
||||
/// /*! And fit in a single line */
|
||||
/// }
|
||||
/// };
|
||||
/// let attr: ItemMod = parse_quote! {
|
||||
/// #[doc = r" Single line doc comments"]
|
||||
/// #[doc = r" We write so many!"]
|
||||
/// #[doc = r"
|
||||
/// * Multi-line comments...
|
||||
/// * May span many lines
|
||||
/// "]
|
||||
/// mod example {
|
||||
/// #![doc = r" Of course, they can be inner too"]
|
||||
/// #![doc = r" And fit in a single line "]
|
||||
/// }
|
||||
/// };
|
||||
/// assert_eq!(doc, attr);
|
||||
/// ```
|
||||
#[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))]
|
||||
pub struct Attribute {
|
||||
pub pound_token: Token![#],
|
||||
pub style: AttrStyle,
|
||||
pub bracket_token: token::Bracket,
|
||||
pub meta: Meta,
|
||||
}
|
||||
}
|
||||
|
||||
impl Attribute {
|
||||
/// Returns the path that identifies the interpretation of this attribute.
|
||||
///
|
||||
/// For example this would return the `test` in `#[test]`, the `derive` in
|
||||
/// `#[derive(Copy)]`, and the `path` in `#[path = "sys/windows.rs"]`.
|
||||
pub fn path(&self) -> &Path {
|
||||
self.meta.path()
|
||||
}
|
||||
|
||||
/// Parse the arguments to the attribute as a syntax tree.
|
||||
///
|
||||
/// This is similar to pulling out the `TokenStream` from `Meta::List` and
|
||||
/// doing `syn::parse2::<T>(meta_list.tokens)`, except that using
|
||||
/// `parse_args` the error message has a more useful span when `tokens` is
|
||||
/// empty.
|
||||
///
|
||||
/// The surrounding delimiters are *not* included in the input to the
|
||||
/// parser.
|
||||
///
|
||||
/// ```text
|
||||
/// #[my_attr(value < 5)]
|
||||
/// ^^^^^^^^^ what gets parsed
|
||||
/// ```
|
||||
///
|
||||
/// # Example
|
||||
///
|
||||
/// ```
|
||||
/// use syn::{parse_quote, Attribute, Expr};
|
||||
///
|
||||
/// let attr: Attribute = parse_quote! {
|
||||
/// #[precondition(value < 5)]
|
||||
/// };
|
||||
///
|
||||
/// if attr.path().is_ident("precondition") {
|
||||
/// let precondition: Expr = attr.parse_args()?;
|
||||
/// // ...
|
||||
/// }
|
||||
/// # anyhow::Ok(())
|
||||
/// ```
|
||||
#[cfg(feature = "parsing")]
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
|
||||
pub fn parse_args<T: Parse>(&self) -> Result<T> {
|
||||
self.parse_args_with(T::parse)
|
||||
}
|
||||
|
||||
/// Parse the arguments to the attribute using the given parser.
|
||||
///
|
||||
/// # Example
|
||||
///
|
||||
/// ```
|
||||
/// use syn::{parse_quote, Attribute};
|
||||
///
|
||||
/// let attr: Attribute = parse_quote! {
|
||||
/// #[inception { #[brrrrrrraaaaawwwwrwrrrmrmrmmrmrmmmmm] }]
|
||||
/// };
|
||||
///
|
||||
/// let bwom = attr.parse_args_with(Attribute::parse_outer)?;
|
||||
///
|
||||
/// // Attribute does not have a Parse impl, so we couldn't directly do:
|
||||
/// // let bwom: Attribute = attr.parse_args()?;
|
||||
/// # anyhow::Ok(())
|
||||
/// ```
|
||||
#[cfg(feature = "parsing")]
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
|
||||
pub fn parse_args_with<F: Parser>(&self, parser: F) -> Result<F::Output> {
|
||||
match &self.meta {
|
||||
Meta::Path(path) => Err(crate::error::new2(
|
||||
path.segments.first().unwrap().ident.span(),
|
||||
path.segments.last().unwrap().ident.span(),
|
||||
format!(
|
||||
"expected attribute arguments in parentheses: {}[{}(...)]",
|
||||
parsing::DisplayAttrStyle(&self.style),
|
||||
parsing::DisplayPath(path),
|
||||
),
|
||||
)),
|
||||
Meta::NameValue(meta) => Err(Error::new(
|
||||
meta.eq_token.span,
|
||||
format_args!(
|
||||
"expected parentheses: {}[{}(...)]",
|
||||
parsing::DisplayAttrStyle(&self.style),
|
||||
parsing::DisplayPath(&meta.path),
|
||||
),
|
||||
)),
|
||||
Meta::List(meta) => meta.parse_args_with(parser),
|
||||
}
|
||||
}
|
||||
|
||||
/// Parse the arguments to the attribute, expecting it to follow the
|
||||
/// conventional structure used by most of Rust's built-in attributes.
|
||||
///
|
||||
/// The [*Meta Item Attribute Syntax*][syntax] section in the Rust reference
|
||||
/// explains the convention in more detail. Not all attributes follow this
|
||||
/// convention, so [`parse_args()`][Self::parse_args] is available if you
|
||||
/// need to parse arbitrarily goofy attribute syntax.
|
||||
///
|
||||
/// [syntax]: https://doc.rust-lang.org/reference/attributes.html#meta-item-attribute-syntax
|
||||
///
|
||||
/// # Example
|
||||
///
|
||||
/// We'll parse a struct, and then parse some of Rust's `#[repr]` attribute
|
||||
/// syntax.
|
||||
///
|
||||
/// ```
|
||||
/// use syn::{parenthesized, parse_quote, token, ItemStruct, LitInt};
|
||||
///
|
||||
/// let input: ItemStruct = parse_quote! {
|
||||
/// #[repr(C, align(4))]
|
||||
/// pub struct MyStruct(u16, u32);
|
||||
/// };
|
||||
///
|
||||
/// let mut repr_c = false;
|
||||
/// let mut repr_transparent = false;
|
||||
/// let mut repr_align = None::<usize>;
|
||||
/// let mut repr_packed = None::<usize>;
|
||||
/// for attr in &input.attrs {
|
||||
/// if attr.path().is_ident("repr") {
|
||||
/// attr.parse_nested_meta(|meta| {
|
||||
/// // #[repr(C)]
|
||||
/// if meta.path.is_ident("C") {
|
||||
/// repr_c = true;
|
||||
/// return Ok(());
|
||||
/// }
|
||||
///
|
||||
/// // #[repr(transparent)]
|
||||
/// if meta.path.is_ident("transparent") {
|
||||
/// repr_transparent = true;
|
||||
/// return Ok(());
|
||||
/// }
|
||||
///
|
||||
/// // #[repr(align(N))]
|
||||
/// if meta.path.is_ident("align") {
|
||||
/// let content;
|
||||
/// parenthesized!(content in meta.input);
|
||||
/// let lit: LitInt = content.parse()?;
|
||||
/// let n: usize = lit.base10_parse()?;
|
||||
/// repr_align = Some(n);
|
||||
/// return Ok(());
|
||||
/// }
|
||||
///
|
||||
/// // #[repr(packed)] or #[repr(packed(N))], omitted N means 1
|
||||
/// if meta.path.is_ident("packed") {
|
||||
/// if meta.input.peek(token::Paren) {
|
||||
/// let content;
|
||||
/// parenthesized!(content in meta.input);
|
||||
/// let lit: LitInt = content.parse()?;
|
||||
/// let n: usize = lit.base10_parse()?;
|
||||
/// repr_packed = Some(n);
|
||||
/// } else {
|
||||
/// repr_packed = Some(1);
|
||||
/// }
|
||||
/// return Ok(());
|
||||
/// }
|
||||
///
|
||||
/// Err(meta.error("unrecognized repr"))
|
||||
/// })?;
|
||||
/// }
|
||||
/// }
|
||||
/// # anyhow::Ok(())
|
||||
/// ```
|
||||
///
|
||||
/// # Alternatives
|
||||
///
|
||||
/// In some cases, for attributes which have nested layers of structured
|
||||
/// content, the following less flexible approach might be more convenient:
|
||||
///
|
||||
/// ```
|
||||
/// # use syn::{parse_quote, ItemStruct};
|
||||
/// #
|
||||
/// # let input: ItemStruct = parse_quote! {
|
||||
/// # #[repr(C, align(4))]
|
||||
/// # pub struct MyStruct(u16, u32);
|
||||
/// # };
|
||||
/// #
|
||||
/// use syn::punctuated::Punctuated;
|
||||
/// use syn::{parenthesized, token, Error, LitInt, Meta, Token};
|
||||
///
|
||||
/// let mut repr_c = false;
|
||||
/// let mut repr_transparent = false;
|
||||
/// let mut repr_align = None::<usize>;
|
||||
/// let mut repr_packed = None::<usize>;
|
||||
/// for attr in &input.attrs {
|
||||
/// if attr.path().is_ident("repr") {
|
||||
/// let nested = attr.parse_args_with(Punctuated::<Meta, Token![,]>::parse_terminated)?;
|
||||
/// for meta in nested {
|
||||
/// match meta {
|
||||
/// // #[repr(C)]
|
||||
/// Meta::Path(path) if path.is_ident("C") => {
|
||||
/// repr_c = true;
|
||||
/// }
|
||||
///
|
||||
/// // #[repr(align(N))]
|
||||
/// Meta::List(meta) if meta.path.is_ident("align") => {
|
||||
/// let lit: LitInt = meta.parse_args()?;
|
||||
/// let n: usize = lit.base10_parse()?;
|
||||
/// repr_align = Some(n);
|
||||
/// }
|
||||
///
|
||||
/// /* ... */
|
||||
///
|
||||
/// _ => {
|
||||
/// return Err(Error::new_spanned(meta, "unrecognized repr"));
|
||||
/// }
|
||||
/// }
|
||||
/// }
|
||||
/// }
|
||||
/// }
|
||||
/// # Ok(())
|
||||
/// ```
|
||||
#[cfg(feature = "parsing")]
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
|
||||
pub fn parse_nested_meta(
|
||||
&self,
|
||||
logic: impl FnMut(ParseNestedMeta) -> Result<()>,
|
||||
) -> Result<()> {
|
||||
self.parse_args_with(meta::parser(logic))
|
||||
}
|
||||
|
||||
/// Parses zero or more outer attributes from the stream.
|
||||
///
|
||||
/// # Example
|
||||
///
|
||||
/// See
|
||||
/// [*Parsing from tokens to Attribute*](#parsing-from-tokens-to-attribute).
|
||||
#[cfg(feature = "parsing")]
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
|
||||
pub fn parse_outer(input: ParseStream) -> Result<Vec<Self>> {
|
||||
let mut attrs = Vec::new();
|
||||
while input.peek(Token![#]) {
|
||||
attrs.push(input.call(parsing::single_parse_outer)?);
|
||||
}
|
||||
Ok(attrs)
|
||||
}
|
||||
|
||||
/// Parses zero or more inner attributes from the stream.
|
||||
///
|
||||
/// # Example
|
||||
///
|
||||
/// See
|
||||
/// [*Parsing from tokens to Attribute*](#parsing-from-tokens-to-attribute).
|
||||
#[cfg(feature = "parsing")]
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
|
||||
pub fn parse_inner(input: ParseStream) -> Result<Vec<Self>> {
|
||||
let mut attrs = Vec::new();
|
||||
parsing::parse_inner(input, &mut attrs)?;
|
||||
Ok(attrs)
|
||||
}
|
||||
}
|
||||
|
||||
ast_enum! {
    /// Distinguishes between attributes that decorate an item and attributes
    /// that are contained within an item.
    ///
    /// # Outer attributes
    ///
    /// - `#[repr(transparent)]`
    /// - `/// # Example`
    /// - `/** Please file an issue */`
    ///
    /// # Inner attributes
    ///
    /// - `#![feature(proc_macro)]`
    /// - `//! # Example`
    /// - `/*! Please file an issue */`
    #[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))]
    pub enum AttrStyle {
        // `#[...]` — written in front of the item it applies to.
        Outer,
        // `#![...]` — written inside the item; the field is the `!` token.
        Inner(Token![!]),
    }
}
|
||||
|
||||
ast_enum_of_structs! {
    /// Content of a compile-time structured attribute.
    ///
    /// ## Path
    ///
    /// A meta path is like the `test` in `#[test]`.
    ///
    /// ## List
    ///
    /// A meta list is like the `derive(Copy)` in `#[derive(Copy)]`.
    ///
    /// ## NameValue
    ///
    /// A name-value meta is like the `path = "..."` in `#[path =
    /// "sys/windows.rs"]`.
    ///
    /// # Syntax tree enum
    ///
    /// This type is a [syntax tree enum].
    ///
    /// [syntax tree enum]: Expr#syntax-tree-enums
    #[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))]
    pub enum Meta {
        // A bare path with no arguments, like the `test` in `#[test]`.
        Path(Path),

        /// A structured list within an attribute, like `derive(Copy, Clone)`.
        List(MetaList),

        /// A name-value pair within an attribute, like `feature = "nightly"`.
        NameValue(MetaNameValue),
    }
}
|
||||
|
||||
ast_struct! {
    /// A structured list within an attribute, like `derive(Copy, Clone)`.
    #[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))]
    pub struct MetaList {
        // The path before the delimiter, e.g. the `derive` in `derive(Copy)`.
        pub path: Path,
        // Which delimiter surrounds the arguments: `(...)`, `[...]`, or `{...}`.
        pub delimiter: MacroDelimiter,
        // The raw argument tokens, not including the delimiter itself.
        pub tokens: TokenStream,
    }
}
|
||||
|
||||
ast_struct! {
    /// A name-value pair within an attribute, like `feature = "nightly"`.
    #[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))]
    pub struct MetaNameValue {
        // The path left of the `=`, e.g. the `feature` in `feature = "nightly"`.
        pub path: Path,
        pub eq_token: Token![=],
        // The expression right of the `=`; often, but not necessarily, a literal.
        pub value: Expr,
    }
}
|
||||
|
||||
impl Meta {
    /// Returns the path that begins this structured meta item.
    ///
    /// For example this would return the `test` in `#[test]`, the `derive` in
    /// `#[derive(Copy)]`, and the `path` in `#[path = "sys/windows.rs"]`.
    pub fn path(&self) -> &Path {
        match self {
            Meta::Path(path) => path,
            Meta::List(meta) => &meta.path,
            Meta::NameValue(meta) => &meta.path,
        }
    }

    /// Error if this is a `Meta::List` or `Meta::NameValue`.
    #[cfg(feature = "parsing")]
    #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
    pub fn require_path_only(&self) -> Result<&Path> {
        // Point the error at the first unexpected token: the opening
        // delimiter for a list, or the `=` for a name-value pair.
        let error_span = match self {
            Meta::Path(path) => return Ok(path),
            Meta::List(meta) => meta.delimiter.span().open(),
            Meta::NameValue(meta) => meta.eq_token.span,
        };
        Err(Error::new(error_span, "unexpected token in attribute"))
    }

    /// Error if this is a `Meta::Path` or `Meta::NameValue`.
    #[cfg(feature = "parsing")]
    #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
    pub fn require_list(&self) -> Result<&MetaList> {
        match self {
            Meta::List(meta) => Ok(meta),
            // Span the whole path (first segment through last) and suggest
            // the expected `path(...)` form in the message.
            Meta::Path(path) => Err(crate::error::new2(
                path.segments.first().unwrap().ident.span(),
                path.segments.last().unwrap().ident.span(),
                format!(
                    "expected attribute arguments in parentheses: `{}(...)`",
                    parsing::DisplayPath(path),
                ),
            )),
            Meta::NameValue(meta) => Err(Error::new(meta.eq_token.span, "expected `(`")),
        }
    }

    /// Error if this is a `Meta::Path` or `Meta::List`.
    #[cfg(feature = "parsing")]
    #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
    pub fn require_name_value(&self) -> Result<&MetaNameValue> {
        match self {
            Meta::NameValue(meta) => Ok(meta),
            // Span the whole path and suggest the expected `path = ...` form.
            Meta::Path(path) => Err(crate::error::new2(
                path.segments.first().unwrap().ident.span(),
                path.segments.last().unwrap().ident.span(),
                format!(
                    "expected a value for this attribute: `{} = ...`",
                    parsing::DisplayPath(path),
                ),
            )),
            Meta::List(meta) => Err(Error::new(meta.delimiter.span().open(), "expected `=`")),
        }
    }
}
|
||||
|
||||
impl MetaList {
    /// See [`Attribute::parse_args`].
    #[cfg(feature = "parsing")]
    #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
    pub fn parse_args<T: Parse>(&self) -> Result<T> {
        self.parse_args_with(T::parse)
    }

    /// See [`Attribute::parse_args_with`].
    #[cfg(feature = "parsing")]
    #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
    pub fn parse_args_with<F: Parser>(&self, parser: F) -> Result<F::Output> {
        // End-of-input errors use the closing delimiter's span so they point
        // inside the attribute rather than past it.
        let scope = self.delimiter.span().close();
        crate::parse::parse_scoped(parser, scope, self.tokens.clone())
    }

    /// See [`Attribute::parse_nested_meta`].
    #[cfg(feature = "parsing")]
    #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
    pub fn parse_nested_meta(
        &self,
        logic: impl FnMut(ParseNestedMeta) -> Result<()>,
    ) -> Result<()> {
        self.parse_args_with(meta::parser(logic))
    }
}
|
||||
|
||||
/// Crate-internal helper for filtering an attribute list by style.
pub(crate) trait FilterAttrs<'a> {
    /// Iterator type returned by both filters.
    type Ret: Iterator<Item = &'a Attribute>;

    /// Keep only outer (`#[...]`) attributes.
    fn outer(self) -> Self::Ret;
    /// Keep only inner (`#![...]`) attributes.
    fn inner(self) -> Self::Ret;
}
|
||||
|
||||
impl<'a> FilterAttrs<'a> for &'a [Attribute] {
|
||||
type Ret = iter::Filter<slice::Iter<'a, Attribute>, fn(&&Attribute) -> bool>;
|
||||
|
||||
fn outer(self) -> Self::Ret {
|
||||
fn is_outer(attr: &&Attribute) -> bool {
|
||||
match attr.style {
|
||||
AttrStyle::Outer => true,
|
||||
AttrStyle::Inner(_) => false,
|
||||
}
|
||||
}
|
||||
self.iter().filter(is_outer)
|
||||
}
|
||||
|
||||
fn inner(self) -> Self::Ret {
|
||||
fn is_inner(attr: &&Attribute) -> bool {
|
||||
match attr.style {
|
||||
AttrStyle::Inner(_) => true,
|
||||
AttrStyle::Outer => false,
|
||||
}
|
||||
}
|
||||
self.iter().filter(is_inner)
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(feature = "parsing")]
pub(crate) mod parsing {
    use super::*;
    use crate::parse::discouraged::Speculative as _;
    use crate::parse::{Parse, ParseStream, Result};
    use std::fmt::{self, Display};

    // Parse all leading inner attributes (`#![...]`) from the stream into
    // `attrs`. Stops at the first token pair that is not `#` `!`.
    pub(crate) fn parse_inner(input: ParseStream, attrs: &mut Vec<Attribute>) -> Result<()> {
        while input.peek(Token![#]) && input.peek2(Token![!]) {
            attrs.push(input.call(parsing::single_parse_inner)?);
        }
        Ok(())
    }

    // Parse one inner attribute: `#` `!` `[` meta `]`.
    pub(crate) fn single_parse_inner(input: ParseStream) -> Result<Attribute> {
        let content;
        Ok(Attribute {
            pound_token: input.parse()?,
            style: AttrStyle::Inner(input.parse()?),
            bracket_token: bracketed!(content in input),
            meta: content.parse()?,
        })
    }

    // Parse one outer attribute: `#` `[` meta `]`.
    pub(crate) fn single_parse_outer(input: ParseStream) -> Result<Attribute> {
        let content;
        Ok(Attribute {
            pound_token: input.parse()?,
            style: AttrStyle::Outer,
            bracket_token: bracketed!(content in input),
            meta: content.parse()?,
        })
    }

    #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
    impl Parse for Meta {
        fn parse(input: ParseStream) -> Result<Self> {
            // Attribute paths are mod-style paths (no generic arguments).
            let path = input.call(Path::parse_mod_style)?;
            parse_meta_after_path(path, input)
        }
    }

    #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
    impl Parse for MetaList {
        fn parse(input: ParseStream) -> Result<Self> {
            let path = input.call(Path::parse_mod_style)?;
            parse_meta_list_after_path(path, input)
        }
    }

    #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
    impl Parse for MetaNameValue {
        fn parse(input: ParseStream) -> Result<Self> {
            let path = input.call(Path::parse_mod_style)?;
            parse_meta_name_value_after_path(path, input)
        }
    }

    // Decide which Meta variety follows an already-parsed path by peeking at
    // the next token: a delimiter means List, `=` means NameValue, anything
    // else leaves a bare Path.
    pub(crate) fn parse_meta_after_path(path: Path, input: ParseStream) -> Result<Meta> {
        if input.peek(token::Paren) || input.peek(token::Bracket) || input.peek(token::Brace) {
            parse_meta_list_after_path(path, input).map(Meta::List)
        } else if input.peek(Token![=]) {
            parse_meta_name_value_after_path(path, input).map(Meta::NameValue)
        } else {
            Ok(Meta::Path(path))
        }
    }

    fn parse_meta_list_after_path(path: Path, input: ParseStream) -> Result<MetaList> {
        let (delimiter, tokens) = mac::parse_delimiter(input)?;
        Ok(MetaList {
            path,
            delimiter,
            tokens,
        })
    }

    fn parse_meta_name_value_after_path(path: Path, input: ParseStream) -> Result<MetaNameValue> {
        let eq_token: Token![=] = input.parse()?;
        // Speculatively try a lone literal on a fork; commit only if it
        // consumed the entire remaining input, otherwise fall back to
        // parsing a full expression from the unforked stream.
        let ahead = input.fork();
        let lit: Option<Lit> = ahead.parse()?;
        let value = if let (Some(lit), true) = (lit, ahead.is_empty()) {
            input.advance_to(&ahead);
            Expr::Lit(ExprLit {
                attrs: Vec::new(),
                lit,
            })
        } else if input.peek(Token![#]) && input.peek2(token::Bracket) {
            // An attribute is not a legal value expression here; report it
            // clearly instead of letting expression parsing fail obscurely.
            return Err(input.error("unexpected attribute inside of attribute"));
        } else {
            input.parse()?
        };
        Ok(MetaNameValue {
            path,
            eq_token,
            value,
        })
    }

    // Renders `#` or `#!` for use in error messages.
    pub(super) struct DisplayAttrStyle<'a>(pub &'a AttrStyle);

    impl<'a> Display for DisplayAttrStyle<'a> {
        fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
            formatter.write_str(match self.0 {
                AttrStyle::Outer => "#",
                AttrStyle::Inner(_) => "#!",
            })
        }
    }

    // Renders a Path as `a::b::c` (segment idents only) for error messages.
    pub(super) struct DisplayPath<'a>(pub &'a Path);

    impl<'a> Display for DisplayPath<'a> {
        fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
            for (i, segment) in self.0.segments.iter().enumerate() {
                if i > 0 || self.0.leading_colon.is_some() {
                    formatter.write_str("::")?;
                }
                write!(formatter, "{}", segment.ident)?;
            }
            Ok(())
        }
    }
}
|
||||
|
||||
#[cfg(feature = "printing")]
mod printing {
    use super::*;
    use proc_macro2::TokenStream;
    use quote::ToTokens;

    #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))]
    impl ToTokens for Attribute {
        fn to_tokens(&self, tokens: &mut TokenStream) {
            // `#`, then `!` for inner attributes only, then `[` meta `]`.
            self.pound_token.to_tokens(tokens);
            if let AttrStyle::Inner(b) = &self.style {
                b.to_tokens(tokens);
            }
            self.bracket_token.surround(tokens, |tokens| {
                self.meta.to_tokens(tokens);
            });
        }
    }

    #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))]
    impl ToTokens for MetaList {
        fn to_tokens(&self, tokens: &mut TokenStream) {
            // Path followed by the delimited argument tokens, e.g.
            // `derive(Copy)`.
            self.path.to_tokens(tokens);
            self.delimiter.surround(tokens, self.tokens.clone());
        }
    }

    #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))]
    impl ToTokens for MetaNameValue {
        fn to_tokens(&self, tokens: &mut TokenStream) {
            // `path = value`.
            self.path.to_tokens(tokens);
            self.eq_token.to_tokens(tokens);
            self.value.to_tokens(tokens);
        }
    }
}
|
||||
66
vendor/syn/src/bigint.rs
vendored
Normal file
66
vendor/syn/src/bigint.rs
vendored
Normal file
@@ -0,0 +1,66 @@
|
||||
use std::ops::{AddAssign, MulAssign};
|
||||
|
||||
// Arbitrary-precision decimal accumulator backing base10_digits() on LitInt.
// Digits are stored little-endian, one decimal digit (0..=9) per byte.
pub(crate) struct BigInt {
    digits: Vec<u8>,
}

impl BigInt {
    /// An empty accumulator; renders as "0".
    pub(crate) fn new() -> Self {
        BigInt { digits: Vec::new() }
    }

    /// Render the value in base 10 with no leading zeros.
    pub(crate) fn to_string(&self) -> String {
        // Most-significant digit first, skipping the run of leading zeros.
        let repr: String = self
            .digits
            .iter()
            .rev()
            .skip_while(|digit| **digit == 0)
            .map(|digit| (digit + b'0') as char)
            .collect();

        if repr.is_empty() {
            // All digits were zero (or there were none at all).
            String::from("0")
        } else {
            repr
        }
    }

    /// Extend the digit vector so its top two positions are zero, leaving
    /// headroom for the carries produced by the arithmetic impls below.
    fn reserve_two_digits(&mut self) {
        let mut desired = self.digits.len();
        if !self.digits.ends_with(&[0, 0]) {
            desired += 1;
        }
        if !self.digits.ends_with(&[0]) {
            desired += 1;
        }
        self.digits.resize(desired, 0);
    }
}

impl AddAssign<u8> for BigInt {
    // Assumes increment <16, so the carry chain dies out within the two
    // reserved zero digits.
    fn add_assign(&mut self, mut increment: u8) {
        self.reserve_two_digits();

        let mut position = 0;
        while increment > 0 {
            let sum = self.digits[position] + increment;
            self.digits[position] = sum % 10;
            increment = sum / 10;
            position += 1;
        }
    }
}

impl MulAssign<u8> for BigInt {
    // Assumes base <=16, so each per-digit product plus carry fits in a u8.
    fn mul_assign(&mut self, base: u8) {
        self.reserve_two_digits();

        let mut carry = 0;
        for digit in self.digits.iter_mut() {
            let product = *digit * base + carry;
            *digit = product % 10;
            carry = product / 10;
        }
    }
}
|
||||
432
vendor/syn/src/buffer.rs
vendored
Normal file
432
vendor/syn/src/buffer.rs
vendored
Normal file
@@ -0,0 +1,432 @@
|
||||
//! A stably addressed token buffer supporting efficient traversal based on a
|
||||
//! cheaply copyable cursor.
|
||||
|
||||
// This module is heavily commented as it contains most of the unsafe code in
|
||||
// Syn, and caution should be used when editing it. The public-facing interface
|
||||
// is 100% safe but the implementation is fragile internally.
|
||||
|
||||
use crate::Lifetime;
|
||||
use proc_macro2::extra::DelimSpan;
|
||||
use proc_macro2::{Delimiter, Group, Ident, Literal, Punct, Spacing, Span, TokenStream, TokenTree};
|
||||
use std::cmp::Ordering;
|
||||
use std::marker::PhantomData;
|
||||
|
||||
/// Internal type which is used instead of `TokenTree` to represent a token tree
|
||||
/// within a `TokenBuffer`.
|
||||
enum Entry {
    // Mimicking types from proc-macro.
    // Group entries contain the offset to the matching End entry.
    Group(Group, usize),
    Ident(Ident),
    Punct(Punct),
    Literal(Literal),
    // End entries contain the offset (negative) to the start of the buffer.
    // One End terminates each Group's contents, and one final End terminates
    // the buffer as a whole.
    End(isize),
}
|
||||
|
||||
/// A buffer that can be efficiently traversed multiple times, unlike
|
||||
/// `TokenStream` which requires a deep copy in order to traverse more than
|
||||
/// once.
|
||||
pub struct TokenBuffer {
    // NOTE: Do not implement clone on this - while the current design could be
    // cloned, other designs which could be desirable may not be cloneable.
    //
    // Flattened token trees: groups are stored as a Group entry followed by
    // their contents and a matching End entry. Boxed so the entries have a
    // stable address that `Cursor` raw pointers can rely on.
    entries: Box<[Entry]>,
}
|
||||
|
||||
impl TokenBuffer {
    /// Appends the flattened form of `stream` onto `entries`, recursing into
    /// groups. Each group becomes a `Group` entry (holding the forward offset
    /// to its matching `End`) followed by its contents and an `End` entry
    /// (holding the negative offset back to the start of the buffer).
    fn recursive_new(entries: &mut Vec<Entry>, stream: TokenStream) {
        for tt in stream {
            match tt {
                TokenTree::Ident(ident) => entries.push(Entry::Ident(ident)),
                TokenTree::Punct(punct) => entries.push(Entry::Punct(punct)),
                TokenTree::Literal(literal) => entries.push(Entry::Literal(literal)),
                TokenTree::Group(group) => {
                    let group_start_index = entries.len();
                    // Placeholder until the end offset is known.
                    entries.push(Entry::End(0)); // we replace this below
                    Self::recursive_new(entries, group.stream());
                    let group_end_index = entries.len();
                    entries.push(Entry::End(-(group_end_index as isize)));
                    let group_end_offset = group_end_index - group_start_index;
                    entries[group_start_index] = Entry::Group(group, group_end_offset);
                }
            }
        }
    }

    /// Creates a `TokenBuffer` containing all the tokens from the input
    /// `proc_macro::TokenStream`.
    #[cfg(feature = "proc-macro")]
    #[cfg_attr(doc_cfg, doc(cfg(feature = "proc-macro")))]
    pub fn new(stream: proc_macro::TokenStream) -> Self {
        Self::new2(stream.into())
    }

    /// Creates a `TokenBuffer` containing all the tokens from the input
    /// `proc_macro2::TokenStream`.
    pub fn new2(stream: TokenStream) -> Self {
        let mut entries = Vec::new();
        Self::recursive_new(&mut entries, stream);
        // Terminating End entry; its offset points back to the buffer start,
        // which start_of_buffer() relies on.
        entries.push(Entry::End(-(entries.len() as isize)));
        Self {
            entries: entries.into_boxed_slice(),
        }
    }

    /// Creates a cursor referencing the first token in the buffer and able to
    /// traverse until the end of the buffer.
    pub fn begin(&self) -> Cursor {
        let ptr = self.entries.as_ptr();
        // SAFETY: `entries` always contains at least the terminating End
        // entry pushed in new2, so `len() - 1` is in bounds; both pointers
        // stay inside the boxed slice borrowed for the cursor's lifetime.
        unsafe { Cursor::create(ptr, ptr.add(self.entries.len() - 1)) }
    }
}
|
||||
|
||||
/// A cheaply copyable cursor into a `TokenBuffer`.
|
||||
///
|
||||
/// This cursor holds a shared reference into the immutable data which is used
|
||||
/// internally to represent a `TokenStream`, and can be efficiently manipulated
|
||||
/// and copied around.
|
||||
///
|
||||
/// An empty `Cursor` can be created directly, or one may create a `TokenBuffer`
|
||||
/// object and get a cursor to its first token with `begin()`.
|
||||
pub struct Cursor<'a> {
    // The current entry which the `Cursor` is pointing at.
    ptr: *const Entry,
    // This is the only `Entry::End` object which this cursor is allowed to
    // point at. All other `End` objects are skipped over in `Cursor::create`.
    scope: *const Entry,
    // Cursor is covariant in 'a. This field ensures that our pointers are still
    // valid.
    marker: PhantomData<&'a Entry>,
}
|
||||
|
||||
impl<'a> Cursor<'a> {
    /// Creates a cursor referencing a static empty TokenStream.
    pub fn empty() -> Self {
        // It's safe in this situation for us to put an `Entry` object in global
        // storage, despite it not actually being safe to send across threads
        // (`Ident` is a reference into a thread-local table). This is because
        // this entry never includes a `Ident` object.
        //
        // This wrapper struct allows us to break the rules and put a `Sync`
        // object in global storage.
        struct UnsafeSyncEntry(Entry);
        unsafe impl Sync for UnsafeSyncEntry {}
        static EMPTY_ENTRY: UnsafeSyncEntry = UnsafeSyncEntry(Entry::End(0));

        Cursor {
            ptr: &EMPTY_ENTRY.0,
            scope: &EMPTY_ENTRY.0,
            marker: PhantomData,
        }
    }

    /// This create method intelligently exits non-explicitly-entered
    /// `None`-delimited scopes when the cursor reaches the end of them,
    /// allowing for them to be treated transparently.
    unsafe fn create(mut ptr: *const Entry, scope: *const Entry) -> Self {
        // NOTE: If we're looking at a `End`, we want to advance the cursor
        // past it, unless `ptr == scope`, which means that we're at the edge of
        // our cursor's scope. We should only have `ptr != scope` at the exit
        // from None-delimited groups entered with `ignore_none`.
        while let Entry::End(_) = unsafe { &*ptr } {
            if ptr == scope {
                break;
            }
            ptr = unsafe { ptr.add(1) };
        }

        Cursor {
            ptr,
            scope,
            marker: PhantomData,
        }
    }

    /// Get the current entry.
    fn entry(self) -> &'a Entry {
        // SAFETY: `ptr` always points into a live buffer for lifetime 'a,
        // maintained as an invariant by every constructor of Cursor.
        unsafe { &*self.ptr }
    }

    /// Bump the cursor to point at the next token after the current one. This
    /// is undefined behavior if the cursor is currently looking at an
    /// `Entry::End`.
    ///
    /// If the cursor is looking at an `Entry::Group`, the bumped cursor will
    /// point at the first token in the group (with the same scope end).
    unsafe fn bump_ignore_group(self) -> Cursor<'a> {
        unsafe { Cursor::create(self.ptr.offset(1), self.scope) }
    }

    /// While the cursor is looking at a `None`-delimited group, move it to look
    /// at the first token inside instead. If the group is empty, this will move
    /// the cursor past the `None`-delimited group.
    ///
    /// WARNING: This mutates its argument.
    fn ignore_none(&mut self) {
        while let Entry::Group(group, _) = self.entry() {
            if group.delimiter() == Delimiter::None {
                unsafe { *self = self.bump_ignore_group() };
            } else {
                break;
            }
        }
    }

    /// Checks whether the cursor is currently pointing at the end of its valid
    /// scope.
    pub fn eof(self) -> bool {
        // We're at eof if we're at the end of our scope.
        self.ptr == self.scope
    }

    /// If the cursor is pointing at a `Group` with the given delimiter, returns
    /// a cursor into that group and one pointing to the next `TokenTree`.
    pub fn group(mut self, delim: Delimiter) -> Option<(Cursor<'a>, DelimSpan, Cursor<'a>)> {
        // If we're not trying to enter a none-delimited group, we want to
        // ignore them. We have to make sure to _not_ ignore them when we want
        // to enter them, of course. For obvious reasons.
        if delim != Delimiter::None {
            self.ignore_none();
        }

        if let Entry::Group(group, end_offset) = self.entry() {
            if group.delimiter() == delim {
                let span = group.delim_span();
                // The group's End entry becomes the inner cursor's scope,
                // confining it to the group's contents.
                let end_of_group = unsafe { self.ptr.add(*end_offset) };
                let inside_of_group = unsafe { Cursor::create(self.ptr.add(1), end_of_group) };
                let after_group = unsafe { Cursor::create(end_of_group, self.scope) };
                return Some((inside_of_group, span, after_group));
            }
        }

        None
    }

    /// Like `group`, but matches a group with any delimiter and reports which
    /// delimiter was found.
    pub(crate) fn any_group(self) -> Option<(Cursor<'a>, Delimiter, DelimSpan, Cursor<'a>)> {
        if let Entry::Group(group, end_offset) = self.entry() {
            let delimiter = group.delimiter();
            let span = group.delim_span();
            let end_of_group = unsafe { self.ptr.add(*end_offset) };
            let inside_of_group = unsafe { Cursor::create(self.ptr.add(1), end_of_group) };
            let after_group = unsafe { Cursor::create(end_of_group, self.scope) };
            return Some((inside_of_group, delimiter, span, after_group));
        }

        None
    }

    /// If the cursor is pointing at any `Group`, returns a clone of the group
    /// token itself along with a cursor past it.
    pub(crate) fn any_group_token(self) -> Option<(Group, Cursor<'a>)> {
        if let Entry::Group(group, end_offset) = self.entry() {
            let end_of_group = unsafe { self.ptr.add(*end_offset) };
            let after_group = unsafe { Cursor::create(end_of_group, self.scope) };
            return Some((group.clone(), after_group));
        }

        None
    }

    /// If the cursor is pointing at a `Ident`, returns it along with a cursor
    /// pointing at the next `TokenTree`.
    pub fn ident(mut self) -> Option<(Ident, Cursor<'a>)> {
        self.ignore_none();
        match self.entry() {
            Entry::Ident(ident) => Some((ident.clone(), unsafe { self.bump_ignore_group() })),
            _ => None,
        }
    }

    /// If the cursor is pointing at a `Punct`, returns it along with a cursor
    /// pointing at the next `TokenTree`.
    pub fn punct(mut self) -> Option<(Punct, Cursor<'a>)> {
        self.ignore_none();
        match self.entry() {
            // An apostrophe is the start of a lifetime, not a punct; see
            // `lifetime` below.
            Entry::Punct(punct) if punct.as_char() != '\'' => {
                Some((punct.clone(), unsafe { self.bump_ignore_group() }))
            }
            _ => None,
        }
    }

    /// If the cursor is pointing at a `Literal`, return it along with a cursor
    /// pointing at the next `TokenTree`.
    pub fn literal(mut self) -> Option<(Literal, Cursor<'a>)> {
        self.ignore_none();
        match self.entry() {
            Entry::Literal(literal) => Some((literal.clone(), unsafe { self.bump_ignore_group() })),
            _ => None,
        }
    }

    /// If the cursor is pointing at a `Lifetime`, returns it along with a
    /// cursor pointing at the next `TokenTree`.
    pub fn lifetime(mut self) -> Option<(Lifetime, Cursor<'a>)> {
        self.ignore_none();
        match self.entry() {
            // A lifetime is a joint apostrophe punct immediately followed by
            // an ident.
            Entry::Punct(punct) if punct.as_char() == '\'' && punct.spacing() == Spacing::Joint => {
                let next = unsafe { self.bump_ignore_group() };
                let (ident, rest) = next.ident()?;
                let lifetime = Lifetime {
                    apostrophe: punct.span(),
                    ident,
                };
                Some((lifetime, rest))
            }
            _ => None,
        }
    }

    /// Copies all remaining tokens visible from this cursor into a
    /// `TokenStream`.
    pub fn token_stream(self) -> TokenStream {
        let mut tts = Vec::new();
        let mut cursor = self;
        while let Some((tt, rest)) = cursor.token_tree() {
            tts.push(tt);
            cursor = rest;
        }
        tts.into_iter().collect()
    }

    /// If the cursor is pointing at a `TokenTree`, returns it along with a
    /// cursor pointing at the next `TokenTree`.
    ///
    /// Returns `None` if the cursor has reached the end of its stream.
    ///
    /// This method does not treat `None`-delimited groups as transparent, and
    /// will return a `Group(None, ..)` if the cursor is looking at one.
    pub fn token_tree(self) -> Option<(TokenTree, Cursor<'a>)> {
        // `len` is how far to advance: past the whole group (its end offset)
        // for a Group entry, one entry otherwise.
        let (tree, len) = match self.entry() {
            Entry::Group(group, end_offset) => (group.clone().into(), *end_offset),
            Entry::Literal(literal) => (literal.clone().into(), 1),
            Entry::Ident(ident) => (ident.clone().into(), 1),
            Entry::Punct(punct) => (punct.clone().into(), 1),
            Entry::End(_) => return None,
        };

        let rest = unsafe { Cursor::create(self.ptr.add(len), self.scope) };
        Some((tree, rest))
    }

    /// Returns the `Span` of the current token, or `Span::call_site()` if this
    /// cursor points to eof.
    pub fn span(self) -> Span {
        match self.entry() {
            Entry::Group(group, _) => group.span(),
            Entry::Literal(literal) => literal.span(),
            Entry::Ident(ident) => ident.span(),
            Entry::Punct(punct) => punct.span(),
            Entry::End(_) => Span::call_site(),
        }
    }

    /// Returns the `Span` of the token immediately prior to the position of
    /// this cursor, or of the current token if there is no previous one.
    #[cfg(any(feature = "full", feature = "derive"))]
    pub(crate) fn prev_span(mut self) -> Span {
        if start_of_buffer(self) < self.ptr {
            self.ptr = unsafe { self.ptr.offset(-1) };
            if let Entry::End(_) = self.entry() {
                // Locate the matching Group begin token.
                // Walk backwards, balancing nested End/Group pairs, until the
                // Group that owns the End we just stepped onto is found.
                let mut depth = 1;
                loop {
                    self.ptr = unsafe { self.ptr.offset(-1) };
                    match self.entry() {
                        Entry::Group(group, _) => {
                            depth -= 1;
                            if depth == 0 {
                                return group.span();
                            }
                        }
                        Entry::End(_) => depth += 1,
                        Entry::Literal(_) | Entry::Ident(_) | Entry::Punct(_) => {}
                    }
                }
            }
        }
        self.span()
    }

    /// Skip over the next token without cloning it. Returns `None` if this
    /// cursor points to eof.
    ///
    /// This method treats `'lifetimes` as a single token.
    pub(crate) fn skip(self) -> Option<Cursor<'a>> {
        let len = match self.entry() {
            Entry::End(_) => return None,

            // Treat lifetimes as a single tt for the purposes of 'skip'.
            Entry::Punct(punct) if punct.as_char() == '\'' && punct.spacing() == Spacing::Joint => {
                match unsafe { &*self.ptr.add(1) } {
                    Entry::Ident(_) => 2,
                    _ => 1,
                }
            }

            Entry::Group(_, end_offset) => *end_offset,
            _ => 1,
        };

        Some(unsafe { Cursor::create(self.ptr.add(len), self.scope) })
    }
}
|
||||
|
||||
// A cursor is two raw pointers plus PhantomData, so copying is trivial.
impl<'a> Copy for Cursor<'a> {}
|
||||
|
||||
impl<'a> Clone for Cursor<'a> {
    fn clone(&self) -> Self {
        *self
    }
}
|
||||
|
||||
impl<'a> Eq for Cursor<'a> {}
|
||||
|
||||
impl<'a> PartialEq for Cursor<'a> {
    // Equality compares only the current position; the scope pointer is
    // deliberately ignored.
    fn eq(&self, other: &Self) -> bool {
        self.ptr == other.ptr
    }
}
|
||||
|
||||
impl<'a> PartialOrd for Cursor<'a> {
|
||||
fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
|
||||
if same_buffer(*self, *other) {
|
||||
Some(cmp_assuming_same_buffer(*self, *other))
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Whether two cursors are confined to the same scope (same `Entry::End`).
pub(crate) fn same_scope(a: Cursor, b: Cursor) -> bool {
    a.scope == b.scope
}
|
||||
|
||||
/// Whether two cursors point into the same underlying `TokenBuffer`.
pub(crate) fn same_buffer(a: Cursor, b: Cursor) -> bool {
    start_of_buffer(a) == start_of_buffer(b)
}
|
||||
|
||||
/// Recovers a pointer to the first entry of the buffer containing `cursor`,
/// using the negative offset stored in the scope's `End` entry.
fn start_of_buffer(cursor: Cursor) -> *const Entry {
    // SAFETY: a cursor's scope always points at an in-bounds Entry::End
    // whose stored offset leads back to index 0 of the same allocation.
    unsafe {
        match &*cursor.scope {
            Entry::End(offset) => cursor.scope.offset(*offset),
            _ => unreachable!(),
        }
    }
}
|
||||
|
||||
/// Orders two cursors by position. Only meaningful when both point into the
/// same buffer (pointer comparison across allocations is not meaningful).
pub(crate) fn cmp_assuming_same_buffer(a: Cursor, b: Cursor) -> Ordering {
    a.ptr.cmp(&b.ptr)
}
|
||||
|
||||
pub(crate) fn open_span_of_group(cursor: Cursor) -> Span {
|
||||
match cursor.entry() {
|
||||
Entry::Group(group, _) => group.span_open(),
|
||||
_ => cursor.span(),
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn close_span_of_group(cursor: Cursor) -> Span {
|
||||
match cursor.entry() {
|
||||
Entry::Group(group, _) => group.span_close(),
|
||||
_ => cursor.span(),
|
||||
}
|
||||
}
|
||||
259
vendor/syn/src/custom_keyword.rs
vendored
Normal file
259
vendor/syn/src/custom_keyword.rs
vendored
Normal file
@@ -0,0 +1,259 @@
|
||||
/// Define a type that supports parsing and printing a given identifier as if it
|
||||
/// were a keyword.
|
||||
///
|
||||
/// # Usage
|
||||
///
|
||||
/// As a convention, it is recommended that this macro be invoked within a
|
||||
/// module called `kw` or `keyword` and that the resulting parser be invoked
|
||||
/// with a `kw::` or `keyword::` prefix.
|
||||
///
|
||||
/// ```
|
||||
/// mod kw {
|
||||
/// syn::custom_keyword!(whatever);
|
||||
/// }
|
||||
/// ```
|
||||
///
|
||||
/// The generated syntax tree node supports the following operations just like
|
||||
/// any built-in keyword token.
|
||||
///
|
||||
/// - [Peeking] — `input.peek(kw::whatever)`
|
||||
///
|
||||
/// - [Parsing] — `input.parse::<kw::whatever>()?`
|
||||
///
|
||||
/// - [Printing] — `quote!( ... #whatever_token ... )`
|
||||
///
|
||||
/// - Construction from a [`Span`] — `let whatever_token = kw::whatever(sp)`
|
||||
///
|
||||
/// - Field access to its span — `let sp = whatever_token.span`
|
||||
///
|
||||
/// [Peeking]: crate::parse::ParseBuffer::peek
|
||||
/// [Parsing]: crate::parse::ParseBuffer::parse
|
||||
/// [Printing]: quote::ToTokens
|
||||
/// [`Span`]: proc_macro2::Span
|
||||
///
|
||||
/// # Example
|
||||
///
|
||||
/// This example parses input that looks like `bool = true` or `str = "value"`.
|
||||
/// The key must be either the identifier `bool` or the identifier `str`. If
|
||||
/// `bool`, the value may be either `true` or `false`. If `str`, the value may
|
||||
/// be any string literal.
|
||||
///
|
||||
/// The symbols `bool` and `str` are not reserved keywords in Rust so these are
|
||||
/// not considered keywords in the `syn::token` module. Like any other
|
||||
/// identifier that is not a keyword, these can be declared as custom keywords
|
||||
/// by crates that need to use them as such.
|
||||
///
|
||||
/// ```
|
||||
/// use syn::{LitBool, LitStr, Result, Token};
|
||||
/// use syn::parse::{Parse, ParseStream};
|
||||
///
|
||||
/// mod kw {
|
||||
/// syn::custom_keyword!(bool);
|
||||
/// syn::custom_keyword!(str);
|
||||
/// }
|
||||
///
|
||||
/// enum Argument {
|
||||
/// Bool {
|
||||
/// bool_token: kw::bool,
|
||||
/// eq_token: Token![=],
|
||||
/// value: LitBool,
|
||||
/// },
|
||||
/// Str {
|
||||
/// str_token: kw::str,
|
||||
/// eq_token: Token![=],
|
||||
/// value: LitStr,
|
||||
/// },
|
||||
/// }
|
||||
///
|
||||
/// impl Parse for Argument {
|
||||
/// fn parse(input: ParseStream) -> Result<Self> {
|
||||
/// let lookahead = input.lookahead1();
|
||||
/// if lookahead.peek(kw::bool) {
|
||||
/// Ok(Argument::Bool {
|
||||
/// bool_token: input.parse::<kw::bool>()?,
|
||||
/// eq_token: input.parse()?,
|
||||
/// value: input.parse()?,
|
||||
/// })
|
||||
/// } else if lookahead.peek(kw::str) {
|
||||
/// Ok(Argument::Str {
|
||||
/// str_token: input.parse::<kw::str>()?,
|
||||
/// eq_token: input.parse()?,
|
||||
/// value: input.parse()?,
|
||||
/// })
|
||||
/// } else {
|
||||
/// Err(lookahead.error())
|
||||
/// }
|
||||
/// }
|
||||
/// }
|
||||
/// ```
|
||||
#[macro_export]
macro_rules! custom_keyword {
    ($ident:ident) => {
        #[allow(non_camel_case_types)]
        pub struct $ident {
            pub span: $crate::__private::Span,
        }

        // Constructor function with the same name as the type, so the token
        // can be built as `kw::whatever(span)`.
        #[doc(hidden)]
        #[allow(dead_code, non_snake_case)]
        pub fn $ident<__S: $crate::__private::IntoSpans<$crate::__private::Span>>(
            span: __S,
        ) -> $ident {
            $ident {
                span: $crate::__private::IntoSpans::into_spans(span),
            }
        }

        const _: () = {
            impl $crate::__private::Default for $ident {
                fn default() -> Self {
                    $ident {
                        span: $crate::__private::Span::call_site(),
                    }
                }
            }

            // Remaining trait impls live in helper macros so each can be
            // compiled out when its cargo feature is disabled.
            $crate::impl_parse_for_custom_keyword!($ident);
            $crate::impl_to_tokens_for_custom_keyword!($ident);
            $crate::impl_clone_for_custom_keyword!($ident);
            $crate::impl_extra_traits_for_custom_keyword!($ident);
        };
    };
}
|
||||
|
||||
// Not public API.
#[cfg(feature = "parsing")]
#[doc(hidden)]
#[macro_export]
macro_rules! impl_parse_for_custom_keyword {
    ($ident:ident) => {
        // For peek.
        impl $crate::__private::CustomToken for $ident {
            fn peek(cursor: $crate::buffer::Cursor) -> $crate::__private::bool {
                // A custom keyword matches any identifier with its exact name.
                if let $crate::__private::Some((ident, _rest)) = cursor.ident() {
                    ident == $crate::__private::stringify!($ident)
                } else {
                    false
                }
            }

            fn display() -> &'static $crate::__private::str {
                $crate::__private::concat!("`", $crate::__private::stringify!($ident), "`")
            }
        }

        impl $crate::parse::Parse for $ident {
            fn parse(input: $crate::parse::ParseStream) -> $crate::parse::Result<$ident> {
                input.step(|cursor| {
                    if let $crate::__private::Some((ident, rest)) = cursor.ident() {
                        if ident == $crate::__private::stringify!($ident) {
                            return $crate::__private::Ok(($ident { span: ident.span() }, rest));
                        }
                    }
                    $crate::__private::Err(cursor.error($crate::__private::concat!(
                        "expected `",
                        $crate::__private::stringify!($ident),
                        "`",
                    )))
                })
            }
        }
    };
}
|
||||
|
||||
// Not public API.
// No-op fallback when the "parsing" feature is disabled.
#[cfg(not(feature = "parsing"))]
#[doc(hidden)]
#[macro_export]
macro_rules! impl_parse_for_custom_keyword {
    ($ident:ident) => {};
}
|
||||
|
||||
// Not public API.
#[cfg(feature = "printing")]
#[doc(hidden)]
#[macro_export]
macro_rules! impl_to_tokens_for_custom_keyword {
    ($ident:ident) => {
        impl $crate::__private::ToTokens for $ident {
            fn to_tokens(&self, tokens: &mut $crate::__private::TokenStream2) {
                // Print the keyword as a plain identifier carrying its span.
                let ident = $crate::Ident::new($crate::__private::stringify!($ident), self.span);
                $crate::__private::TokenStreamExt::append(tokens, ident);
            }
        }
    };
}
|
||||
|
||||
// Not public API.
// No-op fallback when the "printing" feature is disabled.
#[cfg(not(feature = "printing"))]
#[doc(hidden)]
#[macro_export]
macro_rules! impl_to_tokens_for_custom_keyword {
    ($ident:ident) => {};
}
|
||||
|
||||
// Not public API.
#[cfg(feature = "clone-impls")]
#[doc(hidden)]
#[macro_export]
macro_rules! impl_clone_for_custom_keyword {
    ($ident:ident) => {
        impl $crate::__private::Copy for $ident {}

        #[allow(clippy::expl_impl_clone_on_copy)]
        impl $crate::__private::Clone for $ident {
            fn clone(&self) -> Self {
                *self
            }
        }
    };
}
|
||||
|
||||
// Not public API.
// No-op fallback when the "clone-impls" feature is disabled.
#[cfg(not(feature = "clone-impls"))]
#[doc(hidden)]
#[macro_export]
macro_rules! impl_clone_for_custom_keyword {
    ($ident:ident) => {};
}
|
||||
|
||||
// Not public API.
#[cfg(feature = "extra-traits")]
#[doc(hidden)]
#[macro_export]
macro_rules! impl_extra_traits_for_custom_keyword {
    ($ident:ident) => {
        impl $crate::__private::Debug for $ident {
            fn fmt(&self, f: &mut $crate::__private::Formatter) -> $crate::__private::FmtResult {
                $crate::__private::Formatter::write_str(
                    f,
                    $crate::__private::concat!(
                        "Keyword [",
                        $crate::__private::stringify!($ident),
                        "]",
                    ),
                )
            }
        }

        impl $crate::__private::Eq for $ident {}

        // All tokens of one keyword type are interchangeable: equality
        // ignores the span, and hashing contributes nothing.
        impl $crate::__private::PartialEq for $ident {
            fn eq(&self, _other: &Self) -> $crate::__private::bool {
                true
            }
        }

        impl $crate::__private::Hash for $ident {
            fn hash<__H: $crate::__private::Hasher>(&self, _state: &mut __H) {}
        }
    };
}
|
||||
|
||||
// Not public API.
// No-op fallback when the "extra-traits" feature is disabled.
#[cfg(not(feature = "extra-traits"))]
#[doc(hidden)]
#[macro_export]
macro_rules! impl_extra_traits_for_custom_keyword {
    ($ident:ident) => {};
}
|
||||
302
vendor/syn/src/custom_punctuation.rs
vendored
Normal file
302
vendor/syn/src/custom_punctuation.rs
vendored
Normal file
@@ -0,0 +1,302 @@
|
||||
/// Define a type that supports parsing and printing a multi-character symbol
|
||||
/// as if it were a punctuation token.
|
||||
///
|
||||
/// # Usage
|
||||
///
|
||||
/// ```
|
||||
/// syn::custom_punctuation!(LeftRightArrow, <=>);
|
||||
/// ```
|
||||
///
|
||||
/// The generated syntax tree node supports the following operations just like
|
||||
/// any built-in punctuation token.
|
||||
///
|
||||
/// - [Peeking] — `input.peek(LeftRightArrow)`
|
||||
///
|
||||
/// - [Parsing] — `input.parse::<LeftRightArrow>()?`
|
||||
///
|
||||
/// - [Printing] — `quote!( ... #lrarrow ... )`
|
||||
///
|
||||
/// - Construction from a [`Span`] — `let lrarrow = LeftRightArrow(sp)`
|
||||
///
|
||||
/// - Construction from multiple [`Span`] — `let lrarrow = LeftRightArrow([sp, sp, sp])`
|
||||
///
|
||||
/// - Field access to its spans — `let spans = lrarrow.spans`
|
||||
///
|
||||
/// [Peeking]: crate::parse::ParseBuffer::peek
|
||||
/// [Parsing]: crate::parse::ParseBuffer::parse
|
||||
/// [Printing]: quote::ToTokens
|
||||
/// [`Span`]: proc_macro2::Span
|
||||
///
|
||||
/// # Example
|
||||
///
|
||||
/// ```
|
||||
/// use proc_macro2::{TokenStream, TokenTree};
|
||||
/// use syn::parse::{Parse, ParseStream, Peek, Result};
|
||||
/// use syn::punctuated::Punctuated;
|
||||
/// use syn::Expr;
|
||||
///
|
||||
/// syn::custom_punctuation!(PathSeparator, </>);
|
||||
///
|
||||
/// // expr </> expr </> expr ...
|
||||
/// struct PathSegments {
|
||||
/// segments: Punctuated<Expr, PathSeparator>,
|
||||
/// }
|
||||
///
|
||||
/// impl Parse for PathSegments {
|
||||
/// fn parse(input: ParseStream) -> Result<Self> {
|
||||
/// let mut segments = Punctuated::new();
|
||||
///
|
||||
/// let first = parse_until(input, PathSeparator)?;
|
||||
/// segments.push_value(syn::parse2(first)?);
|
||||
///
|
||||
/// while input.peek(PathSeparator) {
|
||||
/// segments.push_punct(input.parse()?);
|
||||
///
|
||||
/// let next = parse_until(input, PathSeparator)?;
|
||||
/// segments.push_value(syn::parse2(next)?);
|
||||
/// }
|
||||
///
|
||||
/// Ok(PathSegments { segments })
|
||||
/// }
|
||||
/// }
|
||||
///
|
||||
/// fn parse_until<E: Peek>(input: ParseStream, end: E) -> Result<TokenStream> {
|
||||
/// let mut tokens = TokenStream::new();
|
||||
/// while !input.is_empty() && !input.peek(end) {
|
||||
/// let next: TokenTree = input.parse()?;
|
||||
/// tokens.extend(Some(next));
|
||||
/// }
|
||||
/// Ok(tokens)
|
||||
/// }
|
||||
///
|
||||
/// fn main() {
|
||||
/// let input = r#" a::b </> c::d::e "#;
|
||||
/// let _: PathSegments = syn::parse_str(input).unwrap();
|
||||
/// }
|
||||
/// ```
|
||||
#[macro_export]
macro_rules! custom_punctuation {
    ($ident:ident, $($tt:tt)+) => {
        pub struct $ident {
            // One span per character of the punctuation sequence.
            pub spans: $crate::custom_punctuation_repr!($($tt)+),
        }

        // Constructor function with the same name as the type.
        #[doc(hidden)]
        #[allow(dead_code, non_snake_case)]
        pub fn $ident<__S: $crate::__private::IntoSpans<$crate::custom_punctuation_repr!($($tt)+)>>(
            spans: __S,
        ) -> $ident {
            // Forces a compile error (via custom_punctuation_unexpected!) if
            // any token in the sequence is not a recognized punctuation char.
            let _validate_len = 0 $(+ $crate::custom_punctuation_len!(strict, $tt))*;
            $ident {
                spans: $crate::__private::IntoSpans::into_spans(spans)
            }
        }

        const _: () = {
            impl $crate::__private::Default for $ident {
                fn default() -> Self {
                    $ident($crate::__private::Span::call_site())
                }
            }

            // Remaining trait impls live in helper macros so each can be
            // compiled out when its cargo feature is disabled.
            $crate::impl_parse_for_custom_punctuation!($ident, $($tt)+);
            $crate::impl_to_tokens_for_custom_punctuation!($ident, $($tt)+);
            $crate::impl_clone_for_custom_punctuation!($ident, $($tt)+);
            $crate::impl_extra_traits_for_custom_punctuation!($ident, $($tt)+);
        };
    };
}
|
||||
|
||||
// Not public API.
#[cfg(feature = "parsing")]
#[doc(hidden)]
#[macro_export]
macro_rules! impl_parse_for_custom_punctuation {
    ($ident:ident, $($tt:tt)+) => {
        impl $crate::__private::CustomToken for $ident {
            fn peek(cursor: $crate::buffer::Cursor) -> $crate::__private::bool {
                $crate::__private::peek_punct(cursor, $crate::stringify_punct!($($tt)+))
            }

            fn display() -> &'static $crate::__private::str {
                $crate::__private::concat!("`", $crate::stringify_punct!($($tt)+), "`")
            }
        }

        impl $crate::parse::Parse for $ident {
            fn parse(input: $crate::parse::ParseStream) -> $crate::parse::Result<$ident> {
                let spans: $crate::custom_punctuation_repr!($($tt)+) =
                    $crate::__private::parse_punct(input, $crate::stringify_punct!($($tt)+))?;
                Ok($ident(spans))
            }
        }
    };
}
|
||||
|
||||
// Not public API.
// No-op fallback when the "parsing" feature is disabled.
#[cfg(not(feature = "parsing"))]
#[doc(hidden)]
#[macro_export]
macro_rules! impl_parse_for_custom_punctuation {
    ($ident:ident, $($tt:tt)+) => {};
}
|
||||
|
||||
// Not public API.
#[cfg(feature = "printing")]
#[doc(hidden)]
#[macro_export]
macro_rules! impl_to_tokens_for_custom_punctuation {
    ($ident:ident, $($tt:tt)+) => {
        impl $crate::__private::ToTokens for $ident {
            fn to_tokens(&self, tokens: &mut $crate::__private::TokenStream2) {
                $crate::__private::print_punct($crate::stringify_punct!($($tt)+), &self.spans, tokens)
            }
        }
    };
}
|
||||
|
||||
// Not public API.
// No-op fallback when the "printing" feature is disabled.
#[cfg(not(feature = "printing"))]
#[doc(hidden)]
#[macro_export]
macro_rules! impl_to_tokens_for_custom_punctuation {
    ($ident:ident, $($tt:tt)+) => {};
}
|
||||
|
||||
// Not public API.
#[cfg(feature = "clone-impls")]
#[doc(hidden)]
#[macro_export]
macro_rules! impl_clone_for_custom_punctuation {
    ($ident:ident, $($tt:tt)+) => {
        impl $crate::__private::Copy for $ident {}

        #[allow(clippy::expl_impl_clone_on_copy)]
        impl $crate::__private::Clone for $ident {
            fn clone(&self) -> Self {
                *self
            }
        }
    };
}
|
||||
|
||||
// Not public API.
// No-op fallback when the "clone-impls" feature is disabled.
#[cfg(not(feature = "clone-impls"))]
#[doc(hidden)]
#[macro_export]
macro_rules! impl_clone_for_custom_punctuation {
    ($ident:ident, $($tt:tt)+) => {};
}
|
||||
|
||||
// Not public API.
#[cfg(feature = "extra-traits")]
#[doc(hidden)]
#[macro_export]
macro_rules! impl_extra_traits_for_custom_punctuation {
    ($ident:ident, $($tt:tt)+) => {
        impl $crate::__private::Debug for $ident {
            fn fmt(&self, f: &mut $crate::__private::Formatter) -> $crate::__private::FmtResult {
                $crate::__private::Formatter::write_str(f, $crate::__private::stringify!($ident))
            }
        }

        impl $crate::__private::Eq for $ident {}

        // All tokens of one punctuation type are interchangeable: equality
        // ignores the spans, and hashing contributes nothing.
        impl $crate::__private::PartialEq for $ident {
            fn eq(&self, _other: &Self) -> $crate::__private::bool {
                true
            }
        }

        impl $crate::__private::Hash for $ident {
            fn hash<__H: $crate::__private::Hasher>(&self, _state: &mut __H) {}
        }
    };
}
|
||||
|
||||
// Not public API.
// No-op fallback when the "extra-traits" feature is disabled.
#[cfg(not(feature = "extra-traits"))]
#[doc(hidden)]
#[macro_export]
macro_rules! impl_extra_traits_for_custom_punctuation {
    ($ident:ident, $($tt:tt)+) => {};
}
|
||||
|
||||
// Not public API.
// Expands to the `[Span; N]` array type whose length is the number of
// characters in the punctuation sequence.
#[doc(hidden)]
#[macro_export]
macro_rules! custom_punctuation_repr {
    ($($tt:tt)+) => {
        [$crate::__private::Span; 0 $(+ $crate::custom_punctuation_len!(lenient, $tt))+]
    };
}
|
||||
|
||||
// Not public API.
//
// Maps each supported Rust punctuation token to its character count, which is
// the number of `Span`s the custom token type must store. `$mode` selects the
// behavior for unrecognized tokens: `lenient` yields 0 (used when summing a
// repr length), while `strict` triggers `custom_punctuation_unexpected!`,
// producing a compile error for an unsupported token.
#[doc(hidden)]
#[macro_export]
#[rustfmt::skip]
macro_rules! custom_punctuation_len {
    ($mode:ident, +)     => { 1 };
    ($mode:ident, +=)    => { 2 };
    ($mode:ident, &)     => { 1 };
    ($mode:ident, &&)    => { 2 };
    ($mode:ident, &=)    => { 2 };
    ($mode:ident, @)     => { 1 };
    ($mode:ident, !)     => { 1 };
    ($mode:ident, ^)     => { 1 };
    ($mode:ident, ^=)    => { 2 };
    ($mode:ident, :)     => { 1 };
    ($mode:ident, ::)    => { 2 };
    ($mode:ident, ,)     => { 1 };
    ($mode:ident, /)     => { 1 };
    ($mode:ident, /=)    => { 2 };
    ($mode:ident, .)     => { 1 };
    ($mode:ident, ..)    => { 2 };
    ($mode:ident, ...)   => { 3 };
    ($mode:ident, ..=)   => { 3 };
    ($mode:ident, =)     => { 1 };
    ($mode:ident, ==)    => { 2 };
    ($mode:ident, >=)    => { 2 };
    ($mode:ident, >)     => { 1 };
    ($mode:ident, <=)    => { 2 };
    ($mode:ident, <)     => { 1 };
    ($mode:ident, *=)    => { 2 };
    ($mode:ident, !=)    => { 2 };
    ($mode:ident, |)     => { 1 };
    ($mode:ident, |=)    => { 2 };
    ($mode:ident, ||)    => { 2 };
    ($mode:ident, #)     => { 1 };
    ($mode:ident, ?)     => { 1 };
    ($mode:ident, ->)    => { 2 };
    ($mode:ident, <-)    => { 2 };
    ($mode:ident, %)     => { 1 };
    ($mode:ident, %=)    => { 2 };
    ($mode:ident, =>)    => { 2 };
    ($mode:ident, ;)     => { 1 };
    ($mode:ident, <<)    => { 2 };
    ($mode:ident, <<=)   => { 3 };
    ($mode:ident, >>)    => { 2 };
    ($mode:ident, >>=)   => { 3 };
    ($mode:ident, *)     => { 1 };
    ($mode:ident, -)     => { 1 };
    ($mode:ident, -=)    => { 2 };
    ($mode:ident, ~)     => { 1 };
    (lenient, $tt:tt)    => { 0 };
    (strict, $tt:tt)     => {{ $crate::custom_punctuation_unexpected!($tt); 0 }};
}
|
||||
|
||||
// Not public API.
//
// Accepts only an empty token stream, so invoking it with any token (as the
// strict arm of `custom_punctuation_len!` does) fails to match and produces a
// compile-time error for unsupported punctuation.
#[doc(hidden)]
#[macro_export]
macro_rules! custom_punctuation_unexpected {
    () => {};
}
|
||||
|
||||
// Not public API.
//
// Concatenates the stringified form of each punctuation token into a single
// string literal, e.g. `<` `-` `>` becomes "<->".
#[doc(hidden)]
#[macro_export]
macro_rules! stringify_punct {
    ($($tt:tt)+) => {
        $crate::__private::concat!($($crate::__private::stringify!($tt)),+)
    };
}
|
||||
404
vendor/syn/src/data.rs
vendored
Normal file
404
vendor/syn/src/data.rs
vendored
Normal file
@@ -0,0 +1,404 @@
|
||||
use super::*;
|
||||
use crate::punctuated::Punctuated;
|
||||
|
||||
ast_struct! {
    /// An enum variant.
    #[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))]
    pub struct Variant {
        /// Attributes attached to the variant.
        pub attrs: Vec<Attribute>,

        /// Name of the variant.
        pub ident: Ident,

        /// Content stored in the variant.
        pub fields: Fields,

        /// Explicit discriminant: `Variant = 1`
        pub discriminant: Option<(Token![=], Expr)>,
    }
}
|
||||
|
||||
ast_enum_of_structs! {
    /// Data stored within an enum variant or struct.
    ///
    /// # Syntax tree enum
    ///
    /// This type is a [syntax tree enum].
    ///
    /// [syntax tree enum]: Expr#syntax-tree-enums
    #[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))]
    pub enum Fields {
        /// Named fields of a struct or struct variant such as `Point { x: f64,
        /// y: f64 }`.
        Named(FieldsNamed),

        /// Unnamed fields of a tuple struct or tuple variant such as `Some(T)`.
        Unnamed(FieldsUnnamed),

        /// Unit struct or unit variant such as `None`.
        Unit,
    }
}
|
||||
|
||||
ast_struct! {
    /// Named fields of a struct or struct variant such as `Point { x: f64,
    /// y: f64 }`.
    #[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))]
    pub struct FieldsNamed {
        pub brace_token: token::Brace,
        pub named: Punctuated<Field, Token![,]>,
    }
}
|
||||
|
||||
ast_struct! {
    /// Unnamed fields of a tuple struct or tuple variant such as `Some(T)`.
    #[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))]
    pub struct FieldsUnnamed {
        pub paren_token: token::Paren,
        pub unnamed: Punctuated<Field, Token![,]>,
    }
}
|
||||
|
||||
impl Fields {
|
||||
/// Get an iterator over the borrowed [`Field`] items in this object. This
|
||||
/// iterator can be used to iterate over a named or unnamed struct or
|
||||
/// variant's fields uniformly.
|
||||
pub fn iter(&self) -> punctuated::Iter<Field> {
|
||||
match self {
|
||||
Fields::Unit => crate::punctuated::empty_punctuated_iter(),
|
||||
Fields::Named(f) => f.named.iter(),
|
||||
Fields::Unnamed(f) => f.unnamed.iter(),
|
||||
}
|
||||
}
|
||||
|
||||
/// Get an iterator over the mutably borrowed [`Field`] items in this
|
||||
/// object. This iterator can be used to iterate over a named or unnamed
|
||||
/// struct or variant's fields uniformly.
|
||||
pub fn iter_mut(&mut self) -> punctuated::IterMut<Field> {
|
||||
match self {
|
||||
Fields::Unit => crate::punctuated::empty_punctuated_iter_mut(),
|
||||
Fields::Named(f) => f.named.iter_mut(),
|
||||
Fields::Unnamed(f) => f.unnamed.iter_mut(),
|
||||
}
|
||||
}
|
||||
|
||||
/// Returns the number of fields.
|
||||
pub fn len(&self) -> usize {
|
||||
match self {
|
||||
Fields::Unit => 0,
|
||||
Fields::Named(f) => f.named.len(),
|
||||
Fields::Unnamed(f) => f.unnamed.len(),
|
||||
}
|
||||
}
|
||||
|
||||
/// Returns `true` if there are zero fields.
|
||||
pub fn is_empty(&self) -> bool {
|
||||
match self {
|
||||
Fields::Unit => true,
|
||||
Fields::Named(f) => f.named.is_empty(),
|
||||
Fields::Unnamed(f) => f.unnamed.is_empty(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl IntoIterator for Fields {
|
||||
type Item = Field;
|
||||
type IntoIter = punctuated::IntoIter<Field>;
|
||||
|
||||
fn into_iter(self) -> Self::IntoIter {
|
||||
match self {
|
||||
Fields::Unit => Punctuated::<Field, ()>::new().into_iter(),
|
||||
Fields::Named(f) => f.named.into_iter(),
|
||||
Fields::Unnamed(f) => f.unnamed.into_iter(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> IntoIterator for &'a Fields {
|
||||
type Item = &'a Field;
|
||||
type IntoIter = punctuated::Iter<'a, Field>;
|
||||
|
||||
fn into_iter(self) -> Self::IntoIter {
|
||||
self.iter()
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> IntoIterator for &'a mut Fields {
|
||||
type Item = &'a mut Field;
|
||||
type IntoIter = punctuated::IterMut<'a, Field>;
|
||||
|
||||
fn into_iter(self) -> Self::IntoIter {
|
||||
self.iter_mut()
|
||||
}
|
||||
}
|
||||
|
||||
ast_struct! {
    /// A field of a struct or enum variant.
    #[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))]
    pub struct Field {
        /// Attributes attached to the field.
        pub attrs: Vec<Attribute>,

        /// Visibility of the field.
        pub vis: Visibility,

        /// Field mutability (reserved for future language features; currently
        /// always `FieldMutability::None`).
        pub mutability: FieldMutability,

        /// Name of the field, if any.
        ///
        /// Fields of tuple structs have no names.
        pub ident: Option<Ident>,

        pub colon_token: Option<Token![:]>,

        /// Type of the field.
        pub ty: Type,
    }
}
|
||||
|
||||
#[cfg(feature = "parsing")]
pub(crate) mod parsing {
    use super::*;
    use crate::ext::IdentExt as _;
    #[cfg(not(feature = "full"))]
    use crate::parse::discouraged::Speculative as _;
    use crate::parse::{Parse, ParseStream, Result};

    #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
    impl Parse for Variant {
        fn parse(input: ParseStream) -> Result<Self> {
            let attrs = input.call(Attribute::parse_outer)?;
            // A visibility is accepted and discarded; variants carry no
            // visibility in the syntax tree.
            let _visibility: Visibility = input.parse()?;
            let ident: Ident = input.parse()?;
            let fields = if input.peek(token::Brace) {
                Fields::Named(input.parse()?)
            } else if input.peek(token::Paren) {
                Fields::Unnamed(input.parse()?)
            } else {
                Fields::Unit
            };
            let discriminant = if input.peek(Token![=]) {
                let eq_token: Token![=] = input.parse()?;
                #[cfg(feature = "full")]
                let discriminant: Expr = input.parse()?;
                // Without the "full" feature the expression grammar is limited,
                // so fall back to a lenient scan that captures the discriminant
                // verbatim when it does not parse as a supported Expr.
                #[cfg(not(feature = "full"))]
                let discriminant = {
                    let begin = input.fork();
                    let ahead = input.fork();
                    let mut discriminant: Result<Expr> = ahead.parse();
                    if discriminant.is_ok() {
                        input.advance_to(&ahead);
                    } else if scan_lenient_discriminant(input).is_ok() {
                        discriminant = Ok(Expr::Verbatim(verbatim::between(&begin, input)));
                    }
                    discriminant?
                };
                Some((eq_token, discriminant))
            } else {
                None
            };
            Ok(Variant {
                attrs,
                ident,
                fields,
                discriminant,
            })
        }
    }

    /// Best-effort scan over an enum discriminant expression, consuming tokens
    /// until the end of the expression (end of input or a `,`) or until an
    /// unsupported construct is hit. Used only without the "full" feature to
    /// delimit a verbatim discriminant.
    #[cfg(not(feature = "full"))]
    pub(crate) fn scan_lenient_discriminant(input: ParseStream) -> Result<()> {
        use proc_macro2::Delimiter::{self, Brace, Bracket, Parenthesis};

        // Consume one delimited group of the given kind, if present.
        let consume = |delimiter: Delimiter| {
            Result::unwrap(input.step(|cursor| match cursor.group(delimiter) {
                Some((_inside, _span, rest)) => Ok((true, rest)),
                None => Ok((false, *cursor)),
            }))
        };

        // Consume one punctuation/keyword token, if present.
        macro_rules! consume {
            [$token:tt] => {
                input.parse::<Option<Token![$token]>>().unwrap().is_some()
            };
        }

        // `initial` tracks whether we are at a position where an operand may
        // begin; `depth` counts pending if/match/while bodies.
        let mut initial = true;
        let mut depth = 0usize;
        loop {
            if initial {
                if consume![&] {
                    input.parse::<Option<Token![mut]>>()?;
                } else if consume![if] || consume![match] || consume![while] {
                    depth += 1;
                } else if input.parse::<Option<Lit>>()?.is_some()
                    || (consume(Brace) || consume(Bracket) || consume(Parenthesis))
                    || (consume![async] || consume![const] || consume![loop] || consume![unsafe])
                        && (consume(Brace) || break)
                {
                    initial = false;
                } else if consume![let] {
                    while !consume![=] {
                        if !((consume![|] || consume![ref] || consume![mut] || consume![@])
                            || (consume![!] || input.parse::<Option<Lit>>()?.is_some())
                            || (consume![..=] || consume![..] || consume![&] || consume![_])
                            || (consume(Brace) || consume(Bracket) || consume(Parenthesis)))
                        {
                            path::parsing::qpath(input, true)?;
                        }
                    }
                } else if input.parse::<Option<Lifetime>>()?.is_some() && !consume![:] {
                    break;
                } else if input.parse::<UnOp>().is_err() {
                    path::parsing::qpath(input, true)?;
                    initial = consume![!] || depth == 0 && input.peek(token::Brace);
                }
            } else if input.is_empty() || input.peek(Token![,]) {
                return Ok(());
            } else if depth > 0 && consume(Brace) {
                if consume![else] && !consume(Brace) {
                    initial = consume![if] || break;
                } else {
                    depth -= 1;
                }
            } else if input.parse::<BinOp>().is_ok() || (consume![..] | consume![=]) {
                // NOTE: non-short-circuiting `|` is deliberate here — both
                // `..` and `=` must be attempted even if the first succeeds.
                initial = true;
            } else if consume![.] {
                if input.parse::<Option<LitFloat>>()?.is_none()
                    && (input.parse::<Member>()?.is_named() && consume![::])
                {
                    AngleBracketedGenericArguments::do_parse(None, input)?;
                }
            } else if consume![as] {
                input.parse::<Type>()?;
            } else if !(consume(Brace) || consume(Bracket) || consume(Parenthesis)) {
                break;
            }
        }

        Err(input.error("unsupported expression"))
    }

    #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
    impl Parse for FieldsNamed {
        fn parse(input: ParseStream) -> Result<Self> {
            let content;
            Ok(FieldsNamed {
                brace_token: braced!(content in input),
                named: content.parse_terminated(Field::parse_named, Token![,])?,
            })
        }
    }

    #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
    impl Parse for FieldsUnnamed {
        fn parse(input: ParseStream) -> Result<Self> {
            let content;
            Ok(FieldsUnnamed {
                paren_token: parenthesized!(content in input),
                unnamed: content.parse_terminated(Field::parse_unnamed, Token![,])?,
            })
        }
    }

    impl Field {
        /// Parses a named (braced struct) field.
        #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
        pub fn parse_named(input: ParseStream) -> Result<Self> {
            let attrs = input.call(Attribute::parse_outer)?;
            let vis: Visibility = input.parse()?;

            // With the "full" feature, `_: Ty` fields (unnamed fields in
            // braced structs, an unstable language feature) are accepted.
            let unnamed_field = cfg!(feature = "full") && input.peek(Token![_]);
            let ident = if unnamed_field {
                // `_` is a reserved identifier; parse_any accepts it.
                input.call(Ident::parse_any)
            } else {
                input.parse()
            }?;

            let colon_token: Token![:] = input.parse()?;

            let ty: Type = if unnamed_field
                && (input.peek(Token![struct])
                    || input.peek(Token![union]) && input.peek2(token::Brace))
            {
                // Anonymous struct/union type (unstable): capture it verbatim.
                let begin = input.fork();
                input.call(Ident::parse_any)?;
                input.parse::<FieldsNamed>()?;
                Type::Verbatim(verbatim::between(&begin, input))
            } else {
                input.parse()?
            };

            Ok(Field {
                attrs,
                vis,
                mutability: FieldMutability::None,
                ident: Some(ident),
                colon_token: Some(colon_token),
                ty,
            })
        }

        /// Parses an unnamed (tuple struct) field.
        #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
        pub fn parse_unnamed(input: ParseStream) -> Result<Self> {
            Ok(Field {
                attrs: input.call(Attribute::parse_outer)?,
                vis: input.parse()?,
                mutability: FieldMutability::None,
                ident: None,
                colon_token: None,
                ty: input.parse()?,
            })
        }
    }
}
|
||||
|
||||
#[cfg(feature = "printing")]
mod printing {
    use super::*;
    use crate::print::TokensOrDefault;
    use proc_macro2::TokenStream;
    use quote::{ToTokens, TokenStreamExt};

    #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))]
    impl ToTokens for Variant {
        fn to_tokens(&self, tokens: &mut TokenStream) {
            tokens.append_all(&self.attrs);
            self.ident.to_tokens(tokens);
            self.fields.to_tokens(tokens);
            if let Some((eq_token, disc)) = &self.discriminant {
                eq_token.to_tokens(tokens);
                disc.to_tokens(tokens);
            }
        }
    }

    #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))]
    impl ToTokens for FieldsNamed {
        fn to_tokens(&self, tokens: &mut TokenStream) {
            self.brace_token.surround(tokens, |tokens| {
                self.named.to_tokens(tokens);
            });
        }
    }

    #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))]
    impl ToTokens for FieldsUnnamed {
        fn to_tokens(&self, tokens: &mut TokenStream) {
            self.paren_token.surround(tokens, |tokens| {
                self.unnamed.to_tokens(tokens);
            });
        }
    }

    #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))]
    impl ToTokens for Field {
        fn to_tokens(&self, tokens: &mut TokenStream) {
            tokens.append_all(&self.attrs);
            self.vis.to_tokens(tokens);
            if let Some(ident) = &self.ident {
                ident.to_tokens(tokens);
                // A named field always prints a colon, synthesizing one if
                // the token was not recorded.
                TokensOrDefault(&self.colon_token).to_tokens(tokens);
            }
            self.ty.to_tokens(tokens);
        }
    }
}
|
||||
245
vendor/syn/src/derive.rs
vendored
Normal file
245
vendor/syn/src/derive.rs
vendored
Normal file
@@ -0,0 +1,245 @@
|
||||
use super::*;
|
||||
use crate::punctuated::Punctuated;
|
||||
|
||||
ast_struct! {
    /// Data structure sent to a `proc_macro_derive` macro.
    #[cfg_attr(doc_cfg, doc(cfg(feature = "derive")))]
    pub struct DeriveInput {
        /// Attributes tagged on the whole struct or enum.
        pub attrs: Vec<Attribute>,

        /// Visibility of the struct or enum.
        pub vis: Visibility,

        /// Name of the struct or enum.
        pub ident: Ident,

        /// Generics required to complete the definition.
        pub generics: Generics,

        /// Data within the struct or enum.
        pub data: Data,
    }
}
|
||||
|
||||
ast_enum! {
    /// The storage of a struct, enum or union data structure.
    ///
    /// # Syntax tree enum
    ///
    /// This type is a [syntax tree enum].
    ///
    /// [syntax tree enum]: Expr#syntax-tree-enums
    #[cfg_attr(doc_cfg, doc(cfg(feature = "derive")))]
    pub enum Data {
        Struct(DataStruct),
        Enum(DataEnum),
        Union(DataUnion),
    }
}
|
||||
|
||||
ast_struct! {
    /// A struct input to a `proc_macro_derive` macro.
    #[cfg_attr(doc_cfg, doc(cfg(feature = "derive")))]
    pub struct DataStruct {
        pub struct_token: Token![struct],
        pub fields: Fields,
        /// Trailing semicolon, present for unit and tuple structs.
        pub semi_token: Option<Token![;]>,
    }
}
|
||||
|
||||
ast_struct! {
    /// An enum input to a `proc_macro_derive` macro.
    #[cfg_attr(doc_cfg, doc(cfg(feature = "derive")))]
    pub struct DataEnum {
        pub enum_token: Token![enum],
        pub brace_token: token::Brace,
        pub variants: Punctuated<Variant, Token![,]>,
    }
}
|
||||
|
||||
ast_struct! {
    /// An untagged union input to a `proc_macro_derive` macro.
    #[cfg_attr(doc_cfg, doc(cfg(feature = "derive")))]
    pub struct DataUnion {
        pub union_token: Token![union],
        /// Union fields are always named and brace-delimited.
        pub fields: FieldsNamed,
    }
}
|
||||
|
||||
#[cfg(feature = "parsing")]
pub(crate) mod parsing {
    use super::*;
    use crate::parse::{Parse, ParseStream, Result};

    #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
    impl Parse for DeriveInput {
        fn parse(input: ParseStream) -> Result<Self> {
            let attrs = input.call(Attribute::parse_outer)?;
            let vis = input.parse::<Visibility>()?;

            // Dispatch on the item keyword; anything other than
            // struct/enum/union is rejected with a lookahead error.
            let lookahead = input.lookahead1();
            if lookahead.peek(Token![struct]) {
                let struct_token = input.parse::<Token![struct]>()?;
                let ident = input.parse::<Ident>()?;
                let generics = input.parse::<Generics>()?;
                let (where_clause, fields, semi) = data_struct(input)?;
                Ok(DeriveInput {
                    attrs,
                    vis,
                    ident,
                    // The where clause follows the body shape, so it is parsed
                    // by the data_* helper and spliced back into the generics.
                    generics: Generics {
                        where_clause,
                        ..generics
                    },
                    data: Data::Struct(DataStruct {
                        struct_token,
                        fields,
                        semi_token: semi,
                    }),
                })
            } else if lookahead.peek(Token![enum]) {
                let enum_token = input.parse::<Token![enum]>()?;
                let ident = input.parse::<Ident>()?;
                let generics = input.parse::<Generics>()?;
                let (where_clause, brace, variants) = data_enum(input)?;
                Ok(DeriveInput {
                    attrs,
                    vis,
                    ident,
                    generics: Generics {
                        where_clause,
                        ..generics
                    },
                    data: Data::Enum(DataEnum {
                        enum_token,
                        brace_token: brace,
                        variants,
                    }),
                })
            } else if lookahead.peek(Token![union]) {
                let union_token = input.parse::<Token![union]>()?;
                let ident = input.parse::<Ident>()?;
                let generics = input.parse::<Generics>()?;
                let (where_clause, fields) = data_union(input)?;
                Ok(DeriveInput {
                    attrs,
                    vis,
                    ident,
                    generics: Generics {
                        where_clause,
                        ..generics
                    },
                    data: Data::Union(DataUnion {
                        union_token,
                        fields,
                    }),
                })
            } else {
                Err(lookahead.error())
            }
        }
    }

    /// Parses the body of a struct after `struct Name<...>`: either
    /// `(...)...;` (tuple), `{...}` (named), or `;` (unit), with the where
    /// clause positioned before a braced body or after a tuple body.
    pub(crate) fn data_struct(
        input: ParseStream,
    ) -> Result<(Option<WhereClause>, Fields, Option<Token![;]>)> {
        let mut lookahead = input.lookahead1();
        let mut where_clause = None;
        if lookahead.peek(Token![where]) {
            where_clause = Some(input.parse()?);
            lookahead = input.lookahead1();
        }

        if where_clause.is_none() && lookahead.peek(token::Paren) {
            let fields = input.parse()?;

            // A tuple struct's where clause comes after the parenthesized
            // fields and before the mandatory semicolon.
            lookahead = input.lookahead1();
            if lookahead.peek(Token![where]) {
                where_clause = Some(input.parse()?);
                lookahead = input.lookahead1();
            }

            if lookahead.peek(Token![;]) {
                let semi = input.parse()?;
                Ok((where_clause, Fields::Unnamed(fields), Some(semi)))
            } else {
                Err(lookahead.error())
            }
        } else if lookahead.peek(token::Brace) {
            let fields = input.parse()?;
            Ok((where_clause, Fields::Named(fields), None))
        } else if lookahead.peek(Token![;]) {
            let semi = input.parse()?;
            Ok((where_clause, Fields::Unit, Some(semi)))
        } else {
            Err(lookahead.error())
        }
    }

    /// Parses the body of an enum after `enum Name<...>`: an optional where
    /// clause followed by a brace-delimited, comma-separated variant list.
    pub(crate) fn data_enum(
        input: ParseStream,
    ) -> Result<(
        Option<WhereClause>,
        token::Brace,
        Punctuated<Variant, Token![,]>,
    )> {
        let where_clause = input.parse()?;

        let content;
        let brace = braced!(content in input);
        let variants = content.parse_terminated(Variant::parse, Token![,])?;

        Ok((where_clause, brace, variants))
    }

    /// Parses the body of a union after `union Name<...>`: an optional where
    /// clause followed by named fields.
    pub(crate) fn data_union(input: ParseStream) -> Result<(Option<WhereClause>, FieldsNamed)> {
        let where_clause = input.parse()?;
        let fields = input.parse()?;
        Ok((where_clause, fields))
    }
}
|
||||
|
||||
#[cfg(feature = "printing")]
mod printing {
    use super::*;
    use crate::attr::FilterAttrs;
    use crate::print::TokensOrDefault;
    use proc_macro2::TokenStream;
    use quote::ToTokens;

    #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))]
    impl ToTokens for DeriveInput {
        fn to_tokens(&self, tokens: &mut TokenStream) {
            // Only outer attributes print here; inner attributes belong
            // inside a body, which DeriveInput does not model.
            for attr in self.attrs.outer() {
                attr.to_tokens(tokens);
            }
            self.vis.to_tokens(tokens);
            match &self.data {
                Data::Struct(d) => d.struct_token.to_tokens(tokens),
                Data::Enum(d) => d.enum_token.to_tokens(tokens),
                Data::Union(d) => d.union_token.to_tokens(tokens),
            }
            self.ident.to_tokens(tokens);
            self.generics.to_tokens(tokens);
            // Where-clause placement mirrors parsing: before a braced body,
            // after tuple fields, before the semicolon of a unit struct.
            match &self.data {
                Data::Struct(data) => match &data.fields {
                    Fields::Named(fields) => {
                        self.generics.where_clause.to_tokens(tokens);
                        fields.to_tokens(tokens);
                    }
                    Fields::Unnamed(fields) => {
                        fields.to_tokens(tokens);
                        self.generics.where_clause.to_tokens(tokens);
                        TokensOrDefault(&data.semi_token).to_tokens(tokens);
                    }
                    Fields::Unit => {
                        self.generics.where_clause.to_tokens(tokens);
                        TokensOrDefault(&data.semi_token).to_tokens(tokens);
                    }
                },
                Data::Enum(data) => {
                    self.generics.where_clause.to_tokens(tokens);
                    data.brace_token.surround(tokens, |tokens| {
                        data.variants.to_tokens(tokens);
                    });
                }
                Data::Union(data) => {
                    self.generics.where_clause.to_tokens(tokens);
                    data.fields.to_tokens(tokens);
                }
            }
        }
    }
}
|
||||
219
vendor/syn/src/discouraged.rs
vendored
Normal file
219
vendor/syn/src/discouraged.rs
vendored
Normal file
@@ -0,0 +1,219 @@
|
||||
//! Extensions to the parsing API with niche applicability.
|
||||
|
||||
use super::*;
|
||||
use proc_macro2::extra::DelimSpan;
|
||||
|
||||
/// Extensions to the `ParseStream` API to support speculative parsing.
pub trait Speculative {
    /// Advance this parse stream to the position of a forked parse stream.
    ///
    /// This is the opposite operation to [`ParseStream::fork`]. You can fork a
    /// parse stream, perform some speculative parsing, then join the original
    /// stream to the fork to "commit" the parsing from the fork to the main
    /// stream.
    ///
    /// If you can avoid doing this, you should, as it limits the ability to
    /// generate useful errors. That said, it is often the only way to parse
    /// syntax of the form `A* B*` for arbitrary syntax `A` and `B`. The problem
    /// is that when the fork fails to parse an `A`, it's impossible to tell
    /// whether that was because of a syntax error and the user meant to provide
    /// an `A`, or that the `A`s are finished and it's time to start parsing
    /// `B`s. Use with care.
    ///
    /// Also note that if `A` is a subset of `B`, `A* B*` can be parsed by
    /// parsing `B*` and removing the leading members of `A` from the
    /// repetition, bypassing the need to involve the downsides associated with
    /// speculative parsing.
    ///
    /// [`ParseStream::fork`]: ParseBuffer::fork
    ///
    /// # Example
    ///
    /// There has been chatter about the possibility of making the colons in the
    /// turbofish syntax like `path::to::<T>` no longer required by accepting
    /// `path::to<T>` in expression position. Specifically, according to [RFC
    /// 2544], [`PathSegment`] parsing should always try to consume a following
    /// `<` token as the start of generic arguments, and reset to the `<` if
    /// that fails (e.g. the token is acting as a less-than operator).
    ///
    /// This is the exact kind of parsing behavior which requires the "fork,
    /// try, commit" behavior that [`ParseStream::fork`] discourages. With
    /// `advance_to`, we can avoid having to parse the speculatively parsed
    /// content a second time.
    ///
    /// This change in behavior can be implemented in syn by replacing just the
    /// `Parse` implementation for `PathSegment`:
    ///
    /// ```
    /// # use syn::ext::IdentExt;
    /// use syn::parse::discouraged::Speculative;
    /// # use syn::parse::{Parse, ParseStream};
    /// # use syn::{Ident, PathArguments, Result, Token};
    ///
    /// pub struct PathSegment {
    ///     pub ident: Ident,
    ///     pub arguments: PathArguments,
    /// }
    /// #
    /// # impl<T> From<T> for PathSegment
    /// # where
    /// #     T: Into<Ident>,
    /// # {
    /// #     fn from(ident: T) -> Self {
    /// #         PathSegment {
    /// #             ident: ident.into(),
    /// #             arguments: PathArguments::None,
    /// #         }
    /// #     }
    /// # }
    ///
    /// impl Parse for PathSegment {
    ///     fn parse(input: ParseStream) -> Result<Self> {
    ///         if input.peek(Token![super])
    ///             || input.peek(Token![self])
    ///             || input.peek(Token![Self])
    ///             || input.peek(Token![crate])
    ///         {
    ///             let ident = input.call(Ident::parse_any)?;
    ///             return Ok(PathSegment::from(ident));
    ///         }
    ///
    ///         let ident = input.parse()?;
    ///         if input.peek(Token![::]) && input.peek3(Token![<]) {
    ///             return Ok(PathSegment {
    ///                 ident,
    ///                 arguments: PathArguments::AngleBracketed(input.parse()?),
    ///             });
    ///         }
    ///         if input.peek(Token![<]) && !input.peek(Token![<=]) {
    ///             let fork = input.fork();
    ///             if let Ok(arguments) = fork.parse() {
    ///                 input.advance_to(&fork);
    ///                 return Ok(PathSegment {
    ///                     ident,
    ///                     arguments: PathArguments::AngleBracketed(arguments),
    ///                 });
    ///             }
    ///         }
    ///         Ok(PathSegment::from(ident))
    ///     }
    /// }
    ///
    /// # syn::parse_str::<PathSegment>("a<b,c>").unwrap();
    /// ```
    ///
    /// # Drawbacks
    ///
    /// The main drawback of this style of speculative parsing is in error
    /// presentation. Even if the lookahead is the "correct" parse, the error
    /// that is shown is that of the "fallback" parse. To use the same example
    /// as the turbofish above, take the following unfinished "turbofish":
    ///
    /// ```text
    /// let _ = f<&'a fn(), for<'a> serde::>();
    /// ```
    ///
    /// If this is parsed as generic arguments, we can provide the error message
    ///
    /// ```text
    /// error: expected identifier
    ///  --> src.rs:L:C
    ///   |
    /// L | let _ = f<&'a fn(), for<'a> serde::>();
    ///   |                                     ^
    /// ```
    ///
    /// but if parsed using the above speculative parsing, it falls back to
    /// assuming that the `<` is a less-than when it fails to parse the generic
    /// arguments, and tries to interpret the `&'a` as the start of a labelled
    /// loop, resulting in the much less helpful error
    ///
    /// ```text
    /// error: expected `:`
    ///  --> src.rs:L:C
    ///   |
    /// L | let _ = f<&'a fn(), for<'a> serde::>();
    ///   |            ^^
    /// ```
    ///
    /// This can be mitigated with various heuristics (two examples: show both
    /// forks' parse errors, or show the one that consumed more tokens), but
    /// when you can control the grammar, sticking to something that can be
    /// parsed LL(3) and without the LL(*) speculative parsing this makes
    /// possible, displaying reasonable errors becomes much more simple.
    ///
    /// [RFC 2544]: https://github.com/rust-lang/rfcs/pull/2544
    /// [`PathSegment`]: crate::PathSegment
    ///
    /// # Performance
    ///
    /// This method performs a cheap fixed amount of work that does not depend
    /// on how far apart the two streams are positioned.
    ///
    /// # Panics
    ///
    /// The forked stream in the argument of `advance_to` must have been
    /// obtained by forking `self`. Attempting to advance to any other stream
    /// will cause a panic.
    fn advance_to(&self, fork: &Self);
}
|
||||
|
||||
impl<'a> Speculative for ParseBuffer<'a> {
|
||||
fn advance_to(&self, fork: &Self) {
|
||||
if !crate::buffer::same_scope(self.cursor(), fork.cursor()) {
|
||||
panic!("Fork was not derived from the advancing parse stream");
|
||||
}
|
||||
|
||||
let (self_unexp, self_sp) = inner_unexpected(self);
|
||||
let (fork_unexp, fork_sp) = inner_unexpected(fork);
|
||||
if !Rc::ptr_eq(&self_unexp, &fork_unexp) {
|
||||
match (fork_sp, self_sp) {
|
||||
// Unexpected set on the fork, but not on `self`, copy it over.
|
||||
(Some(span), None) => {
|
||||
self_unexp.set(Unexpected::Some(span));
|
||||
}
|
||||
// Unexpected unset. Use chain to propagate errors from fork.
|
||||
(None, None) => {
|
||||
fork_unexp.set(Unexpected::Chain(self_unexp));
|
||||
|
||||
// Ensure toplevel 'unexpected' tokens from the fork don't
|
||||
// bubble up the chain by replacing the root `unexpected`
|
||||
// pointer, only 'unexpected' tokens from existing group
|
||||
// parsers should bubble.
|
||||
fork.unexpected
|
||||
.set(Some(Rc::new(Cell::new(Unexpected::None))));
|
||||
}
|
||||
// Unexpected has been set on `self`. No changes needed.
|
||||
(_, Some(_)) => {}
|
||||
}
|
||||
}
|
||||
|
||||
// See comment on `cell` in the struct definition.
|
||||
self.cell
|
||||
.set(unsafe { mem::transmute::<Cursor, Cursor<'static>>(fork.cursor()) });
|
||||
}
|
||||
}
|
||||
|
||||
/// Extensions to the `ParseStream` API to support manipulating invisible
|
||||
/// delimiters the same as if they were visible.
|
||||
pub trait AnyDelimiter {
|
||||
/// Returns the delimiter, the span of the delimiter token, and the nested
|
||||
/// contents for further parsing.
|
||||
fn parse_any_delimiter(&self) -> Result<(Delimiter, DelimSpan, ParseBuffer)>;
|
||||
}
|
||||
|
||||
impl<'a> AnyDelimiter for ParseBuffer<'a> {
|
||||
fn parse_any_delimiter(&self) -> Result<(Delimiter, DelimSpan, ParseBuffer)> {
|
||||
self.step(|cursor| {
|
||||
if let Some((content, delimiter, span, rest)) = cursor.any_group() {
|
||||
let scope = crate::buffer::close_span_of_group(*cursor);
|
||||
let nested = crate::parse::advance_step_cursor(cursor, content);
|
||||
let unexpected = crate::parse::get_unexpected(self);
|
||||
let content = crate::parse::new_parse_buffer(scope, nested, unexpected);
|
||||
Ok(((delimiter, span, content), rest))
|
||||
} else {
|
||||
Err(cursor.error("expected any delimiter"))
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
58
vendor/syn/src/drops.rs
vendored
Normal file
58
vendor/syn/src/drops.rs
vendored
Normal file
@@ -0,0 +1,58 @@
|
||||
use std::iter;
|
||||
use std::mem::ManuallyDrop;
|
||||
use std::ops::{Deref, DerefMut};
|
||||
use std::option;
|
||||
use std::slice;
|
||||
|
||||
#[repr(transparent)]
|
||||
pub(crate) struct NoDrop<T: ?Sized>(ManuallyDrop<T>);
|
||||
|
||||
impl<T> NoDrop<T> {
|
||||
pub(crate) fn new(value: T) -> Self
|
||||
where
|
||||
T: TrivialDrop,
|
||||
{
|
||||
NoDrop(ManuallyDrop::new(value))
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: ?Sized> Deref for NoDrop<T> {
|
||||
type Target = T;
|
||||
fn deref(&self) -> &Self::Target {
|
||||
&self.0
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: ?Sized> DerefMut for NoDrop<T> {
|
||||
fn deref_mut(&mut self) -> &mut Self::Target {
|
||||
&mut self.0
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) trait TrivialDrop {}
|
||||
|
||||
impl<T> TrivialDrop for iter::Empty<T> {}
|
||||
impl<'a, T> TrivialDrop for slice::Iter<'a, T> {}
|
||||
impl<'a, T> TrivialDrop for slice::IterMut<'a, T> {}
|
||||
impl<'a, T> TrivialDrop for option::IntoIter<&'a T> {}
|
||||
impl<'a, T> TrivialDrop for option::IntoIter<&'a mut T> {}
|
||||
|
||||
#[test]
|
||||
fn test_needs_drop() {
|
||||
use std::mem::needs_drop;
|
||||
|
||||
struct NeedsDrop;
|
||||
|
||||
impl Drop for NeedsDrop {
|
||||
fn drop(&mut self) {}
|
||||
}
|
||||
|
||||
assert!(needs_drop::<NeedsDrop>());
|
||||
|
||||
// Test each of the types with a handwritten TrivialDrop impl above.
|
||||
assert!(!needs_drop::<iter::Empty<NeedsDrop>>());
|
||||
assert!(!needs_drop::<slice::Iter<NeedsDrop>>());
|
||||
assert!(!needs_drop::<slice::IterMut<NeedsDrop>>());
|
||||
assert!(!needs_drop::<option::IntoIter<&NeedsDrop>>());
|
||||
assert!(!needs_drop::<option::IntoIter<&mut NeedsDrop>>());
|
||||
}
|
||||
467
vendor/syn/src/error.rs
vendored
Normal file
467
vendor/syn/src/error.rs
vendored
Normal file
@@ -0,0 +1,467 @@
|
||||
#[cfg(feature = "parsing")]
|
||||
use crate::buffer::Cursor;
|
||||
use crate::thread::ThreadBound;
|
||||
use proc_macro2::{
|
||||
Delimiter, Group, Ident, LexError, Literal, Punct, Spacing, Span, TokenStream, TokenTree,
|
||||
};
|
||||
#[cfg(feature = "printing")]
|
||||
use quote::ToTokens;
|
||||
use std::fmt::{self, Debug, Display};
|
||||
use std::slice;
|
||||
use std::vec;
|
||||
|
||||
/// The result of a Syn parser.
|
||||
pub type Result<T> = std::result::Result<T, Error>;
|
||||
|
||||
/// Error returned when a Syn parser cannot parse the input tokens.
|
||||
///
|
||||
/// # Error reporting in proc macros
|
||||
///
|
||||
/// The correct way to report errors back to the compiler from a procedural
|
||||
/// macro is by emitting an appropriately spanned invocation of
|
||||
/// [`compile_error!`] in the generated code. This produces a better diagnostic
|
||||
/// message than simply panicking the macro.
|
||||
///
|
||||
/// [`compile_error!`]: std::compile_error!
|
||||
///
|
||||
/// When parsing macro input, the [`parse_macro_input!`] macro handles the
|
||||
/// conversion to `compile_error!` automatically.
|
||||
///
|
||||
/// [`parse_macro_input!`]: crate::parse_macro_input!
|
||||
///
|
||||
/// ```
|
||||
/// # extern crate proc_macro;
|
||||
/// #
|
||||
/// use proc_macro::TokenStream;
|
||||
/// use syn::parse::{Parse, ParseStream, Result};
|
||||
/// use syn::{parse_macro_input, ItemFn};
|
||||
///
|
||||
/// # const IGNORE: &str = stringify! {
|
||||
/// #[proc_macro_attribute]
|
||||
/// # };
|
||||
/// pub fn my_attr(args: TokenStream, input: TokenStream) -> TokenStream {
|
||||
/// let args = parse_macro_input!(args as MyAttrArgs);
|
||||
/// let input = parse_macro_input!(input as ItemFn);
|
||||
///
|
||||
/// /* ... */
|
||||
/// # TokenStream::new()
|
||||
/// }
|
||||
///
|
||||
/// struct MyAttrArgs {
|
||||
/// # _k: [(); { stringify! {
|
||||
/// ...
|
||||
/// # }; 0 }]
|
||||
/// }
|
||||
///
|
||||
/// impl Parse for MyAttrArgs {
|
||||
/// fn parse(input: ParseStream) -> Result<Self> {
|
||||
/// # stringify! {
|
||||
/// ...
|
||||
/// # };
|
||||
/// # unimplemented!()
|
||||
/// }
|
||||
/// }
|
||||
/// ```
|
||||
///
|
||||
/// For errors that arise later than the initial parsing stage, the
|
||||
/// [`.to_compile_error()`] or [`.into_compile_error()`] methods can be used to
|
||||
/// perform an explicit conversion to `compile_error!`.
|
||||
///
|
||||
/// [`.to_compile_error()`]: Error::to_compile_error
|
||||
/// [`.into_compile_error()`]: Error::into_compile_error
|
||||
///
|
||||
/// ```
|
||||
/// # extern crate proc_macro;
|
||||
/// #
|
||||
/// # use proc_macro::TokenStream;
|
||||
/// # use syn::{parse_macro_input, DeriveInput};
|
||||
/// #
|
||||
/// # const IGNORE: &str = stringify! {
|
||||
/// #[proc_macro_derive(MyDerive)]
|
||||
/// # };
|
||||
/// pub fn my_derive(input: TokenStream) -> TokenStream {
|
||||
/// let input = parse_macro_input!(input as DeriveInput);
|
||||
///
|
||||
/// // fn(DeriveInput) -> syn::Result<proc_macro2::TokenStream>
|
||||
/// expand::my_derive(input)
|
||||
/// .unwrap_or_else(syn::Error::into_compile_error)
|
||||
/// .into()
|
||||
/// }
|
||||
/// #
|
||||
/// # mod expand {
|
||||
/// # use proc_macro2::TokenStream;
|
||||
/// # use syn::{DeriveInput, Result};
|
||||
/// #
|
||||
/// # pub fn my_derive(input: DeriveInput) -> Result<TokenStream> {
|
||||
/// # unimplemented!()
|
||||
/// # }
|
||||
/// # }
|
||||
/// ```
|
||||
pub struct Error {
|
||||
messages: Vec<ErrorMessage>,
|
||||
}
|
||||
|
||||
struct ErrorMessage {
|
||||
// Span is implemented as an index into a thread-local interner to keep the
|
||||
// size small. It is not safe to access from a different thread. We want
|
||||
// errors to be Send and Sync to play nicely with ecosystem crates for error
|
||||
// handling, so pin the span we're given to its original thread and assume
|
||||
// it is Span::call_site if accessed from any other thread.
|
||||
span: ThreadBound<SpanRange>,
|
||||
message: String,
|
||||
}
|
||||
|
||||
// Cannot use std::ops::Range<Span> because that does not implement Copy,
|
||||
// whereas ThreadBound<T> requires a Copy impl as a way to ensure no Drop impls
|
||||
// are involved.
|
||||
struct SpanRange {
|
||||
start: Span,
|
||||
end: Span,
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
struct _Test
|
||||
where
|
||||
Error: Send + Sync;
|
||||
|
||||
impl Error {
|
||||
/// Usually the [`ParseStream::error`] method will be used instead, which
|
||||
/// automatically uses the correct span from the current position of the
|
||||
/// parse stream.
|
||||
///
|
||||
/// Use `Error::new` when the error needs to be triggered on some span other
|
||||
/// than where the parse stream is currently positioned.
|
||||
///
|
||||
/// [`ParseStream::error`]: crate::parse::ParseBuffer::error
|
||||
///
|
||||
/// # Example
|
||||
///
|
||||
/// ```
|
||||
/// use syn::{Error, Ident, LitStr, Result, Token};
|
||||
/// use syn::parse::ParseStream;
|
||||
///
|
||||
/// // Parses input that looks like `name = "string"` where the key must be
|
||||
/// // the identifier `name` and the value may be any string literal.
|
||||
/// // Returns the string literal.
|
||||
/// fn parse_name(input: ParseStream) -> Result<LitStr> {
|
||||
/// let name_token: Ident = input.parse()?;
|
||||
/// if name_token != "name" {
|
||||
/// // Trigger an error not on the current position of the stream,
|
||||
/// // but on the position of the unexpected identifier.
|
||||
/// return Err(Error::new(name_token.span(), "expected `name`"));
|
||||
/// }
|
||||
/// input.parse::<Token![=]>()?;
|
||||
/// let s: LitStr = input.parse()?;
|
||||
/// Ok(s)
|
||||
/// }
|
||||
/// ```
|
||||
pub fn new<T: Display>(span: Span, message: T) -> Self {
|
||||
return new(span, message.to_string());
|
||||
|
||||
fn new(span: Span, message: String) -> Error {
|
||||
Error {
|
||||
messages: vec![ErrorMessage {
|
||||
span: ThreadBound::new(SpanRange {
|
||||
start: span,
|
||||
end: span,
|
||||
}),
|
||||
message,
|
||||
}],
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Creates an error with the specified message spanning the given syntax
|
||||
/// tree node.
|
||||
///
|
||||
/// Unlike the `Error::new` constructor, this constructor takes an argument
|
||||
/// `tokens` which is a syntax tree node. This allows the resulting `Error`
|
||||
/// to attempt to span all tokens inside of `tokens`. While you would
|
||||
/// typically be able to use the `Spanned` trait with the above `Error::new`
|
||||
/// constructor, implementation limitations today mean that
|
||||
/// `Error::new_spanned` may provide a higher-quality error message on
|
||||
/// stable Rust.
|
||||
///
|
||||
/// When in doubt it's recommended to stick to `Error::new` (or
|
||||
/// `ParseStream::error`)!
|
||||
#[cfg(feature = "printing")]
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))]
|
||||
pub fn new_spanned<T: ToTokens, U: Display>(tokens: T, message: U) -> Self {
|
||||
return new_spanned(tokens.into_token_stream(), message.to_string());
|
||||
|
||||
fn new_spanned(tokens: TokenStream, message: String) -> Error {
|
||||
let mut iter = tokens.into_iter();
|
||||
let start = iter.next().map_or_else(Span::call_site, |t| t.span());
|
||||
let end = iter.last().map_or(start, |t| t.span());
|
||||
Error {
|
||||
messages: vec![ErrorMessage {
|
||||
span: ThreadBound::new(SpanRange { start, end }),
|
||||
message,
|
||||
}],
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// The source location of the error.
|
||||
///
|
||||
/// Spans are not thread-safe so this function returns `Span::call_site()`
|
||||
/// if called from a different thread than the one on which the `Error` was
|
||||
/// originally created.
|
||||
pub fn span(&self) -> Span {
|
||||
let SpanRange { start, end } = match self.messages[0].span.get() {
|
||||
Some(span) => *span,
|
||||
None => return Span::call_site(),
|
||||
};
|
||||
start.join(end).unwrap_or(start)
|
||||
}
|
||||
|
||||
/// Render the error as an invocation of [`compile_error!`].
|
||||
///
|
||||
/// The [`parse_macro_input!`] macro provides a convenient way to invoke
|
||||
/// this method correctly in a procedural macro.
|
||||
///
|
||||
/// [`compile_error!`]: std::compile_error!
|
||||
/// [`parse_macro_input!`]: crate::parse_macro_input!
|
||||
pub fn to_compile_error(&self) -> TokenStream {
|
||||
self.messages
|
||||
.iter()
|
||||
.map(ErrorMessage::to_compile_error)
|
||||
.collect()
|
||||
}
|
||||
|
||||
/// Render the error as an invocation of [`compile_error!`].
|
||||
///
|
||||
/// [`compile_error!`]: std::compile_error!
|
||||
///
|
||||
/// # Example
|
||||
///
|
||||
/// ```
|
||||
/// # extern crate proc_macro;
|
||||
/// #
|
||||
/// use proc_macro::TokenStream;
|
||||
/// use syn::{parse_macro_input, DeriveInput, Error};
|
||||
///
|
||||
/// # const _: &str = stringify! {
|
||||
/// #[proc_macro_derive(MyTrait)]
|
||||
/// # };
|
||||
/// pub fn derive_my_trait(input: TokenStream) -> TokenStream {
|
||||
/// let input = parse_macro_input!(input as DeriveInput);
|
||||
/// my_trait::expand(input)
|
||||
/// .unwrap_or_else(Error::into_compile_error)
|
||||
/// .into()
|
||||
/// }
|
||||
///
|
||||
/// mod my_trait {
|
||||
/// use proc_macro2::TokenStream;
|
||||
/// use syn::{DeriveInput, Result};
|
||||
///
|
||||
/// pub(crate) fn expand(input: DeriveInput) -> Result<TokenStream> {
|
||||
/// /* ... */
|
||||
/// # unimplemented!()
|
||||
/// }
|
||||
/// }
|
||||
/// ```
|
||||
pub fn into_compile_error(self) -> TokenStream {
|
||||
self.to_compile_error()
|
||||
}
|
||||
|
||||
/// Add another error message to self such that when `to_compile_error()` is
|
||||
/// called, both errors will be emitted together.
|
||||
pub fn combine(&mut self, another: Error) {
|
||||
self.messages.extend(another.messages);
|
||||
}
|
||||
}
|
||||
|
||||
impl ErrorMessage {
|
||||
fn to_compile_error(&self) -> TokenStream {
|
||||
let (start, end) = match self.span.get() {
|
||||
Some(range) => (range.start, range.end),
|
||||
None => (Span::call_site(), Span::call_site()),
|
||||
};
|
||||
|
||||
// ::core::compile_error!($message)
|
||||
TokenStream::from_iter(vec![
|
||||
TokenTree::Punct({
|
||||
let mut punct = Punct::new(':', Spacing::Joint);
|
||||
punct.set_span(start);
|
||||
punct
|
||||
}),
|
||||
TokenTree::Punct({
|
||||
let mut punct = Punct::new(':', Spacing::Alone);
|
||||
punct.set_span(start);
|
||||
punct
|
||||
}),
|
||||
TokenTree::Ident(Ident::new("core", start)),
|
||||
TokenTree::Punct({
|
||||
let mut punct = Punct::new(':', Spacing::Joint);
|
||||
punct.set_span(start);
|
||||
punct
|
||||
}),
|
||||
TokenTree::Punct({
|
||||
let mut punct = Punct::new(':', Spacing::Alone);
|
||||
punct.set_span(start);
|
||||
punct
|
||||
}),
|
||||
TokenTree::Ident(Ident::new("compile_error", start)),
|
||||
TokenTree::Punct({
|
||||
let mut punct = Punct::new('!', Spacing::Alone);
|
||||
punct.set_span(start);
|
||||
punct
|
||||
}),
|
||||
TokenTree::Group({
|
||||
let mut group = Group::new(Delimiter::Brace, {
|
||||
TokenStream::from_iter(vec![TokenTree::Literal({
|
||||
let mut string = Literal::string(&self.message);
|
||||
string.set_span(end);
|
||||
string
|
||||
})])
|
||||
});
|
||||
group.set_span(end);
|
||||
group
|
||||
}),
|
||||
])
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(feature = "parsing")]
|
||||
pub(crate) fn new_at<T: Display>(scope: Span, cursor: Cursor, message: T) -> Error {
|
||||
if cursor.eof() {
|
||||
Error::new(scope, format!("unexpected end of input, {}", message))
|
||||
} else {
|
||||
let span = crate::buffer::open_span_of_group(cursor);
|
||||
Error::new(span, message)
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(all(feature = "parsing", any(feature = "full", feature = "derive")))]
|
||||
pub(crate) fn new2<T: Display>(start: Span, end: Span, message: T) -> Error {
|
||||
return new2(start, end, message.to_string());
|
||||
|
||||
fn new2(start: Span, end: Span, message: String) -> Error {
|
||||
Error {
|
||||
messages: vec![ErrorMessage {
|
||||
span: ThreadBound::new(SpanRange { start, end }),
|
||||
message,
|
||||
}],
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Debug for Error {
|
||||
fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
||||
if self.messages.len() == 1 {
|
||||
formatter
|
||||
.debug_tuple("Error")
|
||||
.field(&self.messages[0])
|
||||
.finish()
|
||||
} else {
|
||||
formatter
|
||||
.debug_tuple("Error")
|
||||
.field(&self.messages)
|
||||
.finish()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Debug for ErrorMessage {
|
||||
fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
||||
Debug::fmt(&self.message, formatter)
|
||||
}
|
||||
}
|
||||
|
||||
impl Display for Error {
|
||||
fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
||||
formatter.write_str(&self.messages[0].message)
|
||||
}
|
||||
}
|
||||
|
||||
impl Clone for Error {
|
||||
fn clone(&self) -> Self {
|
||||
Error {
|
||||
messages: self.messages.clone(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Clone for ErrorMessage {
|
||||
fn clone(&self) -> Self {
|
||||
ErrorMessage {
|
||||
span: self.span,
|
||||
message: self.message.clone(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Clone for SpanRange {
|
||||
fn clone(&self) -> Self {
|
||||
*self
|
||||
}
|
||||
}
|
||||
|
||||
impl Copy for SpanRange {}
|
||||
|
||||
impl std::error::Error for Error {}
|
||||
|
||||
impl From<LexError> for Error {
|
||||
fn from(err: LexError) -> Self {
|
||||
Error::new(err.span(), err)
|
||||
}
|
||||
}
|
||||
|
||||
impl IntoIterator for Error {
|
||||
type Item = Error;
|
||||
type IntoIter = IntoIter;
|
||||
|
||||
fn into_iter(self) -> Self::IntoIter {
|
||||
IntoIter {
|
||||
messages: self.messages.into_iter(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub struct IntoIter {
|
||||
messages: vec::IntoIter<ErrorMessage>,
|
||||
}
|
||||
|
||||
impl Iterator for IntoIter {
|
||||
type Item = Error;
|
||||
|
||||
fn next(&mut self) -> Option<Self::Item> {
|
||||
Some(Error {
|
||||
messages: vec![self.messages.next()?],
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> IntoIterator for &'a Error {
|
||||
type Item = Error;
|
||||
type IntoIter = Iter<'a>;
|
||||
|
||||
fn into_iter(self) -> Self::IntoIter {
|
||||
Iter {
|
||||
messages: self.messages.iter(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub struct Iter<'a> {
|
||||
messages: slice::Iter<'a, ErrorMessage>,
|
||||
}
|
||||
|
||||
impl<'a> Iterator for Iter<'a> {
|
||||
type Item = Error;
|
||||
|
||||
fn next(&mut self) -> Option<Self::Item> {
|
||||
Some(Error {
|
||||
messages: vec![self.messages.next()?.clone()],
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
impl Extend<Error> for Error {
|
||||
fn extend<T: IntoIterator<Item = Error>>(&mut self, iter: T) {
|
||||
for err in iter {
|
||||
self.combine(err);
|
||||
}
|
||||
}
|
||||
}
|
||||
73
vendor/syn/src/export.rs
vendored
Normal file
73
vendor/syn/src/export.rs
vendored
Normal file
@@ -0,0 +1,73 @@
|
||||
#[doc(hidden)]
|
||||
pub use std::clone::Clone;
|
||||
#[doc(hidden)]
|
||||
pub use std::cmp::{Eq, PartialEq};
|
||||
#[doc(hidden)]
|
||||
pub use std::concat;
|
||||
#[doc(hidden)]
|
||||
pub use std::default::Default;
|
||||
#[doc(hidden)]
|
||||
pub use std::fmt::Debug;
|
||||
#[doc(hidden)]
|
||||
pub use std::hash::{Hash, Hasher};
|
||||
#[doc(hidden)]
|
||||
pub use std::marker::Copy;
|
||||
#[doc(hidden)]
|
||||
pub use std::option::Option::{None, Some};
|
||||
#[doc(hidden)]
|
||||
pub use std::result::Result::{Err, Ok};
|
||||
#[doc(hidden)]
|
||||
pub use std::stringify;
|
||||
|
||||
#[doc(hidden)]
|
||||
pub type Formatter<'a> = std::fmt::Formatter<'a>;
|
||||
#[doc(hidden)]
|
||||
pub type FmtResult = std::fmt::Result;
|
||||
|
||||
#[doc(hidden)]
|
||||
pub type bool = std::primitive::bool;
|
||||
#[doc(hidden)]
|
||||
pub type str = std::primitive::str;
|
||||
|
||||
#[cfg(feature = "printing")]
|
||||
#[doc(hidden)]
|
||||
pub use quote;
|
||||
|
||||
#[doc(hidden)]
|
||||
pub type Span = proc_macro2::Span;
|
||||
#[doc(hidden)]
|
||||
pub type TokenStream2 = proc_macro2::TokenStream;
|
||||
|
||||
#[cfg(feature = "parsing")]
|
||||
#[doc(hidden)]
|
||||
pub use crate::group::{parse_braces, parse_brackets, parse_parens};
|
||||
|
||||
#[doc(hidden)]
|
||||
pub use crate::span::IntoSpans;
|
||||
|
||||
#[cfg(all(feature = "parsing", feature = "printing"))]
|
||||
#[doc(hidden)]
|
||||
pub use crate::parse_quote::parse as parse_quote;
|
||||
|
||||
#[cfg(feature = "parsing")]
|
||||
#[doc(hidden)]
|
||||
pub use crate::token::parsing::{peek_punct, punct as parse_punct};
|
||||
|
||||
#[cfg(feature = "printing")]
|
||||
#[doc(hidden)]
|
||||
pub use crate::token::printing::punct as print_punct;
|
||||
|
||||
#[cfg(feature = "parsing")]
|
||||
#[doc(hidden)]
|
||||
pub use crate::token::private::CustomToken;
|
||||
|
||||
#[cfg(feature = "proc-macro")]
|
||||
#[doc(hidden)]
|
||||
pub type TokenStream = proc_macro::TokenStream;
|
||||
|
||||
#[cfg(feature = "printing")]
|
||||
#[doc(hidden)]
|
||||
pub use quote::{ToTokens, TokenStreamExt};
|
||||
|
||||
#[doc(hidden)]
|
||||
pub struct private(pub(crate) ());
|
||||
3506
vendor/syn/src/expr.rs
vendored
Normal file
3506
vendor/syn/src/expr.rs
vendored
Normal file
File diff suppressed because it is too large
Load Diff
135
vendor/syn/src/ext.rs
vendored
Normal file
135
vendor/syn/src/ext.rs
vendored
Normal file
@@ -0,0 +1,135 @@
|
||||
//! Extension traits to provide parsing methods on foreign types.
|
||||
|
||||
use crate::buffer::Cursor;
|
||||
use crate::parse::Peek;
|
||||
use crate::parse::{ParseStream, Result};
|
||||
use crate::sealed::lookahead;
|
||||
use crate::token::CustomToken;
|
||||
use proc_macro2::Ident;
|
||||
|
||||
/// Additional methods for `Ident` not provided by proc-macro2 or libproc_macro.
|
||||
///
|
||||
/// This trait is sealed and cannot be implemented for types outside of Syn. It
|
||||
/// is implemented only for `proc_macro2::Ident`.
|
||||
pub trait IdentExt: Sized + private::Sealed {
|
||||
/// Parses any identifier including keywords.
|
||||
///
|
||||
/// This is useful when parsing macro input which allows Rust keywords as
|
||||
/// identifiers.
|
||||
///
|
||||
/// # Example
|
||||
///
|
||||
/// ```
|
||||
/// use syn::{Error, Ident, Result, Token};
|
||||
/// use syn::ext::IdentExt;
|
||||
/// use syn::parse::ParseStream;
|
||||
///
|
||||
/// mod kw {
|
||||
/// syn::custom_keyword!(name);
|
||||
/// }
|
||||
///
|
||||
/// // Parses input that looks like `name = NAME` where `NAME` can be
|
||||
/// // any identifier.
|
||||
/// //
|
||||
/// // Examples:
|
||||
/// //
|
||||
/// // name = anything
|
||||
/// // name = impl
|
||||
/// fn parse_dsl(input: ParseStream) -> Result<Ident> {
|
||||
/// input.parse::<kw::name>()?;
|
||||
/// input.parse::<Token![=]>()?;
|
||||
/// let name = input.call(Ident::parse_any)?;
|
||||
/// Ok(name)
|
||||
/// }
|
||||
/// ```
|
||||
fn parse_any(input: ParseStream) -> Result<Self>;
|
||||
|
||||
/// Peeks any identifier including keywords. Usage:
|
||||
/// `input.peek(Ident::peek_any)`
|
||||
///
|
||||
/// This is different from `input.peek(Ident)` which only returns true in
|
||||
/// the case of an ident which is not a Rust keyword.
|
||||
#[allow(non_upper_case_globals)]
|
||||
const peek_any: private::PeekFn = private::PeekFn;
|
||||
|
||||
/// Strips the raw marker `r#`, if any, from the beginning of an ident.
|
||||
///
|
||||
/// - unraw(`x`) = `x`
|
||||
/// - unraw(`move`) = `move`
|
||||
/// - unraw(`r#move`) = `move`
|
||||
///
|
||||
/// # Example
|
||||
///
|
||||
/// In the case of interop with other languages like Python that have a
|
||||
/// different set of keywords than Rust, we might come across macro input
|
||||
/// that involves raw identifiers to refer to ordinary variables in the
|
||||
/// other language with a name that happens to be a Rust keyword.
|
||||
///
|
||||
/// The function below appends an identifier from the caller's input onto a
|
||||
/// fixed prefix. Without using `unraw()`, this would tend to produce
|
||||
/// invalid identifiers like `__pyo3_get_r#move`.
|
||||
///
|
||||
/// ```
|
||||
/// use proc_macro2::Span;
|
||||
/// use syn::Ident;
|
||||
/// use syn::ext::IdentExt;
|
||||
///
|
||||
/// fn ident_for_getter(variable: &Ident) -> Ident {
|
||||
/// let getter = format!("__pyo3_get_{}", variable.unraw());
|
||||
/// Ident::new(&getter, Span::call_site())
|
||||
/// }
|
||||
/// ```
|
||||
fn unraw(&self) -> Ident;
|
||||
}
|
||||
|
||||
impl IdentExt for Ident {
|
||||
fn parse_any(input: ParseStream) -> Result<Self> {
|
||||
input.step(|cursor| match cursor.ident() {
|
||||
Some((ident, rest)) => Ok((ident, rest)),
|
||||
None => Err(cursor.error("expected ident")),
|
||||
})
|
||||
}
|
||||
|
||||
fn unraw(&self) -> Ident {
|
||||
let string = self.to_string();
|
||||
if let Some(string) = string.strip_prefix("r#") {
|
||||
Ident::new(string, self.span())
|
||||
} else {
|
||||
self.clone()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Peek for private::PeekFn {
|
||||
type Token = private::IdentAny;
|
||||
}
|
||||
|
||||
impl CustomToken for private::IdentAny {
|
||||
fn peek(cursor: Cursor) -> bool {
|
||||
cursor.ident().is_some()
|
||||
}
|
||||
|
||||
fn display() -> &'static str {
|
||||
"identifier"
|
||||
}
|
||||
}
|
||||
|
||||
impl lookahead::Sealed for private::PeekFn {}
|
||||
|
||||
mod private {
|
||||
use proc_macro2::Ident;
|
||||
|
||||
pub trait Sealed {}
|
||||
|
||||
impl Sealed for Ident {}
|
||||
|
||||
pub struct PeekFn;
|
||||
pub struct IdentAny;
|
||||
|
||||
impl Copy for PeekFn {}
|
||||
impl Clone for PeekFn {
|
||||
fn clone(&self) -> Self {
|
||||
*self
|
||||
}
|
||||
}
|
||||
}
|
||||
125
vendor/syn/src/file.rs
vendored
Normal file
125
vendor/syn/src/file.rs
vendored
Normal file
@@ -0,0 +1,125 @@
|
||||
use super::*;
|
||||
|
||||
ast_struct! {
|
||||
/// A complete file of Rust source code.
|
||||
///
|
||||
/// Typically `File` objects are created with [`parse_file`].
|
||||
///
|
||||
/// # Example
|
||||
///
|
||||
/// Parse a Rust source file into a `syn::File` and print out a debug
|
||||
/// representation of the syntax tree.
|
||||
///
|
||||
/// ```
|
||||
/// use std::env;
|
||||
/// use std::fs::File;
|
||||
/// use std::io::Read;
|
||||
/// use std::process;
|
||||
///
|
||||
/// fn main() {
|
||||
/// # }
|
||||
/// #
|
||||
/// # fn fake_main() {
|
||||
/// let mut args = env::args();
|
||||
/// let _ = args.next(); // executable name
|
||||
///
|
||||
/// let filename = match (args.next(), args.next()) {
|
||||
/// (Some(filename), None) => filename,
|
||||
/// _ => {
|
||||
/// eprintln!("Usage: dump-syntax path/to/filename.rs");
|
||||
/// process::exit(1);
|
||||
/// }
|
||||
/// };
|
||||
///
|
||||
/// let mut file = File::open(&filename).expect("Unable to open file");
|
||||
///
|
||||
/// let mut src = String::new();
|
||||
/// file.read_to_string(&mut src).expect("Unable to read file");
|
||||
///
|
||||
/// let syntax = syn::parse_file(&src).expect("Unable to parse file");
|
||||
///
|
||||
/// // Debug impl is available if Syn is built with "extra-traits" feature.
|
||||
/// println!("{:#?}", syntax);
|
||||
/// }
|
||||
/// ```
|
||||
///
|
||||
/// Running with its own source code as input, this program prints output
|
||||
/// that begins with:
|
||||
///
|
||||
/// ```text
|
||||
/// File {
|
||||
/// shebang: None,
|
||||
/// attrs: [],
|
||||
/// items: [
|
||||
/// Use(
|
||||
/// ItemUse {
|
||||
/// attrs: [],
|
||||
/// vis: Inherited,
|
||||
/// use_token: Use,
|
||||
/// leading_colon: None,
|
||||
/// tree: Path(
|
||||
/// UsePath {
|
||||
/// ident: Ident(
|
||||
/// std,
|
||||
/// ),
|
||||
/// colon2_token: Colon2,
|
||||
/// tree: Name(
|
||||
/// UseName {
|
||||
/// ident: Ident(
|
||||
/// env,
|
||||
/// ),
|
||||
/// },
|
||||
/// ),
|
||||
/// },
|
||||
/// ),
|
||||
/// semi_token: Semi,
|
||||
/// },
|
||||
/// ),
|
||||
/// ...
|
||||
/// ```
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "full")))]
|
||||
pub struct File {
|
||||
pub shebang: Option<String>,
|
||||
pub attrs: Vec<Attribute>,
|
||||
pub items: Vec<Item>,
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(feature = "parsing")]
|
||||
pub(crate) mod parsing {
|
||||
use super::*;
|
||||
use crate::parse::{Parse, ParseStream, Result};
|
||||
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
|
||||
impl Parse for File {
|
||||
fn parse(input: ParseStream) -> Result<Self> {
|
||||
Ok(File {
|
||||
shebang: None,
|
||||
attrs: input.call(Attribute::parse_inner)?,
|
||||
items: {
|
||||
let mut items = Vec::new();
|
||||
while !input.is_empty() {
|
||||
items.push(input.parse()?);
|
||||
}
|
||||
items
|
||||
},
|
||||
})
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(feature = "printing")]
|
||||
mod printing {
|
||||
use super::*;
|
||||
use crate::attr::FilterAttrs;
|
||||
use proc_macro2::TokenStream;
|
||||
use quote::{ToTokens, TokenStreamExt};
|
||||
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))]
|
||||
impl ToTokens for File {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
tokens.append_all(self.attrs.inner());
|
||||
tokens.append_all(&self.items);
|
||||
}
|
||||
}
|
||||
}
|
||||
2181
vendor/syn/src/gen/clone.rs
vendored
Normal file
2181
vendor/syn/src/gen/clone.rs
vendored
Normal file
File diff suppressed because it is too large
Load Diff
3052
vendor/syn/src/gen/debug.rs
vendored
Normal file
3052
vendor/syn/src/gen/debug.rs
vendored
Normal file
File diff suppressed because it is too large
Load Diff
2148
vendor/syn/src/gen/eq.rs
vendored
Normal file
2148
vendor/syn/src/gen/eq.rs
vendored
Normal file
File diff suppressed because it is too large
Load Diff
3459
vendor/syn/src/gen/fold.rs
vendored
Normal file
3459
vendor/syn/src/gen/fold.rs
vendored
Normal file
File diff suppressed because it is too large
Load Diff
2804
vendor/syn/src/gen/hash.rs
vendored
Normal file
2804
vendor/syn/src/gen/hash.rs
vendored
Normal file
File diff suppressed because it is too large
Load Diff
3844
vendor/syn/src/gen/visit.rs
vendored
Normal file
3844
vendor/syn/src/gen/visit.rs
vendored
Normal file
File diff suppressed because it is too large
Load Diff
3847
vendor/syn/src/gen/visit_mut.rs
vendored
Normal file
3847
vendor/syn/src/gen/visit_mut.rs
vendored
Normal file
File diff suppressed because it is too large
Load Diff
34
vendor/syn/src/gen_helper.rs
vendored
Normal file
34
vendor/syn/src/gen_helper.rs
vendored
Normal file
@@ -0,0 +1,34 @@
|
||||
#[cfg(feature = "fold")]
|
||||
pub(crate) mod fold {
|
||||
use crate::punctuated::{Pair, Punctuated};
|
||||
|
||||
pub(crate) trait FoldHelper {
|
||||
type Item;
|
||||
fn lift<F>(self, f: F) -> Self
|
||||
where
|
||||
F: FnMut(Self::Item) -> Self::Item;
|
||||
}
|
||||
|
||||
impl<T> FoldHelper for Vec<T> {
|
||||
type Item = T;
|
||||
fn lift<F>(self, f: F) -> Self
|
||||
where
|
||||
F: FnMut(Self::Item) -> Self::Item,
|
||||
{
|
||||
self.into_iter().map(f).collect()
|
||||
}
|
||||
}
|
||||
|
||||
impl<T, U> FoldHelper for Punctuated<T, U> {
|
||||
type Item = T;
|
||||
fn lift<F>(self, mut f: F) -> Self
|
||||
where
|
||||
F: FnMut(Self::Item) -> Self::Item,
|
||||
{
|
||||
self.into_pairs()
|
||||
.map(Pair::into_tuple)
|
||||
.map(|(t, u)| Pair::new(f(t), u))
|
||||
.collect()
|
||||
}
|
||||
}
|
||||
}
|
||||
1227
vendor/syn/src/generics.rs
vendored
Normal file
1227
vendor/syn/src/generics.rs
vendored
Normal file
File diff suppressed because it is too large
Load Diff
291
vendor/syn/src/group.rs
vendored
Normal file
291
vendor/syn/src/group.rs
vendored
Normal file
@@ -0,0 +1,291 @@
|
||||
use crate::error::Result;
|
||||
use crate::parse::ParseBuffer;
|
||||
use crate::token;
|
||||
use proc_macro2::extra::DelimSpan;
|
||||
use proc_macro2::Delimiter;
|
||||
|
||||
// Not public API.
|
||||
#[doc(hidden)]
|
||||
pub struct Parens<'a> {
|
||||
#[doc(hidden)]
|
||||
pub token: token::Paren,
|
||||
#[doc(hidden)]
|
||||
pub content: ParseBuffer<'a>,
|
||||
}
|
||||
|
||||
// Not public API.
|
||||
#[doc(hidden)]
|
||||
pub struct Braces<'a> {
|
||||
#[doc(hidden)]
|
||||
pub token: token::Brace,
|
||||
#[doc(hidden)]
|
||||
pub content: ParseBuffer<'a>,
|
||||
}
|
||||
|
||||
// Not public API.
|
||||
#[doc(hidden)]
|
||||
pub struct Brackets<'a> {
|
||||
#[doc(hidden)]
|
||||
pub token: token::Bracket,
|
||||
#[doc(hidden)]
|
||||
pub content: ParseBuffer<'a>,
|
||||
}
|
||||
|
||||
// Not public API.
|
||||
#[cfg(any(feature = "full", feature = "derive"))]
|
||||
#[doc(hidden)]
|
||||
pub struct Group<'a> {
|
||||
#[doc(hidden)]
|
||||
pub token: token::Group,
|
||||
#[doc(hidden)]
|
||||
pub content: ParseBuffer<'a>,
|
||||
}
|
||||
|
||||
// Not public API.
|
||||
#[doc(hidden)]
|
||||
pub fn parse_parens<'a>(input: &ParseBuffer<'a>) -> Result<Parens<'a>> {
|
||||
parse_delimited(input, Delimiter::Parenthesis).map(|(span, content)| Parens {
|
||||
token: token::Paren(span),
|
||||
content,
|
||||
})
|
||||
}
|
||||
|
||||
// Not public API.
|
||||
#[doc(hidden)]
|
||||
pub fn parse_braces<'a>(input: &ParseBuffer<'a>) -> Result<Braces<'a>> {
|
||||
parse_delimited(input, Delimiter::Brace).map(|(span, content)| Braces {
|
||||
token: token::Brace(span),
|
||||
content,
|
||||
})
|
||||
}
|
||||
|
||||
// Not public API.
|
||||
#[doc(hidden)]
|
||||
pub fn parse_brackets<'a>(input: &ParseBuffer<'a>) -> Result<Brackets<'a>> {
|
||||
parse_delimited(input, Delimiter::Bracket).map(|(span, content)| Brackets {
|
||||
token: token::Bracket(span),
|
||||
content,
|
||||
})
|
||||
}
|
||||
|
||||
#[cfg(any(feature = "full", feature = "derive"))]
|
||||
pub(crate) fn parse_group<'a>(input: &ParseBuffer<'a>) -> Result<Group<'a>> {
|
||||
parse_delimited(input, Delimiter::None).map(|(span, content)| Group {
|
||||
token: token::Group(span.join()),
|
||||
content,
|
||||
})
|
||||
}
|
||||
|
||||
fn parse_delimited<'a>(
|
||||
input: &ParseBuffer<'a>,
|
||||
delimiter: Delimiter,
|
||||
) -> Result<(DelimSpan, ParseBuffer<'a>)> {
|
||||
input.step(|cursor| {
|
||||
if let Some((content, span, rest)) = cursor.group(delimiter) {
|
||||
let scope = crate::buffer::close_span_of_group(*cursor);
|
||||
let nested = crate::parse::advance_step_cursor(cursor, content);
|
||||
let unexpected = crate::parse::get_unexpected(input);
|
||||
let content = crate::parse::new_parse_buffer(scope, nested, unexpected);
|
||||
Ok(((span, content), rest))
|
||||
} else {
|
||||
let message = match delimiter {
|
||||
Delimiter::Parenthesis => "expected parentheses",
|
||||
Delimiter::Brace => "expected curly braces",
|
||||
Delimiter::Bracket => "expected square brackets",
|
||||
Delimiter::None => "expected invisible group",
|
||||
};
|
||||
Err(cursor.error(message))
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
/// Parse a set of parentheses and expose their content to subsequent parsers.
///
/// # Example
///
/// ```
/// # use quote::quote;
/// #
/// use syn::{parenthesized, token, Ident, Result, Token, Type};
/// use syn::parse::{Parse, ParseStream};
/// use syn::punctuated::Punctuated;
///
/// // Parse a simplified tuple struct syntax like:
/// //
/// //     struct S(A, B);
/// struct TupleStruct {
///     struct_token: Token![struct],
///     ident: Ident,
///     paren_token: token::Paren,
///     fields: Punctuated<Type, Token![,]>,
///     semi_token: Token![;],
/// }
///
/// impl Parse for TupleStruct {
///     fn parse(input: ParseStream) -> Result<Self> {
///         let content;
///         Ok(TupleStruct {
///             struct_token: input.parse()?,
///             ident: input.parse()?,
///             paren_token: parenthesized!(content in input),
///             fields: content.parse_terminated(Type::parse, Token![,])?,
///             semi_token: input.parse()?,
///         })
///     }
/// }
/// #
/// # fn main() {
/// #     let input = quote! {
/// #         struct S(A, B);
/// #     };
/// #     syn::parse2::<TupleStruct>(input).unwrap();
/// # }
/// ```
#[macro_export]
#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
macro_rules! parenthesized {
    ($content:ident in $cursor:expr) => {
        // On success, bind the interior buffer to the caller's `$content`
        // variable and evaluate to the paren token; on failure, propagate the
        // error out of the caller's (Result-returning) function.
        match $crate::__private::parse_parens(&$cursor) {
            $crate::__private::Ok(parens) => {
                $content = parens.content;
                parens.token
            }
            $crate::__private::Err(error) => {
                return $crate::__private::Err(error);
            }
        }
    };
}
|
||||
|
||||
/// Parse a set of curly braces and expose their content to subsequent parsers.
///
/// # Example
///
/// ```
/// # use quote::quote;
/// #
/// use syn::{braced, token, Ident, Result, Token, Type};
/// use syn::parse::{Parse, ParseStream};
/// use syn::punctuated::Punctuated;
///
/// // Parse a simplified struct syntax like:
/// //
/// //     struct S {
/// //         a: A,
/// //         b: B,
/// //     }
/// struct Struct {
///     struct_token: Token![struct],
///     ident: Ident,
///     brace_token: token::Brace,
///     fields: Punctuated<Field, Token![,]>,
/// }
///
/// struct Field {
///     name: Ident,
///     colon_token: Token![:],
///     ty: Type,
/// }
///
/// impl Parse for Struct {
///     fn parse(input: ParseStream) -> Result<Self> {
///         let content;
///         Ok(Struct {
///             struct_token: input.parse()?,
///             ident: input.parse()?,
///             brace_token: braced!(content in input),
///             fields: content.parse_terminated(Field::parse, Token![,])?,
///         })
///     }
/// }
///
/// impl Parse for Field {
///     fn parse(input: ParseStream) -> Result<Self> {
///         Ok(Field {
///             name: input.parse()?,
///             colon_token: input.parse()?,
///             ty: input.parse()?,
///         })
///     }
/// }
/// #
/// # fn main() {
/// #     let input = quote! {
/// #         struct S {
/// #             a: A,
/// #             b: B,
/// #         }
/// #     };
/// #     syn::parse2::<Struct>(input).unwrap();
/// # }
/// ```
#[macro_export]
#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
macro_rules! braced {
    ($content:ident in $cursor:expr) => {
        // On success, bind the interior buffer to `$content` and evaluate to
        // the brace token; on failure, return the error from the caller.
        match $crate::__private::parse_braces(&$cursor) {
            $crate::__private::Ok(braces) => {
                $content = braces.content;
                braces.token
            }
            $crate::__private::Err(error) => {
                return $crate::__private::Err(error);
            }
        }
    };
}
|
||||
|
||||
/// Parse a set of square brackets and expose their content to subsequent
/// parsers.
///
/// # Example
///
/// ```
/// # use quote::quote;
/// #
/// use proc_macro2::TokenStream;
/// use syn::{bracketed, token, Result, Token};
/// use syn::parse::{Parse, ParseStream};
///
/// // Parse an outer attribute like:
/// //
/// //     #[repr(C, packed)]
/// struct OuterAttribute {
///     pound_token: Token![#],
///     bracket_token: token::Bracket,
///     content: TokenStream,
/// }
///
/// impl Parse for OuterAttribute {
///     fn parse(input: ParseStream) -> Result<Self> {
///         let content;
///         Ok(OuterAttribute {
///             pound_token: input.parse()?,
///             bracket_token: bracketed!(content in input),
///             content: content.parse()?,
///         })
///     }
/// }
/// #
/// # fn main() {
/// #     let input = quote! {
/// #         #[repr(C, packed)]
/// #     };
/// #     syn::parse2::<OuterAttribute>(input).unwrap();
/// # }
/// ```
#[macro_export]
#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
macro_rules! bracketed {
    ($content:ident in $cursor:expr) => {
        // On success, bind the interior buffer to `$content` and evaluate to
        // the bracket token; on failure, return the error from the caller.
        match $crate::__private::parse_brackets(&$cursor) {
            $crate::__private::Ok(brackets) => {
                $content = brackets.content;
                brackets.token
            }
            $crate::__private::Err(error) => {
                return $crate::__private::Err(error);
            }
        }
    };
}
|
||||
107
vendor/syn/src/ident.rs
vendored
Normal file
107
vendor/syn/src/ident.rs
vendored
Normal file
@@ -0,0 +1,107 @@
|
||||
#[cfg(feature = "parsing")]
use crate::lookahead;

pub use proc_macro2::Ident;

#[cfg(feature = "parsing")]
pub_if_not_doc! {
    // Peek-marker function: lets callers write `input.peek(Ident)`. The
    // uninhabited `TokenMarker` argument guarantees it can never actually be
    // called; only its type signature is used by the `Peek` machinery.
    #[doc(hidden)]
    #[allow(non_snake_case)]
    pub fn Ident(marker: lookahead::TokenMarker) -> Ident {
        match marker {}
    }
}

// Conversions from path-keyword tokens to the equivalent Ident, preserving
// the token's span.
macro_rules! ident_from_token {
    ($token:ident) => {
        impl From<Token![$token]> for Ident {
            fn from(token: Token![$token]) -> Ident {
                Ident::new(stringify!($token), token.span)
            }
        }
    };
}

ident_from_token!(self);
ident_from_token!(Self);
ident_from_token!(super);
ident_from_token!(crate);
ident_from_token!(extern);

// `_` is not an ident token in proc-macro2, but syn allows converting the
// underscore token into an Ident spelled "_".
impl From<Token![_]> for Ident {
    fn from(token: Token![_]) -> Ident {
        Ident::new("_", token.span)
    }
}
|
||||
|
||||
pub(crate) fn xid_ok(symbol: &str) -> bool {
|
||||
let mut chars = symbol.chars();
|
||||
let first = chars.next().unwrap();
|
||||
if !(first == '_' || unicode_ident::is_xid_start(first)) {
|
||||
return false;
|
||||
}
|
||||
for ch in chars {
|
||||
if !unicode_ident::is_xid_continue(ch) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
true
|
||||
}
|
||||
|
||||
#[cfg(feature = "parsing")]
mod parsing {
    use crate::buffer::Cursor;
    use crate::parse::{Parse, ParseStream, Result};
    use crate::token::Token;
    use proc_macro2::Ident;

    // An ident token is accepted as an `Ident` only if it is not a reserved
    // word of the language ("_" and all keywords are rejected).
    fn accept_as_ident(ident: &Ident) -> bool {
        match ident.to_string().as_str() {
            "_" |
            // Based on https://doc.rust-lang.org/1.65.0/reference/keywords.html
            "abstract" | "as" | "async" | "await" | "become" | "box" | "break" |
            "const" | "continue" | "crate" | "do" | "dyn" | "else" | "enum" |
            "extern" | "false" | "final" | "fn" | "for" | "if" | "impl" | "in" |
            "let" | "loop" | "macro" | "match" | "mod" | "move" | "mut" |
            "override" | "priv" | "pub" | "ref" | "return" | "Self" | "self" |
            "static" | "struct" | "super" | "trait" | "true" | "try" | "type" |
            "typeof" | "unsafe" | "unsized" | "use" | "virtual" | "where" |
            "while" | "yield" => false,
            _ => true,
        }
    }

    #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
    impl Parse for Ident {
        fn parse(input: ParseStream) -> Result<Self> {
            input.step(|cursor| {
                if let Some((ident, rest)) = cursor.ident() {
                    if accept_as_ident(&ident) {
                        Ok((ident, rest))
                    } else {
                        Err(cursor.error(format_args!(
                            "expected identifier, found keyword `{}`",
                            ident,
                        )))
                    }
                } else {
                    Err(cursor.error("expected identifier"))
                }
            })
        }
    }

    impl Token for Ident {
        // Non-consuming check used by `input.peek(Ident)`: true only for a
        // non-keyword ident at the cursor.
        fn peek(cursor: Cursor) -> bool {
            if let Some((ident, _rest)) = cursor.ident() {
                accept_as_ident(&ident)
            } else {
                false
            }
        }

        fn display() -> &'static str {
            "identifier"
        }
    }
}
|
||||
3404
vendor/syn/src/item.rs
vendored
Normal file
3404
vendor/syn/src/item.rs
vendored
Normal file
File diff suppressed because it is too large
Load Diff
1010
vendor/syn/src/lib.rs
vendored
Normal file
1010
vendor/syn/src/lib.rs
vendored
Normal file
File diff suppressed because it is too large
Load Diff
156
vendor/syn/src/lifetime.rs
vendored
Normal file
156
vendor/syn/src/lifetime.rs
vendored
Normal file
@@ -0,0 +1,156 @@
|
||||
use proc_macro2::{Ident, Span};
|
||||
use std::cmp::Ordering;
|
||||
use std::fmt::{self, Display};
|
||||
use std::hash::{Hash, Hasher};
|
||||
|
||||
#[cfg(feature = "parsing")]
|
||||
use crate::lookahead;
|
||||
|
||||
/// A Rust lifetime: `'a`.
///
/// Lifetime names must conform to the following rules:
///
/// - Must start with an apostrophe.
/// - Must not consist of just an apostrophe: `'`.
/// - Character after the apostrophe must be `_` or a Unicode code point with
///   the XID_Start property.
/// - All following characters must be Unicode code points with the XID_Continue
///   property.
pub struct Lifetime {
    // Span of the apostrophe character only; `ident` carries its own span.
    pub apostrophe: Span,
    // The name after the apostrophe, e.g. `a` for the lifetime `'a`.
    pub ident: Ident,
}
|
||||
|
||||
impl Lifetime {
    /// # Panics
    ///
    /// Panics if the lifetime does not conform to the bulleted rules above.
    ///
    /// # Invocation
    ///
    /// ```
    /// # use proc_macro2::Span;
    /// # use syn::Lifetime;
    /// #
    /// # fn f() -> Lifetime {
    /// Lifetime::new("'a", Span::call_site())
    /// # }
    /// ```
    pub fn new(symbol: &str, span: Span) -> Self {
        if !symbol.starts_with('\'') {
            panic!(
                "lifetime name must start with apostrophe as in \"'a\", got {:?}",
                symbol
            );
        }

        if symbol == "'" {
            panic!("lifetime name must not be empty");
        }

        // Everything after the apostrophe must be a valid identifier.
        if !crate::ident::xid_ok(&symbol[1..]) {
            panic!("{:?} is not a valid lifetime name", symbol);
        }

        Lifetime {
            apostrophe: span,
            ident: Ident::new(&symbol[1..], span),
        }
    }

    // Combined span of the apostrophe and the name; falls back to just the
    // apostrophe's span when joining is unsupported (non-nightly compilers).
    pub fn span(&self) -> Span {
        self.apostrophe
            .join(self.ident.span())
            .unwrap_or(self.apostrophe)
    }

    pub fn set_span(&mut self, span: Span) {
        self.apostrophe = span;
        self.ident.set_span(span);
    }
}
|
||||
|
||||
impl Display for Lifetime {
|
||||
fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
||||
"'".fmt(formatter)?;
|
||||
self.ident.fmt(formatter)
|
||||
}
|
||||
}
|
||||
|
||||
impl Clone for Lifetime {
|
||||
fn clone(&self) -> Self {
|
||||
Lifetime {
|
||||
apostrophe: self.apostrophe,
|
||||
ident: self.ident.clone(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl PartialEq for Lifetime {
|
||||
fn eq(&self, other: &Lifetime) -> bool {
|
||||
self.ident.eq(&other.ident)
|
||||
}
|
||||
}
|
||||
|
||||
impl Eq for Lifetime {}
|
||||
|
||||
impl PartialOrd for Lifetime {
|
||||
fn partial_cmp(&self, other: &Lifetime) -> Option<Ordering> {
|
||||
Some(self.cmp(other))
|
||||
}
|
||||
}
|
||||
|
||||
impl Ord for Lifetime {
|
||||
fn cmp(&self, other: &Lifetime) -> Ordering {
|
||||
self.ident.cmp(&other.ident)
|
||||
}
|
||||
}
|
||||
|
||||
impl Hash for Lifetime {
|
||||
fn hash<H: Hasher>(&self, h: &mut H) {
|
||||
self.ident.hash(h);
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(feature = "parsing")]
pub_if_not_doc! {
    // Peek-marker function: lets callers write `input.peek(Lifetime)`. Never
    // callable — the `TokenMarker` argument is an uninhabited enum.
    #[doc(hidden)]
    #[allow(non_snake_case)]
    pub fn Lifetime(marker: lookahead::TokenMarker) -> Lifetime {
        match marker {}
    }
}

#[cfg(feature = "parsing")]
pub(crate) mod parsing {
    use super::*;
    use crate::parse::{Parse, ParseStream, Result};

    #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
    impl Parse for Lifetime {
        fn parse(input: ParseStream) -> Result<Self> {
            input.step(|cursor| {
                cursor
                    .lifetime()
                    .ok_or_else(|| cursor.error("expected lifetime"))
            })
        }
    }
}
|
||||
|
||||
#[cfg(feature = "printing")]
mod printing {
    use super::*;
    use proc_macro2::{Punct, Spacing, TokenStream};
    use quote::{ToTokens, TokenStreamExt};

    #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))]
    impl ToTokens for Lifetime {
        fn to_tokens(&self, tokens: &mut TokenStream) {
            // Joint spacing glues the apostrophe to the following ident so
            // the pair round-trips as a lifetime token.
            let mut apostrophe = Punct::new('\'', Spacing::Joint);
            apostrophe.set_span(self.apostrophe);
            tokens.append(apostrophe);
            self.ident.to_tokens(tokens);
        }
    }
}
|
||||
1651
vendor/syn/src/lit.rs
vendored
Normal file
1651
vendor/syn/src/lit.rs
vendored
Normal file
File diff suppressed because it is too large
Load Diff
169
vendor/syn/src/lookahead.rs
vendored
Normal file
169
vendor/syn/src/lookahead.rs
vendored
Normal file
@@ -0,0 +1,169 @@
|
||||
use crate::buffer::Cursor;
|
||||
use crate::error::{self, Error};
|
||||
use crate::sealed::lookahead::Sealed;
|
||||
use crate::span::IntoSpans;
|
||||
use crate::token::Token;
|
||||
use proc_macro2::{Delimiter, Span};
|
||||
use std::cell::RefCell;
|
||||
|
||||
/// Support for checking the next token in a stream to decide how to parse.
|
||||
///
|
||||
/// An important advantage over [`ParseStream::peek`] is that here we
|
||||
/// automatically construct an appropriate error message based on the token
|
||||
/// alternatives that get peeked. If you are producing your own error message,
|
||||
/// go ahead and use `ParseStream::peek` instead.
|
||||
///
|
||||
/// Use [`ParseStream::lookahead1`] to construct this object.
|
||||
///
|
||||
/// [`ParseStream::peek`]: crate::parse::ParseBuffer::peek
|
||||
/// [`ParseStream::lookahead1`]: crate::parse::ParseBuffer::lookahead1
|
||||
///
|
||||
/// Consuming tokens from the source stream after constructing a lookahead
|
||||
/// object does not also advance the lookahead object.
|
||||
///
|
||||
/// # Example
|
||||
///
|
||||
/// ```
|
||||
/// use syn::{ConstParam, Ident, Lifetime, LifetimeParam, Result, Token, TypeParam};
|
||||
/// use syn::parse::{Parse, ParseStream};
|
||||
///
|
||||
/// // A generic parameter, a single one of the comma-separated elements inside
|
||||
/// // angle brackets in:
|
||||
/// //
|
||||
/// // fn f<T: Clone, 'a, 'b: 'a, const N: usize>() { ... }
|
||||
/// //
|
||||
/// // On invalid input, lookahead gives us a reasonable error message.
|
||||
/// //
|
||||
/// // error: expected one of: identifier, lifetime, `const`
|
||||
/// // |
|
||||
/// // 5 | fn f<!Sized>() {}
|
||||
/// // | ^
|
||||
/// enum GenericParam {
|
||||
/// Type(TypeParam),
|
||||
/// Lifetime(LifetimeParam),
|
||||
/// Const(ConstParam),
|
||||
/// }
|
||||
///
|
||||
/// impl Parse for GenericParam {
|
||||
/// fn parse(input: ParseStream) -> Result<Self> {
|
||||
/// let lookahead = input.lookahead1();
|
||||
/// if lookahead.peek(Ident) {
|
||||
/// input.parse().map(GenericParam::Type)
|
||||
/// } else if lookahead.peek(Lifetime) {
|
||||
/// input.parse().map(GenericParam::Lifetime)
|
||||
/// } else if lookahead.peek(Token![const]) {
|
||||
/// input.parse().map(GenericParam::Const)
|
||||
/// } else {
|
||||
/// Err(lookahead.error())
|
||||
/// }
|
||||
/// }
|
||||
/// }
|
||||
/// ```
|
||||
pub struct Lookahead1<'a> {
    // Span reported for "unexpected end of input" errors.
    scope: Span,
    // Position in the token stream captured when the lookahead was created;
    // consuming tokens from the source stream does not advance this.
    cursor: Cursor<'a>,
    // Display strings of every token type peeked so far that did not match,
    // accumulated for the combined message produced by `error()`.
    comparisons: RefCell<Vec<&'static str>>,
}
|
||||
|
||||
pub(crate) fn new(scope: Span, cursor: Cursor) -> Lookahead1 {
|
||||
Lookahead1 {
|
||||
scope,
|
||||
cursor,
|
||||
comparisons: RefCell::new(Vec::new()),
|
||||
}
|
||||
}
|
||||
|
||||
fn peek_impl(
|
||||
lookahead: &Lookahead1,
|
||||
peek: fn(Cursor) -> bool,
|
||||
display: fn() -> &'static str,
|
||||
) -> bool {
|
||||
if peek(lookahead.cursor) {
|
||||
return true;
|
||||
}
|
||||
lookahead.comparisons.borrow_mut().push(display());
|
||||
false
|
||||
}
|
||||
|
||||
impl<'a> Lookahead1<'a> {
    /// Looks at the next token in the parse stream to determine whether it
    /// matches the requested type of token.
    ///
    /// # Syntax
    ///
    /// Note that this method does not use turbofish syntax. Pass the peek type
    /// inside of parentheses.
    ///
    /// - `input.peek(Token![struct])`
    /// - `input.peek(Token![==])`
    /// - `input.peek(Ident)` *(does not accept keywords)*
    /// - `input.peek(Ident::peek_any)`
    /// - `input.peek(Lifetime)`
    /// - `input.peek(token::Brace)`
    pub fn peek<T: Peek>(&self, token: T) -> bool {
        // `token` is only a type-level marker; the real work happens through
        // the associated `T::Token`'s peek/display function pointers.
        let _ = token;
        peek_impl(self, T::Token::peek, T::Token::display)
    }

    /// Triggers an error at the current position of the parse stream.
    ///
    /// The error message will identify all of the expected token types that
    /// have been peeked against this lookahead instance.
    pub fn error(self) -> Error {
        let comparisons = self.comparisons.borrow();
        // Message shape depends on how many alternatives were peeked:
        // none, one, two, or a comma-separated list.
        match comparisons.len() {
            0 => {
                if self.cursor.eof() {
                    Error::new(self.scope, "unexpected end of input")
                } else {
                    Error::new(self.cursor.span(), "unexpected token")
                }
            }
            1 => {
                let message = format!("expected {}", comparisons[0]);
                error::new_at(self.scope, self.cursor, message)
            }
            2 => {
                let message = format!("expected {} or {}", comparisons[0], comparisons[1]);
                error::new_at(self.scope, self.cursor, message)
            }
            _ => {
                let join = comparisons.join(", ");
                let message = format!("expected one of: {}", join);
                error::new_at(self.scope, self.cursor, message)
            }
        }
    }
}
|
||||
|
||||
/// Types that can be parsed by looking at just one token.
///
/// Use [`ParseStream::peek`] to peek one of these types in a parse stream
/// without consuming it from the stream.
///
/// This trait is sealed and cannot be implemented for types outside of Syn.
///
/// [`ParseStream::peek`]: crate::parse::ParseBuffer::peek
pub trait Peek: Sealed {
    // Not public API.
    #[doc(hidden)]
    type Token: Token;
}

// Blanket impl covering the marker functions like `Ident(TokenMarker)` and the
// `Token![...]` constructors: any copyable fn-like value returning a Token
// type can serve as a peek argument.
impl<F: Copy + FnOnce(TokenMarker) -> T, T: Token> Peek for F {
    type Token = T;
}

// Uninhabited type: guarantees peek-marker functions can never be called.
pub enum TokenMarker {}

impl<S> IntoSpans<S> for TokenMarker {
    fn into_spans(self) -> S {
        match self {}
    }
}
|
||||
|
||||
pub(crate) fn is_delimiter(cursor: Cursor, delimiter: Delimiter) -> bool {
|
||||
cursor.group(delimiter).is_some()
|
||||
}
|
||||
|
||||
impl<F: Copy + FnOnce(TokenMarker) -> T, T: Token> Sealed for F {}
|
||||
211
vendor/syn/src/mac.rs
vendored
Normal file
211
vendor/syn/src/mac.rs
vendored
Normal file
@@ -0,0 +1,211 @@
|
||||
use super::*;
|
||||
use crate::token::{Brace, Bracket, Paren};
|
||||
use proc_macro2::extra::DelimSpan;
|
||||
#[cfg(any(feature = "parsing", feature = "printing"))]
|
||||
use proc_macro2::Delimiter;
|
||||
use proc_macro2::TokenStream;
|
||||
#[cfg(feature = "parsing")]
|
||||
use proc_macro2::TokenTree;
|
||||
|
||||
#[cfg(feature = "parsing")]
|
||||
use crate::parse::{Parse, ParseStream, Parser, Result};
|
||||
|
||||
ast_struct! {
    /// A macro invocation: `println!("{}", mac)`.
    #[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))]
    pub struct Macro {
        pub path: Path,
        pub bang_token: Token![!],
        pub delimiter: MacroDelimiter,
        // The raw tokens inside the delimiters, not yet parsed into any
        // particular grammar; see `parse_body` / `parse_body_with`.
        pub tokens: TokenStream,
    }
}

ast_enum! {
    /// A grouping token that surrounds a macro body: `m!(...)` or `m!{...}` or `m![...]`.
    #[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))]
    pub enum MacroDelimiter {
        Paren(Paren),
        Brace(Brace),
        Bracket(Bracket),
    }
}
|
||||
|
||||
impl MacroDelimiter {
|
||||
pub fn span(&self) -> &DelimSpan {
|
||||
match self {
|
||||
MacroDelimiter::Paren(token) => &token.span,
|
||||
MacroDelimiter::Brace(token) => &token.span,
|
||||
MacroDelimiter::Bracket(token) => &token.span,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Macro {
    /// Parse the tokens within the macro invocation's delimiters into a syntax
    /// tree.
    ///
    /// This is equivalent to `syn::parse2::<T>(mac.tokens)` except that it
    /// produces a more useful span when `tokens` is empty.
    ///
    /// # Example
    ///
    /// ```
    /// use syn::{parse_quote, Expr, ExprLit, Ident, Lit, LitStr, Macro, Token};
    /// use syn::ext::IdentExt;
    /// use syn::parse::{Error, Parse, ParseStream, Result};
    /// use syn::punctuated::Punctuated;
    ///
    /// // The arguments expected by libcore's format_args macro, and as a
    /// // result most other formatting and printing macros like println.
    /// //
    /// //     println!("{} is {number:.prec$}", "x", prec=5, number=0.01)
    /// struct FormatArgs {
    ///     format_string: Expr,
    ///     positional_args: Vec<Expr>,
    ///     named_args: Vec<(Ident, Expr)>,
    /// }
    ///
    /// impl Parse for FormatArgs {
    ///     fn parse(input: ParseStream) -> Result<Self> {
    ///         let format_string: Expr;
    ///         let mut positional_args = Vec::new();
    ///         let mut named_args = Vec::new();
    ///
    ///         format_string = input.parse()?;
    ///         while !input.is_empty() {
    ///             input.parse::<Token![,]>()?;
    ///             if input.is_empty() {
    ///                 break;
    ///             }
    ///             if input.peek(Ident::peek_any) && input.peek2(Token![=]) {
    ///                 while !input.is_empty() {
    ///                     let name: Ident = input.call(Ident::parse_any)?;
    ///                     input.parse::<Token![=]>()?;
    ///                     let value: Expr = input.parse()?;
    ///                     named_args.push((name, value));
    ///                     if input.is_empty() {
    ///                         break;
    ///                     }
    ///                     input.parse::<Token![,]>()?;
    ///                 }
    ///                 break;
    ///             }
    ///             positional_args.push(input.parse()?);
    ///         }
    ///
    ///         Ok(FormatArgs {
    ///             format_string,
    ///             positional_args,
    ///             named_args,
    ///         })
    ///     }
    /// }
    ///
    /// // Extract the first argument, the format string literal, from an
    /// // invocation of a formatting or printing macro.
    /// fn get_format_string(m: &Macro) -> Result<LitStr> {
    ///     let args: FormatArgs = m.parse_body()?;
    ///     match args.format_string {
    ///         Expr::Lit(ExprLit { lit: Lit::Str(lit), .. }) => Ok(lit),
    ///         other => {
    ///             // First argument was not a string literal expression.
    ///             // Maybe something like: println!(concat!(...), ...)
    ///             Err(Error::new_spanned(other, "format string must be a string literal"))
    ///         }
    ///     }
    /// }
    ///
    /// fn main() {
    ///     let invocation = parse_quote! {
    ///         println!("{:?}", Instant::now())
    ///     };
    ///     let lit = get_format_string(&invocation).unwrap();
    ///     assert_eq!(lit.value(), "{:?}");
    /// }
    /// ```
    #[cfg(feature = "parsing")]
    #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
    pub fn parse_body<T: Parse>(&self) -> Result<T> {
        self.parse_body_with(T::parse)
    }

    /// Parse the tokens within the macro invocation's delimiters using the
    /// given parser.
    #[cfg(feature = "parsing")]
    #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
    pub fn parse_body_with<F: Parser>(&self, parser: F) -> Result<F::Output> {
        // Scope errors to the closing delimiter so "unexpected end of input"
        // points at the macro body rather than the call site.
        let scope = self.delimiter.span().close();
        crate::parse::parse_scoped(parser, scope, self.tokens.clone())
    }
}
|
||||
|
||||
// Consume one delimited group from the stream and classify its delimiter,
// returning the delimiter token plus the group's interior token stream.
// Invisible (None-delimited) groups are rejected: a macro body must be written
// with explicit parentheses, braces, or brackets.
#[cfg(feature = "parsing")]
pub(crate) fn parse_delimiter(input: ParseStream) -> Result<(MacroDelimiter, TokenStream)> {
    input.step(|cursor| {
        let (g, rest) = match cursor.token_tree() {
            Some((TokenTree::Group(g), rest)) => (g, rest),
            _ => return Err(cursor.error("expected delimiter")),
        };
        let span = g.delim_span();
        let delimiter = match g.delimiter() {
            Delimiter::Parenthesis => MacroDelimiter::Paren(Paren(span)),
            Delimiter::Brace => MacroDelimiter::Brace(Brace(span)),
            Delimiter::Bracket => MacroDelimiter::Bracket(Bracket(span)),
            Delimiter::None => return Err(cursor.error("expected delimiter")),
        };
        Ok(((delimiter, g.stream()), rest))
    })
}
|
||||
|
||||
#[cfg(feature = "parsing")]
pub(crate) mod parsing {
    use super::*;
    use crate::parse::{Parse, ParseStream, Result};

    #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
    impl Parse for Macro {
        fn parse(input: ParseStream) -> Result<Self> {
            let tokens;
            Ok(Macro {
                // `parse_mod_style` disallows generic arguments in the path,
                // matching what rustc accepts before a `!`.
                path: input.call(Path::parse_mod_style)?,
                bang_token: input.parse()?,
                delimiter: {
                    // parse_delimiter yields both the delimiter token and the
                    // body tokens; stash the body in the outer binding.
                    let (delimiter, content) = parse_delimiter(input)?;
                    tokens = content;
                    delimiter
                },
                tokens,
            })
        }
    }
}
|
||||
|
||||
#[cfg(feature = "printing")]
mod printing {
    use super::*;
    use proc_macro2::TokenStream;
    use quote::ToTokens;

    impl MacroDelimiter {
        // Emit `inner` wrapped in this delimiter, reusing the stored spans.
        pub(crate) fn surround(&self, tokens: &mut TokenStream, inner: TokenStream) {
            let (delim, span) = match self {
                MacroDelimiter::Paren(paren) => (Delimiter::Parenthesis, paren.span),
                MacroDelimiter::Brace(brace) => (Delimiter::Brace, brace.span),
                MacroDelimiter::Bracket(bracket) => (Delimiter::Bracket, bracket.span),
            };
            token::printing::delim(delim, span.join(), tokens, inner);
        }
    }

    #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))]
    impl ToTokens for Macro {
        fn to_tokens(&self, tokens: &mut TokenStream) {
            self.path.to_tokens(tokens);
            self.bang_token.to_tokens(tokens);
            self.delimiter.surround(tokens, self.tokens.clone());
        }
    }
}
|
||||
176
vendor/syn/src/macros.rs
vendored
Normal file
176
vendor/syn/src/macros.rs
vendored
Normal file
@@ -0,0 +1,176 @@
|
||||
// Declares a syntax-tree struct. The optional `#full` marker means the struct
// only carries real fields when the "full" feature is enabled; without it, a
// private uninstantiable placeholder is emitted so the type still names.
#[cfg_attr(
    not(any(feature = "full", feature = "derive")),
    allow(unknown_lints, unused_macro_rules)
)]
macro_rules! ast_struct {
    (
        $(#[$attr:meta])*
        $pub:ident $struct:ident $name:ident #full $body:tt
    ) => {
        check_keyword_matches!(pub $pub);
        check_keyword_matches!(struct $struct);

        #[cfg(feature = "full")]
        $(#[$attr])* $pub $struct $name $body

        // Without "full": keep the type nameable but impossible to construct.
        #[cfg(not(feature = "full"))]
        $(#[$attr])* $pub $struct $name {
            _noconstruct: ::std::marker::PhantomData<::proc_macro2::Span>,
        }

        #[cfg(all(not(feature = "full"), feature = "printing"))]
        impl ::quote::ToTokens for $name {
            fn to_tokens(&self, _: &mut ::proc_macro2::TokenStream) {
                unreachable!()
            }
        }
    };

    (
        $(#[$attr:meta])*
        $pub:ident $struct:ident $name:ident $body:tt
    ) => {
        check_keyword_matches!(pub $pub);
        check_keyword_matches!(struct $struct);

        $(#[$attr])* $pub $struct $name $body
    };
}

// Declares a syntax-tree enum verbatim (after keyword sanity checks).
#[cfg(any(feature = "full", feature = "derive"))]
macro_rules! ast_enum {
    (
        $(#[$enum_attr:meta])*
        $pub:ident $enum:ident $name:ident $body:tt
    ) => {
        check_keyword_matches!(pub $pub);
        check_keyword_matches!(enum $enum);

        $(#[$enum_attr])* $pub $enum $name $body
    };
}

// Declares a syntax-tree enum whose variants each wrap a struct, and derives
// the From conversions and ToTokens dispatch for it.
macro_rules! ast_enum_of_structs {
    (
        $(#[$enum_attr:meta])*
        $pub:ident $enum:ident $name:ident $body:tt
    ) => {
        check_keyword_matches!(pub $pub);
        check_keyword_matches!(enum $enum);

        $(#[$enum_attr])* $pub $enum $name $body

        ast_enum_of_structs_impl!($name $body);
    };
}

macro_rules! ast_enum_of_structs_impl {
    (
        $name:ident {
            $(
                $(#[cfg $cfg_attr:tt])*
                $(#[doc $($doc_attr:tt)*])*
                $variant:ident $( ($($member:ident)::+) )*,
            )*
        }
    ) => {
        $($(
            ast_enum_from_struct!($name::$variant, $($member)::+);
        )*)*

        #[cfg(feature = "printing")]
        generate_to_tokens! {
            ()
            tokens
            $name {
                $(
                    $(#[cfg $cfg_attr])*
                    $(#[doc $($doc_attr)*])*
                    $variant $($($member)::+)*,
                )*
            }
        }
    };
}

macro_rules! ast_enum_from_struct {
    // No From<TokenStream> for verbatim variants.
    ($name:ident::Verbatim, $member:ident) => {};

    ($name:ident::$variant:ident, $member:ident) => {
        impl From<$member> for $name {
            fn from(e: $member) -> $name {
                $name::$variant(e)
            }
        }
    };
}
|
||||
|
||||
// Incrementally builds the match arms of a ToTokens impl for an
// ast_enum_of_structs enum: one recursion step per variant (unit variants
// print nothing; tuple variants delegate to their member), then a final rule
// emits the assembled impl.
#[cfg(feature = "printing")]
macro_rules! generate_to_tokens {
    (
        ($($arms:tt)*) $tokens:ident $name:ident {
            $(#[cfg $cfg_attr:tt])*
            $(#[doc $($doc_attr:tt)*])*
            $variant:ident,
            $($next:tt)*
        }
    ) => {
        generate_to_tokens!(
            ($($arms)* $(#[cfg $cfg_attr])* $name::$variant => {})
            $tokens $name { $($next)* }
        );
    };

    (
        ($($arms:tt)*) $tokens:ident $name:ident {
            $(#[cfg $cfg_attr:tt])*
            $(#[doc $($doc_attr:tt)*])*
            $variant:ident $member:ident,
            $($next:tt)*
        }
    ) => {
        generate_to_tokens!(
            ($($arms)* $(#[cfg $cfg_attr])* $name::$variant(_e) => _e.to_tokens($tokens),)
            $tokens $name { $($next)* }
        );
    };

    // Base case: all variants consumed; emit the impl with the collected arms.
    (($($arms:tt)*) $tokens:ident $name:ident {}) => {
        #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))]
        impl ::quote::ToTokens for $name {
            fn to_tokens(&self, $tokens: &mut ::proc_macro2::TokenStream) {
                match self {
                    $($arms)*
                }
            }
        }
    };
}
|
||||
|
||||
// Rustdoc bug: does not respect the doc(hidden) on some items.
//
// Under `cfg(doc)` this variant demotes the item to pub(crate) so rustdoc
// does not render it. The check_keyword_matches guard verifies at compile
// time that the captured identifier really is the `pub` keyword.
#[cfg(all(doc, feature = "parsing"))]
macro_rules! pub_if_not_doc {
    ($(#[$m:meta])* $pub:ident $($item:tt)*) => {
        check_keyword_matches!(pub $pub);

        $(#[$m])*
        $pub(crate) $($item)*
    };
}
|
||||
|
||||
// Non-doc builds: emit the item with its visibility unchanged. See the
// cfg(doc) variant above for why two definitions exist.
#[cfg(all(not(doc), feature = "parsing"))]
macro_rules! pub_if_not_doc {
    ($(#[$m:meta])* $pub:ident $($item:tt)*) => {
        check_keyword_matches!(pub $pub);

        $(#[$m])*
        $pub $($item)*
    };
}
|
||||
|
||||
// Compile-time guard used by macros that capture a keyword as an `ident`
// metavariable: each rule matches only when the captured identifier is
// literally the expected keyword, and expands to nothing. Any other
// identifier fails to match and produces a compile error at the call site.
macro_rules! check_keyword_matches {
    (pub pub) => {};
    (struct struct) => {};
    (enum enum) => {};
}
|
||||
426
vendor/syn/src/meta.rs
vendored
Normal file
426
vendor/syn/src/meta.rs
vendored
Normal file
@@ -0,0 +1,426 @@
|
||||
//! Facility for interpreting structured content inside of an `Attribute`.
|
||||
|
||||
use crate::ext::IdentExt as _;
|
||||
use crate::lit::Lit;
|
||||
use crate::parse::{Error, ParseStream, Parser, Result};
|
||||
use crate::path::{Path, PathSegment};
|
||||
use crate::punctuated::Punctuated;
|
||||
use proc_macro2::Ident;
|
||||
use std::fmt::Display;
|
||||
|
||||
/// Make a parser that is usable with `parse_macro_input!` in a
|
||||
/// `#[proc_macro_attribute]` macro.
|
||||
///
|
||||
/// *Warning:* When parsing attribute args **other than** the
|
||||
/// `proc_macro::TokenStream` input of a `proc_macro_attribute`, you do **not**
|
||||
/// need this function. In several cases your callers will get worse error
|
||||
/// messages if you use this function, because the surrounding delimiter's span
|
||||
/// is concealed from attribute macros by rustc. Use
|
||||
/// [`Attribute::parse_nested_meta`] instead.
|
||||
///
|
||||
/// [`Attribute::parse_nested_meta`]: crate::Attribute::parse_nested_meta
|
||||
///
|
||||
/// # Example
|
||||
///
|
||||
/// This example implements an attribute macro whose invocations look like this:
|
||||
///
|
||||
/// ```
|
||||
/// # const IGNORE: &str = stringify! {
|
||||
/// #[tea(kind = "EarlGrey", hot)]
|
||||
/// struct Picard {...}
|
||||
/// # };
|
||||
/// ```
|
||||
///
|
||||
/// The "parameters" supported by the attribute are:
|
||||
///
|
||||
/// - `kind = "..."`
|
||||
/// - `hot`
|
||||
/// - `with(sugar, milk, ...)`, a comma-separated list of ingredients
|
||||
///
|
||||
/// ```
|
||||
/// # extern crate proc_macro;
|
||||
/// #
|
||||
/// use proc_macro::TokenStream;
|
||||
/// use syn::{parse_macro_input, LitStr, Path};
|
||||
///
|
||||
/// # const IGNORE: &str = stringify! {
|
||||
/// #[proc_macro_attribute]
|
||||
/// # };
|
||||
/// pub fn tea(args: TokenStream, input: TokenStream) -> TokenStream {
|
||||
/// let mut kind: Option<LitStr> = None;
|
||||
/// let mut hot: bool = false;
|
||||
/// let mut with: Vec<Path> = Vec::new();
|
||||
/// let tea_parser = syn::meta::parser(|meta| {
|
||||
/// if meta.path.is_ident("kind") {
|
||||
/// kind = Some(meta.value()?.parse()?);
|
||||
/// Ok(())
|
||||
/// } else if meta.path.is_ident("hot") {
|
||||
/// hot = true;
|
||||
/// Ok(())
|
||||
/// } else if meta.path.is_ident("with") {
|
||||
/// meta.parse_nested_meta(|meta| {
|
||||
/// with.push(meta.path);
|
||||
/// Ok(())
|
||||
/// })
|
||||
/// } else {
|
||||
/// Err(meta.error("unsupported tea property"))
|
||||
/// }
|
||||
/// });
|
||||
///
|
||||
/// parse_macro_input!(args with tea_parser);
|
||||
/// eprintln!("kind={kind:?} hot={hot} with={with:?}");
|
||||
///
|
||||
/// /* ... */
|
||||
/// # TokenStream::new()
|
||||
/// }
|
||||
/// ```
|
||||
///
|
||||
/// The `syn::meta` library will take care of dealing with the commas including
|
||||
/// trailing commas, and producing sensible error messages on unexpected input.
|
||||
///
|
||||
/// ```console
|
||||
/// error: expected `,`
|
||||
/// --> src/main.rs:3:37
|
||||
/// |
|
||||
/// 3 | #[tea(kind = "EarlGrey", with(sugar = "lol", milk))]
|
||||
/// | ^
|
||||
/// ```
|
||||
///
|
||||
/// # Example
|
||||
///
|
||||
/// Same as above but we factor out most of the logic into a separate function.
|
||||
///
|
||||
/// ```
|
||||
/// # extern crate proc_macro;
|
||||
/// #
|
||||
/// use proc_macro::TokenStream;
|
||||
/// use syn::meta::ParseNestedMeta;
|
||||
/// use syn::parse::{Parser, Result};
|
||||
/// use syn::{parse_macro_input, LitStr, Path};
|
||||
///
|
||||
/// # const IGNORE: &str = stringify! {
|
||||
/// #[proc_macro_attribute]
|
||||
/// # };
|
||||
/// pub fn tea(args: TokenStream, input: TokenStream) -> TokenStream {
|
||||
/// let mut attrs = TeaAttributes::default();
|
||||
/// let tea_parser = syn::meta::parser(|meta| attrs.parse(meta));
|
||||
/// parse_macro_input!(args with tea_parser);
|
||||
///
|
||||
/// /* ... */
|
||||
/// # TokenStream::new()
|
||||
/// }
|
||||
///
|
||||
/// #[derive(Default)]
|
||||
/// struct TeaAttributes {
|
||||
/// kind: Option<LitStr>,
|
||||
/// hot: bool,
|
||||
/// with: Vec<Path>,
|
||||
/// }
|
||||
///
|
||||
/// impl TeaAttributes {
|
||||
/// fn parse(&mut self, meta: ParseNestedMeta) -> Result<()> {
|
||||
/// if meta.path.is_ident("kind") {
|
||||
/// self.kind = Some(meta.value()?.parse()?);
|
||||
/// Ok(())
|
||||
/// } else /* just like in last example */
|
||||
/// # { unimplemented!() }
|
||||
///
|
||||
/// }
|
||||
/// }
|
||||
/// ```
|
||||
pub fn parser(logic: impl FnMut(ParseNestedMeta) -> Result<()>) -> impl Parser<Output = ()> {
|
||||
|input: ParseStream| {
|
||||
if input.is_empty() {
|
||||
Ok(())
|
||||
} else {
|
||||
parse_nested_meta(input, logic)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Context for parsing a single property in the conventional syntax for
/// structured attributes.
///
/// # Examples
///
/// Refer to usage examples on the following two entry-points:
///
/// - [`Attribute::parse_nested_meta`] if you have an entire `Attribute` to
///   parse. Always use this if possible. Generally this is able to produce
///   better error messages because `Attribute` holds span information for all
///   of the delimiters therein.
///
/// - [`syn::meta::parser`] if you are implementing a `proc_macro_attribute`
///   macro and parsing the arguments to the attribute macro, i.e. the ones
///   written in the same attribute that dispatched the macro invocation. Rustc
///   does not pass span information for the surrounding delimiters into the
///   attribute macro invocation in this situation, so error messages might be
///   less precise.
///
/// [`Attribute::parse_nested_meta`]: crate::Attribute::parse_nested_meta
/// [`syn::meta::parser`]: crate::meta::parser
#[non_exhaustive]
pub struct ParseNestedMeta<'a> {
    /// The property's name, e.g. `kind` in `kind = "EarlGrey"`. Keywords are
    /// accepted as path segments (see `parse_meta_path`).
    pub path: Path,
    /// The stream positioned immediately after `path`; a `=` value or a
    /// parenthesized nested list, if present, is parsed from here.
    pub input: ParseStream<'a>,
}
|
||||
|
||||
impl<'a> ParseNestedMeta<'a> {
|
||||
/// Used when parsing `key = "value"` syntax.
|
||||
///
|
||||
/// All it does is advance `meta.input` past the `=` sign in the input. You
|
||||
/// could accomplish the same effect by writing
|
||||
/// `meta.parse::<Token![=]>()?`, so at most it is a minor convenience to
|
||||
/// use `meta.value()?`.
|
||||
///
|
||||
/// # Example
|
||||
///
|
||||
/// ```
|
||||
/// use syn::{parse_quote, Attribute, LitStr};
|
||||
///
|
||||
/// let attr: Attribute = parse_quote! {
|
||||
/// #[tea(kind = "EarlGrey")]
|
||||
/// };
|
||||
/// // conceptually:
|
||||
/// if attr.path().is_ident("tea") { // this parses the `tea`
|
||||
/// attr.parse_nested_meta(|meta| { // this parses the `(`
|
||||
/// if meta.path.is_ident("kind") { // this parses the `kind`
|
||||
/// let value = meta.value()?; // this parses the `=`
|
||||
/// let s: LitStr = value.parse()?; // this parses `"EarlGrey"`
|
||||
/// if s.value() == "EarlGrey" {
|
||||
/// // ...
|
||||
/// }
|
||||
/// Ok(())
|
||||
/// } else {
|
||||
/// Err(meta.error("unsupported attribute"))
|
||||
/// }
|
||||
/// })?;
|
||||
/// }
|
||||
/// # anyhow::Ok(())
|
||||
/// ```
|
||||
pub fn value(&self) -> Result<ParseStream<'a>> {
|
||||
self.input.parse::<Token![=]>()?;
|
||||
Ok(self.input)
|
||||
}
|
||||
|
||||
/// Used when parsing `list(...)` syntax **if** the content inside the
|
||||
/// nested parentheses is also expected to conform to Rust's structured
|
||||
/// attribute convention.
|
||||
///
|
||||
/// # Example
|
||||
///
|
||||
/// ```
|
||||
/// use syn::{parse_quote, Attribute};
|
||||
///
|
||||
/// let attr: Attribute = parse_quote! {
|
||||
/// #[tea(with(sugar, milk))]
|
||||
/// };
|
||||
///
|
||||
/// if attr.path().is_ident("tea") {
|
||||
/// attr.parse_nested_meta(|meta| {
|
||||
/// if meta.path.is_ident("with") {
|
||||
/// meta.parse_nested_meta(|meta| { // <---
|
||||
/// if meta.path.is_ident("sugar") {
|
||||
/// // Here we can go even deeper if needed.
|
||||
/// Ok(())
|
||||
/// } else if meta.path.is_ident("milk") {
|
||||
/// Ok(())
|
||||
/// } else {
|
||||
/// Err(meta.error("unsupported ingredient"))
|
||||
/// }
|
||||
/// })
|
||||
/// } else {
|
||||
/// Err(meta.error("unsupported tea property"))
|
||||
/// }
|
||||
/// })?;
|
||||
/// }
|
||||
/// # anyhow::Ok(())
|
||||
/// ```
|
||||
///
|
||||
/// # Counterexample
|
||||
///
|
||||
/// If you don't need `parse_nested_meta`'s help in parsing the content
|
||||
/// written within the nested parentheses, keep in mind that you can always
|
||||
/// just parse it yourself from the exposed ParseStream. Rust syntax permits
|
||||
/// arbitrary tokens within those parentheses so for the crazier stuff,
|
||||
/// `parse_nested_meta` is not what you want.
|
||||
///
|
||||
/// ```
|
||||
/// use syn::{parenthesized, parse_quote, Attribute, LitInt};
|
||||
///
|
||||
/// let attr: Attribute = parse_quote! {
|
||||
/// #[repr(align(32))]
|
||||
/// };
|
||||
///
|
||||
/// let mut align: Option<LitInt> = None;
|
||||
/// if attr.path().is_ident("repr") {
|
||||
/// attr.parse_nested_meta(|meta| {
|
||||
/// if meta.path.is_ident("align") {
|
||||
/// let content;
|
||||
/// parenthesized!(content in meta.input);
|
||||
/// align = Some(content.parse()?);
|
||||
/// Ok(())
|
||||
/// } else {
|
||||
/// Err(meta.error("unsupported repr"))
|
||||
/// }
|
||||
/// })?;
|
||||
/// }
|
||||
/// # anyhow::Ok(())
|
||||
/// ```
|
||||
pub fn parse_nested_meta(
|
||||
&self,
|
||||
logic: impl FnMut(ParseNestedMeta) -> Result<()>,
|
||||
) -> Result<()> {
|
||||
let content;
|
||||
parenthesized!(content in self.input);
|
||||
parse_nested_meta(&content, logic)
|
||||
}
|
||||
|
||||
/// Report that the attribute's content did not conform to expectations.
|
||||
///
|
||||
/// The span of the resulting error will cover `meta.path` *and* everything
|
||||
/// that has been parsed so far since it.
|
||||
///
|
||||
/// There are 2 ways you might call this. First, if `meta.path` is not
|
||||
/// something you recognize:
|
||||
///
|
||||
/// ```
|
||||
/// # use syn::Attribute;
|
||||
/// #
|
||||
/// # fn example(attr: &Attribute) -> syn::Result<()> {
|
||||
/// attr.parse_nested_meta(|meta| {
|
||||
/// if meta.path.is_ident("kind") {
|
||||
/// // ...
|
||||
/// Ok(())
|
||||
/// } else {
|
||||
/// Err(meta.error("unsupported tea property"))
|
||||
/// }
|
||||
/// })?;
|
||||
/// # Ok(())
|
||||
/// # }
|
||||
/// ```
|
||||
///
|
||||
/// In this case, it behaves exactly like
|
||||
/// `syn::Error::new_spanned(&meta.path, "message...")`.
|
||||
///
|
||||
/// ```console
|
||||
/// error: unsupported tea property
|
||||
/// --> src/main.rs:3:26
|
||||
/// |
|
||||
/// 3 | #[tea(kind = "EarlGrey", wat = "foo")]
|
||||
/// | ^^^
|
||||
/// ```
|
||||
///
|
||||
/// More usefully, the second place is if you've already parsed a value but
|
||||
/// have decided not to accept the value:
|
||||
///
|
||||
/// ```
|
||||
/// # use syn::Attribute;
|
||||
/// #
|
||||
/// # fn example(attr: &Attribute) -> syn::Result<()> {
|
||||
/// use syn::Expr;
|
||||
///
|
||||
/// attr.parse_nested_meta(|meta| {
|
||||
/// if meta.path.is_ident("kind") {
|
||||
/// let expr: Expr = meta.value()?.parse()?;
|
||||
/// match expr {
|
||||
/// Expr::Lit(expr) => /* ... */
|
||||
/// # unimplemented!(),
|
||||
/// Expr::Path(expr) => /* ... */
|
||||
/// # unimplemented!(),
|
||||
/// Expr::Macro(expr) => /* ... */
|
||||
/// # unimplemented!(),
|
||||
/// _ => Err(meta.error("tea kind must be a string literal, path, or macro")),
|
||||
/// }
|
||||
/// } else /* as above */
|
||||
/// # { unimplemented!() }
|
||||
///
|
||||
/// })?;
|
||||
/// # Ok(())
|
||||
/// # }
|
||||
/// ```
|
||||
///
|
||||
/// ```console
|
||||
/// error: tea kind must be a string literal, path, or macro
|
||||
/// --> src/main.rs:3:7
|
||||
/// |
|
||||
/// 3 | #[tea(kind = async { replicator.await })]
|
||||
/// | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
/// ```
|
||||
///
|
||||
/// Often you may want to use `syn::Error::new_spanned` even in this
|
||||
/// situation. In the above code, that would be:
|
||||
///
|
||||
/// ```
|
||||
/// # use syn::{Error, Expr};
|
||||
/// #
|
||||
/// # fn example(expr: Expr) -> syn::Result<()> {
|
||||
/// match expr {
|
||||
/// Expr::Lit(expr) => /* ... */
|
||||
/// # unimplemented!(),
|
||||
/// Expr::Path(expr) => /* ... */
|
||||
/// # unimplemented!(),
|
||||
/// Expr::Macro(expr) => /* ... */
|
||||
/// # unimplemented!(),
|
||||
/// _ => Err(Error::new_spanned(expr, "unsupported expression type for `kind`")),
|
||||
/// }
|
||||
/// # }
|
||||
/// ```
|
||||
///
|
||||
/// ```console
|
||||
/// error: unsupported expression type for `kind`
|
||||
/// --> src/main.rs:3:14
|
||||
/// |
|
||||
/// 3 | #[tea(kind = async { replicator.await })]
|
||||
/// | ^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
/// ```
|
||||
pub fn error(&self, msg: impl Display) -> Error {
|
||||
let start_span = self.path.segments[0].ident.span();
|
||||
let end_span = self.input.cursor().prev_span();
|
||||
crate::error::new2(start_span, end_span, msg)
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn parse_nested_meta(
|
||||
input: ParseStream,
|
||||
mut logic: impl FnMut(ParseNestedMeta) -> Result<()>,
|
||||
) -> Result<()> {
|
||||
loop {
|
||||
let path = input.call(parse_meta_path)?;
|
||||
logic(ParseNestedMeta { path, input })?;
|
||||
if input.is_empty() {
|
||||
return Ok(());
|
||||
}
|
||||
input.parse::<Token![,]>()?;
|
||||
if input.is_empty() {
|
||||
return Ok(());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Like Path::parse_mod_style, but accepts keywords in the path.
|
||||
fn parse_meta_path(input: ParseStream) -> Result<Path> {
|
||||
Ok(Path {
|
||||
leading_colon: input.parse()?,
|
||||
segments: {
|
||||
let mut segments = Punctuated::new();
|
||||
if input.peek(Ident::peek_any) {
|
||||
let ident = Ident::parse_any(input)?;
|
||||
segments.push_value(PathSegment::from(ident));
|
||||
} else if input.is_empty() {
|
||||
return Err(input.error("expected nested attribute"));
|
||||
} else if input.peek(Lit) {
|
||||
return Err(input.error("unexpected literal in nested attribute, expected ident"));
|
||||
} else {
|
||||
return Err(input.error("unexpected token in nested attribute, expected ident"));
|
||||
}
|
||||
while input.peek(Token![::]) {
|
||||
let punct = input.parse()?;
|
||||
segments.push_punct(punct);
|
||||
let ident = Ident::parse_any(input)?;
|
||||
segments.push_value(PathSegment::from(ident));
|
||||
}
|
||||
segments
|
||||
},
|
||||
})
|
||||
}
|
||||
218
vendor/syn/src/op.rs
vendored
Normal file
218
vendor/syn/src/op.rs
vendored
Normal file
@@ -0,0 +1,218 @@
|
||||
ast_enum! {
    /// A binary operator: `+`, `+=`, `&`.
    #[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))]
    #[non_exhaustive]
    pub enum BinOp {
        // Arithmetic
        /// The `+` operator (addition)
        Add(Token![+]),
        /// The `-` operator (subtraction)
        Sub(Token![-]),
        /// The `*` operator (multiplication)
        Mul(Token![*]),
        /// The `/` operator (division)
        Div(Token![/]),
        /// The `%` operator (modulus)
        Rem(Token![%]),
        // Lazy boolean
        /// The `&&` operator (logical and)
        And(Token![&&]),
        /// The `||` operator (logical or)
        Or(Token![||]),
        // Bitwise
        /// The `^` operator (bitwise xor)
        BitXor(Token![^]),
        /// The `&` operator (bitwise and)
        BitAnd(Token![&]),
        /// The `|` operator (bitwise or)
        BitOr(Token![|]),
        /// The `<<` operator (shift left)
        Shl(Token![<<]),
        /// The `>>` operator (shift right)
        Shr(Token![>>]),
        // Comparison
        /// The `==` operator (equality)
        Eq(Token![==]),
        /// The `<` operator (less than)
        Lt(Token![<]),
        /// The `<=` operator (less than or equal to)
        Le(Token![<=]),
        /// The `!=` operator (not equal to)
        Ne(Token![!=]),
        /// The `>=` operator (greater than or equal to)
        Ge(Token![>=]),
        /// The `>` operator (greater than)
        Gt(Token![>]),
        // Compound assignment
        /// The `+=` operator
        AddAssign(Token![+=]),
        /// The `-=` operator
        SubAssign(Token![-=]),
        /// The `*=` operator
        MulAssign(Token![*=]),
        /// The `/=` operator
        DivAssign(Token![/=]),
        /// The `%=` operator
        RemAssign(Token![%=]),
        /// The `^=` operator
        BitXorAssign(Token![^=]),
        /// The `&=` operator
        BitAndAssign(Token![&=]),
        /// The `|=` operator
        BitOrAssign(Token![|=]),
        /// The `<<=` operator
        ShlAssign(Token![<<=]),
        /// The `>>=` operator
        ShrAssign(Token![>>=]),
    }
}
|
||||
|
||||
ast_enum! {
    /// A unary operator: `*`, `!`, `-`.
    // non_exhaustive: downstream matches must carry a wildcard arm so new
    // operators can be added without a breaking change.
    #[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))]
    #[non_exhaustive]
    pub enum UnOp {
        /// The `*` operator for dereferencing
        Deref(Token![*]),
        /// The `!` operator for logical inversion
        Not(Token![!]),
        /// The `-` operator for negation
        Neg(Token![-]),
    }
}
|
||||
|
||||
#[cfg(feature = "parsing")]
pub(crate) mod parsing {
    use super::*;
    use crate::parse::{Parse, ParseStream, Result};

    #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
    impl Parse for BinOp {
        fn parse(input: ParseStream) -> Result<Self> {
            // Order matters: longer tokens are peeked before any token that
            // is a prefix of them (`+=` before `+`, `<<=` before `<<` before
            // `<`, `&&`/`&=` before `&`, etc.), so the longest operator wins.
            if input.peek(Token![+=]) {
                input.parse().map(BinOp::AddAssign)
            } else if input.peek(Token![-=]) {
                input.parse().map(BinOp::SubAssign)
            } else if input.peek(Token![*=]) {
                input.parse().map(BinOp::MulAssign)
            } else if input.peek(Token![/=]) {
                input.parse().map(BinOp::DivAssign)
            } else if input.peek(Token![%=]) {
                input.parse().map(BinOp::RemAssign)
            } else if input.peek(Token![^=]) {
                input.parse().map(BinOp::BitXorAssign)
            } else if input.peek(Token![&=]) {
                input.parse().map(BinOp::BitAndAssign)
            } else if input.peek(Token![|=]) {
                input.parse().map(BinOp::BitOrAssign)
            } else if input.peek(Token![<<=]) {
                input.parse().map(BinOp::ShlAssign)
            } else if input.peek(Token![>>=]) {
                input.parse().map(BinOp::ShrAssign)
            } else if input.peek(Token![&&]) {
                input.parse().map(BinOp::And)
            } else if input.peek(Token![||]) {
                input.parse().map(BinOp::Or)
            } else if input.peek(Token![<<]) {
                input.parse().map(BinOp::Shl)
            } else if input.peek(Token![>>]) {
                input.parse().map(BinOp::Shr)
            } else if input.peek(Token![==]) {
                input.parse().map(BinOp::Eq)
            } else if input.peek(Token![<=]) {
                input.parse().map(BinOp::Le)
            } else if input.peek(Token![!=]) {
                input.parse().map(BinOp::Ne)
            } else if input.peek(Token![>=]) {
                input.parse().map(BinOp::Ge)
            } else if input.peek(Token![+]) {
                input.parse().map(BinOp::Add)
            } else if input.peek(Token![-]) {
                input.parse().map(BinOp::Sub)
            } else if input.peek(Token![*]) {
                input.parse().map(BinOp::Mul)
            } else if input.peek(Token![/]) {
                input.parse().map(BinOp::Div)
            } else if input.peek(Token![%]) {
                input.parse().map(BinOp::Rem)
            } else if input.peek(Token![^]) {
                input.parse().map(BinOp::BitXor)
            } else if input.peek(Token![&]) {
                input.parse().map(BinOp::BitAnd)
            } else if input.peek(Token![|]) {
                input.parse().map(BinOp::BitOr)
            } else if input.peek(Token![<]) {
                input.parse().map(BinOp::Lt)
            } else if input.peek(Token![>]) {
                input.parse().map(BinOp::Gt)
            } else {
                Err(input.error("expected binary operator"))
            }
        }
    }

    #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
    impl Parse for UnOp {
        fn parse(input: ParseStream) -> Result<Self> {
            // Lookahead1 builds an "expected one of: `*`, `!`, `-`" error
            // automatically if none of the peeked tokens match.
            let lookahead = input.lookahead1();
            if lookahead.peek(Token![*]) {
                input.parse().map(UnOp::Deref)
            } else if lookahead.peek(Token![!]) {
                input.parse().map(UnOp::Not)
            } else if lookahead.peek(Token![-]) {
                input.parse().map(UnOp::Neg)
            } else {
                Err(lookahead.error())
            }
        }
    }
}
|
||||
|
||||
#[cfg(feature = "printing")]
mod printing {
    use super::*;
    use proc_macro2::TokenStream;
    use quote::ToTokens;

    #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))]
    impl ToTokens for BinOp {
        fn to_tokens(&self, tokens: &mut TokenStream) {
            // Every variant just prints its punctuation token: unwrap the
            // token once and delegate through a single call.
            let op: &dyn ToTokens = match self {
                BinOp::Add(t) => t,
                BinOp::Sub(t) => t,
                BinOp::Mul(t) => t,
                BinOp::Div(t) => t,
                BinOp::Rem(t) => t,
                BinOp::And(t) => t,
                BinOp::Or(t) => t,
                BinOp::BitXor(t) => t,
                BinOp::BitAnd(t) => t,
                BinOp::BitOr(t) => t,
                BinOp::Shl(t) => t,
                BinOp::Shr(t) => t,
                BinOp::Eq(t) => t,
                BinOp::Lt(t) => t,
                BinOp::Le(t) => t,
                BinOp::Ne(t) => t,
                BinOp::Ge(t) => t,
                BinOp::Gt(t) => t,
                BinOp::AddAssign(t) => t,
                BinOp::SubAssign(t) => t,
                BinOp::MulAssign(t) => t,
                BinOp::DivAssign(t) => t,
                BinOp::RemAssign(t) => t,
                BinOp::BitXorAssign(t) => t,
                BinOp::BitAndAssign(t) => t,
                BinOp::BitOrAssign(t) => t,
                BinOp::ShlAssign(t) => t,
                BinOp::ShrAssign(t) => t,
            };
            op.to_tokens(tokens);
        }
    }

    #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))]
    impl ToTokens for UnOp {
        fn to_tokens(&self, tokens: &mut TokenStream) {
            let op: &dyn ToTokens = match self {
                UnOp::Deref(t) => t,
                UnOp::Not(t) => t,
                UnOp::Neg(t) => t,
            };
            op.to_tokens(tokens);
        }
    }
}
|
||||
1386
vendor/syn/src/parse.rs
vendored
Normal file
1386
vendor/syn/src/parse.rs
vendored
Normal file
File diff suppressed because it is too large
Load Diff
128
vendor/syn/src/parse_macro_input.rs
vendored
Normal file
128
vendor/syn/src/parse_macro_input.rs
vendored
Normal file
@@ -0,0 +1,128 @@
|
||||
/// Parse the input TokenStream of a macro, triggering a compile error if the
/// tokens fail to parse.
///
/// Refer to the [`parse` module] documentation for more details about parsing
/// in Syn.
///
/// [`parse` module]: mod@crate::parse
///
/// # Intended usage
///
/// This macro must be called from a function that returns
/// `proc_macro::TokenStream` — usually the proc macro entry point, i.e. the
/// function carrying the #\[proc_macro\] / #\[proc_macro_derive\] /
/// #\[proc_macro_attribute\] attribute. Three forms are supported:
///
/// - `parse_macro_input!(tokens as MyType)` — parse via `MyType`'s [`Parse`]
///   impl.
/// - `parse_macro_input!(tokens with MyType::parse_alternate)` — parse with an
///   explicit parser function, for types that can be parsed in multiple ways;
///   see the [`Parser` trait].
/// - `parse_macro_input!(tokens)` — infer the target type from context.
///
/// [`Parse`]: crate::parse::Parse
/// [`Parser` trait]: crate::parse::Parser
///
/// ```
/// # extern crate proc_macro;
/// #
/// use proc_macro::TokenStream;
/// use syn::{parse_macro_input, Result};
/// use syn::parse::{Parse, ParseStream};
///
/// struct MyMacroInput {
///     /* ... */
/// }
///
/// impl Parse for MyMacroInput {
///     fn parse(input: ParseStream) -> Result<Self> {
///         /* ... */
///         # Ok(MyMacroInput {})
///     }
/// }
///
/// # const IGNORE: &str = stringify! {
/// #[proc_macro]
/// # };
/// pub fn my_macro(tokens: TokenStream) -> TokenStream {
///     let input = parse_macro_input!(tokens as MyMacroInput);
///
///     /* ... */
///     # TokenStream::new()
/// }
/// ```
///
/// # Expansion
///
/// `parse_macro_input!($variable as $Type)` expands to something like:
///
/// ```no_run
/// # extern crate proc_macro;
/// #
/// # macro_rules! doc_test {
/// #     ($variable:ident as $Type:ty) => {
/// match syn::parse::<$Type>($variable) {
///     Ok(syntax_tree) => syntax_tree,
///     Err(err) => return proc_macro::TokenStream::from(err.to_compile_error()),
/// }
/// #     };
/// # }
/// #
/// # fn test(input: proc_macro::TokenStream) -> proc_macro::TokenStream {
/// #     let _ = doc_test!(input as syn::Ident);
/// #     proc_macro::TokenStream::new()
/// # }
/// ```
#[macro_export]
#[cfg_attr(doc_cfg, doc(cfg(all(feature = "parsing", feature = "proc-macro"))))]
macro_rules! parse_macro_input {
    ($tokenstream:ident as $ty:ty) => {
        match $crate::parse::<$ty>($tokenstream) {
            $crate::__private::Err(err) => {
                return $crate::__private::TokenStream::from(err.to_compile_error());
            }
            $crate::__private::Ok(data) => data,
        }
    };
    ($tokenstream:ident with $parser:path) => {
        match $crate::parse::Parser::parse($parser, $tokenstream) {
            $crate::__private::Err(err) => {
                return $crate::__private::TokenStream::from(err.to_compile_error());
            }
            $crate::__private::Ok(data) => data,
        }
    };
    ($tokenstream:ident) => {
        $crate::parse_macro_input!($tokenstream as _)
    };
}
|
||||
209
vendor/syn/src/parse_quote.rs
vendored
Normal file
209
vendor/syn/src/parse_quote.rs
vendored
Normal file
@@ -0,0 +1,209 @@
|
||||
/// Quasi-quotation macro that accepts input like the [`quote!`] macro but uses
/// type inference to figure out a return type for those tokens.
///
/// [`quote!`]: https://docs.rs/quote/1.0/quote/index.html
///
/// The return type can be any syntax tree node that implements the [`Parse`]
/// trait.
///
/// [`Parse`]: crate::parse::Parse
///
/// ```
/// use quote::quote;
/// use syn::{parse_quote, Stmt};
///
/// fn main() {
///     let name = quote!(v);
///     let ty = quote!(u8);
///
///     let stmt: Stmt = parse_quote! {
///         let #name: #ty = Default::default();
///     };
///
///     println!("{:#?}", stmt);
/// }
/// ```
///
/// *This macro is available only if Syn is built with both the `"parsing"` and
/// `"printing"` features.*
///
/// # Example
///
/// The following helper function adds a bound `T: HeapSize` to every type
/// parameter `T` in the input generics.
///
/// ```
/// use syn::{parse_quote, Generics, GenericParam};
///
/// // Add a bound `T: HeapSize` to every type parameter T.
/// fn add_trait_bounds(mut generics: Generics) -> Generics {
///     for param in &mut generics.params {
///         if let GenericParam::Type(type_param) = param {
///             type_param.bounds.push(parse_quote!(HeapSize));
///         }
///     }
///     generics
/// }
/// ```
///
/// # Special cases
///
/// This macro can parse the following additional types as a special case even
/// though they do not implement the `Parse` trait.
///
/// - [`Attribute`] — parses one attribute, allowing either outer like `#[...]`
///   or inner like `#![...]`
/// - [`Punctuated<T, P>`] — parses zero or more `T` separated by punctuation
///   `P` with optional trailing punctuation
/// - [`Vec<Stmt>`] — parses the same as `Block::parse_within`
///
/// [`Vec<Stmt>`]: Block::parse_within
///
/// # Panics
///
/// Panics if the tokens fail to parse as the expected syntax tree type. The
/// caller is responsible for ensuring that the input tokens are syntactically
/// valid.
#[cfg_attr(doc_cfg, doc(cfg(all(feature = "parsing", feature = "printing"))))]
#[macro_export]
macro_rules! parse_quote {
    ($($tt:tt)*) => {
        // Interpolate with quote!, then let type inference pick the node
        // type via the ParseQuote machinery below.
        $crate::__private::parse_quote($crate::__private::quote::quote!($($tt)*))
    };
}
|
||||
|
||||
/// This macro is [`parse_quote!`] + [`quote_spanned!`][quote::quote_spanned].
///
/// Please refer to each of their documentation.
///
/// # Example
///
/// ```
/// use quote::{quote, quote_spanned};
/// use syn::spanned::Spanned;
/// use syn::{parse_quote_spanned, ReturnType, Signature};
///
/// // Changes `fn()` to `fn() -> Pin<Box<dyn Future<Output = ()>>>`,
/// // and `fn() -> T` to `fn() -> Pin<Box<dyn Future<Output = T>>>`,
/// // without introducing any call_site() spans.
/// fn make_ret_pinned_future(sig: &mut Signature) {
///     let ret = match &sig.output {
///         ReturnType::Default => quote_spanned!(sig.paren_token.span=> ()),
///         ReturnType::Type(_, ret) => quote!(#ret),
///     };
///     sig.output = parse_quote_spanned! {ret.span()=>
///         -> ::std::pin::Pin<::std::boxed::Box<dyn ::std::future::Future<Output = #ret>>>
///     };
/// }
/// ```
#[cfg_attr(doc_cfg, doc(cfg(all(feature = "parsing", feature = "printing"))))]
#[macro_export]
macro_rules! parse_quote_spanned {
    ($span:expr=> $($tt:tt)*) => {
        // Same expansion as parse_quote!, but every interpolated token
        // carries the caller-provided span.
        $crate::__private::parse_quote($crate::__private::quote::quote_spanned!($span=> $($tt)*))
    };
}
|
||||
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
// Can parse any type that implements Parse.
|
||||
|
||||
use crate::parse::{Parse, ParseStream, Parser, Result};
|
||||
use proc_macro2::TokenStream;
|
||||
|
||||
// Not public API.
|
||||
#[doc(hidden)]
|
||||
pub fn parse<T: ParseQuote>(token_stream: TokenStream) -> T {
|
||||
let parser = T::parse;
|
||||
match parser.parse2(token_stream) {
|
||||
Ok(t) => t,
|
||||
Err(err) => panic!("{}", err),
|
||||
}
|
||||
}
|
||||
|
||||
// Not public API. The parser strategy used by `parse_quote!`: identical to
// `Parse` by default (blanket impl below), with targeted overrides for types
// whose `parse_quote!` syntax is more permissive than their `Parse` impl.
#[doc(hidden)]
pub trait ParseQuote: Sized {
    fn parse(input: ParseStream) -> Result<Self>;
}
|
||||
|
||||
// Any type implementing `Parse` can be produced by `parse_quote!` using its
// ordinary parser.
impl<T: Parse> ParseQuote for T {
    fn parse(input: ParseStream) -> Result<Self> {
        <T as Parse>::parse(input)
    }
}
|
||||
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
// Any other types that we want `parse_quote!` to be able to parse.
|
||||
|
||||
use crate::punctuated::Punctuated;
|
||||
#[cfg(any(feature = "full", feature = "derive"))]
|
||||
use crate::{attr, Attribute, Field, FieldMutability, Ident, Type, Visibility};
|
||||
#[cfg(feature = "full")]
|
||||
use crate::{Block, Pat, Stmt};
|
||||
|
||||
#[cfg(any(feature = "full", feature = "derive"))]
impl ParseQuote for Attribute {
    fn parse(input: ParseStream) -> Result<Self> {
        // `#![...]` is an inner attribute while `#[...]` is an outer one, so
        // dispatch on whether a `!` immediately follows the `#`.
        if input.peek(Token![#]) && input.peek2(Token![!]) {
            attr::parsing::single_parse_inner(input)
        } else {
            attr::parsing::single_parse_outer(input)
        }
    }
}
|
||||
|
||||
#[cfg(any(feature = "full", feature = "derive"))]
impl ParseQuote for Field {
    // Parse a single struct field: either a named field `ident: Type` or an
    // unnamed (tuple-struct) field consisting of just a type.
    fn parse(input: ParseStream) -> Result<Self> {
        let attrs = input.call(Attribute::parse_outer)?;
        let vis: Visibility = input.parse()?;

        // A named field looks like `ident:` — but `ident::` must be rejected
        // because that is the start of a path in an unnamed field's type.
        let named = input.peek(Ident) && input.peek2(Token![:]) && !input.peek2(Token![::]);
        let (ident, colon_token): (Option<Ident>, Option<Token![:]>) = if named {
            (Some(input.parse()?), Some(input.parse()?))
        } else {
            (None, None)
        };

        let ty: Type = input.parse()?;

        Ok(Field {
            attrs,
            vis,
            mutability: FieldMutability::None,
            ident,
            colon_token,
            ty,
        })
    }
}
|
||||
|
||||
#[cfg(feature = "full")]
impl ParseQuote for Pat {
    fn parse(input: ParseStream) -> Result<Self> {
        // `parse_quote!` accepts top-level `|` alternatives, including an
        // optional leading `|`, matching the Rust 2021 `$:pat` matcher.
        Pat::parse_multi_with_leading_vert(input)
    }
}
|
||||
|
||||
#[cfg(feature = "full")]
impl ParseQuote for Box<Pat> {
    fn parse(input: ParseStream) -> Result<Self> {
        // Delegate to the `Pat` override above (not the blanket `Parse`-based
        // impl), then box the result.
        <Pat as ParseQuote>::parse(input).map(Box::new)
    }
}
|
||||
|
||||
impl<T: Parse, P: Parse> ParseQuote for Punctuated<T, P> {
    fn parse(input: ParseStream) -> Result<Self> {
        // Zero or more `T` separated by `P`, with optional trailing
        // punctuation permitted.
        Self::parse_terminated(input)
    }
}
|
||||
|
||||
#[cfg(feature = "full")]
impl ParseQuote for Vec<Stmt> {
    fn parse(input: ParseStream) -> Result<Self> {
        // Parse statements the way a block body would, but without requiring
        // surrounding braces.
        Block::parse_within(input)
    }
}
|
||||
917
vendor/syn/src/pat.rs
vendored
Normal file
917
vendor/syn/src/pat.rs
vendored
Normal file
@@ -0,0 +1,917 @@
|
||||
use super::*;
|
||||
use crate::punctuated::Punctuated;
|
||||
use proc_macro2::TokenStream;
|
||||
|
||||
ast_enum_of_structs! {
|
||||
/// A pattern in a local binding, function signature, match expression, or
|
||||
/// various other places.
|
||||
///
|
||||
/// # Syntax tree enum
|
||||
///
|
||||
/// This type is a [syntax tree enum].
|
||||
///
|
||||
/// [syntax tree enum]: Expr#syntax-tree-enums
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "full")))]
|
||||
#[non_exhaustive]
|
||||
pub enum Pat {
|
||||
/// A const block: `const { ... }`.
|
||||
Const(PatConst),
|
||||
|
||||
/// A pattern that binds a new variable: `ref mut binding @ SUBPATTERN`.
|
||||
Ident(PatIdent),
|
||||
|
||||
/// A literal pattern: `0`.
|
||||
Lit(PatLit),
|
||||
|
||||
/// A macro in pattern position.
|
||||
Macro(PatMacro),
|
||||
|
||||
/// A pattern that matches any one of a set of cases.
|
||||
Or(PatOr),
|
||||
|
||||
/// A parenthesized pattern: `(A | B)`.
|
||||
Paren(PatParen),
|
||||
|
||||
/// A path pattern like `Color::Red`, optionally qualified with a
|
||||
/// self-type.
|
||||
///
|
||||
/// Unqualified path patterns can legally refer to variants, structs,
|
||||
/// constants or associated constants. Qualified path patterns like
|
||||
/// `<A>::B::C` and `<A as Trait>::B::C` can only legally refer to
|
||||
/// associated constants.
|
||||
Path(PatPath),
|
||||
|
||||
/// A range pattern: `1..=2`.
|
||||
Range(PatRange),
|
||||
|
||||
/// A reference pattern: `&mut var`.
|
||||
Reference(PatReference),
|
||||
|
||||
/// The dots in a tuple or slice pattern: `[0, 1, ..]`.
|
||||
Rest(PatRest),
|
||||
|
||||
/// A dynamically sized slice pattern: `[a, b, ref i @ .., y, z]`.
|
||||
Slice(PatSlice),
|
||||
|
||||
/// A struct or struct variant pattern: `Variant { x, y, .. }`.
|
||||
Struct(PatStruct),
|
||||
|
||||
/// A tuple pattern: `(a, b)`.
|
||||
Tuple(PatTuple),
|
||||
|
||||
/// A tuple struct or tuple variant pattern: `Variant(x, y, .., z)`.
|
||||
TupleStruct(PatTupleStruct),
|
||||
|
||||
/// A type ascription pattern: `foo: f64`.
|
||||
Type(PatType),
|
||||
|
||||
/// Tokens in pattern position not interpreted by Syn.
|
||||
Verbatim(TokenStream),
|
||||
|
||||
/// A pattern that matches any value: `_`.
|
||||
Wild(PatWild),
|
||||
|
||||
// For testing exhaustiveness in downstream code, use the following idiom:
|
||||
//
|
||||
// match pat {
|
||||
// #![cfg_attr(test, deny(non_exhaustive_omitted_patterns))]
|
||||
//
|
||||
// Pat::Box(pat) => {...}
|
||||
// Pat::Ident(pat) => {...}
|
||||
// ...
|
||||
// Pat::Wild(pat) => {...}
|
||||
//
|
||||
// _ => { /* some sane fallback */ }
|
||||
// }
|
||||
//
|
||||
// This way we fail your tests but don't break your library when adding
|
||||
// a variant. You will be notified by a test failure when a variant is
|
||||
// added, so that you can add code to handle it, but your library will
|
||||
// continue to compile and work for downstream users in the interim.
|
||||
}
|
||||
}
|
||||
|
||||
ast_struct! {
|
||||
/// A pattern that binds a new variable: `ref mut binding @ SUBPATTERN`.
|
||||
///
|
||||
/// It may also be a unit struct or struct variant (e.g. `None`), or a
|
||||
/// constant; these cannot be distinguished syntactically.
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "full")))]
|
||||
pub struct PatIdent {
|
||||
pub attrs: Vec<Attribute>,
|
||||
pub by_ref: Option<Token![ref]>,
|
||||
pub mutability: Option<Token![mut]>,
|
||||
pub ident: Ident,
|
||||
pub subpat: Option<(Token![@], Box<Pat>)>,
|
||||
}
|
||||
}
|
||||
|
||||
ast_struct! {
|
||||
/// A pattern that matches any one of a set of cases.
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "full")))]
|
||||
pub struct PatOr {
|
||||
pub attrs: Vec<Attribute>,
|
||||
pub leading_vert: Option<Token![|]>,
|
||||
pub cases: Punctuated<Pat, Token![|]>,
|
||||
}
|
||||
}
|
||||
|
||||
ast_struct! {
|
||||
/// A parenthesized pattern: `(A | B)`.
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "full")))]
|
||||
pub struct PatParen {
|
||||
pub attrs: Vec<Attribute>,
|
||||
pub paren_token: token::Paren,
|
||||
pub pat: Box<Pat>,
|
||||
}
|
||||
}
|
||||
|
||||
ast_struct! {
|
||||
/// A reference pattern: `&mut var`.
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "full")))]
|
||||
pub struct PatReference {
|
||||
pub attrs: Vec<Attribute>,
|
||||
pub and_token: Token![&],
|
||||
pub mutability: Option<Token![mut]>,
|
||||
pub pat: Box<Pat>,
|
||||
}
|
||||
}
|
||||
|
||||
ast_struct! {
|
||||
/// The dots in a tuple or slice pattern: `[0, 1, ..]`.
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "full")))]
|
||||
pub struct PatRest {
|
||||
pub attrs: Vec<Attribute>,
|
||||
pub dot2_token: Token![..],
|
||||
}
|
||||
}
|
||||
|
||||
ast_struct! {
|
||||
/// A dynamically sized slice pattern: `[a, b, ref i @ .., y, z]`.
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "full")))]
|
||||
pub struct PatSlice {
|
||||
pub attrs: Vec<Attribute>,
|
||||
pub bracket_token: token::Bracket,
|
||||
pub elems: Punctuated<Pat, Token![,]>,
|
||||
}
|
||||
}
|
||||
|
||||
ast_struct! {
|
||||
/// A struct or struct variant pattern: `Variant { x, y, .. }`.
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "full")))]
|
||||
pub struct PatStruct {
|
||||
pub attrs: Vec<Attribute>,
|
||||
pub qself: Option<QSelf>,
|
||||
pub path: Path,
|
||||
pub brace_token: token::Brace,
|
||||
pub fields: Punctuated<FieldPat, Token![,]>,
|
||||
pub rest: Option<PatRest>,
|
||||
}
|
||||
}
|
||||
|
||||
ast_struct! {
|
||||
/// A tuple pattern: `(a, b)`.
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "full")))]
|
||||
pub struct PatTuple {
|
||||
pub attrs: Vec<Attribute>,
|
||||
pub paren_token: token::Paren,
|
||||
pub elems: Punctuated<Pat, Token![,]>,
|
||||
}
|
||||
}
|
||||
|
||||
ast_struct! {
|
||||
/// A tuple struct or tuple variant pattern: `Variant(x, y, .., z)`.
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "full")))]
|
||||
pub struct PatTupleStruct {
|
||||
pub attrs: Vec<Attribute>,
|
||||
pub qself: Option<QSelf>,
|
||||
pub path: Path,
|
||||
pub paren_token: token::Paren,
|
||||
pub elems: Punctuated<Pat, Token![,]>,
|
||||
}
|
||||
}
|
||||
|
||||
ast_struct! {
|
||||
/// A type ascription pattern: `foo: f64`.
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "full")))]
|
||||
pub struct PatType {
|
||||
pub attrs: Vec<Attribute>,
|
||||
pub pat: Box<Pat>,
|
||||
pub colon_token: Token![:],
|
||||
pub ty: Box<Type>,
|
||||
}
|
||||
}
|
||||
|
||||
ast_struct! {
|
||||
/// A pattern that matches any value: `_`.
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "full")))]
|
||||
pub struct PatWild {
|
||||
pub attrs: Vec<Attribute>,
|
||||
pub underscore_token: Token![_],
|
||||
}
|
||||
}
|
||||
|
||||
ast_struct! {
|
||||
/// A single field in a struct pattern.
|
||||
///
|
||||
/// Patterns like the fields of Foo `{ x, ref y, ref mut z }` are treated
|
||||
/// the same as `x: x, y: ref y, z: ref mut z` but there is no colon token.
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "full")))]
|
||||
pub struct FieldPat {
|
||||
pub attrs: Vec<Attribute>,
|
||||
pub member: Member,
|
||||
pub colon_token: Option<Token![:]>,
|
||||
pub pat: Box<Pat>,
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(feature = "parsing")]
|
||||
pub(crate) mod parsing {
|
||||
use super::*;
|
||||
use crate::ext::IdentExt as _;
|
||||
use crate::parse::{Parse, ParseBuffer, ParseStream, Result};
|
||||
use crate::path;
|
||||
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
|
||||
impl Pat {
|
||||
/// Parse a pattern that does _not_ involve `|` at the top level.
|
||||
///
|
||||
/// This parser matches the behavior of the `$:pat_param` macro_rules
|
||||
/// matcher, and on editions prior to Rust 2021, the behavior of
|
||||
/// `$:pat`.
|
||||
///
|
||||
/// In Rust syntax, some examples of where this syntax would occur are
|
||||
/// in the argument pattern of functions and closures. Patterns using
|
||||
/// `|` are not allowed to occur in these positions.
|
||||
///
|
||||
/// ```compile_fail
|
||||
/// fn f(Some(_) | None: Option<T>) {
|
||||
/// let _ = |Some(_) | None: Option<T>| {};
|
||||
/// // ^^^^^^^^^^^^^^^^^^^^^^^^^??? :(
|
||||
/// }
|
||||
/// ```
|
||||
///
|
||||
/// ```console
|
||||
/// error: top-level or-patterns are not allowed in function parameters
|
||||
/// --> src/main.rs:1:6
|
||||
/// |
|
||||
/// 1 | fn f(Some(_) | None: Option<T>) {
|
||||
/// | ^^^^^^^^^^^^^^ help: wrap the pattern in parentheses: `(Some(_) | None)`
|
||||
/// ```
|
||||
pub fn parse_single(input: ParseStream) -> Result<Self> {
|
||||
let begin = input.fork();
|
||||
let lookahead = input.lookahead1();
|
||||
if lookahead.peek(Ident)
|
||||
&& (input.peek2(Token![::])
|
||||
|| input.peek2(Token![!])
|
||||
|| input.peek2(token::Brace)
|
||||
|| input.peek2(token::Paren)
|
||||
|| input.peek2(Token![..]))
|
||||
|| input.peek(Token![self]) && input.peek2(Token![::])
|
||||
|| lookahead.peek(Token![::])
|
||||
|| lookahead.peek(Token![<])
|
||||
|| input.peek(Token![Self])
|
||||
|| input.peek(Token![super])
|
||||
|| input.peek(Token![crate])
|
||||
{
|
||||
pat_path_or_macro_or_struct_or_range(input)
|
||||
} else if lookahead.peek(Token![_]) {
|
||||
input.call(pat_wild).map(Pat::Wild)
|
||||
} else if input.peek(Token![box]) {
|
||||
pat_box(begin, input)
|
||||
} else if input.peek(Token![-]) || lookahead.peek(Lit) || lookahead.peek(Token![const])
|
||||
{
|
||||
pat_lit_or_range(input)
|
||||
} else if lookahead.peek(Token![ref])
|
||||
|| lookahead.peek(Token![mut])
|
||||
|| input.peek(Token![self])
|
||||
|| input.peek(Ident)
|
||||
{
|
||||
input.call(pat_ident).map(Pat::Ident)
|
||||
} else if lookahead.peek(Token![&]) {
|
||||
input.call(pat_reference).map(Pat::Reference)
|
||||
} else if lookahead.peek(token::Paren) {
|
||||
input.call(pat_paren_or_tuple)
|
||||
} else if lookahead.peek(token::Bracket) {
|
||||
input.call(pat_slice).map(Pat::Slice)
|
||||
} else if lookahead.peek(Token![..]) && !input.peek(Token![...]) {
|
||||
pat_range_half_open(input)
|
||||
} else if lookahead.peek(Token![const]) {
|
||||
input.call(pat_const).map(Pat::Verbatim)
|
||||
} else {
|
||||
Err(lookahead.error())
|
||||
}
|
||||
}
|
||||
|
||||
/// Parse a pattern, possibly involving `|`, but not a leading `|`.
|
||||
pub fn parse_multi(input: ParseStream) -> Result<Self> {
|
||||
multi_pat_impl(input, None)
|
||||
}
|
||||
|
||||
/// Parse a pattern, possibly involving `|`, possibly including a
|
||||
/// leading `|`.
|
||||
///
|
||||
/// This parser matches the behavior of the Rust 2021 edition's `$:pat`
|
||||
/// macro_rules matcher.
|
||||
///
|
||||
/// In Rust syntax, an example of where this syntax would occur is in
|
||||
/// the pattern of a `match` arm, where the language permits an optional
|
||||
/// leading `|`, although it is not idiomatic to write one there in
|
||||
/// handwritten code.
|
||||
///
|
||||
/// ```
|
||||
/// # let wat = None;
|
||||
/// match wat {
|
||||
/// | None | Some(false) => {}
|
||||
/// | Some(true) => {}
|
||||
/// }
|
||||
/// ```
|
||||
///
|
||||
/// The compiler accepts it only to facilitate some situations in
|
||||
/// macro-generated code where a macro author might need to write:
|
||||
///
|
||||
/// ```
|
||||
/// # macro_rules! doc {
|
||||
/// # ($value:expr, ($($conditions1:pat),*), ($($conditions2:pat),*), $then:expr) => {
|
||||
/// match $value {
|
||||
/// $(| $conditions1)* $(| $conditions2)* => $then
|
||||
/// }
|
||||
/// # };
|
||||
/// # }
|
||||
/// #
|
||||
/// # doc!(true, (true), (false), {});
|
||||
/// # doc!(true, (), (true, false), {});
|
||||
/// # doc!(true, (true, false), (), {});
|
||||
/// ```
|
||||
///
|
||||
/// Expressing the same thing correctly in the case that either one (but
|
||||
/// not both) of `$conditions1` and `$conditions2` might be empty,
|
||||
/// without leading `|`, is complex.
|
||||
///
|
||||
/// Use [`Pat::parse_multi`] instead if you are not intending to support
|
||||
/// macro-generated macro input.
|
||||
pub fn parse_multi_with_leading_vert(input: ParseStream) -> Result<Self> {
|
||||
let leading_vert: Option<Token![|]> = input.parse()?;
|
||||
multi_pat_impl(input, leading_vert)
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
|
||||
impl Parse for PatType {
|
||||
fn parse(input: ParseStream) -> Result<Self> {
|
||||
Ok(PatType {
|
||||
attrs: Vec::new(),
|
||||
pat: Box::new(Pat::parse_single(input)?),
|
||||
colon_token: input.parse()?,
|
||||
ty: input.parse()?,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
    // Shared implementation behind `Pat::parse_multi` and
    // `Pat::parse_multi_with_leading_vert`: parse one or more `|`-separated
    // cases, wrapping in `Pat::Or` only when there is more than one case or an
    // explicit leading `|` was present.
    fn multi_pat_impl(input: ParseStream, leading_vert: Option<Token![|]>) -> Result<Pat> {
        let mut pat = Pat::parse_single(input)?;
        if leading_vert.is_some()
            || input.peek(Token![|]) && !input.peek(Token![||]) && !input.peek(Token![|=])
        {
            let mut cases = Punctuated::new();
            cases.push_value(pat);
            // `||` and `|=` are explicitly excluded so those two-character
            // tokens are not misread as a case separator.
            while input.peek(Token![|]) && !input.peek(Token![||]) && !input.peek(Token![|=]) {
                let punct = input.parse()?;
                cases.push_punct(punct);
                let pat = Pat::parse_single(input)?;
                cases.push_value(pat);
            }
            pat = Pat::Or(PatOr {
                attrs: Vec::new(),
                leading_vert,
                cases,
            });
        }
        Ok(pat)
    }
|
||||
|
||||
fn pat_path_or_macro_or_struct_or_range(input: ParseStream) -> Result<Pat> {
|
||||
let (qself, path) = path::parsing::qpath(input, true)?;
|
||||
|
||||
if qself.is_none()
|
||||
&& input.peek(Token![!])
|
||||
&& !input.peek(Token![!=])
|
||||
&& path.is_mod_style()
|
||||
{
|
||||
let bang_token: Token![!] = input.parse()?;
|
||||
let (delimiter, tokens) = mac::parse_delimiter(input)?;
|
||||
return Ok(Pat::Macro(ExprMacro {
|
||||
attrs: Vec::new(),
|
||||
mac: Macro {
|
||||
path,
|
||||
bang_token,
|
||||
delimiter,
|
||||
tokens,
|
||||
},
|
||||
}));
|
||||
}
|
||||
|
||||
if input.peek(token::Brace) {
|
||||
pat_struct(input, qself, path).map(Pat::Struct)
|
||||
} else if input.peek(token::Paren) {
|
||||
pat_tuple_struct(input, qself, path).map(Pat::TupleStruct)
|
||||
} else if input.peek(Token![..]) {
|
||||
pat_range(input, qself, path)
|
||||
} else {
|
||||
Ok(Pat::Path(ExprPath {
|
||||
attrs: Vec::new(),
|
||||
qself,
|
||||
path,
|
||||
}))
|
||||
}
|
||||
}
|
||||
|
||||
fn pat_wild(input: ParseStream) -> Result<PatWild> {
|
||||
Ok(PatWild {
|
||||
attrs: Vec::new(),
|
||||
underscore_token: input.parse()?,
|
||||
})
|
||||
}
|
||||
|
||||
    // Consume a `box PAT` pattern. The `Pat` enum in this file has no
    // dedicated variant for box patterns, so the exact source tokens (captured
    // from the `begin` fork through the current position) are preserved as
    // `Pat::Verbatim` instead of producing an error.
    fn pat_box(begin: ParseBuffer, input: ParseStream) -> Result<Pat> {
        input.parse::<Token![box]>()?;
        Pat::parse_single(input)?;
        Ok(Pat::Verbatim(verbatim::between(&begin, input)))
    }
|
||||
|
||||
fn pat_ident(input: ParseStream) -> Result<PatIdent> {
|
||||
Ok(PatIdent {
|
||||
attrs: Vec::new(),
|
||||
by_ref: input.parse()?,
|
||||
mutability: input.parse()?,
|
||||
ident: input.call(Ident::parse_any)?,
|
||||
subpat: {
|
||||
if input.peek(Token![@]) {
|
||||
let at_token: Token![@] = input.parse()?;
|
||||
let subpat = Pat::parse_single(input)?;
|
||||
Some((at_token, Box::new(subpat)))
|
||||
} else {
|
||||
None
|
||||
}
|
||||
},
|
||||
})
|
||||
}
|
||||
|
||||
fn pat_tuple_struct(
|
||||
input: ParseStream,
|
||||
qself: Option<QSelf>,
|
||||
path: Path,
|
||||
) -> Result<PatTupleStruct> {
|
||||
let content;
|
||||
let paren_token = parenthesized!(content in input);
|
||||
|
||||
let mut elems = Punctuated::new();
|
||||
while !content.is_empty() {
|
||||
let value = Pat::parse_multi_with_leading_vert(&content)?;
|
||||
elems.push_value(value);
|
||||
if content.is_empty() {
|
||||
break;
|
||||
}
|
||||
let punct = content.parse()?;
|
||||
elems.push_punct(punct);
|
||||
}
|
||||
|
||||
Ok(PatTupleStruct {
|
||||
attrs: Vec::new(),
|
||||
qself,
|
||||
path,
|
||||
paren_token,
|
||||
elems,
|
||||
})
|
||||
}
|
||||
|
||||
fn pat_struct(input: ParseStream, qself: Option<QSelf>, path: Path) -> Result<PatStruct> {
|
||||
let content;
|
||||
let brace_token = braced!(content in input);
|
||||
|
||||
let mut fields = Punctuated::new();
|
||||
let mut rest = None;
|
||||
while !content.is_empty() {
|
||||
let attrs = content.call(Attribute::parse_outer)?;
|
||||
if content.peek(Token![..]) {
|
||||
rest = Some(PatRest {
|
||||
attrs,
|
||||
dot2_token: content.parse()?,
|
||||
});
|
||||
break;
|
||||
}
|
||||
let mut value = content.call(field_pat)?;
|
||||
value.attrs = attrs;
|
||||
fields.push_value(value);
|
||||
if content.is_empty() {
|
||||
break;
|
||||
}
|
||||
let punct: Token![,] = content.parse()?;
|
||||
fields.push_punct(punct);
|
||||
}
|
||||
|
||||
Ok(PatStruct {
|
||||
attrs: Vec::new(),
|
||||
qself,
|
||||
path,
|
||||
brace_token,
|
||||
fields,
|
||||
rest,
|
||||
})
|
||||
}
|
||||
|
||||
    // Parse one field of a struct pattern, covering both the explicit
    // `member: pat` form and the shorthand `x` / `ref y` / `mut z` form.
    fn field_pat(input: ParseStream) -> Result<FieldPat> {
        let begin = input.fork();
        let boxed: Option<Token![box]> = input.parse()?;
        let by_ref: Option<Token![ref]> = input.parse()?;
        let mutability: Option<Token![mut]> = input.parse()?;

        // After a `box`/`ref`/`mut` modifier the member must be a named ident;
        // otherwise it may also be an unnamed (numeric) member, e.g. `0: pat`.
        let member = if boxed.is_some() || by_ref.is_some() || mutability.is_some() {
            input.parse().map(Member::Named)
        } else {
            input.parse()
        }?;

        // Explicit `member: pat` form. Unnamed members always take this path
        // since there is no shorthand for them.
        if boxed.is_none() && by_ref.is_none() && mutability.is_none() && input.peek(Token![:])
            || !member.is_named()
        {
            return Ok(FieldPat {
                attrs: Vec::new(),
                member,
                colon_token: Some(input.parse()?),
                pat: Box::new(Pat::parse_multi_with_leading_vert(input)?),
            });
        }

        // Shorthand form: the member name doubles as the binding ident and
        // there is no colon token.
        let ident = match member {
            Member::Named(ident) => ident,
            // Unnamed members were handled by the early return above.
            Member::Unnamed(_) => unreachable!(),
        };

        // `box ident` has no modeled representation; keep the raw tokens.
        let pat = if boxed.is_some() {
            Pat::Verbatim(verbatim::between(&begin, input))
        } else {
            Pat::Ident(PatIdent {
                attrs: Vec::new(),
                by_ref,
                mutability,
                ident: ident.clone(),
                subpat: None,
            })
        };

        Ok(FieldPat {
            attrs: Vec::new(),
            member: Member::Named(ident),
            colon_token: None,
            pat: Box::new(pat),
        })
    }
|
||||
|
||||
fn pat_range(input: ParseStream, qself: Option<QSelf>, path: Path) -> Result<Pat> {
|
||||
let limits = RangeLimits::parse_obsolete(input)?;
|
||||
let end = input.call(pat_range_bound)?;
|
||||
if let (RangeLimits::Closed(_), None) = (&limits, &end) {
|
||||
return Err(input.error("expected range upper bound"));
|
||||
}
|
||||
Ok(Pat::Range(ExprRange {
|
||||
attrs: Vec::new(),
|
||||
start: Some(Box::new(Expr::Path(ExprPath {
|
||||
attrs: Vec::new(),
|
||||
qself,
|
||||
path,
|
||||
}))),
|
||||
limits,
|
||||
end: end.map(PatRangeBound::into_expr),
|
||||
}))
|
||||
}
|
||||
|
||||
fn pat_range_half_open(input: ParseStream) -> Result<Pat> {
|
||||
let limits: RangeLimits = input.parse()?;
|
||||
let end = input.call(pat_range_bound)?;
|
||||
if end.is_some() {
|
||||
Ok(Pat::Range(ExprRange {
|
||||
attrs: Vec::new(),
|
||||
start: None,
|
||||
limits,
|
||||
end: end.map(PatRangeBound::into_expr),
|
||||
}))
|
||||
} else {
|
||||
match limits {
|
||||
RangeLimits::HalfOpen(dot2_token) => Ok(Pat::Rest(PatRest {
|
||||
attrs: Vec::new(),
|
||||
dot2_token,
|
||||
})),
|
||||
RangeLimits::Closed(_) => Err(input.error("expected range upper bound")),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn pat_paren_or_tuple(input: ParseStream) -> Result<Pat> {
|
||||
let content;
|
||||
let paren_token = parenthesized!(content in input);
|
||||
|
||||
let mut elems = Punctuated::new();
|
||||
while !content.is_empty() {
|
||||
let value = Pat::parse_multi_with_leading_vert(&content)?;
|
||||
if content.is_empty() {
|
||||
if elems.is_empty() && !matches!(value, Pat::Rest(_)) {
|
||||
return Ok(Pat::Paren(PatParen {
|
||||
attrs: Vec::new(),
|
||||
paren_token,
|
||||
pat: Box::new(value),
|
||||
}));
|
||||
}
|
||||
elems.push_value(value);
|
||||
break;
|
||||
}
|
||||
elems.push_value(value);
|
||||
let punct = content.parse()?;
|
||||
elems.push_punct(punct);
|
||||
}
|
||||
|
||||
Ok(Pat::Tuple(PatTuple {
|
||||
attrs: Vec::new(),
|
||||
paren_token,
|
||||
elems,
|
||||
}))
|
||||
}
|
||||
|
||||
fn pat_reference(input: ParseStream) -> Result<PatReference> {
|
||||
Ok(PatReference {
|
||||
attrs: Vec::new(),
|
||||
and_token: input.parse()?,
|
||||
mutability: input.parse()?,
|
||||
pat: Box::new(Pat::parse_single(input)?),
|
||||
})
|
||||
}
|
||||
|
||||
fn pat_lit_or_range(input: ParseStream) -> Result<Pat> {
|
||||
let start = input.call(pat_range_bound)?.unwrap();
|
||||
if input.peek(Token![..]) {
|
||||
let limits = RangeLimits::parse_obsolete(input)?;
|
||||
let end = input.call(pat_range_bound)?;
|
||||
if let (RangeLimits::Closed(_), None) = (&limits, &end) {
|
||||
return Err(input.error("expected range upper bound"));
|
||||
}
|
||||
Ok(Pat::Range(ExprRange {
|
||||
attrs: Vec::new(),
|
||||
start: Some(start.into_expr()),
|
||||
limits,
|
||||
end: end.map(PatRangeBound::into_expr),
|
||||
}))
|
||||
} else {
|
||||
Ok(start.into_pat())
|
||||
}
|
||||
}
|
||||
|
||||
    // Patterns that can appear on either side of a range pattern.
    enum PatRangeBound {
        Const(ExprConst),
        Lit(ExprLit),
        Path(ExprPath),
    }

    impl PatRangeBound {
        // Convert the bound into the expression node stored in an `ExprRange`.
        fn into_expr(self) -> Box<Expr> {
            Box::new(match self {
                PatRangeBound::Const(pat) => Expr::Const(pat),
                PatRangeBound::Lit(pat) => Expr::Lit(pat),
                PatRangeBound::Path(pat) => Expr::Path(pat),
            })
        }

        // Convert a standalone bound (one not followed by `..`) into the
        // corresponding plain pattern.
        fn into_pat(self) -> Pat {
            match self {
                PatRangeBound::Const(pat) => Pat::Const(pat),
                PatRangeBound::Lit(pat) => Pat::Lit(pat),
                PatRangeBound::Path(pat) => Pat::Path(pat),
            }
        }
    }
|
||||
|
||||
    // Parse an optional range-pattern bound. Returns `Ok(None)` when the
    // input begins with a token that terminates the pattern context, meaning
    // the bound is absent (a half-open range).
    fn pat_range_bound(input: ParseStream) -> Result<Option<PatRangeBound>> {
        if input.is_empty()
            || input.peek(Token![|])
            || input.peek(Token![=])
            // A lone `:` ends the bound, but `::` is part of a path.
            || input.peek(Token![:]) && !input.peek(Token![::])
            || input.peek(Token![,])
            || input.peek(Token![;])
            || input.peek(Token![if])
        {
            return Ok(None);
        }

        // A bound is a literal, a (possibly qualified) path, or a const block.
        let lookahead = input.lookahead1();
        let expr = if lookahead.peek(Lit) {
            PatRangeBound::Lit(input.parse()?)
        } else if lookahead.peek(Ident)
            || lookahead.peek(Token![::])
            || lookahead.peek(Token![<])
            || lookahead.peek(Token![self])
            || lookahead.peek(Token![Self])
            || lookahead.peek(Token![super])
            || lookahead.peek(Token![crate])
        {
            PatRangeBound::Path(input.parse()?)
        } else if lookahead.peek(Token![const]) {
            PatRangeBound::Const(input.parse()?)
        } else {
            return Err(lookahead.error());
        };

        Ok(Some(expr))
    }
|
||||
|
||||
fn pat_slice(input: ParseStream) -> Result<PatSlice> {
|
||||
let content;
|
||||
let bracket_token = bracketed!(content in input);
|
||||
|
||||
let mut elems = Punctuated::new();
|
||||
while !content.is_empty() {
|
||||
let value = Pat::parse_multi_with_leading_vert(&content)?;
|
||||
match value {
|
||||
Pat::Range(pat) if pat.start.is_none() || pat.end.is_none() => {
|
||||
let (start, end) = match pat.limits {
|
||||
RangeLimits::HalfOpen(dot_dot) => (dot_dot.spans[0], dot_dot.spans[1]),
|
||||
RangeLimits::Closed(dot_dot_eq) => {
|
||||
(dot_dot_eq.spans[0], dot_dot_eq.spans[2])
|
||||
}
|
||||
};
|
||||
let msg = "range pattern is not allowed unparenthesized inside slice pattern";
|
||||
return Err(error::new2(start, end, msg));
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
elems.push_value(value);
|
||||
if content.is_empty() {
|
||||
break;
|
||||
}
|
||||
let punct = content.parse()?;
|
||||
elems.push_punct(punct);
|
||||
}
|
||||
|
||||
Ok(PatSlice {
|
||||
attrs: Vec::new(),
|
||||
bracket_token,
|
||||
elems,
|
||||
})
|
||||
}
|
||||
|
||||
fn pat_const(input: ParseStream) -> Result<TokenStream> {
|
||||
let begin = input.fork();
|
||||
input.parse::<Token![const]>()?;
|
||||
|
||||
let content;
|
||||
braced!(content in input);
|
||||
content.call(Attribute::parse_inner)?;
|
||||
content.call(Block::parse_within)?;
|
||||
|
||||
Ok(verbatim::between(&begin, input))
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(feature = "printing")]
|
||||
mod printing {
|
||||
use super::*;
|
||||
use crate::attr::FilterAttrs;
|
||||
use proc_macro2::TokenStream;
|
||||
use quote::{ToTokens, TokenStreamExt};
|
||||
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))]
|
||||
impl ToTokens for PatIdent {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
tokens.append_all(self.attrs.outer());
|
||||
self.by_ref.to_tokens(tokens);
|
||||
self.mutability.to_tokens(tokens);
|
||||
self.ident.to_tokens(tokens);
|
||||
if let Some((at_token, subpat)) = &self.subpat {
|
||||
at_token.to_tokens(tokens);
|
||||
subpat.to_tokens(tokens);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))]
|
||||
impl ToTokens for PatOr {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
tokens.append_all(self.attrs.outer());
|
||||
self.leading_vert.to_tokens(tokens);
|
||||
self.cases.to_tokens(tokens);
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))]
|
||||
impl ToTokens for PatParen {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
tokens.append_all(self.attrs.outer());
|
||||
self.paren_token.surround(tokens, |tokens| {
|
||||
self.pat.to_tokens(tokens);
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))]
|
||||
impl ToTokens for PatReference {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
tokens.append_all(self.attrs.outer());
|
||||
self.and_token.to_tokens(tokens);
|
||||
self.mutability.to_tokens(tokens);
|
||||
self.pat.to_tokens(tokens);
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))]
|
||||
impl ToTokens for PatRest {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
tokens.append_all(self.attrs.outer());
|
||||
self.dot2_token.to_tokens(tokens);
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))]
|
||||
impl ToTokens for PatSlice {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
tokens.append_all(self.attrs.outer());
|
||||
self.bracket_token.surround(tokens, |tokens| {
|
||||
self.elems.to_tokens(tokens);
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))]
|
||||
impl ToTokens for PatStruct {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
tokens.append_all(self.attrs.outer());
|
||||
path::printing::print_path(tokens, &self.qself, &self.path);
|
||||
self.brace_token.surround(tokens, |tokens| {
|
||||
self.fields.to_tokens(tokens);
|
||||
// NOTE: We need a comma before the dot2 token if it is present.
|
||||
if !self.fields.empty_or_trailing() && self.rest.is_some() {
|
||||
<Token![,]>::default().to_tokens(tokens);
|
||||
}
|
||||
self.rest.to_tokens(tokens);
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))]
|
||||
impl ToTokens for PatTuple {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
tokens.append_all(self.attrs.outer());
|
||||
self.paren_token.surround(tokens, |tokens| {
|
||||
self.elems.to_tokens(tokens);
|
||||
// If there is only one element, a trailing comma is needed to
|
||||
// distinguish PatTuple from PatParen, unless this is `(..)`
|
||||
// which is a tuple pattern even without comma.
|
||||
if self.elems.len() == 1
|
||||
&& !self.elems.trailing_punct()
|
||||
&& !matches!(self.elems[0], Pat::Rest { .. })
|
||||
{
|
||||
<Token![,]>::default().to_tokens(tokens);
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))]
|
||||
impl ToTokens for PatTupleStruct {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
tokens.append_all(self.attrs.outer());
|
||||
path::printing::print_path(tokens, &self.qself, &self.path);
|
||||
self.paren_token.surround(tokens, |tokens| {
|
||||
self.elems.to_tokens(tokens);
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))]
|
||||
impl ToTokens for PatType {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
tokens.append_all(self.attrs.outer());
|
||||
self.pat.to_tokens(tokens);
|
||||
self.colon_token.to_tokens(tokens);
|
||||
self.ty.to_tokens(tokens);
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))]
|
||||
impl ToTokens for PatWild {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
tokens.append_all(self.attrs.outer());
|
||||
self.underscore_token.to_tokens(tokens);
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))]
|
||||
impl ToTokens for FieldPat {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
tokens.append_all(self.attrs.outer());
|
||||
if let Some(colon_token) = &self.colon_token {
|
||||
self.member.to_tokens(tokens);
|
||||
colon_token.to_tokens(tokens);
|
||||
}
|
||||
self.pat.to_tokens(tokens);
|
||||
}
|
||||
}
|
||||
}
|
||||
877
vendor/syn/src/path.rs
vendored
Normal file
877
vendor/syn/src/path.rs
vendored
Normal file
@@ -0,0 +1,877 @@
|
||||
use super::*;
|
||||
use crate::punctuated::Punctuated;
|
||||
|
||||
ast_struct! {
    /// A path at which a named item is exported (e.g. `std::collections::HashMap`).
    #[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))]
    pub struct Path {
        /// The `::` in front of a fully qualified path, if present.
        pub leading_colon: Option<Token![::]>,
        /// The `::`-separated segments: `std`, `collections`, `HashMap`.
        pub segments: Punctuated<PathSegment, Token![::]>,
    }
}
|
||||
|
||||
impl<T> From<T> for Path
|
||||
where
|
||||
T: Into<PathSegment>,
|
||||
{
|
||||
fn from(segment: T) -> Self {
|
||||
let mut path = Path {
|
||||
leading_colon: None,
|
||||
segments: Punctuated::new(),
|
||||
};
|
||||
path.segments.push_value(segment.into());
|
||||
path
|
||||
}
|
||||
}
|
||||
|
||||
impl Path {
|
||||
/// Determines whether this is a path of length 1 equal to the given
|
||||
/// ident.
|
||||
///
|
||||
/// For them to compare equal, it must be the case that:
|
||||
///
|
||||
/// - the path has no leading colon,
|
||||
/// - the number of path segments is 1,
|
||||
/// - the first path segment has no angle bracketed or parenthesized
|
||||
/// path arguments, and
|
||||
/// - the ident of the first path segment is equal to the given one.
|
||||
///
|
||||
/// # Example
|
||||
///
|
||||
/// ```
|
||||
/// use proc_macro2::TokenStream;
|
||||
/// use syn::{Attribute, Error, Meta, Result};
|
||||
///
|
||||
/// fn get_serde_meta_item(attr: &Attribute) -> Result<Option<&TokenStream>> {
|
||||
/// if attr.path().is_ident("serde") {
|
||||
/// match &attr.meta {
|
||||
/// Meta::List(meta) => Ok(Some(&meta.tokens)),
|
||||
/// bad => Err(Error::new_spanned(bad, "unrecognized attribute")),
|
||||
/// }
|
||||
/// } else {
|
||||
/// Ok(None)
|
||||
/// }
|
||||
/// }
|
||||
/// ```
|
||||
pub fn is_ident<I>(&self, ident: &I) -> bool
|
||||
where
|
||||
I: ?Sized,
|
||||
Ident: PartialEq<I>,
|
||||
{
|
||||
match self.get_ident() {
|
||||
Some(id) => id == ident,
|
||||
None => false,
|
||||
}
|
||||
}
|
||||
|
||||
/// If this path consists of a single ident, returns the ident.
|
||||
///
|
||||
/// A path is considered an ident if:
|
||||
///
|
||||
/// - the path has no leading colon,
|
||||
/// - the number of path segments is 1, and
|
||||
/// - the first path segment has no angle bracketed or parenthesized
|
||||
/// path arguments.
|
||||
pub fn get_ident(&self) -> Option<&Ident> {
|
||||
if self.leading_colon.is_none()
|
||||
&& self.segments.len() == 1
|
||||
&& self.segments[0].arguments.is_none()
|
||||
{
|
||||
Some(&self.segments[0].ident)
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
/// An error if this path is not a single ident, as defined in `get_ident`.
|
||||
#[cfg(feature = "parsing")]
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
|
||||
pub fn require_ident(&self) -> Result<&Ident> {
|
||||
self.get_ident().ok_or_else(|| {
|
||||
crate::error::new2(
|
||||
self.segments.first().unwrap().ident.span(),
|
||||
self.segments.last().unwrap().ident.span(),
|
||||
"expected this path to be an identifier",
|
||||
)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
ast_struct! {
    /// A segment of a path together with any path arguments on that segment.
    #[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))]
    pub struct PathSegment {
        /// The segment's name, e.g. `HashMap` in `std::collections::HashMap`.
        pub ident: Ident,
        /// Generic arguments attached to this segment, if any.
        pub arguments: PathArguments,
    }
}
|
||||
|
||||
impl<T> From<T> for PathSegment
|
||||
where
|
||||
T: Into<Ident>,
|
||||
{
|
||||
fn from(ident: T) -> Self {
|
||||
PathSegment {
|
||||
ident: ident.into(),
|
||||
arguments: PathArguments::None,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
ast_enum! {
    /// Angle bracketed or parenthesized arguments of a path segment.
    ///
    /// ## Angle bracketed
    ///
    /// The `<'a, T>` in `std::slice::iter<'a, T>`.
    ///
    /// ## Parenthesized
    ///
    /// The `(A, B) -> C` in `Fn(A, B) -> C`.
    #[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))]
    pub enum PathArguments {
        /// No arguments on this segment — the common case.
        None,
        /// The `<'a, T>` in `std::slice::iter<'a, T>`.
        AngleBracketed(AngleBracketedGenericArguments),
        /// The `(A, B) -> C` in `Fn(A, B) -> C`.
        Parenthesized(ParenthesizedGenericArguments),
    }
}
|
||||
|
||||
impl Default for PathArguments {
|
||||
fn default() -> Self {
|
||||
PathArguments::None
|
||||
}
|
||||
}
|
||||
|
||||
impl PathArguments {
|
||||
pub fn is_empty(&self) -> bool {
|
||||
match self {
|
||||
PathArguments::None => true,
|
||||
PathArguments::AngleBracketed(bracketed) => bracketed.args.is_empty(),
|
||||
PathArguments::Parenthesized(_) => false,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn is_none(&self) -> bool {
|
||||
match self {
|
||||
PathArguments::None => true,
|
||||
PathArguments::AngleBracketed(_) | PathArguments::Parenthesized(_) => false,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
ast_enum! {
    /// An individual generic argument, like `'a`, `T`, or `Item = T`.
    #[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))]
    #[non_exhaustive]
    pub enum GenericArgument {
        /// A lifetime argument.
        Lifetime(Lifetime),
        /// A type argument.
        Type(Type),
        /// A const expression. Must be inside of a block.
        ///
        /// NOTE: Identity expressions are represented as Type arguments, as
        /// they are indistinguishable syntactically.
        Const(Expr),
        /// A binding (equality constraint) on an associated type: the `Item =
        /// u8` in `Iterator<Item = u8>`.
        AssocType(AssocType),
        /// An equality constraint on an associated constant: the `PANIC =
        /// false` in `Trait<PANIC = false>`.
        AssocConst(AssocConst),
        /// An associated type bound: `Iterator<Item: Display>`.
        Constraint(Constraint),
    }
}

ast_struct! {
    /// Angle bracketed arguments of a path segment: the `<K, V>` in `HashMap<K,
    /// V>`.
    #[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))]
    pub struct AngleBracketedGenericArguments {
        /// The `::` of turbofish syntax (e.g. `Vec::<u8>`), if present.
        pub colon2_token: Option<Token![::]>,
        pub lt_token: Token![<],
        pub args: Punctuated<GenericArgument, Token![,]>,
        pub gt_token: Token![>],
    }
}

ast_struct! {
    /// A binding (equality constraint) on an associated type: the `Item = u8`
    /// in `Iterator<Item = u8>`.
    #[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))]
    pub struct AssocType {
        pub ident: Ident,
        /// Generic arguments on the associated type's own name, if any.
        pub generics: Option<AngleBracketedGenericArguments>,
        pub eq_token: Token![=],
        pub ty: Type,
    }
}

ast_struct! {
    /// An equality constraint on an associated constant: the `PANIC = false` in
    /// `Trait<PANIC = false>`.
    #[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))]
    pub struct AssocConst {
        pub ident: Ident,
        /// Generic arguments on the associated const's own name, if any.
        pub generics: Option<AngleBracketedGenericArguments>,
        pub eq_token: Token![=],
        pub value: Expr,
    }
}

ast_struct! {
    /// An associated type bound: `Iterator<Item: Display>`.
    #[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))]
    pub struct Constraint {
        pub ident: Ident,
        pub generics: Option<AngleBracketedGenericArguments>,
        pub colon_token: Token![:],
        pub bounds: Punctuated<TypeParamBound, Token![+]>,
    }
}

ast_struct! {
    /// Arguments of a function path segment: the `(A, B) -> C` in `Fn(A,B) ->
    /// C`.
    #[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))]
    pub struct ParenthesizedGenericArguments {
        pub paren_token: token::Paren,
        /// `(A, B)`
        pub inputs: Punctuated<Type, Token![,]>,
        /// `C`
        pub output: ReturnType,
    }
}

ast_struct! {
    /// The explicit Self type in a qualified path: the `T` in `<T as
    /// Display>::fmt`.
    ///
    /// The actual path, including the trait and the associated item, is stored
    /// separately. The `position` field represents the index of the associated
    /// item qualified with this Self type.
    ///
    /// ```text
    /// <Vec<T> as a::b::Trait>::AssociatedItem
    ///  ^~~~~~    ~~~~~~~~~~~~~~^
    ///  ty        position = 3
    ///
    /// <Vec<T>>::AssociatedItem
    ///  ^~~~~~   ^
    ///  ty       position = 0
    /// ```
    #[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))]
    pub struct QSelf {
        pub lt_token: Token![<],
        pub ty: Box<Type>,
        /// Number of leading path segments that fall inside the angle
        /// brackets when printing (see the diagram above).
        pub position: usize,
        pub as_token: Option<Token![as]>,
        pub gt_token: Token![>],
    }
}
|
||||
|
||||
#[cfg(feature = "parsing")]
pub(crate) mod parsing {
    use super::*;

    use crate::ext::IdentExt as _;
    use crate::parse::{Parse, ParseStream, Result};

    #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
    impl Parse for Path {
        fn parse(input: ParseStream) -> Result<Self> {
            // Type-style path: `expr_style = false`, so `<...>` generic
            // arguments are accepted without a turbofish.
            Self::parse_helper(input, false)
        }
    }

    #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
    impl Parse for GenericArgument {
        fn parse(input: ParseStream) -> Result<Self> {
            // A lifetime, unless it begins a bound list like `'a + Trait`,
            // which must instead be parsed as a type below.
            if input.peek(Lifetime) && !input.peek2(Token![+]) {
                return Ok(GenericArgument::Lifetime(input.parse()?));
            }

            // A literal or a braced block is a const argument.
            if input.peek(Lit) || input.peek(token::Brace) {
                return const_argument(input).map(GenericArgument::Const);
            }

            let mut argument: Type = input.parse()?;

            match argument {
                // A bare single-segment path such as `Item` or `Item<T>` may
                // turn out to be an associated-item binding (`Item = ...`) or,
                // with the "full" feature, a constraint (`Item: Bound`).
                Type::Path(mut ty)
                    if ty.qself.is_none()
                        && ty.path.leading_colon.is_none()
                        && ty.path.segments.len() == 1
                        && match &ty.path.segments[0].arguments {
                            PathArguments::None | PathArguments::AngleBracketed(_) => true,
                            PathArguments::Parenthesized(_) => false,
                        } =>
                {
                    if let Some(eq_token) = input.parse::<Option<Token![=]>>()? {
                        let segment = ty.path.segments.pop().unwrap().into_value();
                        let ident = segment.ident;
                        let generics = match segment.arguments {
                            PathArguments::None => None,
                            PathArguments::AngleBracketed(arguments) => Some(arguments),
                            // Excluded by the match guard above.
                            PathArguments::Parenthesized(_) => unreachable!(),
                        };
                        return if input.peek(Lit) || input.peek(token::Brace) {
                            // `Ident = <const expr>` — associated const.
                            Ok(GenericArgument::AssocConst(AssocConst {
                                ident,
                                generics,
                                eq_token,
                                value: const_argument(input)?,
                            }))
                        } else {
                            // `Ident = Type` — associated type binding.
                            Ok(GenericArgument::AssocType(AssocType {
                                ident,
                                generics,
                                eq_token,
                                ty: input.parse()?,
                            }))
                        };
                    }

                    #[cfg(feature = "full")]
                    if let Some(colon_token) = input.parse::<Option<Token![:]>>()? {
                        // `Ident: Bound + ...` — associated type bound.
                        let segment = ty.path.segments.pop().unwrap().into_value();
                        return Ok(GenericArgument::Constraint(Constraint {
                            ident: segment.ident,
                            generics: match segment.arguments {
                                PathArguments::None => None,
                                PathArguments::AngleBracketed(arguments) => Some(arguments),
                                PathArguments::Parenthesized(_) => unreachable!(),
                            },
                            colon_token,
                            bounds: {
                                // `+`-separated bounds, terminated by `,` or `>`.
                                let mut bounds = Punctuated::new();
                                loop {
                                    if input.peek(Token![,]) || input.peek(Token![>]) {
                                        break;
                                    }
                                    let value: TypeParamBound = input.parse()?;
                                    bounds.push_value(value);
                                    if !input.peek(Token![+]) {
                                        break;
                                    }
                                    let punct: Token![+] = input.parse()?;
                                    bounds.push_punct(punct);
                                }
                                bounds
                            },
                        }));
                    }

                    // Not followed by `=` or `:` — it really was a type.
                    argument = Type::Path(ty);
                }
                _ => {}
            }

            Ok(GenericArgument::Type(argument))
        }
    }

    /// Parse the expression forms permitted as a const generic argument:
    /// a literal, a bare identifier, or a braced block.
    pub(crate) fn const_argument(input: ParseStream) -> Result<Expr> {
        let lookahead = input.lookahead1();

        if input.peek(Lit) {
            let lit = input.parse()?;
            return Ok(Expr::Lit(lit));
        }

        if input.peek(Ident) {
            // A bare ident (e.g. the name of a const) is represented as a
            // one-segment path expression.
            let ident: Ident = input.parse()?;
            return Ok(Expr::Path(ExprPath {
                attrs: Vec::new(),
                qself: None,
                path: Path::from(ident),
            }));
        }

        if input.peek(token::Brace) {
            #[cfg(feature = "full")]
            {
                let block: ExprBlock = input.parse()?;
                return Ok(Expr::Block(block));
            }

            #[cfg(not(feature = "full"))]
            {
                // Without the "full" feature there is no ExprBlock; validate
                // the contents parse as an Expr, then keep the raw tokens
                // verbatim.
                let begin = input.fork();
                let content;
                braced!(content in input);
                content.parse::<Expr>()?;
                let verbatim = verbatim::between(&begin, input);
                return Ok(Expr::Verbatim(verbatim));
            }
        }

        Err(lookahead.error())
    }

    impl AngleBracketedGenericArguments {
        /// Parse `::<…>` with mandatory leading `::`.
        ///
        /// The ordinary [`Parse`] impl for `AngleBracketedGenericArguments`
        /// parses optional leading `::`.
        #[cfg(feature = "full")]
        #[cfg_attr(doc_cfg, doc(cfg(all(feature = "parsing", feature = "full"))))]
        pub fn parse_turbofish(input: ParseStream) -> Result<Self> {
            let colon2_token: Token![::] = input.parse()?;
            Self::do_parse(Some(colon2_token), input)
        }

        // Shared body for the turbofish and non-turbofish entry points; the
        // caller has already decided whether a leading `::` is present.
        pub(crate) fn do_parse(
            colon2_token: Option<Token![::]>,
            input: ParseStream,
        ) -> Result<Self> {
            Ok(AngleBracketedGenericArguments {
                colon2_token,
                lt_token: input.parse()?,
                args: {
                    // Comma-separated arguments terminated by `>`; a trailing
                    // comma is permitted.
                    let mut args = Punctuated::new();
                    loop {
                        if input.peek(Token![>]) {
                            break;
                        }
                        let value: GenericArgument = input.parse()?;
                        args.push_value(value);
                        if input.peek(Token![>]) {
                            break;
                        }
                        let punct: Token![,] = input.parse()?;
                        args.push_punct(punct);
                    }
                    args
                },
                gt_token: input.parse()?,
            })
        }
    }

    #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
    impl Parse for AngleBracketedGenericArguments {
        fn parse(input: ParseStream) -> Result<Self> {
            let colon2_token: Option<Token![::]> = input.parse()?;
            Self::do_parse(colon2_token, input)
        }
    }

    #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
    impl Parse for ParenthesizedGenericArguments {
        fn parse(input: ParseStream) -> Result<Self> {
            let content;
            Ok(ParenthesizedGenericArguments {
                paren_token: parenthesized!(content in input),
                inputs: content.parse_terminated(Type::parse, Token![,])?,
                // `without_plus`: the return type of `Fn(...) -> C` sugar may
                // not contain a top-level `+` bound.
                output: input.call(ReturnType::without_plus)?,
            })
        }
    }

    #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
    impl Parse for PathSegment {
        fn parse(input: ParseStream) -> Result<Self> {
            Self::parse_helper(input, false)
        }
    }

    impl PathSegment {
        fn parse_helper(input: ParseStream, expr_style: bool) -> Result<Self> {
            // Path keywords that act as segments but are not ordinary idents;
            // these never take generic arguments.
            if input.peek(Token![super])
                || input.peek(Token![self])
                || input.peek(Token![crate])
                || cfg!(feature = "full") && input.peek(Token![try])
            {
                let ident = input.call(Ident::parse_any)?;
                return Ok(PathSegment::from(ident));
            }

            let ident = if input.peek(Token![Self]) {
                input.call(Ident::parse_any)?
            } else {
                input.parse()?
            };

            // Generic arguments: `<...>` directly only in type position
            // (`!expr_style`), or `::<...>` (turbofish) in either position.
            // `<=` is a comparison operator, not the start of arguments.
            if !expr_style && input.peek(Token![<]) && !input.peek(Token![<=])
                || input.peek(Token![::]) && input.peek3(Token![<])
            {
                Ok(PathSegment {
                    ident,
                    arguments: PathArguments::AngleBracketed(input.parse()?),
                })
            } else {
                Ok(PathSegment::from(ident))
            }
        }
    }

    impl Path {
        /// Parse a `Path` containing no path arguments on any of its segments.
        ///
        /// # Example
        ///
        /// ```
        /// use syn::{Path, Result, Token};
        /// use syn::parse::{Parse, ParseStream};
        ///
        /// // A simplified single `use` statement like:
        /// //
        /// //     use std::collections::HashMap;
        /// //
        /// // Note that generic parameters are not allowed in a `use` statement
        /// // so the following must not be accepted.
        /// //
        /// //     use a::<b>::c;
        /// struct SingleUse {
        ///     use_token: Token![use],
        ///     path: Path,
        /// }
        ///
        /// impl Parse for SingleUse {
        ///     fn parse(input: ParseStream) -> Result<Self> {
        ///         Ok(SingleUse {
        ///             use_token: input.parse()?,
        ///             path: input.call(Path::parse_mod_style)?,
        ///         })
        ///     }
        /// }
        /// ```
        #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
        pub fn parse_mod_style(input: ParseStream) -> Result<Self> {
            Ok(Path {
                leading_colon: input.parse()?,
                segments: {
                    let mut segments = Punctuated::new();
                    loop {
                        if !input.peek(Ident)
                            && !input.peek(Token![super])
                            && !input.peek(Token![self])
                            && !input.peek(Token![Self])
                            && !input.peek(Token![crate])
                        {
                            break;
                        }
                        let ident = Ident::parse_any(input)?;
                        segments.push_value(PathSegment::from(ident));
                        if !input.peek(Token![::]) {
                            break;
                        }
                        let punct = input.parse()?;
                        segments.push_punct(punct);
                    }
                    if segments.is_empty() {
                        // Reuse the ident parser's "expected identifier" error.
                        return Err(input.parse::<Ident>().unwrap_err());
                    } else if segments.trailing_punct() {
                        return Err(input.error("expected path segment after `::`"));
                    }
                    segments
                },
            })
        }

        // Parse a path with at least one segment; `expr_style` selects
        // expression-position rules (turbofish required for generics).
        pub(crate) fn parse_helper(input: ParseStream, expr_style: bool) -> Result<Self> {
            let mut path = Path {
                leading_colon: input.parse()?,
                segments: {
                    let mut segments = Punctuated::new();
                    let value = PathSegment::parse_helper(input, expr_style)?;
                    segments.push_value(value);
                    segments
                },
            };
            Path::parse_rest(input, &mut path, expr_style)?;
            Ok(path)
        }

        // Continue appending `::segment` pairs onto an already-parsed path.
        pub(crate) fn parse_rest(
            input: ParseStream,
            path: &mut Self,
            expr_style: bool,
        ) -> Result<()> {
            // Stop before a `::(` — those tokens are not another path
            // segment (NOTE(review): presumably left for the caller, e.g.
            // parenthesized arguments or a call — confirm against callers).
            while input.peek(Token![::]) && !input.peek3(token::Paren) {
                let punct: Token![::] = input.parse()?;
                path.segments.push_punct(punct);
                let value = PathSegment::parse_helper(input, expr_style)?;
                path.segments.push_value(value);
            }
            Ok(())
        }

        // True if no segment carries generic arguments.
        pub(crate) fn is_mod_style(&self) -> bool {
            self.segments
                .iter()
                .all(|segment| segment.arguments.is_none())
        }
    }

    // Parse a possibly-qualified path: either `<Ty as Trait>::rest` /
    // `<Ty>::rest`, or a plain path with no qualified self type.
    pub(crate) fn qpath(input: ParseStream, expr_style: bool) -> Result<(Option<QSelf>, Path)> {
        if input.peek(Token![<]) {
            let lt_token: Token![<] = input.parse()?;
            let this: Type = input.parse()?;
            let path = if input.peek(Token![as]) {
                let as_token: Token![as] = input.parse()?;
                let path: Path = input.parse()?;
                Some((as_token, path))
            } else {
                None
            };
            let gt_token: Token![>] = input.parse()?;
            let colon2_token: Token![::] = input.parse()?;
            let mut rest = Punctuated::new();
            loop {
                let path = PathSegment::parse_helper(input, expr_style)?;
                rest.push_value(path);
                if !input.peek(Token![::]) {
                    break;
                }
                let punct: Token![::] = input.parse()?;
                rest.push_punct(punct);
            }
            let (position, as_token, path) = match path {
                Some((as_token, mut path)) => {
                    // `<Ty as a::b::Trait>::rest` — splice `rest` onto the
                    // trait path; QSelf::position records where it begins.
                    let pos = path.segments.len();
                    path.segments.push_punct(colon2_token);
                    path.segments.extend(rest.into_pairs());
                    (pos, Some(as_token), path)
                }
                None => {
                    // `<Ty>::rest` — the whole path is `rest`; the parsed
                    // `::` becomes the leading colon.
                    let path = Path {
                        leading_colon: Some(colon2_token),
                        segments: rest,
                    };
                    (0, None, path)
                }
            };
            let qself = QSelf {
                lt_token,
                ty: Box::new(this),
                position,
                as_token,
                gt_token,
            };
            Ok((Some(qself), path))
        } else {
            let path = Path::parse_helper(input, expr_style)?;
            Ok((None, path))
        }
    }
}
|
||||
|
||||
#[cfg(feature = "printing")]
pub(crate) mod printing {
    use super::*;
    use crate::print::TokensOrDefault;
    #[cfg(feature = "parsing")]
    use crate::spanned::Spanned;
    #[cfg(feature = "parsing")]
    use proc_macro2::Span;
    use proc_macro2::TokenStream;
    use quote::ToTokens;
    use std::cmp;

    #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))]
    impl ToTokens for Path {
        fn to_tokens(&self, tokens: &mut TokenStream) {
            self.leading_colon.to_tokens(tokens);
            self.segments.to_tokens(tokens);
        }
    }

    #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))]
    impl ToTokens for PathSegment {
        fn to_tokens(&self, tokens: &mut TokenStream) {
            self.ident.to_tokens(tokens);
            self.arguments.to_tokens(tokens);
        }
    }

    #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))]
    impl ToTokens for PathArguments {
        fn to_tokens(&self, tokens: &mut TokenStream) {
            match self {
                // `None` prints nothing at all.
                PathArguments::None => {}
                PathArguments::AngleBracketed(arguments) => {
                    arguments.to_tokens(tokens);
                }
                PathArguments::Parenthesized(arguments) => {
                    arguments.to_tokens(tokens);
                }
            }
        }
    }

    #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))]
    impl ToTokens for GenericArgument {
        #[allow(clippy::match_same_arms)]
        fn to_tokens(&self, tokens: &mut TokenStream) {
            match self {
                GenericArgument::Lifetime(lt) => lt.to_tokens(tokens),
                GenericArgument::Type(ty) => ty.to_tokens(tokens),
                GenericArgument::Const(expr) => match expr {
                    // Literals and plain one-ident paths are legal const
                    // arguments without braces and are printed bare.
                    Expr::Lit(expr) => expr.to_tokens(tokens),

                    Expr::Path(expr)
                        if expr.attrs.is_empty()
                            && expr.qself.is_none()
                            && expr.path.get_ident().is_some() =>
                    {
                        expr.to_tokens(tokens);
                    }

                    #[cfg(feature = "full")]
                    Expr::Block(expr) => expr.to_tokens(tokens),

                    #[cfg(not(feature = "full"))]
                    Expr::Verbatim(expr) => expr.to_tokens(tokens),

                    // ERROR CORRECTION: Add braces to make sure that the
                    // generated code is valid.
                    _ => token::Brace::default().surround(tokens, |tokens| {
                        expr.to_tokens(tokens);
                    }),
                },
                GenericArgument::AssocType(assoc) => assoc.to_tokens(tokens),
                GenericArgument::AssocConst(assoc) => assoc.to_tokens(tokens),
                GenericArgument::Constraint(constraint) => constraint.to_tokens(tokens),
            }
        }
    }

    #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))]
    impl ToTokens for AngleBracketedGenericArguments {
        fn to_tokens(&self, tokens: &mut TokenStream) {
            self.colon2_token.to_tokens(tokens);
            self.lt_token.to_tokens(tokens);

            // Print lifetimes before types/consts/bindings, regardless of their
            // order in self.args.
            let mut trailing_or_empty = true;
            for param in self.args.pairs() {
                match param.value() {
                    GenericArgument::Lifetime(_) => {
                        param.to_tokens(tokens);
                        trailing_or_empty = param.punct().is_some();
                    }
                    GenericArgument::Type(_)
                    | GenericArgument::Const(_)
                    | GenericArgument::AssocType(_)
                    | GenericArgument::AssocConst(_)
                    | GenericArgument::Constraint(_) => {}
                }
            }
            // Second pass: everything that is not a lifetime, inserting a
            // separator whenever the previously printed argument lacked one.
            for param in self.args.pairs() {
                match param.value() {
                    GenericArgument::Type(_)
                    | GenericArgument::Const(_)
                    | GenericArgument::AssocType(_)
                    | GenericArgument::AssocConst(_)
                    | GenericArgument::Constraint(_) => {
                        if !trailing_or_empty {
                            <Token![,]>::default().to_tokens(tokens);
                        }
                        param.to_tokens(tokens);
                        trailing_or_empty = param.punct().is_some();
                    }
                    GenericArgument::Lifetime(_) => {}
                }
            }

            self.gt_token.to_tokens(tokens);
        }
    }

    #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))]
    impl ToTokens for AssocType {
        fn to_tokens(&self, tokens: &mut TokenStream) {
            // `Ident<generics> = Type`
            self.ident.to_tokens(tokens);
            self.generics.to_tokens(tokens);
            self.eq_token.to_tokens(tokens);
            self.ty.to_tokens(tokens);
        }
    }

    #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))]
    impl ToTokens for AssocConst {
        fn to_tokens(&self, tokens: &mut TokenStream) {
            // `Ident<generics> = value`
            self.ident.to_tokens(tokens);
            self.generics.to_tokens(tokens);
            self.eq_token.to_tokens(tokens);
            self.value.to_tokens(tokens);
        }
    }

    #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))]
    impl ToTokens for Constraint {
        fn to_tokens(&self, tokens: &mut TokenStream) {
            // `Ident<generics>: bounds`
            self.ident.to_tokens(tokens);
            self.generics.to_tokens(tokens);
            self.colon_token.to_tokens(tokens);
            self.bounds.to_tokens(tokens);
        }
    }

    #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))]
    impl ToTokens for ParenthesizedGenericArguments {
        fn to_tokens(&self, tokens: &mut TokenStream) {
            // `(inputs) output`
            self.paren_token.surround(tokens, |tokens| {
                self.inputs.to_tokens(tokens);
            });
            self.output.to_tokens(tokens);
        }
    }

    // Print a path together with its optional qualified self type, e.g.
    // `<Vec<T> as Trait>::Assoc`. `qself.position` determines how many
    // leading segments are printed inside the angle brackets.
    pub(crate) fn print_path(tokens: &mut TokenStream, qself: &Option<QSelf>, path: &Path) {
        let qself = match qself {
            Some(qself) => qself,
            None => {
                path.to_tokens(tokens);
                return;
            }
        };
        qself.lt_token.to_tokens(tokens);
        qself.ty.to_tokens(tokens);

        // Clamp so a hand-constructed QSelf with an out-of-range position
        // cannot walk past the end of the segments.
        let pos = cmp::min(qself.position, path.segments.len());
        let mut segments = path.segments.pairs();
        if pos > 0 {
            // `as_token` is required here even if absent in the AST, so a
            // default `as` is printed via TokensOrDefault.
            TokensOrDefault(&qself.as_token).to_tokens(tokens);
            path.leading_colon.to_tokens(tokens);
            for (i, segment) in segments.by_ref().take(pos).enumerate() {
                if i + 1 == pos {
                    // Close the `>` after the final in-bracket segment,
                    // before its trailing `::`.
                    segment.value().to_tokens(tokens);
                    qself.gt_token.to_tokens(tokens);
                    segment.punct().to_tokens(tokens);
                } else {
                    segment.to_tokens(tokens);
                }
            }
        } else {
            qself.gt_token.to_tokens(tokens);
            path.leading_colon.to_tokens(tokens);
        }
        // Remaining segments fall outside the angle brackets.
        for segment in segments {
            segment.to_tokens(tokens);
        }
    }

    #[cfg(feature = "parsing")]
    #[cfg_attr(doc_cfg, doc(cfg(all(feature = "parsing", feature = "printing"))))]
    impl Spanned for QSelf {
        fn span(&self) -> Span {
            // Span from `<` to `>` via a throwaway ToTokens wrapper that
            // prints only the two delimiter tokens.
            struct QSelfDelimiters<'a>(&'a QSelf);

            impl<'a> ToTokens for QSelfDelimiters<'a> {
                fn to_tokens(&self, tokens: &mut TokenStream) {
                    self.0.lt_token.to_tokens(tokens);
                    self.0.gt_token.to_tokens(tokens);
                }
            }

            QSelfDelimiters(self).span()
        }
    }
}
|
||||
16
vendor/syn/src/print.rs
vendored
Normal file
16
vendor/syn/src/print.rs
vendored
Normal file
@@ -0,0 +1,16 @@
|
||||
use proc_macro2::TokenStream;
|
||||
use quote::ToTokens;
|
||||
|
||||
pub(crate) struct TokensOrDefault<'a, T: 'a>(pub &'a Option<T>);
|
||||
|
||||
impl<'a, T> ToTokens for TokensOrDefault<'a, T>
|
||||
where
|
||||
T: ToTokens + Default,
|
||||
{
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
match self.0 {
|
||||
Some(t) => t.to_tokens(tokens),
|
||||
None => T::default().to_tokens(tokens),
|
||||
}
|
||||
}
|
||||
}
|
||||
1108
vendor/syn/src/punctuated.rs
vendored
Normal file
1108
vendor/syn/src/punctuated.rs
vendored
Normal file
File diff suppressed because it is too large
Load Diff
171
vendor/syn/src/restriction.rs
vendored
Normal file
171
vendor/syn/src/restriction.rs
vendored
Normal file
@@ -0,0 +1,171 @@
|
||||
use super::*;
|
||||
|
||||
ast_enum! {
    /// The visibility level of an item: inherited or `pub` or
    /// `pub(restricted)`.
    ///
    /// # Syntax tree enum
    ///
    /// This type is a [syntax tree enum].
    ///
    /// [syntax tree enum]: Expr#syntax-tree-enums
    #[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))]
    pub enum Visibility {
        /// A public visibility level: `pub`.
        Public(Token![pub]),

        /// A visibility level restricted to some path: `pub(self)` or
        /// `pub(super)` or `pub(crate)` or `pub(in some::module)`.
        Restricted(VisRestricted),

        /// An inherited visibility, which usually means private. Carries no
        /// tokens of its own.
        Inherited,
    }
}

ast_struct! {
    /// A visibility level restricted to some path: `pub(self)` or
    /// `pub(super)` or `pub(crate)` or `pub(in some::module)`.
    #[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))]
    pub struct VisRestricted {
        pub pub_token: Token![pub],
        pub paren_token: token::Paren,
        /// Present only in the `pub(in path)` form.
        pub in_token: Option<Token![in]>,
        pub path: Box<Path>,
    }
}

ast_enum! {
    /// Unused, but reserved for RFC 3323 restrictions.
    #[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))]
    #[non_exhaustive]
    pub enum FieldMutability {
        None,

        // TODO: https://rust-lang.github.io/rfcs/3323-restrictions.html
        //
        // FieldMutability::Restricted(MutRestricted)
        //
        // pub struct MutRestricted {
        //     pub mut_token: Token![mut],
        //     pub paren_token: token::Paren,
        //     pub in_token: Option<Token![in]>,
        //     pub path: Box<Path>,
        // }
    }
}
|
||||
|
||||
#[cfg(feature = "parsing")]
pub(crate) mod parsing {
    use super::*;
    use crate::ext::IdentExt as _;
    use crate::parse::discouraged::Speculative as _;
    use crate::parse::{Parse, ParseStream, Result};

    #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
    impl Parse for Visibility {
        fn parse(input: ParseStream) -> Result<Self> {
            // Recognize an empty None-delimited group, as produced by a $:vis
            // matcher that matched no tokens.
            if input.peek(token::Group) {
                let ahead = input.fork();
                let group = crate::group::parse_group(&ahead)?;
                if group.content.is_empty() {
                    input.advance_to(&ahead);
                    return Ok(Visibility::Inherited);
                }
            }

            if input.peek(Token![pub]) {
                Self::parse_pub(input)
            } else {
                Ok(Visibility::Inherited)
            }
        }
    }

    impl Visibility {
        // Parse the remainder after a leading `pub`. The parenthesized part
        // is parsed speculatively on a fork and committed (advance_to) only
        // if it is a genuine restriction.
        fn parse_pub(input: ParseStream) -> Result<Self> {
            let pub_token = input.parse::<Token![pub]>()?;

            if input.peek(token::Paren) {
                let ahead = input.fork();

                let content;
                let paren_token = parenthesized!(content in ahead);
                if content.peek(Token![crate])
                    || content.peek(Token![self])
                    || content.peek(Token![super])
                {
                    let path = content.call(Ident::parse_any)?;

                    // Ensure there are no additional tokens within `content`.
                    // Without explicitly checking, we may misinterpret a tuple
                    // field as a restricted visibility, causing a parse error.
                    // e.g. `pub (crate::A, crate::B)` (Issue #720).
                    if content.is_empty() {
                        input.advance_to(&ahead);
                        return Ok(Visibility::Restricted(VisRestricted {
                            pub_token,
                            paren_token,
                            in_token: None,
                            path: Box::new(Path::from(path)),
                        }));
                    }
                } else if content.peek(Token![in]) {
                    let in_token: Token![in] = content.parse()?;
                    let path = content.call(Path::parse_mod_style)?;

                    input.advance_to(&ahead);
                    return Ok(Visibility::Restricted(VisRestricted {
                        pub_token,
                        paren_token,
                        in_token: Some(in_token),
                        path: Box::new(path),
                    }));
                }
            }

            // Plain `pub`; any parenthesized tokens that were not a
            // restriction are left unconsumed for the caller.
            Ok(Visibility::Public(pub_token))
        }

        // Whether any visibility keyword is present (i.e. not inherited).
        #[cfg(feature = "full")]
        pub(crate) fn is_some(&self) -> bool {
            match self {
                Visibility::Inherited => false,
                _ => true,
            }
        }
    }
}
|
||||
|
||||
#[cfg(feature = "printing")]
mod printing {
    use super::*;
    use proc_macro2::TokenStream;
    use quote::ToTokens;

    #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))]
    impl ToTokens for Visibility {
        fn to_tokens(&self, tokens: &mut TokenStream) {
            // Inherited visibility has no surface syntax and prints as
            // nothing; the other variants delegate to their contents.
            match self {
                Visibility::Inherited => {}
                Visibility::Public(vis) => vis.to_tokens(tokens),
                Visibility::Restricted(vis) => vis.to_tokens(tokens),
            }
        }
    }

    #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))]
    impl ToTokens for VisRestricted {
        fn to_tokens(&self, tokens: &mut TokenStream) {
            // Prints `pub(...)` with the restriction inside the parentheses,
            // e.g. `pub(crate)` or `pub(in some::module)`.
            self.pub_token.to_tokens(tokens);
            self.paren_token.surround(tokens, |inner| {
                // TODO: automatically insert the `in` token whenever the path
                // is something other than "self", "super", or "crate".
                self.in_token.to_tokens(inner);
                self.path.to_tokens(inner);
            });
        }
    }
}
|
||||
4
vendor/syn/src/sealed.rs
vendored
Normal file
4
vendor/syn/src/sealed.rs
vendored
Normal file
@@ -0,0 +1,4 @@
|
||||
#[cfg(feature = "parsing")]
pub(crate) mod lookahead {
    // Crate-private sealing trait. The module is pub(crate), so downstream
    // crates can never name `Sealed` and therefore cannot implement traits
    // that require it as a supertrait. The `Copy` bound keeps implementors
    // trivially copyable.
    //
    // NOTE(review): presumably the supertrait of the lookahead/peek token
    // traits defined elsewhere in the crate — confirm against token.rs.
    pub trait Sealed: Copy {}
}
|
||||
63
vendor/syn/src/span.rs
vendored
Normal file
63
vendor/syn/src/span.rs
vendored
Normal file
@@ -0,0 +1,63 @@
|
||||
use proc_macro2::extra::DelimSpan;
|
||||
use proc_macro2::{Delimiter, Group, Span, TokenStream};
|
||||
|
||||
// Conversion helper: accepts either a single `Span` or an already-exact
// span representation, and produces the shape `S` that the consumer needs.
// A single `Span` can be broadcast to 1/2/3-element arrays or to a
// `DelimSpan`; exact representations pass through unchanged.
#[doc(hidden)]
pub trait IntoSpans<S> {
    fn into_spans(self) -> S;
}

// Identity: a single span where a single span is required.
impl IntoSpans<Span> for Span {
    fn into_spans(self) -> Span {
        self
    }
}

// Broadcast one span across 1-, 2-, and 3-element span arrays.
impl IntoSpans<[Span; 1]> for Span {
    fn into_spans(self) -> [Span; 1] {
        [self]
    }
}

impl IntoSpans<[Span; 2]> for Span {
    fn into_spans(self) -> [Span; 2] {
        [self, self]
    }
}

impl IntoSpans<[Span; 3]> for Span {
    fn into_spans(self) -> [Span; 3] {
        [self, self, self]
    }
}

// Identity impls for callers that already hold per-element span arrays.
impl IntoSpans<[Span; 1]> for [Span; 1] {
    fn into_spans(self) -> [Span; 1] {
        self
    }
}

impl IntoSpans<[Span; 2]> for [Span; 2] {
    fn into_spans(self) -> [Span; 2] {
        self
    }
}

impl IntoSpans<[Span; 3]> for [Span; 3] {
    fn into_spans(self) -> [Span; 3] {
        self
    }
}

// Build a `DelimSpan` from a single span by setting the span on an empty
// None-delimited group and reading back its `delim_span()`.
impl IntoSpans<DelimSpan> for Span {
    fn into_spans(self) -> DelimSpan {
        let mut group = Group::new(Delimiter::None, TokenStream::new());
        group.set_span(self);
        group.delim_span()
    }
}

// Identity: a `DelimSpan` where a `DelimSpan` is required.
impl IntoSpans<DelimSpan> for DelimSpan {
    fn into_spans(self) -> DelimSpan {
        self
    }
}
|
||||
118
vendor/syn/src/spanned.rs
vendored
Normal file
118
vendor/syn/src/spanned.rs
vendored
Normal file
@@ -0,0 +1,118 @@
|
||||
//! A trait that can provide the `Span` of the complete contents of a syntax
|
||||
//! tree node.
|
||||
//!
|
||||
//! <br>
|
||||
//!
|
||||
//! # Example
|
||||
//!
|
||||
//! Suppose in a procedural macro we have a [`Type`] that we want to assert
|
||||
//! implements the [`Sync`] trait. Maybe this is the type of one of the fields
|
||||
//! of a struct for which we are deriving a trait implementation, and we need to
|
||||
//! be able to pass a reference to one of those fields across threads.
|
||||
//!
|
||||
//! [`Type`]: crate::Type
|
||||
//! [`Sync`]: std::marker::Sync
|
||||
//!
|
||||
//! If the field type does *not* implement `Sync` as required, we want the
|
||||
//! compiler to report an error pointing out exactly which type it was.
|
||||
//!
|
||||
//! The following macro code takes a variable `ty` of type `Type` and produces a
|
||||
//! static assertion that `Sync` is implemented for that type.
|
||||
//!
|
||||
//! ```
|
||||
//! # extern crate proc_macro;
|
||||
//! #
|
||||
//! use proc_macro::TokenStream;
|
||||
//! use proc_macro2::Span;
|
||||
//! use quote::quote_spanned;
|
||||
//! use syn::Type;
|
||||
//! use syn::spanned::Spanned;
|
||||
//!
|
||||
//! # const IGNORE_TOKENS: &str = stringify! {
|
||||
//! #[proc_macro_derive(MyMacro)]
|
||||
//! # };
|
||||
//! pub fn my_macro(input: TokenStream) -> TokenStream {
|
||||
//! # let ty = get_a_type();
|
||||
//! /* ... */
|
||||
//!
|
||||
//! let assert_sync = quote_spanned! {ty.span()=>
|
||||
//! struct _AssertSync where #ty: Sync;
|
||||
//! };
|
||||
//!
|
||||
//! /* ... */
|
||||
//! # input
|
||||
//! }
|
||||
//! #
|
||||
//! # fn get_a_type() -> Type {
|
||||
//! # unimplemented!()
|
||||
//! # }
|
||||
//! ```
|
||||
//!
|
||||
//! By inserting this `assert_sync` fragment into the output code generated by
|
||||
//! our macro, the user's code will fail to compile if `ty` does not implement
|
||||
//! `Sync`. The errors they would see look like the following.
|
||||
//!
|
||||
//! ```text
|
||||
//! error[E0277]: the trait bound `*const i32: std::marker::Sync` is not satisfied
|
||||
//! --> src/main.rs:10:21
|
||||
//! |
|
||||
//! 10 | bad_field: *const i32,
|
||||
//! | ^^^^^^^^^^ `*const i32` cannot be shared between threads safely
|
||||
//! ```
|
||||
//!
|
||||
//! In this technique, using the `Type`'s span for the error message makes the
|
||||
//! error appear in the correct place underlining the right type.
|
||||
//!
|
||||
//! <br>
|
||||
//!
|
||||
//! # Limitations
|
||||
//!
|
||||
//! The underlying [`proc_macro::Span::join`] method is nightly-only. When
|
||||
//! called from within a procedural macro in a nightly compiler, `Spanned` will
|
||||
//! use `join` to produce the intended span. When not using a nightly compiler,
|
||||
//! only the span of the *first token* of the syntax tree node is returned.
|
||||
//!
|
||||
//! In the common case of wanting to use the joined span as the span of a
|
||||
//! `syn::Error`, consider instead using [`syn::Error::new_spanned`] which is
|
||||
//! able to span the error correctly under the complete syntax tree node without
|
||||
//! needing the unstable `join`.
|
||||
//!
|
||||
//! [`syn::Error::new_spanned`]: crate::Error::new_spanned
|
||||
|
||||
use proc_macro2::Span;
|
||||
use quote::spanned::Spanned as ToTokens;
|
||||
|
||||
/// A trait that can provide the `Span` of the complete contents of a syntax
/// tree node.
///
/// This trait is automatically implemented for all types that implement
/// [`ToTokens`] from the `quote` crate, as well as for `Span` itself.
///
/// [`ToTokens`]: quote::ToTokens
///
/// See the [module documentation] for an example.
///
/// [module documentation]: self
pub trait Spanned: private::Sealed {
    /// Returns a `Span` covering the complete contents of this syntax tree
    /// node, or [`Span::call_site()`] if this node is empty.
    ///
    /// [`Span::call_site()`]: proc_macro2::Span::call_site
    fn span(&self) -> Span;
}

// Blanket impl: anything quote can render to tokens can report a span.
// `__span` is provided by quote's `spanned::Spanned` trait, imported above
// under the alias `ToTokens`.
impl<T: ?Sized + ToTokens> Spanned for T {
    fn span(&self) -> Span {
        self.__span()
    }
}

// Sealing module: external crates can see `Spanned` but cannot implement it,
// because its supertrait `Sealed` lives in this private module and is not
// nameable from outside.
mod private {
    use super::*;

    pub trait Sealed {}
    impl<T: ?Sized + ToTokens> Sealed for T {}

    // NOTE(review): crate::QSelf gets an explicit Sealed impl here;
    // presumably it has a manual `Spanned` impl elsewhere in the crate
    // rather than a `ToTokens` impl — confirm.
    #[cfg(any(feature = "full", feature = "derive"))]
    impl Sealed for crate::QSelf {}
}
|
||||
452
vendor/syn/src/stmt.rs
vendored
Normal file
452
vendor/syn/src/stmt.rs
vendored
Normal file
@@ -0,0 +1,452 @@
|
||||
use super::*;
|
||||
|
||||
ast_struct! {
    /// A braced block containing Rust statements.
    #[cfg_attr(doc_cfg, doc(cfg(feature = "full")))]
    pub struct Block {
        /// The surrounding `{` ... `}`.
        pub brace_token: token::Brace,
        /// Statements in a block
        pub stmts: Vec<Stmt>,
    }
}
|
||||
|
||||
ast_enum! {
    /// A statement, usually ending in a semicolon.
    #[cfg_attr(doc_cfg, doc(cfg(feature = "full")))]
    pub enum Stmt {
        /// A local (let) binding.
        Local(Local),

        /// An item definition.
        Item(Item),

        /// Expression, with or without trailing semicolon.
        ///
        /// The `Option` records whether the statement's semicolon was
        /// present in the source.
        Expr(Expr, Option<Token![;]>),

        /// A macro invocation in statement position.
        ///
        /// Syntactically it's ambiguous which other kind of statement this
        /// macro would expand to. It can be any of local variable (`let`),
        /// item, or expression.
        Macro(StmtMacro),
    }
}
|
||||
|
||||
ast_struct! {
    /// A local `let` binding: `let x: u64 = s.parse()?`.
    #[cfg_attr(doc_cfg, doc(cfg(feature = "full")))]
    pub struct Local {
        /// Outer attributes attached to the binding.
        pub attrs: Vec<Attribute>,
        /// The `let` keyword.
        pub let_token: Token![let],
        /// The pattern being bound. A written type ascription is represented
        /// as `Pat::Type` wrapping the inner pattern (see `stmt_local` in
        /// the parsing module).
        pub pat: Pat,
        /// The initializer (`= expr`) and optional diverging `else` block,
        /// when present.
        pub init: Option<LocalInit>,
        /// The trailing `;`.
        pub semi_token: Token![;],
    }
}
|
||||
|
||||
ast_struct! {
    /// The expression assigned in a local `let` binding, including optional
    /// diverging `else` block.
    ///
    /// `LocalInit` represents `= s.parse()?` in `let x: u64 = s.parse()?` and
    /// `= r else { return }` in `let Ok(x) = r else { return }`.
    #[cfg_attr(doc_cfg, doc(cfg(feature = "full")))]
    pub struct LocalInit {
        /// The `=` preceding the initializer expression.
        pub eq_token: Token![=],
        /// The initializer expression itself.
        pub expr: Box<Expr>,
        /// The `else` keyword and diverging block of a let-else binding.
        pub diverge: Option<(Token![else], Box<Expr>)>,
    }
}
|
||||
|
||||
ast_struct! {
    /// A macro invocation in statement position.
    ///
    /// Syntactically it's ambiguous which other kind of statement this macro
    /// would expand to. It can be any of local variable (`let`), item, or
    /// expression.
    #[cfg_attr(doc_cfg, doc(cfg(feature = "full")))]
    pub struct StmtMacro {
        /// Outer attributes attached to the macro statement.
        pub attrs: Vec<Attribute>,
        /// The macro invocation itself (`path ! (tokens)` etc).
        pub mac: Macro,
        /// The trailing `;`, if one was written.
        pub semi_token: Option<Token![;]>,
    }
}
|
||||
|
||||
#[cfg(feature = "parsing")]
pub(crate) mod parsing {
    use super::*;
    use crate::parse::discouraged::Speculative as _;
    use crate::parse::{Parse, ParseStream, Result};
    use proc_macro2::TokenStream;

    // Whether a trailing expression without `;` is acceptable: true when
    // parsing the interior of a block (the last statement may be the block's
    // value), false when parsing a single standalone `Stmt`.
    struct AllowNoSemi(bool);

    impl Block {
        /// Parse the body of a block as zero or more statements, possibly
        /// including one trailing expression.
        ///
        /// # Example
        ///
        /// ```
        /// use syn::{braced, token, Attribute, Block, Ident, Result, Stmt, Token};
        /// use syn::parse::{Parse, ParseStream};
        ///
        /// // Parse a function with no generics or parameter list.
        /// //
        /// //     fn playground {
        /// //         let mut x = 1;
        /// //         x += 1;
        /// //         println!("{}", x);
        /// //     }
        /// struct MiniFunction {
        ///     attrs: Vec<Attribute>,
        ///     fn_token: Token![fn],
        ///     name: Ident,
        ///     brace_token: token::Brace,
        ///     stmts: Vec<Stmt>,
        /// }
        ///
        /// impl Parse for MiniFunction {
        ///     fn parse(input: ParseStream) -> Result<Self> {
        ///         let outer_attrs = input.call(Attribute::parse_outer)?;
        ///         let fn_token: Token![fn] = input.parse()?;
        ///         let name: Ident = input.parse()?;
        ///
        ///         let content;
        ///         let brace_token = braced!(content in input);
        ///         let inner_attrs = content.call(Attribute::parse_inner)?;
        ///         let stmts = content.call(Block::parse_within)?;
        ///
        ///         Ok(MiniFunction {
        ///             attrs: {
        ///                 let mut attrs = outer_attrs;
        ///                 attrs.extend(inner_attrs);
        ///                 attrs
        ///             },
        ///             fn_token,
        ///             name,
        ///             brace_token,
        ///             stmts,
        ///         })
        ///     }
        /// }
        /// ```
        #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
        pub fn parse_within(input: ParseStream) -> Result<Vec<Stmt>> {
            let mut stmts = Vec::new();
            loop {
                // Runs of stray semicolons become empty verbatim-expression
                // statements so they round-trip through printing.
                while let semi @ Some(_) = input.parse()? {
                    stmts.push(Stmt::Expr(Expr::Verbatim(TokenStream::new()), semi));
                }
                if input.is_empty() {
                    break;
                }
                let stmt = parse_stmt(input, AllowNoSemi(true))?;
                // A semicolon-less expression statement needs a `;` before
                // any following statement, unless the expression form ends in
                // a block; same for non-brace macro statements without `;`.
                let requires_semicolon = match &stmt {
                    Stmt::Expr(stmt, None) => expr::requires_terminator(stmt),
                    Stmt::Macro(stmt) => {
                        stmt.semi_token.is_none() && !stmt.mac.delimiter.is_brace()
                    }
                    Stmt::Local(_) | Stmt::Item(_) | Stmt::Expr(_, Some(_)) => false,
                };
                stmts.push(stmt);
                if input.is_empty() {
                    break;
                } else if requires_semicolon {
                    return Err(input.error("unexpected token, expected `;`"));
                }
            }
            Ok(stmts)
        }
    }

    #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
    impl Parse for Block {
        fn parse(input: ParseStream) -> Result<Self> {
            // `{ stmt* }` — braces, then statements until the closing brace.
            let content;
            Ok(Block {
                brace_token: braced!(content in input),
                stmts: content.call(Block::parse_within)?,
            })
        }
    }

    #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
    impl Parse for Stmt {
        fn parse(input: ParseStream) -> Result<Self> {
            // A standalone statement must carry its own semicolon where one
            // is grammatically required.
            let allow_nosemi = AllowNoSemi(false);
            parse_stmt(input, allow_nosemi)
        }
    }

    // Dispatch between the statement kinds: let-binding, item, brace-style
    // macro, or expression. The decision is made purely by token peeking so
    // that the real parse below starts from a known kind.
    fn parse_stmt(input: ParseStream, allow_nosemi: AllowNoSemi) -> Result<Stmt> {
        let begin = input.fork();
        let attrs = input.call(Attribute::parse_outer)?;

        // brace-style macros; paren and bracket macros get parsed as
        // expression statements.
        let ahead = input.fork();
        let mut is_item_macro = false;
        if let Ok(path) = ahead.call(Path::parse_mod_style) {
            if ahead.peek(Token![!]) {
                // `path!ident` (e.g. `macro_rules! name`) is an item-style
                // macro; `path! { ... }` not followed by `.` or `?` is a
                // statement macro consumed right here.
                if ahead.peek2(Ident) || ahead.peek2(Token![try]) {
                    is_item_macro = true;
                } else if ahead.peek2(token::Brace)
                    && !(ahead.peek3(Token![.]) || ahead.peek3(Token![?]))
                {
                    input.advance_to(&ahead);
                    return stmt_mac(input, attrs, path).map(Stmt::Macro);
                }
            }
        }

        // `let` not at the start of a None-delimited group begins a local
        // binding; otherwise peek for every token that can begin an item.
        // The negative sub-conditions carve out expression forms that share
        // a leading keyword with items (const/async/unsafe blocks and
        // closures, `crate::path` expressions, etc).
        if input.peek(Token![let]) && !input.peek(token::Group) {
            stmt_local(input, attrs).map(Stmt::Local)
        } else if input.peek(Token![pub])
            || input.peek(Token![crate]) && !input.peek2(Token![::])
            || input.peek(Token![extern])
            || input.peek(Token![use])
            || input.peek(Token![static])
                && (input.peek2(Token![mut])
                    || input.peek2(Ident)
                        && !(input.peek2(Token![async])
                            && (input.peek3(Token![move]) || input.peek3(Token![|]))))
            || input.peek(Token![const])
                && !(input.peek2(token::Brace)
                    || input.peek2(Token![static])
                    || input.peek2(Token![async])
                        && !(input.peek3(Token![unsafe])
                            || input.peek3(Token![extern])
                            || input.peek3(Token![fn]))
                    || input.peek2(Token![move])
                    || input.peek2(Token![|]))
            || input.peek(Token![unsafe]) && !input.peek2(token::Brace)
            || input.peek(Token![async])
                && (input.peek2(Token![unsafe])
                    || input.peek2(Token![extern])
                    || input.peek2(Token![fn]))
            || input.peek(Token![fn])
            || input.peek(Token![mod])
            || input.peek(Token![type])
            || input.peek(Token![struct])
            || input.peek(Token![enum])
            || input.peek(Token![union]) && input.peek2(Ident)
            || input.peek(Token![auto]) && input.peek2(Token![trait])
            || input.peek(Token![trait])
            || input.peek(Token![default])
                && (input.peek2(Token![unsafe]) || input.peek2(Token![impl]))
            || input.peek(Token![impl])
            || input.peek(Token![macro])
            || is_item_macro
        {
            // Restart from `begin` so the item parser re-reads the attrs.
            let item = item::parsing::parse_rest_of_item(begin, attrs, input)?;
            Ok(Stmt::Item(item))
        } else {
            stmt_expr(input, allow_nosemi, attrs)
        }
    }

    // Parse the remainder of a statement macro after its path: `! delim` and
    // an optional trailing semicolon.
    fn stmt_mac(input: ParseStream, attrs: Vec<Attribute>, path: Path) -> Result<StmtMacro> {
        let bang_token: Token![!] = input.parse()?;
        let (delimiter, tokens) = mac::parse_delimiter(input)?;
        let semi_token: Option<Token![;]> = input.parse()?;

        Ok(StmtMacro {
            attrs,
            mac: Macro {
                path,
                bang_token,
                delimiter,
                tokens,
            },
            semi_token,
        })
    }

    // Parse a `let` statement: pattern, optional `: Type` ascription
    // (folded into the pattern as `Pat::Type`), optional `= expr` with an
    // optional let-else `else { ... }`, and the required semicolon.
    fn stmt_local(input: ParseStream, attrs: Vec<Attribute>) -> Result<Local> {
        let let_token: Token![let] = input.parse()?;

        let mut pat = Pat::parse_single(input)?;
        if input.peek(Token![:]) {
            let colon_token: Token![:] = input.parse()?;
            let ty: Type = input.parse()?;
            pat = Pat::Type(PatType {
                attrs: Vec::new(),
                pat: Box::new(pat),
                colon_token,
                ty: Box::new(ty),
            });
        }

        let init = if let Some(eq_token) = input.parse()? {
            let eq_token: Token![=] = eq_token;
            let expr: Expr = input.parse()?;

            // let-else: `else` must be followed by a plain block.
            let diverge = if let Some(else_token) = input.parse()? {
                let else_token: Token![else] = else_token;
                let diverge = ExprBlock {
                    attrs: Vec::new(),
                    label: None,
                    block: input.parse()?,
                };
                Some((else_token, Box::new(Expr::Block(diverge))))
            } else {
                None
            };

            Some(LocalInit {
                eq_token,
                expr: Box::new(expr),
                diverge,
            })
        } else {
            None
        };

        let semi_token: Token![;] = input.parse()?;

        Ok(Local {
            attrs,
            let_token,
            pat,
            init,
            semi_token,
        })
    }

    // Parse an expression statement. Outer attributes already consumed by
    // the caller are re-attached to the leftmost subexpression of
    // assignment/binary/cast chains, which is where they appeared in source.
    fn stmt_expr(
        input: ParseStream,
        allow_nosemi: AllowNoSemi,
        mut attrs: Vec<Attribute>,
    ) -> Result<Stmt> {
        let mut e = expr::parsing::expr_early(input)?;

        // Walk down the left spine to find the node the attributes belong to.
        let mut attr_target = &mut e;
        loop {
            attr_target = match attr_target {
                Expr::Assign(e) => &mut e.left,
                Expr::Binary(e) => &mut e.left,
                Expr::Cast(e) => &mut e.expr,
                Expr::Array(_)
                | Expr::Async(_)
                | Expr::Await(_)
                | Expr::Block(_)
                | Expr::Break(_)
                | Expr::Call(_)
                | Expr::Closure(_)
                | Expr::Const(_)
                | Expr::Continue(_)
                | Expr::Field(_)
                | Expr::ForLoop(_)
                | Expr::Group(_)
                | Expr::If(_)
                | Expr::Index(_)
                | Expr::Infer(_)
                | Expr::Let(_)
                | Expr::Lit(_)
                | Expr::Loop(_)
                | Expr::Macro(_)
                | Expr::Match(_)
                | Expr::MethodCall(_)
                | Expr::Paren(_)
                | Expr::Path(_)
                | Expr::Range(_)
                | Expr::Reference(_)
                | Expr::Repeat(_)
                | Expr::Return(_)
                | Expr::Struct(_)
                | Expr::Try(_)
                | Expr::TryBlock(_)
                | Expr::Tuple(_)
                | Expr::Unary(_)
                | Expr::Unsafe(_)
                | Expr::While(_)
                | Expr::Yield(_)
                | Expr::Verbatim(_) => break,
            };
        }
        // Prepend the statement-level attrs ahead of any the target already
        // carried, then put the combined list back.
        attrs.extend(attr_target.replace_attrs(Vec::new()));
        attr_target.replace_attrs(attrs);

        let semi_token: Option<Token![;]> = input.parse()?;

        // A macro expression with a semicolon or brace delimiter is really a
        // macro statement; repackage it as Stmt::Macro.
        match e {
            Expr::Macro(ExprMacro { attrs, mac })
                if semi_token.is_some() || mac.delimiter.is_brace() =>
            {
                return Ok(Stmt::Macro(StmtMacro {
                    attrs,
                    mac,
                    semi_token,
                }));
            }
            _ => {}
        }

        if semi_token.is_some() {
            Ok(Stmt::Expr(e, semi_token))
        } else if allow_nosemi.0 || !expr::requires_terminator(&e) {
            Ok(Stmt::Expr(e, None))
        } else {
            Err(input.error("expected semicolon"))
        }
    }
}
|
||||
|
||||
#[cfg(feature = "printing")]
mod printing {
    use super::*;
    use proc_macro2::TokenStream;
    use quote::{ToTokens, TokenStreamExt};

    #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))]
    impl ToTokens for Block {
        fn to_tokens(&self, tokens: &mut TokenStream) {
            // `{ stmt* }` — the statements go inside the braces.
            self.brace_token.surround(tokens, |inner| {
                inner.append_all(&self.stmts);
            });
        }
    }

    #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))]
    impl ToTokens for Stmt {
        fn to_tokens(&self, tokens: &mut TokenStream) {
            // Each kind delegates to its payload; an expression statement
            // additionally prints its optional semicolon.
            match self {
                Stmt::Local(local) => local.to_tokens(tokens),
                Stmt::Item(item) => item.to_tokens(tokens),
                Stmt::Macro(mac) => mac.to_tokens(tokens),
                Stmt::Expr(expr, semi) => {
                    expr.to_tokens(tokens);
                    semi.to_tokens(tokens);
                }
            }
        }
    }

    #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))]
    impl ToTokens for Local {
        fn to_tokens(&self, tokens: &mut TokenStream) {
            // `attrs let pat (= expr (else block)?)? ;`
            expr::printing::outer_attrs_to_tokens(&self.attrs, tokens);
            self.let_token.to_tokens(tokens);
            self.pat.to_tokens(tokens);
            if let Some(LocalInit { eq_token, expr, diverge }) = &self.init {
                eq_token.to_tokens(tokens);
                expr.to_tokens(tokens);
                if let Some((else_token, alternative)) = diverge {
                    else_token.to_tokens(tokens);
                    alternative.to_tokens(tokens);
                }
            }
            self.semi_token.to_tokens(tokens);
        }
    }

    #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))]
    impl ToTokens for StmtMacro {
        fn to_tokens(&self, tokens: &mut TokenStream) {
            // `attrs mac ;?`
            expr::printing::outer_attrs_to_tokens(&self.attrs, tokens);
            self.mac.to_tokens(tokens);
            self.semi_token.to_tokens(tokens);
        }
    }
}
|
||||
60
vendor/syn/src/thread.rs
vendored
Normal file
60
vendor/syn/src/thread.rs
vendored
Normal file
@@ -0,0 +1,60 @@
|
||||
use std::fmt::{self, Debug};
|
||||
use std::thread::{self, ThreadId};
|
||||
|
||||
/// ThreadBound is a Sync-maker and Send-maker that allows accessing a value
/// of type T only from the original thread on which the ThreadBound was
/// constructed.
pub(crate) struct ThreadBound<T> {
    value: T,
    thread_id: ThreadId,
}

// SAFETY: sharing a ThreadBound across threads is sound because `get`
// refuses to hand out a reference from any thread other than the owning one.
unsafe impl<T> Sync for ThreadBound<T> {}

// SAFETY: Send requires Copy, as otherwise Drop could run in the wrong
// place. Today Copy and Drop are mutually exclusive, so `T: Copy` implies
// `T: !Drop`. This impl needs to be revisited if that restriction is relaxed
// in the future.
unsafe impl<T: Copy> Send for ThreadBound<T> {}

impl<T> ThreadBound<T> {
    /// Wraps `value`, recording the current thread as its owner.
    pub(crate) fn new(value: T) -> Self {
        let thread_id = thread::current().id();
        ThreadBound { value, thread_id }
    }

    /// Returns a reference to the value when called from the owning thread,
    /// and `None` from any other thread.
    pub(crate) fn get(&self) -> Option<&T> {
        let on_owner_thread = thread::current().id() == self.thread_id;
        if on_owner_thread {
            Some(&self.value)
        } else {
            None
        }
    }
}

impl<T: Debug> Debug for ThreadBound<T> {
    fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
        // Off the owning thread the value cannot be touched, so print a
        // placeholder instead.
        if let Some(value) = self.get() {
            Debug::fmt(value, formatter)
        } else {
            formatter.write_str("unknown")
        }
    }
}

// Copy the bytes of T, even if the currently running thread is the "wrong"
// thread. This is fine as long as the original thread is not simultaneously
// mutating this value via interior mutability, which would be a data race.
//
// Currently `T: Copy` is sufficient to guarantee that T contains no interior
// mutability, because _all_ interior mutability in Rust is built on
// std::cell::UnsafeCell, which has no Copy impl. This impl needs to be
// revisited if that restriction is relaxed in the future.
impl<T: Copy> Copy for ThreadBound<T> {}

impl<T: Copy> Clone for ThreadBound<T> {
    fn clone(&self) -> Self {
        *self
    }
}
|
||||
1138
vendor/syn/src/token.rs
vendored
Normal file
1138
vendor/syn/src/token.rs
vendored
Normal file
File diff suppressed because it is too large
Load Diff
107
vendor/syn/src/tt.rs
vendored
Normal file
107
vendor/syn/src/tt.rs
vendored
Normal file
@@ -0,0 +1,107 @@
|
||||
use proc_macro2::{Delimiter, TokenStream, TokenTree};
|
||||
use std::hash::{Hash, Hasher};
|
||||
|
||||
// Wrapper giving a `TokenTree` structural equality and hashing by content
// (delimiters, punctuation chars and spacing, literal text, identifiers).
// Spans are not considered.
pub(crate) struct TokenTreeHelper<'a>(pub &'a TokenTree);

impl<'a> PartialEq for TokenTreeHelper<'a> {
    fn eq(&self, other: &Self) -> bool {
        use proc_macro2::Spacing;

        match (self.0, other.0) {
            (TokenTree::Group(g1), TokenTree::Group(g2)) => {
                // Delimiters must match exactly, including the None kind.
                match (g1.delimiter(), g2.delimiter()) {
                    (Delimiter::Parenthesis, Delimiter::Parenthesis)
                    | (Delimiter::Brace, Delimiter::Brace)
                    | (Delimiter::Bracket, Delimiter::Bracket)
                    | (Delimiter::None, Delimiter::None) => {}
                    _ => return false,
                }

                // Compare contents pairwise; streams have no len(), so walk
                // one and make sure the other neither runs short nor long.
                let s1 = g1.stream().into_iter();
                let mut s2 = g2.stream().into_iter();

                for item1 in s1 {
                    let item2 = match s2.next() {
                        Some(item) => item,
                        None => return false,
                    };
                    if TokenTreeHelper(&item1) != TokenTreeHelper(&item2) {
                        return false;
                    }
                }
                s2.next().is_none()
            }
            (TokenTree::Punct(o1), TokenTree::Punct(o2)) => {
                // Both the character and its Alone/Joint spacing must agree.
                o1.as_char() == o2.as_char()
                    && match (o1.spacing(), o2.spacing()) {
                        (Spacing::Alone, Spacing::Alone) | (Spacing::Joint, Spacing::Joint) => true,
                        _ => false,
                    }
            }
            // Literals compare by their textual form.
            (TokenTree::Literal(l1), TokenTree::Literal(l2)) => l1.to_string() == l2.to_string(),
            (TokenTree::Ident(s1), TokenTree::Ident(s2)) => s1 == s2,
            _ => false,
        }
    }
}

impl<'a> Hash for TokenTreeHelper<'a> {
    fn hash<H: Hasher>(&self, h: &mut H) {
        use proc_macro2::Spacing;

        // Each arm first hashes a variant discriminant byte so different
        // token kinds with the same payload hash differently; this must stay
        // consistent with the PartialEq impl above.
        match self.0 {
            TokenTree::Group(g) => {
                0u8.hash(h);
                match g.delimiter() {
                    Delimiter::Parenthesis => 0u8.hash(h),
                    Delimiter::Brace => 1u8.hash(h),
                    Delimiter::Bracket => 2u8.hash(h),
                    Delimiter::None => 3u8.hash(h),
                }

                for item in g.stream() {
                    TokenTreeHelper(&item).hash(h);
                }
                0xffu8.hash(h); // terminator w/ a variant we don't normally hash
            }
            TokenTree::Punct(op) => {
                1u8.hash(h);
                op.as_char().hash(h);
                match op.spacing() {
                    Spacing::Alone => 0u8.hash(h),
                    Spacing::Joint => 1u8.hash(h),
                }
            }
            TokenTree::Literal(lit) => (2u8, lit.to_string()).hash(h),
            TokenTree::Ident(word) => (3u8, word).hash(h),
        }
    }
}
|
||||
|
||||
pub(crate) struct TokenStreamHelper<'a>(pub &'a TokenStream);
|
||||
|
||||
impl<'a> PartialEq for TokenStreamHelper<'a> {
|
||||
fn eq(&self, other: &Self) -> bool {
|
||||
let left = self.0.clone().into_iter().collect::<Vec<_>>();
|
||||
let right = other.0.clone().into_iter().collect::<Vec<_>>();
|
||||
if left.len() != right.len() {
|
||||
return false;
|
||||
}
|
||||
for (a, b) in left.into_iter().zip(right) {
|
||||
if TokenTreeHelper(&a) != TokenTreeHelper(&b) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
true
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> Hash for TokenStreamHelper<'a> {
|
||||
fn hash<H: Hasher>(&self, state: &mut H) {
|
||||
let tts = self.0.clone().into_iter().collect::<Vec<_>>();
|
||||
tts.len().hash(state);
|
||||
for tt in tts {
|
||||
TokenTreeHelper(&tt).hash(state);
|
||||
}
|
||||
}
|
||||
}
|
||||
1189
vendor/syn/src/ty.rs
vendored
Normal file
1189
vendor/syn/src/ty.rs
vendored
Normal file
File diff suppressed because it is too large
Load Diff
33
vendor/syn/src/verbatim.rs
vendored
Normal file
33
vendor/syn/src/verbatim.rs
vendored
Normal file
@@ -0,0 +1,33 @@
|
||||
use crate::parse::ParseStream;
|
||||
use proc_macro2::{Delimiter, TokenStream};
|
||||
use std::cmp::Ordering;
|
||||
use std::iter;
|
||||
|
||||
// Collect, as a flat TokenStream, every token between two cursor positions
// in the same token buffer. `begin` must lie at or before `end`; the
// assertion enforces that both streams come from one shared buffer.
pub(crate) fn between<'a>(begin: ParseStream<'a>, end: ParseStream<'a>) -> TokenStream {
    let end = end.cursor();
    let mut cursor = begin.cursor();
    assert!(crate::buffer::same_buffer(end, cursor));

    let mut tokens = TokenStream::new();
    while cursor != end {
        let (tt, next) = cursor.token_tree().unwrap();

        // If stepping over this whole token tree would jump past `end`, the
        // end position must sit inside the current group.
        if crate::buffer::cmp_assuming_same_buffer(end, next) == Ordering::Less {
            // A syntax node can cross the boundary of a None-delimited group
            // due to such groups being transparent to the parser in most cases.
            // Any time this occurs the group is known to be semantically
            // irrelevant. https://github.com/dtolnay/syn/issues/1235
            if let Some((inside, _span, after)) = cursor.group(Delimiter::None) {
                assert!(next == after);
                // Descend into the group and keep collecting from there.
                cursor = inside;
                continue;
            } else {
                panic!("verbatim end must not be inside a delimited group");
            }
        }

        tokens.extend(iter::once(tt));
        cursor = next;
    }
    tokens
}
|
||||
65
vendor/syn/src/whitespace.rs
vendored
Normal file
65
vendor/syn/src/whitespace.rs
vendored
Normal file
@@ -0,0 +1,65 @@
|
||||
// Skip leading whitespace and ordinary (non-doc) comments, returning the
// remainder of the string. Doc comments (`///`, `//!`, `/**`, `/*!`) are
// significant tokens and are NOT skipped, but `////...` and `/***...`
// count as ordinary comments and are.
pub(crate) fn skip(mut s: &str) -> &str {
    'skip: while !s.is_empty() {
        let byte = s.as_bytes()[0];
        if byte == b'/' {
            if s.starts_with("//")
                && (!s.starts_with("///") || s.starts_with("////"))
                && !s.starts_with("//!")
            {
                // Ordinary line comment: consume through the newline, or the
                // rest of the input if no newline follows.
                if let Some(i) = s.find('\n') {
                    s = &s[i + 1..];
                    continue;
                } else {
                    return "";
                }
            } else if s.starts_with("/**/") {
                // `/**/` is empty and therefore an ordinary block comment,
                // even though it begins with the doc prefix `/**`.
                s = &s[4..];
                continue;
            } else if s.starts_with("/*")
                && (!s.starts_with("/**") || s.starts_with("/***"))
                && !s.starts_with("/*!")
            {
                // Ordinary block comment: scan for the matching `*/`,
                // tracking depth because Rust block comments nest.
                let mut depth = 0;
                let bytes = s.as_bytes();
                let mut i = 0;
                let upper = bytes.len() - 1;
                while i < upper {
                    if bytes[i] == b'/' && bytes[i + 1] == b'*' {
                        depth += 1;
                        i += 1; // eat '*'
                    } else if bytes[i] == b'*' && bytes[i + 1] == b'/' {
                        depth -= 1;
                        if depth == 0 {
                            s = &s[i + 2..];
                            continue 'skip;
                        }
                        i += 1; // eat '/'
                    }
                    i += 1;
                }
                // Unterminated block comment: stop skipping here.
                return s;
            }
        }
        match byte {
            // ASCII whitespace: space plus HT/LF/VT/FF/CR.
            b' ' | 0x09..=0x0d => {
                s = &s[1..];
                continue;
            }
            // Other ASCII is not whitespace — done.
            b if b <= 0x7f => {}
            // Non-ASCII: decode the full char and apply the Unicode test.
            _ => {
                let ch = s.chars().next().unwrap();
                if is_whitespace(ch) {
                    s = &s[ch.len_utf8()..];
                    continue;
                }
            }
        }
        return s;
    }
    s
}
|
||||
|
||||
/// Whitespace test matching the skipping logic above: any Unicode
/// whitespace character, plus the left-to-right and right-to-left marks,
/// which Rust also treats as whitespace.
fn is_whitespace(ch: char) -> bool {
    matches!(ch, '\u{200e}' | '\u{200f}') || ch.is_whitespace()
}
|
||||
879
vendor/syn/tests/common/eq.rs
vendored
Normal file
879
vendor/syn/tests/common/eq.rs
vendored
Normal file
@@ -0,0 +1,879 @@
|
||||
#![allow(unused_macro_rules)]
|
||||
|
||||
extern crate rustc_ast;
|
||||
extern crate rustc_data_structures;
|
||||
extern crate rustc_driver;
|
||||
extern crate rustc_span;
|
||||
extern crate thin_vec;
|
||||
|
||||
use rustc_ast::ast::AngleBracketedArg;
|
||||
use rustc_ast::ast::AngleBracketedArgs;
|
||||
use rustc_ast::ast::AnonConst;
|
||||
use rustc_ast::ast::Arm;
|
||||
use rustc_ast::ast::AssocConstraint;
|
||||
use rustc_ast::ast::AssocConstraintKind;
|
||||
use rustc_ast::ast::AssocItemKind;
|
||||
use rustc_ast::ast::AttrArgs;
|
||||
use rustc_ast::ast::AttrArgsEq;
|
||||
use rustc_ast::ast::AttrId;
|
||||
use rustc_ast::ast::AttrItem;
|
||||
use rustc_ast::ast::AttrKind;
|
||||
use rustc_ast::ast::AttrStyle;
|
||||
use rustc_ast::ast::Attribute;
|
||||
use rustc_ast::ast::BareFnTy;
|
||||
use rustc_ast::ast::BinOpKind;
|
||||
use rustc_ast::ast::BindingAnnotation;
|
||||
use rustc_ast::ast::Block;
|
||||
use rustc_ast::ast::BlockCheckMode;
|
||||
use rustc_ast::ast::BorrowKind;
|
||||
use rustc_ast::ast::BoundConstness;
|
||||
use rustc_ast::ast::BoundPolarity;
|
||||
use rustc_ast::ast::ByRef;
|
||||
use rustc_ast::ast::CaptureBy;
|
||||
use rustc_ast::ast::Closure;
|
||||
use rustc_ast::ast::ClosureBinder;
|
||||
use rustc_ast::ast::Const;
|
||||
use rustc_ast::ast::ConstItem;
|
||||
use rustc_ast::ast::CoroutineKind;
|
||||
use rustc_ast::ast::Crate;
|
||||
use rustc_ast::ast::Defaultness;
|
||||
use rustc_ast::ast::DelimArgs;
|
||||
use rustc_ast::ast::EnumDef;
|
||||
use rustc_ast::ast::Expr;
|
||||
use rustc_ast::ast::ExprField;
|
||||
use rustc_ast::ast::ExprKind;
|
||||
use rustc_ast::ast::Extern;
|
||||
use rustc_ast::ast::FieldDef;
|
||||
use rustc_ast::ast::FloatTy;
|
||||
use rustc_ast::ast::Fn;
|
||||
use rustc_ast::ast::FnDecl;
|
||||
use rustc_ast::ast::FnHeader;
|
||||
use rustc_ast::ast::FnRetTy;
|
||||
use rustc_ast::ast::FnSig;
|
||||
use rustc_ast::ast::ForLoopKind;
|
||||
use rustc_ast::ast::ForeignItemKind;
|
||||
use rustc_ast::ast::ForeignMod;
|
||||
use rustc_ast::ast::FormatAlignment;
|
||||
use rustc_ast::ast::FormatArgPosition;
|
||||
use rustc_ast::ast::FormatArgPositionKind;
|
||||
use rustc_ast::ast::FormatArgs;
|
||||
use rustc_ast::ast::FormatArgsPiece;
|
||||
use rustc_ast::ast::FormatArgument;
|
||||
use rustc_ast::ast::FormatArgumentKind;
|
||||
use rustc_ast::ast::FormatArguments;
|
||||
use rustc_ast::ast::FormatCount;
|
||||
use rustc_ast::ast::FormatDebugHex;
|
||||
use rustc_ast::ast::FormatOptions;
|
||||
use rustc_ast::ast::FormatPlaceholder;
|
||||
use rustc_ast::ast::FormatSign;
|
||||
use rustc_ast::ast::FormatTrait;
|
||||
use rustc_ast::ast::GenBlockKind;
|
||||
use rustc_ast::ast::GenericArg;
|
||||
use rustc_ast::ast::GenericArgs;
|
||||
use rustc_ast::ast::GenericBound;
|
||||
use rustc_ast::ast::GenericParam;
|
||||
use rustc_ast::ast::GenericParamKind;
|
||||
use rustc_ast::ast::Generics;
|
||||
use rustc_ast::ast::Impl;
|
||||
use rustc_ast::ast::ImplPolarity;
|
||||
use rustc_ast::ast::Inline;
|
||||
use rustc_ast::ast::InlineAsm;
|
||||
use rustc_ast::ast::InlineAsmOperand;
|
||||
use rustc_ast::ast::InlineAsmOptions;
|
||||
use rustc_ast::ast::InlineAsmRegOrRegClass;
|
||||
use rustc_ast::ast::InlineAsmSym;
|
||||
use rustc_ast::ast::InlineAsmTemplatePiece;
|
||||
use rustc_ast::ast::IntTy;
|
||||
use rustc_ast::ast::IsAuto;
|
||||
use rustc_ast::ast::Item;
|
||||
use rustc_ast::ast::ItemKind;
|
||||
use rustc_ast::ast::Label;
|
||||
use rustc_ast::ast::Lifetime;
|
||||
use rustc_ast::ast::LitFloatType;
|
||||
use rustc_ast::ast::LitIntType;
|
||||
use rustc_ast::ast::LitKind;
|
||||
use rustc_ast::ast::Local;
|
||||
use rustc_ast::ast::LocalKind;
|
||||
use rustc_ast::ast::MacCall;
|
||||
use rustc_ast::ast::MacCallStmt;
|
||||
use rustc_ast::ast::MacStmtStyle;
|
||||
use rustc_ast::ast::MacroDef;
|
||||
use rustc_ast::ast::MetaItemLit;
|
||||
use rustc_ast::ast::MethodCall;
|
||||
use rustc_ast::ast::ModKind;
|
||||
use rustc_ast::ast::ModSpans;
|
||||
use rustc_ast::ast::Movability;
|
||||
use rustc_ast::ast::MutTy;
|
||||
use rustc_ast::ast::Mutability;
|
||||
use rustc_ast::ast::NodeId;
|
||||
use rustc_ast::ast::NormalAttr;
|
||||
use rustc_ast::ast::Param;
|
||||
use rustc_ast::ast::ParenthesizedArgs;
|
||||
use rustc_ast::ast::Pat;
|
||||
use rustc_ast::ast::PatField;
|
||||
use rustc_ast::ast::PatFieldsRest;
|
||||
use rustc_ast::ast::PatKind;
|
||||
use rustc_ast::ast::Path;
|
||||
use rustc_ast::ast::PathSegment;
|
||||
use rustc_ast::ast::PolyTraitRef;
|
||||
use rustc_ast::ast::QSelf;
|
||||
use rustc_ast::ast::RangeEnd;
|
||||
use rustc_ast::ast::RangeLimits;
|
||||
use rustc_ast::ast::RangeSyntax;
|
||||
use rustc_ast::ast::StaticItem;
|
||||
use rustc_ast::ast::Stmt;
|
||||
use rustc_ast::ast::StmtKind;
|
||||
use rustc_ast::ast::StrLit;
|
||||
use rustc_ast::ast::StrStyle;
|
||||
use rustc_ast::ast::StructExpr;
|
||||
use rustc_ast::ast::StructRest;
|
||||
use rustc_ast::ast::Term;
|
||||
use rustc_ast::ast::Trait;
|
||||
use rustc_ast::ast::TraitBoundModifiers;
|
||||
use rustc_ast::ast::TraitObjectSyntax;
|
||||
use rustc_ast::ast::TraitRef;
|
||||
use rustc_ast::ast::Ty;
|
||||
use rustc_ast::ast::TyAlias;
|
||||
use rustc_ast::ast::TyAliasWhereClause;
|
||||
use rustc_ast::ast::TyKind;
|
||||
use rustc_ast::ast::UintTy;
|
||||
use rustc_ast::ast::UnOp;
|
||||
use rustc_ast::ast::Unsafe;
|
||||
use rustc_ast::ast::UnsafeSource;
|
||||
use rustc_ast::ast::UseTree;
|
||||
use rustc_ast::ast::UseTreeKind;
|
||||
use rustc_ast::ast::Variant;
|
||||
use rustc_ast::ast::VariantData;
|
||||
use rustc_ast::ast::Visibility;
|
||||
use rustc_ast::ast::VisibilityKind;
|
||||
use rustc_ast::ast::WhereBoundPredicate;
|
||||
use rustc_ast::ast::WhereClause;
|
||||
use rustc_ast::ast::WhereEqPredicate;
|
||||
use rustc_ast::ast::WherePredicate;
|
||||
use rustc_ast::ast::WhereRegionPredicate;
|
||||
use rustc_ast::ptr::P;
|
||||
use rustc_ast::token::{self, CommentKind, Delimiter, Lit, Nonterminal, Token, TokenKind};
|
||||
use rustc_ast::tokenstream::{
|
||||
AttrTokenStream, AttrTokenTree, AttributesData, DelimSpacing, DelimSpan, LazyAttrTokenStream,
|
||||
Spacing, TokenStream, TokenTree,
|
||||
};
|
||||
use rustc_data_structures::sync::Lrc;
|
||||
use rustc_span::source_map::Spanned;
|
||||
use rustc_span::symbol::{sym, Ident};
|
||||
use rustc_span::{ErrorGuaranteed, Span, Symbol, SyntaxContext, DUMMY_SP};
|
||||
use std::collections::HashMap;
|
||||
use std::hash::{BuildHasher, Hash};
|
||||
use thin_vec::ThinVec;
|
||||
|
||||
/// Structural equality that disregards source locations.
///
/// Implementations compare values field-by-field while ignoring `Span`s
/// and other position-only data they contain.
pub trait SpanlessEq {
    fn eq(&self, other: &Self) -> bool;
}
|
||||
|
||||
impl<T: ?Sized + SpanlessEq> SpanlessEq for Box<T> {
|
||||
fn eq(&self, other: &Self) -> bool {
|
||||
SpanlessEq::eq(&**self, &**other)
|
||||
}
|
||||
}
|
||||
|
||||
// `P<T>` is rustc's owned AST pointer; delegate to the pointed-to value.
impl<T: ?Sized + SpanlessEq> SpanlessEq for P<T> {
    fn eq(&self, other: &Self) -> bool {
        SpanlessEq::eq(&**self, &**other)
    }
}
|
||||
|
||||
// `Lrc<T>` is rustc's shared reference-counted pointer; compare contents.
impl<T: ?Sized + SpanlessEq> SpanlessEq for Lrc<T> {
    fn eq(&self, other: &Self) -> bool {
        SpanlessEq::eq(&**self, &**other)
    }
}
|
||||
|
||||
impl<T: SpanlessEq> SpanlessEq for Option<T> {
|
||||
fn eq(&self, other: &Self) -> bool {
|
||||
match (self, other) {
|
||||
(None, None) => true,
|
||||
(Some(this), Some(other)) => SpanlessEq::eq(this, other),
|
||||
_ => false,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: SpanlessEq, E: SpanlessEq> SpanlessEq for Result<T, E> {
|
||||
fn eq(&self, other: &Self) -> bool {
|
||||
match (self, other) {
|
||||
(Ok(this), Ok(other)) => SpanlessEq::eq(this, other),
|
||||
(Err(this), Err(other)) => SpanlessEq::eq(this, other),
|
||||
_ => false,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: SpanlessEq> SpanlessEq for [T] {
|
||||
fn eq(&self, other: &Self) -> bool {
|
||||
self.len() == other.len() && self.iter().zip(other).all(|(a, b)| SpanlessEq::eq(a, b))
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: SpanlessEq> SpanlessEq for Vec<T> {
|
||||
fn eq(&self, other: &Self) -> bool {
|
||||
<[T] as SpanlessEq>::eq(self, other)
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: SpanlessEq> SpanlessEq for ThinVec<T> {
|
||||
fn eq(&self, other: &Self) -> bool {
|
||||
self.len() == other.len()
|
||||
&& self
|
||||
.iter()
|
||||
.zip(other.iter())
|
||||
.all(|(a, b)| SpanlessEq::eq(a, b))
|
||||
}
|
||||
}
|
||||
|
||||
impl<K: Eq + Hash, V: SpanlessEq, S: BuildHasher> SpanlessEq for HashMap<K, V, S> {
|
||||
fn eq(&self, other: &Self) -> bool {
|
||||
self.len() == other.len()
|
||||
&& self.iter().all(|(key, this_v)| {
|
||||
other
|
||||
.get(key)
|
||||
.map_or(false, |other_v| SpanlessEq::eq(this_v, other_v))
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
// `Spanned<T>` pairs a value with its span; compare only the wrapped node.
impl<T: SpanlessEq> SpanlessEq for Spanned<T> {
    fn eq(&self, other: &Self) -> bool {
        SpanlessEq::eq(&self.node, &other.node)
    }
}
|
||||
|
||||
impl<A: SpanlessEq, B: SpanlessEq> SpanlessEq for (A, B) {
|
||||
fn eq(&self, other: &Self) -> bool {
|
||||
SpanlessEq::eq(&self.0, &other.0) && SpanlessEq::eq(&self.1, &other.1)
|
||||
}
|
||||
}
|
||||
|
||||
impl<A: SpanlessEq, B: SpanlessEq, C: SpanlessEq> SpanlessEq for (A, B, C) {
|
||||
fn eq(&self, other: &Self) -> bool {
|
||||
SpanlessEq::eq(&self.0, &other.0)
|
||||
&& SpanlessEq::eq(&self.1, &other.1)
|
||||
&& SpanlessEq::eq(&self.2, &other.2)
|
||||
}
|
||||
}
|
||||
|
||||
// Implements `SpanlessEq` for a type whose values are *always* considered
// equal — used for position-only types that carry no semantic content.
macro_rules! spanless_eq_true {
    ($name:ty) => {
        impl SpanlessEq for $name {
            fn eq(&self, _other: &Self) -> bool {
                true
            }
        }
    };
}
|
||||
|
||||
// Position-only types: any two values compare equal.
spanless_eq_true!(Span);
spanless_eq_true!(DelimSpan);
spanless_eq_true!(AttrId);
spanless_eq_true!(NodeId);
spanless_eq_true!(SyntaxContext);
spanless_eq_true!(Spacing);
|
||||
|
||||
// Implements `SpanlessEq` by delegating to the type's existing `PartialEq`
// — appropriate for types that contain no spans at all.
macro_rules! spanless_eq_partial_eq {
    ($name:ty) => {
        impl SpanlessEq for $name {
            fn eq(&self, other: &Self) -> bool {
                PartialEq::eq(self, other)
            }
        }
    };
}
|
||||
|
||||
// Span-free types: defer to their structural `PartialEq`.
spanless_eq_partial_eq!(bool);
spanless_eq_partial_eq!(u8);
spanless_eq_partial_eq!(u16);
spanless_eq_partial_eq!(u32);
spanless_eq_partial_eq!(u128);
spanless_eq_partial_eq!(usize);
spanless_eq_partial_eq!(char);
spanless_eq_partial_eq!(String);
spanless_eq_partial_eq!(Symbol);
spanless_eq_partial_eq!(CommentKind);
spanless_eq_partial_eq!(Delimiter);
spanless_eq_partial_eq!(InlineAsmOptions);
spanless_eq_partial_eq!(token::LitKind);
spanless_eq_partial_eq!(ErrorGuaranteed);
|
||||
|
||||
// Generates a `SpanlessEq` impl for a struct from a field list.
// Usage: `spanless_eq_struct!(Name; field1 field2 !ignored_field);`
// where a leading `!` marks a field (typically a span) that is destructured
// but excluded from the comparison. The macro is a tt-muncher: the last two
// rules accumulate fields one at a time into `[field this other]` /
// `![ignored]` groups, and the first rule emits the impl once the input is
// exhausted.
macro_rules! spanless_eq_struct {
    // Terminal rule: all fields classified — emit the impl. Destructuring
    // both values exhaustively makes adding a struct field a compile error
    // here until the macro invocation is updated.
    {
        $($name:ident)::+ $(<$param:ident>)?
        $([$field:tt $this:ident $other:ident])*
        $(![$ignore:tt])*;
    } => {
        impl $(<$param: SpanlessEq>)* SpanlessEq for $($name)::+ $(<$param>)* {
            fn eq(&self, other: &Self) -> bool {
                let $($name)::+ { $($field: $this,)* $($ignore: _,)* } = self;
                let $($name)::+ { $($field: $other,)* $($ignore: _,)* } = other;
                true $(&& SpanlessEq::eq($this, $other))*
            }
        }
    };

    // Munch one `!field`: record it as ignored and recurse.
    {
        $($name:ident)::+ $(<$param:ident>)?
        $([$field:tt $this:ident $other:ident])*
        $(![$ignore:tt])*;
        !$next:tt
        $($rest:tt)*
    } => {
        spanless_eq_struct! {
            $($name)::+ $(<$param>)*
            $([$field $this $other])*
            $(![$ignore])*
            ![$next];
            $($rest)*
        }
    };

    // Munch one ordinary field: record it as compared and recurse.
    {
        $($name:ident)::+ $(<$param:ident>)?
        $([$field:tt $this:ident $other:ident])*
        $(![$ignore:tt])*;
        $next:tt
        $($rest:tt)*
    } => {
        spanless_eq_struct! {
            $($name)::+ $(<$param>)*
            $([$field $this $other])*
            [$next this other]
            $(![$ignore])*;
            $($rest)*
        }
    };
}
|
||||
|
||||
// Generates a `SpanlessEq` impl for an enum from a variant list.
// Usage: `spanless_eq_enum!(Name; Unit Tuple(0 1) Struct(field !ignored));`
// — fields named by index or identifier, with `!` marking ignored (span)
// fields. Like `spanless_eq_struct!`, this is a tt-muncher that classifies
// each variant's fields before emitting the impl.
macro_rules! spanless_eq_enum {
    // Terminal rule: emit the impl. The first, exhaustive `match self`
    // forces a compile error here when the enum gains a new variant; the
    // second match does the actual pairwise comparison.
    {
        $($name:ident)::+;
        $([$($variant:ident)::+; $([$field:tt $this:ident $other:ident])* $(![$ignore:tt])*])*
    } => {
        impl SpanlessEq for $($name)::+ {
            fn eq(&self, other: &Self) -> bool {
                match self {
                    $(
                        $($variant)::+ { .. } => {}
                    )*
                }
                #[allow(unreachable_patterns)]
                match (self, other) {
                    $(
                        (
                            $($variant)::+ { $($field: $this,)* $($ignore: _,)* },
                            $($variant)::+ { $($field: $other,)* $($ignore: _,)* },
                        ) => {
                            true $(&& SpanlessEq::eq($this, $other))*
                        }
                    )*
                    _ => false,
                }
            }
        }
    };

    // Munch one `!field` of the current variant: record as ignored.
    {
        $($name:ident)::+;
        $([$($variant:ident)::+; $($fields:tt)*])*
        $next:ident [$([$($named:tt)*])* $(![$ignore:tt])*] (!$i:tt $($field:tt)*)
        $($rest:tt)*
    } => {
        spanless_eq_enum! {
            $($name)::+;
            $([$($variant)::+; $($fields)*])*
            $next [$([$($named)*])* $(![$ignore])* ![$i]] ($($field)*)
            $($rest)*
        }
    };

    // Munch one ordinary field of the current variant: record as compared.
    {
        $($name:ident)::+;
        $([$($variant:ident)::+; $($fields:tt)*])*
        $next:ident [$([$($named:tt)*])* $(![$ignore:tt])*] ($i:tt $($field:tt)*)
        $($rest:tt)*
    } => {
        spanless_eq_enum! {
            $($name)::+;
            $([$($variant)::+; $($fields)*])*
            $next [$([$($named)*])* [$i this other] $(![$ignore])*] ($($field)*)
            $($rest)*
        }
    };

    // Field list for the current variant exhausted: finalize the variant.
    {
        $($name:ident)::+;
        $([$($variant:ident)::+; $($fields:tt)*])*
        $next:ident [$($named:tt)*] ()
        $($rest:tt)*
    } => {
        spanless_eq_enum! {
            $($name)::+;
            $([$($variant)::+; $($fields)*])*
            [$($name)::+::$next; $($named)*]
            $($rest)*
        }
    };

    // Start a new variant that has a field list.
    {
        $($name:ident)::+;
        $([$($variant:ident)::+; $($fields:tt)*])*
        $next:ident ($($field:tt)*)
        $($rest:tt)*
    } => {
        spanless_eq_enum! {
            $($name)::+;
            $([$($variant)::+; $($fields)*])*
            $next [] ($($field)*)
            $($rest)*
        }
    };

    // A bare identifier: unit variant with no fields.
    {
        $($name:ident)::+;
        $([$($variant:ident)::+; $($fields:tt)*])*
        $next:ident
        $($rest:tt)*
    } => {
        spanless_eq_enum! {
            $($name)::+;
            $([$($variant)::+; $($fields)*])*
            [$($name)::+::$next;]
            $($rest)*
        }
    };
}
|
||||
|
||||
// `SpanlessEq` impls for every rustc AST struct and enum compared by the
// test suite. Fields are listed by name (or tuple index); a leading `!`
// marks a span-like field that is excluded from comparison.
spanless_eq_struct!(AngleBracketedArgs; span args);
spanless_eq_struct!(AnonConst; id value);
spanless_eq_struct!(Arm; attrs pat guard body span id is_placeholder);
spanless_eq_struct!(AssocConstraint; id ident gen_args kind span);
spanless_eq_struct!(AttrItem; path args tokens);
spanless_eq_struct!(AttrTokenStream; 0);
spanless_eq_struct!(Attribute; kind id style span);
spanless_eq_struct!(AttributesData; attrs tokens);
spanless_eq_struct!(BareFnTy; unsafety ext generic_params decl decl_span);
spanless_eq_struct!(BindingAnnotation; 0 1);
spanless_eq_struct!(Block; stmts id rules span tokens could_be_bare_literal);
spanless_eq_struct!(Closure; binder capture_clause constness coroutine_kind movability fn_decl body !fn_decl_span !fn_arg_span);
spanless_eq_struct!(ConstItem; defaultness generics ty expr);
spanless_eq_struct!(Crate; attrs items spans id is_placeholder);
spanless_eq_struct!(DelimArgs; dspan delim tokens);
spanless_eq_struct!(DelimSpacing; open close);
spanless_eq_struct!(EnumDef; variants);
spanless_eq_struct!(Expr; id kind span attrs !tokens);
spanless_eq_struct!(ExprField; attrs id span ident expr is_shorthand is_placeholder);
spanless_eq_struct!(FieldDef; attrs id span vis ident ty is_placeholder);
spanless_eq_struct!(Fn; defaultness generics sig body);
spanless_eq_struct!(FnDecl; inputs output);
spanless_eq_struct!(FnHeader; constness coroutine_kind unsafety ext);
spanless_eq_struct!(FnSig; header decl span);
spanless_eq_struct!(ForeignMod; unsafety abi items);
spanless_eq_struct!(FormatArgPosition; index kind span);
spanless_eq_struct!(FormatArgs; span template arguments);
spanless_eq_struct!(FormatArgument; kind expr);
spanless_eq_struct!(FormatOptions; width precision alignment fill sign alternate zero_pad debug_hex);
spanless_eq_struct!(FormatPlaceholder; argument span format_trait format_options);
spanless_eq_struct!(GenericParam; id ident attrs bounds is_placeholder kind !colon_span);
spanless_eq_struct!(Generics; params where_clause span);
spanless_eq_struct!(Impl; defaultness unsafety generics constness polarity of_trait self_ty items);
spanless_eq_struct!(InlineAsm; template template_strs operands clobber_abis options line_spans);
spanless_eq_struct!(InlineAsmSym; id qself path);
spanless_eq_struct!(Item<K>; attrs id span vis ident kind !tokens);
spanless_eq_struct!(Label; ident);
spanless_eq_struct!(Lifetime; id ident);
spanless_eq_struct!(Lit; kind symbol suffix);
spanless_eq_struct!(Local; pat ty kind id span attrs !tokens);
spanless_eq_struct!(MacCall; path args);
spanless_eq_struct!(MacCallStmt; mac style attrs tokens);
spanless_eq_struct!(MacroDef; body macro_rules);
spanless_eq_struct!(MetaItemLit; symbol suffix kind span);
spanless_eq_struct!(MethodCall; seg receiver args !span);
spanless_eq_struct!(ModSpans; !inner_span !inject_use_span);
spanless_eq_struct!(MutTy; ty mutbl);
spanless_eq_struct!(NormalAttr; item tokens);
spanless_eq_struct!(ParenthesizedArgs; span inputs inputs_span output);
spanless_eq_struct!(Pat; id kind span tokens);
spanless_eq_struct!(PatField; ident pat is_shorthand attrs id span is_placeholder);
spanless_eq_struct!(Path; span segments tokens);
spanless_eq_struct!(PathSegment; ident id args);
spanless_eq_struct!(PolyTraitRef; bound_generic_params trait_ref span);
spanless_eq_struct!(QSelf; ty path_span position);
spanless_eq_struct!(StaticItem; ty mutability expr);
spanless_eq_struct!(Stmt; id kind span);
spanless_eq_struct!(StrLit; symbol suffix symbol_unescaped style span);
spanless_eq_struct!(StructExpr; qself path fields rest);
spanless_eq_struct!(Token; kind span);
spanless_eq_struct!(Trait; unsafety is_auto generics bounds items);
spanless_eq_struct!(TraitBoundModifiers; constness polarity);
spanless_eq_struct!(TraitRef; path ref_id);
spanless_eq_struct!(Ty; id kind span tokens);
spanless_eq_struct!(TyAlias; defaultness generics where_clauses !where_predicates_split bounds ty);
spanless_eq_struct!(TyAliasWhereClause; !0 1);
spanless_eq_struct!(UseTree; prefix kind span);
spanless_eq_struct!(Variant; attrs id span !vis ident data disr_expr is_placeholder);
spanless_eq_struct!(Visibility; kind span tokens);
spanless_eq_struct!(WhereBoundPredicate; span bound_generic_params bounded_ty bounds);
spanless_eq_struct!(WhereClause; has_where_token predicates span);
spanless_eq_struct!(WhereEqPredicate; span lhs_ty rhs_ty);
spanless_eq_struct!(WhereRegionPredicate; span lifetime bounds);
spanless_eq_enum!(AngleBracketedArg; Arg(0) Constraint(0));
spanless_eq_enum!(AssocConstraintKind; Equality(term) Bound(bounds));
spanless_eq_enum!(AssocItemKind; Const(0) Fn(0) Type(0) MacCall(0));
spanless_eq_enum!(AttrArgs; Empty Delimited(0) Eq(0 1));
spanless_eq_enum!(AttrArgsEq; Ast(0) Hir(0));
spanless_eq_enum!(AttrStyle; Outer Inner);
spanless_eq_enum!(AttrTokenTree; Token(0 1) Delimited(0 1 2 3) Attributes(0));
spanless_eq_enum!(BinOpKind; Add Sub Mul Div Rem And Or BitXor BitAnd BitOr Shl Shr Eq Lt Le Ne Ge Gt);
spanless_eq_enum!(BlockCheckMode; Default Unsafe(0));
spanless_eq_enum!(BorrowKind; Ref Raw);
spanless_eq_enum!(BoundConstness; Never Always(0) Maybe(0));
spanless_eq_enum!(BoundPolarity; Positive Negative(0) Maybe(0));
spanless_eq_enum!(ByRef; Yes No);
spanless_eq_enum!(CaptureBy; Value(move_kw) Ref);
spanless_eq_enum!(ClosureBinder; NotPresent For(span generic_params));
spanless_eq_enum!(Const; Yes(0) No);
spanless_eq_enum!(Defaultness; Default(0) Final);
spanless_eq_enum!(Extern; None Implicit(0) Explicit(0 1));
spanless_eq_enum!(FloatTy; F32 F64);
spanless_eq_enum!(FnRetTy; Default(0) Ty(0));
spanless_eq_enum!(ForLoopKind; For ForAwait);
spanless_eq_enum!(ForeignItemKind; Static(0 1 2) Fn(0) TyAlias(0) MacCall(0));
spanless_eq_enum!(FormatAlignment; Left Right Center);
spanless_eq_enum!(FormatArgPositionKind; Implicit Number Named);
spanless_eq_enum!(FormatArgsPiece; Literal(0) Placeholder(0));
spanless_eq_enum!(FormatArgumentKind; Normal Named(0) Captured(0));
spanless_eq_enum!(FormatCount; Literal(0) Argument(0));
spanless_eq_enum!(FormatDebugHex; Lower Upper);
spanless_eq_enum!(FormatSign; Plus Minus);
spanless_eq_enum!(FormatTrait; Display Debug LowerExp UpperExp Octal Pointer Binary LowerHex UpperHex);
spanless_eq_enum!(GenBlockKind; Async Gen AsyncGen);
spanless_eq_enum!(GenericArg; Lifetime(0) Type(0) Const(0));
spanless_eq_enum!(GenericArgs; AngleBracketed(0) Parenthesized(0));
spanless_eq_enum!(GenericBound; Trait(0 1) Outlives(0));
spanless_eq_enum!(GenericParamKind; Lifetime Type(default) Const(ty kw_span default));
spanless_eq_enum!(ImplPolarity; Positive Negative(0));
spanless_eq_enum!(Inline; Yes No);
spanless_eq_enum!(InlineAsmRegOrRegClass; Reg(0) RegClass(0));
spanless_eq_enum!(InlineAsmTemplatePiece; String(0) Placeholder(operand_idx modifier span));
spanless_eq_enum!(IntTy; Isize I8 I16 I32 I64 I128);
spanless_eq_enum!(IsAuto; Yes No);
spanless_eq_enum!(LitFloatType; Suffixed(0) Unsuffixed);
spanless_eq_enum!(LitIntType; Signed(0) Unsigned(0) Unsuffixed);
spanless_eq_enum!(LocalKind; Decl Init(0) InitElse(0 1));
spanless_eq_enum!(MacStmtStyle; Semicolon Braces NoBraces);
spanless_eq_enum!(ModKind; Loaded(0 1 2) Unloaded);
spanless_eq_enum!(Movability; Static Movable);
spanless_eq_enum!(Mutability; Mut Not);
spanless_eq_enum!(PatFieldsRest; Rest None);
spanless_eq_enum!(RangeEnd; Included(0) Excluded);
spanless_eq_enum!(RangeLimits; HalfOpen Closed);
spanless_eq_enum!(StmtKind; Local(0) Item(0) Expr(0) Semi(0) Empty MacCall(0));
spanless_eq_enum!(StrStyle; Cooked Raw(0));
spanless_eq_enum!(StructRest; Base(0) Rest(0) None);
spanless_eq_enum!(Term; Ty(0) Const(0));
spanless_eq_enum!(TokenTree; Token(0 1) Delimited(0 1 2 3));
spanless_eq_enum!(TraitObjectSyntax; Dyn DynStar None);
spanless_eq_enum!(UintTy; Usize U8 U16 U32 U64 U128);
spanless_eq_enum!(UnOp; Deref Not Neg);
spanless_eq_enum!(Unsafe; Yes(0) No);
spanless_eq_enum!(UnsafeSource; CompilerGenerated UserProvided);
spanless_eq_enum!(UseTreeKind; Simple(0) Nested(0) Glob);
spanless_eq_enum!(VariantData; Struct(fields recovered) Tuple(0 1) Unit(0));
spanless_eq_enum!(VisibilityKind; Public Restricted(path id shorthand) Inherited);
spanless_eq_enum!(WherePredicate; BoundPredicate(0) RegionPredicate(0) EqPredicate(0));
spanless_eq_enum!(CoroutineKind; Async(span closure_id return_impl_trait_id)
    Gen(span closure_id return_impl_trait_id)
    AsyncGen(span closure_id return_impl_trait_id));
spanless_eq_enum!(ExprKind; Array(0) ConstBlock(0) Call(0 1) MethodCall(0)
    Tup(0) Binary(0 1 2) Unary(0 1) Lit(0) Cast(0 1) Type(0 1) Let(0 1 2 3)
    If(0 1 2) While(0 1 2) ForLoop(pat iter body label kind) Loop(0 1 2)
    Match(0 1) Closure(0) Block(0 1) Gen(0 1 2) Await(0 1) TryBlock(0)
    Assign(0 1 2) AssignOp(0 1 2) Field(0 1) Index(0 1 2) Underscore
    Range(0 1 2) Path(0 1) AddrOf(0 1 2) Break(0 1) Continue(0) Ret(0)
    InlineAsm(0) OffsetOf(0 1) MacCall(0) Struct(0) Repeat(0 1) Paren(0) Try(0)
    Yield(0) Yeet(0) Become(0) IncludedBytes(0) FormatArgs(0) Err);
spanless_eq_enum!(InlineAsmOperand; In(reg expr) Out(reg late expr)
    InOut(reg late expr) SplitInOut(reg late in_expr out_expr) Const(anon_const)
    Sym(sym));
spanless_eq_enum!(ItemKind; ExternCrate(0) Use(0) Static(0) Const(0) Fn(0)
    Mod(0 1) ForeignMod(0) GlobalAsm(0) TyAlias(0) Enum(0 1) Struct(0 1)
    Union(0 1) Trait(0) TraitAlias(0 1) Impl(0) MacCall(0) MacroDef(0));
spanless_eq_enum!(LitKind; Str(0 1) ByteStr(0 1) CStr(0 1) Byte(0) Char(0)
    Int(0 1) Float(0 1) Bool(0) Err);
spanless_eq_enum!(PatKind; Wild Ident(0 1 2) Struct(0 1 2 3) TupleStruct(0 1 2)
    Or(0) Path(0 1) Tuple(0) Box(0) Ref(0 1) Lit(0) Range(0 1 2) Slice(0) Rest
    Never Paren(0) MacCall(0));
spanless_eq_enum!(TyKind; Slice(0) Array(0 1) Ptr(0) Ref(0 1) BareFn(0) Never
    Tup(0) AnonStruct(0) AnonUnion(0) Path(0 1) TraitObject(0 1) ImplTrait(0 1)
    Paren(0) Typeof(0) Infer ImplicitSelf MacCall(0) Err CVarArgs);
|
||||
|
||||
// Identifiers are compared by text only; span and hygiene context are
// ignored (`as_str` yields just the identifier's string).
impl SpanlessEq for Ident {
    fn eq(&self, other: &Self) -> bool {
        self.as_str() == other.as_str()
    }
}
|
||||
|
||||
// `...` and `..=` range syntax are treated as equivalent. The exhaustive
// match (instead of returning a bare `true`) forces a compile error here
// if `RangeSyntax` ever gains a new variant.
impl SpanlessEq for RangeSyntax {
    fn eq(&self, _other: &Self) -> bool {
        match self {
            RangeSyntax::DotDotDot | RangeSyntax::DotDotEq => true,
        }
    }
}
|
||||
|
||||
// Function parameters: if either side's type is `TyKind::Err` (a recovered
// parse error), only `id` and `is_placeholder` are compared and the
// attrs/ty/pat comparison is skipped entirely.
impl SpanlessEq for Param {
    fn eq(&self, other: &Self) -> bool {
        // Exhaustive destructuring so a new `Param` field breaks the build
        // here rather than being silently ignored.
        let Param {
            attrs,
            ty,
            pat,
            id,
            span: _,
            is_placeholder,
        } = self;
        let Param {
            attrs: attrs2,
            ty: ty2,
            pat: pat2,
            id: id2,
            span: _,
            is_placeholder: is_placeholder2,
        } = other;
        SpanlessEq::eq(id, id2)
            && SpanlessEq::eq(is_placeholder, is_placeholder2)
            && (matches!(ty.kind, TyKind::Err)
                || matches!(ty2.kind, TyKind::Err)
                || SpanlessEq::eq(attrs, attrs2)
                    && SpanlessEq::eq(ty, ty2)
                    && SpanlessEq::eq(pat, pat2))
    }
}
|
||||
|
||||
impl SpanlessEq for TokenKind {
    fn eq(&self, other: &Self) -> bool {
        match (self, other) {
            // Literals get span-insensitive comparison of their parts.
            (TokenKind::Literal(this), TokenKind::Literal(other)) => SpanlessEq::eq(this, other),
            // `..=` and `...` tokens are considered interchangeable.
            (TokenKind::DotDotEq | TokenKind::DotDotDot, _) => match other {
                TokenKind::DotDotEq | TokenKind::DotDotDot => true,
                _ => false,
            },
            // Interpolated (macro-expanded nonterminal) tokens: compare the
            // spans spanlessly, then recurse into `NtExpr` contents; other
            // nonterminal kinds fall back to plain equality.
            (TokenKind::Interpolated(this), TokenKind::Interpolated(other)) => {
                let (this, this_span) = this.as_ref();
                let (other, other_span) = other.as_ref();
                SpanlessEq::eq(this_span, other_span)
                    && match (this, other) {
                        (Nonterminal::NtExpr(this), Nonterminal::NtExpr(other)) => {
                            SpanlessEq::eq(this, other)
                        }
                        _ => this == other,
                    }
            }
            // Everything else: ordinary `PartialEq`.
            _ => self == other,
        }
    }
}
|
||||
|
||||
// Token streams are walked in lockstep. Beyond per-tree spanless equality,
// a doc-comment token on one side is accepted as equal to its attribute
// expansion (`#` followed by `[doc = "..."]`) on the other side, via
// `doc_comment` which consumes the extra trees from that side's iterator.
impl SpanlessEq for TokenStream {
    fn eq(&self, other: &Self) -> bool {
        let mut this_trees = self.trees();
        let mut other_trees = other.trees();
        loop {
            // Both exhausted at the same time => equal.
            let this = match this_trees.next() {
                None => return other_trees.next().is_none(),
                Some(tree) => tree,
            };
            let other = match other_trees.next() {
                None => return false,
                Some(tree) => tree,
            };
            if SpanlessEq::eq(this, other) {
                continue;
            }
            if let (TokenTree::Token(this, _), TokenTree::Token(other, _)) = (this, other) {
                if match (&this.kind, &other.kind) {
                    (TokenKind::Literal(this), TokenKind::Literal(other)) => {
                        SpanlessEq::eq(this, other)
                    }
                    // Doc comment vs `#[doc = ...]`: consume the attribute
                    // trees from the `#` side and compare against the
                    // comment's symbol.
                    (TokenKind::DocComment(_kind, style, symbol), TokenKind::Pound) => {
                        doc_comment(*style, *symbol, &mut other_trees)
                    }
                    (TokenKind::Pound, TokenKind::DocComment(_kind, style, symbol)) => {
                        doc_comment(*style, *symbol, &mut this_trees)
                    }
                    _ => false,
                } {
                    continue;
                }
            }
            return false;
        }
    }
}
|
||||
|
||||
/// Checks whether `trees` continues with the attribute expansion of a doc
/// comment whose unescaped text is `unescaped`: an optional `!` (for inner
/// `//!`/`/*!` style), then a bracketed group containing exactly
/// `doc = "<literal>"`. Consumes those trees from the iterator on the way.
fn doc_comment<'a>(
    style: AttrStyle,
    unescaped: Symbol,
    trees: &mut impl Iterator<Item = &'a TokenTree>,
) -> bool {
    // Inner attributes (`#![doc = ...]`) carry a `!` token after the `#`.
    if match style {
        AttrStyle::Outer => false,
        AttrStyle::Inner => true,
    } {
        match trees.next() {
            Some(TokenTree::Token(
                Token {
                    kind: TokenKind::Not,
                    span: _,
                },
                _spacing,
            )) => {}
            _ => return false,
        }
    }
    // Expect the `[...]` delimited group of the attribute.
    let stream = match trees.next() {
        Some(TokenTree::Delimited(_span, _spacing, Delimiter::Bracket, stream)) => stream,
        _ => return false,
    };
    let mut trees = stream.trees();
    // Expect the `doc` identifier (non-raw).
    match trees.next() {
        Some(TokenTree::Token(
            Token {
                kind: TokenKind::Ident(symbol, false),
                span: _,
            },
            _spacing,
        )) if *symbol == sym::doc => {}
        _ => return false,
    }
    // Expect `=`.
    match trees.next() {
        Some(TokenTree::Token(
            Token {
                kind: TokenKind::Eq,
                span: _,
            },
            _spacing,
        )) => {}
        _ => return false,
    }
    // Expect the string literal matching `unescaped`, and nothing after it.
    match trees.next() {
        Some(TokenTree::Token(token, _spacing)) => {
            is_escaped_literal_token(token, unescaped) && trees.next().is_none()
        }
        _ => false,
    }
}
|
||||
|
||||
/// Returns true if `token` is a literal (directly, or via an interpolated
/// `NtExpr` literal expression) whose unescaped value equals `unescaped`.
fn is_escaped_literal_token(token: &Token, unescaped: Symbol) -> bool {
    match token {
        Token {
            kind: TokenKind::Literal(lit),
            span: _,
        } => match MetaItemLit::from_token_lit(*lit, DUMMY_SP) {
            Ok(lit) => is_escaped_literal_meta_item_lit(&lit, unescaped),
            Err(_) => false,
        },
        Token {
            kind: TokenKind::Interpolated(nonterminal),
            span: _,
        } => match &nonterminal.0 {
            Nonterminal::NtExpr(expr) => match &expr.kind {
                ExprKind::Lit(lit) => is_escaped_lit(lit, unescaped),
                _ => false,
            },
            _ => false,
        },
        _ => false,
    }
}
|
||||
|
||||
/// Returns true if an attribute's `= value` payload is a literal whose
/// unescaped value equals `unescaped`, in either its AST or HIR form.
fn is_escaped_literal_attr_args(value: &AttrArgsEq, unescaped: Symbol) -> bool {
    match value {
        AttrArgsEq::Ast(expr) => match &expr.kind {
            ExprKind::Lit(lit) => is_escaped_lit(lit, unescaped),
            _ => false,
        },
        AttrArgsEq::Hir(lit) => is_escaped_literal_meta_item_lit(lit, unescaped),
    }
}
|
||||
|
||||
/// Returns true if `lit` is a suffix-less literal whose unescaped kind
/// matches `unescaped`.
fn is_escaped_literal_meta_item_lit(lit: &MetaItemLit, unescaped: Symbol) -> bool {
    match lit {
        MetaItemLit {
            symbol: _,
            suffix: None,
            kind,
            span: _,
        } => is_escaped_lit_kind(kind, unescaped),
        _ => false,
    }
}
|
||||
|
||||
/// Returns true if `lit` is a suffix-less cooked string token literal
/// whose unescaped contents equal `unescaped`.
fn is_escaped_lit(lit: &Lit, unescaped: Symbol) -> bool {
    match lit {
        Lit {
            kind: token::LitKind::Str,
            symbol: _,
            suffix: None,
        } => match LitKind::from_token_lit(*lit) {
            Ok(lit_kind) => is_escaped_lit_kind(&lit_kind, unescaped),
            _ => false,
        },
        _ => false,
    }
}
|
||||
|
||||
/// Returns true if `kind` is a cooked string equal to `unescaped` after
/// normalizing carriage returns out of both sides.
fn is_escaped_lit_kind(kind: &LitKind, unescaped: Symbol) -> bool {
    match kind {
        LitKind::Str(symbol, StrStyle::Cooked) => {
            // Strip '\r' so CRLF vs LF line endings don't affect equality.
            symbol.as_str().replace('\r', "") == unescaped.as_str().replace('\r', "")
        }
        _ => false,
    }
}
|
||||
|
||||
// Force both lazy streams into concrete `AttrTokenStream`s, then compare.
impl SpanlessEq for LazyAttrTokenStream {
    fn eq(&self, other: &Self) -> bool {
        let this = self.to_attr_token_stream();
        let other = other.to_attr_token_stream();
        SpanlessEq::eq(&this, &other)
    }
}
|
||||
|
||||
// Attributes: like-for-like variants compare structurally, and a doc
// comment is additionally accepted as equal to its expanded
// `#[doc = "..."]` normal-attribute form.
impl SpanlessEq for AttrKind {
    fn eq(&self, other: &Self) -> bool {
        match (self, other) {
            (AttrKind::Normal(normal), AttrKind::Normal(normal2)) => {
                SpanlessEq::eq(normal, normal2)
            }
            (AttrKind::DocComment(kind, symbol), AttrKind::DocComment(kind2, symbol2)) => {
                SpanlessEq::eq(kind, kind2) && SpanlessEq::eq(symbol, symbol2)
            }
            (AttrKind::DocComment(kind, unescaped), AttrKind::Normal(normal2)) => {
                // Exhaustive match so a new CommentKind variant forces a
                // review of this arm.
                match kind {
                    CommentKind::Line | CommentKind::Block => {}
                }
                // The normal attribute must be exactly `doc = <literal>`
                // with a literal matching the comment's unescaped text.
                let path = Path::from_ident(Ident::with_dummy_span(sym::doc));
                SpanlessEq::eq(&path, &normal2.item.path)
                    && match &normal2.item.args {
                        AttrArgs::Empty | AttrArgs::Delimited(_) => false,
                        AttrArgs::Eq(_span, value) => {
                            is_escaped_literal_attr_args(value, *unescaped)
                        }
                    }
            }
            // Symmetric case: reuse the arm above with operands swapped.
            (AttrKind::Normal(_), AttrKind::DocComment(..)) => SpanlessEq::eq(other, self),
        }
    }
}
|
||||
|
||||
impl SpanlessEq for FormatArguments {
|
||||
fn eq(&self, other: &Self) -> bool {
|
||||
SpanlessEq::eq(self.all_args(), other.all_args())
|
||||
}
|
||||
}
|
||||
28
vendor/syn/tests/common/mod.rs
vendored
Normal file
28
vendor/syn/tests/common/mod.rs
vendored
Normal file
@@ -0,0 +1,28 @@
|
||||
#![allow(dead_code)]
|
||||
#![allow(clippy::module_name_repetitions, clippy::shadow_unrelated)]
|
||||
|
||||
use rayon::ThreadPoolBuilder;
|
||||
use std::env;
|
||||
|
||||
pub mod eq;
|
||||
pub mod parse;
|
||||
|
||||
/// Read the `ABORT_AFTER_FAILURE` environment variable, and parse it.
///
/// Returns `usize::MAX` (i.e. never abort early) when the variable is
/// unset; panics if it is set but does not parse as an unsigned integer.
pub fn abort_after() -> usize {
    match env::var("ABORT_AFTER_FAILURE") {
        Ok(s) => s.parse().expect("failed to parse ABORT_AFTER_FAILURE"),
        // `usize::MAX` replaces the deprecated `usize::max_value()`.
        Err(_) => usize::MAX,
    }
}
|
||||
|
||||
/// Configure Rayon threadpool.
///
/// Worker stack size comes from `RUST_MIN_STACK` when set, otherwise
/// 20 MiB (presumably to accommodate deep recursion while parsing large
/// test inputs — confirm against callers). Panics if the variable is set
/// but unparseable, or if a global pool was already installed.
pub fn rayon_init() {
    let stack_size = match env::var("RUST_MIN_STACK") {
        Ok(s) => s.parse().expect("failed to parse RUST_MIN_STACK"),
        Err(_) => 20 * 1024 * 1024,
    };
    ThreadPoolBuilder::new()
        .stack_size(stack_size)
        .build_global()
        .unwrap();
}
|
||||
51
vendor/syn/tests/common/parse.rs
vendored
Normal file
51
vendor/syn/tests/common/parse.rs
vendored
Normal file
@@ -0,0 +1,51 @@
|
||||
extern crate rustc_ast;
|
||||
extern crate rustc_driver;
|
||||
extern crate rustc_expand;
|
||||
extern crate rustc_parse as parse;
|
||||
extern crate rustc_session;
|
||||
extern crate rustc_span;
|
||||
|
||||
use rustc_ast::ast;
|
||||
use rustc_ast::ptr::P;
|
||||
use rustc_session::parse::ParseSess;
|
||||
use rustc_span::source_map::FilePathMapping;
|
||||
use rustc_span::FileName;
|
||||
use std::panic;
|
||||
|
||||
/// Parse `input` as an expression using rustc's own parser.
///
/// Returns `None` when rustc reports a parse error or panics; the panic is
/// caught with `catch_unwind` so the surrounding test harness keeps going.
pub fn librustc_expr(input: &str) -> Option<P<ast::Expr>> {
    match panic::catch_unwind(|| {
        let locale_resources = rustc_driver::DEFAULT_LOCALE_RESOURCES.to_vec();
        let file_path_mapping = FilePathMapping::empty();
        let sess = ParseSess::new(locale_resources, file_path_mapping);
        let e = parse::new_parser_from_source_str(
            &sess,
            FileName::Custom("test_precedence".to_string()),
            input.to_string(),
        )
        .parse_expr();
        match e {
            Ok(expr) => Some(expr),
            Err(mut diagnostic) => {
                // Emit so the parse failure is visible in test output.
                diagnostic.emit();
                None
            }
        }
    }) {
        Ok(Some(e)) => Some(e),
        Ok(None) => None,
        Err(_) => {
            errorf!("librustc panicked\n");
            None
        }
    }
}
|
||||
|
||||
pub fn syn_expr(input: &str) -> Option<syn::Expr> {
|
||||
match syn::parse_str(input) {
|
||||
Ok(e) => Some(e),
|
||||
Err(msg) => {
|
||||
errorf!("syn failed to parse\n{:?}\n", msg);
|
||||
None
|
||||
}
|
||||
}
|
||||
}
|
||||
5157
vendor/syn/tests/debug/gen.rs
vendored
Normal file
5157
vendor/syn/tests/debug/gen.rs
vendored
Normal file
File diff suppressed because it is too large
Load Diff
147
vendor/syn/tests/debug/mod.rs
vendored
Normal file
147
vendor/syn/tests/debug/mod.rs
vendored
Normal file
@@ -0,0 +1,147 @@
|
||||
#![allow(
|
||||
clippy::no_effect_underscore_binding,
|
||||
clippy::too_many_lines,
|
||||
clippy::used_underscore_binding
|
||||
)]
|
||||
|
||||
#[rustfmt::skip]
|
||||
mod gen;
|
||||
|
||||
use proc_macro2::{Ident, Literal, TokenStream};
|
||||
use ref_cast::RefCast;
|
||||
use std::fmt::{self, Debug};
|
||||
use std::ops::Deref;
|
||||
use syn::punctuated::Punctuated;
|
||||
|
||||
// Transparent newtype used to attach custom (span-less) Debug impls to
// foreign types. RefCast allows converting &T to &Lite<T> without copying.
#[derive(RefCast)]
#[repr(transparent)]
pub struct Lite<T: ?Sized> {
    value: T,
}
|
||||
|
||||
// Constructor-style function deliberately named like the type, so call
// sites read as `Lite(&x)`.
#[allow(non_snake_case)]
pub fn Lite<T: ?Sized>(value: &T) -> &Lite<T> {
    Lite::ref_cast(value)
}
|
||||
|
||||
impl<T: ?Sized> Deref for Lite<T> {
    type Target = T;

    // Expose the wrapped value so a Lite<T> can call T's methods directly.
    fn deref(&self) -> &Self::Target {
        &self.value
    }
}
|
||||
|
||||
impl Debug for Lite<bool> {
|
||||
fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
||||
write!(formatter, "{}", self.value)
|
||||
}
|
||||
}
|
||||
|
||||
impl Debug for Lite<u32> {
|
||||
fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
||||
write!(formatter, "{}", self.value)
|
||||
}
|
||||
}
|
||||
|
||||
impl Debug for Lite<usize> {
|
||||
fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
||||
write!(formatter, "{}", self.value)
|
||||
}
|
||||
}
|
||||
|
||||
impl Debug for Lite<String> {
|
||||
fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
||||
write!(formatter, "{:?}", self.value)
|
||||
}
|
||||
}
|
||||
|
||||
impl Debug for Lite<Ident> {
    // Debug-quote the textual form of the identifier only.
    fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
        write!(formatter, "{:?}", self.value.to_string())
    }
}
|
||||
|
||||
impl Debug for Lite<Literal> {
|
||||
fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
||||
write!(formatter, "{}", self.value)
|
||||
}
|
||||
}
|
||||
|
||||
impl Debug for Lite<TokenStream> {
|
||||
fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
||||
let string = self.value.to_string();
|
||||
if string.len() <= 80 {
|
||||
write!(formatter, "TokenStream(`{}`)", self.value)
|
||||
} else {
|
||||
formatter
|
||||
.debug_tuple("TokenStream")
|
||||
.field(&format_args!("`{}`", string))
|
||||
.finish()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a, T> Debug for Lite<&'a T>
|
||||
where
|
||||
Lite<T>: Debug,
|
||||
{
|
||||
fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
||||
Debug::fmt(Lite(self.value), formatter)
|
||||
}
|
||||
}
|
||||
|
||||
impl<T> Debug for Lite<Box<T>>
|
||||
where
|
||||
Lite<T>: Debug,
|
||||
{
|
||||
fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
||||
Debug::fmt(Lite(&*self.value), formatter)
|
||||
}
|
||||
}
|
||||
|
||||
impl<T> Debug for Lite<Vec<T>>
|
||||
where
|
||||
Lite<T>: Debug,
|
||||
{
|
||||
fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
||||
formatter
|
||||
.debug_list()
|
||||
.entries(self.value.iter().map(Lite))
|
||||
.finish()
|
||||
}
|
||||
}
|
||||
|
||||
impl<T, P> Debug for Lite<Punctuated<T, P>>
where
    Lite<T>: Debug,
    Lite<P>: Debug,
{
    // Render as a flat list interleaving nodes with their trailing
    // punctuation. `self.pairs()` resolves through Deref to the inner
    // Punctuated.
    fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
        let mut list = formatter.debug_list();
        for pair in self.pairs() {
            let (node, punct) = pair.into_tuple();
            list.entry(Lite(node));
            // `punct` is an Option; a final element with no trailing
            // punctuation contributes no entry here.
            list.entries(punct.map(Lite));
        }
        list.finish()
    }
}
|
||||
|
||||
// Marker that always prints as "Some": used in snapshots for optional
// syntax elements whose presence matters but whose value does not.
struct Present;

impl Debug for Present {
    fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
        formatter.write_str("Some")
    }
}
|
||||
|
||||
// Deliberately shadows std's Option within this module: prints only
// "Some"/"None" so snapshots record presence without the wrapped value.
struct Option {
    present: bool,
}

impl Debug for Option {
    fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
        formatter.write_str(if self.present { "Some" } else { "None" })
    }
}
|
||||
92
vendor/syn/tests/macros/mod.rs
vendored
Normal file
92
vendor/syn/tests/macros/mod.rs
vendored
Normal file
@@ -0,0 +1,92 @@
|
||||
#![allow(unused_macros, unused_macro_rules)]
|
||||
|
||||
#[path = "../debug/mod.rs"]
|
||||
pub mod debug;
|
||||
|
||||
use std::str::FromStr;
|
||||
use syn::parse::Result;
|
||||
|
||||
// printf-style write to stderr; locks stderr for the duration of the write
// and panics if the write fails.
macro_rules! errorf {
    ($($tt:tt)*) => {{
        use ::std::io::Write;
        let stderr = ::std::io::stderr();
        write!(stderr.lock(), $($tt)*).unwrap();
    }};
}
|
||||
|
||||
// Build a syn Punctuated sequence from a comma-separated list of elements
// (trailing comma optional). Elements are added with `push`, which supplies
// separators itself.
macro_rules! punctuated {
    ($($e:expr,)+) => {{
        let mut seq = ::syn::punctuated::Punctuated::new();
        $(
            seq.push($e);
        )+
        seq
    }};

    // No trailing comma: normalize and delegate to the arm above.
    ($($e:expr),+) => {
        punctuated!($($e,)+)
    };
}
|
||||
|
||||
// Entry point for snapshot assertions: hands all tokens to snapshot_impl!,
// which accumulates the expression before the `as Type` / `@literal` parts.
macro_rules! snapshot {
    ($($args:tt)*) => {
        snapshot_impl!(() $($args)*)
    };
}
|
||||
|
||||
macro_rules! snapshot_impl {
    // `$ident as $t`: re-parse the ident's tokens as type $t, rebinding the
    // ident, then compare its Lite debug output against the inline snapshot.
    (($expr:ident) as $t:ty, @$snapshot:literal) => {
        let tokens = crate::macros::TryIntoTokens::try_into_tokens($expr).unwrap();
        let $expr: $t = syn::parse_quote!(#tokens);
        let debug = crate::macros::debug::Lite(&$expr);
        if !cfg!(miri) {
            #[allow(clippy::needless_raw_string_hashes)] // https://github.com/mitsuhiko/insta/issues/389
            {
                insta::assert_debug_snapshot!(debug, @$snapshot);
            }
        }
    };
    // General `expr as $t`: parse, snapshot, and yield the syntax tree.
    (($($expr:tt)*) as $t:ty, @$snapshot:literal) => {{
        let tokens = crate::macros::TryIntoTokens::try_into_tokens($($expr)*).unwrap();
        let syntax_tree: $t = syn::parse_quote!(#tokens);
        let debug = crate::macros::debug::Lite(&syntax_tree);
        if !cfg!(miri) {
            #[allow(clippy::needless_raw_string_hashes)]
            {
                insta::assert_debug_snapshot!(debug, @$snapshot);
            }
        }
        syntax_tree
    }};
    // No `as` cast: snapshot the expression's Lite debug output directly.
    (($($expr:tt)*) , @$snapshot:literal) => {{
        let syntax_tree = $($expr)*;
        let debug = crate::macros::debug::Lite(&syntax_tree);
        if !cfg!(miri) {
            #[allow(clippy::needless_raw_string_hashes)]
            {
                insta::assert_debug_snapshot!(debug, @$snapshot);
            }
        }
        syntax_tree
    }};
    // Token muncher: shift one token into the accumulated expression and
    // recurse until one of the arms above matches.
    (($($expr:tt)*) $next:tt $($rest:tt)*) => {
        snapshot_impl!(($($expr)* $next) $($rest)*)
    };
}
|
||||
|
||||
// Inputs that can be converted, possibly fallibly, into a proc_macro2
// token stream for snapshot tests.
pub trait TryIntoTokens {
    fn try_into_tokens(self) -> Result<proc_macro2::TokenStream>;
}
|
||||
|
||||
impl<'a> TryIntoTokens for &'a str {
|
||||
fn try_into_tokens(self) -> Result<proc_macro2::TokenStream> {
|
||||
let tokens = proc_macro2::TokenStream::from_str(self)?;
|
||||
Ok(tokens)
|
||||
}
|
||||
}
|
||||
|
||||
impl TryIntoTokens for proc_macro2::TokenStream {
    // Already a token stream; the conversion is infallible.
    fn try_into_tokens(self) -> Result<proc_macro2::TokenStream> {
        Ok(self)
    }
}
|
||||
5
vendor/syn/tests/regression.rs
vendored
Normal file
5
vendor/syn/tests/regression.rs
vendored
Normal file
@@ -0,0 +1,5 @@
|
||||
#![allow(clippy::let_underscore_untyped, clippy::uninlined_format_args)]
|
||||
|
||||
// Pull in every file under tests/regression as a child test module.
mod regression {
    automod::dir!("tests/regression");
}
|
||||
5
vendor/syn/tests/regression/issue1108.rs
vendored
Normal file
5
vendor/syn/tests/regression/issue1108.rs
vendored
Normal file
@@ -0,0 +1,5 @@
|
||||
#[test]
fn issue1108() {
    // Regression test: parsing this malformed input must not panic; the
    // parse result itself (Ok or Err) is irrelevant.
    let data = "impl<x<>>::x for";
    let _ = syn::parse_file(data);
}
|
||||
32
vendor/syn/tests/regression/issue1235.rs
vendored
Normal file
32
vendor/syn/tests/regression/issue1235.rs
vendored
Normal file
@@ -0,0 +1,32 @@
|
||||
use proc_macro2::{Delimiter, Group};
|
||||
use quote::quote;
|
||||
|
||||
#[test]
|
||||
fn main() {
|
||||
// Okay. Rustc allows top-level `static` with no value syntactically, but
|
||||
// not semantically. Syn parses as Item::Verbatim.
|
||||
let tokens = quote! {
|
||||
pub static FOO: usize;
|
||||
pub static BAR: usize;
|
||||
};
|
||||
let file = syn::parse2::<syn::File>(tokens).unwrap();
|
||||
println!("{:#?}", file);
|
||||
|
||||
// Okay.
|
||||
let inner = Group::new(
|
||||
Delimiter::None,
|
||||
quote!(static FOO: usize = 0; pub static BAR: usize = 0),
|
||||
);
|
||||
let tokens = quote!(pub #inner;);
|
||||
let file = syn::parse2::<syn::File>(tokens).unwrap();
|
||||
println!("{:#?}", file);
|
||||
|
||||
// Formerly parser crash.
|
||||
let inner = Group::new(
|
||||
Delimiter::None,
|
||||
quote!(static FOO: usize; pub static BAR: usize),
|
||||
);
|
||||
let tokens = quote!(pub #inner;);
|
||||
let file = syn::parse2::<syn::File>(tokens).unwrap();
|
||||
println!("{:#?}", file);
|
||||
}
|
||||
375
vendor/syn/tests/repo/mod.rs
vendored
Normal file
375
vendor/syn/tests/repo/mod.rs
vendored
Normal file
@@ -0,0 +1,375 @@
|
||||
#![allow(clippy::manual_assert)]
|
||||
|
||||
mod progress;
|
||||
|
||||
use self::progress::Progress;
|
||||
use anyhow::Result;
|
||||
use flate2::read::GzDecoder;
|
||||
use rayon::iter::{IntoParallelRefIterator, ParallelIterator};
|
||||
use std::collections::BTreeSet;
|
||||
use std::ffi::OsStr;
|
||||
use std::fs;
|
||||
use std::path::{Path, PathBuf};
|
||||
use tar::Archive;
|
||||
use walkdir::{DirEntry, WalkDir};
|
||||
|
||||
const REVISION: &str = "b10cfcd65fd7f7b1ab9beb34798b2108de003452";
|
||||
|
||||
#[rustfmt::skip]
|
||||
static EXCLUDE_FILES: &[&str] = &[
|
||||
// TODO: CStr literals: c"…", cr"…"
|
||||
// https://github.com/dtolnay/syn/issues/1502
|
||||
"src/tools/clippy/tests/ui/needless_raw_string.rs",
|
||||
"src/tools/clippy/tests/ui/needless_raw_string_hashes.rs",
|
||||
"src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0085_expr_literals.rs",
|
||||
|
||||
// TODO: explicit tail calls: `become _g()`
|
||||
// https://github.com/dtolnay/syn/issues/1501
|
||||
"tests/ui/explicit-tail-calls/return-lifetime-sub.rs",
|
||||
|
||||
// TODO: non-lifetime binders: `where for<'a, T> &'a Struct<T>: Trait`
|
||||
// https://github.com/dtolnay/syn/issues/1435
|
||||
"src/tools/rustfmt/tests/source/issue_5721.rs",
|
||||
"src/tools/rustfmt/tests/source/non-lifetime-binders.rs",
|
||||
"src/tools/rustfmt/tests/target/issue_5721.rs",
|
||||
"src/tools/rustfmt/tests/target/non-lifetime-binders.rs",
|
||||
"tests/rustdoc-json/non_lifetime_binders.rs",
|
||||
"tests/rustdoc/inline_cross/auxiliary/non_lifetime_binders.rs",
|
||||
"tests/rustdoc/non_lifetime_binders.rs",
|
||||
|
||||
// TODO: return type notation: `where T: Trait<method(): Send>`
|
||||
// https://github.com/dtolnay/syn/issues/1434
|
||||
"src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0208_associated_return_type_bounds.rs",
|
||||
"tests/ui/associated-type-bounds/return-type-notation/basic.rs",
|
||||
"tests/ui/feature-gates/feature-gate-return_type_notation.rs",
|
||||
|
||||
// TODO: lazy type alias syntax with where-clause in trailing position
|
||||
// https://github.com/dtolnay/syn/issues/1525
|
||||
"tests/rustdoc/typedef-inner-variants-lazy_type_alias.rs",
|
||||
|
||||
// TODO: gen blocks and functions
|
||||
// https://github.com/dtolnay/syn/issues/1526
|
||||
"compiler/rustc_codegen_cranelift/example/gen_block_iterate.rs",
|
||||
"tests/ui/coroutine/gen_block_is_iter.rs",
|
||||
"tests/ui/coroutine/gen_block_iterate.rs",
|
||||
|
||||
// TODO: struct literal in match guard
|
||||
// https://github.com/dtolnay/syn/issues/1527
|
||||
"tests/ui/parser/struct-literal-in-match-guard.rs",
|
||||
|
||||
// Compile-fail expr parameter in const generic position: f::<1 + 2>()
|
||||
"tests/ui/const-generics/early/closing-args-token.rs",
|
||||
"tests/ui/const-generics/early/const-expression-parameter.rs",
|
||||
|
||||
// Compile-fail variadics in not the last position of a function parameter list
|
||||
"tests/ui/parser/variadic-ffi-syntactic-pass.rs",
|
||||
|
||||
// Need at least one trait in impl Trait, no such type as impl 'static
|
||||
"tests/ui/type-alias-impl-trait/generic_type_does_not_live_long_enough.rs",
|
||||
|
||||
// Negative polarity trait bound: `where T: !Copy`
|
||||
"src/tools/rustfmt/tests/target/negative-bounds.rs",
|
||||
|
||||
// Lifetime bound inside for<>: `T: ~const ?for<'a: 'b> Trait<'a>`
|
||||
"tests/ui/rfcs/rfc-2632-const-trait-impl/tilde-const-syntax.rs",
|
||||
|
||||
// Const impl that is not a trait impl: `impl ~const T {}`
|
||||
"tests/ui/rfcs/rfc-2632-const-trait-impl/syntax.rs",
|
||||
|
||||
// Deprecated anonymous parameter syntax in traits
|
||||
"src/tools/rustfmt/tests/source/trait.rs",
|
||||
"src/tools/rustfmt/tests/target/trait.rs",
|
||||
"tests/ui/issues/issue-13105.rs",
|
||||
"tests/ui/issues/issue-13775.rs",
|
||||
"tests/ui/issues/issue-34074.rs",
|
||||
"tests/ui/proc-macro/trait-fn-args-2015.rs",
|
||||
|
||||
// Deprecated where-clause location
|
||||
"src/tools/rustfmt/tests/source/issue_4257.rs",
|
||||
"src/tools/rustfmt/tests/source/issue_4911.rs",
|
||||
"src/tools/rustfmt/tests/target/issue_4257.rs",
|
||||
"src/tools/rustfmt/tests/target/issue_4911.rs",
|
||||
"tests/pretty/gat-bounds.rs",
|
||||
"tests/rustdoc/generic-associated-types/gats.rs",
|
||||
|
||||
// Deprecated trait object syntax with parenthesized generic arguments and no dyn keyword
|
||||
"src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0004_value_parameters_no_patterns.rs",
|
||||
"src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0104_path_fn_trait_args.rs",
|
||||
"src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0202_typepathfn_with_coloncolon.rs",
|
||||
"src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0209_bare_dyn_types_with_paren_as_generic_args.rs",
|
||||
"src/tools/rustfmt/tests/source/attrib.rs",
|
||||
"src/tools/rustfmt/tests/source/closure.rs",
|
||||
"src/tools/rustfmt/tests/source/existential_type.rs",
|
||||
"src/tools/rustfmt/tests/source/fn-simple.rs",
|
||||
"src/tools/rustfmt/tests/source/fn_args_layout-vertical.rs",
|
||||
"src/tools/rustfmt/tests/source/issue-4689/one.rs",
|
||||
"src/tools/rustfmt/tests/source/issue-4689/two.rs",
|
||||
"src/tools/rustfmt/tests/source/paths.rs",
|
||||
"src/tools/rustfmt/tests/source/structs.rs",
|
||||
"src/tools/rustfmt/tests/target/attrib.rs",
|
||||
"src/tools/rustfmt/tests/target/closure.rs",
|
||||
"src/tools/rustfmt/tests/target/existential_type.rs",
|
||||
"src/tools/rustfmt/tests/target/fn-simple.rs",
|
||||
"src/tools/rustfmt/tests/target/fn.rs",
|
||||
"src/tools/rustfmt/tests/target/fn_args_layout-vertical.rs",
|
||||
"src/tools/rustfmt/tests/target/issue-4689/one.rs",
|
||||
"src/tools/rustfmt/tests/target/issue-4689/two.rs",
|
||||
"src/tools/rustfmt/tests/target/paths.rs",
|
||||
"src/tools/rustfmt/tests/target/structs.rs",
|
||||
"tests/codegen-units/item-collection/non-generic-closures.rs",
|
||||
"tests/debuginfo/recursive-enum.rs",
|
||||
"tests/pretty/closure-reform-pretty.rs",
|
||||
"tests/run-make/reproducible-build-2/reproducible-build.rs",
|
||||
"tests/run-make/reproducible-build/reproducible-build.rs",
|
||||
"tests/ui/auxiliary/typeid-intrinsic-aux1.rs",
|
||||
"tests/ui/auxiliary/typeid-intrinsic-aux2.rs",
|
||||
"tests/ui/impl-trait/generic-with-implicit-hrtb-without-dyn.rs",
|
||||
"tests/ui/lifetimes/auxiliary/lifetime_bound_will_change_warning_lib.rs",
|
||||
"tests/ui/lifetimes/bare-trait-object-borrowck.rs",
|
||||
"tests/ui/lifetimes/bare-trait-object.rs",
|
||||
"tests/ui/parser/bounds-obj-parens.rs",
|
||||
|
||||
// Invalid unparenthesized range pattern inside slice pattern: `[1..]`
|
||||
"tests/ui/consts/miri_unleashed/const_refers_to_static_cross_crate.rs",
|
||||
|
||||
// Various extensions to Rust syntax made up by rust-analyzer
|
||||
"src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0012_type_item_where_clause.rs",
|
||||
"src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0058_range_pat.rs",
|
||||
"src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0123_param_list_vararg.rs",
|
||||
"src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0131_existential_type.rs",
|
||||
"src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0156_fn_def_param.rs",
|
||||
"src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0179_use_tree_abs_star.rs",
|
||||
"src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0188_const_param_default_path.rs",
|
||||
"src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0015_use_tree.rs",
|
||||
"src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0029_range_forms.rs",
|
||||
"src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0051_parameter_attrs.rs",
|
||||
"src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0055_dot_dot_dot.rs",
|
||||
"src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0068_item_modifiers.rs",
|
||||
"src/tools/rust-analyzer/crates/syntax/test_data/parser/validation/0031_block_inner_attrs.rs",
|
||||
"src/tools/rust-analyzer/crates/syntax/test_data/parser/validation/0038_endless_inclusive_range.rs",
|
||||
"src/tools/rust-analyzer/crates/syntax/test_data/parser/validation/0045_ambiguous_trait_object.rs",
|
||||
"src/tools/rust-analyzer/crates/syntax/test_data/parser/validation/0046_mutable_const_item.rs",
|
||||
|
||||
// Placeholder syntax for "throw expressions"
|
||||
"compiler/rustc_errors/src/translation.rs",
|
||||
"src/tools/clippy/tests/ui/needless_return.rs",
|
||||
"src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0204_yeet_expr.rs",
|
||||
"tests/pretty/yeet-expr.rs",
|
||||
"tests/ui/try-trait/yeet-for-option.rs",
|
||||
"tests/ui/try-trait/yeet-for-result.rs",
|
||||
|
||||
// Edition 2015 code using identifiers that are now keywords
|
||||
// TODO: some of these we should probably parse
|
||||
"src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0159_try_macro_fallback.rs",
|
||||
"src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0160_try_macro_rules.rs",
|
||||
"src/tools/rustfmt/tests/source/configs/indent_style/block_call.rs",
|
||||
"src/tools/rustfmt/tests/source/configs/use_try_shorthand/false.rs",
|
||||
"src/tools/rustfmt/tests/source/configs/use_try_shorthand/true.rs",
|
||||
"src/tools/rustfmt/tests/source/issue_1306.rs",
|
||||
"src/tools/rustfmt/tests/source/try-conversion.rs",
|
||||
"src/tools/rustfmt/tests/target/configs/indent_style/block_call.rs",
|
||||
"src/tools/rustfmt/tests/target/configs/use_try_shorthand/false.rs",
|
||||
"src/tools/rustfmt/tests/target/issue-1681.rs",
|
||||
"src/tools/rustfmt/tests/target/issue_1306.rs",
|
||||
"tests/ui/dyn-keyword/dyn-2015-no-warnings-without-lints.rs",
|
||||
"tests/ui/editions/edition-keywords-2015-2015.rs",
|
||||
"tests/ui/editions/edition-keywords-2015-2018.rs",
|
||||
"tests/ui/lint/lint_pre_expansion_extern_module_aux.rs",
|
||||
"tests/ui/macros/macro-comma-support-rpass.rs",
|
||||
"tests/ui/macros/try-macro.rs",
|
||||
"tests/ui/parser/extern-crate-async.rs",
|
||||
"tests/ui/try-block/try-is-identifier-edition2015.rs",
|
||||
|
||||
// Excessive nesting
|
||||
"tests/ui/issues/issue-74564-if-expr-stack-overflow.rs",
|
||||
|
||||
// Testing tools on invalid syntax
|
||||
"src/tools/rustfmt/tests/coverage/target/comments.rs",
|
||||
"src/tools/rustfmt/tests/parser/issue-4126/invalid.rs",
|
||||
"src/tools/rustfmt/tests/parser/issue_4418.rs",
|
||||
"src/tools/rustfmt/tests/parser/unclosed-delims/issue_4466.rs",
|
||||
"src/tools/rustfmt/tests/source/configs/disable_all_formatting/true.rs",
|
||||
"src/tools/rustfmt/tests/source/configs/spaces_around_ranges/false.rs",
|
||||
"src/tools/rustfmt/tests/source/configs/spaces_around_ranges/true.rs",
|
||||
"src/tools/rustfmt/tests/source/type.rs",
|
||||
"src/tools/rustfmt/tests/target/configs/spaces_around_ranges/false.rs",
|
||||
"src/tools/rustfmt/tests/target/configs/spaces_around_ranges/true.rs",
|
||||
"src/tools/rustfmt/tests/target/type.rs",
|
||||
"tests/run-make/translation/test.rs",
|
||||
"tests/ui/generics/issue-94432-garbage-ice.rs",
|
||||
|
||||
// Generated file containing a top-level expression, used with `include!`
|
||||
"compiler/rustc_codegen_gcc/src/intrinsic/archs.rs",
|
||||
|
||||
// Clippy lint lists represented as expressions
|
||||
"src/tools/clippy/clippy_lints/src/lib.deprecated.rs",
|
||||
|
||||
// Not actually test cases
|
||||
"tests/ui/lint/expansion-time-include.rs",
|
||||
"tests/ui/macros/auxiliary/macro-comma-support.rs",
|
||||
"tests/ui/macros/auxiliary/macro-include-items-expr.rs",
|
||||
"tests/ui/macros/include-single-expr-helper.rs",
|
||||
"tests/ui/macros/include-single-expr-helper-1.rs",
|
||||
"tests/ui/parser/issues/auxiliary/issue-21146-inc.rs",
|
||||
];
|
||||
|
||||
#[rustfmt::skip]
|
||||
static EXCLUDE_DIRS: &[&str] = &[
|
||||
// Inputs that intentionally do not parse
|
||||
"src/tools/rust-analyzer/crates/parser/test_data/parser/err",
|
||||
"src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err",
|
||||
|
||||
// Inputs that lex but do not necessarily parse
|
||||
"src/tools/rust-analyzer/crates/parser/test_data/lexer",
|
||||
|
||||
// Inputs that used to crash rust-analyzer, but aren't necessarily supposed to parse
|
||||
"src/tools/rust-analyzer/crates/syntax/test_data/parser/fuzz-failures",
|
||||
"src/tools/rust-analyzer/crates/syntax/test_data/reparse/fuzz-failures",
|
||||
];
|
||||
|
||||
// Directories in which a .stderr implies the corresponding .rs is not expected
|
||||
// to work.
|
||||
static UI_TEST_DIRS: &[&str] = &["tests/ui", "tests/rustdoc-ui"];
|
||||
|
||||
/// Run `for_each` in parallel over every .rs file in the checked-out Rust
/// repository that is expected to parse.
///
/// Files are gathered with `base_dir_filter` applied; UI test files that
/// have a sibling `.stderr` snapshot are then removed, since a `.stderr`
/// implies the corresponding `.rs` is not expected to succeed.
pub fn for_each_rust_file(for_each: impl Fn(&Path) + Sync + Send) {
    let mut rs_files = BTreeSet::new();

    let repo_dir = Path::new("tests/rust");
    for entry in WalkDir::new(repo_dir)
        .into_iter()
        .filter_entry(base_dir_filter)
    {
        let entry = entry.unwrap();
        if !entry.file_type().is_dir() {
            rs_files.insert(entry.into_path());
        }
    }

    for ui_test_dir in UI_TEST_DIRS {
        for entry in WalkDir::new(repo_dir.join(ui_test_dir)) {
            let mut path = entry.unwrap().into_path();
            if path.extension() == Some(OsStr::new("stderr")) {
                // Strip extensions one at a time (foo.nll.stderr -> foo.nll
                // -> foo), removing the matching .rs candidate at each step,
                // so multi-extension stderr names are handled too.
                loop {
                    rs_files.remove(&path.with_extension("rs"));
                    path = path.with_extension("");
                    if path.extension().is_none() {
                        break;
                    }
                }
            }
        }
    }

    rs_files.par_iter().map(PathBuf::as_path).for_each(for_each);
}
|
||||
|
||||
/// WalkDir filter: keep only paths inside tests/rust that are .rs files (or
/// directories) not listed in the exclude tables.
pub fn base_dir_filter(entry: &DirEntry) -> bool {
    let path = entry.path();

    let mut path_string = path.to_string_lossy();
    if cfg!(windows) {
        // Normalize separators so the forward-slash exclude lists match.
        path_string = path_string.replace('\\', "/").into();
    }
    let path_string = if path_string == "tests/rust" {
        // The repository root itself is always traversed.
        return true;
    } else if let Some(path) = path_string.strip_prefix("tests/rust/") {
        path
    } else {
        panic!("unexpected path in Rust dist: {}", path_string);
    };

    if path.is_dir() {
        return !EXCLUDE_DIRS.contains(&path_string);
    }

    if path.extension() != Some(OsStr::new("rs")) {
        return false;
    }

    !EXCLUDE_FILES.contains(&path_string)
}
|
||||
|
||||
/// Rust edition to parse the given test file under. A single known
/// 2015-only test file is special-cased; everything else uses 2018.
#[allow(dead_code)]
pub fn edition(path: &Path) -> &'static str {
    match path.ends_with("dyn-2015-no-warnings-without-lints.rs") {
        true => "2015",
        false => "2018",
    }
}
|
||||
|
||||
/// Ensure a checkout of the pinned rustc source tree exists at tests/rust.
///
/// Downloads and unpacks the tree when tests/rust/COMMIT is missing or
/// names a different revision, then validates EXCLUDE_FILES / EXCLUDE_DIRS
/// against what is on disk: duplicates, nested entries, and stale paths all
/// panic so the exclude lists stay in sync across REVISION bumps.
pub fn clone_rust() {
    let needs_clone = match fs::read_to_string("tests/rust/COMMIT") {
        Err(_) => true,
        Ok(contents) => contents.trim() != REVISION,
    };
    if needs_clone {
        download_and_unpack().unwrap();
    }

    let mut missing = String::new();
    let test_src = Path::new("tests/rust");

    let mut exclude_files_set = BTreeSet::new();
    for exclude in EXCLUDE_FILES {
        if !exclude_files_set.insert(exclude) {
            panic!("duplicate path in EXCLUDE_FILES: {}", exclude);
        }
        for dir in EXCLUDE_DIRS {
            if Path::new(exclude).starts_with(dir) {
                // Files under an excluded dir are already skipped; listing
                // one individually would silently rot.
                panic!("excluded file {} is inside an excluded dir", exclude);
            }
        }
        if !test_src.join(exclude).is_file() {
            missing += "\ntests/rust/";
            missing += exclude;
        }
    }

    let mut exclude_dirs_set = BTreeSet::new();
    for exclude in EXCLUDE_DIRS {
        if !exclude_dirs_set.insert(exclude) {
            panic!("duplicate path in EXCLUDE_DIRS: {}", exclude);
        }
        if !test_src.join(exclude).is_dir() {
            missing += "\ntests/rust/";
            missing += exclude;
            missing += "/";
        }
    }

    if !missing.is_empty() {
        panic!("excluded test file does not exist:{}\n", missing);
    }
}
|
||||
|
||||
/// Download the pinned rustc source tarball from GitHub and unpack it into
/// tests/rust, replacing any existing checkout.
fn download_and_unpack() -> Result<()> {
    let url = format!(
        "https://github.com/rust-lang/rust/archive/{}.tar.gz",
        REVISION
    );
    let response = reqwest::blocking::get(url)?.error_for_status()?;
    // Wrap the body so byte progress is reported while the gzip stream is
    // being read.
    let progress = Progress::new(response);
    let decoder = GzDecoder::new(progress);
    let mut archive = Archive::new(decoder);
    let prefix = format!("rust-{}", REVISION);

    let tests_rust = Path::new("tests/rust");
    if tests_rust.exists() {
        fs::remove_dir_all(tests_rust)?;
    }

    for entry in archive.entries()? {
        let mut entry = entry?;
        let path = entry.path()?;
        // Archive metadata entry; not part of the source tree.
        if path == Path::new("pax_global_header") {
            continue;
        }
        // Strip the leading "rust-<REVISION>/" component.
        let relative = path.strip_prefix(&prefix)?;
        let out = tests_rust.join(relative);
        entry.unpack(&out)?;
    }

    // Write the COMMIT marker last so an interrupted unpack is retried.
    fs::write("tests/rust/COMMIT", REVISION)?;
    Ok(())
}
|
||||
37
vendor/syn/tests/repo/progress.rs
vendored
Normal file
37
vendor/syn/tests/repo/progress.rs
vendored
Normal file
@@ -0,0 +1,37 @@
|
||||
use std::io::{Read, Result};
|
||||
use std::time::{Duration, Instant};
|
||||
|
||||
// Read adapter that counts bytes and periodically reports download
// progress to stderr.
pub struct Progress<R> {
    // Total bytes read from `stream` so far.
    bytes: usize,
    // Earliest time at which the next progress line may be printed.
    tick: Instant,
    // Underlying reader being measured.
    stream: R,
}
||||
|
||||
impl<R> Progress<R> {
|
||||
pub fn new(stream: R) -> Self {
|
||||
Progress {
|
||||
bytes: 0,
|
||||
tick: Instant::now() + Duration::from_millis(2000),
|
||||
stream,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<R: Read> Read for Progress<R> {
    // Pass reads through to the inner stream, printing a running byte
    // count to stderr at most every 500ms (after the initial quiet period
    // set in `new`).
    fn read(&mut self, buf: &mut [u8]) -> Result<usize> {
        let num = self.stream.read(buf)?;
        self.bytes += num;
        let now = Instant::now();
        if now > self.tick {
            self.tick = now + Duration::from_millis(500);
            errorf!("downloading... {} bytes\n", self.bytes);
        }
        Ok(num)
    }
}
|
||||
|
||||
impl<R> Drop for Progress<R> {
    // Always report the final byte count, even if reading stopped early.
    fn drop(&mut self) {
        errorf!("done ({} bytes)\n", self.bytes);
    }
}
|
||||
43
vendor/syn/tests/test_asyncness.rs
vendored
Normal file
43
vendor/syn/tests/test_asyncness.rs
vendored
Normal file
@@ -0,0 +1,43 @@
|
||||
#![allow(clippy::uninlined_format_args)]
|
||||
|
||||
#[macro_use]
|
||||
mod macros;
|
||||
|
||||
use syn::{Expr, Item};
|
||||
|
||||
#[test]
|
||||
fn test_async_fn() {
|
||||
let input = "async fn process() {}";
|
||||
|
||||
snapshot!(input as Item, @r###"
|
||||
Item::Fn {
|
||||
vis: Visibility::Inherited,
|
||||
sig: Signature {
|
||||
asyncness: Some,
|
||||
ident: "process",
|
||||
generics: Generics,
|
||||
output: ReturnType::Default,
|
||||
},
|
||||
block: Block {
|
||||
stmts: [],
|
||||
},
|
||||
}
|
||||
"###);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_async_closure() {
|
||||
let input = "async || {}";
|
||||
|
||||
snapshot!(input as Expr, @r###"
|
||||
Expr::Closure {
|
||||
asyncness: Some,
|
||||
output: ReturnType::Default,
|
||||
body: Expr::Block {
|
||||
block: Block {
|
||||
stmts: [],
|
||||
},
|
||||
},
|
||||
}
|
||||
"###);
|
||||
}
|
||||
225
vendor/syn/tests/test_attribute.rs
vendored
Normal file
225
vendor/syn/tests/test_attribute.rs
vendored
Normal file
@@ -0,0 +1,225 @@
|
||||
#![allow(clippy::uninlined_format_args)]
|
||||
|
||||
#[macro_use]
|
||||
mod macros;
|
||||
|
||||
use syn::parse::Parser;
|
||||
use syn::{Attribute, Meta};
|
||||
|
||||
#[test]
|
||||
fn test_meta_item_word() {
|
||||
let meta = test("#[foo]");
|
||||
|
||||
snapshot!(meta, @r###"
|
||||
Meta::Path {
|
||||
segments: [
|
||||
PathSegment {
|
||||
ident: "foo",
|
||||
},
|
||||
],
|
||||
}
|
||||
"###);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_meta_item_name_value() {
|
||||
let meta = test("#[foo = 5]");
|
||||
|
||||
snapshot!(meta, @r###"
|
||||
Meta::NameValue {
|
||||
path: Path {
|
||||
segments: [
|
||||
PathSegment {
|
||||
ident: "foo",
|
||||
},
|
||||
],
|
||||
},
|
||||
value: Expr::Lit {
|
||||
lit: 5,
|
||||
},
|
||||
}
|
||||
"###);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_meta_item_bool_value() {
|
||||
let meta = test("#[foo = true]");
|
||||
|
||||
snapshot!(meta, @r###"
|
||||
Meta::NameValue {
|
||||
path: Path {
|
||||
segments: [
|
||||
PathSegment {
|
||||
ident: "foo",
|
||||
},
|
||||
],
|
||||
},
|
||||
value: Expr::Lit {
|
||||
lit: Lit::Bool {
|
||||
value: true,
|
||||
},
|
||||
},
|
||||
}
|
||||
"###);
|
||||
|
||||
let meta = test("#[foo = false]");
|
||||
|
||||
snapshot!(meta, @r###"
|
||||
Meta::NameValue {
|
||||
path: Path {
|
||||
segments: [
|
||||
PathSegment {
|
||||
ident: "foo",
|
||||
},
|
||||
],
|
||||
},
|
||||
value: Expr::Lit {
|
||||
lit: Lit::Bool {
|
||||
value: false,
|
||||
},
|
||||
},
|
||||
}
|
||||
"###);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_meta_item_list_lit() {
|
||||
let meta = test("#[foo(5)]");
|
||||
|
||||
snapshot!(meta, @r###"
|
||||
Meta::List {
|
||||
path: Path {
|
||||
segments: [
|
||||
PathSegment {
|
||||
ident: "foo",
|
||||
},
|
||||
],
|
||||
},
|
||||
delimiter: MacroDelimiter::Paren,
|
||||
tokens: TokenStream(`5`),
|
||||
}
|
||||
"###);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_meta_item_list_word() {
|
||||
let meta = test("#[foo(bar)]");
|
||||
|
||||
snapshot!(meta, @r###"
|
||||
Meta::List {
|
||||
path: Path {
|
||||
segments: [
|
||||
PathSegment {
|
||||
ident: "foo",
|
||||
},
|
||||
],
|
||||
},
|
||||
delimiter: MacroDelimiter::Paren,
|
||||
tokens: TokenStream(`bar`),
|
||||
}
|
||||
"###);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_meta_item_list_name_value() {
|
||||
let meta = test("#[foo(bar = 5)]");
|
||||
|
||||
snapshot!(meta, @r###"
|
||||
Meta::List {
|
||||
path: Path {
|
||||
segments: [
|
||||
PathSegment {
|
||||
ident: "foo",
|
||||
},
|
||||
],
|
||||
},
|
||||
delimiter: MacroDelimiter::Paren,
|
||||
tokens: TokenStream(`bar = 5`),
|
||||
}
|
||||
"###);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_meta_item_list_bool_value() {
|
||||
let meta = test("#[foo(bar = true)]");
|
||||
|
||||
snapshot!(meta, @r###"
|
||||
Meta::List {
|
||||
path: Path {
|
||||
segments: [
|
||||
PathSegment {
|
||||
ident: "foo",
|
||||
},
|
||||
],
|
||||
},
|
||||
delimiter: MacroDelimiter::Paren,
|
||||
tokens: TokenStream(`bar = true`),
|
||||
}
|
||||
"###);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_meta_item_multiple() {
|
||||
let meta = test("#[foo(word, name = 5, list(name2 = 6), word2)]");
|
||||
|
||||
snapshot!(meta, @r###"
|
||||
Meta::List {
|
||||
path: Path {
|
||||
segments: [
|
||||
PathSegment {
|
||||
ident: "foo",
|
||||
},
|
||||
],
|
||||
},
|
||||
delimiter: MacroDelimiter::Paren,
|
||||
tokens: TokenStream(`word , name = 5 , list (name2 = 6) , word2`),
|
||||
}
|
||||
"###);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_bool_lit() {
|
||||
let meta = test("#[foo(true)]");
|
||||
|
||||
snapshot!(meta, @r###"
|
||||
Meta::List {
|
||||
path: Path {
|
||||
segments: [
|
||||
PathSegment {
|
||||
ident: "foo",
|
||||
},
|
||||
],
|
||||
},
|
||||
delimiter: MacroDelimiter::Paren,
|
||||
tokens: TokenStream(`true`),
|
||||
}
|
||||
"###);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_negative_lit() {
|
||||
let meta = test("#[form(min = -1, max = 200)]");
|
||||
|
||||
snapshot!(meta, @r###"
|
||||
Meta::List {
|
||||
path: Path {
|
||||
segments: [
|
||||
PathSegment {
|
||||
ident: "form",
|
||||
},
|
||||
],
|
||||
},
|
||||
delimiter: MacroDelimiter::Paren,
|
||||
tokens: TokenStream(`min = - 1 , max = 200`),
|
||||
}
|
||||
"###);
|
||||
}
|
||||
|
||||
fn test(input: &str) -> Meta {
|
||||
let attrs = Attribute::parse_outer.parse_str(input).unwrap();
|
||||
|
||||
assert_eq!(attrs.len(), 1);
|
||||
let attr = attrs.into_iter().next().unwrap();
|
||||
|
||||
attr.meta
|
||||
}
|
||||
781
vendor/syn/tests/test_derive_input.rs
vendored
Normal file
781
vendor/syn/tests/test_derive_input.rs
vendored
Normal file
@@ -0,0 +1,781 @@
|
||||
#![allow(
|
||||
clippy::assertions_on_result_states,
|
||||
clippy::manual_let_else,
|
||||
clippy::too_many_lines,
|
||||
clippy::uninlined_format_args
|
||||
)]
|
||||
|
||||
#[macro_use]
|
||||
mod macros;
|
||||
|
||||
use quote::quote;
|
||||
use syn::{Data, DeriveInput};
|
||||
|
||||
#[test]
|
||||
fn test_unit() {
|
||||
let input = quote! {
|
||||
struct Unit;
|
||||
};
|
||||
|
||||
snapshot!(input as DeriveInput, @r###"
|
||||
DeriveInput {
|
||||
vis: Visibility::Inherited,
|
||||
ident: "Unit",
|
||||
generics: Generics,
|
||||
data: Data::Struct {
|
||||
fields: Fields::Unit,
|
||||
semi_token: Some,
|
||||
},
|
||||
}
|
||||
"###);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_struct() {
|
||||
let input = quote! {
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct Item {
|
||||
pub ident: Ident,
|
||||
pub attrs: Vec<Attribute>
|
||||
}
|
||||
};
|
||||
|
||||
snapshot!(input as DeriveInput, @r###"
|
||||
DeriveInput {
|
||||
attrs: [
|
||||
Attribute {
|
||||
style: AttrStyle::Outer,
|
||||
meta: Meta::List {
|
||||
path: Path {
|
||||
segments: [
|
||||
PathSegment {
|
||||
ident: "derive",
|
||||
},
|
||||
],
|
||||
},
|
||||
delimiter: MacroDelimiter::Paren,
|
||||
tokens: TokenStream(`Debug , Clone`),
|
||||
},
|
||||
},
|
||||
],
|
||||
vis: Visibility::Public,
|
||||
ident: "Item",
|
||||
generics: Generics,
|
||||
data: Data::Struct {
|
||||
fields: Fields::Named {
|
||||
named: [
|
||||
Field {
|
||||
vis: Visibility::Public,
|
||||
ident: Some("ident"),
|
||||
colon_token: Some,
|
||||
ty: Type::Path {
|
||||
path: Path {
|
||||
segments: [
|
||||
PathSegment {
|
||||
ident: "Ident",
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
},
|
||||
Token![,],
|
||||
Field {
|
||||
vis: Visibility::Public,
|
||||
ident: Some("attrs"),
|
||||
colon_token: Some,
|
||||
ty: Type::Path {
|
||||
path: Path {
|
||||
segments: [
|
||||
PathSegment {
|
||||
ident: "Vec",
|
||||
arguments: PathArguments::AngleBracketed {
|
||||
args: [
|
||||
GenericArgument::Type(Type::Path {
|
||||
path: Path {
|
||||
segments: [
|
||||
PathSegment {
|
||||
ident: "Attribute",
|
||||
},
|
||||
],
|
||||
},
|
||||
}),
|
||||
],
|
||||
},
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
}
|
||||
"###);
|
||||
|
||||
snapshot!(&input.attrs[0].meta, @r###"
|
||||
Meta::List {
|
||||
path: Path {
|
||||
segments: [
|
||||
PathSegment {
|
||||
ident: "derive",
|
||||
},
|
||||
],
|
||||
},
|
||||
delimiter: MacroDelimiter::Paren,
|
||||
tokens: TokenStream(`Debug , Clone`),
|
||||
}
|
||||
"###);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_union() {
|
||||
let input = quote! {
|
||||
union MaybeUninit<T> {
|
||||
uninit: (),
|
||||
value: T
|
||||
}
|
||||
};
|
||||
|
||||
snapshot!(input as DeriveInput, @r###"
|
||||
DeriveInput {
|
||||
vis: Visibility::Inherited,
|
||||
ident: "MaybeUninit",
|
||||
generics: Generics {
|
||||
lt_token: Some,
|
||||
params: [
|
||||
GenericParam::Type(TypeParam {
|
||||
ident: "T",
|
||||
}),
|
||||
],
|
||||
gt_token: Some,
|
||||
},
|
||||
data: Data::Union {
|
||||
fields: FieldsNamed {
|
||||
named: [
|
||||
Field {
|
||||
vis: Visibility::Inherited,
|
||||
ident: Some("uninit"),
|
||||
colon_token: Some,
|
||||
ty: Type::Tuple,
|
||||
},
|
||||
Token![,],
|
||||
Field {
|
||||
vis: Visibility::Inherited,
|
||||
ident: Some("value"),
|
||||
colon_token: Some,
|
||||
ty: Type::Path {
|
||||
path: Path {
|
||||
segments: [
|
||||
PathSegment {
|
||||
ident: "T",
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
}
|
||||
"###);
|
||||
}
|
||||
|
||||
#[test]
|
||||
#[cfg(feature = "full")]
|
||||
fn test_enum() {
|
||||
let input = quote! {
|
||||
/// See the std::result module documentation for details.
|
||||
#[must_use]
|
||||
pub enum Result<T, E> {
|
||||
Ok(T),
|
||||
Err(E),
|
||||
Surprise = 0isize,
|
||||
|
||||
// Smuggling data into a proc_macro_derive,
|
||||
// in the style of https://github.com/dtolnay/proc-macro-hack
|
||||
ProcMacroHack = (0, "data").0
|
||||
}
|
||||
};
|
||||
|
||||
snapshot!(input as DeriveInput, @r###"
|
||||
DeriveInput {
|
||||
attrs: [
|
||||
Attribute {
|
||||
style: AttrStyle::Outer,
|
||||
meta: Meta::NameValue {
|
||||
path: Path {
|
||||
segments: [
|
||||
PathSegment {
|
||||
ident: "doc",
|
||||
},
|
||||
],
|
||||
},
|
||||
value: Expr::Lit {
|
||||
lit: " See the std::result module documentation for details.",
|
||||
},
|
||||
},
|
||||
},
|
||||
Attribute {
|
||||
style: AttrStyle::Outer,
|
||||
meta: Meta::Path {
|
||||
segments: [
|
||||
PathSegment {
|
||||
ident: "must_use",
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
],
|
||||
vis: Visibility::Public,
|
||||
ident: "Result",
|
||||
generics: Generics {
|
||||
lt_token: Some,
|
||||
params: [
|
||||
GenericParam::Type(TypeParam {
|
||||
ident: "T",
|
||||
}),
|
||||
Token![,],
|
||||
GenericParam::Type(TypeParam {
|
||||
ident: "E",
|
||||
}),
|
||||
],
|
||||
gt_token: Some,
|
||||
},
|
||||
data: Data::Enum {
|
||||
variants: [
|
||||
Variant {
|
||||
ident: "Ok",
|
||||
fields: Fields::Unnamed {
|
||||
unnamed: [
|
||||
Field {
|
||||
vis: Visibility::Inherited,
|
||||
ty: Type::Path {
|
||||
path: Path {
|
||||
segments: [
|
||||
PathSegment {
|
||||
ident: "T",
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
Token![,],
|
||||
Variant {
|
||||
ident: "Err",
|
||||
fields: Fields::Unnamed {
|
||||
unnamed: [
|
||||
Field {
|
||||
vis: Visibility::Inherited,
|
||||
ty: Type::Path {
|
||||
path: Path {
|
||||
segments: [
|
||||
PathSegment {
|
||||
ident: "E",
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
Token![,],
|
||||
Variant {
|
||||
ident: "Surprise",
|
||||
fields: Fields::Unit,
|
||||
discriminant: Some(Expr::Lit {
|
||||
lit: 0isize,
|
||||
}),
|
||||
},
|
||||
Token![,],
|
||||
Variant {
|
||||
ident: "ProcMacroHack",
|
||||
fields: Fields::Unit,
|
||||
discriminant: Some(Expr::Field {
|
||||
base: Expr::Tuple {
|
||||
elems: [
|
||||
Expr::Lit {
|
||||
lit: 0,
|
||||
},
|
||||
Token![,],
|
||||
Expr::Lit {
|
||||
lit: "data",
|
||||
},
|
||||
],
|
||||
},
|
||||
member: Member::Unnamed(Index {
|
||||
index: 0,
|
||||
}),
|
||||
}),
|
||||
},
|
||||
],
|
||||
},
|
||||
}
|
||||
"###);
|
||||
|
||||
let meta_items: Vec<_> = input.attrs.into_iter().map(|attr| attr.meta).collect();
|
||||
|
||||
snapshot!(meta_items, @r###"
|
||||
[
|
||||
Meta::NameValue {
|
||||
path: Path {
|
||||
segments: [
|
||||
PathSegment {
|
||||
ident: "doc",
|
||||
},
|
||||
],
|
||||
},
|
||||
value: Expr::Lit {
|
||||
lit: " See the std::result module documentation for details.",
|
||||
},
|
||||
},
|
||||
Meta::Path {
|
||||
segments: [
|
||||
PathSegment {
|
||||
ident: "must_use",
|
||||
},
|
||||
],
|
||||
},
|
||||
]
|
||||
"###);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_attr_with_non_mod_style_path() {
|
||||
let input = quote! {
|
||||
#[inert <T>]
|
||||
struct S;
|
||||
};
|
||||
|
||||
syn::parse2::<DeriveInput>(input).unwrap_err();
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_attr_with_mod_style_path_with_self() {
|
||||
let input = quote! {
|
||||
#[foo::self]
|
||||
struct S;
|
||||
};
|
||||
|
||||
snapshot!(input as DeriveInput, @r###"
|
||||
DeriveInput {
|
||||
attrs: [
|
||||
Attribute {
|
||||
style: AttrStyle::Outer,
|
||||
meta: Meta::Path {
|
||||
segments: [
|
||||
PathSegment {
|
||||
ident: "foo",
|
||||
},
|
||||
Token![::],
|
||||
PathSegment {
|
||||
ident: "self",
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
],
|
||||
vis: Visibility::Inherited,
|
||||
ident: "S",
|
||||
generics: Generics,
|
||||
data: Data::Struct {
|
||||
fields: Fields::Unit,
|
||||
semi_token: Some,
|
||||
},
|
||||
}
|
||||
"###);
|
||||
|
||||
snapshot!(&input.attrs[0].meta, @r###"
|
||||
Meta::Path {
|
||||
segments: [
|
||||
PathSegment {
|
||||
ident: "foo",
|
||||
},
|
||||
Token![::],
|
||||
PathSegment {
|
||||
ident: "self",
|
||||
},
|
||||
],
|
||||
}
|
||||
"###);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_pub_restricted() {
|
||||
// Taken from tests/rust/src/test/ui/resolve/auxiliary/privacy-struct-ctor.rs
|
||||
let input = quote! {
|
||||
pub(in m) struct Z(pub(in m::n) u8);
|
||||
};
|
||||
|
||||
snapshot!(input as DeriveInput, @r###"
|
||||
DeriveInput {
|
||||
vis: Visibility::Restricted {
|
||||
in_token: Some,
|
||||
path: Path {
|
||||
segments: [
|
||||
PathSegment {
|
||||
ident: "m",
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
ident: "Z",
|
||||
generics: Generics,
|
||||
data: Data::Struct {
|
||||
fields: Fields::Unnamed {
|
||||
unnamed: [
|
||||
Field {
|
||||
vis: Visibility::Restricted {
|
||||
in_token: Some,
|
||||
path: Path {
|
||||
segments: [
|
||||
PathSegment {
|
||||
ident: "m",
|
||||
},
|
||||
Token![::],
|
||||
PathSegment {
|
||||
ident: "n",
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
ty: Type::Path {
|
||||
path: Path {
|
||||
segments: [
|
||||
PathSegment {
|
||||
ident: "u8",
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
},
|
||||
],
|
||||
},
|
||||
semi_token: Some,
|
||||
},
|
||||
}
|
||||
"###);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_pub_restricted_crate() {
|
||||
let input = quote! {
|
||||
pub(crate) struct S;
|
||||
};
|
||||
|
||||
snapshot!(input as DeriveInput, @r###"
|
||||
DeriveInput {
|
||||
vis: Visibility::Restricted {
|
||||
path: Path {
|
||||
segments: [
|
||||
PathSegment {
|
||||
ident: "crate",
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
ident: "S",
|
||||
generics: Generics,
|
||||
data: Data::Struct {
|
||||
fields: Fields::Unit,
|
||||
semi_token: Some,
|
||||
},
|
||||
}
|
||||
"###);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_pub_restricted_super() {
|
||||
let input = quote! {
|
||||
pub(super) struct S;
|
||||
};
|
||||
|
||||
snapshot!(input as DeriveInput, @r###"
|
||||
DeriveInput {
|
||||
vis: Visibility::Restricted {
|
||||
path: Path {
|
||||
segments: [
|
||||
PathSegment {
|
||||
ident: "super",
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
ident: "S",
|
||||
generics: Generics,
|
||||
data: Data::Struct {
|
||||
fields: Fields::Unit,
|
||||
semi_token: Some,
|
||||
},
|
||||
}
|
||||
"###);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_pub_restricted_in_super() {
|
||||
let input = quote! {
|
||||
pub(in super) struct S;
|
||||
};
|
||||
|
||||
snapshot!(input as DeriveInput, @r###"
|
||||
DeriveInput {
|
||||
vis: Visibility::Restricted {
|
||||
in_token: Some,
|
||||
path: Path {
|
||||
segments: [
|
||||
PathSegment {
|
||||
ident: "super",
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
ident: "S",
|
||||
generics: Generics,
|
||||
data: Data::Struct {
|
||||
fields: Fields::Unit,
|
||||
semi_token: Some,
|
||||
},
|
||||
}
|
||||
"###);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_fields_on_unit_struct() {
|
||||
let input = quote! {
|
||||
struct S;
|
||||
};
|
||||
|
||||
snapshot!(input as DeriveInput, @r###"
|
||||
DeriveInput {
|
||||
vis: Visibility::Inherited,
|
||||
ident: "S",
|
||||
generics: Generics,
|
||||
data: Data::Struct {
|
||||
fields: Fields::Unit,
|
||||
semi_token: Some,
|
||||
},
|
||||
}
|
||||
"###);
|
||||
|
||||
let data = match input.data {
|
||||
Data::Struct(data) => data,
|
||||
_ => panic!("expected a struct"),
|
||||
};
|
||||
|
||||
assert_eq!(0, data.fields.iter().count());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_fields_on_named_struct() {
|
||||
let input = quote! {
|
||||
struct S {
|
||||
foo: i32,
|
||||
pub bar: String,
|
||||
}
|
||||
};
|
||||
|
||||
snapshot!(input as DeriveInput, @r###"
|
||||
DeriveInput {
|
||||
vis: Visibility::Inherited,
|
||||
ident: "S",
|
||||
generics: Generics,
|
||||
data: Data::Struct {
|
||||
fields: Fields::Named {
|
||||
named: [
|
||||
Field {
|
||||
vis: Visibility::Inherited,
|
||||
ident: Some("foo"),
|
||||
colon_token: Some,
|
||||
ty: Type::Path {
|
||||
path: Path {
|
||||
segments: [
|
||||
PathSegment {
|
||||
ident: "i32",
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
},
|
||||
Token![,],
|
||||
Field {
|
||||
vis: Visibility::Public,
|
||||
ident: Some("bar"),
|
||||
colon_token: Some,
|
||||
ty: Type::Path {
|
||||
path: Path {
|
||||
segments: [
|
||||
PathSegment {
|
||||
ident: "String",
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
},
|
||||
Token![,],
|
||||
],
|
||||
},
|
||||
},
|
||||
}
|
||||
"###);
|
||||
|
||||
let data = match input.data {
|
||||
Data::Struct(data) => data,
|
||||
_ => panic!("expected a struct"),
|
||||
};
|
||||
|
||||
snapshot!(data.fields.into_iter().collect::<Vec<_>>(), @r###"
|
||||
[
|
||||
Field {
|
||||
vis: Visibility::Inherited,
|
||||
ident: Some("foo"),
|
||||
colon_token: Some,
|
||||
ty: Type::Path {
|
||||
path: Path {
|
||||
segments: [
|
||||
PathSegment {
|
||||
ident: "i32",
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
},
|
||||
Field {
|
||||
vis: Visibility::Public,
|
||||
ident: Some("bar"),
|
||||
colon_token: Some,
|
||||
ty: Type::Path {
|
||||
path: Path {
|
||||
segments: [
|
||||
PathSegment {
|
||||
ident: "String",
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
},
|
||||
]
|
||||
"###);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_fields_on_tuple_struct() {
|
||||
let input = quote! {
|
||||
struct S(i32, pub String);
|
||||
};
|
||||
|
||||
snapshot!(input as DeriveInput, @r###"
|
||||
DeriveInput {
|
||||
vis: Visibility::Inherited,
|
||||
ident: "S",
|
||||
generics: Generics,
|
||||
data: Data::Struct {
|
||||
fields: Fields::Unnamed {
|
||||
unnamed: [
|
||||
Field {
|
||||
vis: Visibility::Inherited,
|
||||
ty: Type::Path {
|
||||
path: Path {
|
||||
segments: [
|
||||
PathSegment {
|
||||
ident: "i32",
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
},
|
||||
Token![,],
|
||||
Field {
|
||||
vis: Visibility::Public,
|
||||
ty: Type::Path {
|
||||
path: Path {
|
||||
segments: [
|
||||
PathSegment {
|
||||
ident: "String",
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
},
|
||||
],
|
||||
},
|
||||
semi_token: Some,
|
||||
},
|
||||
}
|
||||
"###);
|
||||
|
||||
let data = match input.data {
|
||||
Data::Struct(data) => data,
|
||||
_ => panic!("expected a struct"),
|
||||
};
|
||||
|
||||
snapshot!(data.fields.iter().collect::<Vec<_>>(), @r###"
|
||||
[
|
||||
Field {
|
||||
vis: Visibility::Inherited,
|
||||
ty: Type::Path {
|
||||
path: Path {
|
||||
segments: [
|
||||
PathSegment {
|
||||
ident: "i32",
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
},
|
||||
Field {
|
||||
vis: Visibility::Public,
|
||||
ty: Type::Path {
|
||||
path: Path {
|
||||
segments: [
|
||||
PathSegment {
|
||||
ident: "String",
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
},
|
||||
]
|
||||
"###);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_ambiguous_crate() {
|
||||
let input = quote! {
|
||||
// The field type is `(crate::X)` not `crate (::X)`.
|
||||
struct S(crate::X);
|
||||
};
|
||||
|
||||
snapshot!(input as DeriveInput, @r###"
|
||||
DeriveInput {
|
||||
vis: Visibility::Inherited,
|
||||
ident: "S",
|
||||
generics: Generics,
|
||||
data: Data::Struct {
|
||||
fields: Fields::Unnamed {
|
||||
unnamed: [
|
||||
Field {
|
||||
vis: Visibility::Inherited,
|
||||
ty: Type::Path {
|
||||
path: Path {
|
||||
segments: [
|
||||
PathSegment {
|
||||
ident: "crate",
|
||||
},
|
||||
Token![::],
|
||||
PathSegment {
|
||||
ident: "X",
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
},
|
||||
],
|
||||
},
|
||||
semi_token: Some,
|
||||
},
|
||||
}
|
||||
"###);
|
||||
}
|
||||
540
vendor/syn/tests/test_expr.rs
vendored
Normal file
540
vendor/syn/tests/test_expr.rs
vendored
Normal file
@@ -0,0 +1,540 @@
|
||||
#![allow(clippy::single_element_loop, clippy::uninlined_format_args)]
|
||||
|
||||
#[macro_use]
|
||||
mod macros;
|
||||
|
||||
use proc_macro2::{Delimiter, Group};
|
||||
use quote::{quote, ToTokens as _};
|
||||
use syn::punctuated::Punctuated;
|
||||
use syn::{parse_quote, token, Expr, ExprRange, ExprTuple, Stmt, Token};
|
||||
|
||||
#[test]
|
||||
fn test_expr_parse() {
|
||||
let tokens = quote!(..100u32);
|
||||
snapshot!(tokens as Expr, @r###"
|
||||
Expr::Range {
|
||||
limits: RangeLimits::HalfOpen,
|
||||
end: Some(Expr::Lit {
|
||||
lit: 100u32,
|
||||
}),
|
||||
}
|
||||
"###);
|
||||
|
||||
let tokens = quote!(..100u32);
|
||||
snapshot!(tokens as ExprRange, @r###"
|
||||
ExprRange {
|
||||
limits: RangeLimits::HalfOpen,
|
||||
end: Some(Expr::Lit {
|
||||
lit: 100u32,
|
||||
}),
|
||||
}
|
||||
"###);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_await() {
|
||||
// Must not parse as Expr::Field.
|
||||
let tokens = quote!(fut.await);
|
||||
|
||||
snapshot!(tokens as Expr, @r###"
|
||||
Expr::Await {
|
||||
base: Expr::Path {
|
||||
path: Path {
|
||||
segments: [
|
||||
PathSegment {
|
||||
ident: "fut",
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
}
|
||||
"###);
|
||||
}
|
||||
|
||||
#[rustfmt::skip]
|
||||
#[test]
|
||||
fn test_tuple_multi_index() {
|
||||
let expected = snapshot!("tuple.0.0" as Expr, @r###"
|
||||
Expr::Field {
|
||||
base: Expr::Field {
|
||||
base: Expr::Path {
|
||||
path: Path {
|
||||
segments: [
|
||||
PathSegment {
|
||||
ident: "tuple",
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
member: Member::Unnamed(Index {
|
||||
index: 0,
|
||||
}),
|
||||
},
|
||||
member: Member::Unnamed(Index {
|
||||
index: 0,
|
||||
}),
|
||||
}
|
||||
"###);
|
||||
|
||||
for &input in &[
|
||||
"tuple .0.0",
|
||||
"tuple. 0.0",
|
||||
"tuple.0 .0",
|
||||
"tuple.0. 0",
|
||||
"tuple . 0 . 0",
|
||||
] {
|
||||
assert_eq!(expected, syn::parse_str(input).unwrap());
|
||||
}
|
||||
|
||||
for tokens in [
|
||||
quote!(tuple.0.0),
|
||||
quote!(tuple .0.0),
|
||||
quote!(tuple. 0.0),
|
||||
quote!(tuple.0 .0),
|
||||
quote!(tuple.0. 0),
|
||||
quote!(tuple . 0 . 0),
|
||||
] {
|
||||
assert_eq!(expected, syn::parse2(tokens).unwrap());
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_macro_variable_func() {
|
||||
// mimics the token stream corresponding to `$fn()`
|
||||
let path = Group::new(Delimiter::None, quote!(f));
|
||||
let tokens = quote!(#path());
|
||||
|
||||
snapshot!(tokens as Expr, @r###"
|
||||
Expr::Call {
|
||||
func: Expr::Group {
|
||||
expr: Expr::Path {
|
||||
path: Path {
|
||||
segments: [
|
||||
PathSegment {
|
||||
ident: "f",
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
},
|
||||
}
|
||||
"###);
|
||||
|
||||
let path = Group::new(Delimiter::None, quote! { #[inside] f });
|
||||
let tokens = quote!(#[outside] #path());
|
||||
|
||||
snapshot!(tokens as Expr, @r###"
|
||||
Expr::Call {
|
||||
attrs: [
|
||||
Attribute {
|
||||
style: AttrStyle::Outer,
|
||||
meta: Meta::Path {
|
||||
segments: [
|
||||
PathSegment {
|
||||
ident: "outside",
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
],
|
||||
func: Expr::Group {
|
||||
expr: Expr::Path {
|
||||
attrs: [
|
||||
Attribute {
|
||||
style: AttrStyle::Outer,
|
||||
meta: Meta::Path {
|
||||
segments: [
|
||||
PathSegment {
|
||||
ident: "inside",
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
],
|
||||
path: Path {
|
||||
segments: [
|
||||
PathSegment {
|
||||
ident: "f",
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
},
|
||||
}
|
||||
"###);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_macro_variable_macro() {
|
||||
// mimics the token stream corresponding to `$macro!()`
|
||||
let mac = Group::new(Delimiter::None, quote!(m));
|
||||
let tokens = quote!(#mac!());
|
||||
|
||||
snapshot!(tokens as Expr, @r###"
|
||||
Expr::Macro {
|
||||
mac: Macro {
|
||||
path: Path {
|
||||
segments: [
|
||||
PathSegment {
|
||||
ident: "m",
|
||||
},
|
||||
],
|
||||
},
|
||||
delimiter: MacroDelimiter::Paren,
|
||||
tokens: TokenStream(``),
|
||||
},
|
||||
}
|
||||
"###);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_macro_variable_struct() {
|
||||
// mimics the token stream corresponding to `$struct {}`
|
||||
let s = Group::new(Delimiter::None, quote! { S });
|
||||
let tokens = quote!(#s {});
|
||||
|
||||
snapshot!(tokens as Expr, @r###"
|
||||
Expr::Struct {
|
||||
path: Path {
|
||||
segments: [
|
||||
PathSegment {
|
||||
ident: "S",
|
||||
},
|
||||
],
|
||||
},
|
||||
}
|
||||
"###);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_macro_variable_unary() {
|
||||
// mimics the token stream corresponding to `$expr.method()` where expr is `&self`
|
||||
let inner = Group::new(Delimiter::None, quote!(&self));
|
||||
let tokens = quote!(#inner.method());
|
||||
snapshot!(tokens as Expr, @r###"
|
||||
Expr::MethodCall {
|
||||
receiver: Expr::Group {
|
||||
expr: Expr::Reference {
|
||||
expr: Expr::Path {
|
||||
path: Path {
|
||||
segments: [
|
||||
PathSegment {
|
||||
ident: "self",
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
method: "method",
|
||||
}
|
||||
"###);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_macro_variable_match_arm() {
|
||||
// mimics the token stream corresponding to `match v { _ => $expr }`
|
||||
let expr = Group::new(Delimiter::None, quote! { #[a] () });
|
||||
let tokens = quote!(match v { _ => #expr });
|
||||
snapshot!(tokens as Expr, @r###"
|
||||
Expr::Match {
|
||||
expr: Expr::Path {
|
||||
path: Path {
|
||||
segments: [
|
||||
PathSegment {
|
||||
ident: "v",
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
arms: [
|
||||
Arm {
|
||||
pat: Pat::Wild,
|
||||
body: Expr::Group {
|
||||
expr: Expr::Tuple {
|
||||
attrs: [
|
||||
Attribute {
|
||||
style: AttrStyle::Outer,
|
||||
meta: Meta::Path {
|
||||
segments: [
|
||||
PathSegment {
|
||||
ident: "a",
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
},
|
||||
],
|
||||
}
|
||||
"###);
|
||||
|
||||
let expr = Group::new(Delimiter::None, quote!(loop {} + 1));
|
||||
let tokens = quote!(match v { _ => #expr });
|
||||
snapshot!(tokens as Expr, @r###"
|
||||
Expr::Match {
|
||||
expr: Expr::Path {
|
||||
path: Path {
|
||||
segments: [
|
||||
PathSegment {
|
||||
ident: "v",
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
arms: [
|
||||
Arm {
|
||||
pat: Pat::Wild,
|
||||
body: Expr::Group {
|
||||
expr: Expr::Binary {
|
||||
left: Expr::Loop {
|
||||
body: Block {
|
||||
stmts: [],
|
||||
},
|
||||
},
|
||||
op: BinOp::Add,
|
||||
right: Expr::Lit {
|
||||
lit: 1,
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
],
|
||||
}
|
||||
"###);
|
||||
}
|
||||
|
||||
// https://github.com/dtolnay/syn/issues/1019
|
||||
#[test]
|
||||
fn test_closure_vs_rangefull() {
|
||||
#[rustfmt::skip] // rustfmt bug: https://github.com/rust-lang/rustfmt/issues/4808
|
||||
let tokens = quote!(|| .. .method());
|
||||
snapshot!(tokens as Expr, @r###"
|
||||
Expr::MethodCall {
|
||||
receiver: Expr::Closure {
|
||||
output: ReturnType::Default,
|
||||
body: Expr::Range {
|
||||
limits: RangeLimits::HalfOpen,
|
||||
},
|
||||
},
|
||||
method: "method",
|
||||
}
|
||||
"###);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_postfix_operator_after_cast() {
|
||||
syn::parse_str::<Expr>("|| &x as T[0]").unwrap_err();
|
||||
syn::parse_str::<Expr>("|| () as ()()").unwrap_err();
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_ranges() {
|
||||
syn::parse_str::<Expr>("..").unwrap();
|
||||
syn::parse_str::<Expr>("..hi").unwrap();
|
||||
syn::parse_str::<Expr>("lo..").unwrap();
|
||||
syn::parse_str::<Expr>("lo..hi").unwrap();
|
||||
|
||||
syn::parse_str::<Expr>("..=").unwrap_err();
|
||||
syn::parse_str::<Expr>("..=hi").unwrap();
|
||||
syn::parse_str::<Expr>("lo..=").unwrap_err();
|
||||
syn::parse_str::<Expr>("lo..=hi").unwrap();
|
||||
|
||||
syn::parse_str::<Expr>("...").unwrap_err();
|
||||
syn::parse_str::<Expr>("...hi").unwrap_err();
|
||||
syn::parse_str::<Expr>("lo...").unwrap_err();
|
||||
syn::parse_str::<Expr>("lo...hi").unwrap_err();
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_ambiguous_label() {
|
||||
for stmt in [
|
||||
quote! {
|
||||
return 'label: loop { break 'label 42; };
|
||||
},
|
||||
quote! {
|
||||
break ('label: loop { break 'label 42; });
|
||||
},
|
||||
quote! {
|
||||
break 1 + 'label: loop { break 'label 42; };
|
||||
},
|
||||
quote! {
|
||||
break 'outer 'inner: loop { break 'inner 42; };
|
||||
},
|
||||
] {
|
||||
syn::parse2::<Stmt>(stmt).unwrap();
|
||||
}
|
||||
|
||||
for stmt in [
|
||||
// Parentheses required. See https://github.com/rust-lang/rust/pull/87026.
|
||||
quote! {
|
||||
break 'label: loop { break 'label 42; };
|
||||
},
|
||||
] {
|
||||
syn::parse2::<Stmt>(stmt).unwrap_err();
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_extended_interpolated_path() {
|
||||
let path = Group::new(Delimiter::None, quote!(a::b));
|
||||
|
||||
let tokens = quote!(if #path {});
|
||||
snapshot!(tokens as Expr, @r###"
|
||||
Expr::If {
|
||||
cond: Expr::Group {
|
||||
expr: Expr::Path {
|
||||
path: Path {
|
||||
segments: [
|
||||
PathSegment {
|
||||
ident: "a",
|
||||
},
|
||||
Token![::],
|
||||
PathSegment {
|
||||
ident: "b",
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
},
|
||||
then_branch: Block {
|
||||
stmts: [],
|
||||
},
|
||||
}
|
||||
"###);
|
||||
|
||||
let tokens = quote!(#path {});
|
||||
snapshot!(tokens as Expr, @r###"
|
||||
Expr::Struct {
|
||||
path: Path {
|
||||
segments: [
|
||||
PathSegment {
|
||||
ident: "a",
|
||||
},
|
||||
Token![::],
|
||||
PathSegment {
|
||||
ident: "b",
|
||||
},
|
||||
],
|
||||
},
|
||||
}
|
||||
"###);
|
||||
|
||||
let tokens = quote!(#path :: c);
|
||||
snapshot!(tokens as Expr, @r###"
|
||||
Expr::Path {
|
||||
path: Path {
|
||||
segments: [
|
||||
PathSegment {
|
||||
ident: "a",
|
||||
},
|
||||
Token![::],
|
||||
PathSegment {
|
||||
ident: "b",
|
||||
},
|
||||
Token![::],
|
||||
PathSegment {
|
||||
ident: "c",
|
||||
},
|
||||
],
|
||||
},
|
||||
}
|
||||
"###);
|
||||
|
||||
let nested = Group::new(Delimiter::None, quote!(a::b || true));
|
||||
let tokens = quote!(if #nested && false {});
|
||||
snapshot!(tokens as Expr, @r###"
|
||||
Expr::If {
|
||||
cond: Expr::Binary {
|
||||
left: Expr::Group {
|
||||
expr: Expr::Binary {
|
||||
left: Expr::Path {
|
||||
path: Path {
|
||||
segments: [
|
||||
PathSegment {
|
||||
ident: "a",
|
||||
},
|
||||
Token![::],
|
||||
PathSegment {
|
||||
ident: "b",
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
op: BinOp::Or,
|
||||
right: Expr::Lit {
|
||||
lit: Lit::Bool {
|
||||
value: true,
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
op: BinOp::And,
|
||||
right: Expr::Lit {
|
||||
lit: Lit::Bool {
|
||||
value: false,
|
||||
},
|
||||
},
|
||||
},
|
||||
then_branch: Block {
|
||||
stmts: [],
|
||||
},
|
||||
}
|
||||
"###);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_tuple_comma() {
|
||||
let mut expr = ExprTuple {
|
||||
attrs: Vec::new(),
|
||||
paren_token: token::Paren::default(),
|
||||
elems: Punctuated::new(),
|
||||
};
|
||||
snapshot!(expr.to_token_stream() as Expr, @"Expr::Tuple");
|
||||
|
||||
expr.elems.push_value(parse_quote!(continue));
|
||||
// Must not parse to Expr::Paren
|
||||
snapshot!(expr.to_token_stream() as Expr, @r###"
|
||||
Expr::Tuple {
|
||||
elems: [
|
||||
Expr::Continue,
|
||||
Token![,],
|
||||
],
|
||||
}
|
||||
"###);
|
||||
|
||||
expr.elems.push_punct(<Token![,]>::default());
|
||||
snapshot!(expr.to_token_stream() as Expr, @r###"
|
||||
Expr::Tuple {
|
||||
elems: [
|
||||
Expr::Continue,
|
||||
Token![,],
|
||||
],
|
||||
}
|
||||
"###);
|
||||
|
||||
expr.elems.push_value(parse_quote!(continue));
|
||||
snapshot!(expr.to_token_stream() as Expr, @r###"
|
||||
Expr::Tuple {
|
||||
elems: [
|
||||
Expr::Continue,
|
||||
Token![,],
|
||||
Expr::Continue,
|
||||
],
|
||||
}
|
||||
"###);
|
||||
|
||||
expr.elems.push_punct(<Token![,]>::default());
|
||||
snapshot!(expr.to_token_stream() as Expr, @r###"
|
||||
Expr::Tuple {
|
||||
elems: [
|
||||
Expr::Continue,
|
||||
Token![,],
|
||||
Expr::Continue,
|
||||
Token![,],
|
||||
],
|
||||
}
|
||||
"###);
|
||||
}
|
||||
282
vendor/syn/tests/test_generics.rs
vendored
Normal file
282
vendor/syn/tests/test_generics.rs
vendored
Normal file
@@ -0,0 +1,282 @@
|
||||
#![allow(
|
||||
clippy::manual_let_else,
|
||||
clippy::too_many_lines,
|
||||
clippy::uninlined_format_args
|
||||
)]
|
||||
|
||||
#[macro_use]
|
||||
mod macros;
|
||||
|
||||
use quote::quote;
|
||||
use syn::{DeriveInput, ItemFn, TypeParamBound, WhereClause, WherePredicate};
|
||||
|
||||
#[test]
|
||||
fn test_split_for_impl() {
|
||||
let input = quote! {
|
||||
struct S<'a, 'b: 'a, #[may_dangle] T: 'a = ()> where T: Debug;
|
||||
};
|
||||
|
||||
snapshot!(input as DeriveInput, @r###"
|
||||
DeriveInput {
|
||||
vis: Visibility::Inherited,
|
||||
ident: "S",
|
||||
generics: Generics {
|
||||
lt_token: Some,
|
||||
params: [
|
||||
GenericParam::Lifetime(LifetimeParam {
|
||||
lifetime: Lifetime {
|
||||
ident: "a",
|
||||
},
|
||||
}),
|
||||
Token![,],
|
||||
GenericParam::Lifetime(LifetimeParam {
|
||||
lifetime: Lifetime {
|
||||
ident: "b",
|
||||
},
|
||||
colon_token: Some,
|
||||
bounds: [
|
||||
Lifetime {
|
||||
ident: "a",
|
||||
},
|
||||
],
|
||||
}),
|
||||
Token![,],
|
||||
GenericParam::Type(TypeParam {
|
||||
attrs: [
|
||||
Attribute {
|
||||
style: AttrStyle::Outer,
|
||||
meta: Meta::Path {
|
||||
segments: [
|
||||
PathSegment {
|
||||
ident: "may_dangle",
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
],
|
||||
ident: "T",
|
||||
colon_token: Some,
|
||||
bounds: [
|
||||
TypeParamBound::Lifetime {
|
||||
ident: "a",
|
||||
},
|
||||
],
|
||||
eq_token: Some,
|
||||
default: Some(Type::Tuple),
|
||||
}),
|
||||
],
|
||||
gt_token: Some,
|
||||
where_clause: Some(WhereClause {
|
||||
predicates: [
|
||||
WherePredicate::Type(PredicateType {
|
||||
bounded_ty: Type::Path {
|
||||
path: Path {
|
||||
segments: [
|
||||
PathSegment {
|
||||
ident: "T",
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
bounds: [
|
||||
TypeParamBound::Trait(TraitBound {
|
||||
path: Path {
|
||||
segments: [
|
||||
PathSegment {
|
||||
ident: "Debug",
|
||||
},
|
||||
],
|
||||
},
|
||||
}),
|
||||
],
|
||||
}),
|
||||
],
|
||||
}),
|
||||
},
|
||||
data: Data::Struct {
|
||||
fields: Fields::Unit,
|
||||
semi_token: Some,
|
||||
},
|
||||
}
|
||||
"###);
|
||||
|
||||
let generics = input.generics;
|
||||
let (impl_generics, ty_generics, where_clause) = generics.split_for_impl();
|
||||
|
||||
let generated = quote! {
|
||||
impl #impl_generics MyTrait for Test #ty_generics #where_clause {}
|
||||
};
|
||||
let expected = quote! {
|
||||
impl<'a, 'b: 'a, #[may_dangle] T: 'a> MyTrait
|
||||
for Test<'a, 'b, T>
|
||||
where
|
||||
T: Debug
|
||||
{}
|
||||
};
|
||||
assert_eq!(generated.to_string(), expected.to_string());
|
||||
|
||||
let turbofish = ty_generics.as_turbofish();
|
||||
let generated = quote! {
|
||||
Test #turbofish
|
||||
};
|
||||
let expected = quote! {
|
||||
Test::<'a, 'b, T>
|
||||
};
|
||||
assert_eq!(generated.to_string(), expected.to_string());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_ty_param_bound() {
|
||||
let tokens = quote!('a);
|
||||
snapshot!(tokens as TypeParamBound, @r###"
|
||||
TypeParamBound::Lifetime {
|
||||
ident: "a",
|
||||
}
|
||||
"###);
|
||||
|
||||
let tokens = quote!('_);
|
||||
snapshot!(tokens as TypeParamBound, @r###"
|
||||
TypeParamBound::Lifetime {
|
||||
ident: "_",
|
||||
}
|
||||
"###);
|
||||
|
||||
let tokens = quote!(Debug);
|
||||
snapshot!(tokens as TypeParamBound, @r###"
|
||||
TypeParamBound::Trait(TraitBound {
|
||||
path: Path {
|
||||
segments: [
|
||||
PathSegment {
|
||||
ident: "Debug",
|
||||
},
|
||||
],
|
||||
},
|
||||
})
|
||||
"###);
|
||||
|
||||
let tokens = quote!(?Sized);
|
||||
snapshot!(tokens as TypeParamBound, @r###"
|
||||
TypeParamBound::Trait(TraitBound {
|
||||
modifier: TraitBoundModifier::Maybe,
|
||||
path: Path {
|
||||
segments: [
|
||||
PathSegment {
|
||||
ident: "Sized",
|
||||
},
|
||||
],
|
||||
},
|
||||
})
|
||||
"###);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_fn_precedence_in_where_clause() {
|
||||
// This should parse as two separate bounds, `FnOnce() -> i32` and `Send` - not
|
||||
// `FnOnce() -> (i32 + Send)`.
|
||||
let input = quote! {
|
||||
fn f<G>()
|
||||
where
|
||||
G: FnOnce() -> i32 + Send,
|
||||
{
|
||||
}
|
||||
};
|
||||
|
||||
snapshot!(input as ItemFn, @r###"
|
||||
ItemFn {
|
||||
vis: Visibility::Inherited,
|
||||
sig: Signature {
|
||||
ident: "f",
|
||||
generics: Generics {
|
||||
lt_token: Some,
|
||||
params: [
|
||||
GenericParam::Type(TypeParam {
|
||||
ident: "G",
|
||||
}),
|
||||
],
|
||||
gt_token: Some,
|
||||
where_clause: Some(WhereClause {
|
||||
predicates: [
|
||||
WherePredicate::Type(PredicateType {
|
||||
bounded_ty: Type::Path {
|
||||
path: Path {
|
||||
segments: [
|
||||
PathSegment {
|
||||
ident: "G",
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
bounds: [
|
||||
TypeParamBound::Trait(TraitBound {
|
||||
path: Path {
|
||||
segments: [
|
||||
PathSegment {
|
||||
ident: "FnOnce",
|
||||
arguments: PathArguments::Parenthesized {
|
||||
output: ReturnType::Type(
|
||||
Type::Path {
|
||||
path: Path {
|
||||
segments: [
|
||||
PathSegment {
|
||||
ident: "i32",
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
),
|
||||
},
|
||||
},
|
||||
],
|
||||
},
|
||||
}),
|
||||
Token![+],
|
||||
TypeParamBound::Trait(TraitBound {
|
||||
path: Path {
|
||||
segments: [
|
||||
PathSegment {
|
||||
ident: "Send",
|
||||
},
|
||||
],
|
||||
},
|
||||
}),
|
||||
],
|
||||
}),
|
||||
Token![,],
|
||||
],
|
||||
}),
|
||||
},
|
||||
output: ReturnType::Default,
|
||||
},
|
||||
block: Block {
|
||||
stmts: [],
|
||||
},
|
||||
}
|
||||
"###);
|
||||
|
||||
let where_clause = input.sig.generics.where_clause.as_ref().unwrap();
|
||||
assert_eq!(where_clause.predicates.len(), 1);
|
||||
|
||||
let predicate = match &where_clause.predicates[0] {
|
||||
WherePredicate::Type(pred) => pred,
|
||||
_ => panic!("wrong predicate kind"),
|
||||
};
|
||||
|
||||
assert_eq!(predicate.bounds.len(), 2, "{:#?}", predicate.bounds);
|
||||
|
||||
let first_bound = &predicate.bounds[0];
|
||||
assert_eq!(quote!(#first_bound).to_string(), "FnOnce () -> i32");
|
||||
|
||||
let second_bound = &predicate.bounds[1];
|
||||
assert_eq!(quote!(#second_bound).to_string(), "Send");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_where_clause_at_end_of_input() {
|
||||
let input = quote! {
|
||||
where
|
||||
};
|
||||
|
||||
snapshot!(input as WhereClause, @"WhereClause");
|
||||
|
||||
assert_eq!(input.predicates.len(), 0);
|
||||
}
|
||||
53
vendor/syn/tests/test_grouping.rs
vendored
Normal file
53
vendor/syn/tests/test_grouping.rs
vendored
Normal file
@@ -0,0 +1,53 @@
|
||||
#![allow(clippy::uninlined_format_args)]
|
||||
|
||||
#[macro_use]
|
||||
mod macros;
|
||||
|
||||
use proc_macro2::{Delimiter, Group, Literal, Punct, Spacing, TokenStream, TokenTree};
|
||||
use syn::Expr;
|
||||
|
||||
#[test]
|
||||
fn test_grouping() {
|
||||
let tokens: TokenStream = TokenStream::from_iter(vec![
|
||||
TokenTree::Literal(Literal::i32_suffixed(1)),
|
||||
TokenTree::Punct(Punct::new('+', Spacing::Alone)),
|
||||
TokenTree::Group(Group::new(
|
||||
Delimiter::None,
|
||||
TokenStream::from_iter(vec![
|
||||
TokenTree::Literal(Literal::i32_suffixed(2)),
|
||||
TokenTree::Punct(Punct::new('+', Spacing::Alone)),
|
||||
TokenTree::Literal(Literal::i32_suffixed(3)),
|
||||
]),
|
||||
)),
|
||||
TokenTree::Punct(Punct::new('*', Spacing::Alone)),
|
||||
TokenTree::Literal(Literal::i32_suffixed(4)),
|
||||
]);
|
||||
|
||||
assert_eq!(tokens.to_string(), "1i32 + 2i32 + 3i32 * 4i32");
|
||||
|
||||
snapshot!(tokens as Expr, @r###"
|
||||
Expr::Binary {
|
||||
left: Expr::Lit {
|
||||
lit: 1i32,
|
||||
},
|
||||
op: BinOp::Add,
|
||||
right: Expr::Binary {
|
||||
left: Expr::Group {
|
||||
expr: Expr::Binary {
|
||||
left: Expr::Lit {
|
||||
lit: 2i32,
|
||||
},
|
||||
op: BinOp::Add,
|
||||
right: Expr::Lit {
|
||||
lit: 3i32,
|
||||
},
|
||||
},
|
||||
},
|
||||
op: BinOp::Mul,
|
||||
right: Expr::Lit {
|
||||
lit: 4i32,
|
||||
},
|
||||
},
|
||||
}
|
||||
"###);
|
||||
}
|
||||
85
vendor/syn/tests/test_ident.rs
vendored
Normal file
85
vendor/syn/tests/test_ident.rs
vendored
Normal file
@@ -0,0 +1,85 @@
|
||||
use proc_macro2::{Ident, Span, TokenStream};
|
||||
use std::str::FromStr;
|
||||
use syn::Result;
|
||||
|
||||
fn parse(s: &str) -> Result<Ident> {
|
||||
syn::parse2(TokenStream::from_str(s).unwrap())
|
||||
}
|
||||
|
||||
fn new(s: &str) -> Ident {
|
||||
Ident::new(s, Span::call_site())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn ident_parse() {
|
||||
parse("String").unwrap();
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn ident_parse_keyword() {
|
||||
parse("abstract").unwrap_err();
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn ident_parse_empty() {
|
||||
parse("").unwrap_err();
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn ident_parse_lifetime() {
|
||||
parse("'static").unwrap_err();
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn ident_parse_underscore() {
|
||||
parse("_").unwrap_err();
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn ident_parse_number() {
|
||||
parse("255").unwrap_err();
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn ident_parse_invalid() {
|
||||
parse("a#").unwrap_err();
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn ident_new() {
|
||||
new("String");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn ident_new_keyword() {
|
||||
new("abstract");
|
||||
}
|
||||
|
||||
#[test]
|
||||
#[should_panic(expected = "use Option<Ident>")]
|
||||
fn ident_new_empty() {
|
||||
new("");
|
||||
}
|
||||
|
||||
#[test]
|
||||
#[should_panic(expected = "not a valid Ident")]
|
||||
fn ident_new_lifetime() {
|
||||
new("'static");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn ident_new_underscore() {
|
||||
new("_");
|
||||
}
|
||||
|
||||
#[test]
|
||||
#[should_panic(expected = "use Literal instead")]
|
||||
fn ident_new_number() {
|
||||
new("255");
|
||||
}
|
||||
|
||||
#[test]
|
||||
#[should_panic(expected = "\"a#\" is not a valid Ident")]
|
||||
fn ident_new_invalid() {
|
||||
new("a#");
|
||||
}
|
||||
332
vendor/syn/tests/test_item.rs
vendored
Normal file
332
vendor/syn/tests/test_item.rs
vendored
Normal file
@@ -0,0 +1,332 @@
|
||||
#![allow(clippy::uninlined_format_args)]
|
||||
|
||||
#[macro_use]
|
||||
mod macros;
|
||||
|
||||
use proc_macro2::{Delimiter, Group, Ident, Span, TokenStream, TokenTree};
|
||||
use quote::quote;
|
||||
use syn::{Item, ItemTrait};
|
||||
|
||||
#[test]
|
||||
fn test_macro_variable_attr() {
|
||||
// mimics the token stream corresponding to `$attr fn f() {}`
|
||||
let tokens = TokenStream::from_iter(vec![
|
||||
TokenTree::Group(Group::new(Delimiter::None, quote! { #[test] })),
|
||||
TokenTree::Ident(Ident::new("fn", Span::call_site())),
|
||||
TokenTree::Ident(Ident::new("f", Span::call_site())),
|
||||
TokenTree::Group(Group::new(Delimiter::Parenthesis, TokenStream::new())),
|
||||
TokenTree::Group(Group::new(Delimiter::Brace, TokenStream::new())),
|
||||
]);
|
||||
|
||||
snapshot!(tokens as Item, @r###"
|
||||
Item::Fn {
|
||||
attrs: [
|
||||
Attribute {
|
||||
style: AttrStyle::Outer,
|
||||
meta: Meta::Path {
|
||||
segments: [
|
||||
PathSegment {
|
||||
ident: "test",
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
],
|
||||
vis: Visibility::Inherited,
|
||||
sig: Signature {
|
||||
ident: "f",
|
||||
generics: Generics,
|
||||
output: ReturnType::Default,
|
||||
},
|
||||
block: Block {
|
||||
stmts: [],
|
||||
},
|
||||
}
|
||||
"###);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_negative_impl() {
|
||||
// Rustc parses all of the following.
|
||||
|
||||
#[cfg(any())]
|
||||
impl ! {}
|
||||
let tokens = quote! {
|
||||
impl ! {}
|
||||
};
|
||||
snapshot!(tokens as Item, @r###"
|
||||
Item::Impl {
|
||||
generics: Generics,
|
||||
self_ty: Type::Never,
|
||||
}
|
||||
"###);
|
||||
|
||||
#[cfg(any())]
|
||||
#[rustfmt::skip]
|
||||
impl !Trait {}
|
||||
let tokens = quote! {
|
||||
impl !Trait {}
|
||||
};
|
||||
snapshot!(tokens as Item, @r###"
|
||||
Item::Impl {
|
||||
generics: Generics,
|
||||
self_ty: Type::Verbatim(`! Trait`),
|
||||
}
|
||||
"###);
|
||||
|
||||
#[cfg(any())]
|
||||
impl !Trait for T {}
|
||||
let tokens = quote! {
|
||||
impl !Trait for T {}
|
||||
};
|
||||
snapshot!(tokens as Item, @r###"
|
||||
Item::Impl {
|
||||
generics: Generics,
|
||||
trait_: Some((
|
||||
Some,
|
||||
Path {
|
||||
segments: [
|
||||
PathSegment {
|
||||
ident: "Trait",
|
||||
},
|
||||
],
|
||||
},
|
||||
)),
|
||||
self_ty: Type::Path {
|
||||
path: Path {
|
||||
segments: [
|
||||
PathSegment {
|
||||
ident: "T",
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
}
|
||||
"###);
|
||||
|
||||
#[cfg(any())]
|
||||
#[rustfmt::skip]
|
||||
impl !! {}
|
||||
let tokens = quote! {
|
||||
impl !! {}
|
||||
};
|
||||
snapshot!(tokens as Item, @r###"
|
||||
Item::Impl {
|
||||
generics: Generics,
|
||||
self_ty: Type::Verbatim(`! !`),
|
||||
}
|
||||
"###);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_macro_variable_impl() {
|
||||
// mimics the token stream corresponding to `impl $trait for $ty {}`
|
||||
let tokens = TokenStream::from_iter(vec![
|
||||
TokenTree::Ident(Ident::new("impl", Span::call_site())),
|
||||
TokenTree::Group(Group::new(Delimiter::None, quote!(Trait))),
|
||||
TokenTree::Ident(Ident::new("for", Span::call_site())),
|
||||
TokenTree::Group(Group::new(Delimiter::None, quote!(Type))),
|
||||
TokenTree::Group(Group::new(Delimiter::Brace, TokenStream::new())),
|
||||
]);
|
||||
|
||||
snapshot!(tokens as Item, @r###"
|
||||
Item::Impl {
|
||||
generics: Generics,
|
||||
trait_: Some((
|
||||
None,
|
||||
Path {
|
||||
segments: [
|
||||
PathSegment {
|
||||
ident: "Trait",
|
||||
},
|
||||
],
|
||||
},
|
||||
)),
|
||||
self_ty: Type::Group {
|
||||
elem: Type::Path {
|
||||
path: Path {
|
||||
segments: [
|
||||
PathSegment {
|
||||
ident: "Type",
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
},
|
||||
}
|
||||
"###);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_supertraits() {
|
||||
// Rustc parses all of the following.
|
||||
|
||||
#[rustfmt::skip]
|
||||
let tokens = quote!(trait Trait where {});
|
||||
snapshot!(tokens as ItemTrait, @r###"
|
||||
ItemTrait {
|
||||
vis: Visibility::Inherited,
|
||||
ident: "Trait",
|
||||
generics: Generics {
|
||||
where_clause: Some(WhereClause),
|
||||
},
|
||||
}
|
||||
"###);
|
||||
|
||||
#[rustfmt::skip]
|
||||
let tokens = quote!(trait Trait: where {});
|
||||
snapshot!(tokens as ItemTrait, @r###"
|
||||
ItemTrait {
|
||||
vis: Visibility::Inherited,
|
||||
ident: "Trait",
|
||||
generics: Generics {
|
||||
where_clause: Some(WhereClause),
|
||||
},
|
||||
colon_token: Some,
|
||||
}
|
||||
"###);
|
||||
|
||||
#[rustfmt::skip]
|
||||
let tokens = quote!(trait Trait: Sized where {});
|
||||
snapshot!(tokens as ItemTrait, @r###"
|
||||
ItemTrait {
|
||||
vis: Visibility::Inherited,
|
||||
ident: "Trait",
|
||||
generics: Generics {
|
||||
where_clause: Some(WhereClause),
|
||||
},
|
||||
colon_token: Some,
|
||||
supertraits: [
|
||||
TypeParamBound::Trait(TraitBound {
|
||||
path: Path {
|
||||
segments: [
|
||||
PathSegment {
|
||||
ident: "Sized",
|
||||
},
|
||||
],
|
||||
},
|
||||
}),
|
||||
],
|
||||
}
|
||||
"###);
|
||||
|
||||
#[rustfmt::skip]
|
||||
let tokens = quote!(trait Trait: Sized + where {});
|
||||
snapshot!(tokens as ItemTrait, @r###"
|
||||
ItemTrait {
|
||||
vis: Visibility::Inherited,
|
||||
ident: "Trait",
|
||||
generics: Generics {
|
||||
where_clause: Some(WhereClause),
|
||||
},
|
||||
colon_token: Some,
|
||||
supertraits: [
|
||||
TypeParamBound::Trait(TraitBound {
|
||||
path: Path {
|
||||
segments: [
|
||||
PathSegment {
|
||||
ident: "Sized",
|
||||
},
|
||||
],
|
||||
},
|
||||
}),
|
||||
Token![+],
|
||||
],
|
||||
}
|
||||
"###);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_type_empty_bounds() {
|
||||
#[rustfmt::skip]
|
||||
let tokens = quote! {
|
||||
trait Foo {
|
||||
type Bar: ;
|
||||
}
|
||||
};
|
||||
|
||||
snapshot!(tokens as ItemTrait, @r###"
|
||||
ItemTrait {
|
||||
vis: Visibility::Inherited,
|
||||
ident: "Foo",
|
||||
generics: Generics,
|
||||
items: [
|
||||
TraitItem::Type {
|
||||
ident: "Bar",
|
||||
generics: Generics,
|
||||
colon_token: Some,
|
||||
},
|
||||
],
|
||||
}
|
||||
"###);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_impl_visibility() {
|
||||
let tokens = quote! {
|
||||
pub default unsafe impl union {}
|
||||
};
|
||||
|
||||
snapshot!(tokens as Item, @"Item::Verbatim(`pub default unsafe impl union { }`)");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_impl_type_parameter_defaults() {
|
||||
#[cfg(any())]
|
||||
impl<T = ()> () {}
|
||||
let tokens = quote! {
|
||||
impl<T = ()> () {}
|
||||
};
|
||||
snapshot!(tokens as Item, @r###"
|
||||
Item::Impl {
|
||||
generics: Generics {
|
||||
lt_token: Some,
|
||||
params: [
|
||||
GenericParam::Type(TypeParam {
|
||||
ident: "T",
|
||||
eq_token: Some,
|
||||
default: Some(Type::Tuple),
|
||||
}),
|
||||
],
|
||||
gt_token: Some,
|
||||
},
|
||||
self_ty: Type::Tuple,
|
||||
}
|
||||
"###);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_impl_trait_trailing_plus() {
|
||||
let tokens = quote! {
|
||||
fn f() -> impl Sized + {}
|
||||
};
|
||||
|
||||
snapshot!(tokens as Item, @r###"
|
||||
Item::Fn {
|
||||
vis: Visibility::Inherited,
|
||||
sig: Signature {
|
||||
ident: "f",
|
||||
generics: Generics,
|
||||
output: ReturnType::Type(
|
||||
Type::ImplTrait {
|
||||
bounds: [
|
||||
TypeParamBound::Trait(TraitBound {
|
||||
path: Path {
|
||||
segments: [
|
||||
PathSegment {
|
||||
ident: "Sized",
|
||||
},
|
||||
],
|
||||
},
|
||||
}),
|
||||
Token![+],
|
||||
],
|
||||
},
|
||||
),
|
||||
},
|
||||
block: Block {
|
||||
stmts: [],
|
||||
},
|
||||
}
|
||||
"###);
|
||||
}
|
||||
70
vendor/syn/tests/test_iterators.rs
vendored
Normal file
70
vendor/syn/tests/test_iterators.rs
vendored
Normal file
@@ -0,0 +1,70 @@
|
||||
#![allow(clippy::uninlined_format_args)]
|
||||
|
||||
use syn::punctuated::{Pair, Punctuated};
|
||||
use syn::Token;
|
||||
|
||||
#[macro_use]
|
||||
mod macros;
|
||||
|
||||
macro_rules! check_exact_size_iterator {
|
||||
($iter:expr) => {{
|
||||
let iter = $iter;
|
||||
let size_hint = iter.size_hint();
|
||||
let len = iter.len();
|
||||
let count = iter.count();
|
||||
assert_eq!(len, count);
|
||||
assert_eq!(size_hint, (count, Some(count)));
|
||||
}};
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn pairs() {
|
||||
let mut p: Punctuated<_, Token![,]> = punctuated!(2, 3, 4);
|
||||
|
||||
check_exact_size_iterator!(p.pairs());
|
||||
check_exact_size_iterator!(p.pairs_mut());
|
||||
check_exact_size_iterator!(p.into_pairs());
|
||||
|
||||
let mut p: Punctuated<_, Token![,]> = punctuated!(2, 3, 4);
|
||||
|
||||
assert_eq!(p.pairs().next_back().map(Pair::into_value), Some(&4));
|
||||
assert_eq!(
|
||||
p.pairs_mut().next_back().map(Pair::into_value),
|
||||
Some(&mut 4)
|
||||
);
|
||||
assert_eq!(p.into_pairs().next_back().map(Pair::into_value), Some(4));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn iter() {
|
||||
let mut p: Punctuated<_, Token![,]> = punctuated!(2, 3, 4);
|
||||
|
||||
check_exact_size_iterator!(p.iter());
|
||||
check_exact_size_iterator!(p.iter_mut());
|
||||
check_exact_size_iterator!(p.into_iter());
|
||||
|
||||
let mut p: Punctuated<_, Token![,]> = punctuated!(2, 3, 4);
|
||||
|
||||
assert_eq!(p.iter().next_back(), Some(&4));
|
||||
assert_eq!(p.iter_mut().next_back(), Some(&mut 4));
|
||||
assert_eq!(p.into_iter().next_back(), Some(4));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn may_dangle() {
|
||||
let p: Punctuated<_, Token![,]> = punctuated!(2, 3, 4);
|
||||
for element in &p {
|
||||
if *element == 2 {
|
||||
drop(p);
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
let mut p: Punctuated<_, Token![,]> = punctuated!(2, 3, 4);
|
||||
for element in &mut p {
|
||||
if *element == 2 {
|
||||
drop(p);
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
273
vendor/syn/tests/test_lit.rs
vendored
Normal file
273
vendor/syn/tests/test_lit.rs
vendored
Normal file
@@ -0,0 +1,273 @@
|
||||
#![allow(
|
||||
clippy::float_cmp,
|
||||
clippy::non_ascii_literal,
|
||||
clippy::single_match_else,
|
||||
clippy::uninlined_format_args
|
||||
)]
|
||||
|
||||
#[macro_use]
|
||||
mod macros;
|
||||
|
||||
use proc_macro2::{Delimiter, Group, Literal, Span, TokenStream, TokenTree};
|
||||
use quote::ToTokens;
|
||||
use std::str::FromStr;
|
||||
use syn::{Lit, LitFloat, LitInt, LitStr};
|
||||
|
||||
fn lit(s: &str) -> Lit {
|
||||
let mut tokens = TokenStream::from_str(s).unwrap().into_iter();
|
||||
match tokens.next().unwrap() {
|
||||
TokenTree::Literal(lit) => {
|
||||
assert!(tokens.next().is_none());
|
||||
Lit::new(lit)
|
||||
}
|
||||
wrong => panic!("{:?}", wrong),
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn strings() {
|
||||
fn test_string(s: &str, value: &str) {
|
||||
match lit(s) {
|
||||
Lit::Str(lit) => {
|
||||
assert_eq!(lit.value(), value);
|
||||
let again = lit.into_token_stream().to_string();
|
||||
if again != s {
|
||||
test_string(&again, value);
|
||||
}
|
||||
}
|
||||
wrong => panic!("{:?}", wrong),
|
||||
}
|
||||
}
|
||||
|
||||
test_string("\"a\"", "a");
|
||||
test_string("\"\\n\"", "\n");
|
||||
test_string("\"\\r\"", "\r");
|
||||
test_string("\"\\t\"", "\t");
|
||||
test_string("\"🐕\"", "🐕"); // NOTE: This is an emoji
|
||||
test_string("\"\\\"\"", "\"");
|
||||
test_string("\"'\"", "'");
|
||||
test_string("\"\"", "");
|
||||
test_string("\"\\u{1F415}\"", "\u{1F415}");
|
||||
test_string("\"\\u{1_2__3_}\"", "\u{123}");
|
||||
test_string(
|
||||
"\"contains\nnewlines\\\nescaped newlines\"",
|
||||
"contains\nnewlinesescaped newlines",
|
||||
);
|
||||
test_string(
|
||||
"\"escaped newline\\\n \x0C unsupported whitespace\"",
|
||||
"escaped newline\x0C unsupported whitespace",
|
||||
);
|
||||
test_string("r\"raw\nstring\\\nhere\"", "raw\nstring\\\nhere");
|
||||
test_string("\"...\"q", "...");
|
||||
test_string("r\"...\"q", "...");
|
||||
test_string("r##\"...\"##q", "...");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn byte_strings() {
|
||||
fn test_byte_string(s: &str, value: &[u8]) {
|
||||
match lit(s) {
|
||||
Lit::ByteStr(lit) => {
|
||||
assert_eq!(lit.value(), value);
|
||||
let again = lit.into_token_stream().to_string();
|
||||
if again != s {
|
||||
test_byte_string(&again, value);
|
||||
}
|
||||
}
|
||||
wrong => panic!("{:?}", wrong),
|
||||
}
|
||||
}
|
||||
|
||||
test_byte_string("b\"a\"", b"a");
|
||||
test_byte_string("b\"\\n\"", b"\n");
|
||||
test_byte_string("b\"\\r\"", b"\r");
|
||||
test_byte_string("b\"\\t\"", b"\t");
|
||||
test_byte_string("b\"\\\"\"", b"\"");
|
||||
test_byte_string("b\"'\"", b"'");
|
||||
test_byte_string("b\"\"", b"");
|
||||
test_byte_string(
|
||||
"b\"contains\nnewlines\\\nescaped newlines\"",
|
||||
b"contains\nnewlinesescaped newlines",
|
||||
);
|
||||
test_byte_string("br\"raw\nstring\\\nhere\"", b"raw\nstring\\\nhere");
|
||||
test_byte_string("b\"...\"q", b"...");
|
||||
test_byte_string("br\"...\"q", b"...");
|
||||
test_byte_string("br##\"...\"##q", b"...");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn bytes() {
|
||||
fn test_byte(s: &str, value: u8) {
|
||||
match lit(s) {
|
||||
Lit::Byte(lit) => {
|
||||
assert_eq!(lit.value(), value);
|
||||
let again = lit.into_token_stream().to_string();
|
||||
assert_eq!(again, s);
|
||||
}
|
||||
wrong => panic!("{:?}", wrong),
|
||||
}
|
||||
}
|
||||
|
||||
test_byte("b'a'", b'a');
|
||||
test_byte("b'\\n'", b'\n');
|
||||
test_byte("b'\\r'", b'\r');
|
||||
test_byte("b'\\t'", b'\t');
|
||||
test_byte("b'\\''", b'\'');
|
||||
test_byte("b'\"'", b'"');
|
||||
test_byte("b'a'q", b'a');
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn chars() {
|
||||
fn test_char(s: &str, value: char) {
|
||||
match lit(s) {
|
||||
Lit::Char(lit) => {
|
||||
assert_eq!(lit.value(), value);
|
||||
let again = lit.into_token_stream().to_string();
|
||||
if again != s {
|
||||
test_char(&again, value);
|
||||
}
|
||||
}
|
||||
wrong => panic!("{:?}", wrong),
|
||||
}
|
||||
}
|
||||
|
||||
test_char("'a'", 'a');
|
||||
test_char("'\\n'", '\n');
|
||||
test_char("'\\r'", '\r');
|
||||
test_char("'\\t'", '\t');
|
||||
test_char("'🐕'", '🐕'); // NOTE: This is an emoji
|
||||
test_char("'\\''", '\'');
|
||||
test_char("'\"'", '"');
|
||||
test_char("'\\u{1F415}'", '\u{1F415}');
|
||||
test_char("'a'q", 'a');
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn ints() {
|
||||
fn test_int(s: &str, value: u64, suffix: &str) {
|
||||
match lit(s) {
|
||||
Lit::Int(lit) => {
|
||||
assert_eq!(lit.base10_digits().parse::<u64>().unwrap(), value);
|
||||
assert_eq!(lit.suffix(), suffix);
|
||||
let again = lit.into_token_stream().to_string();
|
||||
if again != s {
|
||||
test_int(&again, value, suffix);
|
||||
}
|
||||
}
|
||||
wrong => panic!("{:?}", wrong),
|
||||
}
|
||||
}
|
||||
|
||||
test_int("5", 5, "");
|
||||
test_int("5u32", 5, "u32");
|
||||
test_int("0E", 0, "E");
|
||||
test_int("0ECMA", 0, "ECMA");
|
||||
test_int("0o0A", 0, "A");
|
||||
test_int("5_0", 50, "");
|
||||
test_int("5_____0_____", 50, "");
|
||||
test_int("0x7f", 127, "");
|
||||
test_int("0x7F", 127, "");
|
||||
test_int("0b1001", 9, "");
|
||||
test_int("0o73", 59, "");
|
||||
test_int("0x7Fu8", 127, "u8");
|
||||
test_int("0b1001i8", 9, "i8");
|
||||
test_int("0o73u32", 59, "u32");
|
||||
test_int("0x__7___f_", 127, "");
|
||||
test_int("0x__7___F_", 127, "");
|
||||
test_int("0b_1_0__01", 9, "");
|
||||
test_int("0o_7__3", 59, "");
|
||||
test_int("0x_7F__u8", 127, "u8");
|
||||
test_int("0b__10__0_1i8", 9, "i8");
|
||||
test_int("0o__7__________________3u32", 59, "u32");
|
||||
test_int("0e1\u{5c5}", 0, "e1\u{5c5}");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn floats() {
|
||||
fn test_float(s: &str, value: f64, suffix: &str) {
|
||||
match lit(s) {
|
||||
Lit::Float(lit) => {
|
||||
assert_eq!(lit.base10_digits().parse::<f64>().unwrap(), value);
|
||||
assert_eq!(lit.suffix(), suffix);
|
||||
let again = lit.into_token_stream().to_string();
|
||||
if again != s {
|
||||
test_float(&again, value, suffix);
|
||||
}
|
||||
}
|
||||
wrong => panic!("{:?}", wrong),
|
||||
}
|
||||
}
|
||||
|
||||
test_float("5.5", 5.5, "");
|
||||
test_float("5.5E12", 5.5e12, "");
|
||||
test_float("5.5e12", 5.5e12, "");
|
||||
test_float("1.0__3e-12", 1.03e-12, "");
|
||||
test_float("1.03e+12", 1.03e12, "");
|
||||
test_float("9e99e99", 9e99, "e99");
|
||||
test_float("1e_0", 1.0, "");
|
||||
test_float("0.0ECMA", 0.0, "ECMA");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn negative() {
|
||||
let span = Span::call_site();
|
||||
assert_eq!("-1", LitInt::new("-1", span).to_string());
|
||||
assert_eq!("-1i8", LitInt::new("-1i8", span).to_string());
|
||||
assert_eq!("-1i16", LitInt::new("-1i16", span).to_string());
|
||||
assert_eq!("-1i32", LitInt::new("-1i32", span).to_string());
|
||||
assert_eq!("-1i64", LitInt::new("-1i64", span).to_string());
|
||||
assert_eq!("-1.5", LitFloat::new("-1.5", span).to_string());
|
||||
assert_eq!("-1.5f32", LitFloat::new("-1.5f32", span).to_string());
|
||||
assert_eq!("-1.5f64", LitFloat::new("-1.5f64", span).to_string());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn suffix() {
|
||||
fn get_suffix(token: &str) -> String {
|
||||
let lit = syn::parse_str::<Lit>(token).unwrap();
|
||||
match lit {
|
||||
Lit::Str(lit) => lit.suffix().to_owned(),
|
||||
Lit::ByteStr(lit) => lit.suffix().to_owned(),
|
||||
Lit::Byte(lit) => lit.suffix().to_owned(),
|
||||
Lit::Char(lit) => lit.suffix().to_owned(),
|
||||
Lit::Int(lit) => lit.suffix().to_owned(),
|
||||
Lit::Float(lit) => lit.suffix().to_owned(),
|
||||
_ => unimplemented!(),
|
||||
}
|
||||
}
|
||||
|
||||
assert_eq!(get_suffix("\"\"s"), "s");
|
||||
assert_eq!(get_suffix("r\"\"r"), "r");
|
||||
assert_eq!(get_suffix("b\"\"b"), "b");
|
||||
assert_eq!(get_suffix("br\"\"br"), "br");
|
||||
assert_eq!(get_suffix("r#\"\"#r"), "r");
|
||||
assert_eq!(get_suffix("'c'c"), "c");
|
||||
assert_eq!(get_suffix("b'b'b"), "b");
|
||||
assert_eq!(get_suffix("1i32"), "i32");
|
||||
assert_eq!(get_suffix("1_i32"), "i32");
|
||||
assert_eq!(get_suffix("1.0f32"), "f32");
|
||||
assert_eq!(get_suffix("1.0_f32"), "f32");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_deep_group_empty() {
|
||||
let tokens = TokenStream::from_iter(vec![TokenTree::Group(Group::new(
|
||||
Delimiter::None,
|
||||
TokenStream::from_iter(vec![TokenTree::Group(Group::new(
|
||||
Delimiter::None,
|
||||
TokenStream::from_iter(vec![TokenTree::Literal(Literal::string("hi"))]),
|
||||
))]),
|
||||
))]);
|
||||
|
||||
snapshot!(tokens as Lit, @r#""hi""# );
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_error() {
|
||||
let err = syn::parse_str::<LitStr>("...").unwrap_err();
|
||||
assert_eq!("expected string literal", err.to_string());
|
||||
|
||||
let err = syn::parse_str::<LitStr>("5").unwrap_err();
|
||||
assert_eq!("expected string literal", err.to_string());
|
||||
}
|
||||
154
vendor/syn/tests/test_meta.rs
vendored
Normal file
154
vendor/syn/tests/test_meta.rs
vendored
Normal file
@@ -0,0 +1,154 @@
|
||||
#![allow(
|
||||
clippy::shadow_unrelated,
|
||||
clippy::too_many_lines,
|
||||
clippy::uninlined_format_args
|
||||
)]
|
||||
|
||||
#[macro_use]
|
||||
mod macros;
|
||||
|
||||
use syn::{Meta, MetaList, MetaNameValue};
|
||||
|
||||
#[test]
|
||||
fn test_parse_meta_item_word() {
|
||||
let input = "hello";
|
||||
|
||||
snapshot!(input as Meta, @r###"
|
||||
Meta::Path {
|
||||
segments: [
|
||||
PathSegment {
|
||||
ident: "hello",
|
||||
},
|
||||
],
|
||||
}
|
||||
"###);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_parse_meta_name_value() {
|
||||
let input = "foo = 5";
|
||||
let (inner, meta) = (input, input);
|
||||
|
||||
snapshot!(inner as MetaNameValue, @r###"
|
||||
MetaNameValue {
|
||||
path: Path {
|
||||
segments: [
|
||||
PathSegment {
|
||||
ident: "foo",
|
||||
},
|
||||
],
|
||||
},
|
||||
value: Expr::Lit {
|
||||
lit: 5,
|
||||
},
|
||||
}
|
||||
"###);
|
||||
|
||||
snapshot!(meta as Meta, @r###"
|
||||
Meta::NameValue {
|
||||
path: Path {
|
||||
segments: [
|
||||
PathSegment {
|
||||
ident: "foo",
|
||||
},
|
||||
],
|
||||
},
|
||||
value: Expr::Lit {
|
||||
lit: 5,
|
||||
},
|
||||
}
|
||||
"###);
|
||||
|
||||
assert_eq!(meta, inner.into());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_parse_meta_item_list_lit() {
|
||||
let input = "foo(5)";
|
||||
let (inner, meta) = (input, input);
|
||||
|
||||
snapshot!(inner as MetaList, @r###"
|
||||
MetaList {
|
||||
path: Path {
|
||||
segments: [
|
||||
PathSegment {
|
||||
ident: "foo",
|
||||
},
|
||||
],
|
||||
},
|
||||
delimiter: MacroDelimiter::Paren,
|
||||
tokens: TokenStream(`5`),
|
||||
}
|
||||
"###);
|
||||
|
||||
snapshot!(meta as Meta, @r###"
|
||||
Meta::List {
|
||||
path: Path {
|
||||
segments: [
|
||||
PathSegment {
|
||||
ident: "foo",
|
||||
},
|
||||
],
|
||||
},
|
||||
delimiter: MacroDelimiter::Paren,
|
||||
tokens: TokenStream(`5`),
|
||||
}
|
||||
"###);
|
||||
|
||||
assert_eq!(meta, inner.into());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_parse_meta_item_multiple() {
|
||||
let input = "foo(word, name = 5, list(name2 = 6), word2)";
|
||||
let (inner, meta) = (input, input);
|
||||
|
||||
snapshot!(inner as MetaList, @r###"
|
||||
MetaList {
|
||||
path: Path {
|
||||
segments: [
|
||||
PathSegment {
|
||||
ident: "foo",
|
||||
},
|
||||
],
|
||||
},
|
||||
delimiter: MacroDelimiter::Paren,
|
||||
tokens: TokenStream(`word , name = 5 , list (name2 = 6) , word2`),
|
||||
}
|
||||
"###);
|
||||
|
||||
snapshot!(meta as Meta, @r###"
|
||||
Meta::List {
|
||||
path: Path {
|
||||
segments: [
|
||||
PathSegment {
|
||||
ident: "foo",
|
||||
},
|
||||
],
|
||||
},
|
||||
delimiter: MacroDelimiter::Paren,
|
||||
tokens: TokenStream(`word , name = 5 , list (name2 = 6) , word2`),
|
||||
}
|
||||
"###);
|
||||
|
||||
assert_eq!(meta, inner.into());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_parse_path() {
|
||||
let input = "::serde::Serialize";
|
||||
snapshot!(input as Meta, @r###"
|
||||
Meta::Path {
|
||||
leading_colon: Some,
|
||||
segments: [
|
||||
PathSegment {
|
||||
ident: "serde",
|
||||
},
|
||||
Token![::],
|
||||
PathSegment {
|
||||
ident: "Serialize",
|
||||
},
|
||||
],
|
||||
}
|
||||
"###);
|
||||
}
|
||||
92
vendor/syn/tests/test_parse_buffer.rs
vendored
Normal file
92
vendor/syn/tests/test_parse_buffer.rs
vendored
Normal file
@@ -0,0 +1,92 @@
|
||||
#![allow(clippy::non_ascii_literal)]
|
||||
|
||||
use proc_macro2::{Delimiter, Group, Punct, Spacing, TokenStream, TokenTree};
|
||||
use syn::parse::discouraged::Speculative as _;
|
||||
use syn::parse::{Parse, ParseStream, Parser, Result};
|
||||
use syn::{parenthesized, Token};
|
||||
|
||||
#[test]
|
||||
#[should_panic(expected = "Fork was not derived from the advancing parse stream")]
|
||||
fn smuggled_speculative_cursor_between_sources() {
|
||||
struct BreakRules;
|
||||
impl Parse for BreakRules {
|
||||
fn parse(input1: ParseStream) -> Result<Self> {
|
||||
let nested = |input2: ParseStream| {
|
||||
input1.advance_to(input2);
|
||||
Ok(Self)
|
||||
};
|
||||
nested.parse_str("")
|
||||
}
|
||||
}
|
||||
|
||||
syn::parse_str::<BreakRules>("").unwrap();
|
||||
}
|
||||
|
||||
#[test]
|
||||
#[should_panic(expected = "Fork was not derived from the advancing parse stream")]
|
||||
fn smuggled_speculative_cursor_between_brackets() {
|
||||
struct BreakRules;
|
||||
impl Parse for BreakRules {
|
||||
fn parse(input: ParseStream) -> Result<Self> {
|
||||
let a;
|
||||
let b;
|
||||
parenthesized!(a in input);
|
||||
parenthesized!(b in input);
|
||||
a.advance_to(&b);
|
||||
Ok(Self)
|
||||
}
|
||||
}
|
||||
|
||||
syn::parse_str::<BreakRules>("()()").unwrap();
|
||||
}
|
||||
|
||||
#[test]
|
||||
#[should_panic(expected = "Fork was not derived from the advancing parse stream")]
|
||||
fn smuggled_speculative_cursor_into_brackets() {
|
||||
struct BreakRules;
|
||||
impl Parse for BreakRules {
|
||||
fn parse(input: ParseStream) -> Result<Self> {
|
||||
let a;
|
||||
parenthesized!(a in input);
|
||||
input.advance_to(&a);
|
||||
Ok(Self)
|
||||
}
|
||||
}
|
||||
|
||||
syn::parse_str::<BreakRules>("()").unwrap();
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn trailing_empty_none_group() {
|
||||
fn parse(input: ParseStream) -> Result<()> {
|
||||
input.parse::<Token![+]>()?;
|
||||
|
||||
let content;
|
||||
parenthesized!(content in input);
|
||||
content.parse::<Token![+]>()?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
// `+ ( + <Ø Ø> ) <Ø <Ø Ø> Ø>`
|
||||
let tokens = TokenStream::from_iter(vec![
|
||||
TokenTree::Punct(Punct::new('+', Spacing::Alone)),
|
||||
TokenTree::Group(Group::new(
|
||||
Delimiter::Parenthesis,
|
||||
TokenStream::from_iter(vec![
|
||||
TokenTree::Punct(Punct::new('+', Spacing::Alone)),
|
||||
TokenTree::Group(Group::new(Delimiter::None, TokenStream::new())),
|
||||
]),
|
||||
)),
|
||||
TokenTree::Group(Group::new(Delimiter::None, TokenStream::new())),
|
||||
TokenTree::Group(Group::new(
|
||||
Delimiter::None,
|
||||
TokenStream::from_iter(vec![TokenTree::Group(Group::new(
|
||||
Delimiter::None,
|
||||
TokenStream::new(),
|
||||
))]),
|
||||
)),
|
||||
]);
|
||||
|
||||
parse.parse2(tokens).unwrap();
|
||||
}
|
||||
164
vendor/syn/tests/test_parse_quote.rs
vendored
Normal file
164
vendor/syn/tests/test_parse_quote.rs
vendored
Normal file
@@ -0,0 +1,164 @@
|
||||
#[macro_use]
|
||||
mod macros;
|
||||
|
||||
use syn::punctuated::Punctuated;
|
||||
use syn::{parse_quote, Attribute, Field, Lit, Pat, Stmt, Token};
|
||||
|
||||
#[test]
|
||||
fn test_attribute() {
|
||||
let attr: Attribute = parse_quote!(#[test]);
|
||||
snapshot!(attr, @r###"
|
||||
Attribute {
|
||||
style: AttrStyle::Outer,
|
||||
meta: Meta::Path {
|
||||
segments: [
|
||||
PathSegment {
|
||||
ident: "test",
|
||||
},
|
||||
],
|
||||
},
|
||||
}
|
||||
"###);
|
||||
|
||||
let attr: Attribute = parse_quote!(#![no_std]);
|
||||
snapshot!(attr, @r###"
|
||||
Attribute {
|
||||
style: AttrStyle::Inner,
|
||||
meta: Meta::Path {
|
||||
segments: [
|
||||
PathSegment {
|
||||
ident: "no_std",
|
||||
},
|
||||
],
|
||||
},
|
||||
}
|
||||
"###);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_field() {
|
||||
let field: Field = parse_quote!(pub enabled: bool);
|
||||
snapshot!(field, @r###"
|
||||
Field {
|
||||
vis: Visibility::Public,
|
||||
ident: Some("enabled"),
|
||||
colon_token: Some,
|
||||
ty: Type::Path {
|
||||
path: Path {
|
||||
segments: [
|
||||
PathSegment {
|
||||
ident: "bool",
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
}
|
||||
"###);
|
||||
|
||||
let field: Field = parse_quote!(primitive::bool);
|
||||
snapshot!(field, @r###"
|
||||
Field {
|
||||
vis: Visibility::Inherited,
|
||||
ty: Type::Path {
|
||||
path: Path {
|
||||
segments: [
|
||||
PathSegment {
|
||||
ident: "primitive",
|
||||
},
|
||||
Token![::],
|
||||
PathSegment {
|
||||
ident: "bool",
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
}
|
||||
"###);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_pat() {
|
||||
let pat: Pat = parse_quote!(Some(false) | None);
|
||||
snapshot!(&pat, @r###"
|
||||
Pat::Or {
|
||||
cases: [
|
||||
Pat::TupleStruct {
|
||||
path: Path {
|
||||
segments: [
|
||||
PathSegment {
|
||||
ident: "Some",
|
||||
},
|
||||
],
|
||||
},
|
||||
elems: [
|
||||
Pat::Lit(ExprLit {
|
||||
lit: Lit::Bool {
|
||||
value: false,
|
||||
},
|
||||
}),
|
||||
],
|
||||
},
|
||||
Token![|],
|
||||
Pat::Ident {
|
||||
ident: "None",
|
||||
},
|
||||
],
|
||||
}
|
||||
"###);
|
||||
|
||||
let boxed_pat: Box<Pat> = parse_quote!(Some(false) | None);
|
||||
assert_eq!(*boxed_pat, pat);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_punctuated() {
|
||||
let punctuated: Punctuated<Lit, Token![|]> = parse_quote!(true | true);
|
||||
snapshot!(punctuated, @r###"
|
||||
[
|
||||
Lit::Bool {
|
||||
value: true,
|
||||
},
|
||||
Token![|],
|
||||
Lit::Bool {
|
||||
value: true,
|
||||
},
|
||||
]
|
||||
"###);
|
||||
|
||||
let punctuated: Punctuated<Lit, Token![|]> = parse_quote!(true | true |);
|
||||
snapshot!(punctuated, @r###"
|
||||
[
|
||||
Lit::Bool {
|
||||
value: true,
|
||||
},
|
||||
Token![|],
|
||||
Lit::Bool {
|
||||
value: true,
|
||||
},
|
||||
Token![|],
|
||||
]
|
||||
"###);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_vec_stmt() {
|
||||
let stmts: Vec<Stmt> = parse_quote! {
|
||||
let _;
|
||||
true
|
||||
};
|
||||
snapshot!(stmts, @r###"
|
||||
[
|
||||
Stmt::Local {
|
||||
pat: Pat::Wild,
|
||||
},
|
||||
Stmt::Expr(
|
||||
Expr::Lit {
|
||||
lit: Lit::Bool {
|
||||
value: true,
|
||||
},
|
||||
},
|
||||
None,
|
||||
),
|
||||
]
|
||||
"###);
|
||||
}
|
||||
14
vendor/syn/tests/test_parse_stream.rs
vendored
Normal file
14
vendor/syn/tests/test_parse_stream.rs
vendored
Normal file
@@ -0,0 +1,14 @@
|
||||
#![allow(clippy::let_underscore_untyped)]
|
||||
|
||||
use syn::ext::IdentExt as _;
|
||||
use syn::parse::ParseStream;
|
||||
use syn::{Ident, Token};
|
||||
|
||||
#[test]
|
||||
fn test_peek() {
|
||||
let _ = |input: ParseStream| {
|
||||
let _ = input.peek(Ident);
|
||||
let _ = input.peek(Ident::peek_any);
|
||||
let _ = input.peek(Token![::]);
|
||||
};
|
||||
}
|
||||
152
vendor/syn/tests/test_pat.rs
vendored
Normal file
152
vendor/syn/tests/test_pat.rs
vendored
Normal file
@@ -0,0 +1,152 @@
|
||||
#![allow(clippy::uninlined_format_args)]
|
||||
|
||||
#[macro_use]
|
||||
mod macros;
|
||||
|
||||
use proc_macro2::{Delimiter, Group, TokenStream, TokenTree};
|
||||
use quote::{quote, ToTokens as _};
|
||||
use syn::parse::Parser;
|
||||
use syn::punctuated::Punctuated;
|
||||
use syn::{parse_quote, token, Item, Pat, PatTuple, Stmt, Token};
|
||||
|
||||
#[test]
|
||||
fn test_pat_ident() {
|
||||
match Pat::parse_single.parse2(quote!(self)).unwrap() {
|
||||
Pat::Ident(_) => (),
|
||||
value => panic!("expected PatIdent, got {:?}", value),
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_pat_path() {
|
||||
match Pat::parse_single.parse2(quote!(self::CONST)).unwrap() {
|
||||
Pat::Path(_) => (),
|
||||
value => panic!("expected PatPath, got {:?}", value),
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_leading_vert() {
|
||||
// https://github.com/rust-lang/rust/blob/1.43.0/src/test/ui/or-patterns/remove-leading-vert.rs
|
||||
|
||||
syn::parse_str::<Item>("fn f() {}").unwrap();
|
||||
syn::parse_str::<Item>("fn fun1(| A: E) {}").unwrap_err();
|
||||
syn::parse_str::<Item>("fn fun2(|| A: E) {}").unwrap_err();
|
||||
|
||||
syn::parse_str::<Stmt>("let | () = ();").unwrap_err();
|
||||
syn::parse_str::<Stmt>("let (| A): E;").unwrap();
|
||||
syn::parse_str::<Stmt>("let (|| A): (E);").unwrap_err();
|
||||
syn::parse_str::<Stmt>("let (| A,): (E,);").unwrap();
|
||||
syn::parse_str::<Stmt>("let [| A]: [E; 1];").unwrap();
|
||||
syn::parse_str::<Stmt>("let [|| A]: [E; 1];").unwrap_err();
|
||||
syn::parse_str::<Stmt>("let TS(| A): TS;").unwrap();
|
||||
syn::parse_str::<Stmt>("let TS(|| A): TS;").unwrap_err();
|
||||
syn::parse_str::<Stmt>("let NS { f: | A }: NS;").unwrap();
|
||||
syn::parse_str::<Stmt>("let NS { f: || A }: NS;").unwrap_err();
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_group() {
|
||||
let group = Group::new(Delimiter::None, quote!(Some(_)));
|
||||
let tokens = TokenStream::from_iter(vec![TokenTree::Group(group)]);
|
||||
let pat = Pat::parse_single.parse2(tokens).unwrap();
|
||||
|
||||
snapshot!(pat, @r###"
|
||||
Pat::TupleStruct {
|
||||
path: Path {
|
||||
segments: [
|
||||
PathSegment {
|
||||
ident: "Some",
|
||||
},
|
||||
],
|
||||
},
|
||||
elems: [
|
||||
Pat::Wild,
|
||||
],
|
||||
}
|
||||
"###);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_ranges() {
|
||||
Pat::parse_single.parse_str("..").unwrap();
|
||||
Pat::parse_single.parse_str("..hi").unwrap();
|
||||
Pat::parse_single.parse_str("lo..").unwrap();
|
||||
Pat::parse_single.parse_str("lo..hi").unwrap();
|
||||
|
||||
Pat::parse_single.parse_str("..=").unwrap_err();
|
||||
Pat::parse_single.parse_str("..=hi").unwrap();
|
||||
Pat::parse_single.parse_str("lo..=").unwrap_err();
|
||||
Pat::parse_single.parse_str("lo..=hi").unwrap();
|
||||
|
||||
Pat::parse_single.parse_str("...").unwrap_err();
|
||||
Pat::parse_single.parse_str("...hi").unwrap_err();
|
||||
Pat::parse_single.parse_str("lo...").unwrap_err();
|
||||
Pat::parse_single.parse_str("lo...hi").unwrap();
|
||||
|
||||
Pat::parse_single.parse_str("[lo..]").unwrap_err();
|
||||
Pat::parse_single.parse_str("[..=hi]").unwrap_err();
|
||||
Pat::parse_single.parse_str("[(lo..)]").unwrap();
|
||||
Pat::parse_single.parse_str("[(..=hi)]").unwrap();
|
||||
Pat::parse_single.parse_str("[lo..=hi]").unwrap();
|
||||
|
||||
Pat::parse_single.parse_str("[_, lo.., _]").unwrap_err();
|
||||
Pat::parse_single.parse_str("[_, ..=hi, _]").unwrap_err();
|
||||
Pat::parse_single.parse_str("[_, (lo..), _]").unwrap();
|
||||
Pat::parse_single.parse_str("[_, (..=hi), _]").unwrap();
|
||||
Pat::parse_single.parse_str("[_, lo..=hi, _]").unwrap();
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_tuple_comma() {
|
||||
let mut expr = PatTuple {
|
||||
attrs: Vec::new(),
|
||||
paren_token: token::Paren::default(),
|
||||
elems: Punctuated::new(),
|
||||
};
|
||||
snapshot!(expr.to_token_stream() as Pat, @"Pat::Tuple");
|
||||
|
||||
expr.elems.push_value(parse_quote!(_));
|
||||
// Must not parse to Pat::Paren
|
||||
snapshot!(expr.to_token_stream() as Pat, @r###"
|
||||
Pat::Tuple {
|
||||
elems: [
|
||||
Pat::Wild,
|
||||
Token![,],
|
||||
],
|
||||
}
|
||||
"###);
|
||||
|
||||
expr.elems.push_punct(<Token![,]>::default());
|
||||
snapshot!(expr.to_token_stream() as Pat, @r###"
|
||||
Pat::Tuple {
|
||||
elems: [
|
||||
Pat::Wild,
|
||||
Token![,],
|
||||
],
|
||||
}
|
||||
"###);
|
||||
|
||||
expr.elems.push_value(parse_quote!(_));
|
||||
snapshot!(expr.to_token_stream() as Pat, @r###"
|
||||
Pat::Tuple {
|
||||
elems: [
|
||||
Pat::Wild,
|
||||
Token![,],
|
||||
Pat::Wild,
|
||||
],
|
||||
}
|
||||
"###);
|
||||
|
||||
expr.elems.push_punct(<Token![,]>::default());
|
||||
snapshot!(expr.to_token_stream() as Pat, @r###"
|
||||
Pat::Tuple {
|
||||
elems: [
|
||||
Pat::Wild,
|
||||
Token![,],
|
||||
Pat::Wild,
|
||||
Token![,],
|
||||
],
|
||||
}
|
||||
"###);
|
||||
}
|
||||
130
vendor/syn/tests/test_path.rs
vendored
Normal file
130
vendor/syn/tests/test_path.rs
vendored
Normal file
@@ -0,0 +1,130 @@
|
||||
#![allow(clippy::uninlined_format_args)]
|
||||
|
||||
#[macro_use]
|
||||
mod macros;
|
||||
|
||||
use proc_macro2::{Delimiter, Group, Ident, Punct, Spacing, Span, TokenStream, TokenTree};
|
||||
use quote::{quote, ToTokens};
|
||||
use syn::{parse_quote, Expr, Type, TypePath};
|
||||
|
||||
#[test]
|
||||
fn parse_interpolated_leading_component() {
|
||||
// mimics the token stream corresponding to `$mod::rest`
|
||||
let tokens = TokenStream::from_iter(vec![
|
||||
TokenTree::Group(Group::new(Delimiter::None, quote! { first })),
|
||||
TokenTree::Punct(Punct::new(':', Spacing::Joint)),
|
||||
TokenTree::Punct(Punct::new(':', Spacing::Alone)),
|
||||
TokenTree::Ident(Ident::new("rest", Span::call_site())),
|
||||
]);
|
||||
|
||||
snapshot!(tokens.clone() as Expr, @r###"
|
||||
Expr::Path {
|
||||
path: Path {
|
||||
segments: [
|
||||
PathSegment {
|
||||
ident: "first",
|
||||
},
|
||||
Token![::],
|
||||
PathSegment {
|
||||
ident: "rest",
|
||||
},
|
||||
],
|
||||
},
|
||||
}
|
||||
"###);
|
||||
|
||||
snapshot!(tokens as Type, @r###"
|
||||
Type::Path {
|
||||
path: Path {
|
||||
segments: [
|
||||
PathSegment {
|
||||
ident: "first",
|
||||
},
|
||||
Token![::],
|
||||
PathSegment {
|
||||
ident: "rest",
|
||||
},
|
||||
],
|
||||
},
|
||||
}
|
||||
"###);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn print_incomplete_qpath() {
|
||||
// qpath with `as` token
|
||||
let mut ty: TypePath = parse_quote!(<Self as A>::Q);
|
||||
snapshot!(ty.to_token_stream(), @r###"
|
||||
TokenStream(`< Self as A > :: Q`)
|
||||
"###);
|
||||
assert!(ty.path.segments.pop().is_some());
|
||||
snapshot!(ty.to_token_stream(), @r###"
|
||||
TokenStream(`< Self as A > ::`)
|
||||
"###);
|
||||
assert!(ty.path.segments.pop().is_some());
|
||||
snapshot!(ty.to_token_stream(), @r###"
|
||||
TokenStream(`< Self >`)
|
||||
"###);
|
||||
assert!(ty.path.segments.pop().is_none());
|
||||
|
||||
// qpath without `as` token
|
||||
let mut ty: TypePath = parse_quote!(<Self>::A::B);
|
||||
snapshot!(ty.to_token_stream(), @r###"
|
||||
TokenStream(`< Self > :: A :: B`)
|
||||
"###);
|
||||
assert!(ty.path.segments.pop().is_some());
|
||||
snapshot!(ty.to_token_stream(), @r###"
|
||||
TokenStream(`< Self > :: A ::`)
|
||||
"###);
|
||||
assert!(ty.path.segments.pop().is_some());
|
||||
snapshot!(ty.to_token_stream(), @r###"
|
||||
TokenStream(`< Self > ::`)
|
||||
"###);
|
||||
assert!(ty.path.segments.pop().is_none());
|
||||
|
||||
// normal path
|
||||
let mut ty: TypePath = parse_quote!(Self::A::B);
|
||||
snapshot!(ty.to_token_stream(), @r###"
|
||||
TokenStream(`Self :: A :: B`)
|
||||
"###);
|
||||
assert!(ty.path.segments.pop().is_some());
|
||||
snapshot!(ty.to_token_stream(), @r###"
|
||||
TokenStream(`Self :: A ::`)
|
||||
"###);
|
||||
assert!(ty.path.segments.pop().is_some());
|
||||
snapshot!(ty.to_token_stream(), @r###"
|
||||
TokenStream(`Self ::`)
|
||||
"###);
|
||||
assert!(ty.path.segments.pop().is_some());
|
||||
snapshot!(ty.to_token_stream(), @r###"
|
||||
TokenStream(``)
|
||||
"###);
|
||||
assert!(ty.path.segments.pop().is_none());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn parse_parenthesized_path_arguments_with_disambiguator() {
|
||||
#[rustfmt::skip]
|
||||
let tokens = quote!(dyn FnOnce::() -> !);
|
||||
snapshot!(tokens as Type, @r###"
|
||||
Type::TraitObject {
|
||||
dyn_token: Some,
|
||||
bounds: [
|
||||
TypeParamBound::Trait(TraitBound {
|
||||
path: Path {
|
||||
segments: [
|
||||
PathSegment {
|
||||
ident: "FnOnce",
|
||||
arguments: PathArguments::Parenthesized {
|
||||
output: ReturnType::Type(
|
||||
Type::Never,
|
||||
),
|
||||
},
|
||||
},
|
||||
],
|
||||
},
|
||||
}),
|
||||
],
|
||||
}
|
||||
"###);
|
||||
}
|
||||
548
vendor/syn/tests/test_precedence.rs
vendored
Normal file
548
vendor/syn/tests/test_precedence.rs
vendored
Normal file
@@ -0,0 +1,548 @@
|
||||
//! This test does the following for every file in the rust-lang/rust repo:
|
||||
//!
|
||||
//! 1. Parse the file using syn into a syn::File.
|
||||
//! 2. Extract every syn::Expr from the file.
|
||||
//! 3. Print each expr to a string of source code.
|
||||
//! 4. Parse the source code using librustc_parse into a rustc_ast::Expr.
|
||||
//! 5. For both the syn::Expr and rustc_ast::Expr, crawl the syntax tree to
|
||||
//! insert parentheses surrounding every subexpression.
|
||||
//! 6. Serialize the fully parenthesized syn::Expr to a string of source code.
|
||||
//! 7. Parse the fully parenthesized source code using librustc_parse.
|
||||
//! 8. Compare the rustc_ast::Expr resulting from parenthesizing using rustc
|
||||
//! data structures vs syn data structures, ignoring spans. If they agree,
|
||||
//! rustc's parser and syn's parser have identical handling of expression
|
||||
//! precedence.
|
||||
|
||||
#![cfg(not(syn_disable_nightly_tests))]
|
||||
#![cfg(not(miri))]
|
||||
#![recursion_limit = "1024"]
|
||||
#![feature(rustc_private)]
|
||||
#![allow(
|
||||
clippy::blocks_in_conditions,
|
||||
clippy::doc_markdown,
|
||||
clippy::explicit_deref_methods,
|
||||
clippy::let_underscore_untyped,
|
||||
clippy::manual_assert,
|
||||
clippy::manual_let_else,
|
||||
clippy::match_like_matches_macro,
|
||||
clippy::match_wildcard_for_single_variants,
|
||||
clippy::too_many_lines,
|
||||
clippy::uninlined_format_args
|
||||
)]
|
||||
|
||||
extern crate rustc_ast;
|
||||
extern crate rustc_ast_pretty;
|
||||
extern crate rustc_data_structures;
|
||||
extern crate rustc_driver;
|
||||
extern crate rustc_span;
|
||||
extern crate smallvec;
|
||||
extern crate thin_vec;
|
||||
|
||||
use crate::common::eq::SpanlessEq;
|
||||
use crate::common::parse;
|
||||
use quote::ToTokens;
|
||||
use rustc_ast::ast;
|
||||
use rustc_ast::ptr::P;
|
||||
use rustc_ast_pretty::pprust;
|
||||
use rustc_span::edition::Edition;
|
||||
use std::fs;
|
||||
use std::path::Path;
|
||||
use std::process;
|
||||
use std::sync::atomic::{AtomicUsize, Ordering};
|
||||
|
||||
#[macro_use]
|
||||
mod macros;
|
||||
|
||||
#[allow(dead_code)]
|
||||
mod common;
|
||||
|
||||
mod repo;
|
||||
|
||||
#[test]
|
||||
fn test_rustc_precedence() {
|
||||
common::rayon_init();
|
||||
repo::clone_rust();
|
||||
let abort_after = common::abort_after();
|
||||
if abort_after == 0 {
|
||||
panic!("Skipping all precedence tests");
|
||||
}
|
||||
|
||||
let passed = AtomicUsize::new(0);
|
||||
let failed = AtomicUsize::new(0);
|
||||
|
||||
repo::for_each_rust_file(|path| {
|
||||
let content = fs::read_to_string(path).unwrap();
|
||||
|
||||
let (l_passed, l_failed) = match syn::parse_file(&content) {
|
||||
Ok(file) => {
|
||||
let edition = repo::edition(path).parse().unwrap();
|
||||
let exprs = collect_exprs(file);
|
||||
let (l_passed, l_failed) = test_expressions(path, edition, exprs);
|
||||
errorf!(
|
||||
"=== {}: {} passed | {} failed\n",
|
||||
path.display(),
|
||||
l_passed,
|
||||
l_failed,
|
||||
);
|
||||
(l_passed, l_failed)
|
||||
}
|
||||
Err(msg) => {
|
||||
errorf!("\nFAIL {} - syn failed to parse: {}\n", path.display(), msg);
|
||||
(0, 1)
|
||||
}
|
||||
};
|
||||
|
||||
passed.fetch_add(l_passed, Ordering::Relaxed);
|
||||
let prev_failed = failed.fetch_add(l_failed, Ordering::Relaxed);
|
||||
|
||||
if prev_failed + l_failed >= abort_after {
|
||||
process::exit(1);
|
||||
}
|
||||
});
|
||||
|
||||
let passed = passed.load(Ordering::Relaxed);
|
||||
let failed = failed.load(Ordering::Relaxed);
|
||||
|
||||
errorf!("\n===== Precedence Test Results =====\n");
|
||||
errorf!("{} passed | {} failed\n", passed, failed);
|
||||
|
||||
if failed > 0 {
|
||||
panic!("{} failures", failed);
|
||||
}
|
||||
}
|
||||
|
||||
fn test_expressions(path: &Path, edition: Edition, exprs: Vec<syn::Expr>) -> (usize, usize) {
|
||||
let mut passed = 0;
|
||||
let mut failed = 0;
|
||||
|
||||
rustc_span::create_session_if_not_set_then(edition, |_| {
|
||||
for expr in exprs {
|
||||
let source_code = expr.to_token_stream().to_string();
|
||||
let librustc_ast = if let Some(e) = librustc_parse_and_rewrite(&source_code) {
|
||||
e
|
||||
} else {
|
||||
failed += 1;
|
||||
errorf!(
|
||||
"\nFAIL {} - librustc failed to parse original\n",
|
||||
path.display(),
|
||||
);
|
||||
continue;
|
||||
};
|
||||
|
||||
let syn_parenthesized_code =
|
||||
syn_parenthesize(expr.clone()).to_token_stream().to_string();
|
||||
let syn_ast = if let Some(e) = parse::librustc_expr(&syn_parenthesized_code) {
|
||||
e
|
||||
} else {
|
||||
failed += 1;
|
||||
errorf!(
|
||||
"\nFAIL {} - librustc failed to parse parenthesized\n",
|
||||
path.display(),
|
||||
);
|
||||
continue;
|
||||
};
|
||||
|
||||
if !SpanlessEq::eq(&syn_ast, &librustc_ast) {
|
||||
failed += 1;
|
||||
let syn_pretty = pprust::expr_to_string(&syn_ast);
|
||||
let librustc_pretty = pprust::expr_to_string(&librustc_ast);
|
||||
errorf!(
|
||||
"\nFAIL {}\n{}\nsyn != rustc\n{}\n",
|
||||
path.display(),
|
||||
syn_pretty,
|
||||
librustc_pretty,
|
||||
);
|
||||
continue;
|
||||
}
|
||||
|
||||
let expr_invisible = make_parens_invisible(expr);
|
||||
let Ok(reparsed_expr_invisible) = syn::parse2(expr_invisible.to_token_stream()) else {
|
||||
failed += 1;
|
||||
errorf!(
|
||||
"\nFAIL {} - syn failed to parse invisible delimiters\n{}\n",
|
||||
path.display(),
|
||||
source_code,
|
||||
);
|
||||
continue;
|
||||
};
|
||||
if expr_invisible != reparsed_expr_invisible {
|
||||
failed += 1;
|
||||
errorf!(
|
||||
"\nFAIL {} - mismatch after parsing invisible delimiters\n{}\n",
|
||||
path.display(),
|
||||
source_code,
|
||||
);
|
||||
continue;
|
||||
}
|
||||
|
||||
passed += 1;
|
||||
}
|
||||
});
|
||||
|
||||
(passed, failed)
|
||||
}
|
||||
|
||||
fn librustc_parse_and_rewrite(input: &str) -> Option<P<ast::Expr>> {
|
||||
parse::librustc_expr(input).map(librustc_parenthesize)
|
||||
}
|
||||
|
||||
fn librustc_parenthesize(mut librustc_expr: P<ast::Expr>) -> P<ast::Expr> {
|
||||
use rustc_ast::ast::{
|
||||
AssocItem, AssocItemKind, Attribute, BinOpKind, Block, BorrowKind, BoundConstness, Expr,
|
||||
ExprField, ExprKind, GenericArg, GenericBound, ItemKind, Local, LocalKind, Pat, Stmt,
|
||||
StmtKind, StructExpr, StructRest, TraitBoundModifiers, Ty,
|
||||
};
|
||||
use rustc_ast::mut_visit::{
|
||||
noop_flat_map_assoc_item, noop_visit_generic_arg, noop_visit_item_kind, noop_visit_local,
|
||||
noop_visit_param_bound, MutVisitor,
|
||||
};
|
||||
use rustc_data_structures::flat_map_in_place::FlatMapInPlace;
|
||||
use rustc_span::DUMMY_SP;
|
||||
use smallvec::SmallVec;
|
||||
use std::mem;
|
||||
use std::ops::DerefMut;
|
||||
use thin_vec::ThinVec;
|
||||
|
||||
struct FullyParenthesize;
|
||||
|
||||
fn contains_let_chain(expr: &Expr) -> bool {
|
||||
match &expr.kind {
|
||||
ExprKind::Let(..) => true,
|
||||
ExprKind::Binary(binop, left, right) => {
|
||||
binop.node == BinOpKind::And
|
||||
&& (contains_let_chain(left) || contains_let_chain(right))
|
||||
}
|
||||
_ => false,
|
||||
}
|
||||
}
|
||||
|
||||
fn flat_map_field<T: MutVisitor>(mut f: ExprField, vis: &mut T) -> Vec<ExprField> {
|
||||
if f.is_shorthand {
|
||||
noop_visit_expr(&mut f.expr, vis);
|
||||
} else {
|
||||
vis.visit_expr(&mut f.expr);
|
||||
}
|
||||
vec![f]
|
||||
}
|
||||
|
||||
fn flat_map_stmt<T: MutVisitor>(stmt: Stmt, vis: &mut T) -> Vec<Stmt> {
|
||||
let kind = match stmt.kind {
|
||||
// Don't wrap toplevel expressions in statements.
|
||||
StmtKind::Expr(mut e) => {
|
||||
noop_visit_expr(&mut e, vis);
|
||||
StmtKind::Expr(e)
|
||||
}
|
||||
StmtKind::Semi(mut e) => {
|
||||
noop_visit_expr(&mut e, vis);
|
||||
StmtKind::Semi(e)
|
||||
}
|
||||
s => s,
|
||||
};
|
||||
|
||||
vec![Stmt { kind, ..stmt }]
|
||||
}
|
||||
|
||||
fn noop_visit_expr<T: MutVisitor>(e: &mut Expr, vis: &mut T) {
|
||||
use rustc_ast::mut_visit::{noop_visit_expr, visit_attrs};
|
||||
match &mut e.kind {
|
||||
ExprKind::AddrOf(BorrowKind::Raw, ..) => {}
|
||||
ExprKind::Struct(expr) => {
|
||||
let StructExpr {
|
||||
qself,
|
||||
path,
|
||||
fields,
|
||||
rest,
|
||||
} = expr.deref_mut();
|
||||
vis.visit_qself(qself);
|
||||
vis.visit_path(path);
|
||||
fields.flat_map_in_place(|field| flat_map_field(field, vis));
|
||||
if let StructRest::Base(rest) = rest {
|
||||
vis.visit_expr(rest);
|
||||
}
|
||||
vis.visit_id(&mut e.id);
|
||||
vis.visit_span(&mut e.span);
|
||||
visit_attrs(&mut e.attrs, vis);
|
||||
}
|
||||
_ => noop_visit_expr(e, vis),
|
||||
}
|
||||
}
|
||||
|
||||
impl MutVisitor for FullyParenthesize {
|
||||
fn visit_expr(&mut self, e: &mut P<Expr>) {
|
||||
noop_visit_expr(e, self);
|
||||
match e.kind {
|
||||
ExprKind::Block(..) | ExprKind::If(..) | ExprKind::Let(..) => {}
|
||||
ExprKind::Binary(..) if contains_let_chain(e) => {}
|
||||
_ => {
|
||||
let inner = mem::replace(
|
||||
e,
|
||||
P(Expr {
|
||||
id: ast::DUMMY_NODE_ID,
|
||||
kind: ExprKind::Err,
|
||||
span: DUMMY_SP,
|
||||
attrs: ThinVec::new(),
|
||||
tokens: None,
|
||||
}),
|
||||
);
|
||||
e.kind = ExprKind::Paren(inner);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn visit_generic_arg(&mut self, arg: &mut GenericArg) {
|
||||
match arg {
|
||||
// Don't wrap unbraced const generic arg as that's invalid syntax.
|
||||
GenericArg::Const(anon_const) => {
|
||||
if let ExprKind::Block(..) = &mut anon_const.value.kind {
|
||||
noop_visit_expr(&mut anon_const.value, self);
|
||||
}
|
||||
}
|
||||
_ => noop_visit_generic_arg(arg, self),
|
||||
}
|
||||
}
|
||||
|
||||
fn visit_param_bound(&mut self, bound: &mut GenericBound) {
|
||||
match bound {
|
||||
GenericBound::Trait(
|
||||
_,
|
||||
TraitBoundModifiers {
|
||||
constness: BoundConstness::Maybe(_),
|
||||
..
|
||||
},
|
||||
) => {}
|
||||
_ => noop_visit_param_bound(bound, self),
|
||||
}
|
||||
}
|
||||
|
||||
fn visit_block(&mut self, block: &mut P<Block>) {
|
||||
self.visit_id(&mut block.id);
|
||||
block
|
||||
.stmts
|
||||
.flat_map_in_place(|stmt| flat_map_stmt(stmt, self));
|
||||
self.visit_span(&mut block.span);
|
||||
}
|
||||
|
||||
fn visit_local(&mut self, local: &mut P<Local>) {
|
||||
match local.kind {
|
||||
LocalKind::InitElse(..) => {}
|
||||
_ => noop_visit_local(local, self),
|
||||
}
|
||||
}
|
||||
|
||||
fn visit_item_kind(&mut self, item: &mut ItemKind) {
|
||||
match item {
|
||||
ItemKind::Const(const_item)
|
||||
if !const_item.generics.params.is_empty()
|
||||
|| !const_item.generics.where_clause.predicates.is_empty() => {}
|
||||
_ => noop_visit_item_kind(item, self),
|
||||
}
|
||||
}
|
||||
|
||||
fn flat_map_trait_item(&mut self, item: P<AssocItem>) -> SmallVec<[P<AssocItem>; 1]> {
|
||||
match &item.kind {
|
||||
AssocItemKind::Const(const_item)
|
||||
if !const_item.generics.params.is_empty()
|
||||
|| !const_item.generics.where_clause.predicates.is_empty() =>
|
||||
{
|
||||
SmallVec::from([item])
|
||||
}
|
||||
_ => noop_flat_map_assoc_item(item, self),
|
||||
}
|
||||
}
|
||||
|
||||
fn flat_map_impl_item(&mut self, item: P<AssocItem>) -> SmallVec<[P<AssocItem>; 1]> {
|
||||
match &item.kind {
|
||||
AssocItemKind::Const(const_item)
|
||||
if !const_item.generics.params.is_empty()
|
||||
|| !const_item.generics.where_clause.predicates.is_empty() =>
|
||||
{
|
||||
SmallVec::from([item])
|
||||
}
|
||||
_ => noop_flat_map_assoc_item(item, self),
|
||||
}
|
||||
}
|
||||
|
||||
// We don't want to look at expressions that might appear in patterns or
|
||||
// types yet. We'll look into comparing those in the future. For now
|
||||
// focus on expressions appearing in other places.
|
||||
fn visit_pat(&mut self, pat: &mut P<Pat>) {
|
||||
let _ = pat;
|
||||
}
|
||||
|
||||
fn visit_ty(&mut self, ty: &mut P<Ty>) {
|
||||
let _ = ty;
|
||||
}
|
||||
|
||||
fn visit_attribute(&mut self, attr: &mut Attribute) {
|
||||
let _ = attr;
|
||||
}
|
||||
}
|
||||
|
||||
let mut folder = FullyParenthesize;
|
||||
folder.visit_expr(&mut librustc_expr);
|
||||
librustc_expr
|
||||
}
|
||||
|
||||
fn syn_parenthesize(syn_expr: syn::Expr) -> syn::Expr {
|
||||
use syn::fold::{fold_expr, fold_generic_argument, Fold};
|
||||
use syn::{token, BinOp, Expr, ExprParen, GenericArgument, MetaNameValue, Pat, Stmt, Type};
|
||||
|
||||
struct FullyParenthesize;
|
||||
|
||||
fn parenthesize(expr: Expr) -> Expr {
|
||||
Expr::Paren(ExprParen {
|
||||
attrs: Vec::new(),
|
||||
expr: Box::new(expr),
|
||||
paren_token: token::Paren::default(),
|
||||
})
|
||||
}
|
||||
|
||||
fn needs_paren(expr: &Expr) -> bool {
|
||||
match expr {
|
||||
Expr::Group(_) => unreachable!(),
|
||||
Expr::If(_) | Expr::Unsafe(_) | Expr::Block(_) | Expr::Let(_) => false,
|
||||
Expr::Binary(_) => !contains_let_chain(expr),
|
||||
_ => true,
|
||||
}
|
||||
}
|
||||
|
||||
fn contains_let_chain(expr: &Expr) -> bool {
|
||||
match expr {
|
||||
Expr::Let(_) => true,
|
||||
Expr::Binary(expr) => {
|
||||
matches!(expr.op, BinOp::And(_))
|
||||
&& (contains_let_chain(&expr.left) || contains_let_chain(&expr.right))
|
||||
}
|
||||
_ => false,
|
||||
}
|
||||
}
|
||||
|
||||
impl Fold for FullyParenthesize {
|
||||
fn fold_expr(&mut self, expr: Expr) -> Expr {
|
||||
let needs_paren = needs_paren(&expr);
|
||||
let folded = fold_expr(self, expr);
|
||||
if needs_paren {
|
||||
parenthesize(folded)
|
||||
} else {
|
||||
folded
|
||||
}
|
||||
}
|
||||
|
||||
fn fold_generic_argument(&mut self, arg: GenericArgument) -> GenericArgument {
|
||||
match arg {
|
||||
GenericArgument::Const(arg) => GenericArgument::Const(match arg {
|
||||
Expr::Block(_) => fold_expr(self, arg),
|
||||
// Don't wrap unbraced const generic arg as that's invalid syntax.
|
||||
_ => arg,
|
||||
}),
|
||||
_ => fold_generic_argument(self, arg),
|
||||
}
|
||||
}
|
||||
|
||||
fn fold_stmt(&mut self, stmt: Stmt) -> Stmt {
|
||||
match stmt {
|
||||
// Don't wrap toplevel expressions in statements.
|
||||
Stmt::Expr(Expr::Verbatim(_), Some(_)) => stmt,
|
||||
Stmt::Expr(e, semi) => Stmt::Expr(fold_expr(self, e), semi),
|
||||
s => s,
|
||||
}
|
||||
}
|
||||
|
||||
fn fold_meta_name_value(&mut self, meta: MetaNameValue) -> MetaNameValue {
|
||||
// Don't turn #[p = "..."] into #[p = ("...")].
|
||||
meta
|
||||
}
|
||||
|
||||
// We don't want to look at expressions that might appear in patterns or
|
||||
// types yet. We'll look into comparing those in the future. For now
|
||||
// focus on expressions appearing in other places.
|
||||
fn fold_pat(&mut self, pat: Pat) -> Pat {
|
||||
pat
|
||||
}
|
||||
|
||||
fn fold_type(&mut self, ty: Type) -> Type {
|
||||
ty
|
||||
}
|
||||
}
|
||||
|
||||
let mut folder = FullyParenthesize;
|
||||
folder.fold_expr(syn_expr)
|
||||
}
|
||||
|
||||
fn make_parens_invisible(expr: syn::Expr) -> syn::Expr {
|
||||
use syn::fold::{fold_expr, fold_stmt, Fold};
|
||||
use syn::{token, Expr, ExprGroup, ExprParen, Stmt};
|
||||
|
||||
struct MakeParensInvisible;
|
||||
|
||||
impl Fold for MakeParensInvisible {
|
||||
fn fold_expr(&mut self, mut expr: Expr) -> Expr {
|
||||
if let Expr::Paren(paren) = expr {
|
||||
expr = Expr::Group(ExprGroup {
|
||||
attrs: paren.attrs,
|
||||
group_token: token::Group(paren.paren_token.span.join()),
|
||||
expr: paren.expr,
|
||||
});
|
||||
}
|
||||
fold_expr(self, expr)
|
||||
}
|
||||
|
||||
fn fold_stmt(&mut self, stmt: Stmt) -> Stmt {
|
||||
if let Stmt::Expr(expr @ (Expr::Binary(_) | Expr::Cast(_)), None) = stmt {
|
||||
Stmt::Expr(
|
||||
Expr::Paren(ExprParen {
|
||||
attrs: Vec::new(),
|
||||
paren_token: token::Paren::default(),
|
||||
expr: Box::new(fold_expr(self, expr)),
|
||||
}),
|
||||
None,
|
||||
)
|
||||
} else {
|
||||
fold_stmt(self, stmt)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
let mut folder = MakeParensInvisible;
|
||||
folder.fold_expr(expr)
|
||||
}
|
||||
|
||||
/// Walk through a crate collecting all expressions we can find in it.
fn collect_exprs(file: syn::File) -> Vec<syn::Expr> {
    use syn::fold::Fold;
    use syn::punctuated::Punctuated;
    use syn::{token, ConstParam, Expr, ExprTuple, Pat, Path};

    // Folder that moves every visited expression into its Vec accumulator.
    struct CollectExprs(Vec<Expr>);
    impl Fold for CollectExprs {
        fn fold_expr(&mut self, expr: Expr) -> Expr {
            match expr {
                // Verbatim expressions are opaque token streams; skip them.
                Expr::Verbatim(_) => {}
                _ => self.0.push(expr),
            }

            // The expression was moved into the collection above, so hand the
            // caller an empty tuple `()` in its place. Note: because this
            // override never calls syn::fold::fold_expr, subexpressions of a
            // collected expression are not visited (and not collected)
            // separately.
            Expr::Tuple(ExprTuple {
                attrs: vec![],
                elems: Punctuated::new(),
                paren_token: token::Paren::default(),
            })
        }

        // Leave patterns untouched: expressions occurring inside patterns are
        // deliberately not collected.
        fn fold_pat(&mut self, pat: Pat) -> Pat {
            pat
        }

        fn fold_path(&mut self, path: Path) -> Path {
            // Skip traversing into const generic path arguments
            path
        }

        // Likewise do not descend into const generic parameters (their
        // default expressions stay uncollected).
        fn fold_const_param(&mut self, const_param: ConstParam) -> ConstParam {
            const_param
        }
    }

    let mut folder = CollectExprs(vec![]);
    folder.fold_file(file);
    folder.0
}
|
||||
321
vendor/syn/tests/test_receiver.rs
vendored
Normal file
321
vendor/syn/tests/test_receiver.rs
vendored
Normal file
@@ -0,0 +1,321 @@
|
||||
#![allow(clippy::uninlined_format_args)]
|
||||
|
||||
#[macro_use]
|
||||
mod macros;
|
||||
|
||||
use syn::{parse_quote, TraitItemFn};
|
||||
|
||||
#[test]
|
||||
fn test_by_value() {
|
||||
let TraitItemFn { sig, .. } = parse_quote! {
|
||||
fn by_value(self: Self);
|
||||
};
|
||||
snapshot!(&sig.inputs[0], @r###"
|
||||
FnArg::Receiver(Receiver {
|
||||
colon_token: Some,
|
||||
ty: Type::Path {
|
||||
path: Path {
|
||||
segments: [
|
||||
PathSegment {
|
||||
ident: "Self",
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
})
|
||||
"###);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_by_mut_value() {
|
||||
let TraitItemFn { sig, .. } = parse_quote! {
|
||||
fn by_mut(mut self: Self);
|
||||
};
|
||||
snapshot!(&sig.inputs[0], @r###"
|
||||
FnArg::Receiver(Receiver {
|
||||
mutability: Some,
|
||||
colon_token: Some,
|
||||
ty: Type::Path {
|
||||
path: Path {
|
||||
segments: [
|
||||
PathSegment {
|
||||
ident: "Self",
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
})
|
||||
"###);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_by_ref() {
|
||||
let TraitItemFn { sig, .. } = parse_quote! {
|
||||
fn by_ref(self: &Self);
|
||||
};
|
||||
snapshot!(&sig.inputs[0], @r###"
|
||||
FnArg::Receiver(Receiver {
|
||||
colon_token: Some,
|
||||
ty: Type::Reference {
|
||||
elem: Type::Path {
|
||||
path: Path {
|
||||
segments: [
|
||||
PathSegment {
|
||||
ident: "Self",
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
},
|
||||
})
|
||||
"###);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_by_box() {
|
||||
let TraitItemFn { sig, .. } = parse_quote! {
|
||||
fn by_box(self: Box<Self>);
|
||||
};
|
||||
snapshot!(&sig.inputs[0], @r###"
|
||||
FnArg::Receiver(Receiver {
|
||||
colon_token: Some,
|
||||
ty: Type::Path {
|
||||
path: Path {
|
||||
segments: [
|
||||
PathSegment {
|
||||
ident: "Box",
|
||||
arguments: PathArguments::AngleBracketed {
|
||||
args: [
|
||||
GenericArgument::Type(Type::Path {
|
||||
path: Path {
|
||||
segments: [
|
||||
PathSegment {
|
||||
ident: "Self",
|
||||
},
|
||||
],
|
||||
},
|
||||
}),
|
||||
],
|
||||
},
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
})
|
||||
"###);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_by_pin() {
|
||||
let TraitItemFn { sig, .. } = parse_quote! {
|
||||
fn by_pin(self: Pin<Self>);
|
||||
};
|
||||
snapshot!(&sig.inputs[0], @r###"
|
||||
FnArg::Receiver(Receiver {
|
||||
colon_token: Some,
|
||||
ty: Type::Path {
|
||||
path: Path {
|
||||
segments: [
|
||||
PathSegment {
|
||||
ident: "Pin",
|
||||
arguments: PathArguments::AngleBracketed {
|
||||
args: [
|
||||
GenericArgument::Type(Type::Path {
|
||||
path: Path {
|
||||
segments: [
|
||||
PathSegment {
|
||||
ident: "Self",
|
||||
},
|
||||
],
|
||||
},
|
||||
}),
|
||||
],
|
||||
},
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
})
|
||||
"###);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_explicit_type() {
|
||||
let TraitItemFn { sig, .. } = parse_quote! {
|
||||
fn explicit_type(self: Pin<MyType>);
|
||||
};
|
||||
snapshot!(&sig.inputs[0], @r###"
|
||||
FnArg::Receiver(Receiver {
|
||||
colon_token: Some,
|
||||
ty: Type::Path {
|
||||
path: Path {
|
||||
segments: [
|
||||
PathSegment {
|
||||
ident: "Pin",
|
||||
arguments: PathArguments::AngleBracketed {
|
||||
args: [
|
||||
GenericArgument::Type(Type::Path {
|
||||
path: Path {
|
||||
segments: [
|
||||
PathSegment {
|
||||
ident: "MyType",
|
||||
},
|
||||
],
|
||||
},
|
||||
}),
|
||||
],
|
||||
},
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
})
|
||||
"###);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_value_shorthand() {
|
||||
let TraitItemFn { sig, .. } = parse_quote! {
|
||||
fn value_shorthand(self);
|
||||
};
|
||||
snapshot!(&sig.inputs[0], @r###"
|
||||
FnArg::Receiver(Receiver {
|
||||
ty: Type::Path {
|
||||
path: Path {
|
||||
segments: [
|
||||
PathSegment {
|
||||
ident: "Self",
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
})
|
||||
"###);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_mut_value_shorthand() {
|
||||
let TraitItemFn { sig, .. } = parse_quote! {
|
||||
fn mut_value_shorthand(mut self);
|
||||
};
|
||||
snapshot!(&sig.inputs[0], @r###"
|
||||
FnArg::Receiver(Receiver {
|
||||
mutability: Some,
|
||||
ty: Type::Path {
|
||||
path: Path {
|
||||
segments: [
|
||||
PathSegment {
|
||||
ident: "Self",
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
})
|
||||
"###);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_ref_shorthand() {
|
||||
let TraitItemFn { sig, .. } = parse_quote! {
|
||||
fn ref_shorthand(&self);
|
||||
};
|
||||
snapshot!(&sig.inputs[0], @r###"
|
||||
FnArg::Receiver(Receiver {
|
||||
reference: Some(None),
|
||||
ty: Type::Reference {
|
||||
elem: Type::Path {
|
||||
path: Path {
|
||||
segments: [
|
||||
PathSegment {
|
||||
ident: "Self",
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
},
|
||||
})
|
||||
"###);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_ref_shorthand_with_lifetime() {
|
||||
let TraitItemFn { sig, .. } = parse_quote! {
|
||||
fn ref_shorthand(&'a self);
|
||||
};
|
||||
snapshot!(&sig.inputs[0], @r###"
|
||||
FnArg::Receiver(Receiver {
|
||||
reference: Some(Some(Lifetime {
|
||||
ident: "a",
|
||||
})),
|
||||
ty: Type::Reference {
|
||||
lifetime: Some(Lifetime {
|
||||
ident: "a",
|
||||
}),
|
||||
elem: Type::Path {
|
||||
path: Path {
|
||||
segments: [
|
||||
PathSegment {
|
||||
ident: "Self",
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
},
|
||||
})
|
||||
"###);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_ref_mut_shorthand() {
|
||||
let TraitItemFn { sig, .. } = parse_quote! {
|
||||
fn ref_mut_shorthand(&mut self);
|
||||
};
|
||||
snapshot!(&sig.inputs[0], @r###"
|
||||
FnArg::Receiver(Receiver {
|
||||
reference: Some(None),
|
||||
mutability: Some,
|
||||
ty: Type::Reference {
|
||||
mutability: Some,
|
||||
elem: Type::Path {
|
||||
path: Path {
|
||||
segments: [
|
||||
PathSegment {
|
||||
ident: "Self",
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
},
|
||||
})
|
||||
"###);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_ref_mut_shorthand_with_lifetime() {
|
||||
let TraitItemFn { sig, .. } = parse_quote! {
|
||||
fn ref_mut_shorthand(&'a mut self);
|
||||
};
|
||||
snapshot!(&sig.inputs[0], @r###"
|
||||
FnArg::Receiver(Receiver {
|
||||
reference: Some(Some(Lifetime {
|
||||
ident: "a",
|
||||
})),
|
||||
mutability: Some,
|
||||
ty: Type::Reference {
|
||||
lifetime: Some(Lifetime {
|
||||
ident: "a",
|
||||
}),
|
||||
mutability: Some,
|
||||
elem: Type::Path {
|
||||
path: Path {
|
||||
segments: [
|
||||
PathSegment {
|
||||
ident: "Self",
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
},
|
||||
})
|
||||
"###);
|
||||
}
|
||||
239
vendor/syn/tests/test_round_trip.rs
vendored
Normal file
239
vendor/syn/tests/test_round_trip.rs
vendored
Normal file
@@ -0,0 +1,239 @@
|
||||
#![cfg(not(syn_disable_nightly_tests))]
|
||||
#![cfg(not(miri))]
|
||||
#![recursion_limit = "1024"]
|
||||
#![feature(rustc_private)]
|
||||
#![allow(
|
||||
clippy::blocks_in_conditions,
|
||||
clippy::manual_assert,
|
||||
clippy::manual_let_else,
|
||||
clippy::match_like_matches_macro,
|
||||
clippy::uninlined_format_args
|
||||
)]
|
||||
|
||||
extern crate rustc_ast;
|
||||
extern crate rustc_ast_pretty;
|
||||
extern crate rustc_data_structures;
|
||||
extern crate rustc_driver;
|
||||
extern crate rustc_error_messages;
|
||||
extern crate rustc_errors;
|
||||
extern crate rustc_expand;
|
||||
extern crate rustc_parse as parse;
|
||||
extern crate rustc_session;
|
||||
extern crate rustc_span;
|
||||
|
||||
use crate::common::eq::SpanlessEq;
|
||||
use quote::quote;
|
||||
use rustc_ast::ast::{
|
||||
AngleBracketedArg, AngleBracketedArgs, Crate, GenericArg, GenericParamKind, Generics,
|
||||
WhereClause,
|
||||
};
|
||||
use rustc_ast::mut_visit::{self, MutVisitor};
|
||||
use rustc_ast_pretty::pprust;
|
||||
use rustc_error_messages::{DiagnosticMessage, LazyFallbackBundle};
|
||||
use rustc_errors::{translation, Diagnostic, PResult};
|
||||
use rustc_session::parse::ParseSess;
|
||||
use rustc_span::source_map::FilePathMapping;
|
||||
use rustc_span::FileName;
|
||||
use std::borrow::Cow;
|
||||
use std::fs;
|
||||
use std::panic;
|
||||
use std::path::Path;
|
||||
use std::process;
|
||||
use std::sync::atomic::{AtomicUsize, Ordering};
|
||||
use std::time::Instant;
|
||||
|
||||
#[macro_use]
|
||||
mod macros;
|
||||
|
||||
#[allow(dead_code)]
|
||||
mod common;
|
||||
|
||||
mod repo;
|
||||
|
||||
#[test]
fn test_round_trip() {
    common::rayon_init();
    repo::clone_rust();
    // abort_after == 0 is the configured "skip everything" sentinel.
    let abort_after = common::abort_after();
    if abort_after == 0 {
        panic!("Skipping all round_trip tests");
    }

    // Shared failure counter; atomic because for_each_rust_file may invoke
    // the callback concurrently (rayon is initialized above) — confirm in repo module.
    let failed = AtomicUsize::new(0);

    repo::for_each_rust_file(|path| test(path, &failed, abort_after));

    let failed = failed.load(Ordering::Relaxed);
    if failed > 0 {
        panic!("{} failures", failed);
    }
}
|
||||
|
||||
/// Round-trip one source file: parse it with syn, print it back via `quote!`,
/// then ask rustc's own parser whether the original and reprinted sources
/// produce equivalent ASTs. Any mismatch bumps `failed`; once `abort_after`
/// failures accumulate, the whole process exits with status 1.
fn test(path: &Path, failed: &AtomicUsize, abort_after: usize) {
    let content = fs::read_to_string(path).unwrap();

    // Time only syn's parse; the elapsed time is reported on success below.
    let start = Instant::now();
    let (krate, elapsed) = match syn::parse_file(&content) {
        Ok(krate) => (krate, start.elapsed()),
        Err(msg) => {
            errorf!("=== {}: syn failed to parse\n{:?}\n", path.display(), msg);
            let prev_failed = failed.fetch_add(1, Ordering::Relaxed);
            if prev_failed + 1 >= abort_after {
                process::exit(1);
            }
            return;
        }
    };
    // Reprint the syn AST back to source text.
    let back = quote!(#krate).to_string();
    let edition = repo::edition(path).parse().unwrap();

    // rustc's parser needs an active session set up for the file's edition.
    rustc_span::create_session_if_not_set_then(edition, |_| {
        // Inside the closure, Err(true) means "ignore / treat as pass" and
        // Err(false) means "count as failure".
        let equal = match panic::catch_unwind(|| {
            let locale_resources = rustc_driver::DEFAULT_LOCALE_RESOURCES.to_vec();
            let file_path_mapping = FilePathMapping::empty();
            let sess = ParseSess::new(locale_resources, file_path_mapping);
            let before = match librustc_parse(content, &sess) {
                Ok(before) => before,
                Err(diagnostic) => {
                    // rustc can't parse the original either — not syn's
                    // problem, so skip this file.
                    errorf!(
                        "=== {}: ignore - librustc failed to parse original content: {}\n",
                        path.display(),
                        translate_message(&diagnostic),
                    );
                    diagnostic.cancel();
                    return Err(true);
                }
            };
            let after = match librustc_parse(back, &sess) {
                Ok(after) => after,
                Err(mut diagnostic) => {
                    // syn's reprint is not valid Rust: a real failure.
                    errorf!("=== {}: librustc failed to parse", path.display());
                    diagnostic.emit();
                    return Err(false);
                }
            };
            Ok((before, after))
        }) {
            // librustc panicked on this input; ignore rather than blame syn.
            Err(_) => {
                errorf!("=== {}: ignoring librustc panic\n", path.display());
                true
            }
            Ok(Err(equal)) => equal,
            Ok(Ok((mut before, mut after))) => {
                // Canonicalize both ASTs (sort generic args, drop empty
                // where-clauses) before the spanless comparison.
                normalize(&mut before);
                normalize(&mut after);
                if SpanlessEq::eq(&before, &after) {
                    errorf!(
                        "=== {}: pass in {}ms\n",
                        path.display(),
                        elapsed.as_secs() * 1000 + u64::from(elapsed.subsec_nanos()) / 1_000_000
                    );
                    true
                } else {
                    errorf!(
                        "=== {}: FAIL\n{}\n!=\n{}\n",
                        path.display(),
                        pprust::crate_to_string_for_macros(&before),
                        pprust::crate_to_string_for_macros(&after),
                    );
                    false
                }
            }
        };
        if !equal {
            let prev_failed = failed.fetch_add(1, Ordering::Relaxed);
            if prev_failed + 1 >= abort_after {
                process::exit(1);
            }
        }
    });
}
|
||||
|
||||
fn librustc_parse(content: String, sess: &ParseSess) -> PResult<Crate> {
|
||||
static COUNTER: AtomicUsize = AtomicUsize::new(0);
|
||||
let counter = COUNTER.fetch_add(1, Ordering::Relaxed);
|
||||
let name = FileName::Custom(format!("test_round_trip{}", counter));
|
||||
parse::parse_crate_from_source_str(name, content, sess)
|
||||
}
|
||||
|
||||
/// Render a rustc diagnostic's primary message to plain text, resolving
/// Fluent message identifiers through the fallback (English) bundle.
fn translate_message(diagnostic: &Diagnostic) -> Cow<'static, str> {
    thread_local! {
        // Lazily built fallback Fluent bundle, one per thread.
        static FLUENT_BUNDLE: LazyFallbackBundle = {
            let locale_resources = rustc_driver::DEFAULT_LOCALE_RESOURCES.to_vec();
            let with_directionality_markers = false;
            rustc_error_messages::fallback_fluent_bundle(locale_resources, with_directionality_markers)
        };
    }

    // Only the first (primary) message of the diagnostic is translated.
    let message = &diagnostic.messages[0].0;
    let args = translation::to_fluent_args(diagnostic.args());

    // Plain-string messages need no Fluent lookup; identifiers do.
    let (identifier, attr) = match message {
        DiagnosticMessage::Str(msg) | DiagnosticMessage::Eager(msg) => return msg.clone(),
        DiagnosticMessage::FluentIdentifier(identifier, attr) => (identifier, attr),
    };

    FLUENT_BUNDLE.with(|fluent_bundle| {
        let message = fluent_bundle
            .get_message(identifier)
            .expect("missing diagnostic in fluent bundle");
        // The text lives either in a named attribute or in the message's
        // top-level value.
        let value = match attr {
            Some(attr) => message
                .get_attribute(attr)
                .expect("missing attribute in fluent message")
                .value(),
            None => message.value().expect("missing value in fluent message"),
        };

        let mut err = Vec::new();
        let translated = fluent_bundle.format_pattern(value, Some(&args), &mut err);
        // Any formatting error would mean the args don't match the pattern —
        // that's a bug in the bundle, not in the input under test.
        assert!(err.is_empty());
        Cow::Owned(translated.into_owned())
    })
}
|
||||
|
||||
/// Mutate a rustc AST into a canonical form so that semantically equivalent
/// crates compare equal: generic arguments/parameters are sorted into their
/// canonical group order and empty `where` clauses are erased.
fn normalize(krate: &mut Crate) {
    struct NormalizeVisitor;

    impl MutVisitor for NormalizeVisitor {
        fn visit_angle_bracketed_parameter_data(&mut self, e: &mut AngleBracketedArgs) {
            // Sort key: lifetimes < types/consts < associated constraints.
            // The sort is stable, so order within each group is preserved.
            #[derive(Ord, PartialOrd, Eq, PartialEq)]
            enum Group {
                Lifetimes,
                TypesAndConsts,
                Constraints,
            }
            e.args.sort_by_key(|arg| match arg {
                AngleBracketedArg::Arg(arg) => match arg {
                    GenericArg::Lifetime(_) => Group::Lifetimes,
                    GenericArg::Type(_) | GenericArg::Const(_) => Group::TypesAndConsts,
                },
                AngleBracketedArg::Constraint(_) => Group::Constraints,
            });
            mut_visit::noop_visit_angle_bracketed_parameter_data(e, self);
        }

        fn visit_generics(&mut self, e: &mut Generics) {
            // Same idea for declared generic parameters: lifetimes first.
            #[derive(Ord, PartialOrd, Eq, PartialEq)]
            enum Group {
                Lifetimes,
                TypesAndConsts,
            }
            e.params.sort_by_key(|param| match param.kind {
                GenericParamKind::Lifetime => Group::Lifetimes,
                GenericParamKind::Type { .. } | GenericParamKind::Const { .. } => {
                    Group::TypesAndConsts
                }
            });
            mut_visit::noop_visit_generics(e, self);
        }

        fn visit_where_clause(&mut self, e: &mut WhereClause) {
            // `where` with no predicates is equivalent to no clause at all.
            if e.predicates.is_empty() {
                e.has_where_token = false;
            }
        }
    }

    NormalizeVisitor.visit_crate(krate);
}
|
||||
67
vendor/syn/tests/test_shebang.rs
vendored
Normal file
67
vendor/syn/tests/test_shebang.rs
vendored
Normal file
@@ -0,0 +1,67 @@
|
||||
#![allow(clippy::uninlined_format_args)]
|
||||
|
||||
#[macro_use]
|
||||
mod macros;
|
||||
|
||||
#[test]
|
||||
fn test_basic() {
|
||||
let content = "#!/usr/bin/env rustx\nfn main() {}";
|
||||
let file = syn::parse_file(content).unwrap();
|
||||
snapshot!(file, @r###"
|
||||
File {
|
||||
shebang: Some("#!/usr/bin/env rustx"),
|
||||
items: [
|
||||
Item::Fn {
|
||||
vis: Visibility::Inherited,
|
||||
sig: Signature {
|
||||
ident: "main",
|
||||
generics: Generics,
|
||||
output: ReturnType::Default,
|
||||
},
|
||||
block: Block {
|
||||
stmts: [],
|
||||
},
|
||||
},
|
||||
],
|
||||
}
|
||||
"###);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_comment() {
|
||||
let content = "#!//am/i/a/comment\n[allow(dead_code)] fn main() {}";
|
||||
let file = syn::parse_file(content).unwrap();
|
||||
snapshot!(file, @r###"
|
||||
File {
|
||||
attrs: [
|
||||
Attribute {
|
||||
style: AttrStyle::Inner,
|
||||
meta: Meta::List {
|
||||
path: Path {
|
||||
segments: [
|
||||
PathSegment {
|
||||
ident: "allow",
|
||||
},
|
||||
],
|
||||
},
|
||||
delimiter: MacroDelimiter::Paren,
|
||||
tokens: TokenStream(`dead_code`),
|
||||
},
|
||||
},
|
||||
],
|
||||
items: [
|
||||
Item::Fn {
|
||||
vis: Visibility::Inherited,
|
||||
sig: Signature {
|
||||
ident: "main",
|
||||
generics: Generics,
|
||||
output: ReturnType::Default,
|
||||
},
|
||||
block: Block {
|
||||
stmts: [],
|
||||
},
|
||||
},
|
||||
],
|
||||
}
|
||||
"###);
|
||||
}
|
||||
45
vendor/syn/tests/test_should_parse.rs
vendored
Normal file
45
vendor/syn/tests/test_should_parse.rs
vendored
Normal file
@@ -0,0 +1,45 @@
|
||||
// Each invocation expands to a #[test] that merely checks the braced tokens
// parse as a file via `syn::parse_file`; nothing is asserted about the
// resulting AST, and the inputs need not be semantically valid Rust.
macro_rules! should_parse {
    ($name:ident, { $($in:tt)* }) => {
        #[test]
        fn $name() {
            // Make sure we can parse the file!
            syn::parse_file(stringify!($($in)*)).unwrap();
        }
    }
}

should_parse!(generic_associated_type, {
    impl Foo {
        type Item = &'a i32;
        fn foo<'a>(&'a self) -> Self::Item<'a> {}
    }
});

// rustfmt would reformat the const-generic arguments; keep them verbatim.
#[rustfmt::skip]
should_parse!(const_generics_use, {
    type X = Foo<5>;
    type Y = Foo<"foo">;
    type Z = Foo<X>;
    type W = Foo<{ X + 10 }>;
});

should_parse!(trailing_plus_type, {
    type A = Box<Foo>;
    type A = Box<Foo + 'a>;
    type A = Box<'a + Foo>;
});

should_parse!(generic_associated_type_where, {
    trait Foo {
        type Item;
        fn foo<T>(&self, t: T) -> Self::Item<T>;
    }
});

should_parse!(match_with_block_expr, {
    fn main() {
        match false {
            _ => {}.a(),
        }
    }
});
|
||||
36
vendor/syn/tests/test_size.rs
vendored
Normal file
36
vendor/syn/tests/test_size.rs
vendored
Normal file
@@ -0,0 +1,36 @@
|
||||
// Assumes proc-macro2's "span-locations" feature is off.
|
||||
|
||||
#![cfg(target_pointer_width = "64")]
|
||||
|
||||
use std::mem;
|
||||
use syn::{Expr, Item, Lit, Pat, Type};
|
||||
|
||||
// Each test pins the current size in bytes of a core syn AST type (64-bit
// targets only, per the file's cfg). The `rustversion` gates appear to skip
// the check on toolchains older than the date the expected size last
// changed — confirm against syn's changelog.
#[rustversion::attr(before(2022-11-24), ignore)]
#[test]
fn test_expr_size() {
    assert_eq!(mem::size_of::<Expr>(), 176);
}

#[rustversion::attr(before(2022-09-09), ignore)]
#[test]
fn test_item_size() {
    assert_eq!(mem::size_of::<Item>(), 360);
}

#[rustversion::attr(before(2023-04-29), ignore)]
#[test]
fn test_type_size() {
    assert_eq!(mem::size_of::<Type>(), 232);
}

#[rustversion::attr(before(2023-04-29), ignore)]
#[test]
fn test_pat_size() {
    assert_eq!(mem::size_of::<Pat>(), 184);
}

#[rustversion::attr(before(2023-12-20), ignore)]
#[test]
fn test_lit_size() {
    assert_eq!(mem::size_of::<Lit>(), 24);
}
|
||||
322
vendor/syn/tests/test_stmt.rs
vendored
Normal file
322
vendor/syn/tests/test_stmt.rs
vendored
Normal file
@@ -0,0 +1,322 @@
|
||||
#![allow(
|
||||
clippy::assertions_on_result_states,
|
||||
clippy::non_ascii_literal,
|
||||
clippy::uninlined_format_args
|
||||
)]
|
||||
|
||||
#[macro_use]
|
||||
mod macros;
|
||||
|
||||
use proc_macro2::{Delimiter, Group, Ident, Span, TokenStream, TokenTree};
|
||||
use quote::{quote, ToTokens as _};
|
||||
use syn::parse::Parser as _;
|
||||
use syn::{Block, Stmt};
|
||||
|
||||
#[test]
|
||||
fn test_raw_operator() {
|
||||
let stmt = syn::parse_str::<Stmt>("let _ = &raw const x;").unwrap();
|
||||
|
||||
snapshot!(stmt, @r###"
|
||||
Stmt::Local {
|
||||
pat: Pat::Wild,
|
||||
init: Some(LocalInit {
|
||||
expr: Expr::Verbatim(`& raw const x`),
|
||||
}),
|
||||
}
|
||||
"###);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_raw_variable() {
|
||||
let stmt = syn::parse_str::<Stmt>("let _ = &raw;").unwrap();
|
||||
|
||||
snapshot!(stmt, @r###"
|
||||
Stmt::Local {
|
||||
pat: Pat::Wild,
|
||||
init: Some(LocalInit {
|
||||
expr: Expr::Reference {
|
||||
expr: Expr::Path {
|
||||
path: Path {
|
||||
segments: [
|
||||
PathSegment {
|
||||
ident: "raw",
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
},
|
||||
}),
|
||||
}
|
||||
"###);
|
||||
}
|
||||
|
||||
#[test]
fn test_raw_invalid() {
    // `&raw` must be followed by `const` or `mut` (see test_raw_operator);
    // a bare place expression after `&raw` is rejected.
    assert!(syn::parse_str::<Stmt>("let _ = &raw x;").is_err());
}
|
||||
|
||||
#[test]
|
||||
fn test_none_group() {
|
||||
// <Ø async fn f() {} Ø>
|
||||
let tokens = TokenStream::from_iter(vec![TokenTree::Group(Group::new(
|
||||
Delimiter::None,
|
||||
TokenStream::from_iter(vec![
|
||||
TokenTree::Ident(Ident::new("async", Span::call_site())),
|
||||
TokenTree::Ident(Ident::new("fn", Span::call_site())),
|
||||
TokenTree::Ident(Ident::new("f", Span::call_site())),
|
||||
TokenTree::Group(Group::new(Delimiter::Parenthesis, TokenStream::new())),
|
||||
TokenTree::Group(Group::new(Delimiter::Brace, TokenStream::new())),
|
||||
]),
|
||||
))]);
|
||||
snapshot!(tokens as Stmt, @r###"
|
||||
Stmt::Item(Item::Fn {
|
||||
vis: Visibility::Inherited,
|
||||
sig: Signature {
|
||||
asyncness: Some,
|
||||
ident: "f",
|
||||
generics: Generics,
|
||||
output: ReturnType::Default,
|
||||
},
|
||||
block: Block {
|
||||
stmts: [],
|
||||
},
|
||||
})
|
||||
"###);
|
||||
|
||||
let tokens = Group::new(Delimiter::None, quote!(let None = None)).to_token_stream();
|
||||
let stmts = Block::parse_within.parse2(tokens).unwrap();
|
||||
snapshot!(stmts, @r###"
|
||||
[
|
||||
Stmt::Expr(
|
||||
Expr::Group {
|
||||
expr: Expr::Let {
|
||||
pat: Pat::Ident {
|
||||
ident: "None",
|
||||
},
|
||||
expr: Expr::Path {
|
||||
path: Path {
|
||||
segments: [
|
||||
PathSegment {
|
||||
ident: "None",
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
None,
|
||||
),
|
||||
]
|
||||
"###);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_let_dot_dot() {
|
||||
let tokens = quote! {
|
||||
let .. = 10;
|
||||
};
|
||||
|
||||
snapshot!(tokens as Stmt, @r###"
|
||||
Stmt::Local {
|
||||
pat: Pat::Rest,
|
||||
init: Some(LocalInit {
|
||||
expr: Expr::Lit {
|
||||
lit: 10,
|
||||
},
|
||||
}),
|
||||
}
|
||||
"###);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_let_else() {
|
||||
let tokens = quote! {
|
||||
let Some(x) = None else { return 0; };
|
||||
};
|
||||
|
||||
snapshot!(tokens as Stmt, @r###"
|
||||
Stmt::Local {
|
||||
pat: Pat::TupleStruct {
|
||||
path: Path {
|
||||
segments: [
|
||||
PathSegment {
|
||||
ident: "Some",
|
||||
},
|
||||
],
|
||||
},
|
||||
elems: [
|
||||
Pat::Ident {
|
||||
ident: "x",
|
||||
},
|
||||
],
|
||||
},
|
||||
init: Some(LocalInit {
|
||||
expr: Expr::Path {
|
||||
path: Path {
|
||||
segments: [
|
||||
PathSegment {
|
||||
ident: "None",
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
diverge: Some(Expr::Block {
|
||||
block: Block {
|
||||
stmts: [
|
||||
Stmt::Expr(
|
||||
Expr::Return {
|
||||
expr: Some(Expr::Lit {
|
||||
lit: 0,
|
||||
}),
|
||||
},
|
||||
Some,
|
||||
),
|
||||
],
|
||||
},
|
||||
}),
|
||||
}),
|
||||
}
|
||||
"###);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_macros() {
|
||||
let tokens = quote! {
|
||||
fn main() {
|
||||
macro_rules! mac {}
|
||||
thread_local! { static FOO }
|
||||
println!("");
|
||||
vec![]
|
||||
}
|
||||
};
|
||||
|
||||
snapshot!(tokens as Stmt, @r###"
|
||||
Stmt::Item(Item::Fn {
|
||||
vis: Visibility::Inherited,
|
||||
sig: Signature {
|
||||
ident: "main",
|
||||
generics: Generics,
|
||||
output: ReturnType::Default,
|
||||
},
|
||||
block: Block {
|
||||
stmts: [
|
||||
Stmt::Item(Item::Macro {
|
||||
ident: Some("mac"),
|
||||
mac: Macro {
|
||||
path: Path {
|
||||
segments: [
|
||||
PathSegment {
|
||||
ident: "macro_rules",
|
||||
},
|
||||
],
|
||||
},
|
||||
delimiter: MacroDelimiter::Brace,
|
||||
tokens: TokenStream(``),
|
||||
},
|
||||
}),
|
||||
Stmt::Macro {
|
||||
mac: Macro {
|
||||
path: Path {
|
||||
segments: [
|
||||
PathSegment {
|
||||
ident: "thread_local",
|
||||
},
|
||||
],
|
||||
},
|
||||
delimiter: MacroDelimiter::Brace,
|
||||
tokens: TokenStream(`static FOO`),
|
||||
},
|
||||
},
|
||||
Stmt::Macro {
|
||||
mac: Macro {
|
||||
path: Path {
|
||||
segments: [
|
||||
PathSegment {
|
||||
ident: "println",
|
||||
},
|
||||
],
|
||||
},
|
||||
delimiter: MacroDelimiter::Paren,
|
||||
tokens: TokenStream(`""`),
|
||||
},
|
||||
semi_token: Some,
|
||||
},
|
||||
Stmt::Expr(
|
||||
Expr::Macro {
|
||||
mac: Macro {
|
||||
path: Path {
|
||||
segments: [
|
||||
PathSegment {
|
||||
ident: "vec",
|
||||
},
|
||||
],
|
||||
},
|
||||
delimiter: MacroDelimiter::Bracket,
|
||||
tokens: TokenStream(``),
|
||||
},
|
||||
},
|
||||
None,
|
||||
),
|
||||
],
|
||||
},
|
||||
})
|
||||
"###);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_early_parse_loop() {
|
||||
// The following is an Expr::Loop followed by Expr::Tuple. It is not an
|
||||
// Expr::Call.
|
||||
let tokens = quote! {
|
||||
loop {}
|
||||
()
|
||||
};
|
||||
|
||||
let stmts = Block::parse_within.parse2(tokens).unwrap();
|
||||
|
||||
snapshot!(stmts, @r###"
|
||||
[
|
||||
Stmt::Expr(
|
||||
Expr::Loop {
|
||||
body: Block {
|
||||
stmts: [],
|
||||
},
|
||||
},
|
||||
None,
|
||||
),
|
||||
Stmt::Expr(
|
||||
Expr::Tuple,
|
||||
None,
|
||||
),
|
||||
]
|
||||
"###);
|
||||
|
||||
let tokens = quote! {
|
||||
'a: loop {}
|
||||
()
|
||||
};
|
||||
|
||||
let stmts = Block::parse_within.parse2(tokens).unwrap();
|
||||
|
||||
snapshot!(stmts, @r###"
|
||||
[
|
||||
Stmt::Expr(
|
||||
Expr::Loop {
|
||||
label: Some(Label {
|
||||
name: Lifetime {
|
||||
ident: "a",
|
||||
},
|
||||
}),
|
||||
body: Block {
|
||||
stmts: [],
|
||||
},
|
||||
},
|
||||
None,
|
||||
),
|
||||
Stmt::Expr(
|
||||
Expr::Tuple,
|
||||
None,
|
||||
),
|
||||
]
|
||||
"###);
|
||||
}
|
||||
32
vendor/syn/tests/test_token_trees.rs
vendored
Normal file
32
vendor/syn/tests/test_token_trees.rs
vendored
Normal file
@@ -0,0 +1,32 @@
|
||||
#![allow(clippy::uninlined_format_args)]
|
||||
|
||||
#[macro_use]
|
||||
mod macros;
|
||||
|
||||
use proc_macro2::TokenStream;
|
||||
use quote::quote;
|
||||
use syn::Lit;
|
||||
|
||||
#[test]
|
||||
fn test_struct() {
|
||||
let input = "
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct Item {
|
||||
pub ident: Ident,
|
||||
pub attrs: Vec<Attribute>,
|
||||
}
|
||||
";
|
||||
|
||||
snapshot!(input as TokenStream, @r###"
|
||||
TokenStream(
|
||||
`# [derive (Debug , Clone)] pub struct Item { pub ident : Ident , pub attrs : Vec < Attribute >, }`,
|
||||
)
|
||||
"###);
|
||||
}
|
||||
|
||||
#[test]
fn test_literal_mangling() {
    // The literal's original spelling — including the `_` digit separator —
    // must survive a parse/re-print round trip unmodified.
    let code = "0_4";
    let parsed: Lit = syn::parse_str(code).unwrap();
    assert_eq!(code, quote!(#parsed).to_string());
}
|
||||
397
vendor/syn/tests/test_ty.rs
vendored
Normal file
397
vendor/syn/tests/test_ty.rs
vendored
Normal file
@@ -0,0 +1,397 @@
|
||||
#![allow(clippy::uninlined_format_args)]
|
||||
|
||||
#[macro_use]
|
||||
mod macros;
|
||||
|
||||
use proc_macro2::{Delimiter, Group, Ident, Punct, Spacing, Span, TokenStream, TokenTree};
|
||||
use quote::{quote, ToTokens as _};
|
||||
use syn::punctuated::Punctuated;
|
||||
use syn::{parse_quote, token, Token, Type, TypeTuple};
|
||||
|
||||
#[test]
fn test_mut_self() {
    // Accepted in fn-pointer types: a bare `mut self` argument, with a
    // trailing comma, and with an explicit type annotation.
    syn::parse_str::<Type>("fn(mut self)").unwrap();
    syn::parse_str::<Type>("fn(mut self,)").unwrap();
    syn::parse_str::<Type>("fn(mut self: ())").unwrap();
    // Rejected: `...` as the annotated type, `mut self` itself as a type,
    // and `mut self` used as the head of a path.
    syn::parse_str::<Type>("fn(mut self: ...)").unwrap_err();
    syn::parse_str::<Type>("fn(mut self: mut self)").unwrap_err();
    syn::parse_str::<Type>("fn(mut self::T)").unwrap_err();
}
|
||||
|
||||
#[test]
|
||||
fn test_macro_variable_type() {
|
||||
// mimics the token stream corresponding to `$ty<T>`
|
||||
let tokens = TokenStream::from_iter(vec![
|
||||
TokenTree::Group(Group::new(Delimiter::None, quote! { ty })),
|
||||
TokenTree::Punct(Punct::new('<', Spacing::Alone)),
|
||||
TokenTree::Ident(Ident::new("T", Span::call_site())),
|
||||
TokenTree::Punct(Punct::new('>', Spacing::Alone)),
|
||||
]);
|
||||
|
||||
snapshot!(tokens as Type, @r###"
|
||||
Type::Path {
|
||||
path: Path {
|
||||
segments: [
|
||||
PathSegment {
|
||||
ident: "ty",
|
||||
arguments: PathArguments::AngleBracketed {
|
||||
args: [
|
||||
GenericArgument::Type(Type::Path {
|
||||
path: Path {
|
||||
segments: [
|
||||
PathSegment {
|
||||
ident: "T",
|
||||
},
|
||||
],
|
||||
},
|
||||
}),
|
||||
],
|
||||
},
|
||||
},
|
||||
],
|
||||
},
|
||||
}
|
||||
"###);
|
||||
|
||||
// mimics the token stream corresponding to `$ty::<T>`
|
||||
let tokens = TokenStream::from_iter(vec![
|
||||
TokenTree::Group(Group::new(Delimiter::None, quote! { ty })),
|
||||
TokenTree::Punct(Punct::new(':', Spacing::Joint)),
|
||||
TokenTree::Punct(Punct::new(':', Spacing::Alone)),
|
||||
TokenTree::Punct(Punct::new('<', Spacing::Alone)),
|
||||
TokenTree::Ident(Ident::new("T", Span::call_site())),
|
||||
TokenTree::Punct(Punct::new('>', Spacing::Alone)),
|
||||
]);
|
||||
|
||||
snapshot!(tokens as Type, @r###"
|
||||
Type::Path {
|
||||
path: Path {
|
||||
segments: [
|
||||
PathSegment {
|
||||
ident: "ty",
|
||||
arguments: PathArguments::AngleBracketed {
|
||||
colon2_token: Some,
|
||||
args: [
|
||||
GenericArgument::Type(Type::Path {
|
||||
path: Path {
|
||||
segments: [
|
||||
PathSegment {
|
||||
ident: "T",
|
||||
},
|
||||
],
|
||||
},
|
||||
}),
|
||||
],
|
||||
},
|
||||
},
|
||||
],
|
||||
},
|
||||
}
|
||||
"###);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_group_angle_brackets() {
|
||||
// mimics the token stream corresponding to `Option<$ty>`
|
||||
let tokens = TokenStream::from_iter(vec![
|
||||
TokenTree::Ident(Ident::new("Option", Span::call_site())),
|
||||
TokenTree::Punct(Punct::new('<', Spacing::Alone)),
|
||||
TokenTree::Group(Group::new(Delimiter::None, quote! { Vec<u8> })),
|
||||
TokenTree::Punct(Punct::new('>', Spacing::Alone)),
|
||||
]);
|
||||
|
||||
snapshot!(tokens as Type, @r###"
|
||||
Type::Path {
|
||||
path: Path {
|
||||
segments: [
|
||||
PathSegment {
|
||||
ident: "Option",
|
||||
arguments: PathArguments::AngleBracketed {
|
||||
args: [
|
||||
GenericArgument::Type(Type::Group {
|
||||
elem: Type::Path {
|
||||
path: Path {
|
||||
segments: [
|
||||
PathSegment {
|
||||
ident: "Vec",
|
||||
arguments: PathArguments::AngleBracketed {
|
||||
args: [
|
||||
GenericArgument::Type(Type::Path {
|
||||
path: Path {
|
||||
segments: [
|
||||
PathSegment {
|
||||
ident: "u8",
|
||||
},
|
||||
],
|
||||
},
|
||||
}),
|
||||
],
|
||||
},
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
}),
|
||||
],
|
||||
},
|
||||
},
|
||||
],
|
||||
},
|
||||
}
|
||||
"###);
|
||||
}
|
||||
|
||||
#[test]
fn test_group_colons() {
    // mimics the token stream corresponding to `$ty::Item`
    // where `$ty` expanded to `Vec<u8>` — the macro substitution arrives as a
    // None-delimited Group rather than plain tokens.
    let tokens = TokenStream::from_iter(vec![
        TokenTree::Group(Group::new(Delimiter::None, quote! { Vec<u8> })),
        TokenTree::Punct(Punct::new(':', Spacing::Joint)),
        TokenTree::Punct(Punct::new(':', Spacing::Alone)),
        TokenTree::Ident(Ident::new("Item", Span::call_site())),
    ]);

    // The path-shaped group contents merge into a single Type::Path with
    // `Item` appended as a further segment.
    snapshot!(tokens as Type, @r###"
    Type::Path {
        path: Path {
            segments: [
                PathSegment {
                    ident: "Vec",
                    arguments: PathArguments::AngleBracketed {
                        args: [
                            GenericArgument::Type(Type::Path {
                                path: Path {
                                    segments: [
                                        PathSegment {
                                            ident: "u8",
                                        },
                                    ],
                                },
                            }),
                        ],
                    },
                },
                Token![::],
                PathSegment {
                    ident: "Item",
                },
            ],
        },
    }
    "###);

    // Same shape but `$ty` expanded to `[T]`, which is not a path; the slice
    // type must instead become the qualified-self of the resulting path.
    let tokens = TokenStream::from_iter(vec![
        TokenTree::Group(Group::new(Delimiter::None, quote! { [T] })),
        TokenTree::Punct(Punct::new(':', Spacing::Joint)),
        TokenTree::Punct(Punct::new(':', Spacing::Alone)),
        TokenTree::Ident(Ident::new("Element", Span::call_site())),
    ]);

    snapshot!(tokens as Type, @r###"
    Type::Path {
        qself: Some(QSelf {
            ty: Type::Slice {
                elem: Type::Path {
                    path: Path {
                        segments: [
                            PathSegment {
                                ident: "T",
                            },
                        ],
                    },
                },
            },
            position: 0,
        }),
        path: Path {
            leading_colon: Some,
            segments: [
                PathSegment {
                    ident: "Element",
                },
            ],
        },
    }
    "###);
}
|
||||
|
||||
#[test]
fn test_trait_object() {
    // `dyn` with a higher-ranked trait bound plus a lifetime bound.
    let tokens = quote!(dyn for<'a> Trait<'a> + 'static);
    snapshot!(tokens as Type, @r###"
    Type::TraitObject {
        dyn_token: Some,
        bounds: [
            TypeParamBound::Trait(TraitBound {
                lifetimes: Some(BoundLifetimes {
                    lifetimes: [
                        GenericParam::Lifetime(LifetimeParam {
                            lifetime: Lifetime {
                                ident: "a",
                            },
                        }),
                    ],
                }),
                path: Path {
                    segments: [
                        PathSegment {
                            ident: "Trait",
                            arguments: PathArguments::AngleBracketed {
                                args: [
                                    GenericArgument::Lifetime(Lifetime {
                                        ident: "a",
                                    }),
                                ],
                            },
                        },
                    ],
                },
            }),
            Token![+],
            TypeParamBound::Lifetime {
                ident: "static",
            },
        ],
    }
    "###);

    // A lifetime is permitted as the first bound after `dyn`.
    let tokens = quote!(dyn 'a + Trait);
    snapshot!(tokens as Type, @r###"
    Type::TraitObject {
        dyn_token: Some,
        bounds: [
            TypeParamBound::Lifetime {
                ident: "a",
            },
            Token![+],
            TypeParamBound::Trait(TraitBound {
                path: Path {
                    segments: [
                        PathSegment {
                            ident: "Trait",
                        },
                    ],
                },
            }),
        ],
    }
    "###);

    // None of the following are valid Rust types.
    syn::parse_str::<Type>("for<'a> dyn Trait<'a>").unwrap_err();
    syn::parse_str::<Type>("dyn for<'a> 'a + Trait").unwrap_err();
}
|
||||
|
||||
#[test]
fn test_trailing_plus() {
    // A dangling `+` after the last bound must be accepted and preserved,
    // for `impl Trait`, `dyn Trait`, and bare trait-object syntax alike.
    #[rustfmt::skip]
    let tokens = quote!(impl Trait +);
    snapshot!(tokens as Type, @r###"
    Type::ImplTrait {
        bounds: [
            TypeParamBound::Trait(TraitBound {
                path: Path {
                    segments: [
                        PathSegment {
                            ident: "Trait",
                        },
                    ],
                },
            }),
            Token![+],
        ],
    }
    "###);

    #[rustfmt::skip]
    let tokens = quote!(dyn Trait +);
    snapshot!(tokens as Type, @r###"
    Type::TraitObject {
        dyn_token: Some,
        bounds: [
            TypeParamBound::Trait(TraitBound {
                path: Path {
                    segments: [
                        PathSegment {
                            ident: "Trait",
                        },
                    ],
                },
            }),
            Token![+],
        ],
    }
    "###);

    // Without `dyn`, a trailing `+` still forces the trait-object parse
    // (note: no dyn_token in the snapshot).
    #[rustfmt::skip]
    let tokens = quote!(Trait +);
    snapshot!(tokens as Type, @r###"
    Type::TraitObject {
        bounds: [
            TypeParamBound::Trait(TraitBound {
                path: Path {
                    segments: [
                        PathSegment {
                            ident: "Trait",
                        },
                    ],
                },
            }),
            Token![+],
        ],
    }
    "###);
}
|
||||
|
||||
#[test]
fn test_tuple_comma() {
    // Build a tuple type incrementally and check the printed form round-trips
    // as a tuple at every step (never collapsing to a parenthesized type).
    let mut expr = TypeTuple {
        paren_token: token::Paren::default(),
        elems: Punctuated::new(),
    };
    snapshot!(expr.to_token_stream() as Type, @"Type::Tuple");

    expr.elems.push_value(parse_quote!(_));
    // Must not parse to Type::Paren
    snapshot!(expr.to_token_stream() as Type, @r###"
    Type::Tuple {
        elems: [
            Type::Infer,
            Token![,],
        ],
    }
    "###);

    // Adding the trailing comma explicitly produces the same printed form —
    // a one-element tuple always prints `(_,)`.
    expr.elems.push_punct(<Token![,]>::default());
    snapshot!(expr.to_token_stream() as Type, @r###"
    Type::Tuple {
        elems: [
            Type::Infer,
            Token![,],
        ],
    }
    "###);

    // Two elements: no trailing comma needed to disambiguate.
    expr.elems.push_value(parse_quote!(_));
    snapshot!(expr.to_token_stream() as Type, @r###"
    Type::Tuple {
        elems: [
            Type::Infer,
            Token![,],
            Type::Infer,
        ],
    }
    "###);

    // An explicit trailing comma on a two-element tuple is preserved.
    expr.elems.push_punct(<Token![,]>::default());
    snapshot!(expr.to_token_stream() as Type, @r###"
    Type::Tuple {
        elems: [
            Type::Infer,
            Token![,],
            Type::Infer,
            Token![,],
        ],
    }
    "###);
}
|
||||
144
vendor/syn/tests/test_visibility.rs
vendored
Normal file
144
vendor/syn/tests/test_visibility.rs
vendored
Normal file
@@ -0,0 +1,144 @@
|
||||
#![allow(clippy::uninlined_format_args)]
|
||||
|
||||
#[macro_use]
|
||||
mod macros;
|
||||
|
||||
use proc_macro2::{Delimiter, Group, Ident, Punct, Spacing, Span, TokenStream, TokenTree};
|
||||
use syn::parse::{Parse, ParseStream};
|
||||
use syn::{DeriveInput, Result, Visibility};
|
||||
|
||||
/// Parse helper: captures the leading visibility of an input stream plus
/// every token that follows it, unparsed.
#[derive(Debug)]
struct VisRest {
    // The parsed leading visibility (Visibility::Inherited when absent).
    vis: Visibility,
    // Everything after the visibility, kept verbatim for later comparison.
    rest: TokenStream,
}
|
||||
|
||||
impl Parse for VisRest {
|
||||
fn parse(input: ParseStream) -> Result<Self> {
|
||||
Ok(VisRest {
|
||||
vis: input.parse()?,
|
||||
rest: input.parse()?,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
// Asserts that parsing `$input` yields a visibility matching the pattern
// `$p`, with `$rest` (a token-stream string) left over after it. The `Err`
// arm instead asserts that the whole parse fails.
macro_rules! assert_vis_parse {
    ($input:expr, Ok($p:pat)) => {
        // No explicit remainder means the visibility consumes the entire input.
        assert_vis_parse!($input, Ok($p) + "");
    };

    ($input:expr, Ok($p:pat) + $rest:expr) => {
        let expected = $rest.parse::<TokenStream>().unwrap();
        let parse: VisRest = syn::parse_str($input).unwrap();

        match parse.vis {
            $p => {}
            _ => panic!("Expected {}, got {:?}", stringify!($p), parse.vis),
        }

        // NOTE: Round-trips through `to_string` to avoid potential whitespace
        // diffs.
        assert_eq!(parse.rest.to_string(), expected.to_string());
    };

    ($input:expr, Err) => {
        syn::parse2::<VisRest>($input.parse().unwrap()).unwrap_err();
    };
}
|
||||
|
||||
#[test]
fn test_pub() {
    // Plain `pub` parses as the fully-public visibility.
    assert_vis_parse!("pub", Ok(Visibility::Public(_)));
}
|
||||
|
||||
#[test]
fn test_inherited() {
    // An empty input yields the default (inherited/private) visibility.
    assert_vis_parse!("", Ok(Visibility::Inherited));
}
|
||||
|
||||
#[test]
fn test_in() {
    // `pub(in path)` parses as a restricted visibility.
    assert_vis_parse!("pub(in foo::bar)", Ok(Visibility::Restricted(_)));
}
|
||||
|
||||
#[test]
fn test_pub_crate() {
    // `crate` is one of the keywords allowed in the parens without `in`.
    assert_vis_parse!("pub(crate)", Ok(Visibility::Restricted(_)));
}
|
||||
|
||||
#[test]
fn test_pub_self() {
    // `self` is one of the keywords allowed in the parens without `in`.
    assert_vis_parse!("pub(self)", Ok(Visibility::Restricted(_)));
}
|
||||
|
||||
#[test]
fn test_pub_super() {
    // `super` is one of the keywords allowed in the parens without `in`.
    assert_vis_parse!("pub(super)", Ok(Visibility::Restricted(_)));
}
|
||||
|
||||
#[test]
fn test_missing_in() {
    // A general path in the parens is only legal after `in`; without it the
    // visibility ends at `pub` and `(foo::bar)` is left unconsumed.
    assert_vis_parse!("pub(foo::bar)", Ok(Visibility::Public(_)) + "(foo::bar)");
}
|
||||
|
||||
#[test]
fn test_missing_in_path() {
    // `in` with no path following it is a hard parse error.
    assert_vis_parse!("pub(in)", Err);
}
|
||||
|
||||
#[test]
fn test_crate_path() {
    // A parenthesized path list is not a restricted-visibility form, so the
    // visibility ends at `pub` and the parenthesized tokens are left over.
    assert_vis_parse!(
        "pub(crate::A, crate::B)",
        Ok(Visibility::Public(_)) + "(crate::A, crate::B)"
    );
}
|
||||
|
||||
#[test]
fn test_junk_after_in() {
    // Trailing garbage after the path inside `pub(in ...)` must fail to parse.
    assert_vis_parse!("pub(in some::path @@garbage)", Err);
}
|
||||
|
||||
#[test]
fn test_empty_group_vis() {
    // mimics `struct S { $vis $field: () }` where $vis is empty
    // Macro substitution wraps each metavariable in a None-delimited Group,
    // so the empty $vis arrives as an empty invisible group that the parser
    // must treat as inherited visibility rather than an error.
    let tokens = TokenStream::from_iter(vec![
        TokenTree::Ident(Ident::new("struct", Span::call_site())),
        TokenTree::Ident(Ident::new("S", Span::call_site())),
        TokenTree::Group(Group::new(
            Delimiter::Brace,
            TokenStream::from_iter(vec![
                // empty $vis
                TokenTree::Group(Group::new(Delimiter::None, TokenStream::new())),
                // $field, also wrapped in an invisible group
                TokenTree::Group(Group::new(
                    Delimiter::None,
                    TokenStream::from_iter(vec![TokenTree::Ident(Ident::new(
                        "f",
                        Span::call_site(),
                    ))]),
                )),
                TokenTree::Punct(Punct::new(':', Spacing::Alone)),
                TokenTree::Group(Group::new(Delimiter::Parenthesis, TokenStream::new())),
            ]),
        )),
    ]);

    snapshot!(tokens as DeriveInput, @r###"
    DeriveInput {
        vis: Visibility::Inherited,
        ident: "S",
        generics: Generics,
        data: Data::Struct {
            fields: Fields::Named {
                named: [
                    Field {
                        vis: Visibility::Inherited,
                        ident: Some("f"),
                        colon_token: Some,
                        ty: Type::Tuple,
                    },
                ],
            },
        },
    }
    "###);
}
|
||||
33
vendor/syn/tests/zzz_stable.rs
vendored
Normal file
33
vendor/syn/tests/zzz_stable.rs
vendored
Normal file
@@ -0,0 +1,33 @@
|
||||
#![cfg(syn_disable_nightly_tests)]
|
||||
|
||||
use std::io::{self, Write};
|
||||
use termcolor::{Color, ColorChoice, ColorSpec, StandardStream, WriteColor};
|
||||
|
||||
const MSG: &str = "\
|
||||
‖
|
||||
‖ WARNING:
|
||||
‖ This is not a nightly compiler so not all tests were able to
|
||||
‖ run. Syn includes tests that compare Syn's parser against the
|
||||
‖ compiler's parser, which requires access to unstable librustc
|
||||
‖ data structures and a nightly compiler.
|
||||
‖
|
||||
";
|
||||
|
||||
#[test]
fn notice() -> io::Result<()> {
    // Split MSG around the literal "WARNING" so that word alone can be
    // re-printed in bold while the rest of the banner stays plain yellow.
    let keyword = "WARNING";
    let start = MSG.find(keyword).unwrap();
    let (prefix, tail) = MSG.split_at(start);
    let suffix = &tail[keyword.len()..];

    let mut out = StandardStream::stderr(ColorChoice::Auto);
    out.set_color(ColorSpec::new().set_fg(Some(Color::Yellow)))?;
    write!(&mut out, "{}", prefix)?;
    out.set_color(ColorSpec::new().set_bold(true).set_fg(Some(Color::Yellow)))?;
    write!(&mut out, "{}", keyword)?;
    out.set_color(ColorSpec::new().set_fg(Some(Color::Yellow)))?;
    write!(&mut out, "{}", suffix)?;
    out.reset()?;

    Ok(())
}
|
||||
Reference in New Issue
Block a user