Initial vendor packages
Signed-off-by: Valentin Popov <valentin@popov.link>
This commit is contained in:
1
vendor/proc-macro2/.cargo-checksum.json
vendored
Normal file
1
vendor/proc-macro2/.cargo-checksum.json
vendored
Normal file
@ -0,0 +1 @@
|
||||
{"files":{"Cargo.toml":"4a85db50c7866e4eba8673358705982eaf715207ae4b8e16d3f485671fa9f473","LICENSE-APACHE":"62c7a1e35f56406896d7aa7ca52d0cc0d272ac022b5d2796e7d6905db8a3636a","LICENSE-MIT":"23f18e03dc49df91622fe2a76176497404e46ced8a715d9d2b67a7446571cca3","README.md":"c609b6865476d6c35879784e9155367a97a0da496aa5c3c61488440a20f59883","build.rs":"8b4facae0d125ca3b437b4f5ebcd6ea3da3fcc65fcfc2cf357ae544423aa4568","build/probe.rs":"827da142d033f027d9f2a52ffdc0a619c7c34a2a280635e38c64fcd46cf7b635","rust-toolchain.toml":"6bbb61302978c736b2da03e4fb40e3beab908f85d533ab46fd541e637b5f3e0f","src/detection.rs":"ed9a5f9a979ab01247d7a68eeb1afa3c13209334c5bfff0f9289cb07e5bb4e8b","src/extra.rs":"d378a9e799e5c49933b067cd38f5364d16a152ef337eef86ce42fdc86005ddf3","src/fallback.rs":"35e46d4fa73175dcf857084e12f5bb7e094481738dcf59a98b1c584552d076bc","src/lib.rs":"d0f6c5e918b827df600cf5e73cf92ca52c16584b4ac7dd96eb63562947d36bd5","src/location.rs":"f55d2e61f1bb1af65e14ed04c9e91eb1ddbf8430e8c05f2048d1cd538d27368e","src/marker.rs":"c8c90351b8ebcf5b11520831b199add628bc613b0f5559260b51a3c4f6406d8a","src/parse.rs":"4b77cddbc2752bc4d38a65acd8b96b6786c5220d19b1e1b37810257b5d24132d","src/rcvec.rs":"1c3c48c4f819927cc445ae15ca3bb06775feff2fd1cb21901ae4c40c7e6b4e82","src/wrapper.rs":"46ae8c6bc87edb04c3b3fc3506234bcca050c309c209b93bc61b2d21235c8362","tests/comments.rs":"31115b3a56c83d93eef2fb4c9566bf4543e302560732986161b98aef504785ed","tests/features.rs":"a86deb8644992a4eb64d9fd493eff16f9cf9c5cb6ade3a634ce0c990cf87d559","tests/marker.rs":"3190ee07dae510251f360db701ce257030f94a479b6689c3a9ef804bd5d8d099","tests/test.rs":"7511be57e097b15403cf36feb858b4aabdc832fac7024571059a559a7e2ed2a0","tests/test_fmt.rs":"b7743b612af65f2c88cbe109d50a093db7aa7e87f9e37bf45b7bbaeb240aa020","tests/test_size.rs":"acf05963c1e62052d769d237b50844a2c59b4182b491231b099a4f74e5456ab0"},"package":"95fc56cda0b5c3325f5fbbd7ff9fda9e02bb00bb3dac51252d2f1bfa1cb8cc8c"}
|
67
vendor/proc-macro2/Cargo.toml
vendored
Normal file
67
vendor/proc-macro2/Cargo.toml
vendored
Normal file
@ -0,0 +1,67 @@
|
||||
# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO
|
||||
#
|
||||
# When uploading crates to the registry Cargo will automatically
|
||||
# "normalize" Cargo.toml files for maximal compatibility
|
||||
# with all versions of Cargo and also rewrite `path` dependencies
|
||||
# to registry (e.g., crates.io) dependencies.
|
||||
#
|
||||
# If you are reading this file be aware that the original Cargo.toml
|
||||
# will likely look very different (and much more reasonable).
|
||||
# See Cargo.toml.orig for the original contents.
|
||||
|
||||
[package]
|
||||
edition = "2021"
|
||||
rust-version = "1.56"
|
||||
name = "proc-macro2"
|
||||
version = "1.0.76"
|
||||
authors = [
|
||||
"David Tolnay <dtolnay@gmail.com>",
|
||||
"Alex Crichton <alex@alexcrichton.com>",
|
||||
]
|
||||
autobenches = false
|
||||
description = "A substitute implementation of the compiler's `proc_macro` API to decouple token-based libraries from the procedural macro use case."
|
||||
documentation = "https://docs.rs/proc-macro2"
|
||||
readme = "README.md"
|
||||
keywords = [
|
||||
"macros",
|
||||
"syn",
|
||||
]
|
||||
categories = ["development-tools::procedural-macro-helpers"]
|
||||
license = "MIT OR Apache-2.0"
|
||||
repository = "https://github.com/dtolnay/proc-macro2"
|
||||
|
||||
[package.metadata.docs.rs]
|
||||
rustc-args = [
|
||||
"--cfg",
|
||||
"procmacro2_semver_exempt",
|
||||
]
|
||||
rustdoc-args = [
|
||||
"--cfg",
|
||||
"procmacro2_semver_exempt",
|
||||
"--cfg",
|
||||
"doc_cfg",
|
||||
"--generate-link-to-definition",
|
||||
]
|
||||
targets = ["x86_64-unknown-linux-gnu"]
|
||||
|
||||
[package.metadata.playground]
|
||||
features = ["span-locations"]
|
||||
|
||||
[lib]
|
||||
doc-scrape-examples = false
|
||||
|
||||
[dependencies.unicode-ident]
|
||||
version = "1.0"
|
||||
|
||||
[dev-dependencies.quote]
|
||||
version = "1.0"
|
||||
default_features = false
|
||||
|
||||
[dev-dependencies.rustversion]
|
||||
version = "1"
|
||||
|
||||
[features]
|
||||
default = ["proc-macro"]
|
||||
nightly = []
|
||||
proc-macro = []
|
||||
span-locations = []
|
176
vendor/proc-macro2/LICENSE-APACHE
vendored
Normal file
176
vendor/proc-macro2/LICENSE-APACHE
vendored
Normal file
@ -0,0 +1,176 @@
|
||||
Apache License
|
||||
Version 2.0, January 2004
|
||||
http://www.apache.org/licenses/
|
||||
|
||||
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
|
||||
|
||||
1. Definitions.
|
||||
|
||||
"License" shall mean the terms and conditions for use, reproduction,
|
||||
and distribution as defined by Sections 1 through 9 of this document.
|
||||
|
||||
"Licensor" shall mean the copyright owner or entity authorized by
|
||||
the copyright owner that is granting the License.
|
||||
|
||||
"Legal Entity" shall mean the union of the acting entity and all
|
||||
other entities that control, are controlled by, or are under common
|
||||
control with that entity. For the purposes of this definition,
|
||||
"control" means (i) the power, direct or indirect, to cause the
|
||||
direction or management of such entity, whether by contract or
|
||||
otherwise, or (ii) ownership of fifty percent (50%) or more of the
|
||||
outstanding shares, or (iii) beneficial ownership of such entity.
|
||||
|
||||
"You" (or "Your") shall mean an individual or Legal Entity
|
||||
exercising permissions granted by this License.
|
||||
|
||||
"Source" form shall mean the preferred form for making modifications,
|
||||
including but not limited to software source code, documentation
|
||||
source, and configuration files.
|
||||
|
||||
"Object" form shall mean any form resulting from mechanical
|
||||
transformation or translation of a Source form, including but
|
||||
not limited to compiled object code, generated documentation,
|
||||
and conversions to other media types.
|
||||
|
||||
"Work" shall mean the work of authorship, whether in Source or
|
||||
Object form, made available under the License, as indicated by a
|
||||
copyright notice that is included in or attached to the work
|
||||
(an example is provided in the Appendix below).
|
||||
|
||||
"Derivative Works" shall mean any work, whether in Source or Object
|
||||
form, that is based on (or derived from) the Work and for which the
|
||||
editorial revisions, annotations, elaborations, or other modifications
|
||||
represent, as a whole, an original work of authorship. For the purposes
|
||||
of this License, Derivative Works shall not include works that remain
|
||||
separable from, or merely link (or bind by name) to the interfaces of,
|
||||
the Work and Derivative Works thereof.
|
||||
|
||||
"Contribution" shall mean any work of authorship, including
|
||||
the original version of the Work and any modifications or additions
|
||||
to that Work or Derivative Works thereof, that is intentionally
|
||||
submitted to Licensor for inclusion in the Work by the copyright owner
|
||||
or by an individual or Legal Entity authorized to submit on behalf of
|
||||
the copyright owner. For the purposes of this definition, "submitted"
|
||||
means any form of electronic, verbal, or written communication sent
|
||||
to the Licensor or its representatives, including but not limited to
|
||||
communication on electronic mailing lists, source code control systems,
|
||||
and issue tracking systems that are managed by, or on behalf of, the
|
||||
Licensor for the purpose of discussing and improving the Work, but
|
||||
excluding communication that is conspicuously marked or otherwise
|
||||
designated in writing by the copyright owner as "Not a Contribution."
|
||||
|
||||
"Contributor" shall mean Licensor and any individual or Legal Entity
|
||||
on behalf of whom a Contribution has been received by Licensor and
|
||||
subsequently incorporated within the Work.
|
||||
|
||||
2. Grant of Copyright License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
copyright license to reproduce, prepare Derivative Works of,
|
||||
publicly display, publicly perform, sublicense, and distribute the
|
||||
Work and such Derivative Works in Source or Object form.
|
||||
|
||||
3. Grant of Patent License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
(except as stated in this section) patent license to make, have made,
|
||||
use, offer to sell, sell, import, and otherwise transfer the Work,
|
||||
where such license applies only to those patent claims licensable
|
||||
by such Contributor that are necessarily infringed by their
|
||||
Contribution(s) alone or by combination of their Contribution(s)
|
||||
with the Work to which such Contribution(s) was submitted. If You
|
||||
institute patent litigation against any entity (including a
|
||||
cross-claim or counterclaim in a lawsuit) alleging that the Work
|
||||
or a Contribution incorporated within the Work constitutes direct
|
||||
or contributory patent infringement, then any patent licenses
|
||||
granted to You under this License for that Work shall terminate
|
||||
as of the date such litigation is filed.
|
||||
|
||||
4. Redistribution. You may reproduce and distribute copies of the
|
||||
Work or Derivative Works thereof in any medium, with or without
|
||||
modifications, and in Source or Object form, provided that You
|
||||
meet the following conditions:
|
||||
|
||||
(a) You must give any other recipients of the Work or
|
||||
Derivative Works a copy of this License; and
|
||||
|
||||
(b) You must cause any modified files to carry prominent notices
|
||||
stating that You changed the files; and
|
||||
|
||||
(c) You must retain, in the Source form of any Derivative Works
|
||||
that You distribute, all copyright, patent, trademark, and
|
||||
attribution notices from the Source form of the Work,
|
||||
excluding those notices that do not pertain to any part of
|
||||
the Derivative Works; and
|
||||
|
||||
(d) If the Work includes a "NOTICE" text file as part of its
|
||||
distribution, then any Derivative Works that You distribute must
|
||||
include a readable copy of the attribution notices contained
|
||||
within such NOTICE file, excluding those notices that do not
|
||||
pertain to any part of the Derivative Works, in at least one
|
||||
of the following places: within a NOTICE text file distributed
|
||||
as part of the Derivative Works; within the Source form or
|
||||
documentation, if provided along with the Derivative Works; or,
|
||||
within a display generated by the Derivative Works, if and
|
||||
wherever such third-party notices normally appear. The contents
|
||||
of the NOTICE file are for informational purposes only and
|
||||
do not modify the License. You may add Your own attribution
|
||||
notices within Derivative Works that You distribute, alongside
|
||||
or as an addendum to the NOTICE text from the Work, provided
|
||||
that such additional attribution notices cannot be construed
|
||||
as modifying the License.
|
||||
|
||||
You may add Your own copyright statement to Your modifications and
|
||||
may provide additional or different license terms and conditions
|
||||
for use, reproduction, or distribution of Your modifications, or
|
||||
for any such Derivative Works as a whole, provided Your use,
|
||||
reproduction, and distribution of the Work otherwise complies with
|
||||
the conditions stated in this License.
|
||||
|
||||
5. Submission of Contributions. Unless You explicitly state otherwise,
|
||||
any Contribution intentionally submitted for inclusion in the Work
|
||||
by You to the Licensor shall be under the terms and conditions of
|
||||
this License, without any additional terms or conditions.
|
||||
Notwithstanding the above, nothing herein shall supersede or modify
|
||||
the terms of any separate license agreement you may have executed
|
||||
with Licensor regarding such Contributions.
|
||||
|
||||
6. Trademarks. This License does not grant permission to use the trade
|
||||
names, trademarks, service marks, or product names of the Licensor,
|
||||
except as required for reasonable and customary use in describing the
|
||||
origin of the Work and reproducing the content of the NOTICE file.
|
||||
|
||||
7. Disclaimer of Warranty. Unless required by applicable law or
|
||||
agreed to in writing, Licensor provides the Work (and each
|
||||
Contributor provides its Contributions) on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||
implied, including, without limitation, any warranties or conditions
|
||||
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
|
||||
PARTICULAR PURPOSE. You are solely responsible for determining the
|
||||
appropriateness of using or redistributing the Work and assume any
|
||||
risks associated with Your exercise of permissions under this License.
|
||||
|
||||
8. Limitation of Liability. In no event and under no legal theory,
|
||||
whether in tort (including negligence), contract, or otherwise,
|
||||
unless required by applicable law (such as deliberate and grossly
|
||||
negligent acts) or agreed to in writing, shall any Contributor be
|
||||
liable to You for damages, including any direct, indirect, special,
|
||||
incidental, or consequential damages of any character arising as a
|
||||
result of this License or out of the use or inability to use the
|
||||
Work (including but not limited to damages for loss of goodwill,
|
||||
work stoppage, computer failure or malfunction, or any and all
|
||||
other commercial damages or losses), even if such Contributor
|
||||
has been advised of the possibility of such damages.
|
||||
|
||||
9. Accepting Warranty or Additional Liability. While redistributing
|
||||
the Work or Derivative Works thereof, You may choose to offer,
|
||||
and charge a fee for, acceptance of support, warranty, indemnity,
|
||||
or other liability obligations and/or rights consistent with this
|
||||
License. However, in accepting such obligations, You may act only
|
||||
on Your own behalf and on Your sole responsibility, not on behalf
|
||||
of any other Contributor, and only if You agree to indemnify,
|
||||
defend, and hold each Contributor harmless for any liability
|
||||
incurred by, or claims asserted against, such Contributor by reason
|
||||
of your accepting any such warranty or additional liability.
|
||||
|
||||
END OF TERMS AND CONDITIONS
|
23
vendor/proc-macro2/LICENSE-MIT
vendored
Normal file
23
vendor/proc-macro2/LICENSE-MIT
vendored
Normal file
@ -0,0 +1,23 @@
|
||||
Permission is hereby granted, free of charge, to any
|
||||
person obtaining a copy of this software and associated
|
||||
documentation files (the "Software"), to deal in the
|
||||
Software without restriction, including without
|
||||
limitation the rights to use, copy, modify, merge,
|
||||
publish, distribute, sublicense, and/or sell copies of
|
||||
the Software, and to permit persons to whom the Software
|
||||
is furnished to do so, subject to the following
|
||||
conditions:
|
||||
|
||||
The above copyright notice and this permission notice
|
||||
shall be included in all copies or substantial portions
|
||||
of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF
|
||||
ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
|
||||
TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
|
||||
PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
|
||||
SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
|
||||
CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
|
||||
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
|
||||
IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
|
||||
DEALINGS IN THE SOFTWARE.
|
94
vendor/proc-macro2/README.md
vendored
Normal file
94
vendor/proc-macro2/README.md
vendored
Normal file
@ -0,0 +1,94 @@
|
||||
# proc-macro2
|
||||
|
||||
[<img alt="github" src="https://img.shields.io/badge/github-dtolnay/proc--macro2-8da0cb?style=for-the-badge&labelColor=555555&logo=github" height="20">](https://github.com/dtolnay/proc-macro2)
|
||||
[<img alt="crates.io" src="https://img.shields.io/crates/v/proc-macro2.svg?style=for-the-badge&color=fc8d62&logo=rust" height="20">](https://crates.io/crates/proc-macro2)
|
||||
[<img alt="docs.rs" src="https://img.shields.io/badge/docs.rs-proc--macro2-66c2a5?style=for-the-badge&labelColor=555555&logo=docs.rs" height="20">](https://docs.rs/proc-macro2)
|
||||
[<img alt="build status" src="https://img.shields.io/github/actions/workflow/status/dtolnay/proc-macro2/ci.yml?branch=master&style=for-the-badge" height="20">](https://github.com/dtolnay/proc-macro2/actions?query=branch%3Amaster)
|
||||
|
||||
A wrapper around the procedural macro API of the compiler's `proc_macro` crate.
|
||||
This library serves two purposes:
|
||||
|
||||
- **Bring proc-macro-like functionality to other contexts like build.rs and
|
||||
main.rs.** Types from `proc_macro` are entirely specific to procedural macros
|
||||
and cannot ever exist in code outside of a procedural macro. Meanwhile
|
||||
`proc_macro2` types may exist anywhere including non-macro code. By developing
|
||||
foundational libraries like [syn] and [quote] against `proc_macro2` rather
|
||||
than `proc_macro`, the procedural macro ecosystem becomes easily applicable to
|
||||
many other use cases and we avoid reimplementing non-macro equivalents of
|
||||
those libraries.
|
||||
|
||||
- **Make procedural macros unit testable.** As a consequence of being specific
|
||||
to procedural macros, nothing that uses `proc_macro` can be executed from a
|
||||
unit test. In order for helper libraries or components of a macro to be
|
||||
testable in isolation, they must be implemented using `proc_macro2`.
|
||||
|
||||
[syn]: https://github.com/dtolnay/syn
|
||||
[quote]: https://github.com/dtolnay/quote
|
||||
|
||||
## Usage
|
||||
|
||||
```toml
|
||||
[dependencies]
|
||||
proc-macro2 = "1.0"
|
||||
```
|
||||
|
||||
The skeleton of a typical procedural macro typically looks like this:
|
||||
|
||||
```rust
|
||||
extern crate proc_macro;
|
||||
|
||||
#[proc_macro_derive(MyDerive)]
|
||||
pub fn my_derive(input: proc_macro::TokenStream) -> proc_macro::TokenStream {
|
||||
let input = proc_macro2::TokenStream::from(input);
|
||||
|
||||
let output: proc_macro2::TokenStream = {
|
||||
/* transform input */
|
||||
};
|
||||
|
||||
proc_macro::TokenStream::from(output)
|
||||
}
|
||||
```
|
||||
|
||||
If parsing with [Syn], you'll use [`parse_macro_input!`] instead to propagate
|
||||
parse errors correctly back to the compiler when parsing fails.
|
||||
|
||||
[`parse_macro_input!`]: https://docs.rs/syn/2.0/syn/macro.parse_macro_input.html
|
||||
|
||||
## Unstable features
|
||||
|
||||
The default feature set of proc-macro2 tracks the most recent stable compiler
|
||||
API. Functionality in `proc_macro` that is not yet stable is not exposed by
|
||||
proc-macro2 by default.
|
||||
|
||||
To opt into the additional APIs available in the most recent nightly compiler,
|
||||
the `procmacro2_semver_exempt` config flag must be passed to rustc. We will
|
||||
polyfill those nightly-only APIs back to Rust 1.56.0. As these are unstable APIs
|
||||
that track the nightly compiler, minor versions of proc-macro2 may make breaking
|
||||
changes to them at any time.
|
||||
|
||||
```
|
||||
RUSTFLAGS='--cfg procmacro2_semver_exempt' cargo build
|
||||
```
|
||||
|
||||
Note that this must not only be done for your crate, but for any crate that
|
||||
depends on your crate. This infectious nature is intentional, as it serves as a
|
||||
reminder that you are outside of the normal semver guarantees.
|
||||
|
||||
Semver exempt methods are marked as such in the proc-macro2 documentation.
|
||||
|
||||
<br>
|
||||
|
||||
#### License
|
||||
|
||||
<sup>
|
||||
Licensed under either of <a href="LICENSE-APACHE">Apache License, Version
|
||||
2.0</a> or <a href="LICENSE-MIT">MIT license</a> at your option.
|
||||
</sup>
|
||||
|
||||
<br>
|
||||
|
||||
<sub>
|
||||
Unless you explicitly state otherwise, any contribution intentionally submitted
|
||||
for inclusion in this crate by you, as defined in the Apache-2.0 license, shall
|
||||
be dual licensed as above, without any additional terms or conditions.
|
||||
</sub>
|
202
vendor/proc-macro2/build.rs
vendored
Normal file
202
vendor/proc-macro2/build.rs
vendored
Normal file
@ -0,0 +1,202 @@
|
||||
// rustc-cfg emitted by the build script:
|
||||
//
|
||||
// "wrap_proc_macro"
|
||||
// Wrap types from libproc_macro rather than polyfilling the whole API.
|
||||
// Enabled on rustc 1.29+ as long as procmacro2_semver_exempt is not set,
|
||||
// because we can't emulate the unstable API without emulating everything
|
||||
// else. Also enabled unconditionally on nightly, in which case the
|
||||
// procmacro2_semver_exempt surface area is implemented by using the
|
||||
// nightly-only proc_macro API.
|
||||
//
|
||||
// "hygiene"
|
||||
// Enable Span::mixed_site() and non-dummy behavior of Span::resolved_at
|
||||
// and Span::located_at. Enabled on Rust 1.45+.
|
||||
//
|
||||
// "proc_macro_span"
|
||||
// Enable non-dummy behavior of Span::start and Span::end methods which
|
||||
// requires an unstable compiler feature. Enabled when building with
|
||||
// nightly, unless `-Z allow-feature` in RUSTFLAGS disallows unstable
|
||||
// features.
|
||||
//
|
||||
// "super_unstable"
|
||||
// Implement the semver exempt API in terms of the nightly-only proc_macro
|
||||
// API. Enabled when using procmacro2_semver_exempt on a nightly compiler.
|
||||
//
|
||||
// "span_locations"
|
||||
// Provide methods Span::start and Span::end which give the line/column
|
||||
// location of a token. Enabled by procmacro2_semver_exempt or the
|
||||
// "span-locations" Cargo cfg. This is behind a cfg because tracking
|
||||
// location inside spans is a performance hit.
|
||||
//
|
||||
// "is_available"
|
||||
// Use proc_macro::is_available() to detect if the proc macro API is
|
||||
// available or needs to be polyfilled instead of trying to use the proc
|
||||
// macro API and catching a panic if it isn't available. Enabled on Rust
|
||||
// 1.57+.
|
||||
|
||||
use std::env;
|
||||
use std::ffi::OsString;
|
||||
use std::path::Path;
|
||||
use std::process::{self, Command, Stdio};
|
||||
use std::str;
|
||||
use std::u32;
|
||||
|
||||
fn main() {
|
||||
let rustc = rustc_minor_version().unwrap_or(u32::MAX);
|
||||
|
||||
let docs_rs = env::var_os("DOCS_RS").is_some();
|
||||
let semver_exempt = cfg!(procmacro2_semver_exempt) || docs_rs;
|
||||
if semver_exempt {
|
||||
// https://github.com/dtolnay/proc-macro2/issues/147
|
||||
println!("cargo:rustc-cfg=procmacro2_semver_exempt");
|
||||
}
|
||||
|
||||
if semver_exempt || cfg!(feature = "span-locations") {
|
||||
println!("cargo:rustc-cfg=span_locations");
|
||||
}
|
||||
|
||||
if rustc < 57 {
|
||||
println!("cargo:rustc-cfg=no_is_available");
|
||||
}
|
||||
|
||||
if rustc < 66 {
|
||||
println!("cargo:rustc-cfg=no_source_text");
|
||||
}
|
||||
|
||||
if !cfg!(feature = "proc-macro") {
|
||||
println!("cargo:rerun-if-changed=build.rs");
|
||||
return;
|
||||
}
|
||||
|
||||
println!("cargo:rerun-if-changed=build/probe.rs");
|
||||
|
||||
let proc_macro_span;
|
||||
let consider_rustc_bootstrap;
|
||||
if compile_probe(false) {
|
||||
// This is a nightly or dev compiler, so it supports unstable features
|
||||
// regardless of RUSTC_BOOTSTRAP. No need to rerun build script if
|
||||
// RUSTC_BOOTSTRAP is changed.
|
||||
proc_macro_span = true;
|
||||
consider_rustc_bootstrap = false;
|
||||
} else if let Some(rustc_bootstrap) = env::var_os("RUSTC_BOOTSTRAP") {
|
||||
if compile_probe(true) {
|
||||
// This is a stable or beta compiler for which the user has set
|
||||
// RUSTC_BOOTSTRAP to turn on unstable features. Rerun build script
|
||||
// if they change it.
|
||||
proc_macro_span = true;
|
||||
consider_rustc_bootstrap = true;
|
||||
} else if rustc_bootstrap == "1" {
|
||||
// This compiler does not support the proc macro Span API in the
|
||||
// form that proc-macro2 expects. No need to pay attention to
|
||||
// RUSTC_BOOTSTRAP.
|
||||
proc_macro_span = false;
|
||||
consider_rustc_bootstrap = false;
|
||||
} else {
|
||||
// This is a stable or beta compiler for which RUSTC_BOOTSTRAP is
|
||||
// set to restrict the use of unstable features by this crate.
|
||||
proc_macro_span = false;
|
||||
consider_rustc_bootstrap = true;
|
||||
}
|
||||
} else {
|
||||
// Without RUSTC_BOOTSTRAP, this compiler does not support the proc
|
||||
// macro Span API in the form that proc-macro2 expects, but try again if
|
||||
// the user turns on unstable features.
|
||||
proc_macro_span = false;
|
||||
consider_rustc_bootstrap = true;
|
||||
}
|
||||
|
||||
if proc_macro_span || !semver_exempt {
|
||||
println!("cargo:rustc-cfg=wrap_proc_macro");
|
||||
}
|
||||
|
||||
if proc_macro_span {
|
||||
println!("cargo:rustc-cfg=proc_macro_span");
|
||||
}
|
||||
|
||||
if semver_exempt && proc_macro_span {
|
||||
println!("cargo:rustc-cfg=super_unstable");
|
||||
}
|
||||
|
||||
if consider_rustc_bootstrap {
|
||||
println!("cargo:rerun-if-env-changed=RUSTC_BOOTSTRAP");
|
||||
}
|
||||
}
|
||||
|
||||
fn compile_probe(rustc_bootstrap: bool) -> bool {
|
||||
if env::var_os("RUSTC_STAGE").is_some() {
|
||||
// We are running inside rustc bootstrap. This is a highly non-standard
|
||||
// environment with issues such as:
|
||||
//
|
||||
// https://github.com/rust-lang/cargo/issues/11138
|
||||
// https://github.com/rust-lang/rust/issues/114839
|
||||
//
|
||||
// Let's just not use nightly features here.
|
||||
return false;
|
||||
}
|
||||
|
||||
let rustc = cargo_env_var("RUSTC");
|
||||
let out_dir = cargo_env_var("OUT_DIR");
|
||||
let probefile = Path::new("build").join("probe.rs");
|
||||
|
||||
// Make sure to pick up Cargo rustc configuration.
|
||||
let mut cmd = if let Some(wrapper) = env::var_os("RUSTC_WRAPPER") {
|
||||
let mut cmd = Command::new(wrapper);
|
||||
// The wrapper's first argument is supposed to be the path to rustc.
|
||||
cmd.arg(rustc);
|
||||
cmd
|
||||
} else {
|
||||
Command::new(rustc)
|
||||
};
|
||||
|
||||
if !rustc_bootstrap {
|
||||
cmd.env_remove("RUSTC_BOOTSTRAP");
|
||||
}
|
||||
|
||||
cmd.stderr(Stdio::null())
|
||||
.arg("--edition=2021")
|
||||
.arg("--crate-name=proc_macro2")
|
||||
.arg("--crate-type=lib")
|
||||
.arg("--emit=dep-info,metadata")
|
||||
.arg("--out-dir")
|
||||
.arg(out_dir)
|
||||
.arg(probefile);
|
||||
|
||||
if let Some(target) = env::var_os("TARGET") {
|
||||
cmd.arg("--target").arg(target);
|
||||
}
|
||||
|
||||
// If Cargo wants to set RUSTFLAGS, use that.
|
||||
if let Ok(rustflags) = env::var("CARGO_ENCODED_RUSTFLAGS") {
|
||||
if !rustflags.is_empty() {
|
||||
for arg in rustflags.split('\x1f') {
|
||||
cmd.arg(arg);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
match cmd.status() {
|
||||
Ok(status) => status.success(),
|
||||
Err(_) => false,
|
||||
}
|
||||
}
|
||||
|
||||
fn rustc_minor_version() -> Option<u32> {
|
||||
let rustc = cargo_env_var("RUSTC");
|
||||
let output = Command::new(rustc).arg("--version").output().ok()?;
|
||||
let version = str::from_utf8(&output.stdout).ok()?;
|
||||
let mut pieces = version.split('.');
|
||||
if pieces.next() != Some("rustc 1") {
|
||||
return None;
|
||||
}
|
||||
pieces.next()?.parse().ok()
|
||||
}
|
||||
|
||||
fn cargo_env_var(key: &str) -> OsString {
|
||||
env::var_os(key).unwrap_or_else(|| {
|
||||
eprintln!(
|
||||
"Environment variable ${} is not set during execution of build script",
|
||||
key,
|
||||
);
|
||||
process::exit(1);
|
||||
})
|
||||
}
|
21
vendor/proc-macro2/build/probe.rs
vendored
Normal file
21
vendor/proc-macro2/build/probe.rs
vendored
Normal file
@ -0,0 +1,21 @@
|
||||
// This code exercises the surface area that we expect of Span's unstable API.
|
||||
// If the current toolchain is able to compile it, then proc-macro2 is able to
|
||||
// offer these APIs too.
|
||||
|
||||
#![feature(proc_macro_span)]
|
||||
|
||||
extern crate proc_macro;
|
||||
|
||||
use core::ops::RangeBounds;
|
||||
use proc_macro::{Literal, Span};
|
||||
|
||||
pub fn join(this: &Span, other: Span) -> Option<Span> {
|
||||
this.join(other)
|
||||
}
|
||||
|
||||
pub fn subspan<R: RangeBounds<usize>>(this: &Literal, range: R) -> Option<Span> {
|
||||
this.subspan(range)
|
||||
}
|
||||
|
||||
// Include in sccache cache key.
|
||||
const _: Option<&str> = option_env!("RUSTC_BOOTSTRAP");
|
2
vendor/proc-macro2/rust-toolchain.toml
vendored
Normal file
2
vendor/proc-macro2/rust-toolchain.toml
vendored
Normal file
@ -0,0 +1,2 @@
|
||||
[toolchain]
|
||||
components = ["rust-src"]
|
75
vendor/proc-macro2/src/detection.rs
vendored
Normal file
75
vendor/proc-macro2/src/detection.rs
vendored
Normal file
@ -0,0 +1,75 @@
|
||||
use core::sync::atomic::{AtomicUsize, Ordering};
|
||||
use std::sync::Once;
|
||||
|
||||
static WORKS: AtomicUsize = AtomicUsize::new(0);
|
||||
static INIT: Once = Once::new();
|
||||
|
||||
pub(crate) fn inside_proc_macro() -> bool {
|
||||
match WORKS.load(Ordering::Relaxed) {
|
||||
1 => return false,
|
||||
2 => return true,
|
||||
_ => {}
|
||||
}
|
||||
|
||||
INIT.call_once(initialize);
|
||||
inside_proc_macro()
|
||||
}
|
||||
|
||||
pub(crate) fn force_fallback() {
|
||||
WORKS.store(1, Ordering::Relaxed);
|
||||
}
|
||||
|
||||
pub(crate) fn unforce_fallback() {
|
||||
initialize();
|
||||
}
|
||||
|
||||
#[cfg(not(no_is_available))]
|
||||
fn initialize() {
|
||||
let available = proc_macro::is_available();
|
||||
WORKS.store(available as usize + 1, Ordering::Relaxed);
|
||||
}
|
||||
|
||||
// Swap in a null panic hook to avoid printing "thread panicked" to stderr,
// then use catch_unwind to determine whether the compiler's proc_macro is
// working. When proc-macro2 is used from outside of a procedural macro all
// of the proc_macro crate's APIs currently panic.
//
// The Once is to prevent the possibility of this ordering:
//
//     thread 1 calls take_hook, gets the user's original hook
//     thread 1 calls set_hook with the null hook
//     thread 2 calls take_hook, thinks null hook is the original hook
//     thread 2 calls set_hook with the null hook
//     thread 1 calls set_hook with the actual original hook
//     thread 2 calls set_hook with what it thinks is the original hook
//
// in which the user's hook has been lost.
//
// There is still a race condition where a panic in a different thread can
// happen during the interval that the user's original panic hook is
// unregistered such that their hook is incorrectly not called. This is
// sufficiently unlikely and less bad than printing panic messages to stderr
// on correct use of this crate. Maybe there is a libstd feature request
// here. For now, if a user needs to guarantee that this failure mode does
// not occur, they need to call e.g. `proc_macro2::Span::call_site()` from
// the main thread before launching any other threads.
#[cfg(no_is_available)]
fn initialize() {
    use std::panic::{self, PanicInfo};

    type PanicHook = dyn Fn(&PanicInfo) + Sync + Send + 'static;

    // Hook that silently discards panic output while we probe.
    let null_hook: Box<PanicHook> = Box::new(|_panic_info| { /* ignore */ });
    // Raw pointer identity of our hook, used below to detect interference.
    let sanity_check = &*null_hook as *const PanicHook;
    let original_hook = panic::take_hook();
    panic::set_hook(null_hook);

    // If Span::call_site() panics, proc_macro is not usable on this thread.
    let works = panic::catch_unwind(proc_macro::Span::call_site).is_ok();
    WORKS.store(works as usize + 1, Ordering::Relaxed);

    // Restore the user's hook and verify nobody replaced ours in between.
    let hopefully_null_hook = panic::take_hook();
    panic::set_hook(original_hook);
    if sanity_check != &*hopefully_null_hook {
        panic!("observed race condition in proc_macro2::inside_proc_macro");
    }
}
|
84
vendor/proc-macro2/src/extra.rs
vendored
Normal file
84
vendor/proc-macro2/src/extra.rs
vendored
Normal file
@ -0,0 +1,84 @@
|
||||
//! Items which do not have a correspondence to any API in the proc_macro crate,
|
||||
//! but are necessary to include in proc-macro2.
|
||||
|
||||
use crate::fallback;
|
||||
use crate::imp;
|
||||
use crate::marker::Marker;
|
||||
use crate::Span;
|
||||
use core::fmt::{self, Debug};
|
||||
|
||||
/// An object that holds a [`Group`]'s `span_open()` and `span_close()` together
/// (in a more compact representation than holding those 2 spans individually).
///
/// [`Group`]: crate::Group
#[derive(Copy, Clone)]
pub struct DelimSpan {
    inner: DelimSpanEnum,
    // Propagates the crate-wide autotrait configuration; see marker.rs.
    _marker: Marker,
}

#[derive(Copy, Clone)]
enum DelimSpanEnum {
    // Compiler-backed groups keep all three spans individually.
    #[cfg(wrap_proc_macro)]
    Compiler {
        join: proc_macro::Span,
        open: proc_macro::Span,
        close: proc_macro::Span,
    },
    // The fallback Span can derive first/last byte spans from itself.
    Fallback(fallback::Span),
}

impl DelimSpan {
    pub(crate) fn new(group: &imp::Group) -> Self {
        #[cfg(wrap_proc_macro)]
        let inner = match group {
            imp::Group::Compiler(group) => DelimSpanEnum::Compiler {
                join: group.span(),
                open: group.span_open(),
                close: group.span_close(),
            },
            imp::Group::Fallback(group) => DelimSpanEnum::Fallback(group.span()),
        };

        // Without wrap_proc_macro, imp::Group is the fallback type directly.
        #[cfg(not(wrap_proc_macro))]
        let inner = DelimSpanEnum::Fallback(group.span());

        DelimSpan {
            inner,
            _marker: Marker,
        }
    }

    /// Returns a span covering the entire delimited group.
    pub fn join(&self) -> Span {
        match &self.inner {
            #[cfg(wrap_proc_macro)]
            DelimSpanEnum::Compiler { join, .. } => Span::_new(imp::Span::Compiler(*join)),
            DelimSpanEnum::Fallback(span) => Span::_new_fallback(*span),
        }
    }

    /// Returns a span for the opening punctuation of the group only.
    pub fn open(&self) -> Span {
        match &self.inner {
            #[cfg(wrap_proc_macro)]
            DelimSpanEnum::Compiler { open, .. } => Span::_new(imp::Span::Compiler(*open)),
            DelimSpanEnum::Fallback(span) => Span::_new_fallback(span.first_byte()),
        }
    }

    /// Returns a span for the closing punctuation of the group only.
    pub fn close(&self) -> Span {
        match &self.inner {
            #[cfg(wrap_proc_macro)]
            DelimSpanEnum::Compiler { close, .. } => Span::_new(imp::Span::Compiler(*close)),
            DelimSpanEnum::Fallback(span) => Span::_new_fallback(span.last_byte()),
        }
    }
}
|
||||
|
||||
impl Debug for DelimSpan {
|
||||
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
||||
Debug::fmt(&self.join(), f)
|
||||
}
|
||||
}
|
1143
vendor/proc-macro2/src/fallback.rs
vendored
Normal file
1143
vendor/proc-macro2/src/fallback.rs
vendored
Normal file
File diff suppressed because it is too large
Load Diff
1328
vendor/proc-macro2/src/lib.rs
vendored
Normal file
1328
vendor/proc-macro2/src/lib.rs
vendored
Normal file
File diff suppressed because it is too large
Load Diff
29
vendor/proc-macro2/src/location.rs
vendored
Normal file
29
vendor/proc-macro2/src/location.rs
vendored
Normal file
@ -0,0 +1,29 @@
|
||||
use core::cmp::Ordering;
|
||||
|
||||
/// A line-column pair representing the start or end of a `Span`.
///
/// This type is semver exempt and not exposed by default.
#[cfg_attr(doc_cfg, doc(cfg(feature = "span-locations")))]
#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
pub struct LineColumn {
    /// The 1-indexed line in the source file on which the span starts or ends
    /// (inclusive).
    pub line: usize,
    /// The 0-indexed column (in UTF-8 characters) in the source file on which
    /// the span starts or ends (inclusive).
    pub column: usize,
}
|
||||
|
||||
impl Ord for LineColumn {
|
||||
fn cmp(&self, other: &Self) -> Ordering {
|
||||
self.line
|
||||
.cmp(&other.line)
|
||||
.then(self.column.cmp(&other.column))
|
||||
}
|
||||
}
|
||||
|
||||
impl PartialOrd for LineColumn {
|
||||
fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
|
||||
Some(self.cmp(other))
|
||||
}
|
||||
}
|
21
vendor/proc-macro2/src/marker.rs
vendored
Normal file
21
vendor/proc-macro2/src/marker.rs
vendored
Normal file
@ -0,0 +1,21 @@
|
||||
use alloc::rc::Rc;
|
||||
use core::marker::PhantomData;
|
||||
use core::panic::{RefUnwindSafe, UnwindSafe};
|
||||
|
||||
// Zero sized marker with the correct set of autotrait impls we want all proc
// macro types to have.
pub(crate) type Marker = PhantomData<ProcMacroAutoTraits>;

// Re-export so that `Marker` also names a usable *value* in expression
// position (the `PhantomData` unit constructor), not just a type.
pub(crate) use self::value::*;

mod value {
    pub(crate) use core::marker::PhantomData as Marker;
}

// The `Rc<()>` member makes this type non-Send and non-Sync, which PhantomData
// then propagates to every type that carries a Marker field.
pub(crate) struct ProcMacroAutoTraits(
    #[allow(dead_code)] // https://github.com/rust-lang/rust/issues/119645
    Rc<()>,
);

// Explicitly guarantee unwind safety independent of Rc's conditional impls.
impl UnwindSafe for ProcMacroAutoTraits {}
impl RefUnwindSafe for ProcMacroAutoTraits {}
|
996
vendor/proc-macro2/src/parse.rs
vendored
Normal file
996
vendor/proc-macro2/src/parse.rs
vendored
Normal file
@ -0,0 +1,996 @@
|
||||
use crate::fallback::{
|
||||
self, is_ident_continue, is_ident_start, Group, LexError, Literal, Span, TokenStream,
|
||||
TokenStreamBuilder,
|
||||
};
|
||||
use crate::{Delimiter, Punct, Spacing, TokenTree};
|
||||
use core::char;
|
||||
use core::str::{Bytes, CharIndices, Chars};
|
||||
|
||||
#[derive(Copy, Clone, Eq, PartialEq)]
pub(crate) struct Cursor<'a> {
    // The not-yet-consumed remainder of the source text.
    pub rest: &'a str,
    // Offset of `rest` from the start of the input, counted in chars, for
    // span bookkeeping when span locations are enabled.
    #[cfg(span_locations)]
    pub off: u32,
}

impl<'a> Cursor<'a> {
    /// Advances past `bytes` bytes. Panics (via `split_at`) if `bytes` is out
    /// of range or not on a char boundary of `rest`.
    pub fn advance(&self, bytes: usize) -> Cursor<'a> {
        let (_front, rest) = self.rest.split_at(bytes);
        Cursor {
            rest,
            // `off` counts chars, not bytes, so recount the skipped prefix.
            #[cfg(span_locations)]
            off: self.off + _front.chars().count() as u32,
        }
    }

    pub fn starts_with(&self, s: &str) -> bool {
        self.rest.starts_with(s)
    }

    pub fn starts_with_char(&self, ch: char) -> bool {
        self.rest.starts_with(ch)
    }

    pub fn starts_with_fn<Pattern>(&self, f: Pattern) -> bool
    where
        Pattern: FnMut(char) -> bool,
    {
        self.rest.starts_with(f)
    }

    pub fn is_empty(&self) -> bool {
        self.rest.is_empty()
    }

    // Remaining input length in bytes.
    fn len(&self) -> usize {
        self.rest.len()
    }

    fn as_bytes(&self) -> &'a [u8] {
        self.rest.as_bytes()
    }

    fn bytes(&self) -> Bytes<'a> {
        self.rest.bytes()
    }

    fn chars(&self) -> Chars<'a> {
        self.rest.chars()
    }

    fn char_indices(&self) -> CharIndices<'a> {
        self.rest.char_indices()
    }

    /// Consumes the literal prefix `tag`, or rejects without advancing.
    fn parse(&self, tag: &str) -> Result<Cursor<'a>, Reject> {
        if self.starts_with(tag) {
            Ok(self.advance(tag.len()))
        } else {
            Err(Reject)
        }
    }
}

// Unit error type: parsers report failure with no payload.
pub(crate) struct Reject;
// Parser result: remaining input paired with the parsed value.
type PResult<'a, O> = Result<(Cursor<'a>, O), Reject>;
|
||||
|
||||
// Skips whitespace and non-doc comments. Doc comments (`///`, `//!`, `/**`,
// `/*!`) are NOT skipped here; `doc_comment` turns them into tokens.
fn skip_whitespace(input: Cursor) -> Cursor {
    let mut s = input;

    while !s.is_empty() {
        let byte = s.as_bytes()[0];
        if byte == b'/' {
            // `//` line comment, excluding `///` outer and `//!` inner doc
            // comments — but `////…` is a plain comment again.
            if s.starts_with("//")
                && (!s.starts_with("///") || s.starts_with("////"))
                && !s.starts_with("//!")
            {
                let (cursor, _) = take_until_newline_or_eof(s);
                s = cursor;
                continue;
            } else if s.starts_with("/**/") {
                // Empty block comment, special-cased because `/**` would
                // otherwise look like an outer doc comment.
                s = s.advance(4);
                continue;
            } else if s.starts_with("/*")
                && (!s.starts_with("/**") || s.starts_with("/***"))
                && !s.starts_with("/*!")
            {
                match block_comment(s) {
                    Ok((rest, _)) => {
                        s = rest;
                        continue;
                    }
                    // Unterminated block comment: stop and let the caller
                    // produce a lex error at this position.
                    Err(Reject) => return s,
                }
            }
        }
        match byte {
            // ASCII space plus the 0x09..=0x0d controls (\t \n \v \f \r).
            b' ' | 0x09..=0x0d => {
                s = s.advance(1);
                continue;
            }
            b if b.is_ascii() => {}
            _ => {
                let ch = s.chars().next().unwrap();
                if is_whitespace(ch) {
                    s = s.advance(ch.len_utf8());
                    continue;
                }
            }
        }
        return s;
    }
    s
}

// Consumes a (possibly nested) `/* … */` block comment, returning the cursor
// after the comment together with the comment's full source text.
fn block_comment(input: Cursor) -> PResult<&str> {
    if !input.starts_with("/*") {
        return Err(Reject);
    }

    // Rust block comments nest, so track depth instead of stopping at the
    // first `*/`.
    let mut depth = 0usize;
    let bytes = input.as_bytes();
    let mut i = 0usize;
    let upper = bytes.len() - 1;

    while i < upper {
        if bytes[i] == b'/' && bytes[i + 1] == b'*' {
            depth += 1;
            i += 1; // eat '*'
        } else if bytes[i] == b'*' && bytes[i + 1] == b'/' {
            depth -= 1;
            if depth == 0 {
                return Ok((input.advance(i + 2), &input.rest[..i + 2]));
            }
            i += 1; // eat '/'
        }
        i += 1;
    }

    Err(Reject)
}
|
||||
|
||||
// Rust treats left-to-right mark and right-to-left mark as whitespace
fn is_whitespace(ch: char) -> bool {
    matches!(ch, '\u{200e}' | '\u{200f}') || ch.is_whitespace()
}
|
||||
|
||||
fn word_break(input: Cursor) -> Result<Cursor, Reject> {
|
||||
match input.chars().next() {
|
||||
Some(ch) if is_ident_continue(ch) => Err(Reject),
|
||||
Some(_) | None => Ok(input),
|
||||
}
|
||||
}
|
||||
|
||||
// Rustc's representation of a macro expansion error in expression position or
// type position.
const ERROR: &str = "(/*ERROR*/)";

// Entry point of the fallback lexer: parses a whole source string into a
// TokenStream, using an explicit stack (not recursion) for nested groups.
pub(crate) fn token_stream(mut input: Cursor) -> Result<TokenStream, LexError> {
    let mut trees = TokenStreamBuilder::new();
    // Each frame holds the enclosing stream's builder and its open delimiter
    // (plus the group's start offset when span locations are enabled).
    let mut stack = Vec::new();

    loop {
        input = skip_whitespace(input);

        // Doc comments expand to `#[doc = "…"]` / `#![doc = "…"]` tokens.
        if let Ok((rest, ())) = doc_comment(input, &mut trees) {
            input = rest;
            continue;
        }

        #[cfg(span_locations)]
        let lo = input.off;

        let first = match input.bytes().next() {
            Some(first) => first,
            None => match stack.last() {
                // End of input with no open group: done.
                None => return Ok(trees.build()),
                // End of input inside an unclosed group: error at its opener.
                #[cfg(span_locations)]
                Some((lo, _frame)) => {
                    return Err(LexError {
                        span: Span { lo: *lo, hi: *lo },
                    })
                }
                #[cfg(not(span_locations))]
                Some(_frame) => return Err(LexError { span: Span {} }),
            },
        };

        if let Some(open_delimiter) = match first {
            // `(/*ERROR*/)` is lexed as an error literal, not as a group.
            b'(' if !input.starts_with(ERROR) => Some(Delimiter::Parenthesis),
            b'[' => Some(Delimiter::Bracket),
            b'{' => Some(Delimiter::Brace),
            _ => None,
        } {
            // Open a group: save the current builder and start a fresh one.
            input = input.advance(1);
            let frame = (open_delimiter, trees);
            #[cfg(span_locations)]
            let frame = (lo, frame);
            stack.push(frame);
            trees = TokenStreamBuilder::new();
        } else if let Some(close_delimiter) = match first {
            b')' => Some(Delimiter::Parenthesis),
            b']' => Some(Delimiter::Bracket),
            b'}' => Some(Delimiter::Brace),
            _ => None,
        } {
            // Close a group: the delimiter must match the innermost open one.
            let frame = match stack.pop() {
                Some(frame) => frame,
                None => return Err(lex_error(input)),
            };
            #[cfg(span_locations)]
            let (lo, frame) = frame;
            let (open_delimiter, outer) = frame;
            if open_delimiter != close_delimiter {
                return Err(lex_error(input));
            }
            input = input.advance(1);
            let mut g = Group::new(open_delimiter, trees.build());
            g.set_span(Span {
                #[cfg(span_locations)]
                lo,
                #[cfg(span_locations)]
                hi: input.off,
            });
            trees = outer;
            trees.push_token_from_parser(TokenTree::Group(crate::Group::_new_fallback(g)));
        } else {
            // Not a delimiter: must be a leaf token (literal/punct/ident).
            let (rest, mut tt) = match leaf_token(input) {
                Ok((rest, tt)) => (rest, tt),
                Err(Reject) => return Err(lex_error(input)),
            };
            tt.set_span(crate::Span::_new_fallback(Span {
                #[cfg(span_locations)]
                lo,
                #[cfg(span_locations)]
                hi: rest.off,
            }));
            trees.push_token_from_parser(tt);
            input = rest;
        }
    }
}

// Builds a LexError whose (zero-width) span points at the cursor position.
fn lex_error(cursor: Cursor) -> LexError {
    #[cfg(not(span_locations))]
    let _ = cursor;
    LexError {
        span: Span {
            #[cfg(span_locations)]
            lo: cursor.off,
            #[cfg(span_locations)]
            hi: cursor.off,
        },
    }
}

// Parses a single non-group token: literal, punct, ident, or rustc's
// `(/*ERROR*/)` placeholder.
fn leaf_token(input: Cursor) -> PResult<TokenTree> {
    if let Ok((input, l)) = literal(input) {
        // must be parsed before ident
        Ok((input, TokenTree::Literal(crate::Literal::_new_fallback(l))))
    } else if let Ok((input, p)) = punct(input) {
        Ok((input, TokenTree::Punct(p)))
    } else if let Ok((input, i)) = ident(input) {
        Ok((input, TokenTree::Ident(i)))
    } else if input.starts_with(ERROR) {
        let rest = input.advance(ERROR.len());
        let repr = crate::Literal::_new_fallback(Literal::_new(ERROR.to_owned()));
        Ok((rest, TokenTree::Literal(repr)))
    } else {
        Err(Reject)
    }
}
|
||||
|
||||
// Parses an identifier, rejecting prefixes that actually begin string/char
// literals (`r"`, `b'`, `br#`, `c"`, …) so those lex as literals instead.
fn ident(input: Cursor) -> PResult<crate::Ident> {
    if [
        "r\"", "r#\"", "r##", "b\"", "b\'", "br\"", "br#", "c\"", "cr\"", "cr#",
    ]
    .iter()
    .any(|prefix| input.starts_with(prefix))
    {
        Err(Reject)
    } else {
        ident_any(input)
    }
}

// Parses an identifier, raw (`r#ident`) or plain.
fn ident_any(input: Cursor) -> PResult<crate::Ident> {
    let raw = input.starts_with("r#");
    // Skip the 2-byte `r#` prefix when present.
    let rest = input.advance((raw as usize) << 1);

    let (rest, sym) = ident_not_raw(rest)?;

    if !raw {
        let ident = crate::Ident::_new(crate::imp::Ident::new_unchecked(
            sym,
            fallback::Span::call_site(),
        ));
        return Ok((rest, ident));
    }

    // These names are not permitted as raw identifiers.
    match sym {
        "_" | "super" | "self" | "Self" | "crate" => return Err(Reject),
        _ => {}
    }

    let ident = crate::Ident::_new(crate::imp::Ident::new_raw_unchecked(
        sym,
        fallback::Span::call_site(),
    ));
    Ok((rest, ident))
}
|
||||
|
||||
fn ident_not_raw(input: Cursor) -> PResult<&str> {
|
||||
let mut chars = input.char_indices();
|
||||
|
||||
match chars.next() {
|
||||
Some((_, ch)) if is_ident_start(ch) => {}
|
||||
_ => return Err(Reject),
|
||||
}
|
||||
|
||||
let mut end = input.len();
|
||||
for (i, ch) in chars {
|
||||
if !is_ident_continue(ch) {
|
||||
end = i;
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
Ok((input.advance(end), &input.rest[..end]))
|
||||
}
|
||||
|
||||
// Parses any literal and captures its source text into a fallback Literal.
pub(crate) fn literal(input: Cursor) -> PResult<Literal> {
    let rest = literal_nocapture(input)?;
    // The literal's repr is exactly the text the sub-parser consumed.
    let end = input.len() - rest.len();
    Ok((rest, Literal::_new(input.rest[..end].to_string())))
}
|
||||
|
||||
fn literal_nocapture(input: Cursor) -> Result<Cursor, Reject> {
|
||||
if let Ok(ok) = string(input) {
|
||||
Ok(ok)
|
||||
} else if let Ok(ok) = byte_string(input) {
|
||||
Ok(ok)
|
||||
} else if let Ok(ok) = c_string(input) {
|
||||
Ok(ok)
|
||||
} else if let Ok(ok) = byte(input) {
|
||||
Ok(ok)
|
||||
} else if let Ok(ok) = character(input) {
|
||||
Ok(ok)
|
||||
} else if let Ok(ok) = float(input) {
|
||||
Ok(ok)
|
||||
} else if let Ok(ok) = int(input) {
|
||||
Ok(ok)
|
||||
} else {
|
||||
Err(Reject)
|
||||
}
|
||||
}
|
||||
|
||||
fn literal_suffix(input: Cursor) -> Cursor {
|
||||
match ident_not_raw(input) {
|
||||
Ok((input, _)) => input,
|
||||
Err(Reject) => input,
|
||||
}
|
||||
}
|
||||
|
||||
// Parses `"…"` cooked strings and `r"…"` / `r#"…"#` raw strings.
fn string(input: Cursor) -> Result<Cursor, Reject> {
    if let Ok(input) = input.parse("\"") {
        cooked_string(input)
    } else if let Ok(input) = input.parse("r") {
        raw_string(input)
    } else {
        Err(Reject)
    }
}

// Body of a cooked string literal; the opening `"` has been consumed.
// Validates escape sequences along the way.
fn cooked_string(mut input: Cursor) -> Result<Cursor, Reject> {
    let mut chars = input.char_indices();

    while let Some((i, ch)) = chars.next() {
        match ch {
            '"' => {
                let input = input.advance(i + 1);
                return Ok(literal_suffix(input));
            }
            // A carriage return is only valid as part of `\r\n`.
            '\r' => match chars.next() {
                Some((_, '\n')) => {}
                _ => break,
            },
            '\\' => match chars.next() {
                Some((_, 'x')) => {
                    backslash_x_char(&mut chars)?;
                }
                Some((_, 'n' | 'r' | 't' | '\\' | '\'' | '"' | '0')) => {}
                Some((_, 'u')) => {
                    backslash_u(&mut chars)?;
                }
                // Line continuation: `\` before a newline swallows the
                // newline plus following whitespace.
                Some((newline, ch @ ('\n' | '\r'))) => {
                    input = input.advance(newline + 1);
                    trailing_backslash(&mut input, ch as u8)?;
                    chars = input.char_indices();
                }
                _ => break,
            },
            _ch => {}
        }
    }
    Err(Reject)
}

// Body of a raw string literal; the leading `r` has been consumed. No escape
// processing; the string ends at `"` followed by the matching `#` run.
fn raw_string(input: Cursor) -> Result<Cursor, Reject> {
    let (input, delimiter) = delimiter_of_raw_string(input)?;
    let mut bytes = input.bytes().enumerate();
    while let Some((i, byte)) = bytes.next() {
        match byte {
            b'"' if input.rest[i + 1..].starts_with(delimiter) => {
                let rest = input.advance(i + 1 + delimiter.len());
                return Ok(literal_suffix(rest));
            }
            // Even raw strings may not contain a bare `\r`.
            b'\r' => match bytes.next() {
                Some((_, b'\n')) => {}
                _ => break,
            },
            _ => {}
        }
    }
    Err(Reject)
}
|
||||
|
||||
fn byte_string(input: Cursor) -> Result<Cursor, Reject> {
|
||||
if let Ok(input) = input.parse("b\"") {
|
||||
cooked_byte_string(input)
|
||||
} else if let Ok(input) = input.parse("br") {
|
||||
raw_byte_string(input)
|
||||
} else {
|
||||
Err(Reject)
|
||||
}
|
||||
}
|
||||
|
||||
// Body of a cooked byte string (`b"…"`); the `b"` has been consumed. Contents
// must be ASCII outside of escapes.
fn cooked_byte_string(mut input: Cursor) -> Result<Cursor, Reject> {
    let mut bytes = input.bytes().enumerate();
    while let Some((offset, b)) = bytes.next() {
        match b {
            b'"' => {
                let input = input.advance(offset + 1);
                return Ok(literal_suffix(input));
            }
            // Bare `\r` must be part of `\r\n`.
            b'\r' => match bytes.next() {
                Some((_, b'\n')) => {}
                _ => break,
            },
            b'\\' => match bytes.next() {
                Some((_, b'x')) => {
                    backslash_x_byte(&mut bytes)?;
                }
                Some((_, b'n' | b'r' | b't' | b'\\' | b'0' | b'\'' | b'"')) => {}
                // Line-continuation escape.
                Some((newline, b @ (b'\n' | b'\r'))) => {
                    input = input.advance(newline + 1);
                    trailing_backslash(&mut input, b)?;
                    bytes = input.bytes().enumerate();
                }
                _ => break,
            },
            b if b.is_ascii() => {}
            // Non-ASCII bytes are not allowed in a byte string.
            _ => break,
        }
    }
    Err(Reject)
}

// Parses the `###"` opener of a raw string, returning the cursor just past
// the quote and the run of `#` that must follow the closing quote.
fn delimiter_of_raw_string(input: Cursor) -> PResult<&str> {
    for (i, byte) in input.bytes().enumerate() {
        match byte {
            b'"' => {
                if i > 255 {
                    // https://github.com/rust-lang/rust/pull/95251
                    return Err(Reject);
                }
                return Ok((input.advance(i + 1), &input.rest[..i]));
            }
            b'#' => {}
            _ => break,
        }
    }
    Err(Reject)
}

// Body of a raw byte string (`br"…"` / `br#"…"#`); the `br` has been consumed.
fn raw_byte_string(input: Cursor) -> Result<Cursor, Reject> {
    let (input, delimiter) = delimiter_of_raw_string(input)?;
    let mut bytes = input.bytes().enumerate();
    while let Some((i, byte)) = bytes.next() {
        match byte {
            b'"' if input.rest[i + 1..].starts_with(delimiter) => {
                let rest = input.advance(i + 1 + delimiter.len());
                return Ok(literal_suffix(rest));
            }
            b'\r' => match bytes.next() {
                Some((_, b'\n')) => {}
                _ => break,
            },
            other => {
                // Raw byte strings may only contain ASCII.
                if !other.is_ascii() {
                    break;
                }
            }
        }
    }
    Err(Reject)
}
|
||||
|
||||
fn c_string(input: Cursor) -> Result<Cursor, Reject> {
|
||||
if let Ok(input) = input.parse("c\"") {
|
||||
cooked_c_string(input)
|
||||
} else if let Ok(input) = input.parse("cr") {
|
||||
raw_c_string(input)
|
||||
} else {
|
||||
Err(Reject)
|
||||
}
|
||||
}
|
||||
|
||||
// Body of a raw C string (`cr"…"`); like a raw string, but an interior NUL
// byte is rejected.
fn raw_c_string(input: Cursor) -> Result<Cursor, Reject> {
    let (input, delimiter) = delimiter_of_raw_string(input)?;
    let mut bytes = input.bytes().enumerate();
    while let Some((i, byte)) = bytes.next() {
        match byte {
            b'"' if input.rest[i + 1..].starts_with(delimiter) => {
                let rest = input.advance(i + 1 + delimiter.len());
                return Ok(literal_suffix(rest));
            }
            b'\r' => match bytes.next() {
                Some((_, b'\n')) => {}
                _ => break,
            },
            // C strings are NUL-terminated; interior NUL is not allowed.
            b'\0' => break,
            _ => {}
        }
    }
    Err(Reject)
}

// Body of a cooked C string (`c"…"`). Escapes are validated and NUL is
// rejected both literally and via `\x00` / `\u{0}` escapes.
fn cooked_c_string(mut input: Cursor) -> Result<Cursor, Reject> {
    let mut chars = input.char_indices();

    while let Some((i, ch)) = chars.next() {
        match ch {
            '"' => {
                let input = input.advance(i + 1);
                return Ok(literal_suffix(input));
            }
            '\r' => match chars.next() {
                Some((_, '\n')) => {}
                _ => break,
            },
            '\\' => match chars.next() {
                Some((_, 'x')) => {
                    // `\x00` would embed NUL; only nonzero values allowed.
                    backslash_x_nonzero(&mut chars)?;
                }
                Some((_, 'n' | 'r' | 't' | '\\' | '\'' | '"')) => {}
                Some((_, 'u')) => {
                    if backslash_u(&mut chars)? == '\0' {
                        break;
                    }
                }
                // Line-continuation escape.
                Some((newline, ch @ ('\n' | '\r'))) => {
                    input = input.advance(newline + 1);
                    trailing_backslash(&mut input, ch as u8)?;
                    chars = input.char_indices();
                }
                _ => break,
            },
            '\0' => break,
            _ch => {}
        }
    }
    Err(Reject)
}
|
||||
|
||||
// Parses a byte literal `b'…'`, validating the escape if present.
fn byte(input: Cursor) -> Result<Cursor, Reject> {
    let input = input.parse("b'")?;
    let mut bytes = input.bytes().enumerate();
    let ok = match bytes.next().map(|(_, b)| b) {
        Some(b'\\') => match bytes.next().map(|(_, b)| b) {
            Some(b'x') => backslash_x_byte(&mut bytes).is_ok(),
            Some(b'n' | b'r' | b't' | b'\\' | b'0' | b'\'' | b'"') => true,
            _ => false,
        },
        b => b.is_some(),
    };
    if !ok {
        return Err(Reject);
    }
    let (offset, _) = bytes.next().ok_or(Reject)?;
    // A non-ASCII char would put `offset` mid-character; reject rather than
    // slice on a non-boundary.
    if !input.chars().as_str().is_char_boundary(offset) {
        return Err(Reject);
    }
    let input = input.advance(offset).parse("'")?;
    Ok(literal_suffix(input))
}

// Parses a character literal `'…'`, validating the escape if present.
fn character(input: Cursor) -> Result<Cursor, Reject> {
    let input = input.parse("'")?;
    let mut chars = input.char_indices();
    let ok = match chars.next().map(|(_, ch)| ch) {
        Some('\\') => match chars.next().map(|(_, ch)| ch) {
            Some('x') => backslash_x_char(&mut chars).is_ok(),
            Some('u') => backslash_u(&mut chars).is_ok(),
            Some('n' | 'r' | 't' | '\\' | '0' | '\'' | '"') => true,
            _ => false,
        },
        ch => ch.is_some(),
    };
    if !ok {
        return Err(Reject);
    }
    let (idx, _) = chars.next().ok_or(Reject)?;
    let input = input.advance(idx).parse("'")?;
    Ok(literal_suffix(input))
}
|
||||
|
||||
// Pulls the next char from `$chars` and yields it if it matches `$pat`;
// otherwise rejects out of the *enclosing* function.
macro_rules! next_ch {
    ($chars:ident @ $pat:pat) => {
        match $chars.next() {
            Some((_, ch)) => match ch {
                $pat => ch,
                _ => return Err(Reject),
            },
            None => return Err(Reject),
        }
    };
}

// Validates a `\x??` escape in a char/string literal: the first digit is
// restricted to 0-7, keeping the value within ASCII (0x00-0x7f).
fn backslash_x_char<I>(chars: &mut I) -> Result<(), Reject>
where
    I: Iterator<Item = (usize, char)>,
{
    next_ch!(chars @ '0'..='7');
    next_ch!(chars @ '0'..='9' | 'a'..='f' | 'A'..='F');
    Ok(())
}

// Validates a `\x??` escape in a byte/byte-string literal: any two hex digits.
fn backslash_x_byte<I>(chars: &mut I) -> Result<(), Reject>
where
    I: Iterator<Item = (usize, u8)>,
{
    next_ch!(chars @ b'0'..=b'9' | b'a'..=b'f' | b'A'..=b'F');
    next_ch!(chars @ b'0'..=b'9' | b'a'..=b'f' | b'A'..=b'F');
    Ok(())
}

// Validates a `\x??` escape in a C string: two hex digits, but not `00`,
// since C strings cannot contain NUL.
fn backslash_x_nonzero<I>(chars: &mut I) -> Result<(), Reject>
where
    I: Iterator<Item = (usize, char)>,
{
    let first = next_ch!(chars @ '0'..='9' | 'a'..='f' | 'A'..='F');
    let second = next_ch!(chars @ '0'..='9' | 'a'..='f' | 'A'..='F');
    if first == '0' && second == '0' {
        Err(Reject)
    } else {
        Ok(())
    }
}

// Validates a `\u{…}` escape (1-6 hex digits; `_` allowed after the first
// digit) and returns the decoded char.
fn backslash_u<I>(chars: &mut I) -> Result<char, Reject>
where
    I: Iterator<Item = (usize, char)>,
{
    next_ch!(chars @ '{');
    let mut value = 0;
    let mut len = 0;
    for (_, ch) in chars {
        let digit = match ch {
            '0'..='9' => ch as u8 - b'0',
            'a'..='f' => 10 + ch as u8 - b'a',
            'A'..='F' => 10 + ch as u8 - b'A',
            '_' if len > 0 => continue,
            // from_u32 rejects values that are not valid chars (surrogates,
            // out of range).
            '}' if len > 0 => return char::from_u32(value).ok_or(Reject),
            _ => break,
        };
        if len == 6 {
            break;
        }
        value *= 0x10;
        value += u32::from(digit);
        len += 1;
    }
    Err(Reject)
}

// After a line-continuation escape (`\` followed by a newline), consumes the
// run of whitespace that the escape swallows. `last` is the newline byte that
// followed the backslash.
fn trailing_backslash(input: &mut Cursor, mut last: u8) -> Result<(), Reject> {
    let mut whitespace = input.bytes().enumerate();
    loop {
        // A bare `\r` is only valid when immediately followed by `\n`.
        if last == b'\r' && whitespace.next().map_or(true, |(_, b)| b != b'\n') {
            return Err(Reject);
        }
        match whitespace.next() {
            Some((_, b @ (b' ' | b'\t' | b'\n' | b'\r'))) => {
                last = b;
            }
            Some((offset, _)) => {
                // First non-whitespace byte: resume the literal here.
                *input = input.advance(offset);
                return Ok(());
            }
            None => return Err(Reject),
        }
    }
}
|
||||
|
||||
fn float(input: Cursor) -> Result<Cursor, Reject> {
|
||||
let mut rest = float_digits(input)?;
|
||||
if let Some(ch) = rest.chars().next() {
|
||||
if is_ident_start(ch) {
|
||||
rest = ident_not_raw(rest)?.0;
|
||||
}
|
||||
}
|
||||
word_break(rest)
|
||||
}
|
||||
|
||||
// Parses the numeric part of a float literal: digits and `_`, at most one
// `.`, and an optional exponent. Rejects unless a dot or exponent is present
// (otherwise the token is an integer).
fn float_digits(input: Cursor) -> Result<Cursor, Reject> {
    let mut chars = input.chars().peekable();
    match chars.next() {
        Some(ch) if '0' <= ch && ch <= '9' => {}
        _ => return Err(Reject),
    }

    let mut len = 1;
    let mut has_dot = false;
    let mut has_exp = false;
    while let Some(&ch) = chars.peek() {
        match ch {
            '0'..='9' | '_' => {
                chars.next();
                len += 1;
            }
            '.' => {
                if has_dot {
                    break;
                }
                chars.next();
                // `1..2` is a range and `1.foo()` is a method call; neither
                // makes this a float.
                if chars
                    .peek()
                    .map_or(false, |&ch| ch == '.' || is_ident_start(ch))
                {
                    return Err(Reject);
                }
                len += 1;
                has_dot = true;
            }
            'e' | 'E' => {
                chars.next();
                len += 1;
                has_exp = true;
                break;
            }
            _ => break,
        }
    }

    if !(has_dot || has_exp) {
        return Err(Reject);
    }

    if has_exp {
        // If the exponent turns out malformed, a literal that had a dot can
        // still be accepted up to just before the `e`/`E`; without a dot
        // there is no valid float prefix to fall back to.
        let token_before_exp = if has_dot {
            Ok(input.advance(len - 1))
        } else {
            Err(Reject)
        };
        let mut has_sign = false;
        let mut has_exp_value = false;
        while let Some(&ch) = chars.peek() {
            match ch {
                '+' | '-' => {
                    // A sign is only valid once, directly after `e`/`E`.
                    if has_exp_value {
                        break;
                    }
                    if has_sign {
                        return token_before_exp;
                    }
                    chars.next();
                    len += 1;
                    has_sign = true;
                }
                '0'..='9' => {
                    chars.next();
                    len += 1;
                    has_exp_value = true;
                }
                '_' => {
                    chars.next();
                    len += 1;
                }
                _ => break,
            }
        }
        // An exponent with no digits is not a valid float ending.
        if !has_exp_value {
            return token_before_exp;
        }
    }

    Ok(input.advance(len))
}
|
||||
|
||||
fn int(input: Cursor) -> Result<Cursor, Reject> {
|
||||
let mut rest = digits(input)?;
|
||||
if let Some(ch) = rest.chars().next() {
|
||||
if is_ident_start(ch) {
|
||||
rest = ident_not_raw(rest)?.0;
|
||||
}
|
||||
}
|
||||
word_break(rest)
|
||||
}
|
||||
|
||||
// Parses the digits of an integer literal, handling the `0x`/`0o`/`0b` base
// prefixes and `_` separators.
fn digits(mut input: Cursor) -> Result<Cursor, Reject> {
    let base = if input.starts_with("0x") {
        input = input.advance(2);
        16
    } else if input.starts_with("0o") {
        input = input.advance(2);
        8
    } else if input.starts_with("0b") {
        input = input.advance(2);
        2
    } else {
        10
    };

    let mut len = 0;
    let mut empty = true;
    for b in input.bytes() {
        match b {
            b'0'..=b'9' => {
                let digit = (b - b'0') as u64;
                // A decimal digit beyond the base (e.g. `0b2`) is a hard
                // error, not the end of the literal.
                if digit >= base {
                    return Err(Reject);
                }
            }
            b'a'..=b'f' => {
                let digit = 10 + (b - b'a') as u64;
                // A letter beyond the base merely ends the digit run (it may
                // begin a suffix).
                if digit >= base {
                    break;
                }
            }
            b'A'..=b'F' => {
                let digit = 10 + (b - b'A') as u64;
                if digit >= base {
                    break;
                }
            }
            b'_' => {
                // `_` may not directly follow a bare decimal prefix.
                if empty && base == 10 {
                    return Err(Reject);
                }
                len += 1;
                continue;
            }
            _ => break,
        };
        len += 1;
        empty = false;
    }
    // At least one actual digit is required.
    if empty {
        Err(Reject)
    } else {
        Ok(input.advance(len))
    }
}
|
||||
|
||||
// Parses a punctuation token, deriving Alone/Joint spacing from whether
// another punct character immediately follows.
fn punct(input: Cursor) -> PResult<Punct> {
    let (rest, ch) = punct_char(input)?;
    if ch == '\'' {
        // `'` is a punct only when it begins a lifetime: an identifier must
        // follow, and that identifier must not be followed by another `'`
        // (which would make the whole thing a char literal).
        if ident_any(rest)?.0.starts_with_char('\'') {
            Err(Reject)
        } else {
            Ok((rest, Punct::new('\'', Spacing::Joint)))
        }
    } else {
        let kind = match punct_char(rest) {
            Ok(_) => Spacing::Joint,
            Err(Reject) => Spacing::Alone,
        };
        Ok((rest, Punct::new(ch, kind)))
    }
}

// Parses a single punctuation character from the recognized set.
fn punct_char(input: Cursor) -> PResult<char> {
    if input.starts_with("//") || input.starts_with("/*") {
        // Do not accept `/` of a comment as a punct.
        return Err(Reject);
    }

    let mut chars = input.chars();
    let first = match chars.next() {
        Some(ch) => ch,
        None => {
            return Err(Reject);
        }
    };
    let recognized = "~!@#$%^&*-=+|;:,<.>/?'";
    if recognized.contains(first) {
        Ok((input.advance(first.len_utf8()), first))
    } else {
        Err(Reject)
    }
}
|
||||
|
||||
/// Parse one doc comment (`///`, `//!`, `/** */`, `/*! */`) at the start of
/// `input` and push its desugared attribute form — `#[doc = "..."]` for outer
/// comments, `#![doc = "..."]` for inner ones — onto `trees`.
fn doc_comment<'a>(input: Cursor<'a>, trees: &mut TokenStreamBuilder) -> PResult<'a, ()> {
    #[cfg(span_locations)]
    let lo = input.off;
    let (rest, (comment, inner)) = doc_comment_contents(input)?;
    // Span covering the entire comment; applied to every synthesized token.
    let fallback_span = Span {
        #[cfg(span_locations)]
        lo,
        #[cfg(span_locations)]
        hi: rest.off,
    };
    let span = crate::Span::_new_fallback(fallback_span);

    // Reject bare carriage returns: every `\r` inside the comment text must be
    // immediately followed by `\n`.
    let mut scan_for_bare_cr = comment;
    while let Some(cr) = scan_for_bare_cr.find('\r') {
        let rest = &scan_for_bare_cr[cr + 1..];
        if !rest.starts_with('\n') {
            return Err(Reject);
        }
        scan_for_bare_cr = rest;
    }

    // Emit `#`, plus `!` when this is an inner (`//!` / `/*!`) comment.
    let mut pound = Punct::new('#', Spacing::Alone);
    pound.set_span(span);
    trees.push_token_from_parser(TokenTree::Punct(pound));

    if inner {
        let mut bang = Punct::new('!', Spacing::Alone);
        bang.set_span(span);
        trees.push_token_from_parser(TokenTree::Punct(bang));
    }

    // Emit the bracketed body: `[doc = "comment text"]`.
    let doc_ident = crate::Ident::_new(crate::imp::Ident::new_unchecked("doc", fallback_span));
    let mut equal = Punct::new('=', Spacing::Alone);
    equal.set_span(span);
    let mut literal = crate::Literal::string(comment);
    literal.set_span(span);
    let mut bracketed = TokenStreamBuilder::with_capacity(3);
    bracketed.push_token_from_parser(TokenTree::Ident(doc_ident));
    bracketed.push_token_from_parser(TokenTree::Punct(equal));
    bracketed.push_token_from_parser(TokenTree::Literal(literal));
    let group = Group::new(Delimiter::Bracket, bracketed.build());
    let mut group = crate::Group::_new_fallback(group);
    group.set_span(span);
    trees.push_token_from_parser(TokenTree::Group(group));

    Ok((rest, ()))
}
|
||||
|
||||
/// Recognize one doc comment at the start of `input`.
///
/// On success, returns the remaining cursor together with the comment text
/// (delimiters stripped) and a flag that is `true` for inner (`//!` / `/*!`)
/// comments. `////...` and `/***...` runs are rejected here because they are
/// ordinary comments, not doc comments.
fn doc_comment_contents(input: Cursor) -> PResult<(&str, bool)> {
    if input.starts_with("//!") {
        let input = input.advance(3);
        let (input, s) = take_until_newline_or_eof(input);
        Ok((input, (s, true)))
    } else if input.starts_with("/*!") {
        // Strip the `/*!` opener and `*/` closer from the matched text.
        let (input, s) = block_comment(input)?;
        Ok((input, (&s[3..s.len() - 2], true)))
    } else if input.starts_with("///") {
        let input = input.advance(3);
        if input.starts_with_char('/') {
            // Four or more slashes: plain comment, not a doc comment.
            return Err(Reject);
        }
        let (input, s) = take_until_newline_or_eof(input);
        Ok((input, (s, false)))
    } else if input.starts_with("/**") && !input.rest[3..].starts_with('*') {
        // `/**` followed by another `*` is a plain comment, excluded above.
        let (input, s) = block_comment(input)?;
        Ok((input, (&s[3..s.len() - 2], false)))
    } else {
        Err(Reject)
    }
}
|
||||
|
||||
/// Consume input up to (but not including) the next line break, or to the end
/// of input if none occurs. A `\r\n` pair is consumed as a unit, with only the
/// text before the `\r` returned; a bare `\r` is left in place for the caller.
fn take_until_newline_or_eof(input: Cursor) -> (Cursor, &str) {
    for (i, ch) in input.char_indices() {
        match ch {
            '\n' => return (input.advance(i), &input.rest[..i]),
            '\r' if input.rest[i + 1..].starts_with('\n') => {
                return (input.advance(i + 1), &input.rest[..i]);
            }
            _ => {}
        }
    }

    // No line break found: take everything.
    (input.advance(input.len()), input.rest)
}
|
145
vendor/proc-macro2/src/rcvec.rs
vendored
Normal file
145
vendor/proc-macro2/src/rcvec.rs
vendored
Normal file
@ -0,0 +1,145 @@
|
||||
use alloc::rc::Rc;
|
||||
use alloc::vec;
|
||||
use core::mem;
|
||||
use core::panic::RefUnwindSafe;
|
||||
use core::slice;
|
||||
|
||||
/// Shared, reference-counted vector. Cloning is an O(1) refcount bump; the
/// element storage is shared until a mutation forces a copy.
pub(crate) struct RcVec<T> {
    inner: Rc<Vec<T>>,
}
|
||||
|
||||
/// Exclusively owned vector used to accumulate elements before freezing them
/// into a shareable `RcVec` via `build`.
pub(crate) struct RcVecBuilder<T> {
    inner: Vec<T>,
}
|
||||
|
||||
/// Temporary mutable view into the elements of an `RcVec` or `RcVecBuilder`.
pub(crate) struct RcVecMut<'a, T> {
    inner: &'a mut Vec<T>,
}
|
||||
|
||||
/// Owning iterator over elements moved out of an `RcVecBuilder`.
#[derive(Clone)]
pub(crate) struct RcVecIntoIter<T> {
    inner: vec::IntoIter<T>,
}
|
||||
|
||||
impl<T> RcVec<T> {
    pub fn is_empty(&self) -> bool {
        self.inner.is_empty()
    }

    pub fn len(&self) -> usize {
        self.inner.len()
    }

    pub fn iter(&self) -> slice::Iter<T> {
        self.inner.iter()
    }

    /// Mutable access to the elements. If the storage is shared, the vector is
    /// cloned first (copy-on-write via `Rc::make_mut`).
    pub fn make_mut(&mut self) -> RcVecMut<T>
    where
        T: Clone,
    {
        RcVecMut {
            inner: Rc::make_mut(&mut self.inner),
        }
    }

    /// Mutable access only when this handle is the sole owner; `None` if the
    /// storage is shared with other `RcVec` clones.
    pub fn get_mut(&mut self) -> Option<RcVecMut<T>> {
        let inner = Rc::get_mut(&mut self.inner)?;
        Some(RcVecMut { inner })
    }

    /// Convert back into a builder. Steals the existing allocation when this
    /// is the unique owner; otherwise clones the elements.
    pub fn make_owned(mut self) -> RcVecBuilder<T>
    where
        T: Clone,
    {
        let vec = if let Some(owned) = Rc::get_mut(&mut self.inner) {
            // Unique owner: move the Vec out, leaving an empty one behind.
            mem::take(owned)
        } else {
            Vec::clone(&self.inner)
        };
        RcVecBuilder { inner: vec }
    }
}
|
||||
|
||||
impl<T> RcVecBuilder<T> {
    pub fn new() -> Self {
        RcVecBuilder { inner: Vec::new() }
    }

    pub fn with_capacity(cap: usize) -> Self {
        RcVecBuilder {
            inner: Vec::with_capacity(cap),
        }
    }

    pub fn push(&mut self, element: T) {
        self.inner.push(element);
    }

    pub fn extend(&mut self, iter: impl IntoIterator<Item = T>) {
        self.inner.extend(iter);
    }

    /// Borrow the accumulated elements mutably without freezing the builder.
    pub fn as_mut(&mut self) -> RcVecMut<T> {
        RcVecMut {
            inner: &mut self.inner,
        }
    }

    /// Freeze into a shareable `RcVec`; moves the allocation, no copy.
    pub fn build(self) -> RcVec<T> {
        RcVec {
            inner: Rc::new(self.inner),
        }
    }
}
|
||||
|
||||
impl<'a, T> RcVecMut<'a, T> {
    pub fn push(&mut self, element: T) {
        self.inner.push(element);
    }

    pub fn extend(&mut self, iter: impl IntoIterator<Item = T>) {
        self.inner.extend(iter);
    }

    pub fn pop(&mut self) -> Option<T> {
        self.inner.pop()
    }

    /// Reborrow this view with a shorter lifetime.
    pub fn as_mut(&mut self) -> RcVecMut<T> {
        RcVecMut { inner: self.inner }
    }
}
|
||||
|
||||
impl<T> Clone for RcVec<T> {
    // O(1): bumps the reference count; element storage stays shared.
    fn clone(&self) -> Self {
        RcVec {
            inner: Rc::clone(&self.inner),
        }
    }
}
|
||||
|
||||
impl<T> IntoIterator for RcVecBuilder<T> {
    type Item = T;
    type IntoIter = RcVecIntoIter<T>;

    // Consumes the builder, yielding owned elements.
    fn into_iter(self) -> Self::IntoIter {
        RcVecIntoIter {
            inner: self.inner.into_iter(),
        }
    }
}
|
||||
|
||||
// Thin delegation to the underlying `vec::IntoIter`, including its exact
// `size_hint` so `collect` can preallocate.
impl<T> Iterator for RcVecIntoIter<T> {
    type Item = T;

    fn next(&mut self) -> Option<Self::Item> {
        self.inner.next()
    }

    fn size_hint(&self) -> (usize, Option<usize>) {
        self.inner.size_hint()
    }
}
|
||||
|
||||
// Explicitly propagate RefUnwindSafe from T through the Rc<Vec<T>> storage.
impl<T> RefUnwindSafe for RcVec<T> where T: RefUnwindSafe {}
|
930
vendor/proc-macro2/src/wrapper.rs
vendored
Normal file
930
vendor/proc-macro2/src/wrapper.rs
vendored
Normal file
@ -0,0 +1,930 @@
|
||||
use crate::detection::inside_proc_macro;
|
||||
#[cfg(span_locations)]
|
||||
use crate::location::LineColumn;
|
||||
use crate::{fallback, Delimiter, Punct, Spacing, TokenTree};
|
||||
use core::fmt::{self, Debug, Display};
|
||||
use core::ops::RangeBounds;
|
||||
use core::str::FromStr;
|
||||
use std::panic;
|
||||
#[cfg(super_unstable)]
|
||||
use std::path::PathBuf;
|
||||
|
||||
/// A token stream backed either by the real compiler (`proc_macro`) when
/// executing inside a procedural macro, or by the pure-Rust fallback
/// implementation everywhere else.
#[derive(Clone)]
pub(crate) enum TokenStream {
    Compiler(DeferredTokenStream),
    Fallback(fallback::TokenStream),
}
|
||||
|
||||
// Work around https://github.com/rust-lang/rust/issues/65080.
// In `impl Extend<TokenTree> for TokenStream` which is used heavily by quote,
// we hold on to the appended tokens and do proc_macro::TokenStream::extend as
// late as possible to batch together consecutive uses of the Extend impl.
#[derive(Clone)]
pub(crate) struct DeferredTokenStream {
    stream: proc_macro::TokenStream,
    // Tokens appended via Extend that have not yet been flushed into `stream`.
    extra: Vec<proc_macro::TokenTree>,
}
|
||||
|
||||
/// Lexing failure from whichever implementation attempted the parse.
pub(crate) enum LexError {
    Compiler(proc_macro::LexError),
    Fallback(fallback::LexError),

    // Rustc was supposed to return a LexError, but it panicked instead.
    // https://github.com/rust-lang/rust/issues/58736
    CompilerPanic,
}
|
||||
|
||||
/// Panic on an impossible mix of Compiler and Fallback values. `line` is the
/// caller's `line!()`, identifying which dispatch site hit the mismatch. With
/// the `procmacro2_backtrace` cfg enabled, a forced backtrace is appended.
#[cold]
fn mismatch(line: u32) -> ! {
    #[cfg(procmacro2_backtrace)]
    {
        let backtrace = std::backtrace::Backtrace::force_capture();
        panic!("compiler/fallback mismatch #{}\n\n{}", line, backtrace)
    }
    #[cfg(not(procmacro2_backtrace))]
    {
        panic!("compiler/fallback mismatch #{}", line)
    }
}
|
||||
|
||||
impl DeferredTokenStream {
    fn new(stream: proc_macro::TokenStream) -> Self {
        DeferredTokenStream {
            stream,
            extra: Vec::new(),
        }
    }

    fn is_empty(&self) -> bool {
        self.stream.is_empty() && self.extra.is_empty()
    }

    /// Flush any deferred `extra` tokens into the underlying compiler stream.
    fn evaluate_now(&mut self) {
        // If-check provides a fast short circuit for the common case of `extra`
        // being empty, which saves a round trip over the proc macro bridge.
        // Improves macro expansion time in winrt by 6% in debug mode.
        if !self.extra.is_empty() {
            self.stream.extend(self.extra.drain(..));
        }
    }

    fn into_token_stream(mut self) -> proc_macro::TokenStream {
        self.evaluate_now();
        self.stream
    }
}
|
||||
|
||||
impl TokenStream {
    /// Empty stream. Chooses the compiler representation when running inside a
    /// procedural macro invocation, the fallback otherwise.
    pub fn new() -> Self {
        if inside_proc_macro() {
            TokenStream::Compiler(DeferredTokenStream::new(proc_macro::TokenStream::new()))
        } else {
            TokenStream::Fallback(fallback::TokenStream::new())
        }
    }

    pub fn is_empty(&self) -> bool {
        match self {
            TokenStream::Compiler(tts) => tts.is_empty(),
            TokenStream::Fallback(tts) => tts.is_empty(),
        }
    }

    // Extract the compiler stream; panics via `mismatch` on a Fallback value.
    fn unwrap_nightly(self) -> proc_macro::TokenStream {
        match self {
            TokenStream::Compiler(s) => s.into_token_stream(),
            TokenStream::Fallback(_) => mismatch(line!()),
        }
    }

    // Extract the fallback stream; panics via `mismatch` on a Compiler value.
    fn unwrap_stable(self) -> fallback::TokenStream {
        match self {
            TokenStream::Compiler(_) => mismatch(line!()),
            TokenStream::Fallback(s) => s,
        }
    }
}
|
||||
|
||||
impl FromStr for TokenStream {
    type Err = LexError;

    fn from_str(src: &str) -> Result<TokenStream, LexError> {
        if inside_proc_macro() {
            // Lex with the compiler, guarding against rustc panics (see
            // proc_macro_parse).
            Ok(TokenStream::Compiler(DeferredTokenStream::new(
                proc_macro_parse(src)?,
            )))
        } else {
            Ok(TokenStream::Fallback(src.parse()?))
        }
    }
}
|
||||
|
||||
// Work around https://github.com/rust-lang/rust/issues/58736.
// Parse via the compiler's lexer, converting a panic inside rustc into
// `LexError::CompilerPanic` instead of aborting the whole macro expansion.
fn proc_macro_parse(src: &str) -> Result<proc_macro::TokenStream, LexError> {
    let result = panic::catch_unwind(|| src.parse().map_err(LexError::Compiler));
    result.unwrap_or_else(|_| Err(LexError::CompilerPanic))
}
|
||||
|
||||
impl Display for TokenStream {
|
||||
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
||||
match self {
|
||||
TokenStream::Compiler(tts) => Display::fmt(&tts.clone().into_token_stream(), f),
|
||||
TokenStream::Fallback(tts) => Display::fmt(tts, f),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl From<proc_macro::TokenStream> for TokenStream {
|
||||
fn from(inner: proc_macro::TokenStream) -> Self {
|
||||
TokenStream::Compiler(DeferredTokenStream::new(inner))
|
||||
}
|
||||
}
|
||||
|
||||
impl From<TokenStream> for proc_macro::TokenStream {
|
||||
fn from(inner: TokenStream) -> Self {
|
||||
match inner {
|
||||
TokenStream::Compiler(inner) => inner.into_token_stream(),
|
||||
TokenStream::Fallback(inner) => inner.to_string().parse().unwrap(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl From<fallback::TokenStream> for TokenStream {
|
||||
fn from(inner: fallback::TokenStream) -> Self {
|
||||
TokenStream::Fallback(inner)
|
||||
}
|
||||
}
|
||||
|
||||
// Assumes inside_proc_macro().
// Convert a proc-macro2 token tree into the compiler's token type. Panics via
// `mismatch` if any component holds a fallback value.
fn into_compiler_token(token: TokenTree) -> proc_macro::TokenTree {
    match token {
        TokenTree::Group(tt) => tt.inner.unwrap_nightly().into(),
        TokenTree::Punct(tt) => {
            // Punct carries no inner compiler handle; rebuild it field by
            // field and carry the span over explicitly.
            let spacing = match tt.spacing() {
                Spacing::Joint => proc_macro::Spacing::Joint,
                Spacing::Alone => proc_macro::Spacing::Alone,
            };
            let mut punct = proc_macro::Punct::new(tt.as_char(), spacing);
            punct.set_span(tt.span().inner.unwrap_nightly());
            punct.into()
        }
        TokenTree::Ident(tt) => tt.inner.unwrap_nightly().into(),
        TokenTree::Literal(tt) => tt.inner.unwrap_nightly().into(),
    }
}
|
||||
|
||||
impl From<TokenTree> for TokenStream {
|
||||
fn from(token: TokenTree) -> Self {
|
||||
if inside_proc_macro() {
|
||||
TokenStream::Compiler(DeferredTokenStream::new(into_compiler_token(token).into()))
|
||||
} else {
|
||||
TokenStream::Fallback(token.into())
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl FromIterator<TokenTree> for TokenStream {
|
||||
fn from_iter<I: IntoIterator<Item = TokenTree>>(trees: I) -> Self {
|
||||
if inside_proc_macro() {
|
||||
TokenStream::Compiler(DeferredTokenStream::new(
|
||||
trees.into_iter().map(into_compiler_token).collect(),
|
||||
))
|
||||
} else {
|
||||
TokenStream::Fallback(trees.into_iter().collect())
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl FromIterator<TokenStream> for TokenStream {
|
||||
fn from_iter<I: IntoIterator<Item = TokenStream>>(streams: I) -> Self {
|
||||
let mut streams = streams.into_iter();
|
||||
match streams.next() {
|
||||
Some(TokenStream::Compiler(mut first)) => {
|
||||
first.evaluate_now();
|
||||
first.stream.extend(streams.map(|s| match s {
|
||||
TokenStream::Compiler(s) => s.into_token_stream(),
|
||||
TokenStream::Fallback(_) => mismatch(line!()),
|
||||
}));
|
||||
TokenStream::Compiler(first)
|
||||
}
|
||||
Some(TokenStream::Fallback(mut first)) => {
|
||||
first.extend(streams.map(|s| match s {
|
||||
TokenStream::Fallback(s) => s,
|
||||
TokenStream::Compiler(_) => mismatch(line!()),
|
||||
}));
|
||||
TokenStream::Fallback(first)
|
||||
}
|
||||
None => TokenStream::new(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Extend<TokenTree> for TokenStream {
|
||||
fn extend<I: IntoIterator<Item = TokenTree>>(&mut self, stream: I) {
|
||||
match self {
|
||||
TokenStream::Compiler(tts) => {
|
||||
// Here is the reason for DeferredTokenStream.
|
||||
for token in stream {
|
||||
tts.extra.push(into_compiler_token(token));
|
||||
}
|
||||
}
|
||||
TokenStream::Fallback(tts) => tts.extend(stream),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Extend<TokenStream> for TokenStream {
|
||||
fn extend<I: IntoIterator<Item = TokenStream>>(&mut self, streams: I) {
|
||||
match self {
|
||||
TokenStream::Compiler(tts) => {
|
||||
tts.evaluate_now();
|
||||
tts.stream
|
||||
.extend(streams.into_iter().map(TokenStream::unwrap_nightly));
|
||||
}
|
||||
TokenStream::Fallback(tts) => {
|
||||
tts.extend(streams.into_iter().map(TokenStream::unwrap_stable));
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Debug for TokenStream {
|
||||
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
||||
match self {
|
||||
TokenStream::Compiler(tts) => Debug::fmt(&tts.clone().into_token_stream(), f),
|
||||
TokenStream::Fallback(tts) => Debug::fmt(tts, f),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl LexError {
    /// Span at which lexing failed. The stable compiler API exposes no span on
    /// its LexError, so Compiler and CompilerPanic report `call_site`.
    pub(crate) fn span(&self) -> Span {
        match self {
            LexError::Compiler(_) | LexError::CompilerPanic => Span::call_site(),
            LexError::Fallback(e) => Span::Fallback(e.span()),
        }
    }
}
|
||||
|
||||
impl From<proc_macro::LexError> for LexError {
|
||||
fn from(e: proc_macro::LexError) -> Self {
|
||||
LexError::Compiler(e)
|
||||
}
|
||||
}
|
||||
|
||||
impl From<fallback::LexError> for LexError {
|
||||
fn from(e: fallback::LexError) -> Self {
|
||||
LexError::Fallback(e)
|
||||
}
|
||||
}
|
||||
|
||||
impl Debug for LexError {
|
||||
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
||||
match self {
|
||||
LexError::Compiler(e) => Debug::fmt(e, f),
|
||||
LexError::Fallback(e) => Debug::fmt(e, f),
|
||||
LexError::CompilerPanic => {
|
||||
let fallback = fallback::LexError::call_site();
|
||||
Debug::fmt(&fallback, f)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Display for LexError {
|
||||
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
||||
match self {
|
||||
LexError::Compiler(e) => Display::fmt(e, f),
|
||||
LexError::Fallback(e) => Display::fmt(e, f),
|
||||
LexError::CompilerPanic => {
|
||||
let fallback = fallback::LexError::call_site();
|
||||
Display::fmt(&fallback, f)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone)]
|
||||
pub(crate) enum TokenTreeIter {
|
||||
Compiler(proc_macro::token_stream::IntoIter),
|
||||
Fallback(fallback::TokenTreeIter),
|
||||
}
|
||||
|
||||
impl IntoIterator for TokenStream {
|
||||
type Item = TokenTree;
|
||||
type IntoIter = TokenTreeIter;
|
||||
|
||||
fn into_iter(self) -> TokenTreeIter {
|
||||
match self {
|
||||
TokenStream::Compiler(tts) => {
|
||||
TokenTreeIter::Compiler(tts.into_token_stream().into_iter())
|
||||
}
|
||||
TokenStream::Fallback(tts) => TokenTreeIter::Fallback(tts.into_iter()),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Iterator for TokenTreeIter {
|
||||
type Item = TokenTree;
|
||||
|
||||
fn next(&mut self) -> Option<TokenTree> {
|
||||
let token = match self {
|
||||
TokenTreeIter::Compiler(iter) => iter.next()?,
|
||||
TokenTreeIter::Fallback(iter) => return iter.next(),
|
||||
};
|
||||
Some(match token {
|
||||
proc_macro::TokenTree::Group(tt) => crate::Group::_new(Group::Compiler(tt)).into(),
|
||||
proc_macro::TokenTree::Punct(tt) => {
|
||||
let spacing = match tt.spacing() {
|
||||
proc_macro::Spacing::Joint => Spacing::Joint,
|
||||
proc_macro::Spacing::Alone => Spacing::Alone,
|
||||
};
|
||||
let mut o = Punct::new(tt.as_char(), spacing);
|
||||
o.set_span(crate::Span::_new(Span::Compiler(tt.span())));
|
||||
o.into()
|
||||
}
|
||||
proc_macro::TokenTree::Ident(s) => crate::Ident::_new(Ident::Compiler(s)).into(),
|
||||
proc_macro::TokenTree::Literal(l) => crate::Literal::_new(Literal::Compiler(l)).into(),
|
||||
})
|
||||
}
|
||||
|
||||
fn size_hint(&self) -> (usize, Option<usize>) {
|
||||
match self {
|
||||
TokenTreeIter::Compiler(tts) => tts.size_hint(),
|
||||
TokenTreeIter::Fallback(tts) => tts.size_hint(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, PartialEq, Eq)]
|
||||
#[cfg(super_unstable)]
|
||||
pub(crate) enum SourceFile {
|
||||
Compiler(proc_macro::SourceFile),
|
||||
Fallback(fallback::SourceFile),
|
||||
}
|
||||
|
||||
#[cfg(super_unstable)]
|
||||
impl SourceFile {
|
||||
fn nightly(sf: proc_macro::SourceFile) -> Self {
|
||||
SourceFile::Compiler(sf)
|
||||
}
|
||||
|
||||
/// Get the path to this source file as a string.
|
||||
pub fn path(&self) -> PathBuf {
|
||||
match self {
|
||||
SourceFile::Compiler(a) => a.path(),
|
||||
SourceFile::Fallback(a) => a.path(),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn is_real(&self) -> bool {
|
||||
match self {
|
||||
SourceFile::Compiler(a) => a.is_real(),
|
||||
SourceFile::Fallback(a) => a.is_real(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(super_unstable)]
|
||||
impl Debug for SourceFile {
|
||||
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
||||
match self {
|
||||
SourceFile::Compiler(a) => Debug::fmt(a, f),
|
||||
SourceFile::Fallback(a) => Debug::fmt(a, f),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone)]
|
||||
pub(crate) enum Span {
|
||||
Compiler(proc_macro::Span),
|
||||
Fallback(fallback::Span),
|
||||
}
|
||||
|
||||
impl Span {
|
||||
pub fn call_site() -> Self {
|
||||
if inside_proc_macro() {
|
||||
Span::Compiler(proc_macro::Span::call_site())
|
||||
} else {
|
||||
Span::Fallback(fallback::Span::call_site())
|
||||
}
|
||||
}
|
||||
|
||||
pub fn mixed_site() -> Self {
|
||||
if inside_proc_macro() {
|
||||
Span::Compiler(proc_macro::Span::mixed_site())
|
||||
} else {
|
||||
Span::Fallback(fallback::Span::mixed_site())
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(super_unstable)]
|
||||
pub fn def_site() -> Self {
|
||||
if inside_proc_macro() {
|
||||
Span::Compiler(proc_macro::Span::def_site())
|
||||
} else {
|
||||
Span::Fallback(fallback::Span::def_site())
|
||||
}
|
||||
}
|
||||
|
||||
pub fn resolved_at(&self, other: Span) -> Span {
|
||||
match (self, other) {
|
||||
(Span::Compiler(a), Span::Compiler(b)) => Span::Compiler(a.resolved_at(b)),
|
||||
(Span::Fallback(a), Span::Fallback(b)) => Span::Fallback(a.resolved_at(b)),
|
||||
(Span::Compiler(_), Span::Fallback(_)) => mismatch(line!()),
|
||||
(Span::Fallback(_), Span::Compiler(_)) => mismatch(line!()),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn located_at(&self, other: Span) -> Span {
|
||||
match (self, other) {
|
||||
(Span::Compiler(a), Span::Compiler(b)) => Span::Compiler(a.located_at(b)),
|
||||
(Span::Fallback(a), Span::Fallback(b)) => Span::Fallback(a.located_at(b)),
|
||||
(Span::Compiler(_), Span::Fallback(_)) => mismatch(line!()),
|
||||
(Span::Fallback(_), Span::Compiler(_)) => mismatch(line!()),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn unwrap(self) -> proc_macro::Span {
|
||||
match self {
|
||||
Span::Compiler(s) => s,
|
||||
Span::Fallback(_) => panic!("proc_macro::Span is only available in procedural macros"),
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(super_unstable)]
|
||||
pub fn source_file(&self) -> SourceFile {
|
||||
match self {
|
||||
Span::Compiler(s) => SourceFile::nightly(s.source_file()),
|
||||
Span::Fallback(s) => SourceFile::Fallback(s.source_file()),
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(span_locations)]
|
||||
pub fn start(&self) -> LineColumn {
|
||||
match self {
|
||||
Span::Compiler(_) => LineColumn { line: 0, column: 0 },
|
||||
Span::Fallback(s) => s.start(),
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(span_locations)]
|
||||
pub fn end(&self) -> LineColumn {
|
||||
match self {
|
||||
Span::Compiler(_) => LineColumn { line: 0, column: 0 },
|
||||
Span::Fallback(s) => s.end(),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn join(&self, other: Span) -> Option<Span> {
|
||||
let ret = match (self, other) {
|
||||
#[cfg(proc_macro_span)]
|
||||
(Span::Compiler(a), Span::Compiler(b)) => Span::Compiler(a.join(b)?),
|
||||
(Span::Fallback(a), Span::Fallback(b)) => Span::Fallback(a.join(b)?),
|
||||
_ => return None,
|
||||
};
|
||||
Some(ret)
|
||||
}
|
||||
|
||||
#[cfg(super_unstable)]
|
||||
pub fn eq(&self, other: &Span) -> bool {
|
||||
match (self, other) {
|
||||
(Span::Compiler(a), Span::Compiler(b)) => a.eq(b),
|
||||
(Span::Fallback(a), Span::Fallback(b)) => a.eq(b),
|
||||
_ => false,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn source_text(&self) -> Option<String> {
|
||||
match self {
|
||||
#[cfg(not(no_source_text))]
|
||||
Span::Compiler(s) => s.source_text(),
|
||||
#[cfg(no_source_text)]
|
||||
Span::Compiler(_) => None,
|
||||
Span::Fallback(s) => s.source_text(),
|
||||
}
|
||||
}
|
||||
|
||||
fn unwrap_nightly(self) -> proc_macro::Span {
|
||||
match self {
|
||||
Span::Compiler(s) => s,
|
||||
Span::Fallback(_) => mismatch(line!()),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl From<proc_macro::Span> for crate::Span {
|
||||
fn from(proc_span: proc_macro::Span) -> Self {
|
||||
crate::Span::_new(Span::Compiler(proc_span))
|
||||
}
|
||||
}
|
||||
|
||||
impl From<fallback::Span> for Span {
|
||||
fn from(inner: fallback::Span) -> Self {
|
||||
Span::Fallback(inner)
|
||||
}
|
||||
}
|
||||
|
||||
impl Debug for Span {
|
||||
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
||||
match self {
|
||||
Span::Compiler(s) => Debug::fmt(s, f),
|
||||
Span::Fallback(s) => Debug::fmt(s, f),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Append a `span` field to a token's `Debug` output. Compiler spans are
/// always printed; for fallback spans the fallback implementation decides
/// whether the span is interesting enough to show.
pub(crate) fn debug_span_field_if_nontrivial(debug: &mut fmt::DebugStruct, span: Span) {
    match span {
        Span::Compiler(s) => {
            debug.field("span", &s);
        }
        Span::Fallback(s) => fallback::debug_span_field_if_nontrivial(debug, s),
    }
}
|
||||
|
||||
#[derive(Clone)]
|
||||
pub(crate) enum Group {
|
||||
Compiler(proc_macro::Group),
|
||||
Fallback(fallback::Group),
|
||||
}
|
||||
|
||||
impl Group {
|
||||
pub fn new(delimiter: Delimiter, stream: TokenStream) -> Self {
|
||||
match stream {
|
||||
TokenStream::Compiler(tts) => {
|
||||
let delimiter = match delimiter {
|
||||
Delimiter::Parenthesis => proc_macro::Delimiter::Parenthesis,
|
||||
Delimiter::Bracket => proc_macro::Delimiter::Bracket,
|
||||
Delimiter::Brace => proc_macro::Delimiter::Brace,
|
||||
Delimiter::None => proc_macro::Delimiter::None,
|
||||
};
|
||||
Group::Compiler(proc_macro::Group::new(delimiter, tts.into_token_stream()))
|
||||
}
|
||||
TokenStream::Fallback(stream) => {
|
||||
Group::Fallback(fallback::Group::new(delimiter, stream))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub fn delimiter(&self) -> Delimiter {
|
||||
match self {
|
||||
Group::Compiler(g) => match g.delimiter() {
|
||||
proc_macro::Delimiter::Parenthesis => Delimiter::Parenthesis,
|
||||
proc_macro::Delimiter::Bracket => Delimiter::Bracket,
|
||||
proc_macro::Delimiter::Brace => Delimiter::Brace,
|
||||
proc_macro::Delimiter::None => Delimiter::None,
|
||||
},
|
||||
Group::Fallback(g) => g.delimiter(),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn stream(&self) -> TokenStream {
|
||||
match self {
|
||||
Group::Compiler(g) => TokenStream::Compiler(DeferredTokenStream::new(g.stream())),
|
||||
Group::Fallback(g) => TokenStream::Fallback(g.stream()),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn span(&self) -> Span {
|
||||
match self {
|
||||
Group::Compiler(g) => Span::Compiler(g.span()),
|
||||
Group::Fallback(g) => Span::Fallback(g.span()),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn span_open(&self) -> Span {
|
||||
match self {
|
||||
Group::Compiler(g) => Span::Compiler(g.span_open()),
|
||||
Group::Fallback(g) => Span::Fallback(g.span_open()),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn span_close(&self) -> Span {
|
||||
match self {
|
||||
Group::Compiler(g) => Span::Compiler(g.span_close()),
|
||||
Group::Fallback(g) => Span::Fallback(g.span_close()),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn set_span(&mut self, span: Span) {
|
||||
match (self, span) {
|
||||
(Group::Compiler(g), Span::Compiler(s)) => g.set_span(s),
|
||||
(Group::Fallback(g), Span::Fallback(s)) => g.set_span(s),
|
||||
(Group::Compiler(_), Span::Fallback(_)) => mismatch(line!()),
|
||||
(Group::Fallback(_), Span::Compiler(_)) => mismatch(line!()),
|
||||
}
|
||||
}
|
||||
|
||||
fn unwrap_nightly(self) -> proc_macro::Group {
|
||||
match self {
|
||||
Group::Compiler(g) => g,
|
||||
Group::Fallback(_) => mismatch(line!()),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl From<fallback::Group> for Group {
|
||||
fn from(g: fallback::Group) -> Self {
|
||||
Group::Fallback(g)
|
||||
}
|
||||
}
|
||||
|
||||
impl Display for Group {
|
||||
fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
||||
match self {
|
||||
Group::Compiler(group) => Display::fmt(group, formatter),
|
||||
Group::Fallback(group) => Display::fmt(group, formatter),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Debug for Group {
|
||||
fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
||||
match self {
|
||||
Group::Compiler(group) => Debug::fmt(group, formatter),
|
||||
Group::Fallback(group) => Debug::fmt(group, formatter),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone)]
|
||||
pub(crate) enum Ident {
|
||||
Compiler(proc_macro::Ident),
|
||||
Fallback(fallback::Ident),
|
||||
}
|
||||
|
||||
impl Ident {
|
||||
#[track_caller]
|
||||
pub fn new_checked(string: &str, span: Span) -> Self {
|
||||
match span {
|
||||
Span::Compiler(s) => Ident::Compiler(proc_macro::Ident::new(string, s)),
|
||||
Span::Fallback(s) => Ident::Fallback(fallback::Ident::new_checked(string, s)),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn new_unchecked(string: &str, span: fallback::Span) -> Self {
|
||||
Ident::Fallback(fallback::Ident::new_unchecked(string, span))
|
||||
}
|
||||
|
||||
#[track_caller]
|
||||
pub fn new_raw_checked(string: &str, span: Span) -> Self {
|
||||
match span {
|
||||
Span::Compiler(s) => Ident::Compiler(proc_macro::Ident::new_raw(string, s)),
|
||||
Span::Fallback(s) => Ident::Fallback(fallback::Ident::new_raw_checked(string, s)),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn new_raw_unchecked(string: &str, span: fallback::Span) -> Self {
|
||||
Ident::Fallback(fallback::Ident::new_raw_unchecked(string, span))
|
||||
}
|
||||
|
||||
pub fn span(&self) -> Span {
|
||||
match self {
|
||||
Ident::Compiler(t) => Span::Compiler(t.span()),
|
||||
Ident::Fallback(t) => Span::Fallback(t.span()),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn set_span(&mut self, span: Span) {
|
||||
match (self, span) {
|
||||
(Ident::Compiler(t), Span::Compiler(s)) => t.set_span(s),
|
||||
(Ident::Fallback(t), Span::Fallback(s)) => t.set_span(s),
|
||||
(Ident::Compiler(_), Span::Fallback(_)) => mismatch(line!()),
|
||||
(Ident::Fallback(_), Span::Compiler(_)) => mismatch(line!()),
|
||||
}
|
||||
}
|
||||
|
||||
fn unwrap_nightly(self) -> proc_macro::Ident {
|
||||
match self {
|
||||
Ident::Compiler(s) => s,
|
||||
Ident::Fallback(_) => mismatch(line!()),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl PartialEq for Ident {
|
||||
fn eq(&self, other: &Ident) -> bool {
|
||||
match (self, other) {
|
||||
(Ident::Compiler(t), Ident::Compiler(o)) => t.to_string() == o.to_string(),
|
||||
(Ident::Fallback(t), Ident::Fallback(o)) => t == o,
|
||||
(Ident::Compiler(_), Ident::Fallback(_)) => mismatch(line!()),
|
||||
(Ident::Fallback(_), Ident::Compiler(_)) => mismatch(line!()),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<T> PartialEq<T> for Ident
|
||||
where
|
||||
T: ?Sized + AsRef<str>,
|
||||
{
|
||||
fn eq(&self, other: &T) -> bool {
|
||||
let other = other.as_ref();
|
||||
match self {
|
||||
Ident::Compiler(t) => t.to_string() == other,
|
||||
Ident::Fallback(t) => t == other,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Display for Ident {
|
||||
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
||||
match self {
|
||||
Ident::Compiler(t) => Display::fmt(t, f),
|
||||
Ident::Fallback(t) => Display::fmt(t, f),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Debug for Ident {
|
||||
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
||||
match self {
|
||||
Ident::Compiler(t) => Debug::fmt(t, f),
|
||||
Ident::Fallback(t) => Debug::fmt(t, f),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone)]
|
||||
pub(crate) enum Literal {
|
||||
Compiler(proc_macro::Literal),
|
||||
Fallback(fallback::Literal),
|
||||
}
|
||||
|
||||
macro_rules! suffixed_numbers {
|
||||
($($name:ident => $kind:ident,)*) => ($(
|
||||
pub fn $name(n: $kind) -> Literal {
|
||||
if inside_proc_macro() {
|
||||
Literal::Compiler(proc_macro::Literal::$name(n))
|
||||
} else {
|
||||
Literal::Fallback(fallback::Literal::$name(n))
|
||||
}
|
||||
}
|
||||
)*)
|
||||
}
|
||||
|
||||
macro_rules! unsuffixed_integers {
|
||||
($($name:ident => $kind:ident,)*) => ($(
|
||||
pub fn $name(n: $kind) -> Literal {
|
||||
if inside_proc_macro() {
|
||||
Literal::Compiler(proc_macro::Literal::$name(n))
|
||||
} else {
|
||||
Literal::Fallback(fallback::Literal::$name(n))
|
||||
}
|
||||
}
|
||||
)*)
|
||||
}
|
||||
|
||||
impl Literal {
|
||||
pub unsafe fn from_str_unchecked(repr: &str) -> Self {
|
||||
if inside_proc_macro() {
|
||||
Literal::Compiler(proc_macro::Literal::from_str(repr).expect("invalid literal"))
|
||||
} else {
|
||||
Literal::Fallback(unsafe { fallback::Literal::from_str_unchecked(repr) })
|
||||
}
|
||||
}
|
||||
|
||||
suffixed_numbers! {
|
||||
u8_suffixed => u8,
|
||||
u16_suffixed => u16,
|
||||
u32_suffixed => u32,
|
||||
u64_suffixed => u64,
|
||||
u128_suffixed => u128,
|
||||
usize_suffixed => usize,
|
||||
i8_suffixed => i8,
|
||||
i16_suffixed => i16,
|
||||
i32_suffixed => i32,
|
||||
i64_suffixed => i64,
|
||||
i128_suffixed => i128,
|
||||
isize_suffixed => isize,
|
||||
|
||||
f32_suffixed => f32,
|
||||
f64_suffixed => f64,
|
||||
}
|
||||
|
||||
unsuffixed_integers! {
|
||||
u8_unsuffixed => u8,
|
||||
u16_unsuffixed => u16,
|
||||
u32_unsuffixed => u32,
|
||||
u64_unsuffixed => u64,
|
||||
u128_unsuffixed => u128,
|
||||
usize_unsuffixed => usize,
|
||||
i8_unsuffixed => i8,
|
||||
i16_unsuffixed => i16,
|
||||
i32_unsuffixed => i32,
|
||||
i64_unsuffixed => i64,
|
||||
i128_unsuffixed => i128,
|
||||
isize_unsuffixed => isize,
|
||||
}
|
||||
|
||||
pub fn f32_unsuffixed(f: f32) -> Literal {
|
||||
if inside_proc_macro() {
|
||||
Literal::Compiler(proc_macro::Literal::f32_unsuffixed(f))
|
||||
} else {
|
||||
Literal::Fallback(fallback::Literal::f32_unsuffixed(f))
|
||||
}
|
||||
}
|
||||
|
||||
pub fn f64_unsuffixed(f: f64) -> Literal {
|
||||
if inside_proc_macro() {
|
||||
Literal::Compiler(proc_macro::Literal::f64_unsuffixed(f))
|
||||
} else {
|
||||
Literal::Fallback(fallback::Literal::f64_unsuffixed(f))
|
||||
}
|
||||
}
|
||||
|
||||
pub fn string(t: &str) -> Literal {
|
||||
if inside_proc_macro() {
|
||||
Literal::Compiler(proc_macro::Literal::string(t))
|
||||
} else {
|
||||
Literal::Fallback(fallback::Literal::string(t))
|
||||
}
|
||||
}
|
||||
|
||||
pub fn character(t: char) -> Literal {
|
||||
if inside_proc_macro() {
|
||||
Literal::Compiler(proc_macro::Literal::character(t))
|
||||
} else {
|
||||
Literal::Fallback(fallback::Literal::character(t))
|
||||
}
|
||||
}
|
||||
|
||||
pub fn byte_string(bytes: &[u8]) -> Literal {
|
||||
if inside_proc_macro() {
|
||||
Literal::Compiler(proc_macro::Literal::byte_string(bytes))
|
||||
} else {
|
||||
Literal::Fallback(fallback::Literal::byte_string(bytes))
|
||||
}
|
||||
}
|
||||
|
||||
pub fn span(&self) -> Span {
|
||||
match self {
|
||||
Literal::Compiler(lit) => Span::Compiler(lit.span()),
|
||||
Literal::Fallback(lit) => Span::Fallback(lit.span()),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn set_span(&mut self, span: Span) {
|
||||
match (self, span) {
|
||||
(Literal::Compiler(lit), Span::Compiler(s)) => lit.set_span(s),
|
||||
(Literal::Fallback(lit), Span::Fallback(s)) => lit.set_span(s),
|
||||
(Literal::Compiler(_), Span::Fallback(_)) => mismatch(line!()),
|
||||
(Literal::Fallback(_), Span::Compiler(_)) => mismatch(line!()),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn subspan<R: RangeBounds<usize>>(&self, range: R) -> Option<Span> {
|
||||
match self {
|
||||
#[cfg(proc_macro_span)]
|
||||
Literal::Compiler(lit) => lit.subspan(range).map(Span::Compiler),
|
||||
#[cfg(not(proc_macro_span))]
|
||||
Literal::Compiler(_lit) => None,
|
||||
Literal::Fallback(lit) => lit.subspan(range).map(Span::Fallback),
|
||||
}
|
||||
}
|
||||
|
||||
// Extracts the compiler-backed literal; hitting the fallback variant here
// is a crate-internal bug, reported via `mismatch`.
fn unwrap_nightly(self) -> proc_macro::Literal {
    match self {
        Literal::Compiler(s) => s,
        Literal::Fallback(_) => mismatch(line!()),
    }
}
|
||||
}
|
||||
|
||||
impl From<fallback::Literal> for Literal {
    // Wraps a fallback literal directly, without consulting the execution
    // context.
    fn from(s: fallback::Literal) -> Self {
        Literal::Fallback(s)
    }
}
|
||||
|
||||
impl FromStr for Literal {
|
||||
type Err = LexError;
|
||||
|
||||
fn from_str(repr: &str) -> Result<Self, Self::Err> {
|
||||
if inside_proc_macro() {
|
||||
let literal = proc_macro::Literal::from_str(repr)?;
|
||||
Ok(Literal::Compiler(literal))
|
||||
} else {
|
||||
let literal = fallback::Literal::from_str(repr)?;
|
||||
Ok(Literal::Fallback(literal))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Display for Literal {
    // Forwards to the wrapped literal's Display, so both backends print the
    // literal's source representation.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        match self {
            Literal::Compiler(t) => Display::fmt(t, f),
            Literal::Fallback(t) => Display::fmt(t, f),
        }
    }
}
|
||||
|
||||
impl Debug for Literal {
    // Forwards to the wrapped literal's Debug representation.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        match self {
            Literal::Compiler(t) => Debug::fmt(t, f),
            Literal::Fallback(t) => Debug::fmt(t, f),
        }
    }
}
|
105
vendor/proc-macro2/tests/comments.rs
vendored
Normal file
105
vendor/proc-macro2/tests/comments.rs
vendored
Normal file
@ -0,0 +1,105 @@
|
||||
#![allow(clippy::assertions_on_result_states)]
|
||||
|
||||
use proc_macro2::{Delimiter, Literal, Spacing, TokenStream, TokenTree};
|
||||
|
||||
// #[doc = "..."] -> "..."
// Extracts the string literal from an outer (`///` / `/** */`) doc
// comment's attribute expansion.
fn lit_of_outer_doc_comment(tokens: &TokenStream) -> Literal {
    lit_of_doc_comment(tokens, false)
}
|
||||
|
||||
// #![doc = "..."] -> "..."
// Extracts the string literal from an inner (`//!` / `/*! */`) doc
// comment's attribute expansion.
fn lit_of_inner_doc_comment(tokens: &TokenStream) -> Literal {
    lit_of_doc_comment(tokens, true)
}
|
||||
|
||||
// Walks the token stream a doc comment expands to -- `#` (plus `!` when
// `inner`) followed by `[doc = "..."]` -- and returns the string literal,
// panicking on any other shape.
fn lit_of_doc_comment(tokens: &TokenStream, inner: bool) -> Literal {
    let mut iter = tokens.clone().into_iter();
    // Leading `#` punct.
    match iter.next().unwrap() {
        TokenTree::Punct(punct) => {
            assert_eq!(punct.as_char(), '#');
            assert_eq!(punct.spacing(), Spacing::Alone);
        }
        _ => panic!("wrong token {:?}", tokens),
    }
    if inner {
        // Inner doc comments carry a `!` after the `#`.
        match iter.next().unwrap() {
            TokenTree::Punct(punct) => {
                assert_eq!(punct.as_char(), '!');
                assert_eq!(punct.spacing(), Spacing::Alone);
            }
            _ => panic!("wrong token {:?}", tokens),
        }
    }
    // Descend into the `[ ... ]` group; nothing may follow it at top level.
    iter = match iter.next().unwrap() {
        TokenTree::Group(group) => {
            assert_eq!(group.delimiter(), Delimiter::Bracket);
            assert!(iter.next().is_none(), "unexpected token {:?}", tokens);
            group.stream().into_iter()
        }
        _ => panic!("wrong token {:?}", tokens),
    };
    // `doc` ident.
    match iter.next().unwrap() {
        TokenTree::Ident(ident) => assert_eq!(ident.to_string(), "doc"),
        _ => panic!("wrong token {:?}", tokens),
    }
    // `=` punct.
    match iter.next().unwrap() {
        TokenTree::Punct(punct) => {
            assert_eq!(punct.as_char(), '=');
            assert_eq!(punct.spacing(), Spacing::Alone);
        }
        _ => panic!("wrong token {:?}", tokens),
    }
    // The string literal itself, which must be the final token.
    match iter.next().unwrap() {
        TokenTree::Literal(literal) => {
            assert!(iter.next().is_none(), "unexpected token {:?}", tokens);
            literal
        }
        _ => panic!("wrong token {:?}", tokens),
    }
}
|
||||
|
||||
#[test]
fn closed_immediately() {
    // An empty block comment `/**/` is not a doc comment: it yields no tokens.
    let tokens: Vec<TokenTree> = "/**/"
        .parse::<TokenStream>()
        .unwrap()
        .into_iter()
        .collect();
    assert!(tokens.is_empty(), "not empty -- {:?}", tokens);
}
|
||||
|
||||
#[test]
fn incomplete() {
    // An unterminated block comment must be rejected by the lexer.
    let result = "/*/".parse::<TokenStream>();
    assert!(result.is_err());
}
|
||||
|
||||
// All four doc comment flavors expand to a doc attribute whose string
// contents exclude the comment markers themselves.
#[test]
fn lit() {
    let stream = "/// doc".parse::<TokenStream>().unwrap();
    let lit = lit_of_outer_doc_comment(&stream);
    assert_eq!(lit.to_string(), "\" doc\"");

    let stream = "//! doc".parse::<TokenStream>().unwrap();
    let lit = lit_of_inner_doc_comment(&stream);
    assert_eq!(lit.to_string(), "\" doc\"");

    let stream = "/** doc */".parse::<TokenStream>().unwrap();
    let lit = lit_of_outer_doc_comment(&stream);
    assert_eq!(lit.to_string(), "\" doc \"");

    let stream = "/*! doc */".parse::<TokenStream>().unwrap();
    let lit = lit_of_inner_doc_comment(&stream);
    assert_eq!(lit.to_string(), "\" doc \"");
}
|
||||
|
||||
// Carriage returns in doc comments are accepted only when immediately
// followed by a line feed.
#[test]
fn carriage_return() {
    let stream = "///\r\n".parse::<TokenStream>().unwrap();
    let lit = lit_of_outer_doc_comment(&stream);
    assert_eq!(lit.to_string(), "\"\"");

    let stream = "/**\r\n*/".parse::<TokenStream>().unwrap();
    let lit = lit_of_outer_doc_comment(&stream);
    assert_eq!(lit.to_string(), "\"\\r\\n\"");

    "///\r".parse::<TokenStream>().unwrap_err();
    "///\r \n".parse::<TokenStream>().unwrap_err();
    "/**\r \n*/".parse::<TokenStream>().unwrap_err();
}
|
8
vendor/proc-macro2/tests/features.rs
vendored
Normal file
8
vendor/proc-macro2/tests/features.rs
vendored
Normal file
@ -0,0 +1,8 @@
|
||||
// Run explicitly (it is #[ignore]d) in a build with default features off to
// confirm the "proc-macro" feature really was disabled.
#[test]
#[ignore]
fn make_sure_no_proc_macro() {
    assert!(
        !cfg!(feature = "proc-macro"),
        "still compiled with proc_macro?"
    );
}
|
99
vendor/proc-macro2/tests/marker.rs
vendored
Normal file
99
vendor/proc-macro2/tests/marker.rs
vendored
Normal file
@ -0,0 +1,99 @@
|
||||
#![allow(clippy::extra_unused_type_parameters)]
|
||||
|
||||
use proc_macro2::{
|
||||
Delimiter, Group, Ident, LexError, Literal, Punct, Spacing, Span, TokenStream, TokenTree,
|
||||
};
|
||||
|
||||
// Generates a test asserting that `$ty` implements (`is`) or does not
// implement (`is not`) the listed marker traits. The negative case works by
// method-resolution ambiguity: if `$ty` implemented `$marker`, the call at
// the bottom would match both traits and fail to compile.
macro_rules! assert_impl {
    ($ty:ident is $($marker:ident) and +) => {
        #[test]
        #[allow(non_snake_case)]
        fn $ty() {
            fn assert_implemented<T: $($marker +)+>() {}
            assert_implemented::<$ty>();
        }
    };

    ($ty:ident is not $($marker:ident) or +) => {
        #[test]
        #[allow(non_snake_case)]
        fn $ty() {
            $(
                {
                    // Implemented for types that implement $marker.
                    trait IsNotImplemented {
                        fn assert_not_implemented() {}
                    }
                    impl<T: $marker> IsNotImplemented for T {}

                    // Implemented for the type being tested.
                    trait IsImplemented {
                        fn assert_not_implemented() {}
                    }
                    impl IsImplemented for $ty {}

                    // If $ty does not implement $marker, there is no ambiguity
                    // in the following trait method call.
                    <$ty>::assert_not_implemented();
                }
            )+
        }
    };
}
|
||||
|
||||
// Plain data enums are thread-safe; every type holding tokens or spans is
// deliberately neither Send nor Sync.
assert_impl!(Delimiter is Send and Sync);
assert_impl!(Spacing is Send and Sync);

assert_impl!(Group is not Send or Sync);
assert_impl!(Ident is not Send or Sync);
assert_impl!(LexError is not Send or Sync);
assert_impl!(Literal is not Send or Sync);
assert_impl!(Punct is not Send or Sync);
assert_impl!(Span is not Send or Sync);
assert_impl!(TokenStream is not Send or Sync);
assert_impl!(TokenTree is not Send or Sync);
|
||||
|
||||
// Marker checks for types only exposed under the semver-exempt cfg.
#[cfg(procmacro2_semver_exempt)]
mod semver_exempt {
    use proc_macro2::{LineColumn, SourceFile};

    assert_impl!(LineColumn is Send and Sync);

    assert_impl!(SourceFile is not Send or Sync);
}
|
||||
|
||||
// Every public type should be usable across a `catch_unwind` boundary.
mod unwind_safe {
    use proc_macro2::{
        Delimiter, Group, Ident, LexError, Literal, Punct, Spacing, Span, TokenStream, TokenTree,
    };
    #[cfg(procmacro2_semver_exempt)]
    use proc_macro2::{LineColumn, SourceFile};
    use std::panic::{RefUnwindSafe, UnwindSafe};

    // Expands to one assert_impl! test per listed type.
    macro_rules! assert_unwind_safe {
        ($($types:ident)*) => {
            $(
                assert_impl!($types is UnwindSafe and RefUnwindSafe);
            )*
        };
    }

    assert_unwind_safe! {
        Delimiter
        Group
        Ident
        LexError
        Literal
        Punct
        Spacing
        Span
        TokenStream
        TokenTree
    }

    #[cfg(procmacro2_semver_exempt)]
    assert_unwind_safe! {
        LineColumn
        SourceFile
    }
}
|
759
vendor/proc-macro2/tests/test.rs
vendored
Normal file
759
vendor/proc-macro2/tests/test.rs
vendored
Normal file
@ -0,0 +1,759 @@
|
||||
#![allow(
|
||||
clippy::assertions_on_result_states,
|
||||
clippy::items_after_statements,
|
||||
clippy::non_ascii_literal,
|
||||
clippy::octal_escapes
|
||||
)]
|
||||
|
||||
use proc_macro2::{Ident, Literal, Punct, Spacing, Span, TokenStream, TokenTree};
|
||||
use std::iter;
|
||||
use std::str::{self, FromStr};
|
||||
|
||||
#[test]
fn idents() {
    // `Ident::new` accepts keywords and `_` as well as ordinary names.
    for &(input, expected) in &[("String", "String"), ("fn", "fn"), ("_", "_")] {
        assert_eq!(Ident::new(input, Span::call_site()).to_string(), expected);
    }
}
|
||||
|
||||
#[test]
fn raw_idents() {
    // `Ident::new_raw` prepends `r#` in the printed form.
    for &(input, expected) in &[("String", "r#String"), ("fn", "r#fn")] {
        assert_eq!(
            Ident::new_raw(input, Span::call_site()).to_string(),
            expected
        );
    }
}
|
||||
|
||||
// `_` is rejected as a raw identifier.
#[test]
#[should_panic(expected = "`r#_` cannot be a raw identifier")]
fn ident_raw_underscore() {
    Ident::new_raw("_", Span::call_site());
}
|
||||
|
||||
// Path keywords such as `super` cannot be raw identifiers.
#[test]
#[should_panic(expected = "`r#super` cannot be a raw identifier")]
fn ident_raw_reserved() {
    Ident::new_raw("super", Span::call_site());
}
|
||||
|
||||
// Empty identifiers are rejected; callers wanting "no ident" use Option.
#[test]
#[should_panic(expected = "Ident is not allowed to be empty; use Option<Ident>")]
fn ident_empty() {
    Ident::new("", Span::call_site());
}
|
||||
|
||||
// A leading digit makes a literal, not an identifier.
#[test]
#[should_panic(expected = "Ident cannot be a number; use Literal instead")]
fn ident_number() {
    Ident::new("255", Span::call_site());
}
|
||||
|
||||
// Punctuation inside an identifier is rejected.
#[test]
#[should_panic(expected = "\"a#\" is not a valid Ident")]
fn ident_invalid() {
    Ident::new("a#", Span::call_site());
}
|
||||
|
||||
// `r#` with no name is rejected.
#[test]
#[should_panic(expected = "not a valid Ident")]
fn raw_ident_empty() {
    Ident::new("r#", Span::call_site());
}
|
||||
|
||||
// A raw identifier cannot be a number either.
#[test]
#[should_panic(expected = "not a valid Ident")]
fn raw_ident_number() {
    Ident::new("r#255", Span::call_site());
}
|
||||
|
||||
// Punctuation after the `r#` prefix is rejected.
#[test]
#[should_panic(expected = "\"r#a#\" is not a valid Ident")]
fn raw_ident_invalid() {
    Ident::new("r#a#", Span::call_site());
}
|
||||
|
||||
// A bare apostrophe is not a valid (lifetime-style) ident.
#[test]
#[should_panic(expected = "not a valid Ident")]
fn lifetime_empty() {
    Ident::new("'", Span::call_site());
}
|
||||
|
||||
// A lifetime name cannot be a number.
#[test]
#[should_panic(expected = "not a valid Ident")]
fn lifetime_number() {
    Ident::new("'255", Span::call_site());
}
|
||||
|
||||
// Punctuation inside a lifetime name is rejected.
#[test]
#[should_panic(expected = r#""'a#" is not a valid Ident"#)]
fn lifetime_invalid() {
    Ident::new("'a#", Span::call_site());
}
|
||||
|
||||
// String literals escape minimally; NUL prints as `\0` unless the next
// character is a digit that would turn it into an octal escape, in which
// case `\x00` is used.
#[test]
fn literal_string() {
    assert_eq!(Literal::string("foo").to_string(), "\"foo\"");
    assert_eq!(Literal::string("\"").to_string(), "\"\\\"\"");
    assert_eq!(Literal::string("didn't").to_string(), "\"didn't\"");
    assert_eq!(
        Literal::string("a\00b\07c\08d\0e\0").to_string(),
        "\"a\\x000b\\x007c\\08d\\0e\\0\"",
    );

    // Backslash-newline continuation is legal; a stray `\r` afterwards is not.
    "\"\\\r\n x\"".parse::<TokenStream>().unwrap();
    "\"\\\r\n \rx\"".parse::<TokenStream>().unwrap_err();
}
|
||||
|
||||
// Raw strings permit `\r\n` and at most 255 `#` delimiters.
#[test]
fn literal_raw_string() {
    "r\"\r\n\"".parse::<TokenStream>().unwrap();

    // Builds `r##..#""##..#` with `n` hashes on each side.
    fn raw_string_literal_with_hashes(n: usize) -> String {
        let mut literal = String::new();
        literal.push('r');
        literal.extend(iter::repeat('#').take(n));
        literal.push('"');
        literal.push('"');
        literal.extend(iter::repeat('#').take(n));
        literal
    }

    raw_string_literal_with_hashes(255)
        .parse::<TokenStream>()
        .unwrap();

    // https://github.com/rust-lang/rust/pull/95251
    raw_string_literal_with_hashes(256)
        .parse::<TokenStream>()
        .unwrap_err();
}
|
||||
|
||||
// Byte strings escape control bytes; NUL follows the same `\0` vs `\x00`
// rule as ordinary strings when a digit follows.
#[test]
fn literal_byte_string() {
    assert_eq!(Literal::byte_string(b"").to_string(), "b\"\"");
    assert_eq!(
        Literal::byte_string(b"\0\t\n\r\"\\2\x10").to_string(),
        "b\"\\0\\t\\n\\r\\\"\\\\2\\x10\"",
    );
    assert_eq!(
        Literal::byte_string(b"a\00b\07c\08d\0e\0").to_string(),
        "b\"a\\x000b\\x007c\\08d\\0e\\0\"",
    );

    // Escaped newlines are fine; bare `\r` and non-ASCII bytes are rejected.
    "b\"\\\r\n x\"".parse::<TokenStream>().unwrap();
    "b\"\\\r\n \rx\"".parse::<TokenStream>().unwrap_err();
    "b\"\\\r\n \u{a0}x\"".parse::<TokenStream>().unwrap_err();
    "br\"\u{a0}\"".parse::<TokenStream>().unwrap_err();
}
|
||||
|
||||
// C string literals (`c"..."`, `cr"..."`); an interior NUL in any escape
// form must be rejected.
#[test]
fn literal_c_string() {
    let strings = r###"
        c"hello\x80我叫\u{1F980}" // from the RFC
        cr"\"
        cr##"Hello "world"!"##
        c"\t\n\r\"\\"
    "###;

    let mut tokens = strings.parse::<TokenStream>().unwrap().into_iter();

    for expected in &[
        r#"c"hello\x80我叫\u{1F980}""#,
        r#"cr"\""#,
        r###"cr##"Hello "world"!"##"###,
        r#"c"\t\n\r\"\\""#,
    ] {
        match tokens.next().unwrap() {
            TokenTree::Literal(literal) => {
                assert_eq!(literal.to_string(), *expected);
            }
            unexpected => panic!("unexpected token: {:?}", unexpected),
        }
    }

    if let Some(unexpected) = tokens.next() {
        panic!("unexpected token: {:?}", unexpected);
    }

    // NUL bytes are forbidden in C strings regardless of how they're spelled.
    for invalid in &[r#"c"\0""#, r#"c"\x00""#, r#"c"\u{0}""#, "c\"\0\""] {
        if let Ok(unexpected) = invalid.parse::<TokenStream>() {
            panic!("unexpected token: {:?}", unexpected);
        }
    }
}
|
||||
|
||||
#[test]
fn literal_character() {
    // Character literals escape `'` but leave `"` untouched.
    for &(ch, expected) in &[('x', "'x'"), ('\'', "'\\''"), ('"', "'\"'")] {
        assert_eq!(Literal::character(ch).to_string(), expected);
    }
}
|
||||
|
||||
// Suffixed integer constructors print the type suffix; unsuffixed ones
// print only the value.
#[test]
fn literal_integer() {
    assert_eq!(Literal::u8_suffixed(10).to_string(), "10u8");
    assert_eq!(Literal::u16_suffixed(10).to_string(), "10u16");
    assert_eq!(Literal::u32_suffixed(10).to_string(), "10u32");
    assert_eq!(Literal::u64_suffixed(10).to_string(), "10u64");
    assert_eq!(Literal::u128_suffixed(10).to_string(), "10u128");
    assert_eq!(Literal::usize_suffixed(10).to_string(), "10usize");

    assert_eq!(Literal::i8_suffixed(10).to_string(), "10i8");
    assert_eq!(Literal::i16_suffixed(10).to_string(), "10i16");
    assert_eq!(Literal::i32_suffixed(10).to_string(), "10i32");
    assert_eq!(Literal::i64_suffixed(10).to_string(), "10i64");
    assert_eq!(Literal::i128_suffixed(10).to_string(), "10i128");
    assert_eq!(Literal::isize_suffixed(10).to_string(), "10isize");

    assert_eq!(Literal::u8_unsuffixed(10).to_string(), "10");
    assert_eq!(Literal::u16_unsuffixed(10).to_string(), "10");
    assert_eq!(Literal::u32_unsuffixed(10).to_string(), "10");
    assert_eq!(Literal::u64_unsuffixed(10).to_string(), "10");
    assert_eq!(Literal::u128_unsuffixed(10).to_string(), "10");
    assert_eq!(Literal::usize_unsuffixed(10).to_string(), "10");

    assert_eq!(Literal::i8_unsuffixed(10).to_string(), "10");
    assert_eq!(Literal::i16_unsuffixed(10).to_string(), "10");
    assert_eq!(Literal::i32_unsuffixed(10).to_string(), "10");
    assert_eq!(Literal::i64_unsuffixed(10).to_string(), "10");
    assert_eq!(Literal::i128_unsuffixed(10).to_string(), "10");
    assert_eq!(Literal::isize_unsuffixed(10).to_string(), "10");
}
|
||||
|
||||
// Suffixed floats drop a redundant `.0` (the suffix already marks them as
// floats); unsuffixed floats keep it so the token still lexes as a float.
#[test]
fn literal_float() {
    assert_eq!(Literal::f32_suffixed(10.0).to_string(), "10f32");
    assert_eq!(Literal::f64_suffixed(10.0).to_string(), "10f64");

    assert_eq!(Literal::f32_unsuffixed(10.0).to_string(), "10.0");
    assert_eq!(Literal::f64_unsuffixed(10.0).to_string(), "10.0");
}
|
||||
|
||||
// Identifier-like suffixes belong to the literal token (count 1); raw
// identifiers and `_`-led suffixes split into separate tokens.
#[test]
fn literal_suffix() {
    // Number of top-level tokens `p` lexes into.
    fn token_count(p: &str) -> usize {
        p.parse::<TokenStream>().unwrap().into_iter().count()
    }

    assert_eq!(token_count("999u256"), 1);
    assert_eq!(token_count("999r#u256"), 3);
    assert_eq!(token_count("1."), 1);
    assert_eq!(token_count("1.f32"), 3);
    assert_eq!(token_count("1.0_0"), 1);
    assert_eq!(token_count("1._0"), 3);
    assert_eq!(token_count("1._m"), 3);
    assert_eq!(token_count("\"\"s"), 1);
    assert_eq!(token_count("r\"\"r"), 1);
    assert_eq!(token_count("b\"\"b"), 1);
    assert_eq!(token_count("br\"\"br"), 1);
    assert_eq!(token_count("r#\"\"#r"), 1);
    assert_eq!(token_count("'c'c"), 1);
    assert_eq!(token_count("b'b'b"), 1);
    assert_eq!(token_count("0E"), 1);
    assert_eq!(token_count("0o0A"), 1);
    assert_eq!(token_count("0E--0"), 4);
    assert_eq!(token_count("0.0ECMA"), 1);
}
|
||||
|
||||
// A negative suffixed literal iterates as a `-` punct followed by the
// positive literal.
#[test]
fn literal_iter_negative() {
    let negative_literal = Literal::i32_suffixed(-3);
    let tokens = TokenStream::from(TokenTree::Literal(negative_literal));
    let mut iter = tokens.into_iter();
    match iter.next().unwrap() {
        TokenTree::Punct(punct) => {
            assert_eq!(punct.as_char(), '-');
            assert_eq!(punct.spacing(), Spacing::Alone);
        }
        unexpected => panic!("unexpected token {:?}", unexpected),
    }
    match iter.next().unwrap() {
        TokenTree::Literal(literal) => {
            assert_eq!(literal.to_string(), "3i32");
        }
        unexpected => panic!("unexpected token {:?}", unexpected),
    }
    assert!(iter.next().is_none());
}
|
||||
|
||||
// `FromStr for Literal` accepts exactly one literal token (optionally led
// by `-` for numbers) with no surrounding whitespace or comments.
#[test]
fn literal_parse() {
    assert!("1".parse::<Literal>().is_ok());
    assert!("-1".parse::<Literal>().is_ok());
    assert!("-1u12".parse::<Literal>().is_ok());
    assert!("1.0".parse::<Literal>().is_ok());
    assert!("-1.0".parse::<Literal>().is_ok());
    assert!("-1.0f12".parse::<Literal>().is_ok());
    assert!("'a'".parse::<Literal>().is_ok());
    assert!("\"\n\"".parse::<Literal>().is_ok());
    assert!("0 1".parse::<Literal>().is_err());
    assert!(" 0".parse::<Literal>().is_err());
    assert!("0 ".parse::<Literal>().is_err());
    assert!("/* comment */0".parse::<Literal>().is_err());
    assert!("0/* comment */".parse::<Literal>().is_err());
    assert!("0// comment".parse::<Literal>().is_err());
    assert!("- 1".parse::<Literal>().is_err());
    assert!("- 1.0".parse::<Literal>().is_err());
    assert!("-\"\"".parse::<Literal>().is_err());
}
|
||||
|
||||
// Span positions and subspans for parsed literals are only available when
// the span_locations cfg is enabled.
#[test]
fn literal_span() {
    let positive = "0.1".parse::<Literal>().unwrap();
    let negative = "-0.1".parse::<Literal>().unwrap();
    let subspan = positive.subspan(1..2);

    #[cfg(not(span_locations))]
    {
        let _ = negative;
        assert!(subspan.is_none());
    }

    #[cfg(span_locations)]
    {
        assert_eq!(positive.span().start().column, 0);
        assert_eq!(positive.span().end().column, 3);
        assert_eq!(negative.span().start().column, 0);
        assert_eq!(negative.span().end().column, 4);
        assert_eq!(subspan.unwrap().source_text().unwrap(), ".");
    }

    // Out-of-range subspans are None in every configuration.
    assert!(positive.subspan(1..4).is_none());
}
|
||||
|
||||
// `source_text` is usable out of parse order and handles multibyte chars.
#[cfg(span_locations)]
#[test]
fn source_text() {
    let input = " 𓀕 a z ";
    let mut tokens = input
        .parse::<proc_macro2::TokenStream>()
        .unwrap()
        .into_iter();

    let first = tokens.next().unwrap();
    assert_eq!("𓀕", first.span().source_text().unwrap());

    // Deliberately query the third token's text before the second's.
    let second = tokens.next().unwrap();
    let third = tokens.next().unwrap();
    assert_eq!("z", third.span().source_text().unwrap());
    assert_eq!("a", second.span().source_text().unwrap());
}
|
||||
|
||||
// Parse -> print -> reparse -> print must be a fixed point for every input,
// including malformed-suffix literals like `1u80`.
#[test]
fn roundtrip() {
    fn roundtrip(p: &str) {
        println!("parse: {}", p);
        let s = p.parse::<TokenStream>().unwrap().to_string();
        println!("first: {}", s);
        let s2 = s.parse::<TokenStream>().unwrap().to_string();
        assert_eq!(s, s2);
    }
    roundtrip("a");
    roundtrip("<<");
    roundtrip("<<=");
    roundtrip(
        "
        1
        1.0
        1f32
        2f64
        1usize
        4isize
        4e10
        1_000
        1_0i32
        8u8
        9
        0
        0xffffffffffffffffffffffffffffffff
        1x
        1u80
        1f320
        ",
    );
    roundtrip("'a");
    roundtrip("'_");
    roundtrip("'static");
    roundtrip("'\\u{10__FFFF}'");
    roundtrip("\"\\u{10_F0FF__}foo\\u{1_0_0_0__}\"");
}
|
||||
|
||||
// Inputs the lexer must reject; a successful parse is a test failure.
#[test]
fn fail() {
    fn fail(p: &str) {
        if let Ok(s) = p.parse::<TokenStream>() {
            panic!("should have failed to parse: {}\n{:#?}", p, s);
        }
    }
    fail("' static");
    fail("r#1");
    fail("r#_");
    fail("\"\\u{0000000}\""); // overlong unicode escape (rust allows at most 6 hex digits)
    fail("\"\\u{999999}\""); // outside of valid range of char
    fail("\"\\u{_0}\""); // leading underscore
    fail("\"\\u{}\""); // empty
    fail("b\"\r\""); // bare carriage return in byte string
    fail("r\"\r\""); // bare carriage return in raw string
    fail("\"\\\r \""); // backslash carriage return
    fail("'aa'aa");
    fail("br##\"\"#");
    fail("\"\\\n\u{85}\r\"");
}
|
||||
|
||||
// Expected spans are (start line, start col, end line, end col) with
// 1-based lines and 0-based columns.
#[cfg(span_locations)]
#[test]
fn span_test() {
    check_spans(
        "\
/// This is a document comment
testing 123
{
  testing 234
}",
        &[
            (1, 0, 1, 30),  // #
            (1, 0, 1, 30),  // [ ... ]
            (1, 0, 1, 30),  // doc
            (1, 0, 1, 30),  // =
            (1, 0, 1, 30),  // "This is..."
            (2, 0, 2, 7),   // testing
            (2, 8, 2, 11),  // 123
            (3, 0, 5, 1),   // { ... }
            (4, 2, 4, 9),   // testing
            (4, 10, 4, 13), // 234
        ],
    );
}
|
||||
|
||||
// Without real compiler spans, call_site reports line 1 / column 0 inside
// an unspecified, non-real source file.
#[cfg(procmacro2_semver_exempt)]
#[cfg(not(nightly))]
#[test]
fn default_span() {
    let start = Span::call_site().start();
    assert_eq!(start.line, 1);
    assert_eq!(start.column, 0);
    let end = Span::call_site().end();
    assert_eq!(end.line, 1);
    assert_eq!(end.column, 0);
    let source_file = Span::call_site().source_file();
    assert_eq!(source_file.path().to_string_lossy(), "<unspecified>");
    assert!(!source_file.is_real());
}
|
||||
|
||||
// Joining spans succeeds only within a single source file and the joined
// span covers both endpoints.
#[cfg(procmacro2_semver_exempt)]
#[test]
fn span_join() {
    let source1 = "aaa\nbbb"
        .parse::<TokenStream>()
        .unwrap()
        .into_iter()
        .collect::<Vec<_>>();
    let source2 = "ccc\nddd"
        .parse::<TokenStream>()
        .unwrap()
        .into_iter()
        .collect::<Vec<_>>();

    // Separate parses get distinct virtual source files.
    assert!(source1[0].span().source_file() != source2[0].span().source_file());
    assert_eq!(
        source1[0].span().source_file(),
        source1[1].span().source_file()
    );

    let joined1 = source1[0].span().join(source1[1].span());
    let joined2 = source1[0].span().join(source2[0].span());
    assert!(joined1.is_some());
    assert!(joined2.is_none());

    let start = joined1.unwrap().start();
    let end = joined1.unwrap().end();
    assert_eq!(start.line, 1);
    assert_eq!(start.column, 0);
    assert_eq!(end.line, 2);
    assert_eq!(end.column, 3);

    assert_eq!(
        joined1.unwrap().source_file(),
        source1[0].span().source_file()
    );
}
|
||||
|
||||
#[test]
fn no_panic() {
    // Malformed byte-literal-like input must produce a lex error rather
    // than panic inside the lexer.
    let bytes: &[u8] = b"b\'\xc2\x86  \x00\x00\x00^\"";
    let s = str::from_utf8(bytes).unwrap();
    assert!(s.parse::<TokenStream>().is_err());
}
|
||||
|
||||
// A trailing line comment must not make the preceding punct Joint.
#[test]
fn punct_before_comment() {
    let mut tts = TokenStream::from_str("~// comment").unwrap().into_iter();
    match tts.next().unwrap() {
        TokenTree::Punct(tt) => {
            assert_eq!(tt.as_char(), '~');
            assert_eq!(tt.spacing(), Spacing::Alone);
        }
        wrong => panic!("wrong token {:?}", wrong),
    }
}
|
||||
|
||||
// Joint spacing on a stream's final punct must survive a roundtrip through
// the stream.
#[test]
fn joint_last_token() {
    // This test verifies that we match the behavior of libproc_macro *not* in
    // the range nightly-2020-09-06 through nightly-2020-09-10, in which this
    // behavior was temporarily broken.
    // See https://github.com/rust-lang/rust/issues/76399

    let joint_punct = Punct::new(':', Spacing::Joint);
    let stream = TokenStream::from(TokenTree::Punct(joint_punct));
    let punct = match stream.into_iter().next().unwrap() {
        TokenTree::Punct(punct) => punct,
        _ => unreachable!(),
    };
    assert_eq!(punct.spacing(), Spacing::Joint);
}
|
||||
|
||||
#[test]
fn raw_identifier() {
    // `r#dyn` lexes as one raw identifier token and nothing else.
    let mut tts = TokenStream::from_str("r#dyn").unwrap().into_iter();
    let first = tts.next().unwrap();
    if let TokenTree::Ident(raw) = first {
        assert_eq!("r#dyn", raw.to_string());
    } else {
        panic!("wrong token {:?}", first);
    }
    assert!(tts.next().is_none());
}
|
||||
|
||||
// Debug output for Ident differs when spans carry location data.
#[test]
fn test_debug_ident() {
    let ident = Ident::new("proc_macro", Span::call_site());

    #[cfg(not(span_locations))]
    let expected = "Ident(proc_macro)";

    #[cfg(span_locations)]
    let expected = "Ident { sym: proc_macro }";

    assert_eq!(expected, format!("{:?}", ident));
}
|
||||
|
||||
// Pretty-printed Debug for a token stream. Two variants are accepted per
// configuration because std's `{:#?}` formatting gained trailing commas at
// some point; which one appears depends on the std this runs against.
#[test]
fn test_debug_tokenstream() {
    let tts = TokenStream::from_str("[a + 1]").unwrap();

    #[cfg(not(span_locations))]
    let expected = "\
TokenStream [
    Group {
        delimiter: Bracket,
        stream: TokenStream [
            Ident {
                sym: a,
            },
            Punct {
                char: '+',
                spacing: Alone,
            },
            Literal {
                lit: 1,
            },
        ],
    },
]\
";

    #[cfg(not(span_locations))]
    let expected_before_trailing_commas = "\
TokenStream [
    Group {
        delimiter: Bracket,
        stream: TokenStream [
            Ident {
                sym: a
            },
            Punct {
                char: '+',
                spacing: Alone
            },
            Literal {
                lit: 1
            }
        ]
    }
]\
";

    #[cfg(span_locations)]
    let expected = "\
TokenStream [
    Group {
        delimiter: Bracket,
        stream: TokenStream [
            Ident {
                sym: a,
                span: bytes(2..3),
            },
            Punct {
                char: '+',
                spacing: Alone,
                span: bytes(4..5),
            },
            Literal {
                lit: 1,
                span: bytes(6..7),
            },
        ],
        span: bytes(1..8),
    },
]\
";

    #[cfg(span_locations)]
    let expected_before_trailing_commas = "\
TokenStream [
    Group {
        delimiter: Bracket,
        stream: TokenStream [
            Ident {
                sym: a,
                span: bytes(2..3)
            },
            Punct {
                char: '+',
                spacing: Alone,
                span: bytes(4..5)
            },
            Literal {
                lit: 1,
                span: bytes(6..7)
            }
        ],
        span: bytes(1..8)
    }
]\
";

    let actual = format!("{:#?}", tts);
    if actual.ends_with(",\n]") {
        assert_eq!(expected, actual);
    } else {
        assert_eq!(expected_before_trailing_commas, actual);
    }
}
|
||||
|
||||
#[test]
fn default_tokenstream_is_empty() {
    // `Default` must produce a stream containing no tokens.
    assert!(<TokenStream as Default>::default().is_empty());
}
|
||||
|
||||
#[test]
fn tokenstream_size_hint() {
    // Four top-level tokens: `a`, `b`, the parenthesized group, and `e`.
    let iter = "a b (c d) e".parse::<TokenStream>().unwrap().into_iter();
    assert_eq!(iter.size_hint(), (4, Some(4)));
}
|
||||
|
||||
#[test]
fn tuple_indexing() {
    // This behavior may change depending on https://github.com/rust-lang/rust/pull/71322
    let mut tokens = "tuple.0.0".parse::<TokenStream>().unwrap().into_iter();
    for expected in &["tuple", ".", "0.0"] {
        assert_eq!(*expected, tokens.next().unwrap().to_string());
    }
    assert!(tokens.next().is_none());
}
|
||||
|
||||
// Span columns count characters, so multibyte characters occupy a single
// column; ASCII and non-ASCII variants of each input must span identically.
#[cfg(span_locations)]
#[test]
fn non_ascii_tokens() {
    check_spans("// abc", &[]);
    check_spans("// ábc", &[]);
    check_spans("// abc x", &[]);
    check_spans("// ábc x", &[]);
    check_spans("/* abc */ x", &[(1, 10, 1, 11)]);
    check_spans("/* ábc */ x", &[(1, 10, 1, 11)]);
    check_spans("/* ab\nc */ x", &[(2, 5, 2, 6)]);
    check_spans("/* áb\nc */ x", &[(2, 5, 2, 6)]);
    check_spans("/*** abc */ x", &[(1, 12, 1, 13)]);
    check_spans("/*** ábc */ x", &[(1, 12, 1, 13)]);
    check_spans(r#""abc""#, &[(1, 0, 1, 5)]);
    check_spans(r#""ábc""#, &[(1, 0, 1, 5)]);
    check_spans(r##"r#"abc"#"##, &[(1, 0, 1, 8)]);
    check_spans(r##"r#"ábc"#"##, &[(1, 0, 1, 8)]);
    check_spans("r#\"a\nc\"#", &[(1, 0, 2, 3)]);
    check_spans("r#\"á\nc\"#", &[(1, 0, 2, 3)]);
    check_spans("'a'", &[(1, 0, 1, 3)]);
    check_spans("'á'", &[(1, 0, 1, 3)]);
    check_spans("//! abc", &[(1, 0, 1, 7), (1, 0, 1, 7), (1, 0, 1, 7)]);
    check_spans("//! ábc", &[(1, 0, 1, 7), (1, 0, 1, 7), (1, 0, 1, 7)]);
    check_spans("//! abc\n", &[(1, 0, 1, 7), (1, 0, 1, 7), (1, 0, 1, 7)]);
    check_spans("//! ábc\n", &[(1, 0, 1, 7), (1, 0, 1, 7), (1, 0, 1, 7)]);
    check_spans("/*! abc */", &[(1, 0, 1, 10), (1, 0, 1, 10), (1, 0, 1, 10)]);
    check_spans("/*! ábc */", &[(1, 0, 1, 10), (1, 0, 1, 10), (1, 0, 1, 10)]);
    check_spans("/*! a\nc */", &[(1, 0, 2, 4), (1, 0, 2, 4), (1, 0, 2, 4)]);
    check_spans("/*! á\nc */", &[(1, 0, 2, 4), (1, 0, 2, 4), (1, 0, 2, 4)]);
    check_spans("abc", &[(1, 0, 1, 3)]);
    check_spans("ábc", &[(1, 0, 1, 3)]);
    check_spans("ábć", &[(1, 0, 1, 3)]);
    check_spans("abc// foo", &[(1, 0, 1, 3)]);
    check_spans("ábc// foo", &[(1, 0, 1, 3)]);
    check_spans("ábć// foo", &[(1, 0, 1, 3)]);
    check_spans("b\"a\\\n c\"", &[(1, 0, 2, 3)]);
}
|
||||
|
||||
// Parses `p` and checks each token's span against `lines`; every expected
// entry must be consumed by the end.
#[cfg(span_locations)]
fn check_spans(p: &str, mut lines: &[(usize, usize, usize, usize)]) {
    let ts = p.parse::<TokenStream>().unwrap();
    check_spans_internal(ts, &mut lines);
    assert!(lines.is_empty(), "leftover ranges: {:?}", lines);
}
|
||||
|
||||
// Pops one expected (start line, start col, end line, end col) tuple per
// token and asserts it against the token's span, recursing into groups.
// Tokens beyond the expected list are ignored.
#[cfg(span_locations)]
fn check_spans_internal(ts: TokenStream, lines: &mut &[(usize, usize, usize, usize)]) {
    for i in ts {
        if let Some((&(sline, scol, eline, ecol), rest)) = lines.split_first() {
            *lines = rest;

            let start = i.span().start();
            assert_eq!(start.line, sline, "sline did not match for {}", i);
            assert_eq!(start.column, scol, "scol did not match for {}", i);

            let end = i.span().end();
            assert_eq!(end.line, eline, "eline did not match for {}", i);
            assert_eq!(end.column, ecol, "ecol did not match for {}", i);

            if let TokenTree::Group(g) = i {
                // `stream()` already returns an owned TokenStream, so the
                // previous `.clone()` of that temporary was redundant.
                check_spans_internal(g.stream(), lines);
            }
        }
    }
}
|
||||
|
||||
// Characters treated as inter-token whitespace by the lexer.
#[test]
fn whitespace() {
    // space, horizontal tab, vertical tab, form feed, carriage return, line
    // feed, non-breaking space, left-to-right mark, right-to-left mark
    let various_spaces = " \t\u{b}\u{c}\r\n\u{a0}\u{200e}\u{200f}";
    let tokens = various_spaces.parse::<TokenStream>().unwrap();
    assert_eq!(tokens.into_iter().count(), 0);

    // Lone carriage returns between tokens are accepted.
    let lone_carriage_returns = " \r \r\r\n ";
    lone_carriage_returns.parse::<TokenStream>().unwrap();
}
|
||||
|
||||
#[test]
fn byte_order_mark() {
    // A byte order mark at the very start of the input is accepted; the
    // first token after it is the identifier.
    let tokens = "\u{feff}foo".parse::<TokenStream>().unwrap();
    let first = tokens.into_iter().next().unwrap();
    if let TokenTree::Ident(ident) = first {
        assert_eq!(ident, "foo");
    } else {
        unreachable!();
    }

    // U+FEFF anywhere other than the start is a parse error.
    "foo\u{feff}".parse::<TokenStream>().unwrap_err();
}
|
28
vendor/proc-macro2/tests/test_fmt.rs
vendored
Normal file
28
vendor/proc-macro2/tests/test_fmt.rs
vendored
Normal file
@ -0,0 +1,28 @@
|
||||
#![allow(clippy::from_iter_instead_of_collect)]
|
||||
|
||||
use proc_macro2::{Delimiter, Group, Ident, Span, TokenStream, TokenTree};
|
||||
use std::iter;
|
||||
|
||||
#[test]
fn test_fmt_group() {
    // Display output of groups must match libproc_macro: parentheses and
    // brackets print their contents with no padding, braces get interior
    // spaces, and Delimiter::None prints nothing around the contents.
    let ident = Ident::new("x", Span::call_site());
    let inner = TokenStream::from_iter(iter::once(TokenTree::Ident(ident)));
    let empty = TokenStream::new();
    let fmt = |delimiter, stream: &TokenStream| Group::new(delimiter, stream.clone()).to_string();

    assert_eq!("()", fmt(Delimiter::Parenthesis, &empty));
    assert_eq!("(x)", fmt(Delimiter::Parenthesis, &inner));
    assert_eq!("[]", fmt(Delimiter::Bracket, &empty));
    assert_eq!("[x]", fmt(Delimiter::Bracket, &inner));
    assert_eq!("{ }", fmt(Delimiter::Brace, &empty));
    assert_eq!("{ x }", fmt(Delimiter::Brace, &inner));
    assert_eq!("", fmt(Delimiter::None, &empty));
    assert_eq!("x", fmt(Delimiter::None, &inner));
}
|
42
vendor/proc-macro2/tests/test_size.rs
vendored
Normal file
42
vendor/proc-macro2/tests/test_size.rs
vendored
Normal file
@ -0,0 +1,42 @@
|
||||
extern crate proc_macro;
|
||||
|
||||
use std::mem;
|
||||
|
||||
#[rustversion::attr(before(1.32), ignore)]
|
||||
#[test]
|
||||
fn test_proc_macro_span_size() {
|
||||
assert_eq!(mem::size_of::<proc_macro::Span>(), 4);
|
||||
assert_eq!(mem::size_of::<Option<proc_macro::Span>>(), 4);
|
||||
}
|
||||
|
||||
#[cfg_attr(not(all(not(wrap_proc_macro), not(span_locations))), ignore)]
#[test]
fn test_proc_macro2_fallback_span_size_without_locations() {
    // Fallback spans with no location tracking carry no data at all, so the
    // Option wrapper needs one byte for its discriminant.
    let span_size = mem::size_of::<proc_macro2::Span>();
    let option_span_size = mem::size_of::<Option<proc_macro2::Span>>();
    assert_eq!(span_size, 0);
    assert_eq!(option_span_size, 1);
}
|
||||
|
||||
#[cfg_attr(not(all(not(wrap_proc_macro), span_locations)), ignore)]
#[test]
fn test_proc_macro2_fallback_span_size_with_locations() {
    // With location tracking enabled, a fallback span is 8 bytes and its
    // Option takes an extra word-aligned discriminant (12 bytes total).
    let span_size = mem::size_of::<proc_macro2::Span>();
    let option_span_size = mem::size_of::<Option<proc_macro2::Span>>();
    assert_eq!(span_size, 8);
    assert_eq!(option_span_size, 12);
}
|
||||
|
||||
#[rustversion::attr(before(1.32), ignore)]
|
||||
#[rustversion::attr(
|
||||
since(1.32),
|
||||
cfg_attr(not(all(wrap_proc_macro, not(span_locations))), ignore)
|
||||
)]
|
||||
#[test]
|
||||
fn test_proc_macro2_wrapper_span_size_without_locations() {
|
||||
assert_eq!(mem::size_of::<proc_macro2::Span>(), 4);
|
||||
assert_eq!(mem::size_of::<Option<proc_macro2::Span>>(), 8);
|
||||
}
|
||||
|
||||
#[cfg_attr(not(all(wrap_proc_macro, span_locations)), ignore)]
#[test]
fn test_proc_macro2_wrapper_span_size_with_locations() {
    // Wrapping the compiler's spans with location tracking: 12 bytes, and
    // Option<Span> stays 12 — the extra state fits in an existing niche.
    let span_size = mem::size_of::<proc_macro2::Span>();
    let option_span_size = mem::size_of::<Option<proc_macro2::Span>>();
    assert_eq!(span_size, 12);
    assert_eq!(option_span_size, 12);
}
|
Reference in New Issue
Block a user