Initial vendor packages

Signed-off-by: Valentin Popov <valentin@popov.link>
2024-01-08 01:21:28 +04:00
parent 5ecd8cf2cb
commit 1b6a04ca55
7309 changed files with 2160054 additions and 0 deletions

879
vendor/syn/tests/common/eq.rs vendored Normal file

@ -0,0 +1,879 @@
#![allow(unused_macro_rules)]
extern crate rustc_ast;
extern crate rustc_data_structures;
extern crate rustc_driver;
extern crate rustc_span;
extern crate thin_vec;
use rustc_ast::ast::AngleBracketedArg;
use rustc_ast::ast::AngleBracketedArgs;
use rustc_ast::ast::AnonConst;
use rustc_ast::ast::Arm;
use rustc_ast::ast::AssocConstraint;
use rustc_ast::ast::AssocConstraintKind;
use rustc_ast::ast::AssocItemKind;
use rustc_ast::ast::AttrArgs;
use rustc_ast::ast::AttrArgsEq;
use rustc_ast::ast::AttrId;
use rustc_ast::ast::AttrItem;
use rustc_ast::ast::AttrKind;
use rustc_ast::ast::AttrStyle;
use rustc_ast::ast::Attribute;
use rustc_ast::ast::BareFnTy;
use rustc_ast::ast::BinOpKind;
use rustc_ast::ast::BindingAnnotation;
use rustc_ast::ast::Block;
use rustc_ast::ast::BlockCheckMode;
use rustc_ast::ast::BorrowKind;
use rustc_ast::ast::BoundConstness;
use rustc_ast::ast::BoundPolarity;
use rustc_ast::ast::ByRef;
use rustc_ast::ast::CaptureBy;
use rustc_ast::ast::Closure;
use rustc_ast::ast::ClosureBinder;
use rustc_ast::ast::Const;
use rustc_ast::ast::ConstItem;
use rustc_ast::ast::CoroutineKind;
use rustc_ast::ast::Crate;
use rustc_ast::ast::Defaultness;
use rustc_ast::ast::DelimArgs;
use rustc_ast::ast::EnumDef;
use rustc_ast::ast::Expr;
use rustc_ast::ast::ExprField;
use rustc_ast::ast::ExprKind;
use rustc_ast::ast::Extern;
use rustc_ast::ast::FieldDef;
use rustc_ast::ast::FloatTy;
use rustc_ast::ast::Fn;
use rustc_ast::ast::FnDecl;
use rustc_ast::ast::FnHeader;
use rustc_ast::ast::FnRetTy;
use rustc_ast::ast::FnSig;
use rustc_ast::ast::ForLoopKind;
use rustc_ast::ast::ForeignItemKind;
use rustc_ast::ast::ForeignMod;
use rustc_ast::ast::FormatAlignment;
use rustc_ast::ast::FormatArgPosition;
use rustc_ast::ast::FormatArgPositionKind;
use rustc_ast::ast::FormatArgs;
use rustc_ast::ast::FormatArgsPiece;
use rustc_ast::ast::FormatArgument;
use rustc_ast::ast::FormatArgumentKind;
use rustc_ast::ast::FormatArguments;
use rustc_ast::ast::FormatCount;
use rustc_ast::ast::FormatDebugHex;
use rustc_ast::ast::FormatOptions;
use rustc_ast::ast::FormatPlaceholder;
use rustc_ast::ast::FormatSign;
use rustc_ast::ast::FormatTrait;
use rustc_ast::ast::GenBlockKind;
use rustc_ast::ast::GenericArg;
use rustc_ast::ast::GenericArgs;
use rustc_ast::ast::GenericBound;
use rustc_ast::ast::GenericParam;
use rustc_ast::ast::GenericParamKind;
use rustc_ast::ast::Generics;
use rustc_ast::ast::Impl;
use rustc_ast::ast::ImplPolarity;
use rustc_ast::ast::Inline;
use rustc_ast::ast::InlineAsm;
use rustc_ast::ast::InlineAsmOperand;
use rustc_ast::ast::InlineAsmOptions;
use rustc_ast::ast::InlineAsmRegOrRegClass;
use rustc_ast::ast::InlineAsmSym;
use rustc_ast::ast::InlineAsmTemplatePiece;
use rustc_ast::ast::IntTy;
use rustc_ast::ast::IsAuto;
use rustc_ast::ast::Item;
use rustc_ast::ast::ItemKind;
use rustc_ast::ast::Label;
use rustc_ast::ast::Lifetime;
use rustc_ast::ast::LitFloatType;
use rustc_ast::ast::LitIntType;
use rustc_ast::ast::LitKind;
use rustc_ast::ast::Local;
use rustc_ast::ast::LocalKind;
use rustc_ast::ast::MacCall;
use rustc_ast::ast::MacCallStmt;
use rustc_ast::ast::MacStmtStyle;
use rustc_ast::ast::MacroDef;
use rustc_ast::ast::MetaItemLit;
use rustc_ast::ast::MethodCall;
use rustc_ast::ast::ModKind;
use rustc_ast::ast::ModSpans;
use rustc_ast::ast::Movability;
use rustc_ast::ast::MutTy;
use rustc_ast::ast::Mutability;
use rustc_ast::ast::NodeId;
use rustc_ast::ast::NormalAttr;
use rustc_ast::ast::Param;
use rustc_ast::ast::ParenthesizedArgs;
use rustc_ast::ast::Pat;
use rustc_ast::ast::PatField;
use rustc_ast::ast::PatFieldsRest;
use rustc_ast::ast::PatKind;
use rustc_ast::ast::Path;
use rustc_ast::ast::PathSegment;
use rustc_ast::ast::PolyTraitRef;
use rustc_ast::ast::QSelf;
use rustc_ast::ast::RangeEnd;
use rustc_ast::ast::RangeLimits;
use rustc_ast::ast::RangeSyntax;
use rustc_ast::ast::StaticItem;
use rustc_ast::ast::Stmt;
use rustc_ast::ast::StmtKind;
use rustc_ast::ast::StrLit;
use rustc_ast::ast::StrStyle;
use rustc_ast::ast::StructExpr;
use rustc_ast::ast::StructRest;
use rustc_ast::ast::Term;
use rustc_ast::ast::Trait;
use rustc_ast::ast::TraitBoundModifiers;
use rustc_ast::ast::TraitObjectSyntax;
use rustc_ast::ast::TraitRef;
use rustc_ast::ast::Ty;
use rustc_ast::ast::TyAlias;
use rustc_ast::ast::TyAliasWhereClause;
use rustc_ast::ast::TyKind;
use rustc_ast::ast::UintTy;
use rustc_ast::ast::UnOp;
use rustc_ast::ast::Unsafe;
use rustc_ast::ast::UnsafeSource;
use rustc_ast::ast::UseTree;
use rustc_ast::ast::UseTreeKind;
use rustc_ast::ast::Variant;
use rustc_ast::ast::VariantData;
use rustc_ast::ast::Visibility;
use rustc_ast::ast::VisibilityKind;
use rustc_ast::ast::WhereBoundPredicate;
use rustc_ast::ast::WhereClause;
use rustc_ast::ast::WhereEqPredicate;
use rustc_ast::ast::WherePredicate;
use rustc_ast::ast::WhereRegionPredicate;
use rustc_ast::ptr::P;
use rustc_ast::token::{self, CommentKind, Delimiter, Lit, Nonterminal, Token, TokenKind};
use rustc_ast::tokenstream::{
AttrTokenStream, AttrTokenTree, AttributesData, DelimSpacing, DelimSpan, LazyAttrTokenStream,
Spacing, TokenStream, TokenTree,
};
use rustc_data_structures::sync::Lrc;
use rustc_span::source_map::Spanned;
use rustc_span::symbol::{sym, Ident};
use rustc_span::{ErrorGuaranteed, Span, Symbol, SyntaxContext, DUMMY_SP};
use std::collections::HashMap;
use std::hash::{BuildHasher, Hash};
use thin_vec::ThinVec;
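// Span-insensitive equality for rustc AST nodes: two trees compare equal if
// they have the same structure, regardless of the source positions recorded
// in their `Span`s.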
pub trait SpanlessEq {
fn eq(&self, other: &Self) -> bool;
}
impl<T: ?Sized + SpanlessEq> SpanlessEq for Box<T> {
fn eq(&self, other: &Self) -> bool {
SpanlessEq::eq(&**self, &**other)
}
}
impl<T: ?Sized + SpanlessEq> SpanlessEq for P<T> {
fn eq(&self, other: &Self) -> bool {
SpanlessEq::eq(&**self, &**other)
}
}
impl<T: ?Sized + SpanlessEq> SpanlessEq for Lrc<T> {
fn eq(&self, other: &Self) -> bool {
SpanlessEq::eq(&**self, &**other)
}
}
impl<T: SpanlessEq> SpanlessEq for Option<T> {
fn eq(&self, other: &Self) -> bool {
match (self, other) {
(None, None) => true,
(Some(this), Some(other)) => SpanlessEq::eq(this, other),
_ => false,
}
}
}
impl<T: SpanlessEq, E: SpanlessEq> SpanlessEq for Result<T, E> {
fn eq(&self, other: &Self) -> bool {
match (self, other) {
(Ok(this), Ok(other)) => SpanlessEq::eq(this, other),
(Err(this), Err(other)) => SpanlessEq::eq(this, other),
_ => false,
}
}
}
impl<T: SpanlessEq> SpanlessEq for [T] {
fn eq(&self, other: &Self) -> bool {
self.len() == other.len() && self.iter().zip(other).all(|(a, b)| SpanlessEq::eq(a, b))
}
}
impl<T: SpanlessEq> SpanlessEq for Vec<T> {
fn eq(&self, other: &Self) -> bool {
<[T] as SpanlessEq>::eq(self, other)
}
}
impl<T: SpanlessEq> SpanlessEq for ThinVec<T> {
fn eq(&self, other: &Self) -> bool {
self.len() == other.len()
&& self
.iter()
.zip(other.iter())
.all(|(a, b)| SpanlessEq::eq(a, b))
}
}
impl<K: Eq + Hash, V: SpanlessEq, S: BuildHasher> SpanlessEq for HashMap<K, V, S> {
fn eq(&self, other: &Self) -> bool {
self.len() == other.len()
&& self.iter().all(|(key, this_v)| {
other
.get(key)
.map_or(false, |other_v| SpanlessEq::eq(this_v, other_v))
})
}
}
impl<T: SpanlessEq> SpanlessEq for Spanned<T> {
fn eq(&self, other: &Self) -> bool {
SpanlessEq::eq(&self.node, &other.node)
}
}
impl<A: SpanlessEq, B: SpanlessEq> SpanlessEq for (A, B) {
fn eq(&self, other: &Self) -> bool {
SpanlessEq::eq(&self.0, &other.0) && SpanlessEq::eq(&self.1, &other.1)
}
}
impl<A: SpanlessEq, B: SpanlessEq, C: SpanlessEq> SpanlessEq for (A, B, C) {
fn eq(&self, other: &Self) -> bool {
SpanlessEq::eq(&self.0, &other.0)
&& SpanlessEq::eq(&self.1, &other.1)
&& SpanlessEq::eq(&self.2, &other.2)
}
}
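// Types that carry only positional or bookkeeping information; any two values
// are considered equal once spans are ignored.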
macro_rules! spanless_eq_true {
($name:ty) => {
impl SpanlessEq for $name {
fn eq(&self, _other: &Self) -> bool {
true
}
}
};
}
spanless_eq_true!(Span);
spanless_eq_true!(DelimSpan);
spanless_eq_true!(AttrId);
spanless_eq_true!(NodeId);
spanless_eq_true!(SyntaxContext);
spanless_eq_true!(Spacing);
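// Types that contain no span information; their ordinary `PartialEq` is
// already span-insensitive.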
macro_rules! spanless_eq_partial_eq {
($name:ty) => {
impl SpanlessEq for $name {
fn eq(&self, other: &Self) -> bool {
PartialEq::eq(self, other)
}
}
};
}
spanless_eq_partial_eq!(bool);
spanless_eq_partial_eq!(u8);
spanless_eq_partial_eq!(u16);
spanless_eq_partial_eq!(u32);
spanless_eq_partial_eq!(u128);
spanless_eq_partial_eq!(usize);
spanless_eq_partial_eq!(char);
spanless_eq_partial_eq!(String);
spanless_eq_partial_eq!(Symbol);
spanless_eq_partial_eq!(CommentKind);
spanless_eq_partial_eq!(Delimiter);
spanless_eq_partial_eq!(InlineAsmOptions);
spanless_eq_partial_eq!(token::LitKind);
spanless_eq_partial_eq!(ErrorGuaranteed);
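// Generates a field-by-field SpanlessEq impl for a struct. Fields listed
// normally are compared with SpanlessEq; fields prefixed with `!` are ignored.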
macro_rules! spanless_eq_struct {
{
$($name:ident)::+ $(<$param:ident>)?
$([$field:tt $this:ident $other:ident])*
$(![$ignore:tt])*;
} => {
impl $(<$param: SpanlessEq>)* SpanlessEq for $($name)::+ $(<$param>)* {
fn eq(&self, other: &Self) -> bool {
let $($name)::+ { $($field: $this,)* $($ignore: _,)* } = self;
let $($name)::+ { $($field: $other,)* $($ignore: _,)* } = other;
true $(&& SpanlessEq::eq($this, $other))*
}
}
};
{
$($name:ident)::+ $(<$param:ident>)?
$([$field:tt $this:ident $other:ident])*
$(![$ignore:tt])*;
!$next:tt
$($rest:tt)*
} => {
spanless_eq_struct! {
$($name)::+ $(<$param>)*
$([$field $this $other])*
$(![$ignore])*
![$next];
$($rest)*
}
};
{
$($name:ident)::+ $(<$param:ident>)?
$([$field:tt $this:ident $other:ident])*
$(![$ignore:tt])*;
$next:tt
$($rest:tt)*
} => {
spanless_eq_struct! {
$($name)::+ $(<$param>)*
$([$field $this $other])*
[$next this other]
$(![$ignore])*;
$($rest)*
}
};
}
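// Same idea for enums: matching variants are compared field by field and
// `!`-prefixed fields are ignored. The exhaustive `match self` emitted first
// has no runtime effect; it only makes the impl fail to compile if a variant
// is missing from the macro invocation.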
macro_rules! spanless_eq_enum {
{
$($name:ident)::+;
$([$($variant:ident)::+; $([$field:tt $this:ident $other:ident])* $(![$ignore:tt])*])*
} => {
impl SpanlessEq for $($name)::+ {
fn eq(&self, other: &Self) -> bool {
match self {
$(
$($variant)::+ { .. } => {}
)*
}
#[allow(unreachable_patterns)]
match (self, other) {
$(
(
$($variant)::+ { $($field: $this,)* $($ignore: _,)* },
$($variant)::+ { $($field: $other,)* $($ignore: _,)* },
) => {
true $(&& SpanlessEq::eq($this, $other))*
}
)*
_ => false,
}
}
}
};
{
$($name:ident)::+;
$([$($variant:ident)::+; $($fields:tt)*])*
$next:ident [$([$($named:tt)*])* $(![$ignore:tt])*] (!$i:tt $($field:tt)*)
$($rest:tt)*
} => {
spanless_eq_enum! {
$($name)::+;
$([$($variant)::+; $($fields)*])*
$next [$([$($named)*])* $(![$ignore])* ![$i]] ($($field)*)
$($rest)*
}
};
{
$($name:ident)::+;
$([$($variant:ident)::+; $($fields:tt)*])*
$next:ident [$([$($named:tt)*])* $(![$ignore:tt])*] ($i:tt $($field:tt)*)
$($rest:tt)*
} => {
spanless_eq_enum! {
$($name)::+;
$([$($variant)::+; $($fields)*])*
$next [$([$($named)*])* [$i this other] $(![$ignore])*] ($($field)*)
$($rest)*
}
};
{
$($name:ident)::+;
$([$($variant:ident)::+; $($fields:tt)*])*
$next:ident [$($named:tt)*] ()
$($rest:tt)*
} => {
spanless_eq_enum! {
$($name)::+;
$([$($variant)::+; $($fields)*])*
[$($name)::+::$next; $($named)*]
$($rest)*
}
};
{
$($name:ident)::+;
$([$($variant:ident)::+; $($fields:tt)*])*
$next:ident ($($field:tt)*)
$($rest:tt)*
} => {
spanless_eq_enum! {
$($name)::+;
$([$($variant)::+; $($fields)*])*
$next [] ($($field)*)
$($rest)*
}
};
{
$($name:ident)::+;
$([$($variant:ident)::+; $($fields:tt)*])*
$next:ident
$($rest:tt)*
} => {
spanless_eq_enum! {
$($name)::+;
$([$($variant)::+; $($fields)*])*
[$($name)::+::$next;]
$($rest)*
}
};
}
spanless_eq_struct!(AngleBracketedArgs; span args);
spanless_eq_struct!(AnonConst; id value);
spanless_eq_struct!(Arm; attrs pat guard body span id is_placeholder);
spanless_eq_struct!(AssocConstraint; id ident gen_args kind span);
spanless_eq_struct!(AttrItem; path args tokens);
spanless_eq_struct!(AttrTokenStream; 0);
spanless_eq_struct!(Attribute; kind id style span);
spanless_eq_struct!(AttributesData; attrs tokens);
spanless_eq_struct!(BareFnTy; unsafety ext generic_params decl decl_span);
spanless_eq_struct!(BindingAnnotation; 0 1);
spanless_eq_struct!(Block; stmts id rules span tokens could_be_bare_literal);
spanless_eq_struct!(Closure; binder capture_clause constness coroutine_kind movability fn_decl body !fn_decl_span !fn_arg_span);
spanless_eq_struct!(ConstItem; defaultness generics ty expr);
spanless_eq_struct!(Crate; attrs items spans id is_placeholder);
spanless_eq_struct!(DelimArgs; dspan delim tokens);
spanless_eq_struct!(DelimSpacing; open close);
spanless_eq_struct!(EnumDef; variants);
spanless_eq_struct!(Expr; id kind span attrs !tokens);
spanless_eq_struct!(ExprField; attrs id span ident expr is_shorthand is_placeholder);
spanless_eq_struct!(FieldDef; attrs id span vis ident ty is_placeholder);
spanless_eq_struct!(Fn; defaultness generics sig body);
spanless_eq_struct!(FnDecl; inputs output);
spanless_eq_struct!(FnHeader; constness coroutine_kind unsafety ext);
spanless_eq_struct!(FnSig; header decl span);
spanless_eq_struct!(ForeignMod; unsafety abi items);
spanless_eq_struct!(FormatArgPosition; index kind span);
spanless_eq_struct!(FormatArgs; span template arguments);
spanless_eq_struct!(FormatArgument; kind expr);
spanless_eq_struct!(FormatOptions; width precision alignment fill sign alternate zero_pad debug_hex);
spanless_eq_struct!(FormatPlaceholder; argument span format_trait format_options);
spanless_eq_struct!(GenericParam; id ident attrs bounds is_placeholder kind !colon_span);
spanless_eq_struct!(Generics; params where_clause span);
spanless_eq_struct!(Impl; defaultness unsafety generics constness polarity of_trait self_ty items);
spanless_eq_struct!(InlineAsm; template template_strs operands clobber_abis options line_spans);
spanless_eq_struct!(InlineAsmSym; id qself path);
spanless_eq_struct!(Item<K>; attrs id span vis ident kind !tokens);
spanless_eq_struct!(Label; ident);
spanless_eq_struct!(Lifetime; id ident);
spanless_eq_struct!(Lit; kind symbol suffix);
spanless_eq_struct!(Local; pat ty kind id span attrs !tokens);
spanless_eq_struct!(MacCall; path args);
spanless_eq_struct!(MacCallStmt; mac style attrs tokens);
spanless_eq_struct!(MacroDef; body macro_rules);
spanless_eq_struct!(MetaItemLit; symbol suffix kind span);
spanless_eq_struct!(MethodCall; seg receiver args !span);
spanless_eq_struct!(ModSpans; !inner_span !inject_use_span);
spanless_eq_struct!(MutTy; ty mutbl);
spanless_eq_struct!(NormalAttr; item tokens);
spanless_eq_struct!(ParenthesizedArgs; span inputs inputs_span output);
spanless_eq_struct!(Pat; id kind span tokens);
spanless_eq_struct!(PatField; ident pat is_shorthand attrs id span is_placeholder);
spanless_eq_struct!(Path; span segments tokens);
spanless_eq_struct!(PathSegment; ident id args);
spanless_eq_struct!(PolyTraitRef; bound_generic_params trait_ref span);
spanless_eq_struct!(QSelf; ty path_span position);
spanless_eq_struct!(StaticItem; ty mutability expr);
spanless_eq_struct!(Stmt; id kind span);
spanless_eq_struct!(StrLit; symbol suffix symbol_unescaped style span);
spanless_eq_struct!(StructExpr; qself path fields rest);
spanless_eq_struct!(Token; kind span);
spanless_eq_struct!(Trait; unsafety is_auto generics bounds items);
spanless_eq_struct!(TraitBoundModifiers; constness polarity);
spanless_eq_struct!(TraitRef; path ref_id);
spanless_eq_struct!(Ty; id kind span tokens);
spanless_eq_struct!(TyAlias; defaultness generics where_clauses !where_predicates_split bounds ty);
spanless_eq_struct!(TyAliasWhereClause; !0 1);
spanless_eq_struct!(UseTree; prefix kind span);
spanless_eq_struct!(Variant; attrs id span !vis ident data disr_expr is_placeholder);
spanless_eq_struct!(Visibility; kind span tokens);
spanless_eq_struct!(WhereBoundPredicate; span bound_generic_params bounded_ty bounds);
spanless_eq_struct!(WhereClause; has_where_token predicates span);
spanless_eq_struct!(WhereEqPredicate; span lhs_ty rhs_ty);
spanless_eq_struct!(WhereRegionPredicate; span lifetime bounds);
spanless_eq_enum!(AngleBracketedArg; Arg(0) Constraint(0));
spanless_eq_enum!(AssocConstraintKind; Equality(term) Bound(bounds));
spanless_eq_enum!(AssocItemKind; Const(0) Fn(0) Type(0) MacCall(0));
spanless_eq_enum!(AttrArgs; Empty Delimited(0) Eq(0 1));
spanless_eq_enum!(AttrArgsEq; Ast(0) Hir(0));
spanless_eq_enum!(AttrStyle; Outer Inner);
spanless_eq_enum!(AttrTokenTree; Token(0 1) Delimited(0 1 2 3) Attributes(0));
spanless_eq_enum!(BinOpKind; Add Sub Mul Div Rem And Or BitXor BitAnd BitOr Shl Shr Eq Lt Le Ne Ge Gt);
spanless_eq_enum!(BlockCheckMode; Default Unsafe(0));
spanless_eq_enum!(BorrowKind; Ref Raw);
spanless_eq_enum!(BoundConstness; Never Always(0) Maybe(0));
spanless_eq_enum!(BoundPolarity; Positive Negative(0) Maybe(0));
spanless_eq_enum!(ByRef; Yes No);
spanless_eq_enum!(CaptureBy; Value(move_kw) Ref);
spanless_eq_enum!(ClosureBinder; NotPresent For(span generic_params));
spanless_eq_enum!(Const; Yes(0) No);
spanless_eq_enum!(Defaultness; Default(0) Final);
spanless_eq_enum!(Extern; None Implicit(0) Explicit(0 1));
spanless_eq_enum!(FloatTy; F32 F64);
spanless_eq_enum!(FnRetTy; Default(0) Ty(0));
spanless_eq_enum!(ForLoopKind; For ForAwait);
spanless_eq_enum!(ForeignItemKind; Static(0 1 2) Fn(0) TyAlias(0) MacCall(0));
spanless_eq_enum!(FormatAlignment; Left Right Center);
spanless_eq_enum!(FormatArgPositionKind; Implicit Number Named);
spanless_eq_enum!(FormatArgsPiece; Literal(0) Placeholder(0));
spanless_eq_enum!(FormatArgumentKind; Normal Named(0) Captured(0));
spanless_eq_enum!(FormatCount; Literal(0) Argument(0));
spanless_eq_enum!(FormatDebugHex; Lower Upper);
spanless_eq_enum!(FormatSign; Plus Minus);
spanless_eq_enum!(FormatTrait; Display Debug LowerExp UpperExp Octal Pointer Binary LowerHex UpperHex);
spanless_eq_enum!(GenBlockKind; Async Gen AsyncGen);
spanless_eq_enum!(GenericArg; Lifetime(0) Type(0) Const(0));
spanless_eq_enum!(GenericArgs; AngleBracketed(0) Parenthesized(0));
spanless_eq_enum!(GenericBound; Trait(0 1) Outlives(0));
spanless_eq_enum!(GenericParamKind; Lifetime Type(default) Const(ty kw_span default));
spanless_eq_enum!(ImplPolarity; Positive Negative(0));
spanless_eq_enum!(Inline; Yes No);
spanless_eq_enum!(InlineAsmRegOrRegClass; Reg(0) RegClass(0));
spanless_eq_enum!(InlineAsmTemplatePiece; String(0) Placeholder(operand_idx modifier span));
spanless_eq_enum!(IntTy; Isize I8 I16 I32 I64 I128);
spanless_eq_enum!(IsAuto; Yes No);
spanless_eq_enum!(LitFloatType; Suffixed(0) Unsuffixed);
spanless_eq_enum!(LitIntType; Signed(0) Unsigned(0) Unsuffixed);
spanless_eq_enum!(LocalKind; Decl Init(0) InitElse(0 1));
spanless_eq_enum!(MacStmtStyle; Semicolon Braces NoBraces);
spanless_eq_enum!(ModKind; Loaded(0 1 2) Unloaded);
spanless_eq_enum!(Movability; Static Movable);
spanless_eq_enum!(Mutability; Mut Not);
spanless_eq_enum!(PatFieldsRest; Rest None);
spanless_eq_enum!(RangeEnd; Included(0) Excluded);
spanless_eq_enum!(RangeLimits; HalfOpen Closed);
spanless_eq_enum!(StmtKind; Local(0) Item(0) Expr(0) Semi(0) Empty MacCall(0));
spanless_eq_enum!(StrStyle; Cooked Raw(0));
spanless_eq_enum!(StructRest; Base(0) Rest(0) None);
spanless_eq_enum!(Term; Ty(0) Const(0));
spanless_eq_enum!(TokenTree; Token(0 1) Delimited(0 1 2 3));
spanless_eq_enum!(TraitObjectSyntax; Dyn DynStar None);
spanless_eq_enum!(UintTy; Usize U8 U16 U32 U64 U128);
spanless_eq_enum!(UnOp; Deref Not Neg);
spanless_eq_enum!(Unsafe; Yes(0) No);
spanless_eq_enum!(UnsafeSource; CompilerGenerated UserProvided);
spanless_eq_enum!(UseTreeKind; Simple(0) Nested(0) Glob);
spanless_eq_enum!(VariantData; Struct(fields recovered) Tuple(0 1) Unit(0));
spanless_eq_enum!(VisibilityKind; Public Restricted(path id shorthand) Inherited);
spanless_eq_enum!(WherePredicate; BoundPredicate(0) RegionPredicate(0) EqPredicate(0));
spanless_eq_enum!(CoroutineKind; Async(span closure_id return_impl_trait_id)
Gen(span closure_id return_impl_trait_id)
AsyncGen(span closure_id return_impl_trait_id));
spanless_eq_enum!(ExprKind; Array(0) ConstBlock(0) Call(0 1) MethodCall(0)
Tup(0) Binary(0 1 2) Unary(0 1) Lit(0) Cast(0 1) Type(0 1) Let(0 1 2 3)
If(0 1 2) While(0 1 2) ForLoop(pat iter body label kind) Loop(0 1 2)
Match(0 1) Closure(0) Block(0 1) Gen(0 1 2) Await(0 1) TryBlock(0)
Assign(0 1 2) AssignOp(0 1 2) Field(0 1) Index(0 1 2) Underscore
Range(0 1 2) Path(0 1) AddrOf(0 1 2) Break(0 1) Continue(0) Ret(0)
InlineAsm(0) OffsetOf(0 1) MacCall(0) Struct(0) Repeat(0 1) Paren(0) Try(0)
Yield(0) Yeet(0) Become(0) IncludedBytes(0) FormatArgs(0) Err);
spanless_eq_enum!(InlineAsmOperand; In(reg expr) Out(reg late expr)
InOut(reg late expr) SplitInOut(reg late in_expr out_expr) Const(anon_const)
Sym(sym));
spanless_eq_enum!(ItemKind; ExternCrate(0) Use(0) Static(0) Const(0) Fn(0)
Mod(0 1) ForeignMod(0) GlobalAsm(0) TyAlias(0) Enum(0 1) Struct(0 1)
Union(0 1) Trait(0) TraitAlias(0 1) Impl(0) MacCall(0) MacroDef(0));
spanless_eq_enum!(LitKind; Str(0 1) ByteStr(0 1) CStr(0 1) Byte(0) Char(0)
Int(0 1) Float(0 1) Bool(0) Err);
spanless_eq_enum!(PatKind; Wild Ident(0 1 2) Struct(0 1 2 3) TupleStruct(0 1 2)
Or(0) Path(0 1) Tuple(0) Box(0) Ref(0 1) Lit(0) Range(0 1 2) Slice(0) Rest
Never Paren(0) MacCall(0));
spanless_eq_enum!(TyKind; Slice(0) Array(0 1) Ptr(0) Ref(0 1) BareFn(0) Never
Tup(0) AnonStruct(0) AnonUnion(0) Path(0 1) TraitObject(0 1) ImplTrait(0 1)
Paren(0) Typeof(0) Infer ImplicitSelf MacCall(0) Err CVarArgs);
impl SpanlessEq for Ident {
fn eq(&self, other: &Self) -> bool {
self.as_str() == other.as_str()
}
}
impl SpanlessEq for RangeSyntax {
fn eq(&self, _other: &Self) -> bool {
match self {
RangeSyntax::DotDotDot | RangeSyntax::DotDotEq => true,
}
}
}
impl SpanlessEq for Param {
fn eq(&self, other: &Self) -> bool {
let Param {
attrs,
ty,
pat,
id,
span: _,
is_placeholder,
} = self;
let Param {
attrs: attrs2,
ty: ty2,
pat: pat2,
id: id2,
span: _,
is_placeholder: is_placeholder2,
} = other;
SpanlessEq::eq(id, id2)
&& SpanlessEq::eq(is_placeholder, is_placeholder2)
&& (matches!(ty.kind, TyKind::Err)
|| matches!(ty2.kind, TyKind::Err)
|| SpanlessEq::eq(attrs, attrs2)
&& SpanlessEq::eq(ty, ty2)
&& SpanlessEq::eq(pat, pat2))
}
}
impl SpanlessEq for TokenKind {
fn eq(&self, other: &Self) -> bool {
match (self, other) {
(TokenKind::Literal(this), TokenKind::Literal(other)) => SpanlessEq::eq(this, other),
(TokenKind::DotDotEq | TokenKind::DotDotDot, _) => match other {
TokenKind::DotDotEq | TokenKind::DotDotDot => true,
_ => false,
},
(TokenKind::Interpolated(this), TokenKind::Interpolated(other)) => {
let (this, this_span) = this.as_ref();
let (other, other_span) = other.as_ref();
SpanlessEq::eq(this_span, other_span)
&& match (this, other) {
(Nonterminal::NtExpr(this), Nonterminal::NtExpr(other)) => {
SpanlessEq::eq(this, other)
}
_ => this == other,
}
}
_ => self == other,
}
}
}
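// Token streams are compared tree by tree. As a special case, a doc comment
// token on one side may match the expanded `#[doc = "..."]` attribute form on
// the other.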
impl SpanlessEq for TokenStream {
fn eq(&self, other: &Self) -> bool {
let mut this_trees = self.trees();
let mut other_trees = other.trees();
loop {
let this = match this_trees.next() {
None => return other_trees.next().is_none(),
Some(tree) => tree,
};
let other = match other_trees.next() {
None => return false,
Some(tree) => tree,
};
if SpanlessEq::eq(this, other) {
continue;
}
if let (TokenTree::Token(this, _), TokenTree::Token(other, _)) = (this, other) {
if match (&this.kind, &other.kind) {
(TokenKind::Literal(this), TokenKind::Literal(other)) => {
SpanlessEq::eq(this, other)
}
(TokenKind::DocComment(_kind, style, symbol), TokenKind::Pound) => {
doc_comment(*style, *symbol, &mut other_trees)
}
(TokenKind::Pound, TokenKind::DocComment(_kind, style, symbol)) => {
doc_comment(*style, *symbol, &mut this_trees)
}
_ => false,
} {
continue;
}
}
return false;
}
}
}
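// Having already consumed the leading `#` of the attribute form, check whether
// the following tokens are `!` (for inner attributes) followed by
// `[doc = "..."]` whose string literal unescapes to `unescaped`.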
fn doc_comment<'a>(
style: AttrStyle,
unescaped: Symbol,
trees: &mut impl Iterator<Item = &'a TokenTree>,
) -> bool {
if match style {
AttrStyle::Outer => false,
AttrStyle::Inner => true,
} {
match trees.next() {
Some(TokenTree::Token(
Token {
kind: TokenKind::Not,
span: _,
},
_spacing,
)) => {}
_ => return false,
}
}
let stream = match trees.next() {
Some(TokenTree::Delimited(_span, _spacing, Delimiter::Bracket, stream)) => stream,
_ => return false,
};
let mut trees = stream.trees();
match trees.next() {
Some(TokenTree::Token(
Token {
kind: TokenKind::Ident(symbol, false),
span: _,
},
_spacing,
)) if *symbol == sym::doc => {}
_ => return false,
}
match trees.next() {
Some(TokenTree::Token(
Token {
kind: TokenKind::Eq,
span: _,
},
_spacing,
)) => {}
_ => return false,
}
match trees.next() {
Some(TokenTree::Token(token, _spacing)) => {
is_escaped_literal_token(token, unescaped) && trees.next().is_none()
}
_ => false,
}
}
fn is_escaped_literal_token(token: &Token, unescaped: Symbol) -> bool {
match token {
Token {
kind: TokenKind::Literal(lit),
span: _,
} => match MetaItemLit::from_token_lit(*lit, DUMMY_SP) {
Ok(lit) => is_escaped_literal_meta_item_lit(&lit, unescaped),
Err(_) => false,
},
Token {
kind: TokenKind::Interpolated(nonterminal),
span: _,
} => match &nonterminal.0 {
Nonterminal::NtExpr(expr) => match &expr.kind {
ExprKind::Lit(lit) => is_escaped_lit(lit, unescaped),
_ => false,
},
_ => false,
},
_ => false,
}
}
fn is_escaped_literal_attr_args(value: &AttrArgsEq, unescaped: Symbol) -> bool {
match value {
AttrArgsEq::Ast(expr) => match &expr.kind {
ExprKind::Lit(lit) => is_escaped_lit(lit, unescaped),
_ => false,
},
AttrArgsEq::Hir(lit) => is_escaped_literal_meta_item_lit(lit, unescaped),
}
}
fn is_escaped_literal_meta_item_lit(lit: &MetaItemLit, unescaped: Symbol) -> bool {
match lit {
MetaItemLit {
symbol: _,
suffix: None,
kind,
span: _,
} => is_escaped_lit_kind(kind, unescaped),
_ => false,
}
}
fn is_escaped_lit(lit: &Lit, unescaped: Symbol) -> bool {
match lit {
Lit {
kind: token::LitKind::Str,
symbol: _,
suffix: None,
} => match LitKind::from_token_lit(*lit) {
Ok(lit_kind) => is_escaped_lit_kind(&lit_kind, unescaped),
_ => false,
},
_ => false,
}
}
fn is_escaped_lit_kind(kind: &LitKind, unescaped: Symbol) -> bool {
match kind {
LitKind::Str(symbol, StrStyle::Cooked) => {
symbol.as_str().replace('\r', "") == unescaped.as_str().replace('\r', "")
}
_ => false,
}
}
impl SpanlessEq for LazyAttrTokenStream {
fn eq(&self, other: &Self) -> bool {
let this = self.to_attr_token_stream();
let other = other.to_attr_token_stream();
SpanlessEq::eq(&this, &other)
}
}
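// A doc comment on one side is allowed to match an equivalent `#[doc = "..."]`
// attribute on the other; the final arm makes the comparison symmetric.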
impl SpanlessEq for AttrKind {
fn eq(&self, other: &Self) -> bool {
match (self, other) {
(AttrKind::Normal(normal), AttrKind::Normal(normal2)) => {
SpanlessEq::eq(normal, normal2)
}
(AttrKind::DocComment(kind, symbol), AttrKind::DocComment(kind2, symbol2)) => {
SpanlessEq::eq(kind, kind2) && SpanlessEq::eq(symbol, symbol2)
}
(AttrKind::DocComment(kind, unescaped), AttrKind::Normal(normal2)) => {
match kind {
CommentKind::Line | CommentKind::Block => {}
}
let path = Path::from_ident(Ident::with_dummy_span(sym::doc));
SpanlessEq::eq(&path, &normal2.item.path)
&& match &normal2.item.args {
AttrArgs::Empty | AttrArgs::Delimited(_) => false,
AttrArgs::Eq(_span, value) => {
is_escaped_literal_attr_args(value, *unescaped)
}
}
}
(AttrKind::Normal(_), AttrKind::DocComment(..)) => SpanlessEq::eq(other, self),
}
}
}
impl SpanlessEq for FormatArguments {
fn eq(&self, other: &Self) -> bool {
SpanlessEq::eq(self.all_args(), other.all_args())
}
}

28
vendor/syn/tests/common/mod.rs vendored Normal file

@ -0,0 +1,28 @@
#![allow(dead_code)]
#![allow(clippy::module_name_repetitions, clippy::shadow_unrelated)]
use rayon::ThreadPoolBuilder;
use std::env;
pub mod eq;
pub mod parse;
/// Read the `ABORT_AFTER_FAILURE` environment variable, and parse it.
pub fn abort_after() -> usize {
match env::var("ABORT_AFTER_FAILURE") {
Ok(s) => s.parse().expect("failed to parse ABORT_AFTER_FAILURE"),
Err(_) => usize::max_value(),
}
}
/// Configure Rayon threadpool.
pub fn rayon_init() {
let stack_size = match env::var("RUST_MIN_STACK") {
Ok(s) => s.parse().expect("failed to parse RUST_MIN_STACK"),
Err(_) => 20 * 1024 * 1024,
};
ThreadPoolBuilder::new()
.stack_size(stack_size)
.build_global()
.unwrap();
}

51
vendor/syn/tests/common/parse.rs vendored Normal file

@ -0,0 +1,51 @@
extern crate rustc_ast;
extern crate rustc_driver;
extern crate rustc_expand;
extern crate rustc_parse as parse;
extern crate rustc_session;
extern crate rustc_span;
use rustc_ast::ast;
use rustc_ast::ptr::P;
use rustc_session::parse::ParseSess;
use rustc_span::source_map::FilePathMapping;
use rustc_span::FileName;
use std::panic;
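// Parse an expression using rustc's own parser, catching panics so that a
// parser crash is reported as a failure (`None`) instead of aborting the run.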
pub fn librustc_expr(input: &str) -> Option<P<ast::Expr>> {
match panic::catch_unwind(|| {
let locale_resources = rustc_driver::DEFAULT_LOCALE_RESOURCES.to_vec();
let file_path_mapping = FilePathMapping::empty();
let sess = ParseSess::new(locale_resources, file_path_mapping);
let e = parse::new_parser_from_source_str(
&sess,
FileName::Custom("test_precedence".to_string()),
input.to_string(),
)
.parse_expr();
match e {
Ok(expr) => Some(expr),
Err(mut diagnostic) => {
diagnostic.emit();
None
}
}
}) {
Ok(Some(e)) => Some(e),
Ok(None) => None,
Err(_) => {
errorf!("librustc panicked\n");
None
}
}
}
pub fn syn_expr(input: &str) -> Option<syn::Expr> {
match syn::parse_str(input) {
Ok(e) => Some(e),
Err(msg) => {
errorf!("syn failed to parse\n{:?}\n", msg);
None
}
}
}

5157
vendor/syn/tests/debug/gen.rs vendored Normal file

File diff suppressed because it is too large

147
vendor/syn/tests/debug/mod.rs vendored Normal file

@ -0,0 +1,147 @@
#![allow(
clippy::no_effect_underscore_binding,
clippy::too_many_lines,
clippy::used_underscore_binding
)]
#[rustfmt::skip]
mod gen;
use proc_macro2::{Ident, Literal, TokenStream};
use ref_cast::RefCast;
use std::fmt::{self, Debug};
use std::ops::Deref;
use syn::punctuated::Punctuated;
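// Transparent newtype that gives syntax tree types a compact `Debug`
// representation for snapshot tests.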
#[derive(RefCast)]
#[repr(transparent)]
pub struct Lite<T: ?Sized> {
value: T,
}
#[allow(non_snake_case)]
pub fn Lite<T: ?Sized>(value: &T) -> &Lite<T> {
Lite::ref_cast(value)
}
impl<T: ?Sized> Deref for Lite<T> {
type Target = T;
fn deref(&self) -> &Self::Target {
&self.value
}
}
impl Debug for Lite<bool> {
fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
write!(formatter, "{}", self.value)
}
}
impl Debug for Lite<u32> {
fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
write!(formatter, "{}", self.value)
}
}
impl Debug for Lite<usize> {
fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
write!(formatter, "{}", self.value)
}
}
impl Debug for Lite<String> {
fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
write!(formatter, "{:?}", self.value)
}
}
impl Debug for Lite<Ident> {
fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
write!(formatter, "{:?}", self.value.to_string())
}
}
impl Debug for Lite<Literal> {
fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
write!(formatter, "{}", self.value)
}
}
impl Debug for Lite<TokenStream> {
fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
let string = self.value.to_string();
if string.len() <= 80 {
write!(formatter, "TokenStream(`{}`)", self.value)
} else {
formatter
.debug_tuple("TokenStream")
.field(&format_args!("`{}`", string))
.finish()
}
}
}
impl<'a, T> Debug for Lite<&'a T>
where
Lite<T>: Debug,
{
fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
Debug::fmt(Lite(self.value), formatter)
}
}
impl<T> Debug for Lite<Box<T>>
where
Lite<T>: Debug,
{
fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
Debug::fmt(Lite(&*self.value), formatter)
}
}
impl<T> Debug for Lite<Vec<T>>
where
Lite<T>: Debug,
{
fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
formatter
.debug_list()
.entries(self.value.iter().map(Lite))
.finish()
}
}
impl<T, P> Debug for Lite<Punctuated<T, P>>
where
Lite<T>: Debug,
Lite<P>: Debug,
{
fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
let mut list = formatter.debug_list();
for pair in self.pairs() {
let (node, punct) = pair.into_tuple();
list.entry(Lite(node));
list.entries(punct.map(Lite));
}
list.finish()
}
}
struct Present;
impl Debug for Present {
fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
formatter.write_str("Some")
}
}
struct Option {
present: bool,
}
impl Debug for Option {
fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
formatter.write_str(if self.present { "Some" } else { "None" })
}
}

92
vendor/syn/tests/macros/mod.rs vendored Normal file

@ -0,0 +1,92 @@
#![allow(unused_macros, unused_macro_rules)]
#[path = "../debug/mod.rs"]
pub mod debug;
use std::str::FromStr;
use syn::parse::Result;
macro_rules! errorf {
($($tt:tt)*) => {{
use ::std::io::Write;
let stderr = ::std::io::stderr();
write!(stderr.lock(), $($tt)*).unwrap();
}};
}
macro_rules! punctuated {
($($e:expr,)+) => {{
let mut seq = ::syn::punctuated::Punctuated::new();
$(
seq.push($e);
)+
seq
}};
($($e:expr),+) => {
punctuated!($($e,)+)
};
}
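// Parses the argument (tokens or a string, when an `as $t` clause is given) as
// the requested syntax tree type and compares its Lite debug representation
// against the inline insta snapshot.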
macro_rules! snapshot {
($($args:tt)*) => {
snapshot_impl!(() $($args)*)
};
}
macro_rules! snapshot_impl {
(($expr:ident) as $t:ty, @$snapshot:literal) => {
let tokens = crate::macros::TryIntoTokens::try_into_tokens($expr).unwrap();
let $expr: $t = syn::parse_quote!(#tokens);
let debug = crate::macros::debug::Lite(&$expr);
if !cfg!(miri) {
#[allow(clippy::needless_raw_string_hashes)] // https://github.com/mitsuhiko/insta/issues/389
{
insta::assert_debug_snapshot!(debug, @$snapshot);
}
}
};
(($($expr:tt)*) as $t:ty, @$snapshot:literal) => {{
let tokens = crate::macros::TryIntoTokens::try_into_tokens($($expr)*).unwrap();
let syntax_tree: $t = syn::parse_quote!(#tokens);
let debug = crate::macros::debug::Lite(&syntax_tree);
if !cfg!(miri) {
#[allow(clippy::needless_raw_string_hashes)]
{
insta::assert_debug_snapshot!(debug, @$snapshot);
}
}
syntax_tree
}};
(($($expr:tt)*) , @$snapshot:literal) => {{
let syntax_tree = $($expr)*;
let debug = crate::macros::debug::Lite(&syntax_tree);
if !cfg!(miri) {
#[allow(clippy::needless_raw_string_hashes)]
{
insta::assert_debug_snapshot!(debug, @$snapshot);
}
}
syntax_tree
}};
(($($expr:tt)*) $next:tt $($rest:tt)*) => {
snapshot_impl!(($($expr)* $next) $($rest)*)
};
}
pub trait TryIntoTokens {
fn try_into_tokens(self) -> Result<proc_macro2::TokenStream>;
}
impl<'a> TryIntoTokens for &'a str {
fn try_into_tokens(self) -> Result<proc_macro2::TokenStream> {
let tokens = proc_macro2::TokenStream::from_str(self)?;
Ok(tokens)
}
}
impl TryIntoTokens for proc_macro2::TokenStream {
fn try_into_tokens(self) -> Result<proc_macro2::TokenStream> {
Ok(self)
}
}

5
vendor/syn/tests/regression.rs vendored Normal file

@ -0,0 +1,5 @@
#![allow(clippy::let_underscore_untyped, clippy::uninlined_format_args)]
mod regression {
automod::dir!("tests/regression");
}


@ -0,0 +1,5 @@
#[test]
fn issue1108() {
let data = "impl<x<>>::x for";
let _ = syn::parse_file(data);
}


@ -0,0 +1,32 @@
use proc_macro2::{Delimiter, Group};
use quote::quote;
#[test]
fn main() {
// Okay. Rustc allows top-level `static` with no value syntactically, but
// not semantically. Syn parses as Item::Verbatim.
let tokens = quote! {
pub static FOO: usize;
pub static BAR: usize;
};
let file = syn::parse2::<syn::File>(tokens).unwrap();
println!("{:#?}", file);
// Okay.
let inner = Group::new(
Delimiter::None,
quote!(static FOO: usize = 0; pub static BAR: usize = 0),
);
let tokens = quote!(pub #inner;);
let file = syn::parse2::<syn::File>(tokens).unwrap();
println!("{:#?}", file);
// Formerly parser crash.
let inner = Group::new(
Delimiter::None,
quote!(static FOO: usize; pub static BAR: usize),
);
let tokens = quote!(pub #inner;);
let file = syn::parse2::<syn::File>(tokens).unwrap();
println!("{:#?}", file);
}

375
vendor/syn/tests/repo/mod.rs vendored Normal file

@ -0,0 +1,375 @@
#![allow(clippy::manual_assert)]
mod progress;
use self::progress::Progress;
use anyhow::Result;
use flate2::read::GzDecoder;
use rayon::iter::{IntoParallelRefIterator, ParallelIterator};
use std::collections::BTreeSet;
use std::ffi::OsStr;
use std::fs;
use std::path::{Path, PathBuf};
use tar::Archive;
use walkdir::{DirEntry, WalkDir};
const REVISION: &str = "b10cfcd65fd7f7b1ab9beb34798b2108de003452";
#[rustfmt::skip]
static EXCLUDE_FILES: &[&str] = &[
// TODO: CStr literals: c"…", cr"…"
// https://github.com/dtolnay/syn/issues/1502
"src/tools/clippy/tests/ui/needless_raw_string.rs",
"src/tools/clippy/tests/ui/needless_raw_string_hashes.rs",
"src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0085_expr_literals.rs",
// TODO: explicit tail calls: `become _g()`
// https://github.com/dtolnay/syn/issues/1501
"tests/ui/explicit-tail-calls/return-lifetime-sub.rs",
// TODO: non-lifetime binders: `where for<'a, T> &'a Struct<T>: Trait`
// https://github.com/dtolnay/syn/issues/1435
"src/tools/rustfmt/tests/source/issue_5721.rs",
"src/tools/rustfmt/tests/source/non-lifetime-binders.rs",
"src/tools/rustfmt/tests/target/issue_5721.rs",
"src/tools/rustfmt/tests/target/non-lifetime-binders.rs",
"tests/rustdoc-json/non_lifetime_binders.rs",
"tests/rustdoc/inline_cross/auxiliary/non_lifetime_binders.rs",
"tests/rustdoc/non_lifetime_binders.rs",
// TODO: return type notation: `where T: Trait<method(): Send>`
// https://github.com/dtolnay/syn/issues/1434
"src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0208_associated_return_type_bounds.rs",
"tests/ui/associated-type-bounds/return-type-notation/basic.rs",
"tests/ui/feature-gates/feature-gate-return_type_notation.rs",
// TODO: lazy type alias syntax with where-clause in trailing position
// https://github.com/dtolnay/syn/issues/1525
"tests/rustdoc/typedef-inner-variants-lazy_type_alias.rs",
// TODO: gen blocks and functions
// https://github.com/dtolnay/syn/issues/1526
"compiler/rustc_codegen_cranelift/example/gen_block_iterate.rs",
"tests/ui/coroutine/gen_block_is_iter.rs",
"tests/ui/coroutine/gen_block_iterate.rs",
// TODO: struct literal in match guard
// https://github.com/dtolnay/syn/issues/1527
"tests/ui/parser/struct-literal-in-match-guard.rs",
// Compile-fail expr parameter in const generic position: f::<1 + 2>()
"tests/ui/const-generics/early/closing-args-token.rs",
"tests/ui/const-generics/early/const-expression-parameter.rs",
// Compile-fail variadics not in the last position of a function parameter list
"tests/ui/parser/variadic-ffi-syntactic-pass.rs",
// Need at least one trait in impl Trait, no such type as impl 'static
"tests/ui/type-alias-impl-trait/generic_type_does_not_live_long_enough.rs",
// Negative polarity trait bound: `where T: !Copy`
"src/tools/rustfmt/tests/target/negative-bounds.rs",
// Lifetime bound inside for<>: `T: ~const ?for<'a: 'b> Trait<'a>`
"tests/ui/rfcs/rfc-2632-const-trait-impl/tilde-const-syntax.rs",
// Const impl that is not a trait impl: `impl ~const T {}`
"tests/ui/rfcs/rfc-2632-const-trait-impl/syntax.rs",
// Deprecated anonymous parameter syntax in traits
"src/tools/rustfmt/tests/source/trait.rs",
"src/tools/rustfmt/tests/target/trait.rs",
"tests/ui/issues/issue-13105.rs",
"tests/ui/issues/issue-13775.rs",
"tests/ui/issues/issue-34074.rs",
"tests/ui/proc-macro/trait-fn-args-2015.rs",
// Deprecated where-clause location
"src/tools/rustfmt/tests/source/issue_4257.rs",
"src/tools/rustfmt/tests/source/issue_4911.rs",
"src/tools/rustfmt/tests/target/issue_4257.rs",
"src/tools/rustfmt/tests/target/issue_4911.rs",
"tests/pretty/gat-bounds.rs",
"tests/rustdoc/generic-associated-types/gats.rs",
// Deprecated trait object syntax with parenthesized generic arguments and no dyn keyword
"src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0004_value_parameters_no_patterns.rs",
"src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0104_path_fn_trait_args.rs",
"src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0202_typepathfn_with_coloncolon.rs",
"src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0209_bare_dyn_types_with_paren_as_generic_args.rs",
"src/tools/rustfmt/tests/source/attrib.rs",
"src/tools/rustfmt/tests/source/closure.rs",
"src/tools/rustfmt/tests/source/existential_type.rs",
"src/tools/rustfmt/tests/source/fn-simple.rs",
"src/tools/rustfmt/tests/source/fn_args_layout-vertical.rs",
"src/tools/rustfmt/tests/source/issue-4689/one.rs",
"src/tools/rustfmt/tests/source/issue-4689/two.rs",
"src/tools/rustfmt/tests/source/paths.rs",
"src/tools/rustfmt/tests/source/structs.rs",
"src/tools/rustfmt/tests/target/attrib.rs",
"src/tools/rustfmt/tests/target/closure.rs",
"src/tools/rustfmt/tests/target/existential_type.rs",
"src/tools/rustfmt/tests/target/fn-simple.rs",
"src/tools/rustfmt/tests/target/fn.rs",
"src/tools/rustfmt/tests/target/fn_args_layout-vertical.rs",
"src/tools/rustfmt/tests/target/issue-4689/one.rs",
"src/tools/rustfmt/tests/target/issue-4689/two.rs",
"src/tools/rustfmt/tests/target/paths.rs",
"src/tools/rustfmt/tests/target/structs.rs",
"tests/codegen-units/item-collection/non-generic-closures.rs",
"tests/debuginfo/recursive-enum.rs",
"tests/pretty/closure-reform-pretty.rs",
"tests/run-make/reproducible-build-2/reproducible-build.rs",
"tests/run-make/reproducible-build/reproducible-build.rs",
"tests/ui/auxiliary/typeid-intrinsic-aux1.rs",
"tests/ui/auxiliary/typeid-intrinsic-aux2.rs",
"tests/ui/impl-trait/generic-with-implicit-hrtb-without-dyn.rs",
"tests/ui/lifetimes/auxiliary/lifetime_bound_will_change_warning_lib.rs",
"tests/ui/lifetimes/bare-trait-object-borrowck.rs",
"tests/ui/lifetimes/bare-trait-object.rs",
"tests/ui/parser/bounds-obj-parens.rs",
// Invalid unparenthesized range pattern inside slice pattern: `[1..]`
"tests/ui/consts/miri_unleashed/const_refers_to_static_cross_crate.rs",
// Various extensions to Rust syntax made up by rust-analyzer
"src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0012_type_item_where_clause.rs",
"src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0058_range_pat.rs",
"src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0123_param_list_vararg.rs",
"src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0131_existential_type.rs",
"src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0156_fn_def_param.rs",
"src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0179_use_tree_abs_star.rs",
"src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0188_const_param_default_path.rs",
"src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0015_use_tree.rs",
"src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0029_range_forms.rs",
"src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0051_parameter_attrs.rs",
"src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0055_dot_dot_dot.rs",
"src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0068_item_modifiers.rs",
"src/tools/rust-analyzer/crates/syntax/test_data/parser/validation/0031_block_inner_attrs.rs",
"src/tools/rust-analyzer/crates/syntax/test_data/parser/validation/0038_endless_inclusive_range.rs",
"src/tools/rust-analyzer/crates/syntax/test_data/parser/validation/0045_ambiguous_trait_object.rs",
"src/tools/rust-analyzer/crates/syntax/test_data/parser/validation/0046_mutable_const_item.rs",
// Placeholder syntax for "throw expressions"
"compiler/rustc_errors/src/translation.rs",
"src/tools/clippy/tests/ui/needless_return.rs",
"src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0204_yeet_expr.rs",
"tests/pretty/yeet-expr.rs",
"tests/ui/try-trait/yeet-for-option.rs",
"tests/ui/try-trait/yeet-for-result.rs",
// Edition 2015 code using identifiers that are now keywords
// TODO: some of these we should probably parse
"src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0159_try_macro_fallback.rs",
"src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0160_try_macro_rules.rs",
"src/tools/rustfmt/tests/source/configs/indent_style/block_call.rs",
"src/tools/rustfmt/tests/source/configs/use_try_shorthand/false.rs",
"src/tools/rustfmt/tests/source/configs/use_try_shorthand/true.rs",
"src/tools/rustfmt/tests/source/issue_1306.rs",
"src/tools/rustfmt/tests/source/try-conversion.rs",
"src/tools/rustfmt/tests/target/configs/indent_style/block_call.rs",
"src/tools/rustfmt/tests/target/configs/use_try_shorthand/false.rs",
"src/tools/rustfmt/tests/target/issue-1681.rs",
"src/tools/rustfmt/tests/target/issue_1306.rs",
"tests/ui/dyn-keyword/dyn-2015-no-warnings-without-lints.rs",
"tests/ui/editions/edition-keywords-2015-2015.rs",
"tests/ui/editions/edition-keywords-2015-2018.rs",
"tests/ui/lint/lint_pre_expansion_extern_module_aux.rs",
"tests/ui/macros/macro-comma-support-rpass.rs",
"tests/ui/macros/try-macro.rs",
"tests/ui/parser/extern-crate-async.rs",
"tests/ui/try-block/try-is-identifier-edition2015.rs",
// Excessive nesting
"tests/ui/issues/issue-74564-if-expr-stack-overflow.rs",
// Testing tools on invalid syntax
"src/tools/rustfmt/tests/coverage/target/comments.rs",
"src/tools/rustfmt/tests/parser/issue-4126/invalid.rs",
"src/tools/rustfmt/tests/parser/issue_4418.rs",
"src/tools/rustfmt/tests/parser/unclosed-delims/issue_4466.rs",
"src/tools/rustfmt/tests/source/configs/disable_all_formatting/true.rs",
"src/tools/rustfmt/tests/source/configs/spaces_around_ranges/false.rs",
"src/tools/rustfmt/tests/source/configs/spaces_around_ranges/true.rs",
"src/tools/rustfmt/tests/source/type.rs",
"src/tools/rustfmt/tests/target/configs/spaces_around_ranges/false.rs",
"src/tools/rustfmt/tests/target/configs/spaces_around_ranges/true.rs",
"src/tools/rustfmt/tests/target/type.rs",
"tests/run-make/translation/test.rs",
"tests/ui/generics/issue-94432-garbage-ice.rs",
// Generated file containing a top-level expression, used with `include!`
"compiler/rustc_codegen_gcc/src/intrinsic/archs.rs",
// Clippy lint lists represented as expressions
"src/tools/clippy/clippy_lints/src/lib.deprecated.rs",
// Not actually test cases
"tests/ui/lint/expansion-time-include.rs",
"tests/ui/macros/auxiliary/macro-comma-support.rs",
"tests/ui/macros/auxiliary/macro-include-items-expr.rs",
"tests/ui/macros/include-single-expr-helper.rs",
"tests/ui/macros/include-single-expr-helper-1.rs",
"tests/ui/parser/issues/auxiliary/issue-21146-inc.rs",
];
#[rustfmt::skip]
static EXCLUDE_DIRS: &[&str] = &[
// Inputs that intentionally do not parse
"src/tools/rust-analyzer/crates/parser/test_data/parser/err",
"src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err",
// Inputs that lex but do not necessarily parse
"src/tools/rust-analyzer/crates/parser/test_data/lexer",
// Inputs that used to crash rust-analyzer, but aren't necessarily supposed to parse
"src/tools/rust-analyzer/crates/syntax/test_data/parser/fuzz-failures",
"src/tools/rust-analyzer/crates/syntax/test_data/reparse/fuzz-failures",
];
// Directories in which a .stderr implies the corresponding .rs is not expected
// to work.
static UI_TEST_DIRS: &[&str] = &["tests/ui", "tests/rustdoc-ui"];
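// Walk the checked-out rust source tree and run `for_each` in parallel over
// every .rs file that is expected to parse, skipping EXCLUDE_FILES,
// EXCLUDE_DIRS, and ui tests that have a corresponding .stderr file.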
pub fn for_each_rust_file(for_each: impl Fn(&Path) + Sync + Send) {
let mut rs_files = BTreeSet::new();
let repo_dir = Path::new("tests/rust");
for entry in WalkDir::new(repo_dir)
.into_iter()
.filter_entry(base_dir_filter)
{
let entry = entry.unwrap();
if !entry.file_type().is_dir() {
rs_files.insert(entry.into_path());
}
}
for ui_test_dir in UI_TEST_DIRS {
for entry in WalkDir::new(repo_dir.join(ui_test_dir)) {
let mut path = entry.unwrap().into_path();
if path.extension() == Some(OsStr::new("stderr")) {
loop {
rs_files.remove(&path.with_extension("rs"));
path = path.with_extension("");
if path.extension().is_none() {
break;
}
}
}
}
}
rs_files.par_iter().map(PathBuf::as_path).for_each(for_each);
}
pub fn base_dir_filter(entry: &DirEntry) -> bool {
let path = entry.path();
let mut path_string = path.to_string_lossy();
if cfg!(windows) {
path_string = path_string.replace('\\', "/").into();
}
let path_string = if path_string == "tests/rust" {
return true;
} else if let Some(path) = path_string.strip_prefix("tests/rust/") {
path
} else {
panic!("unexpected path in Rust dist: {}", path_string);
};
if path.is_dir() {
return !EXCLUDE_DIRS.contains(&path_string);
}
if path.extension() != Some(OsStr::new("rs")) {
return false;
}
!EXCLUDE_FILES.contains(&path_string)
}
#[allow(dead_code)]
pub fn edition(path: &Path) -> &'static str {
if path.ends_with("dyn-2015-no-warnings-without-lints.rs") {
"2015"
} else {
"2018"
}
}
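// Ensure tests/rust contains a checkout of rust-lang/rust at REVISION,
// downloading and unpacking the source tarball if the recorded commit does not
// match, then verify that every entry in the exclude lists actually exists.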
pub fn clone_rust() {
let needs_clone = match fs::read_to_string("tests/rust/COMMIT") {
Err(_) => true,
Ok(contents) => contents.trim() != REVISION,
};
if needs_clone {
download_and_unpack().unwrap();
}
let mut missing = String::new();
let test_src = Path::new("tests/rust");
let mut exclude_files_set = BTreeSet::new();
for exclude in EXCLUDE_FILES {
if !exclude_files_set.insert(exclude) {
panic!("duplicate path in EXCLUDE_FILES: {}", exclude);
}
for dir in EXCLUDE_DIRS {
if Path::new(exclude).starts_with(dir) {
panic!("excluded file {} is inside an excluded dir", exclude);
}
}
if !test_src.join(exclude).is_file() {
missing += "\ntests/rust/";
missing += exclude;
}
}
let mut exclude_dirs_set = BTreeSet::new();
for exclude in EXCLUDE_DIRS {
if !exclude_dirs_set.insert(exclude) {
panic!("duplicate path in EXCLUDE_DIRS: {}", exclude);
}
if !test_src.join(exclude).is_dir() {
missing += "\ntests/rust/";
missing += exclude;
missing += "/";
}
}
if !missing.is_empty() {
panic!("excluded test file does not exist:{}\n", missing);
}
}
fn download_and_unpack() -> Result<()> {
let url = format!(
"https://github.com/rust-lang/rust/archive/{}.tar.gz",
REVISION
);
let response = reqwest::blocking::get(url)?.error_for_status()?;
let progress = Progress::new(response);
let decoder = GzDecoder::new(progress);
let mut archive = Archive::new(decoder);
let prefix = format!("rust-{}", REVISION);
let tests_rust = Path::new("tests/rust");
if tests_rust.exists() {
fs::remove_dir_all(tests_rust)?;
}
for entry in archive.entries()? {
let mut entry = entry?;
let path = entry.path()?;
if path == Path::new("pax_global_header") {
continue;
}
let relative = path.strip_prefix(&prefix)?;
let out = tests_rust.join(relative);
entry.unpack(&out)?;
}
fs::write("tests/rust/COMMIT", REVISION)?;
Ok(())
}

37
vendor/syn/tests/repo/progress.rs vendored Normal file

@ -0,0 +1,37 @@
use std::io::{Read, Result};
use std::time::{Duration, Instant};
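// Read adapter that counts the bytes passing through it and periodically
// reports download progress on stderr.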
pub struct Progress<R> {
bytes: usize,
tick: Instant,
stream: R,
}
impl<R> Progress<R> {
pub fn new(stream: R) -> Self {
Progress {
bytes: 0,
tick: Instant::now() + Duration::from_millis(2000),
stream,
}
}
}
impl<R: Read> Read for Progress<R> {
fn read(&mut self, buf: &mut [u8]) -> Result<usize> {
let num = self.stream.read(buf)?;
self.bytes += num;
let now = Instant::now();
if now > self.tick {
self.tick = now + Duration::from_millis(500);
errorf!("downloading... {} bytes\n", self.bytes);
}
Ok(num)
}
}
impl<R> Drop for Progress<R> {
fn drop(&mut self) {
errorf!("done ({} bytes)\n", self.bytes);
}
}

43
vendor/syn/tests/test_asyncness.rs vendored Normal file

@ -0,0 +1,43 @@
#![allow(clippy::uninlined_format_args)]
#[macro_use]
mod macros;
use syn::{Expr, Item};
#[test]
fn test_async_fn() {
let input = "async fn process() {}";
snapshot!(input as Item, @r###"
Item::Fn {
vis: Visibility::Inherited,
sig: Signature {
asyncness: Some,
ident: "process",
generics: Generics,
output: ReturnType::Default,
},
block: Block {
stmts: [],
},
}
"###);
}
#[test]
fn test_async_closure() {
let input = "async || {}";
snapshot!(input as Expr, @r###"
Expr::Closure {
asyncness: Some,
output: ReturnType::Default,
body: Expr::Block {
block: Block {
stmts: [],
},
},
}
"###);
}

225
vendor/syn/tests/test_attribute.rs vendored Normal file

@ -0,0 +1,225 @@
#![allow(clippy::uninlined_format_args)]
#[macro_use]
mod macros;
use syn::parse::Parser;
use syn::{Attribute, Meta};
#[test]
fn test_meta_item_word() {
let meta = test("#[foo]");
snapshot!(meta, @r###"
Meta::Path {
segments: [
PathSegment {
ident: "foo",
},
],
}
"###);
}
#[test]
fn test_meta_item_name_value() {
let meta = test("#[foo = 5]");
snapshot!(meta, @r###"
Meta::NameValue {
path: Path {
segments: [
PathSegment {
ident: "foo",
},
],
},
value: Expr::Lit {
lit: 5,
},
}
"###);
}
#[test]
fn test_meta_item_bool_value() {
let meta = test("#[foo = true]");
snapshot!(meta, @r###"
Meta::NameValue {
path: Path {
segments: [
PathSegment {
ident: "foo",
},
],
},
value: Expr::Lit {
lit: Lit::Bool {
value: true,
},
},
}
"###);
let meta = test("#[foo = false]");
snapshot!(meta, @r###"
Meta::NameValue {
path: Path {
segments: [
PathSegment {
ident: "foo",
},
],
},
value: Expr::Lit {
lit: Lit::Bool {
value: false,
},
},
}
"###);
}
#[test]
fn test_meta_item_list_lit() {
let meta = test("#[foo(5)]");
snapshot!(meta, @r###"
Meta::List {
path: Path {
segments: [
PathSegment {
ident: "foo",
},
],
},
delimiter: MacroDelimiter::Paren,
tokens: TokenStream(`5`),
}
"###);
}
#[test]
fn test_meta_item_list_word() {
let meta = test("#[foo(bar)]");
snapshot!(meta, @r###"
Meta::List {
path: Path {
segments: [
PathSegment {
ident: "foo",
},
],
},
delimiter: MacroDelimiter::Paren,
tokens: TokenStream(`bar`),
}
"###);
}
#[test]
fn test_meta_item_list_name_value() {
let meta = test("#[foo(bar = 5)]");
snapshot!(meta, @r###"
Meta::List {
path: Path {
segments: [
PathSegment {
ident: "foo",
},
],
},
delimiter: MacroDelimiter::Paren,
tokens: TokenStream(`bar = 5`),
}
"###);
}
#[test]
fn test_meta_item_list_bool_value() {
let meta = test("#[foo(bar = true)]");
snapshot!(meta, @r###"
Meta::List {
path: Path {
segments: [
PathSegment {
ident: "foo",
},
],
},
delimiter: MacroDelimiter::Paren,
tokens: TokenStream(`bar = true`),
}
"###);
}
#[test]
fn test_meta_item_multiple() {
let meta = test("#[foo(word, name = 5, list(name2 = 6), word2)]");
snapshot!(meta, @r###"
Meta::List {
path: Path {
segments: [
PathSegment {
ident: "foo",
},
],
},
delimiter: MacroDelimiter::Paren,
tokens: TokenStream(`word , name = 5 , list (name2 = 6) , word2`),
}
"###);
}
#[test]
fn test_bool_lit() {
let meta = test("#[foo(true)]");
snapshot!(meta, @r###"
Meta::List {
path: Path {
segments: [
PathSegment {
ident: "foo",
},
],
},
delimiter: MacroDelimiter::Paren,
tokens: TokenStream(`true`),
}
"###);
}
#[test]
fn test_negative_lit() {
let meta = test("#[form(min = -1, max = 200)]");
snapshot!(meta, @r###"
Meta::List {
path: Path {
segments: [
PathSegment {
ident: "form",
},
],
},
delimiter: MacroDelimiter::Paren,
tokens: TokenStream(`min = - 1 , max = 200`),
}
"###);
}
fn test(input: &str) -> Meta {
let attrs = Attribute::parse_outer.parse_str(input).unwrap();
assert_eq!(attrs.len(), 1);
let attr = attrs.into_iter().next().unwrap();
attr.meta
}

781
vendor/syn/tests/test_derive_input.rs vendored Normal file

@ -0,0 +1,781 @@
#![allow(
clippy::assertions_on_result_states,
clippy::manual_let_else,
clippy::too_many_lines,
clippy::uninlined_format_args
)]
#[macro_use]
mod macros;
use quote::quote;
use syn::{Data, DeriveInput};
#[test]
fn test_unit() {
let input = quote! {
struct Unit;
};
snapshot!(input as DeriveInput, @r###"
DeriveInput {
vis: Visibility::Inherited,
ident: "Unit",
generics: Generics,
data: Data::Struct {
fields: Fields::Unit,
semi_token: Some,
},
}
"###);
}
#[test]
fn test_struct() {
let input = quote! {
#[derive(Debug, Clone)]
pub struct Item {
pub ident: Ident,
pub attrs: Vec<Attribute>
}
};
snapshot!(input as DeriveInput, @r###"
DeriveInput {
attrs: [
Attribute {
style: AttrStyle::Outer,
meta: Meta::List {
path: Path {
segments: [
PathSegment {
ident: "derive",
},
],
},
delimiter: MacroDelimiter::Paren,
tokens: TokenStream(`Debug , Clone`),
},
},
],
vis: Visibility::Public,
ident: "Item",
generics: Generics,
data: Data::Struct {
fields: Fields::Named {
named: [
Field {
vis: Visibility::Public,
ident: Some("ident"),
colon_token: Some,
ty: Type::Path {
path: Path {
segments: [
PathSegment {
ident: "Ident",
},
],
},
},
},
Token![,],
Field {
vis: Visibility::Public,
ident: Some("attrs"),
colon_token: Some,
ty: Type::Path {
path: Path {
segments: [
PathSegment {
ident: "Vec",
arguments: PathArguments::AngleBracketed {
args: [
GenericArgument::Type(Type::Path {
path: Path {
segments: [
PathSegment {
ident: "Attribute",
},
],
},
}),
],
},
},
],
},
},
},
],
},
},
}
"###);
snapshot!(&input.attrs[0].meta, @r###"
Meta::List {
path: Path {
segments: [
PathSegment {
ident: "derive",
},
],
},
delimiter: MacroDelimiter::Paren,
tokens: TokenStream(`Debug , Clone`),
}
"###);
}
#[test]
fn test_union() {
let input = quote! {
union MaybeUninit<T> {
uninit: (),
value: T
}
};
snapshot!(input as DeriveInput, @r###"
DeriveInput {
vis: Visibility::Inherited,
ident: "MaybeUninit",
generics: Generics {
lt_token: Some,
params: [
GenericParam::Type(TypeParam {
ident: "T",
}),
],
gt_token: Some,
},
data: Data::Union {
fields: FieldsNamed {
named: [
Field {
vis: Visibility::Inherited,
ident: Some("uninit"),
colon_token: Some,
ty: Type::Tuple,
},
Token![,],
Field {
vis: Visibility::Inherited,
ident: Some("value"),
colon_token: Some,
ty: Type::Path {
path: Path {
segments: [
PathSegment {
ident: "T",
},
],
},
},
},
],
},
},
}
"###);
}
#[test]
#[cfg(feature = "full")]
fn test_enum() {
let input = quote! {
/// See the std::result module documentation for details.
#[must_use]
pub enum Result<T, E> {
Ok(T),
Err(E),
Surprise = 0isize,
// Smuggling data into a proc_macro_derive,
// in the style of https://github.com/dtolnay/proc-macro-hack
ProcMacroHack = (0, "data").0
}
};
snapshot!(input as DeriveInput, @r###"
DeriveInput {
attrs: [
Attribute {
style: AttrStyle::Outer,
meta: Meta::NameValue {
path: Path {
segments: [
PathSegment {
ident: "doc",
},
],
},
value: Expr::Lit {
lit: " See the std::result module documentation for details.",
},
},
},
Attribute {
style: AttrStyle::Outer,
meta: Meta::Path {
segments: [
PathSegment {
ident: "must_use",
},
],
},
},
],
vis: Visibility::Public,
ident: "Result",
generics: Generics {
lt_token: Some,
params: [
GenericParam::Type(TypeParam {
ident: "T",
}),
Token![,],
GenericParam::Type(TypeParam {
ident: "E",
}),
],
gt_token: Some,
},
data: Data::Enum {
variants: [
Variant {
ident: "Ok",
fields: Fields::Unnamed {
unnamed: [
Field {
vis: Visibility::Inherited,
ty: Type::Path {
path: Path {
segments: [
PathSegment {
ident: "T",
},
],
},
},
},
],
},
},
Token![,],
Variant {
ident: "Err",
fields: Fields::Unnamed {
unnamed: [
Field {
vis: Visibility::Inherited,
ty: Type::Path {
path: Path {
segments: [
PathSegment {
ident: "E",
},
],
},
},
},
],
},
},
Token![,],
Variant {
ident: "Surprise",
fields: Fields::Unit,
discriminant: Some(Expr::Lit {
lit: 0isize,
}),
},
Token![,],
Variant {
ident: "ProcMacroHack",
fields: Fields::Unit,
discriminant: Some(Expr::Field {
base: Expr::Tuple {
elems: [
Expr::Lit {
lit: 0,
},
Token![,],
Expr::Lit {
lit: "data",
},
],
},
member: Member::Unnamed(Index {
index: 0,
}),
}),
},
],
},
}
"###);
let meta_items: Vec<_> = input.attrs.into_iter().map(|attr| attr.meta).collect();
snapshot!(meta_items, @r###"
[
Meta::NameValue {
path: Path {
segments: [
PathSegment {
ident: "doc",
},
],
},
value: Expr::Lit {
lit: " See the std::result module documentation for details.",
},
},
Meta::Path {
segments: [
PathSegment {
ident: "must_use",
},
],
},
]
"###);
}
#[test]
fn test_attr_with_non_mod_style_path() {
let input = quote! {
#[inert <T>]
struct S;
};
syn::parse2::<DeriveInput>(input).unwrap_err();
}
#[test]
fn test_attr_with_mod_style_path_with_self() {
let input = quote! {
#[foo::self]
struct S;
};
snapshot!(input as DeriveInput, @r###"
DeriveInput {
attrs: [
Attribute {
style: AttrStyle::Outer,
meta: Meta::Path {
segments: [
PathSegment {
ident: "foo",
},
Token![::],
PathSegment {
ident: "self",
},
],
},
},
],
vis: Visibility::Inherited,
ident: "S",
generics: Generics,
data: Data::Struct {
fields: Fields::Unit,
semi_token: Some,
},
}
"###);
snapshot!(&input.attrs[0].meta, @r###"
Meta::Path {
segments: [
PathSegment {
ident: "foo",
},
Token![::],
PathSegment {
ident: "self",
},
],
}
"###);
}
#[test]
fn test_pub_restricted() {
// Taken from tests/rust/src/test/ui/resolve/auxiliary/privacy-struct-ctor.rs
let input = quote! {
pub(in m) struct Z(pub(in m::n) u8);
};
snapshot!(input as DeriveInput, @r###"
DeriveInput {
vis: Visibility::Restricted {
in_token: Some,
path: Path {
segments: [
PathSegment {
ident: "m",
},
],
},
},
ident: "Z",
generics: Generics,
data: Data::Struct {
fields: Fields::Unnamed {
unnamed: [
Field {
vis: Visibility::Restricted {
in_token: Some,
path: Path {
segments: [
PathSegment {
ident: "m",
},
Token![::],
PathSegment {
ident: "n",
},
],
},
},
ty: Type::Path {
path: Path {
segments: [
PathSegment {
ident: "u8",
},
],
},
},
},
],
},
semi_token: Some,
},
}
"###);
}
#[test]
fn test_pub_restricted_crate() {
let input = quote! {
pub(crate) struct S;
};
snapshot!(input as DeriveInput, @r###"
DeriveInput {
vis: Visibility::Restricted {
path: Path {
segments: [
PathSegment {
ident: "crate",
},
],
},
},
ident: "S",
generics: Generics,
data: Data::Struct {
fields: Fields::Unit,
semi_token: Some,
},
}
"###);
}
#[test]
fn test_pub_restricted_super() {
let input = quote! {
pub(super) struct S;
};
snapshot!(input as DeriveInput, @r###"
DeriveInput {
vis: Visibility::Restricted {
path: Path {
segments: [
PathSegment {
ident: "super",
},
],
},
},
ident: "S",
generics: Generics,
data: Data::Struct {
fields: Fields::Unit,
semi_token: Some,
},
}
"###);
}
#[test]
fn test_pub_restricted_in_super() {
let input = quote! {
pub(in super) struct S;
};
snapshot!(input as DeriveInput, @r###"
DeriveInput {
vis: Visibility::Restricted {
in_token: Some,
path: Path {
segments: [
PathSegment {
ident: "super",
},
],
},
},
ident: "S",
generics: Generics,
data: Data::Struct {
fields: Fields::Unit,
semi_token: Some,
},
}
"###);
}
#[test]
fn test_fields_on_unit_struct() {
let input = quote! {
struct S;
};
snapshot!(input as DeriveInput, @r###"
DeriveInput {
vis: Visibility::Inherited,
ident: "S",
generics: Generics,
data: Data::Struct {
fields: Fields::Unit,
semi_token: Some,
},
}
"###);
let data = match input.data {
Data::Struct(data) => data,
_ => panic!("expected a struct"),
};
assert_eq!(0, data.fields.iter().count());
}
#[test]
fn test_fields_on_named_struct() {
let input = quote! {
struct S {
foo: i32,
pub bar: String,
}
};
snapshot!(input as DeriveInput, @r###"
DeriveInput {
vis: Visibility::Inherited,
ident: "S",
generics: Generics,
data: Data::Struct {
fields: Fields::Named {
named: [
Field {
vis: Visibility::Inherited,
ident: Some("foo"),
colon_token: Some,
ty: Type::Path {
path: Path {
segments: [
PathSegment {
ident: "i32",
},
],
},
},
},
Token![,],
Field {
vis: Visibility::Public,
ident: Some("bar"),
colon_token: Some,
ty: Type::Path {
path: Path {
segments: [
PathSegment {
ident: "String",
},
],
},
},
},
Token![,],
],
},
},
}
"###);
let data = match input.data {
Data::Struct(data) => data,
_ => panic!("expected a struct"),
};
snapshot!(data.fields.into_iter().collect::<Vec<_>>(), @r###"
[
Field {
vis: Visibility::Inherited,
ident: Some("foo"),
colon_token: Some,
ty: Type::Path {
path: Path {
segments: [
PathSegment {
ident: "i32",
},
],
},
},
},
Field {
vis: Visibility::Public,
ident: Some("bar"),
colon_token: Some,
ty: Type::Path {
path: Path {
segments: [
PathSegment {
ident: "String",
},
],
},
},
},
]
"###);
}
#[test]
fn test_fields_on_tuple_struct() {
let input = quote! {
struct S(i32, pub String);
};
snapshot!(input as DeriveInput, @r###"
DeriveInput {
vis: Visibility::Inherited,
ident: "S",
generics: Generics,
data: Data::Struct {
fields: Fields::Unnamed {
unnamed: [
Field {
vis: Visibility::Inherited,
ty: Type::Path {
path: Path {
segments: [
PathSegment {
ident: "i32",
},
],
},
},
},
Token![,],
Field {
vis: Visibility::Public,
ty: Type::Path {
path: Path {
segments: [
PathSegment {
ident: "String",
},
],
},
},
},
],
},
semi_token: Some,
},
}
"###);
let data = match input.data {
Data::Struct(data) => data,
_ => panic!("expected a struct"),
};
snapshot!(data.fields.iter().collect::<Vec<_>>(), @r###"
[
Field {
vis: Visibility::Inherited,
ty: Type::Path {
path: Path {
segments: [
PathSegment {
ident: "i32",
},
],
},
},
},
Field {
vis: Visibility::Public,
ty: Type::Path {
path: Path {
segments: [
PathSegment {
ident: "String",
},
],
},
},
},
]
"###);
}
#[test]
fn test_ambiguous_crate() {
let input = quote! {
// The field type is `(crate::X)` not `crate (::X)`.
struct S(crate::X);
};
snapshot!(input as DeriveInput, @r###"
DeriveInput {
vis: Visibility::Inherited,
ident: "S",
generics: Generics,
data: Data::Struct {
fields: Fields::Unnamed {
unnamed: [
Field {
vis: Visibility::Inherited,
ty: Type::Path {
path: Path {
segments: [
PathSegment {
ident: "crate",
},
Token![::],
PathSegment {
ident: "X",
},
],
},
},
},
],
},
semi_token: Some,
},
}
"###);
}

540
vendor/syn/tests/test_expr.rs vendored Normal file
View File

@ -0,0 +1,540 @@
#![allow(clippy::single_element_loop, clippy::uninlined_format_args)]
#[macro_use]
mod macros;
use proc_macro2::{Delimiter, Group};
use quote::{quote, ToTokens as _};
use syn::punctuated::Punctuated;
use syn::{parse_quote, token, Expr, ExprRange, ExprTuple, Stmt, Token};
#[test]
fn test_expr_parse() {
let tokens = quote!(..100u32);
snapshot!(tokens as Expr, @r###"
Expr::Range {
limits: RangeLimits::HalfOpen,
end: Some(Expr::Lit {
lit: 100u32,
}),
}
"###);
let tokens = quote!(..100u32);
snapshot!(tokens as ExprRange, @r###"
ExprRange {
limits: RangeLimits::HalfOpen,
end: Some(Expr::Lit {
lit: 100u32,
}),
}
"###);
}
#[test]
fn test_await() {
// Must not parse as Expr::Field.
let tokens = quote!(fut.await);
snapshot!(tokens as Expr, @r###"
Expr::Await {
base: Expr::Path {
path: Path {
segments: [
PathSegment {
ident: "fut",
},
],
},
},
}
"###);
}
#[rustfmt::skip]
#[test]
fn test_tuple_multi_index() {
let expected = snapshot!("tuple.0.0" as Expr, @r###"
Expr::Field {
base: Expr::Field {
base: Expr::Path {
path: Path {
segments: [
PathSegment {
ident: "tuple",
},
],
},
},
member: Member::Unnamed(Index {
index: 0,
}),
},
member: Member::Unnamed(Index {
index: 0,
}),
}
"###);
for &input in &[
"tuple .0.0",
"tuple. 0.0",
"tuple.0 .0",
"tuple.0. 0",
"tuple . 0 . 0",
] {
assert_eq!(expected, syn::parse_str(input).unwrap());
}
for tokens in [
quote!(tuple.0.0),
quote!(tuple .0.0),
quote!(tuple. 0.0),
quote!(tuple.0 .0),
quote!(tuple.0. 0),
quote!(tuple . 0 . 0),
] {
assert_eq!(expected, syn::parse2(tokens).unwrap());
}
}
#[test]
fn test_macro_variable_func() {
// mimics the token stream corresponding to `$fn()`
let path = Group::new(Delimiter::None, quote!(f));
let tokens = quote!(#path());
snapshot!(tokens as Expr, @r###"
Expr::Call {
func: Expr::Group {
expr: Expr::Path {
path: Path {
segments: [
PathSegment {
ident: "f",
},
],
},
},
},
}
"###);
let path = Group::new(Delimiter::None, quote! { #[inside] f });
let tokens = quote!(#[outside] #path());
snapshot!(tokens as Expr, @r###"
Expr::Call {
attrs: [
Attribute {
style: AttrStyle::Outer,
meta: Meta::Path {
segments: [
PathSegment {
ident: "outside",
},
],
},
},
],
func: Expr::Group {
expr: Expr::Path {
attrs: [
Attribute {
style: AttrStyle::Outer,
meta: Meta::Path {
segments: [
PathSegment {
ident: "inside",
},
],
},
},
],
path: Path {
segments: [
PathSegment {
ident: "f",
},
],
},
},
},
}
"###);
}
#[test]
fn test_macro_variable_macro() {
// mimics the token stream corresponding to `$macro!()`
let mac = Group::new(Delimiter::None, quote!(m));
let tokens = quote!(#mac!());
snapshot!(tokens as Expr, @r###"
Expr::Macro {
mac: Macro {
path: Path {
segments: [
PathSegment {
ident: "m",
},
],
},
delimiter: MacroDelimiter::Paren,
tokens: TokenStream(``),
},
}
"###);
}
#[test]
fn test_macro_variable_struct() {
// mimics the token stream corresponding to `$struct {}`
let s = Group::new(Delimiter::None, quote! { S });
let tokens = quote!(#s {});
snapshot!(tokens as Expr, @r###"
Expr::Struct {
path: Path {
segments: [
PathSegment {
ident: "S",
},
],
},
}
"###);
}
#[test]
fn test_macro_variable_unary() {
// mimics the token stream corresponding to `$expr.method()` where expr is `&self`
let inner = Group::new(Delimiter::None, quote!(&self));
let tokens = quote!(#inner.method());
snapshot!(tokens as Expr, @r###"
Expr::MethodCall {
receiver: Expr::Group {
expr: Expr::Reference {
expr: Expr::Path {
path: Path {
segments: [
PathSegment {
ident: "self",
},
],
},
},
},
},
method: "method",
}
"###);
}
#[test]
fn test_macro_variable_match_arm() {
// mimics the token stream corresponding to `match v { _ => $expr }`
let expr = Group::new(Delimiter::None, quote! { #[a] () });
let tokens = quote!(match v { _ => #expr });
snapshot!(tokens as Expr, @r###"
Expr::Match {
expr: Expr::Path {
path: Path {
segments: [
PathSegment {
ident: "v",
},
],
},
},
arms: [
Arm {
pat: Pat::Wild,
body: Expr::Group {
expr: Expr::Tuple {
attrs: [
Attribute {
style: AttrStyle::Outer,
meta: Meta::Path {
segments: [
PathSegment {
ident: "a",
},
],
},
},
],
},
},
},
],
}
"###);
let expr = Group::new(Delimiter::None, quote!(loop {} + 1));
let tokens = quote!(match v { _ => #expr });
snapshot!(tokens as Expr, @r###"
Expr::Match {
expr: Expr::Path {
path: Path {
segments: [
PathSegment {
ident: "v",
},
],
},
},
arms: [
Arm {
pat: Pat::Wild,
body: Expr::Group {
expr: Expr::Binary {
left: Expr::Loop {
body: Block {
stmts: [],
},
},
op: BinOp::Add,
right: Expr::Lit {
lit: 1,
},
},
},
},
],
}
"###);
}
// https://github.com/dtolnay/syn/issues/1019
#[test]
fn test_closure_vs_rangefull() {
#[rustfmt::skip] // rustfmt bug: https://github.com/rust-lang/rustfmt/issues/4808
let tokens = quote!(|| .. .method());
snapshot!(tokens as Expr, @r###"
Expr::MethodCall {
receiver: Expr::Closure {
output: ReturnType::Default,
body: Expr::Range {
limits: RangeLimits::HalfOpen,
},
},
method: "method",
}
"###);
}
#[test]
fn test_postfix_operator_after_cast() {
syn::parse_str::<Expr>("|| &x as T[0]").unwrap_err();
syn::parse_str::<Expr>("|| () as ()()").unwrap_err();
}
#[test]
fn test_ranges() {
syn::parse_str::<Expr>("..").unwrap();
syn::parse_str::<Expr>("..hi").unwrap();
syn::parse_str::<Expr>("lo..").unwrap();
syn::parse_str::<Expr>("lo..hi").unwrap();
syn::parse_str::<Expr>("..=").unwrap_err();
syn::parse_str::<Expr>("..=hi").unwrap();
syn::parse_str::<Expr>("lo..=").unwrap_err();
syn::parse_str::<Expr>("lo..=hi").unwrap();
syn::parse_str::<Expr>("...").unwrap_err();
syn::parse_str::<Expr>("...hi").unwrap_err();
syn::parse_str::<Expr>("lo...").unwrap_err();
syn::parse_str::<Expr>("lo...hi").unwrap_err();
}
#[test]
fn test_ambiguous_label() {
for stmt in [
quote! {
return 'label: loop { break 'label 42; };
},
quote! {
break ('label: loop { break 'label 42; });
},
quote! {
break 1 + 'label: loop { break 'label 42; };
},
quote! {
break 'outer 'inner: loop { break 'inner 42; };
},
] {
syn::parse2::<Stmt>(stmt).unwrap();
}
for stmt in [
// Parentheses required. See https://github.com/rust-lang/rust/pull/87026.
quote! {
break 'label: loop { break 'label 42; };
},
] {
syn::parse2::<Stmt>(stmt).unwrap_err();
}
}
#[test]
fn test_extended_interpolated_path() {
let path = Group::new(Delimiter::None, quote!(a::b));
let tokens = quote!(if #path {});
snapshot!(tokens as Expr, @r###"
Expr::If {
cond: Expr::Group {
expr: Expr::Path {
path: Path {
segments: [
PathSegment {
ident: "a",
},
Token![::],
PathSegment {
ident: "b",
},
],
},
},
},
then_branch: Block {
stmts: [],
},
}
"###);
let tokens = quote!(#path {});
snapshot!(tokens as Expr, @r###"
Expr::Struct {
path: Path {
segments: [
PathSegment {
ident: "a",
},
Token![::],
PathSegment {
ident: "b",
},
],
},
}
"###);
let tokens = quote!(#path :: c);
snapshot!(tokens as Expr, @r###"
Expr::Path {
path: Path {
segments: [
PathSegment {
ident: "a",
},
Token![::],
PathSegment {
ident: "b",
},
Token![::],
PathSegment {
ident: "c",
},
],
},
}
"###);
let nested = Group::new(Delimiter::None, quote!(a::b || true));
let tokens = quote!(if #nested && false {});
snapshot!(tokens as Expr, @r###"
Expr::If {
cond: Expr::Binary {
left: Expr::Group {
expr: Expr::Binary {
left: Expr::Path {
path: Path {
segments: [
PathSegment {
ident: "a",
},
Token![::],
PathSegment {
ident: "b",
},
],
},
},
op: BinOp::Or,
right: Expr::Lit {
lit: Lit::Bool {
value: true,
},
},
},
},
op: BinOp::And,
right: Expr::Lit {
lit: Lit::Bool {
value: false,
},
},
},
then_branch: Block {
stmts: [],
},
}
"###);
}
#[test]
fn test_tuple_comma() {
let mut expr = ExprTuple {
attrs: Vec::new(),
paren_token: token::Paren::default(),
elems: Punctuated::new(),
};
snapshot!(expr.to_token_stream() as Expr, @"Expr::Tuple");
expr.elems.push_value(parse_quote!(continue));
// Must not parse to Expr::Paren
snapshot!(expr.to_token_stream() as Expr, @r###"
Expr::Tuple {
elems: [
Expr::Continue,
Token![,],
],
}
"###);
expr.elems.push_punct(<Token![,]>::default());
snapshot!(expr.to_token_stream() as Expr, @r###"
Expr::Tuple {
elems: [
Expr::Continue,
Token![,],
],
}
"###);
expr.elems.push_value(parse_quote!(continue));
snapshot!(expr.to_token_stream() as Expr, @r###"
Expr::Tuple {
elems: [
Expr::Continue,
Token![,],
Expr::Continue,
],
}
"###);
expr.elems.push_punct(<Token![,]>::default());
snapshot!(expr.to_token_stream() as Expr, @r###"
Expr::Tuple {
elems: [
Expr::Continue,
Token![,],
Expr::Continue,
Token![,],
],
}
"###);
}

282
vendor/syn/tests/test_generics.rs vendored Normal file
View File

@ -0,0 +1,282 @@
#![allow(
clippy::manual_let_else,
clippy::too_many_lines,
clippy::uninlined_format_args
)]
#[macro_use]
mod macros;
use quote::quote;
use syn::{DeriveInput, ItemFn, TypeParamBound, WhereClause, WherePredicate};
#[test]
fn test_split_for_impl() {
let input = quote! {
struct S<'a, 'b: 'a, #[may_dangle] T: 'a = ()> where T: Debug;
};
snapshot!(input as DeriveInput, @r###"
DeriveInput {
vis: Visibility::Inherited,
ident: "S",
generics: Generics {
lt_token: Some,
params: [
GenericParam::Lifetime(LifetimeParam {
lifetime: Lifetime {
ident: "a",
},
}),
Token![,],
GenericParam::Lifetime(LifetimeParam {
lifetime: Lifetime {
ident: "b",
},
colon_token: Some,
bounds: [
Lifetime {
ident: "a",
},
],
}),
Token![,],
GenericParam::Type(TypeParam {
attrs: [
Attribute {
style: AttrStyle::Outer,
meta: Meta::Path {
segments: [
PathSegment {
ident: "may_dangle",
},
],
},
},
],
ident: "T",
colon_token: Some,
bounds: [
TypeParamBound::Lifetime {
ident: "a",
},
],
eq_token: Some,
default: Some(Type::Tuple),
}),
],
gt_token: Some,
where_clause: Some(WhereClause {
predicates: [
WherePredicate::Type(PredicateType {
bounded_ty: Type::Path {
path: Path {
segments: [
PathSegment {
ident: "T",
},
],
},
},
bounds: [
TypeParamBound::Trait(TraitBound {
path: Path {
segments: [
PathSegment {
ident: "Debug",
},
],
},
}),
],
}),
],
}),
},
data: Data::Struct {
fields: Fields::Unit,
semi_token: Some,
},
}
"###);
let generics = input.generics;
let (impl_generics, ty_generics, where_clause) = generics.split_for_impl();
let generated = quote! {
impl #impl_generics MyTrait for Test #ty_generics #where_clause {}
};
let expected = quote! {
impl<'a, 'b: 'a, #[may_dangle] T: 'a> MyTrait
for Test<'a, 'b, T>
where
T: Debug
{}
};
assert_eq!(generated.to_string(), expected.to_string());
let turbofish = ty_generics.as_turbofish();
let generated = quote! {
Test #turbofish
};
let expected = quote! {
Test::<'a, 'b, T>
};
assert_eq!(generated.to_string(), expected.to_string());
}
#[test]
fn test_ty_param_bound() {
let tokens = quote!('a);
snapshot!(tokens as TypeParamBound, @r###"
TypeParamBound::Lifetime {
ident: "a",
}
"###);
let tokens = quote!('_);
snapshot!(tokens as TypeParamBound, @r###"
TypeParamBound::Lifetime {
ident: "_",
}
"###);
let tokens = quote!(Debug);
snapshot!(tokens as TypeParamBound, @r###"
TypeParamBound::Trait(TraitBound {
path: Path {
segments: [
PathSegment {
ident: "Debug",
},
],
},
})
"###);
let tokens = quote!(?Sized);
snapshot!(tokens as TypeParamBound, @r###"
TypeParamBound::Trait(TraitBound {
modifier: TraitBoundModifier::Maybe,
path: Path {
segments: [
PathSegment {
ident: "Sized",
},
],
},
})
"###);
}
#[test]
fn test_fn_precedence_in_where_clause() {
// This should parse as two separate bounds, `FnOnce() -> i32` and `Send` - not
// `FnOnce() -> (i32 + Send)`.
let input = quote! {
fn f<G>()
where
G: FnOnce() -> i32 + Send,
{
}
};
snapshot!(input as ItemFn, @r###"
ItemFn {
vis: Visibility::Inherited,
sig: Signature {
ident: "f",
generics: Generics {
lt_token: Some,
params: [
GenericParam::Type(TypeParam {
ident: "G",
}),
],
gt_token: Some,
where_clause: Some(WhereClause {
predicates: [
WherePredicate::Type(PredicateType {
bounded_ty: Type::Path {
path: Path {
segments: [
PathSegment {
ident: "G",
},
],
},
},
bounds: [
TypeParamBound::Trait(TraitBound {
path: Path {
segments: [
PathSegment {
ident: "FnOnce",
arguments: PathArguments::Parenthesized {
output: ReturnType::Type(
Type::Path {
path: Path {
segments: [
PathSegment {
ident: "i32",
},
],
},
},
),
},
},
],
},
}),
Token![+],
TypeParamBound::Trait(TraitBound {
path: Path {
segments: [
PathSegment {
ident: "Send",
},
],
},
}),
],
}),
Token![,],
],
}),
},
output: ReturnType::Default,
},
block: Block {
stmts: [],
},
}
"###);
let where_clause = input.sig.generics.where_clause.as_ref().unwrap();
assert_eq!(where_clause.predicates.len(), 1);
let predicate = match &where_clause.predicates[0] {
WherePredicate::Type(pred) => pred,
_ => panic!("wrong predicate kind"),
};
assert_eq!(predicate.bounds.len(), 2, "{:#?}", predicate.bounds);
let first_bound = &predicate.bounds[0];
assert_eq!(quote!(#first_bound).to_string(), "FnOnce () -> i32");
let second_bound = &predicate.bounds[1];
assert_eq!(quote!(#second_bound).to_string(), "Send");
}
#[test]
fn test_where_clause_at_end_of_input() {
let input = quote! {
where
};
snapshot!(input as WhereClause, @"WhereClause");
assert_eq!(input.predicates.len(), 0);
}

53
vendor/syn/tests/test_grouping.rs vendored Normal file
View File

@ -0,0 +1,53 @@
#![allow(clippy::uninlined_format_args)]
#[macro_use]
mod macros;
use proc_macro2::{Delimiter, Group, Literal, Punct, Spacing, TokenStream, TokenTree};
use syn::Expr;
#[test]
fn test_grouping() {
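    // The None-delimited group wraps `2i32 + 3i32`, as if that subexpression
    // had been interpolated from a macro variable. The expression therefore
    // parses as `1i32 + ((2i32 + 3i32) * 4i32)`, even though the token string
    // asserted below prints flat, without any parentheses.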
let tokens: TokenStream = TokenStream::from_iter(vec![
TokenTree::Literal(Literal::i32_suffixed(1)),
TokenTree::Punct(Punct::new('+', Spacing::Alone)),
TokenTree::Group(Group::new(
Delimiter::None,
TokenStream::from_iter(vec![
TokenTree::Literal(Literal::i32_suffixed(2)),
TokenTree::Punct(Punct::new('+', Spacing::Alone)),
TokenTree::Literal(Literal::i32_suffixed(3)),
]),
)),
TokenTree::Punct(Punct::new('*', Spacing::Alone)),
TokenTree::Literal(Literal::i32_suffixed(4)),
]);
assert_eq!(tokens.to_string(), "1i32 + 2i32 + 3i32 * 4i32");
snapshot!(tokens as Expr, @r###"
Expr::Binary {
left: Expr::Lit {
lit: 1i32,
},
op: BinOp::Add,
right: Expr::Binary {
left: Expr::Group {
expr: Expr::Binary {
left: Expr::Lit {
lit: 2i32,
},
op: BinOp::Add,
right: Expr::Lit {
lit: 3i32,
},
},
},
op: BinOp::Mul,
right: Expr::Lit {
lit: 4i32,
},
},
}
"###);
}

85
vendor/syn/tests/test_ident.rs vendored Normal file
View File

@ -0,0 +1,85 @@
use proc_macro2::{Ident, Span, TokenStream};
use std::str::FromStr;
use syn::Result;
fn parse(s: &str) -> Result<Ident> {
syn::parse2(TokenStream::from_str(s).unwrap())
}
fn new(s: &str) -> Ident {
Ident::new(s, Span::call_site())
}
#[test]
fn ident_parse() {
parse("String").unwrap();
}
#[test]
fn ident_parse_keyword() {
parse("abstract").unwrap_err();
}
#[test]
fn ident_parse_empty() {
parse("").unwrap_err();
}
#[test]
fn ident_parse_lifetime() {
parse("'static").unwrap_err();
}
#[test]
fn ident_parse_underscore() {
parse("_").unwrap_err();
}
#[test]
fn ident_parse_number() {
parse("255").unwrap_err();
}
#[test]
fn ident_parse_invalid() {
parse("a#").unwrap_err();
}
#[test]
fn ident_new() {
new("String");
}
#[test]
fn ident_new_keyword() {
new("abstract");
}
#[test]
#[should_panic(expected = "use Option<Ident>")]
fn ident_new_empty() {
new("");
}
#[test]
#[should_panic(expected = "not a valid Ident")]
fn ident_new_lifetime() {
new("'static");
}
#[test]
fn ident_new_underscore() {
new("_");
}
#[test]
#[should_panic(expected = "use Literal instead")]
fn ident_new_number() {
new("255");
}
#[test]
#[should_panic(expected = "\"a#\" is not a valid Ident")]
fn ident_new_invalid() {
new("a#");
}

332
vendor/syn/tests/test_item.rs vendored Normal file
View File

@ -0,0 +1,332 @@
#![allow(clippy::uninlined_format_args)]
#[macro_use]
mod macros;
use proc_macro2::{Delimiter, Group, Ident, Span, TokenStream, TokenTree};
use quote::quote;
use syn::{Item, ItemTrait};
#[test]
fn test_macro_variable_attr() {
// mimics the token stream corresponding to `$attr fn f() {}`
let tokens = TokenStream::from_iter(vec![
TokenTree::Group(Group::new(Delimiter::None, quote! { #[test] })),
TokenTree::Ident(Ident::new("fn", Span::call_site())),
TokenTree::Ident(Ident::new("f", Span::call_site())),
TokenTree::Group(Group::new(Delimiter::Parenthesis, TokenStream::new())),
TokenTree::Group(Group::new(Delimiter::Brace, TokenStream::new())),
]);
snapshot!(tokens as Item, @r###"
Item::Fn {
attrs: [
Attribute {
style: AttrStyle::Outer,
meta: Meta::Path {
segments: [
PathSegment {
ident: "test",
},
],
},
},
],
vis: Visibility::Inherited,
sig: Signature {
ident: "f",
generics: Generics,
output: ReturnType::Default,
},
block: Block {
stmts: [],
},
}
"###);
}
#[test]
fn test_negative_impl() {
// Rustc parses all of the following.
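    // (`#[cfg(any())]` is always false, so the items below are compiled out,
    // but rustc must still be able to parse them.)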
#[cfg(any())]
impl ! {}
let tokens = quote! {
impl ! {}
};
snapshot!(tokens as Item, @r###"
Item::Impl {
generics: Generics,
self_ty: Type::Never,
}
"###);
#[cfg(any())]
#[rustfmt::skip]
impl !Trait {}
let tokens = quote! {
impl !Trait {}
};
snapshot!(tokens as Item, @r###"
Item::Impl {
generics: Generics,
self_ty: Type::Verbatim(`! Trait`),
}
"###);
#[cfg(any())]
impl !Trait for T {}
let tokens = quote! {
impl !Trait for T {}
};
snapshot!(tokens as Item, @r###"
Item::Impl {
generics: Generics,
trait_: Some((
Some,
Path {
segments: [
PathSegment {
ident: "Trait",
},
],
},
)),
self_ty: Type::Path {
path: Path {
segments: [
PathSegment {
ident: "T",
},
],
},
},
}
"###);
#[cfg(any())]
#[rustfmt::skip]
impl !! {}
let tokens = quote! {
impl !! {}
};
snapshot!(tokens as Item, @r###"
Item::Impl {
generics: Generics,
self_ty: Type::Verbatim(`! !`),
}
"###);
}
#[test]
fn test_macro_variable_impl() {
// mimics the token stream corresponding to `impl $trait for $ty {}`
let tokens = TokenStream::from_iter(vec![
TokenTree::Ident(Ident::new("impl", Span::call_site())),
TokenTree::Group(Group::new(Delimiter::None, quote!(Trait))),
TokenTree::Ident(Ident::new("for", Span::call_site())),
TokenTree::Group(Group::new(Delimiter::None, quote!(Type))),
TokenTree::Group(Group::new(Delimiter::Brace, TokenStream::new())),
]);
snapshot!(tokens as Item, @r###"
Item::Impl {
generics: Generics,
trait_: Some((
None,
Path {
segments: [
PathSegment {
ident: "Trait",
},
],
},
)),
self_ty: Type::Group {
elem: Type::Path {
path: Path {
segments: [
PathSegment {
ident: "Type",
},
],
},
},
},
}
"###);
}
#[test]
fn test_supertraits() {
// Rustc parses all of the following.
#[rustfmt::skip]
let tokens = quote!(trait Trait where {});
snapshot!(tokens as ItemTrait, @r###"
ItemTrait {
vis: Visibility::Inherited,
ident: "Trait",
generics: Generics {
where_clause: Some(WhereClause),
},
}
"###);
#[rustfmt::skip]
let tokens = quote!(trait Trait: where {});
snapshot!(tokens as ItemTrait, @r###"
ItemTrait {
vis: Visibility::Inherited,
ident: "Trait",
generics: Generics {
where_clause: Some(WhereClause),
},
colon_token: Some,
}
"###);
#[rustfmt::skip]
let tokens = quote!(trait Trait: Sized where {});
snapshot!(tokens as ItemTrait, @r###"
ItemTrait {
vis: Visibility::Inherited,
ident: "Trait",
generics: Generics {
where_clause: Some(WhereClause),
},
colon_token: Some,
supertraits: [
TypeParamBound::Trait(TraitBound {
path: Path {
segments: [
PathSegment {
ident: "Sized",
},
],
},
}),
],
}
"###);
#[rustfmt::skip]
let tokens = quote!(trait Trait: Sized + where {});
snapshot!(tokens as ItemTrait, @r###"
ItemTrait {
vis: Visibility::Inherited,
ident: "Trait",
generics: Generics {
where_clause: Some(WhereClause),
},
colon_token: Some,
supertraits: [
TypeParamBound::Trait(TraitBound {
path: Path {
segments: [
PathSegment {
ident: "Sized",
},
],
},
}),
Token![+],
],
}
"###);
}
#[test]
fn test_type_empty_bounds() {
#[rustfmt::skip]
let tokens = quote! {
trait Foo {
type Bar: ;
}
};
snapshot!(tokens as ItemTrait, @r###"
ItemTrait {
vis: Visibility::Inherited,
ident: "Foo",
generics: Generics,
items: [
TraitItem::Type {
ident: "Bar",
generics: Generics,
colon_token: Some,
},
],
}
"###);
}
#[test]
fn test_impl_visibility() {
let tokens = quote! {
pub default unsafe impl union {}
};
snapshot!(tokens as Item, @"Item::Verbatim(`pub default unsafe impl union { }`)");
}
#[test]
fn test_impl_type_parameter_defaults() {
#[cfg(any())]
impl<T = ()> () {}
let tokens = quote! {
impl<T = ()> () {}
};
snapshot!(tokens as Item, @r###"
Item::Impl {
generics: Generics {
lt_token: Some,
params: [
GenericParam::Type(TypeParam {
ident: "T",
eq_token: Some,
default: Some(Type::Tuple),
}),
],
gt_token: Some,
},
self_ty: Type::Tuple,
}
"###);
}
#[test]
fn test_impl_trait_trailing_plus() {
let tokens = quote! {
fn f() -> impl Sized + {}
};
snapshot!(tokens as Item, @r###"
Item::Fn {
vis: Visibility::Inherited,
sig: Signature {
ident: "f",
generics: Generics,
output: ReturnType::Type(
Type::ImplTrait {
bounds: [
TypeParamBound::Trait(TraitBound {
path: Path {
segments: [
PathSegment {
ident: "Sized",
},
],
},
}),
Token![+],
],
},
),
},
block: Block {
stmts: [],
},
}
"###);
}

70
vendor/syn/tests/test_iterators.rs vendored Normal file
View File

@ -0,0 +1,70 @@
#![allow(clippy::uninlined_format_args)]
use syn::punctuated::{Pair, Punctuated};
use syn::Token;
#[macro_use]
mod macros;
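// Assert that an iterator upholds the ExactSizeIterator contract: `len()`
// agrees with `count()`, and `size_hint()` reports exactly that size.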
macro_rules! check_exact_size_iterator {
($iter:expr) => {{
let iter = $iter;
let size_hint = iter.size_hint();
let len = iter.len();
let count = iter.count();
assert_eq!(len, count);
assert_eq!(size_hint, (count, Some(count)));
}};
}
#[test]
fn pairs() {
let mut p: Punctuated<_, Token![,]> = punctuated!(2, 3, 4);
check_exact_size_iterator!(p.pairs());
check_exact_size_iterator!(p.pairs_mut());
check_exact_size_iterator!(p.into_pairs());
let mut p: Punctuated<_, Token![,]> = punctuated!(2, 3, 4);
assert_eq!(p.pairs().next_back().map(Pair::into_value), Some(&4));
assert_eq!(
p.pairs_mut().next_back().map(Pair::into_value),
Some(&mut 4)
);
assert_eq!(p.into_pairs().next_back().map(Pair::into_value), Some(4));
}
#[test]
fn iter() {
let mut p: Punctuated<_, Token![,]> = punctuated!(2, 3, 4);
check_exact_size_iterator!(p.iter());
check_exact_size_iterator!(p.iter_mut());
check_exact_size_iterator!(p.into_iter());
let mut p: Punctuated<_, Token![,]> = punctuated!(2, 3, 4);
assert_eq!(p.iter().next_back(), Some(&4));
assert_eq!(p.iter_mut().next_back(), Some(&mut 4));
assert_eq!(p.into_iter().next_back(), Some(4));
}
#[test]
fn may_dangle() {
let p: Punctuated<_, Token![,]> = punctuated!(2, 3, 4);
for element in &p {
if *element == 2 {
drop(p);
break;
}
}
let mut p: Punctuated<_, Token![,]> = punctuated!(2, 3, 4);
for element in &mut p {
if *element == 2 {
drop(p);
break;
}
}
}

273
vendor/syn/tests/test_lit.rs vendored Normal file
View File

@ -0,0 +1,273 @@
#![allow(
clippy::float_cmp,
clippy::non_ascii_literal,
clippy::single_match_else,
clippy::uninlined_format_args
)]
#[macro_use]
mod macros;
use proc_macro2::{Delimiter, Group, Literal, Span, TokenStream, TokenTree};
use quote::ToTokens;
use std::str::FromStr;
use syn::{Lit, LitFloat, LitInt, LitStr};
fn lit(s: &str) -> Lit {
let mut tokens = TokenStream::from_str(s).unwrap().into_iter();
match tokens.next().unwrap() {
TokenTree::Literal(lit) => {
assert!(tokens.next().is_none());
Lit::new(lit)
}
wrong => panic!("{:?}", wrong),
}
}
#[test]
fn strings() {
fn test_string(s: &str, value: &str) {
match lit(s) {
Lit::Str(lit) => {
assert_eq!(lit.value(), value);
let again = lit.into_token_stream().to_string();
if again != s {
test_string(&again, value);
}
}
wrong => panic!("{:?}", wrong),
}
}
test_string("\"a\"", "a");
test_string("\"\\n\"", "\n");
test_string("\"\\r\"", "\r");
test_string("\"\\t\"", "\t");
test_string("\"🐕\"", "🐕"); // NOTE: This is an emoji
test_string("\"\\\"\"", "\"");
test_string("\"'\"", "'");
test_string("\"\"", "");
test_string("\"\\u{1F415}\"", "\u{1F415}");
test_string("\"\\u{1_2__3_}\"", "\u{123}");
test_string(
"\"contains\nnewlines\\\nescaped newlines\"",
"contains\nnewlinesescaped newlines",
);
test_string(
"\"escaped newline\\\n \x0C unsupported whitespace\"",
"escaped newline\x0C unsupported whitespace",
);
test_string("r\"raw\nstring\\\nhere\"", "raw\nstring\\\nhere");
test_string("\"...\"q", "...");
test_string("r\"...\"q", "...");
test_string("r##\"...\"##q", "...");
}
#[test]
fn byte_strings() {
fn test_byte_string(s: &str, value: &[u8]) {
match lit(s) {
Lit::ByteStr(lit) => {
assert_eq!(lit.value(), value);
let again = lit.into_token_stream().to_string();
if again != s {
test_byte_string(&again, value);
}
}
wrong => panic!("{:?}", wrong),
}
}
test_byte_string("b\"a\"", b"a");
test_byte_string("b\"\\n\"", b"\n");
test_byte_string("b\"\\r\"", b"\r");
test_byte_string("b\"\\t\"", b"\t");
test_byte_string("b\"\\\"\"", b"\"");
test_byte_string("b\"'\"", b"'");
test_byte_string("b\"\"", b"");
test_byte_string(
"b\"contains\nnewlines\\\nescaped newlines\"",
b"contains\nnewlinesescaped newlines",
);
test_byte_string("br\"raw\nstring\\\nhere\"", b"raw\nstring\\\nhere");
test_byte_string("b\"...\"q", b"...");
test_byte_string("br\"...\"q", b"...");
test_byte_string("br##\"...\"##q", b"...");
}
#[test]
fn bytes() {
fn test_byte(s: &str, value: u8) {
match lit(s) {
Lit::Byte(lit) => {
assert_eq!(lit.value(), value);
let again = lit.into_token_stream().to_string();
assert_eq!(again, s);
}
wrong => panic!("{:?}", wrong),
}
}
test_byte("b'a'", b'a');
test_byte("b'\\n'", b'\n');
test_byte("b'\\r'", b'\r');
test_byte("b'\\t'", b'\t');
test_byte("b'\\''", b'\'');
test_byte("b'\"'", b'"');
test_byte("b'a'q", b'a');
}
#[test]
fn chars() {
fn test_char(s: &str, value: char) {
match lit(s) {
Lit::Char(lit) => {
assert_eq!(lit.value(), value);
let again = lit.into_token_stream().to_string();
if again != s {
test_char(&again, value);
}
}
wrong => panic!("{:?}", wrong),
}
}
test_char("'a'", 'a');
test_char("'\\n'", '\n');
test_char("'\\r'", '\r');
test_char("'\\t'", '\t');
test_char("'🐕'", '🐕'); // NOTE: This is an emoji
test_char("'\\''", '\'');
test_char("'\"'", '"');
test_char("'\\u{1F415}'", '\u{1F415}');
test_char("'a'q", 'a');
}
#[test]
fn ints() {
fn test_int(s: &str, value: u64, suffix: &str) {
match lit(s) {
Lit::Int(lit) => {
assert_eq!(lit.base10_digits().parse::<u64>().unwrap(), value);
assert_eq!(lit.suffix(), suffix);
let again = lit.into_token_stream().to_string();
if again != s {
test_int(&again, value, suffix);
}
}
wrong => panic!("{:?}", wrong),
}
}
test_int("5", 5, "");
test_int("5u32", 5, "u32");
test_int("0E", 0, "E");
test_int("0ECMA", 0, "ECMA");
test_int("0o0A", 0, "A");
test_int("5_0", 50, "");
test_int("5_____0_____", 50, "");
test_int("0x7f", 127, "");
test_int("0x7F", 127, "");
test_int("0b1001", 9, "");
test_int("0o73", 59, "");
test_int("0x7Fu8", 127, "u8");
test_int("0b1001i8", 9, "i8");
test_int("0o73u32", 59, "u32");
test_int("0x__7___f_", 127, "");
test_int("0x__7___F_", 127, "");
test_int("0b_1_0__01", 9, "");
test_int("0o_7__3", 59, "");
test_int("0x_7F__u8", 127, "u8");
test_int("0b__10__0_1i8", 9, "i8");
test_int("0o__7__________________3u32", 59, "u32");
test_int("0e1\u{5c5}", 0, "e1\u{5c5}");
}
#[test]
fn floats() {
fn test_float(s: &str, value: f64, suffix: &str) {
match lit(s) {
Lit::Float(lit) => {
assert_eq!(lit.base10_digits().parse::<f64>().unwrap(), value);
assert_eq!(lit.suffix(), suffix);
let again = lit.into_token_stream().to_string();
if again != s {
test_float(&again, value, suffix);
}
}
wrong => panic!("{:?}", wrong),
}
}
test_float("5.5", 5.5, "");
test_float("5.5E12", 5.5e12, "");
test_float("5.5e12", 5.5e12, "");
test_float("1.0__3e-12", 1.03e-12, "");
test_float("1.03e+12", 1.03e12, "");
test_float("9e99e99", 9e99, "e99");
test_float("1e_0", 1.0, "");
test_float("0.0ECMA", 0.0, "ECMA");
}
#[test]
fn negative() {
let span = Span::call_site();
assert_eq!("-1", LitInt::new("-1", span).to_string());
assert_eq!("-1i8", LitInt::new("-1i8", span).to_string());
assert_eq!("-1i16", LitInt::new("-1i16", span).to_string());
assert_eq!("-1i32", LitInt::new("-1i32", span).to_string());
assert_eq!("-1i64", LitInt::new("-1i64", span).to_string());
assert_eq!("-1.5", LitFloat::new("-1.5", span).to_string());
assert_eq!("-1.5f32", LitFloat::new("-1.5f32", span).to_string());
assert_eq!("-1.5f64", LitFloat::new("-1.5f64", span).to_string());
}
#[test]
fn suffix() {
fn get_suffix(token: &str) -> String {
let lit = syn::parse_str::<Lit>(token).unwrap();
match lit {
Lit::Str(lit) => lit.suffix().to_owned(),
Lit::ByteStr(lit) => lit.suffix().to_owned(),
Lit::Byte(lit) => lit.suffix().to_owned(),
Lit::Char(lit) => lit.suffix().to_owned(),
Lit::Int(lit) => lit.suffix().to_owned(),
Lit::Float(lit) => lit.suffix().to_owned(),
_ => unimplemented!(),
}
}
assert_eq!(get_suffix("\"\"s"), "s");
assert_eq!(get_suffix("r\"\"r"), "r");
assert_eq!(get_suffix("b\"\"b"), "b");
assert_eq!(get_suffix("br\"\"br"), "br");
assert_eq!(get_suffix("r#\"\"#r"), "r");
assert_eq!(get_suffix("'c'c"), "c");
assert_eq!(get_suffix("b'b'b"), "b");
assert_eq!(get_suffix("1i32"), "i32");
assert_eq!(get_suffix("1_i32"), "i32");
assert_eq!(get_suffix("1.0f32"), "f32");
assert_eq!(get_suffix("1.0_f32"), "f32");
}
#[test]
fn test_deep_group_empty() {
let tokens = TokenStream::from_iter(vec![TokenTree::Group(Group::new(
Delimiter::None,
TokenStream::from_iter(vec![TokenTree::Group(Group::new(
Delimiter::None,
TokenStream::from_iter(vec![TokenTree::Literal(Literal::string("hi"))]),
))]),
))]);
snapshot!(tokens as Lit, @r#""hi""# );
}
#[test]
fn test_error() {
let err = syn::parse_str::<LitStr>("...").unwrap_err();
assert_eq!("expected string literal", err.to_string());
let err = syn::parse_str::<LitStr>("5").unwrap_err();
assert_eq!("expected string literal", err.to_string());
}

154
vendor/syn/tests/test_meta.rs vendored Normal file
View File

@ -0,0 +1,154 @@
#![allow(
clippy::shadow_unrelated,
clippy::too_many_lines,
clippy::uninlined_format_args
)]
#[macro_use]
mod macros;
use syn::{Meta, MetaList, MetaNameValue};
#[test]
fn test_parse_meta_item_word() {
let input = "hello";
snapshot!(input as Meta, @r###"
Meta::Path {
segments: [
PathSegment {
ident: "hello",
},
],
}
"###);
}
#[test]
fn test_parse_meta_name_value() {
let input = "foo = 5";
let (inner, meta) = (input, input);
snapshot!(inner as MetaNameValue, @r###"
MetaNameValue {
path: Path {
segments: [
PathSegment {
ident: "foo",
},
],
},
value: Expr::Lit {
lit: 5,
},
}
"###);
snapshot!(meta as Meta, @r###"
Meta::NameValue {
path: Path {
segments: [
PathSegment {
ident: "foo",
},
],
},
value: Expr::Lit {
lit: 5,
},
}
"###);
assert_eq!(meta, inner.into());
}
#[test]
fn test_parse_meta_item_list_lit() {
let input = "foo(5)";
let (inner, meta) = (input, input);
snapshot!(inner as MetaList, @r###"
MetaList {
path: Path {
segments: [
PathSegment {
ident: "foo",
},
],
},
delimiter: MacroDelimiter::Paren,
tokens: TokenStream(`5`),
}
"###);
snapshot!(meta as Meta, @r###"
Meta::List {
path: Path {
segments: [
PathSegment {
ident: "foo",
},
],
},
delimiter: MacroDelimiter::Paren,
tokens: TokenStream(`5`),
}
"###);
assert_eq!(meta, inner.into());
}
#[test]
fn test_parse_meta_item_multiple() {
let input = "foo(word, name = 5, list(name2 = 6), word2)";
let (inner, meta) = (input, input);
snapshot!(inner as MetaList, @r###"
MetaList {
path: Path {
segments: [
PathSegment {
ident: "foo",
},
],
},
delimiter: MacroDelimiter::Paren,
tokens: TokenStream(`word , name = 5 , list (name2 = 6) , word2`),
}
"###);
snapshot!(meta as Meta, @r###"
Meta::List {
path: Path {
segments: [
PathSegment {
ident: "foo",
},
],
},
delimiter: MacroDelimiter::Paren,
tokens: TokenStream(`word , name = 5 , list (name2 = 6) , word2`),
}
"###);
assert_eq!(meta, inner.into());
}
#[test]
fn test_parse_path() {
let input = "::serde::Serialize";
snapshot!(input as Meta, @r###"
Meta::Path {
leading_colon: Some,
segments: [
PathSegment {
ident: "serde",
},
Token![::],
PathSegment {
ident: "Serialize",
},
],
}
"###);
}

92
vendor/syn/tests/test_parse_buffer.rs vendored Normal file
View File

@ -0,0 +1,92 @@
#![allow(clippy::non_ascii_literal)]
use proc_macro2::{Delimiter, Group, Punct, Spacing, TokenStream, TokenTree};
use syn::parse::discouraged::Speculative as _;
use syn::parse::{Parse, ParseStream, Parser, Result};
use syn::{parenthesized, Token};
#[test]
#[should_panic(expected = "Fork was not derived from the advancing parse stream")]
fn smuggled_speculative_cursor_between_sources() {
struct BreakRules;
impl Parse for BreakRules {
fn parse(input1: ParseStream) -> Result<Self> {
let nested = |input2: ParseStream| {
input1.advance_to(input2);
Ok(Self)
};
nested.parse_str("")
}
}
syn::parse_str::<BreakRules>("").unwrap();
}
#[test]
#[should_panic(expected = "Fork was not derived from the advancing parse stream")]
fn smuggled_speculative_cursor_between_brackets() {
struct BreakRules;
impl Parse for BreakRules {
fn parse(input: ParseStream) -> Result<Self> {
let a;
let b;
parenthesized!(a in input);
parenthesized!(b in input);
a.advance_to(&b);
Ok(Self)
}
}
syn::parse_str::<BreakRules>("()()").unwrap();
}
#[test]
#[should_panic(expected = "Fork was not derived from the advancing parse stream")]
fn smuggled_speculative_cursor_into_brackets() {
struct BreakRules;
impl Parse for BreakRules {
fn parse(input: ParseStream) -> Result<Self> {
let a;
parenthesized!(a in input);
input.advance_to(&a);
Ok(Self)
}
}
syn::parse_str::<BreakRules>("()").unwrap();
}
#[test]
fn trailing_empty_none_group() {
fn parse(input: ParseStream) -> Result<()> {
input.parse::<Token![+]>()?;
let content;
parenthesized!(content in input);
content.parse::<Token![+]>()?;
Ok(())
}
// `+ ( + <Ø Ø> ) <Ø <Ø Ø> Ø>`
let tokens = TokenStream::from_iter(vec![
TokenTree::Punct(Punct::new('+', Spacing::Alone)),
TokenTree::Group(Group::new(
Delimiter::Parenthesis,
TokenStream::from_iter(vec![
TokenTree::Punct(Punct::new('+', Spacing::Alone)),
TokenTree::Group(Group::new(Delimiter::None, TokenStream::new())),
]),
)),
TokenTree::Group(Group::new(Delimiter::None, TokenStream::new())),
TokenTree::Group(Group::new(
Delimiter::None,
TokenStream::from_iter(vec![TokenTree::Group(Group::new(
Delimiter::None,
TokenStream::new(),
))]),
)),
]);
parse.parse2(tokens).unwrap();
}

164
vendor/syn/tests/test_parse_quote.rs vendored Normal file
View File

@ -0,0 +1,164 @@
#[macro_use]
mod macros;
use syn::punctuated::Punctuated;
use syn::{parse_quote, Attribute, Field, Lit, Pat, Stmt, Token};
#[test]
fn test_attribute() {
let attr: Attribute = parse_quote!(#[test]);
snapshot!(attr, @r###"
Attribute {
style: AttrStyle::Outer,
meta: Meta::Path {
segments: [
PathSegment {
ident: "test",
},
],
},
}
"###);
let attr: Attribute = parse_quote!(#![no_std]);
snapshot!(attr, @r###"
Attribute {
style: AttrStyle::Inner,
meta: Meta::Path {
segments: [
PathSegment {
ident: "no_std",
},
],
},
}
"###);
}
#[test]
fn test_field() {
let field: Field = parse_quote!(pub enabled: bool);
snapshot!(field, @r###"
Field {
vis: Visibility::Public,
ident: Some("enabled"),
colon_token: Some,
ty: Type::Path {
path: Path {
segments: [
PathSegment {
ident: "bool",
},
],
},
},
}
"###);
let field: Field = parse_quote!(primitive::bool);
snapshot!(field, @r###"
Field {
vis: Visibility::Inherited,
ty: Type::Path {
path: Path {
segments: [
PathSegment {
ident: "primitive",
},
Token![::],
PathSegment {
ident: "bool",
},
],
},
},
}
"###);
}
#[test]
fn test_pat() {
let pat: Pat = parse_quote!(Some(false) | None);
snapshot!(&pat, @r###"
Pat::Or {
cases: [
Pat::TupleStruct {
path: Path {
segments: [
PathSegment {
ident: "Some",
},
],
},
elems: [
Pat::Lit(ExprLit {
lit: Lit::Bool {
value: false,
},
}),
],
},
Token![|],
Pat::Ident {
ident: "None",
},
],
}
"###);
let boxed_pat: Box<Pat> = parse_quote!(Some(false) | None);
assert_eq!(*boxed_pat, pat);
}
#[test]
fn test_punctuated() {
let punctuated: Punctuated<Lit, Token![|]> = parse_quote!(true | true);
snapshot!(punctuated, @r###"
[
Lit::Bool {
value: true,
},
Token![|],
Lit::Bool {
value: true,
},
]
"###);
let punctuated: Punctuated<Lit, Token![|]> = parse_quote!(true | true |);
snapshot!(punctuated, @r###"
[
Lit::Bool {
value: true,
},
Token![|],
Lit::Bool {
value: true,
},
Token![|],
]
"###);
}
#[test]
fn test_vec_stmt() {
let stmts: Vec<Stmt> = parse_quote! {
let _;
true
};
snapshot!(stmts, @r###"
[
Stmt::Local {
pat: Pat::Wild,
},
Stmt::Expr(
Expr::Lit {
lit: Lit::Bool {
value: true,
},
},
None,
),
]
"###);
}

14
vendor/syn/tests/test_parse_stream.rs vendored Normal file
View File

@ -0,0 +1,14 @@
#![allow(clippy::let_underscore_untyped)]
use syn::ext::IdentExt as _;
use syn::parse::ParseStream;
use syn::{Ident, Token};
#[test]
fn test_peek() {
let _ = |input: ParseStream| {
let _ = input.peek(Ident);
let _ = input.peek(Ident::peek_any);
let _ = input.peek(Token![::]);
};
}

152
vendor/syn/tests/test_pat.rs vendored Normal file
View File

@ -0,0 +1,152 @@
#![allow(clippy::uninlined_format_args)]
#[macro_use]
mod macros;
use proc_macro2::{Delimiter, Group, TokenStream, TokenTree};
use quote::{quote, ToTokens as _};
use syn::parse::Parser;
use syn::punctuated::Punctuated;
use syn::{parse_quote, token, Item, Pat, PatTuple, Stmt, Token};
#[test]
fn test_pat_ident() {
match Pat::parse_single.parse2(quote!(self)).unwrap() {
Pat::Ident(_) => (),
value => panic!("expected PatIdent, got {:?}", value),
}
}
#[test]
fn test_pat_path() {
match Pat::parse_single.parse2(quote!(self::CONST)).unwrap() {
Pat::Path(_) => (),
value => panic!("expected PatPath, got {:?}", value),
}
}
#[test]
fn test_leading_vert() {
// https://github.com/rust-lang/rust/blob/1.43.0/src/test/ui/or-patterns/remove-leading-vert.rs
syn::parse_str::<Item>("fn f() {}").unwrap();
syn::parse_str::<Item>("fn fun1(| A: E) {}").unwrap_err();
syn::parse_str::<Item>("fn fun2(|| A: E) {}").unwrap_err();
syn::parse_str::<Stmt>("let | () = ();").unwrap_err();
syn::parse_str::<Stmt>("let (| A): E;").unwrap();
syn::parse_str::<Stmt>("let (|| A): (E);").unwrap_err();
syn::parse_str::<Stmt>("let (| A,): (E,);").unwrap();
syn::parse_str::<Stmt>("let [| A]: [E; 1];").unwrap();
syn::parse_str::<Stmt>("let [|| A]: [E; 1];").unwrap_err();
syn::parse_str::<Stmt>("let TS(| A): TS;").unwrap();
syn::parse_str::<Stmt>("let TS(|| A): TS;").unwrap_err();
syn::parse_str::<Stmt>("let NS { f: | A }: NS;").unwrap();
syn::parse_str::<Stmt>("let NS { f: || A }: NS;").unwrap_err();
}
#[test]
fn test_group() {
let group = Group::new(Delimiter::None, quote!(Some(_)));
let tokens = TokenStream::from_iter(vec![TokenTree::Group(group)]);
let pat = Pat::parse_single.parse2(tokens).unwrap();
snapshot!(pat, @r###"
Pat::TupleStruct {
path: Path {
segments: [
PathSegment {
ident: "Some",
},
],
},
elems: [
Pat::Wild,
],
}
"###);
}
#[test]
fn test_ranges() {
Pat::parse_single.parse_str("..").unwrap();
Pat::parse_single.parse_str("..hi").unwrap();
Pat::parse_single.parse_str("lo..").unwrap();
Pat::parse_single.parse_str("lo..hi").unwrap();
Pat::parse_single.parse_str("..=").unwrap_err();
Pat::parse_single.parse_str("..=hi").unwrap();
Pat::parse_single.parse_str("lo..=").unwrap_err();
Pat::parse_single.parse_str("lo..=hi").unwrap();
Pat::parse_single.parse_str("...").unwrap_err();
Pat::parse_single.parse_str("...hi").unwrap_err();
Pat::parse_single.parse_str("lo...").unwrap_err();
Pat::parse_single.parse_str("lo...hi").unwrap();
Pat::parse_single.parse_str("[lo..]").unwrap_err();
Pat::parse_single.parse_str("[..=hi]").unwrap_err();
Pat::parse_single.parse_str("[(lo..)]").unwrap();
Pat::parse_single.parse_str("[(..=hi)]").unwrap();
Pat::parse_single.parse_str("[lo..=hi]").unwrap();
Pat::parse_single.parse_str("[_, lo.., _]").unwrap_err();
Pat::parse_single.parse_str("[_, ..=hi, _]").unwrap_err();
Pat::parse_single.parse_str("[_, (lo..), _]").unwrap();
Pat::parse_single.parse_str("[_, (..=hi), _]").unwrap();
Pat::parse_single.parse_str("[_, lo..=hi, _]").unwrap();
}
#[test]
fn test_tuple_comma() {
let mut expr = PatTuple {
attrs: Vec::new(),
paren_token: token::Paren::default(),
elems: Punctuated::new(),
};
snapshot!(expr.to_token_stream() as Pat, @"Pat::Tuple");
expr.elems.push_value(parse_quote!(_));
// Must not parse to Pat::Paren
snapshot!(expr.to_token_stream() as Pat, @r###"
Pat::Tuple {
elems: [
Pat::Wild,
Token![,],
],
}
"###);
expr.elems.push_punct(<Token![,]>::default());
snapshot!(expr.to_token_stream() as Pat, @r###"
Pat::Tuple {
elems: [
Pat::Wild,
Token![,],
],
}
"###);
expr.elems.push_value(parse_quote!(_));
snapshot!(expr.to_token_stream() as Pat, @r###"
Pat::Tuple {
elems: [
Pat::Wild,
Token![,],
Pat::Wild,
],
}
"###);
expr.elems.push_punct(<Token![,]>::default());
snapshot!(expr.to_token_stream() as Pat, @r###"
Pat::Tuple {
elems: [
Pat::Wild,
Token![,],
Pat::Wild,
Token![,],
],
}
"###);
}

130
vendor/syn/tests/test_path.rs vendored Normal file
View File

@ -0,0 +1,130 @@
#![allow(clippy::uninlined_format_args)]
#[macro_use]
mod macros;
use proc_macro2::{Delimiter, Group, Ident, Punct, Spacing, Span, TokenStream, TokenTree};
use quote::{quote, ToTokens};
use syn::{parse_quote, Expr, Type, TypePath};
#[test]
fn parse_interpolated_leading_component() {
// mimics the token stream corresponding to `$mod::rest`
let tokens = TokenStream::from_iter(vec![
TokenTree::Group(Group::new(Delimiter::None, quote! { first })),
TokenTree::Punct(Punct::new(':', Spacing::Joint)),
TokenTree::Punct(Punct::new(':', Spacing::Alone)),
TokenTree::Ident(Ident::new("rest", Span::call_site())),
]);
snapshot!(tokens.clone() as Expr, @r###"
Expr::Path {
path: Path {
segments: [
PathSegment {
ident: "first",
},
Token![::],
PathSegment {
ident: "rest",
},
],
},
}
"###);
snapshot!(tokens as Type, @r###"
Type::Path {
path: Path {
segments: [
PathSegment {
ident: "first",
},
Token![::],
PathSegment {
ident: "rest",
},
],
},
}
"###);
}
#[test]
fn print_incomplete_qpath() {
// qpath with `as` token
let mut ty: TypePath = parse_quote!(<Self as A>::Q);
snapshot!(ty.to_token_stream(), @r###"
TokenStream(`< Self as A > :: Q`)
"###);
assert!(ty.path.segments.pop().is_some());
snapshot!(ty.to_token_stream(), @r###"
TokenStream(`< Self as A > ::`)
"###);
assert!(ty.path.segments.pop().is_some());
snapshot!(ty.to_token_stream(), @r###"
TokenStream(`< Self >`)
"###);
assert!(ty.path.segments.pop().is_none());
// qpath without `as` token
let mut ty: TypePath = parse_quote!(<Self>::A::B);
snapshot!(ty.to_token_stream(), @r###"
TokenStream(`< Self > :: A :: B`)
"###);
assert!(ty.path.segments.pop().is_some());
snapshot!(ty.to_token_stream(), @r###"
TokenStream(`< Self > :: A ::`)
"###);
assert!(ty.path.segments.pop().is_some());
snapshot!(ty.to_token_stream(), @r###"
TokenStream(`< Self > ::`)
"###);
assert!(ty.path.segments.pop().is_none());
// normal path
let mut ty: TypePath = parse_quote!(Self::A::B);
snapshot!(ty.to_token_stream(), @r###"
TokenStream(`Self :: A :: B`)
"###);
assert!(ty.path.segments.pop().is_some());
snapshot!(ty.to_token_stream(), @r###"
TokenStream(`Self :: A ::`)
"###);
assert!(ty.path.segments.pop().is_some());
snapshot!(ty.to_token_stream(), @r###"
TokenStream(`Self ::`)
"###);
assert!(ty.path.segments.pop().is_some());
snapshot!(ty.to_token_stream(), @r###"
TokenStream(``)
"###);
assert!(ty.path.segments.pop().is_none());
}
#[test]
fn parse_parenthesized_path_arguments_with_disambiguator() {
#[rustfmt::skip]
let tokens = quote!(dyn FnOnce::() -> !);
snapshot!(tokens as Type, @r###"
Type::TraitObject {
dyn_token: Some,
bounds: [
TypeParamBound::Trait(TraitBound {
path: Path {
segments: [
PathSegment {
ident: "FnOnce",
arguments: PathArguments::Parenthesized {
output: ReturnType::Type(
Type::Never,
),
},
},
],
},
}),
],
}
"###);
}

548
vendor/syn/tests/test_precedence.rs vendored Normal file
View File

@ -0,0 +1,548 @@
//! This test does the following for every file in the rust-lang/rust repo:
//!
//! 1. Parse the file using syn into a syn::File.
//! 2. Extract every syn::Expr from the file.
//! 3. Print each expr to a string of source code.
//! 4. Parse the source code using librustc_parse into a rustc_ast::Expr.
//! 5. For both the syn::Expr and rustc_ast::Expr, crawl the syntax tree to
//! insert parentheses surrounding every subexpression.
//! 6. Serialize the fully parenthesized syn::Expr to a string of source code.
//! 7. Parse the fully parenthesized source code using librustc_parse.
//! 8. Compare the rustc_ast::Expr resulting from parenthesizing using rustc
//! data structures vs syn data structures, ignoring spans. If they agree,
//! rustc's parser and syn's parser have identical handling of expression
//! precedence.
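//!
//! As an illustrative example (not taken from the checked sources), an input
//! expression such as `1 + 2 * 3` is fully parenthesized to roughly
//! `((1) + ((2) * (3)))`; the tree rustc parses from that string must match
//! the tree obtained by parenthesizing rustc's own parse directly.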
#![cfg(not(syn_disable_nightly_tests))]
#![cfg(not(miri))]
#![recursion_limit = "1024"]
#![feature(rustc_private)]
#![allow(
clippy::blocks_in_conditions,
clippy::doc_markdown,
clippy::explicit_deref_methods,
clippy::let_underscore_untyped,
clippy::manual_assert,
clippy::manual_let_else,
clippy::match_like_matches_macro,
clippy::match_wildcard_for_single_variants,
clippy::too_many_lines,
clippy::uninlined_format_args
)]
extern crate rustc_ast;
extern crate rustc_ast_pretty;
extern crate rustc_data_structures;
extern crate rustc_driver;
extern crate rustc_span;
extern crate smallvec;
extern crate thin_vec;
use crate::common::eq::SpanlessEq;
use crate::common::parse;
use quote::ToTokens;
use rustc_ast::ast;
use rustc_ast::ptr::P;
use rustc_ast_pretty::pprust;
use rustc_span::edition::Edition;
use std::fs;
use std::path::Path;
use std::process;
use std::sync::atomic::{AtomicUsize, Ordering};
#[macro_use]
mod macros;
#[allow(dead_code)]
mod common;
mod repo;
#[test]
fn test_rustc_precedence() {
common::rayon_init();
repo::clone_rust();
let abort_after = common::abort_after();
if abort_after == 0 {
panic!("Skipping all precedence tests");
}
let passed = AtomicUsize::new(0);
let failed = AtomicUsize::new(0);
repo::for_each_rust_file(|path| {
let content = fs::read_to_string(path).unwrap();
let (l_passed, l_failed) = match syn::parse_file(&content) {
Ok(file) => {
let edition = repo::edition(path).parse().unwrap();
let exprs = collect_exprs(file);
let (l_passed, l_failed) = test_expressions(path, edition, exprs);
errorf!(
"=== {}: {} passed | {} failed\n",
path.display(),
l_passed,
l_failed,
);
(l_passed, l_failed)
}
Err(msg) => {
errorf!("\nFAIL {} - syn failed to parse: {}\n", path.display(), msg);
(0, 1)
}
};
passed.fetch_add(l_passed, Ordering::Relaxed);
let prev_failed = failed.fetch_add(l_failed, Ordering::Relaxed);
if prev_failed + l_failed >= abort_after {
process::exit(1);
}
});
let passed = passed.load(Ordering::Relaxed);
let failed = failed.load(Ordering::Relaxed);
errorf!("\n===== Precedence Test Results =====\n");
errorf!("{} passed | {} failed\n", passed, failed);
if failed > 0 {
panic!("{} failures", failed);
}
}
fn test_expressions(path: &Path, edition: Edition, exprs: Vec<syn::Expr>) -> (usize, usize) {
let mut passed = 0;
let mut failed = 0;
rustc_span::create_session_if_not_set_then(edition, |_| {
for expr in exprs {
let source_code = expr.to_token_stream().to_string();
let librustc_ast = if let Some(e) = librustc_parse_and_rewrite(&source_code) {
e
} else {
failed += 1;
errorf!(
"\nFAIL {} - librustc failed to parse original\n",
path.display(),
);
continue;
};
let syn_parenthesized_code =
syn_parenthesize(expr.clone()).to_token_stream().to_string();
let syn_ast = if let Some(e) = parse::librustc_expr(&syn_parenthesized_code) {
e
} else {
failed += 1;
errorf!(
"\nFAIL {} - librustc failed to parse parenthesized\n",
path.display(),
);
continue;
};
if !SpanlessEq::eq(&syn_ast, &librustc_ast) {
failed += 1;
let syn_pretty = pprust::expr_to_string(&syn_ast);
let librustc_pretty = pprust::expr_to_string(&librustc_ast);
errorf!(
"\nFAIL {}\n{}\nsyn != rustc\n{}\n",
path.display(),
syn_pretty,
librustc_pretty,
);
continue;
}
let expr_invisible = make_parens_invisible(expr);
let Ok(reparsed_expr_invisible) = syn::parse2(expr_invisible.to_token_stream()) else {
failed += 1;
errorf!(
"\nFAIL {} - syn failed to parse invisible delimiters\n{}\n",
path.display(),
source_code,
);
continue;
};
if expr_invisible != reparsed_expr_invisible {
failed += 1;
errorf!(
"\nFAIL {} - mismatch after parsing invisible delimiters\n{}\n",
path.display(),
source_code,
);
continue;
}
passed += 1;
}
});
(passed, failed)
}
fn librustc_parse_and_rewrite(input: &str) -> Option<P<ast::Expr>> {
parse::librustc_expr(input).map(librustc_parenthesize)
}
fn librustc_parenthesize(mut librustc_expr: P<ast::Expr>) -> P<ast::Expr> {
use rustc_ast::ast::{
AssocItem, AssocItemKind, Attribute, BinOpKind, Block, BorrowKind, BoundConstness, Expr,
ExprField, ExprKind, GenericArg, GenericBound, ItemKind, Local, LocalKind, Pat, Stmt,
StmtKind, StructExpr, StructRest, TraitBoundModifiers, Ty,
};
use rustc_ast::mut_visit::{
noop_flat_map_assoc_item, noop_visit_generic_arg, noop_visit_item_kind, noop_visit_local,
noop_visit_param_bound, MutVisitor,
};
use rustc_data_structures::flat_map_in_place::FlatMapInPlace;
use rustc_span::DUMMY_SP;
use smallvec::SmallVec;
use std::mem;
use std::ops::DerefMut;
use thin_vec::ThinVec;
struct FullyParenthesize;
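    // A chain of `&&`-ed `let` expressions must be left unparenthesized:
    // `let` is only valid directly inside an `if`/`while` condition, so
    // wrapping any link of the chain in parentheses would make it invalid.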
fn contains_let_chain(expr: &Expr) -> bool {
match &expr.kind {
ExprKind::Let(..) => true,
ExprKind::Binary(binop, left, right) => {
binop.node == BinOpKind::And
&& (contains_let_chain(left) || contains_let_chain(right))
}
_ => false,
}
}
fn flat_map_field<T: MutVisitor>(mut f: ExprField, vis: &mut T) -> Vec<ExprField> {
if f.is_shorthand {
noop_visit_expr(&mut f.expr, vis);
} else {
vis.visit_expr(&mut f.expr);
}
vec![f]
}
fn flat_map_stmt<T: MutVisitor>(stmt: Stmt, vis: &mut T) -> Vec<Stmt> {
let kind = match stmt.kind {
// Don't wrap toplevel expressions in statements.
StmtKind::Expr(mut e) => {
noop_visit_expr(&mut e, vis);
StmtKind::Expr(e)
}
StmtKind::Semi(mut e) => {
noop_visit_expr(&mut e, vis);
StmtKind::Semi(e)
}
s => s,
};
vec![Stmt { kind, ..stmt }]
}
fn noop_visit_expr<T: MutVisitor>(e: &mut Expr, vis: &mut T) {
use rustc_ast::mut_visit::{noop_visit_expr, visit_attrs};
match &mut e.kind {
ExprKind::AddrOf(BorrowKind::Raw, ..) => {}
ExprKind::Struct(expr) => {
let StructExpr {
qself,
path,
fields,
rest,
} = expr.deref_mut();
vis.visit_qself(qself);
vis.visit_path(path);
fields.flat_map_in_place(|field| flat_map_field(field, vis));
if let StructRest::Base(rest) = rest {
vis.visit_expr(rest);
}
vis.visit_id(&mut e.id);
vis.visit_span(&mut e.span);
visit_attrs(&mut e.attrs, vis);
}
_ => noop_visit_expr(e, vis),
}
}
impl MutVisitor for FullyParenthesize {
fn visit_expr(&mut self, e: &mut P<Expr>) {
noop_visit_expr(e, self);
match e.kind {
ExprKind::Block(..) | ExprKind::If(..) | ExprKind::Let(..) => {}
ExprKind::Binary(..) if contains_let_chain(e) => {}
_ => {
let inner = mem::replace(
e,
P(Expr {
id: ast::DUMMY_NODE_ID,
kind: ExprKind::Err,
span: DUMMY_SP,
attrs: ThinVec::new(),
tokens: None,
}),
);
e.kind = ExprKind::Paren(inner);
}
}
}
fn visit_generic_arg(&mut self, arg: &mut GenericArg) {
match arg {
// Don't wrap unbraced const generic arg as that's invalid syntax.
GenericArg::Const(anon_const) => {
if let ExprKind::Block(..) = &mut anon_const.value.kind {
noop_visit_expr(&mut anon_const.value, self);
}
}
_ => noop_visit_generic_arg(arg, self),
}
}
fn visit_param_bound(&mut self, bound: &mut GenericBound) {
match bound {
GenericBound::Trait(
_,
TraitBoundModifiers {
constness: BoundConstness::Maybe(_),
..
},
) => {}
_ => noop_visit_param_bound(bound, self),
}
}
fn visit_block(&mut self, block: &mut P<Block>) {
self.visit_id(&mut block.id);
block
.stmts
.flat_map_in_place(|stmt| flat_map_stmt(stmt, self));
self.visit_span(&mut block.span);
}
fn visit_local(&mut self, local: &mut P<Local>) {
match local.kind {
LocalKind::InitElse(..) => {}
_ => noop_visit_local(local, self),
}
}
fn visit_item_kind(&mut self, item: &mut ItemKind) {
match item {
ItemKind::Const(const_item)
if !const_item.generics.params.is_empty()
|| !const_item.generics.where_clause.predicates.is_empty() => {}
_ => noop_visit_item_kind(item, self),
}
}
fn flat_map_trait_item(&mut self, item: P<AssocItem>) -> SmallVec<[P<AssocItem>; 1]> {
match &item.kind {
AssocItemKind::Const(const_item)
if !const_item.generics.params.is_empty()
|| !const_item.generics.where_clause.predicates.is_empty() =>
{
SmallVec::from([item])
}
_ => noop_flat_map_assoc_item(item, self),
}
}
fn flat_map_impl_item(&mut self, item: P<AssocItem>) -> SmallVec<[P<AssocItem>; 1]> {
match &item.kind {
AssocItemKind::Const(const_item)
if !const_item.generics.params.is_empty()
|| !const_item.generics.where_clause.predicates.is_empty() =>
{
SmallVec::from([item])
}
_ => noop_flat_map_assoc_item(item, self),
}
}
// We don't want to look at expressions that might appear in patterns or
// types yet. We'll look into comparing those in the future. For now,
// focus on expressions appearing in other places.
fn visit_pat(&mut self, pat: &mut P<Pat>) {
let _ = pat;
}
fn visit_ty(&mut self, ty: &mut P<Ty>) {
let _ = ty;
}
fn visit_attribute(&mut self, attr: &mut Attribute) {
let _ = attr;
}
}
let mut folder = FullyParenthesize;
folder.visit_expr(&mut librustc_expr);
librustc_expr
}
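// syn-side counterpart of librustc_parenthesize: wraps folded expressions in
// Expr::Paren, except where extra parentheses would be invalid or would change
// meaning (blocks, if, unsafe, let and let-chains, unbraced const generic
// arguments, attribute values, and the outermost expression of a statement).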
fn syn_parenthesize(syn_expr: syn::Expr) -> syn::Expr {
use syn::fold::{fold_expr, fold_generic_argument, Fold};
use syn::{token, BinOp, Expr, ExprParen, GenericArgument, MetaNameValue, Pat, Stmt, Type};
struct FullyParenthesize;
fn parenthesize(expr: Expr) -> Expr {
Expr::Paren(ExprParen {
attrs: Vec::new(),
expr: Box::new(expr),
paren_token: token::Paren::default(),
})
}
fn needs_paren(expr: &Expr) -> bool {
match expr {
Expr::Group(_) => unreachable!(),
Expr::If(_) | Expr::Unsafe(_) | Expr::Block(_) | Expr::Let(_) => false,
Expr::Binary(_) => !contains_let_chain(expr),
_ => true,
}
}
fn contains_let_chain(expr: &Expr) -> bool {
match expr {
Expr::Let(_) => true,
Expr::Binary(expr) => {
matches!(expr.op, BinOp::And(_))
&& (contains_let_chain(&expr.left) || contains_let_chain(&expr.right))
}
_ => false,
}
}
impl Fold for FullyParenthesize {
fn fold_expr(&mut self, expr: Expr) -> Expr {
let needs_paren = needs_paren(&expr);
let folded = fold_expr(self, expr);
if needs_paren {
parenthesize(folded)
} else {
folded
}
}
fn fold_generic_argument(&mut self, arg: GenericArgument) -> GenericArgument {
match arg {
GenericArgument::Const(arg) => GenericArgument::Const(match arg {
Expr::Block(_) => fold_expr(self, arg),
// Don't wrap unbraced const generic arg as that's invalid syntax.
_ => arg,
}),
_ => fold_generic_argument(self, arg),
}
}
fn fold_stmt(&mut self, stmt: Stmt) -> Stmt {
match stmt {
// Don't wrap top-level expressions in statements.
Stmt::Expr(Expr::Verbatim(_), Some(_)) => stmt,
Stmt::Expr(e, semi) => Stmt::Expr(fold_expr(self, e), semi),
s => s,
}
}
fn fold_meta_name_value(&mut self, meta: MetaNameValue) -> MetaNameValue {
// Don't turn #[p = "..."] into #[p = ("...")].
meta
}
// We don't want to look at expressions that might appear in patterns or
// types yet. We'll look into comparing those in the future. For now,
// focus on expressions appearing in other places.
fn fold_pat(&mut self, pat: Pat) -> Pat {
pat
}
fn fold_type(&mut self, ty: Type) -> Type {
ty
}
}
let mut folder = FullyParenthesize;
folder.fold_expr(syn_expr)
}
fn make_parens_invisible(expr: syn::Expr) -> syn::Expr {
use syn::fold::{fold_expr, fold_stmt, Fold};
use syn::{token, Expr, ExprGroup, ExprParen, Stmt};
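// Rewrites every explicit parenthesis in the expression into a None-delimited
// (invisible) group; binary and cast expressions in statement position are
// additionally wrapped in a real parenthesis first.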
struct MakeParensInvisible;
impl Fold for MakeParensInvisible {
fn fold_expr(&mut self, mut expr: Expr) -> Expr {
if let Expr::Paren(paren) = expr {
expr = Expr::Group(ExprGroup {
attrs: paren.attrs,
group_token: token::Group(paren.paren_token.span.join()),
expr: paren.expr,
});
}
fold_expr(self, expr)
}
fn fold_stmt(&mut self, stmt: Stmt) -> Stmt {
if let Stmt::Expr(expr @ (Expr::Binary(_) | Expr::Cast(_)), None) = stmt {
Stmt::Expr(
Expr::Paren(ExprParen {
attrs: Vec::new(),
paren_token: token::Paren::default(),
expr: Box::new(fold_expr(self, expr)),
}),
None,
)
} else {
fold_stmt(self, stmt)
}
}
}
let mut folder = MakeParensInvisible;
folder.fold_expr(expr)
}
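// A rough sketch of how the helpers above combine for one expression,
// mirroring the body of the loop in test_expressions:
//
//     let source = expr.to_token_stream().to_string();
//     let rustc_ast = librustc_parse_and_rewrite(&source).unwrap();
//     let syn_source = syn_parenthesize(expr.clone()).to_token_stream().to_string();
//     let syn_ast = parse::librustc_expr(&syn_source).unwrap();
//     assert!(SpanlessEq::eq(&syn_ast, &rustc_ast));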
/// Walk through a crate collecting all expressions we can find in it.
fn collect_exprs(file: syn::File) -> Vec<syn::Expr> {
use syn::fold::Fold;
use syn::punctuated::Punctuated;
use syn::{token, ConstParam, Expr, ExprTuple, Pat, Path};
struct CollectExprs(Vec<Expr>);
impl Fold for CollectExprs {
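// Each expression encountered is collected whole and replaced with an empty
// tuple, so the fold does not descend into (and separately re-collect) its
// subexpressions.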
fn fold_expr(&mut self, expr: Expr) -> Expr {
match expr {
Expr::Verbatim(_) => {}
_ => self.0.push(expr),
}
Expr::Tuple(ExprTuple {
attrs: vec![],
elems: Punctuated::new(),
paren_token: token::Paren::default(),
})
}
fn fold_pat(&mut self, pat: Pat) -> Pat {
pat
}
fn fold_path(&mut self, path: Path) -> Path {
// Skip traversing into const generic path arguments
path
}
fn fold_const_param(&mut self, const_param: ConstParam) -> ConstParam {
const_param
}
}
let mut folder = CollectExprs(vec![]);
folder.fold_file(file);
folder.0
}

321
vendor/syn/tests/test_receiver.rs vendored Normal file

@ -0,0 +1,321 @@
#![allow(clippy::uninlined_format_args)]
#[macro_use]
mod macros;
use syn::{parse_quote, TraitItemFn};
#[test]
fn test_by_value() {
let TraitItemFn { sig, .. } = parse_quote! {
fn by_value(self: Self);
};
snapshot!(&sig.inputs[0], @r###"
FnArg::Receiver(Receiver {
colon_token: Some,
ty: Type::Path {
path: Path {
segments: [
PathSegment {
ident: "Self",
},
],
},
},
})
"###);
}
#[test]
fn test_by_mut_value() {
let TraitItemFn { sig, .. } = parse_quote! {
fn by_mut(mut self: Self);
};
snapshot!(&sig.inputs[0], @r###"
FnArg::Receiver(Receiver {
mutability: Some,
colon_token: Some,
ty: Type::Path {
path: Path {
segments: [
PathSegment {
ident: "Self",
},
],
},
},
})
"###);
}
#[test]
fn test_by_ref() {
let TraitItemFn { sig, .. } = parse_quote! {
fn by_ref(self: &Self);
};
snapshot!(&sig.inputs[0], @r###"
FnArg::Receiver(Receiver {
colon_token: Some,
ty: Type::Reference {
elem: Type::Path {
path: Path {
segments: [
PathSegment {
ident: "Self",
},
],
},
},
},
})
"###);
}
#[test]
fn test_by_box() {
let TraitItemFn { sig, .. } = parse_quote! {
fn by_box(self: Box<Self>);
};
snapshot!(&sig.inputs[0], @r###"
FnArg::Receiver(Receiver {
colon_token: Some,
ty: Type::Path {
path: Path {
segments: [
PathSegment {
ident: "Box",
arguments: PathArguments::AngleBracketed {
args: [
GenericArgument::Type(Type::Path {
path: Path {
segments: [
PathSegment {
ident: "Self",
},
],
},
}),
],
},
},
],
},
},
})
"###);
}
#[test]
fn test_by_pin() {
let TraitItemFn { sig, .. } = parse_quote! {
fn by_pin(self: Pin<Self>);
};
snapshot!(&sig.inputs[0], @r###"
FnArg::Receiver(Receiver {
colon_token: Some,
ty: Type::Path {
path: Path {
segments: [
PathSegment {
ident: "Pin",
arguments: PathArguments::AngleBracketed {
args: [
GenericArgument::Type(Type::Path {
path: Path {
segments: [
PathSegment {
ident: "Self",
},
],
},
}),
],
},
},
],
},
},
})
"###);
}
#[test]
fn test_explicit_type() {
let TraitItemFn { sig, .. } = parse_quote! {
fn explicit_type(self: Pin<MyType>);
};
snapshot!(&sig.inputs[0], @r###"
FnArg::Receiver(Receiver {
colon_token: Some,
ty: Type::Path {
path: Path {
segments: [
PathSegment {
ident: "Pin",
arguments: PathArguments::AngleBracketed {
args: [
GenericArgument::Type(Type::Path {
path: Path {
segments: [
PathSegment {
ident: "MyType",
},
],
},
}),
],
},
},
],
},
},
})
"###);
}
#[test]
fn test_value_shorthand() {
let TraitItemFn { sig, .. } = parse_quote! {
fn value_shorthand(self);
};
snapshot!(&sig.inputs[0], @r###"
FnArg::Receiver(Receiver {
ty: Type::Path {
path: Path {
segments: [
PathSegment {
ident: "Self",
},
],
},
},
})
"###);
}
#[test]
fn test_mut_value_shorthand() {
let TraitItemFn { sig, .. } = parse_quote! {
fn mut_value_shorthand(mut self);
};
snapshot!(&sig.inputs[0], @r###"
FnArg::Receiver(Receiver {
mutability: Some,
ty: Type::Path {
path: Path {
segments: [
PathSegment {
ident: "Self",
},
],
},
},
})
"###);
}
#[test]
fn test_ref_shorthand() {
let TraitItemFn { sig, .. } = parse_quote! {
fn ref_shorthand(&self);
};
snapshot!(&sig.inputs[0], @r###"
FnArg::Receiver(Receiver {
reference: Some(None),
ty: Type::Reference {
elem: Type::Path {
path: Path {
segments: [
PathSegment {
ident: "Self",
},
],
},
},
},
})
"###);
}
#[test]
fn test_ref_shorthand_with_lifetime() {
let TraitItemFn { sig, .. } = parse_quote! {
fn ref_shorthand(&'a self);
};
snapshot!(&sig.inputs[0], @r###"
FnArg::Receiver(Receiver {
reference: Some(Some(Lifetime {
ident: "a",
})),
ty: Type::Reference {
lifetime: Some(Lifetime {
ident: "a",
}),
elem: Type::Path {
path: Path {
segments: [
PathSegment {
ident: "Self",
},
],
},
},
},
})
"###);
}
#[test]
fn test_ref_mut_shorthand() {
let TraitItemFn { sig, .. } = parse_quote! {
fn ref_mut_shorthand(&mut self);
};
snapshot!(&sig.inputs[0], @r###"
FnArg::Receiver(Receiver {
reference: Some(None),
mutability: Some,
ty: Type::Reference {
mutability: Some,
elem: Type::Path {
path: Path {
segments: [
PathSegment {
ident: "Self",
},
],
},
},
},
})
"###);
}
#[test]
fn test_ref_mut_shorthand_with_lifetime() {
let TraitItemFn { sig, .. } = parse_quote! {
fn ref_mut_shorthand(&'a mut self);
};
snapshot!(&sig.inputs[0], @r###"
FnArg::Receiver(Receiver {
reference: Some(Some(Lifetime {
ident: "a",
})),
mutability: Some,
ty: Type::Reference {
lifetime: Some(Lifetime {
ident: "a",
}),
mutability: Some,
elem: Type::Path {
path: Path {
segments: [
PathSegment {
ident: "Self",
},
],
},
},
},
})
"###);
}

239
vendor/syn/tests/test_round_trip.rs vendored Normal file

@ -0,0 +1,239 @@
#![cfg(not(syn_disable_nightly_tests))]
#![cfg(not(miri))]
#![recursion_limit = "1024"]
#![feature(rustc_private)]
#![allow(
clippy::blocks_in_conditions,
clippy::manual_assert,
clippy::manual_let_else,
clippy::match_like_matches_macro,
clippy::uninlined_format_args
)]
extern crate rustc_ast;
extern crate rustc_ast_pretty;
extern crate rustc_data_structures;
extern crate rustc_driver;
extern crate rustc_error_messages;
extern crate rustc_errors;
extern crate rustc_expand;
extern crate rustc_parse as parse;
extern crate rustc_session;
extern crate rustc_span;
use crate::common::eq::SpanlessEq;
use quote::quote;
use rustc_ast::ast::{
AngleBracketedArg, AngleBracketedArgs, Crate, GenericArg, GenericParamKind, Generics,
WhereClause,
};
use rustc_ast::mut_visit::{self, MutVisitor};
use rustc_ast_pretty::pprust;
use rustc_error_messages::{DiagnosticMessage, LazyFallbackBundle};
use rustc_errors::{translation, Diagnostic, PResult};
use rustc_session::parse::ParseSess;
use rustc_span::source_map::FilePathMapping;
use rustc_span::FileName;
use std::borrow::Cow;
use std::fs;
use std::panic;
use std::path::Path;
use std::process;
use std::sync::atomic::{AtomicUsize, Ordering};
use std::time::Instant;
#[macro_use]
mod macros;
#[allow(dead_code)]
mod common;
mod repo;
#[test]
fn test_round_trip() {
common::rayon_init();
repo::clone_rust();
let abort_after = common::abort_after();
if abort_after == 0 {
panic!("Skipping all round_trip tests");
}
let failed = AtomicUsize::new(0);
repo::for_each_rust_file(|path| test(path, &failed, abort_after));
let failed = failed.load(Ordering::Relaxed);
if failed > 0 {
panic!("{} failures", failed);
}
}
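// For each source file: parse it with syn, print the syntax tree back to
// tokens with quote!, parse both the original text and the reprinted text
// with rustc, normalize both crates, and compare them ignoring spans.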
fn test(path: &Path, failed: &AtomicUsize, abort_after: usize) {
let content = fs::read_to_string(path).unwrap();
let start = Instant::now();
let (krate, elapsed) = match syn::parse_file(&content) {
Ok(krate) => (krate, start.elapsed()),
Err(msg) => {
errorf!("=== {}: syn failed to parse\n{:?}\n", path.display(), msg);
let prev_failed = failed.fetch_add(1, Ordering::Relaxed);
if prev_failed + 1 >= abort_after {
process::exit(1);
}
return;
}
};
let back = quote!(#krate).to_string();
let edition = repo::edition(path).parse().unwrap();
rustc_span::create_session_if_not_set_then(edition, |_| {
let equal = match panic::catch_unwind(|| {
let locale_resources = rustc_driver::DEFAULT_LOCALE_RESOURCES.to_vec();
let file_path_mapping = FilePathMapping::empty();
let sess = ParseSess::new(locale_resources, file_path_mapping);
let before = match librustc_parse(content, &sess) {
Ok(before) => before,
Err(diagnostic) => {
errorf!(
"=== {}: ignore - librustc failed to parse original content: {}\n",
path.display(),
translate_message(&diagnostic),
);
diagnostic.cancel();
return Err(true);
}
};
let after = match librustc_parse(back, &sess) {
Ok(after) => after,
Err(mut diagnostic) => {
errorf!("=== {}: librustc failed to parse", path.display());
diagnostic.emit();
return Err(false);
}
};
Ok((before, after))
}) {
Err(_) => {
errorf!("=== {}: ignoring librustc panic\n", path.display());
true
}
Ok(Err(equal)) => equal,
Ok(Ok((mut before, mut after))) => {
normalize(&mut before);
normalize(&mut after);
if SpanlessEq::eq(&before, &after) {
errorf!(
"=== {}: pass in {}ms\n",
path.display(),
elapsed.as_secs() * 1000 + u64::from(elapsed.subsec_nanos()) / 1_000_000
);
true
} else {
errorf!(
"=== {}: FAIL\n{}\n!=\n{}\n",
path.display(),
pprust::crate_to_string_for_macros(&before),
pprust::crate_to_string_for_macros(&after),
);
false
}
}
};
if !equal {
let prev_failed = failed.fetch_add(1, Ordering::Relaxed);
if prev_failed + 1 >= abort_after {
process::exit(1);
}
}
});
}
fn librustc_parse(content: String, sess: &ParseSess) -> PResult<Crate> {
static COUNTER: AtomicUsize = AtomicUsize::new(0);
let counter = COUNTER.fetch_add(1, Ordering::Relaxed);
let name = FileName::Custom(format!("test_round_trip{}", counter));
parse::parse_crate_from_source_str(name, content, sess)
}
fn translate_message(diagnostic: &Diagnostic) -> Cow<'static, str> {
thread_local! {
static FLUENT_BUNDLE: LazyFallbackBundle = {
let locale_resources = rustc_driver::DEFAULT_LOCALE_RESOURCES.to_vec();
let with_directionality_markers = false;
rustc_error_messages::fallback_fluent_bundle(locale_resources, with_directionality_markers)
};
}
let message = &diagnostic.messages[0].0;
let args = translation::to_fluent_args(diagnostic.args());
let (identifier, attr) = match message {
DiagnosticMessage::Str(msg) | DiagnosticMessage::Eager(msg) => return msg.clone(),
DiagnosticMessage::FluentIdentifier(identifier, attr) => (identifier, attr),
};
FLUENT_BUNDLE.with(|fluent_bundle| {
let message = fluent_bundle
.get_message(identifier)
.expect("missing diagnostic in fluent bundle");
let value = match attr {
Some(attr) => message
.get_attribute(attr)
.expect("missing attribute in fluent message")
.value(),
None => message.value().expect("missing value in fluent message"),
};
let mut err = Vec::new();
let translated = fluent_bundle.format_pattern(value, Some(&args), &mut err);
assert!(err.is_empty());
Cow::Owned(translated.into_owned())
})
}
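// Normalization applied to both ASTs before comparison: generic arguments and
// parameters are sorted by kind (lifetimes, then types and consts, then
// constraints), and a `where` clause with no predicates loses its `where` token.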
fn normalize(krate: &mut Crate) {
struct NormalizeVisitor;
impl MutVisitor for NormalizeVisitor {
fn visit_angle_bracketed_parameter_data(&mut self, e: &mut AngleBracketedArgs) {
#[derive(Ord, PartialOrd, Eq, PartialEq)]
enum Group {
Lifetimes,
TypesAndConsts,
Constraints,
}
e.args.sort_by_key(|arg| match arg {
AngleBracketedArg::Arg(arg) => match arg {
GenericArg::Lifetime(_) => Group::Lifetimes,
GenericArg::Type(_) | GenericArg::Const(_) => Group::TypesAndConsts,
},
AngleBracketedArg::Constraint(_) => Group::Constraints,
});
mut_visit::noop_visit_angle_bracketed_parameter_data(e, self);
}
fn visit_generics(&mut self, e: &mut Generics) {
#[derive(Ord, PartialOrd, Eq, PartialEq)]
enum Group {
Lifetimes,
TypesAndConsts,
}
e.params.sort_by_key(|param| match param.kind {
GenericParamKind::Lifetime => Group::Lifetimes,
GenericParamKind::Type { .. } | GenericParamKind::Const { .. } => {
Group::TypesAndConsts
}
});
mut_visit::noop_visit_generics(e, self);
}
fn visit_where_clause(&mut self, e: &mut WhereClause) {
if e.predicates.is_empty() {
e.has_where_token = false;
}
}
}
NormalizeVisitor.visit_crate(krate);
}

67
vendor/syn/tests/test_shebang.rs vendored Normal file

@ -0,0 +1,67 @@
#![allow(clippy::uninlined_format_args)]
#[macro_use]
mod macros;
#[test]
fn test_basic() {
let content = "#!/usr/bin/env rustx\nfn main() {}";
let file = syn::parse_file(content).unwrap();
snapshot!(file, @r###"
File {
shebang: Some("#!/usr/bin/env rustx"),
items: [
Item::Fn {
vis: Visibility::Inherited,
sig: Signature {
ident: "main",
generics: Generics,
output: ReturnType::Default,
},
block: Block {
stmts: [],
},
},
],
}
"###);
}
#[test]
fn test_comment() {
let content = "#!//am/i/a/comment\n[allow(dead_code)] fn main() {}";
let file = syn::parse_file(content).unwrap();
snapshot!(file, @r###"
File {
attrs: [
Attribute {
style: AttrStyle::Inner,
meta: Meta::List {
path: Path {
segments: [
PathSegment {
ident: "allow",
},
],
},
delimiter: MacroDelimiter::Paren,
tokens: TokenStream(`dead_code`),
},
},
],
items: [
Item::Fn {
vis: Visibility::Inherited,
sig: Signature {
ident: "main",
generics: Generics,
output: ReturnType::Default,
},
block: Block {
stmts: [],
},
},
],
}
"###);
}

45
vendor/syn/tests/test_should_parse.rs vendored Normal file

@ -0,0 +1,45 @@
macro_rules! should_parse {
($name:ident, { $($in:tt)* }) => {
#[test]
fn $name() {
// Make sure we can parse the file!
syn::parse_file(stringify!($($in)*)).unwrap();
}
}
}
should_parse!(generic_associated_type, {
impl Foo {
type Item = &'a i32;
fn foo<'a>(&'a self) -> Self::Item<'a> {}
}
});
#[rustfmt::skip]
should_parse!(const_generics_use, {
type X = Foo<5>;
type Y = Foo<"foo">;
type Z = Foo<X>;
type W = Foo<{ X + 10 }>;
});
should_parse!(trailing_plus_type, {
type A = Box<Foo>;
type A = Box<Foo + 'a>;
type A = Box<'a + Foo>;
});
should_parse!(generic_associated_type_where, {
trait Foo {
type Item;
fn foo<T>(&self, t: T) -> Self::Item<T>;
}
});
should_parse!(match_with_block_expr, {
fn main() {
match false {
_ => {}.a(),
}
}
});

36
vendor/syn/tests/test_size.rs vendored Normal file

@ -0,0 +1,36 @@
// Assumes proc-macro2's "span-locations" feature is off.
#![cfg(target_pointer_width = "64")]
use std::mem;
use syn::{Expr, Item, Lit, Pat, Type};
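// The `rustversion` attributes skip each assertion on toolchains older than
// the given date, presumably the nightly on which that type last changed size.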
#[rustversion::attr(before(2022-11-24), ignore)]
#[test]
fn test_expr_size() {
assert_eq!(mem::size_of::<Expr>(), 176);
}
#[rustversion::attr(before(2022-09-09), ignore)]
#[test]
fn test_item_size() {
assert_eq!(mem::size_of::<Item>(), 360);
}
#[rustversion::attr(before(2023-04-29), ignore)]
#[test]
fn test_type_size() {
assert_eq!(mem::size_of::<Type>(), 232);
}
#[rustversion::attr(before(2023-04-29), ignore)]
#[test]
fn test_pat_size() {
assert_eq!(mem::size_of::<Pat>(), 184);
}
#[rustversion::attr(before(2023-12-20), ignore)]
#[test]
fn test_lit_size() {
assert_eq!(mem::size_of::<Lit>(), 24);
}

322
vendor/syn/tests/test_stmt.rs vendored Normal file

@ -0,0 +1,322 @@
#![allow(
clippy::assertions_on_result_states,
clippy::non_ascii_literal,
clippy::uninlined_format_args
)]
#[macro_use]
mod macros;
use proc_macro2::{Delimiter, Group, Ident, Span, TokenStream, TokenTree};
use quote::{quote, ToTokens as _};
use syn::parse::Parser as _;
use syn::{Block, Stmt};
#[test]
fn test_raw_operator() {
let stmt = syn::parse_str::<Stmt>("let _ = &raw const x;").unwrap();
snapshot!(stmt, @r###"
Stmt::Local {
pat: Pat::Wild,
init: Some(LocalInit {
expr: Expr::Verbatim(`& raw const x`),
}),
}
"###);
}
#[test]
fn test_raw_variable() {
let stmt = syn::parse_str::<Stmt>("let _ = &raw;").unwrap();
snapshot!(stmt, @r###"
Stmt::Local {
pat: Pat::Wild,
init: Some(LocalInit {
expr: Expr::Reference {
expr: Expr::Path {
path: Path {
segments: [
PathSegment {
ident: "raw",
},
],
},
},
},
}),
}
"###);
}
#[test]
fn test_raw_invalid() {
assert!(syn::parse_str::<Stmt>("let _ = &raw x;").is_err());
}
#[test]
fn test_none_group() {
// <Ø async fn f() {} Ø>
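// (the Ø symbols mark the boundaries of a None-delimited, i.e. invisible, group)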
let tokens = TokenStream::from_iter(vec![TokenTree::Group(Group::new(
Delimiter::None,
TokenStream::from_iter(vec![
TokenTree::Ident(Ident::new("async", Span::call_site())),
TokenTree::Ident(Ident::new("fn", Span::call_site())),
TokenTree::Ident(Ident::new("f", Span::call_site())),
TokenTree::Group(Group::new(Delimiter::Parenthesis, TokenStream::new())),
TokenTree::Group(Group::new(Delimiter::Brace, TokenStream::new())),
]),
))]);
snapshot!(tokens as Stmt, @r###"
Stmt::Item(Item::Fn {
vis: Visibility::Inherited,
sig: Signature {
asyncness: Some,
ident: "f",
generics: Generics,
output: ReturnType::Default,
},
block: Block {
stmts: [],
},
})
"###);
let tokens = Group::new(Delimiter::None, quote!(let None = None)).to_token_stream();
let stmts = Block::parse_within.parse2(tokens).unwrap();
snapshot!(stmts, @r###"
[
Stmt::Expr(
Expr::Group {
expr: Expr::Let {
pat: Pat::Ident {
ident: "None",
},
expr: Expr::Path {
path: Path {
segments: [
PathSegment {
ident: "None",
},
],
},
},
},
},
None,
),
]
"###);
}
#[test]
fn test_let_dot_dot() {
let tokens = quote! {
let .. = 10;
};
snapshot!(tokens as Stmt, @r###"
Stmt::Local {
pat: Pat::Rest,
init: Some(LocalInit {
expr: Expr::Lit {
lit: 10,
},
}),
}
"###);
}
#[test]
fn test_let_else() {
let tokens = quote! {
let Some(x) = None else { return 0; };
};
snapshot!(tokens as Stmt, @r###"
Stmt::Local {
pat: Pat::TupleStruct {
path: Path {
segments: [
PathSegment {
ident: "Some",
},
],
},
elems: [
Pat::Ident {
ident: "x",
},
],
},
init: Some(LocalInit {
expr: Expr::Path {
path: Path {
segments: [
PathSegment {
ident: "None",
},
],
},
},
diverge: Some(Expr::Block {
block: Block {
stmts: [
Stmt::Expr(
Expr::Return {
expr: Some(Expr::Lit {
lit: 0,
}),
},
Some,
),
],
},
}),
}),
}
"###);
}
#[test]
fn test_macros() {
let tokens = quote! {
fn main() {
macro_rules! mac {}
thread_local! { static FOO }
println!("");
vec![]
}
};
snapshot!(tokens as Stmt, @r###"
Stmt::Item(Item::Fn {
vis: Visibility::Inherited,
sig: Signature {
ident: "main",
generics: Generics,
output: ReturnType::Default,
},
block: Block {
stmts: [
Stmt::Item(Item::Macro {
ident: Some("mac"),
mac: Macro {
path: Path {
segments: [
PathSegment {
ident: "macro_rules",
},
],
},
delimiter: MacroDelimiter::Brace,
tokens: TokenStream(``),
},
}),
Stmt::Macro {
mac: Macro {
path: Path {
segments: [
PathSegment {
ident: "thread_local",
},
],
},
delimiter: MacroDelimiter::Brace,
tokens: TokenStream(`static FOO`),
},
},
Stmt::Macro {
mac: Macro {
path: Path {
segments: [
PathSegment {
ident: "println",
},
],
},
delimiter: MacroDelimiter::Paren,
tokens: TokenStream(`""`),
},
semi_token: Some,
},
Stmt::Expr(
Expr::Macro {
mac: Macro {
path: Path {
segments: [
PathSegment {
ident: "vec",
},
],
},
delimiter: MacroDelimiter::Bracket,
tokens: TokenStream(``),
},
},
None,
),
],
},
})
"###);
}
#[test]
fn test_early_parse_loop() {
// The following is an Expr::Loop followed by Expr::Tuple. It is not an
// Expr::Call.
let tokens = quote! {
loop {}
()
};
let stmts = Block::parse_within.parse2(tokens).unwrap();
snapshot!(stmts, @r###"
[
Stmt::Expr(
Expr::Loop {
body: Block {
stmts: [],
},
},
None,
),
Stmt::Expr(
Expr::Tuple,
None,
),
]
"###);
let tokens = quote! {
'a: loop {}
()
};
let stmts = Block::parse_within.parse2(tokens).unwrap();
snapshot!(stmts, @r###"
[
Stmt::Expr(
Expr::Loop {
label: Some(Label {
name: Lifetime {
ident: "a",
},
}),
body: Block {
stmts: [],
},
},
None,
),
Stmt::Expr(
Expr::Tuple,
None,
),
]
"###);
}

32
vendor/syn/tests/test_token_trees.rs vendored Normal file

@ -0,0 +1,32 @@
#![allow(clippy::uninlined_format_args)]
#[macro_use]
mod macros;
use proc_macro2::TokenStream;
use quote::quote;
use syn::Lit;
#[test]
fn test_struct() {
let input = "
#[derive(Debug, Clone)]
pub struct Item {
pub ident: Ident,
pub attrs: Vec<Attribute>,
}
";
snapshot!(input as TokenStream, @r###"
TokenStream(
`# [derive (Debug , Clone)] pub struct Item { pub ident : Ident , pub attrs : Vec < Attribute >, }`,
)
"###);
}
#[test]
fn test_literal_mangling() {
let code = "0_4";
let parsed: Lit = syn::parse_str(code).unwrap();
assert_eq!(code, quote!(#parsed).to_string());
}

397
vendor/syn/tests/test_ty.rs vendored Normal file

@ -0,0 +1,397 @@
#![allow(clippy::uninlined_format_args)]
#[macro_use]
mod macros;
use proc_macro2::{Delimiter, Group, Ident, Punct, Spacing, Span, TokenStream, TokenTree};
use quote::{quote, ToTokens as _};
use syn::punctuated::Punctuated;
use syn::{parse_quote, token, Token, Type, TypeTuple};
#[test]
fn test_mut_self() {
syn::parse_str::<Type>("fn(mut self)").unwrap();
syn::parse_str::<Type>("fn(mut self,)").unwrap();
syn::parse_str::<Type>("fn(mut self: ())").unwrap();
syn::parse_str::<Type>("fn(mut self: ...)").unwrap_err();
syn::parse_str::<Type>("fn(mut self: mut self)").unwrap_err();
syn::parse_str::<Type>("fn(mut self::T)").unwrap_err();
}
#[test]
fn test_macro_variable_type() {
// mimics the token stream corresponding to `$ty<T>`
let tokens = TokenStream::from_iter(vec![
TokenTree::Group(Group::new(Delimiter::None, quote! { ty })),
TokenTree::Punct(Punct::new('<', Spacing::Alone)),
TokenTree::Ident(Ident::new("T", Span::call_site())),
TokenTree::Punct(Punct::new('>', Spacing::Alone)),
]);
snapshot!(tokens as Type, @r###"
Type::Path {
path: Path {
segments: [
PathSegment {
ident: "ty",
arguments: PathArguments::AngleBracketed {
args: [
GenericArgument::Type(Type::Path {
path: Path {
segments: [
PathSegment {
ident: "T",
},
],
},
}),
],
},
},
],
},
}
"###);
// mimics the token stream corresponding to `$ty::<T>`
let tokens = TokenStream::from_iter(vec![
TokenTree::Group(Group::new(Delimiter::None, quote! { ty })),
TokenTree::Punct(Punct::new(':', Spacing::Joint)),
TokenTree::Punct(Punct::new(':', Spacing::Alone)),
TokenTree::Punct(Punct::new('<', Spacing::Alone)),
TokenTree::Ident(Ident::new("T", Span::call_site())),
TokenTree::Punct(Punct::new('>', Spacing::Alone)),
]);
snapshot!(tokens as Type, @r###"
Type::Path {
path: Path {
segments: [
PathSegment {
ident: "ty",
arguments: PathArguments::AngleBracketed {
colon2_token: Some,
args: [
GenericArgument::Type(Type::Path {
path: Path {
segments: [
PathSegment {
ident: "T",
},
],
},
}),
],
},
},
],
},
}
"###);
}
#[test]
fn test_group_angle_brackets() {
// mimics the token stream corresponding to `Option<$ty>`
let tokens = TokenStream::from_iter(vec![
TokenTree::Ident(Ident::new("Option", Span::call_site())),
TokenTree::Punct(Punct::new('<', Spacing::Alone)),
TokenTree::Group(Group::new(Delimiter::None, quote! { Vec<u8> })),
TokenTree::Punct(Punct::new('>', Spacing::Alone)),
]);
snapshot!(tokens as Type, @r###"
Type::Path {
path: Path {
segments: [
PathSegment {
ident: "Option",
arguments: PathArguments::AngleBracketed {
args: [
GenericArgument::Type(Type::Group {
elem: Type::Path {
path: Path {
segments: [
PathSegment {
ident: "Vec",
arguments: PathArguments::AngleBracketed {
args: [
GenericArgument::Type(Type::Path {
path: Path {
segments: [
PathSegment {
ident: "u8",
},
],
},
}),
],
},
},
],
},
},
}),
],
},
},
],
},
}
"###);
}
#[test]
fn test_group_colons() {
// mimics the token stream corresponding to `$ty::Item`
let tokens = TokenStream::from_iter(vec![
TokenTree::Group(Group::new(Delimiter::None, quote! { Vec<u8> })),
TokenTree::Punct(Punct::new(':', Spacing::Joint)),
TokenTree::Punct(Punct::new(':', Spacing::Alone)),
TokenTree::Ident(Ident::new("Item", Span::call_site())),
]);
snapshot!(tokens as Type, @r###"
Type::Path {
path: Path {
segments: [
PathSegment {
ident: "Vec",
arguments: PathArguments::AngleBracketed {
args: [
GenericArgument::Type(Type::Path {
path: Path {
segments: [
PathSegment {
ident: "u8",
},
],
},
}),
],
},
},
Token![::],
PathSegment {
ident: "Item",
},
],
},
}
"###);
let tokens = TokenStream::from_iter(vec![
TokenTree::Group(Group::new(Delimiter::None, quote! { [T] })),
TokenTree::Punct(Punct::new(':', Spacing::Joint)),
TokenTree::Punct(Punct::new(':', Spacing::Alone)),
TokenTree::Ident(Ident::new("Element", Span::call_site())),
]);
snapshot!(tokens as Type, @r###"
Type::Path {
qself: Some(QSelf {
ty: Type::Slice {
elem: Type::Path {
path: Path {
segments: [
PathSegment {
ident: "T",
},
],
},
},
},
position: 0,
}),
path: Path {
leading_colon: Some,
segments: [
PathSegment {
ident: "Element",
},
],
},
}
"###);
}
#[test]
fn test_trait_object() {
let tokens = quote!(dyn for<'a> Trait<'a> + 'static);
snapshot!(tokens as Type, @r###"
Type::TraitObject {
dyn_token: Some,
bounds: [
TypeParamBound::Trait(TraitBound {
lifetimes: Some(BoundLifetimes {
lifetimes: [
GenericParam::Lifetime(LifetimeParam {
lifetime: Lifetime {
ident: "a",
},
}),
],
}),
path: Path {
segments: [
PathSegment {
ident: "Trait",
arguments: PathArguments::AngleBracketed {
args: [
GenericArgument::Lifetime(Lifetime {
ident: "a",
}),
],
},
},
],
},
}),
Token![+],
TypeParamBound::Lifetime {
ident: "static",
},
],
}
"###);
let tokens = quote!(dyn 'a + Trait);
snapshot!(tokens as Type, @r###"
Type::TraitObject {
dyn_token: Some,
bounds: [
TypeParamBound::Lifetime {
ident: "a",
},
Token![+],
TypeParamBound::Trait(TraitBound {
path: Path {
segments: [
PathSegment {
ident: "Trait",
},
],
},
}),
],
}
"###);
// None of the following are valid Rust types.
syn::parse_str::<Type>("for<'a> dyn Trait<'a>").unwrap_err();
syn::parse_str::<Type>("dyn for<'a> 'a + Trait").unwrap_err();
}
#[test]
fn test_trailing_plus() {
#[rustfmt::skip]
let tokens = quote!(impl Trait +);
snapshot!(tokens as Type, @r###"
Type::ImplTrait {
bounds: [
TypeParamBound::Trait(TraitBound {
path: Path {
segments: [
PathSegment {
ident: "Trait",
},
],
},
}),
Token![+],
],
}
"###);
#[rustfmt::skip]
let tokens = quote!(dyn Trait +);
snapshot!(tokens as Type, @r###"
Type::TraitObject {
dyn_token: Some,
bounds: [
TypeParamBound::Trait(TraitBound {
path: Path {
segments: [
PathSegment {
ident: "Trait",
},
],
},
}),
Token![+],
],
}
"###);
#[rustfmt::skip]
let tokens = quote!(Trait +);
snapshot!(tokens as Type, @r###"
Type::TraitObject {
bounds: [
TypeParamBound::Trait(TraitBound {
path: Path {
segments: [
PathSegment {
ident: "Trait",
},
],
},
}),
Token![+],
],
}
"###);
}
#[test]
fn test_tuple_comma() {
let mut expr = TypeTuple {
paren_token: token::Paren::default(),
elems: Punctuated::new(),
};
snapshot!(expr.to_token_stream() as Type, @"Type::Tuple");
expr.elems.push_value(parse_quote!(_));
// Must not parse to Type::Paren
snapshot!(expr.to_token_stream() as Type, @r###"
Type::Tuple {
elems: [
Type::Infer,
Token![,],
],
}
"###);
expr.elems.push_punct(<Token![,]>::default());
snapshot!(expr.to_token_stream() as Type, @r###"
Type::Tuple {
elems: [
Type::Infer,
Token![,],
],
}
"###);
expr.elems.push_value(parse_quote!(_));
snapshot!(expr.to_token_stream() as Type, @r###"
Type::Tuple {
elems: [
Type::Infer,
Token![,],
Type::Infer,
],
}
"###);
expr.elems.push_punct(<Token![,]>::default());
snapshot!(expr.to_token_stream() as Type, @r###"
Type::Tuple {
elems: [
Type::Infer,
Token![,],
Type::Infer,
Token![,],
],
}
"###);
}

144
vendor/syn/tests/test_visibility.rs vendored Normal file

@ -0,0 +1,144 @@
#![allow(clippy::uninlined_format_args)]
#[macro_use]
mod macros;
use proc_macro2::{Delimiter, Group, Ident, Punct, Spacing, Span, TokenStream, TokenTree};
use syn::parse::{Parse, ParseStream};
use syn::{DeriveInput, Result, Visibility};
#[derive(Debug)]
struct VisRest {
vis: Visibility,
rest: TokenStream,
}
impl Parse for VisRest {
fn parse(input: ParseStream) -> Result<Self> {
Ok(VisRest {
vis: input.parse()?,
rest: input.parse()?,
})
}
}
macro_rules! assert_vis_parse {
($input:expr, Ok($p:pat)) => {
assert_vis_parse!($input, Ok($p) + "");
};
($input:expr, Ok($p:pat) + $rest:expr) => {
let expected = $rest.parse::<TokenStream>().unwrap();
let parse: VisRest = syn::parse_str($input).unwrap();
match parse.vis {
$p => {}
_ => panic!("Expected {}, got {:?}", stringify!($p), parse.vis),
}
// NOTE: Round-trips through `to_string` to avoid potential whitespace
// diffs.
assert_eq!(parse.rest.to_string(), expected.to_string());
};
($input:expr, Err) => {
syn::parse2::<VisRest>($input.parse().unwrap()).unwrap_err();
};
}
#[test]
fn test_pub() {
assert_vis_parse!("pub", Ok(Visibility::Public(_)));
}
#[test]
fn test_inherited() {
assert_vis_parse!("", Ok(Visibility::Inherited));
}
#[test]
fn test_in() {
assert_vis_parse!("pub(in foo::bar)", Ok(Visibility::Restricted(_)));
}
#[test]
fn test_pub_crate() {
assert_vis_parse!("pub(crate)", Ok(Visibility::Restricted(_)));
}
#[test]
fn test_pub_self() {
assert_vis_parse!("pub(self)", Ok(Visibility::Restricted(_)));
}
#[test]
fn test_pub_super() {
assert_vis_parse!("pub(super)", Ok(Visibility::Restricted(_)));
}
#[test]
fn test_missing_in() {
assert_vis_parse!("pub(foo::bar)", Ok(Visibility::Public(_)) + "(foo::bar)");
}
#[test]
fn test_missing_in_path() {
assert_vis_parse!("pub(in)", Err);
}
#[test]
fn test_crate_path() {
assert_vis_parse!(
"pub(crate::A, crate::B)",
Ok(Visibility::Public(_)) + "(crate::A, crate::B)"
);
}
#[test]
fn test_junk_after_in() {
assert_vis_parse!("pub(in some::path @@garbage)", Err);
}
#[test]
fn test_empty_group_vis() {
// mimics `struct S { $vis $field: () }` where $vis is empty
let tokens = TokenStream::from_iter(vec![
TokenTree::Ident(Ident::new("struct", Span::call_site())),
TokenTree::Ident(Ident::new("S", Span::call_site())),
TokenTree::Group(Group::new(
Delimiter::Brace,
TokenStream::from_iter(vec![
TokenTree::Group(Group::new(Delimiter::None, TokenStream::new())),
TokenTree::Group(Group::new(
Delimiter::None,
TokenStream::from_iter(vec![TokenTree::Ident(Ident::new(
"f",
Span::call_site(),
))]),
)),
TokenTree::Punct(Punct::new(':', Spacing::Alone)),
TokenTree::Group(Group::new(Delimiter::Parenthesis, TokenStream::new())),
]),
)),
]);
snapshot!(tokens as DeriveInput, @r###"
DeriveInput {
vis: Visibility::Inherited,
ident: "S",
generics: Generics,
data: Data::Struct {
fields: Fields::Named {
named: [
Field {
vis: Visibility::Inherited,
ident: Some("f"),
colon_token: Some,
ty: Type::Tuple,
},
],
},
},
}
"###);
}

33
vendor/syn/tests/zzz_stable.rs vendored Normal file

@ -0,0 +1,33 @@
#![cfg(syn_disable_nightly_tests)]
use std::io::{self, Write};
use termcolor::{Color, ColorChoice, ColorSpec, StandardStream, WriteColor};
const MSG: &str = "\
‖ WARNING:
‖ This is not a nightly compiler so not all tests were able to
‖ run. Syn includes tests that compare Syn's parser against the
‖ compiler's parser, which requires access to unstable librustc
‖ data structures and a nightly compiler.
";
#[test]
fn notice() -> io::Result<()> {
let header = "WARNING";
let index_of_header = MSG.find(header).unwrap();
let before = &MSG[..index_of_header];
let after = &MSG[index_of_header + header.len()..];
let mut stderr = StandardStream::stderr(ColorChoice::Auto);
stderr.set_color(ColorSpec::new().set_fg(Some(Color::Yellow)))?;
write!(&mut stderr, "{}", before)?;
stderr.set_color(ColorSpec::new().set_bold(true).set_fg(Some(Color::Yellow)))?;
write!(&mut stderr, "{}", header)?;
stderr.set_color(ColorSpec::new().set_fg(Some(Color::Yellow)))?;
write!(&mut stderr, "{}", after)?;
stderr.reset()?;
Ok(())
}