Fix RPM build (reverts CI changes, which will need to be un-reverted or made conditional) and vendor Rust dependencies to make builds much faster in any CI system.

commit d5ca4e5f52 (parent 373ca30269)
Author: Adam Ierymenko
Date:   2022-06-08 07:32:16 -04:00
12611 changed files with 2898014 additions and 284 deletions
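
For reference, the standard Cargo vendoring workflow looks like the sketch below. This is a minimal illustration and not the exact commands or configuration used in this repository; the vendor directory name is only inferred from the zeroidc/vendor/ paths in this diff. The idea is to run cargo vendor once, commit its output, and point Cargo at that directory so builds resolve dependencies without network access.

    # inside the crate that owns the dependencies (zeroidc/ here, assumed)
    cargo vendor

    # .cargo/config.toml -- cargo vendor prints this snippet when it finishes
    [source.crates-io]
    replace-with = "vendored-sources"

    [source.vendored-sources]
    directory = "vendor"

CI can then build with cargo build --offline (or --frozen) so nothing needs to be downloaded from crates.io, which is what makes vendored builds faster and more predictable in any CI system.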

zeroidc/vendor/syn/tests/.gitignore vendored Normal file (+1)

@@ -0,0 +1 @@
/*.pending-snap

zeroidc/vendor/syn/tests/common/eq.rs vendored Normal file (+760)

@@ -0,0 +1,760 @@
#![allow(unused_macro_rules)]
extern crate rustc_ast;
extern crate rustc_data_structures;
extern crate rustc_span;
use rustc_ast::ast::AngleBracketedArg;
use rustc_ast::ast::AngleBracketedArgs;
use rustc_ast::ast::AnonConst;
use rustc_ast::ast::Arm;
use rustc_ast::ast::AssocConstraint;
use rustc_ast::ast::AssocConstraintKind;
use rustc_ast::ast::AssocItemKind;
use rustc_ast::ast::Async;
use rustc_ast::ast::AttrId;
use rustc_ast::ast::AttrItem;
use rustc_ast::ast::AttrKind;
use rustc_ast::ast::AttrStyle;
use rustc_ast::ast::Attribute;
use rustc_ast::ast::BareFnTy;
use rustc_ast::ast::BinOpKind;
use rustc_ast::ast::BindingMode;
use rustc_ast::ast::Block;
use rustc_ast::ast::BlockCheckMode;
use rustc_ast::ast::BorrowKind;
use rustc_ast::ast::CaptureBy;
use rustc_ast::ast::Const;
use rustc_ast::ast::Crate;
use rustc_ast::ast::Defaultness;
use rustc_ast::ast::EnumDef;
use rustc_ast::ast::Expr;
use rustc_ast::ast::ExprField;
use rustc_ast::ast::ExprKind;
use rustc_ast::ast::Extern;
use rustc_ast::ast::FieldDef;
use rustc_ast::ast::FloatTy;
use rustc_ast::ast::Fn;
use rustc_ast::ast::FnDecl;
use rustc_ast::ast::FnHeader;
use rustc_ast::ast::FnRetTy;
use rustc_ast::ast::FnSig;
use rustc_ast::ast::ForeignItemKind;
use rustc_ast::ast::ForeignMod;
use rustc_ast::ast::GenericArg;
use rustc_ast::ast::GenericArgs;
use rustc_ast::ast::GenericBound;
use rustc_ast::ast::GenericParam;
use rustc_ast::ast::GenericParamKind;
use rustc_ast::ast::Generics;
use rustc_ast::ast::Impl;
use rustc_ast::ast::ImplPolarity;
use rustc_ast::ast::Inline;
use rustc_ast::ast::InlineAsm;
use rustc_ast::ast::InlineAsmOperand;
use rustc_ast::ast::InlineAsmOptions;
use rustc_ast::ast::InlineAsmRegOrRegClass;
use rustc_ast::ast::InlineAsmSym;
use rustc_ast::ast::InlineAsmTemplatePiece;
use rustc_ast::ast::IntTy;
use rustc_ast::ast::IsAuto;
use rustc_ast::ast::Item;
use rustc_ast::ast::ItemKind;
use rustc_ast::ast::Label;
use rustc_ast::ast::Lifetime;
use rustc_ast::ast::Lit;
use rustc_ast::ast::LitFloatType;
use rustc_ast::ast::LitIntType;
use rustc_ast::ast::LitKind;
use rustc_ast::ast::Local;
use rustc_ast::ast::LocalKind;
use rustc_ast::ast::MacArgs;
use rustc_ast::ast::MacArgsEq;
use rustc_ast::ast::MacCall;
use rustc_ast::ast::MacCallStmt;
use rustc_ast::ast::MacDelimiter;
use rustc_ast::ast::MacStmtStyle;
use rustc_ast::ast::MacroDef;
use rustc_ast::ast::ModKind;
use rustc_ast::ast::ModSpans;
use rustc_ast::ast::Movability;
use rustc_ast::ast::MutTy;
use rustc_ast::ast::Mutability;
use rustc_ast::ast::NodeId;
use rustc_ast::ast::Param;
use rustc_ast::ast::ParenthesizedArgs;
use rustc_ast::ast::Pat;
use rustc_ast::ast::PatField;
use rustc_ast::ast::PatKind;
use rustc_ast::ast::Path;
use rustc_ast::ast::PathSegment;
use rustc_ast::ast::PolyTraitRef;
use rustc_ast::ast::QSelf;
use rustc_ast::ast::RangeEnd;
use rustc_ast::ast::RangeLimits;
use rustc_ast::ast::RangeSyntax;
use rustc_ast::ast::Stmt;
use rustc_ast::ast::StmtKind;
use rustc_ast::ast::StrLit;
use rustc_ast::ast::StrStyle;
use rustc_ast::ast::StructExpr;
use rustc_ast::ast::StructRest;
use rustc_ast::ast::Term;
use rustc_ast::ast::Trait;
use rustc_ast::ast::TraitBoundModifier;
use rustc_ast::ast::TraitObjectSyntax;
use rustc_ast::ast::TraitRef;
use rustc_ast::ast::Ty;
use rustc_ast::ast::TyAlias;
use rustc_ast::ast::TyAliasWhereClause;
use rustc_ast::ast::TyKind;
use rustc_ast::ast::UintTy;
use rustc_ast::ast::UnOp;
use rustc_ast::ast::Unsafe;
use rustc_ast::ast::UnsafeSource;
use rustc_ast::ast::UseTree;
use rustc_ast::ast::UseTreeKind;
use rustc_ast::ast::Variant;
use rustc_ast::ast::VariantData;
use rustc_ast::ast::Visibility;
use rustc_ast::ast::VisibilityKind;
use rustc_ast::ast::WhereBoundPredicate;
use rustc_ast::ast::WhereClause;
use rustc_ast::ast::WhereEqPredicate;
use rustc_ast::ast::WherePredicate;
use rustc_ast::ast::WhereRegionPredicate;
use rustc_ast::ptr::P;
use rustc_ast::token::{self, CommentKind, Delimiter, Nonterminal, Token, TokenKind};
use rustc_ast::tokenstream::{
AttrAnnotatedTokenStream, AttrAnnotatedTokenTree, AttributesData, DelimSpan, LazyTokenStream,
Spacing, TokenStream, TokenTree,
};
use rustc_data_structures::sync::Lrc;
use rustc_data_structures::thin_vec::ThinVec;
use rustc_span::source_map::Spanned;
use rustc_span::symbol::{sym, Ident};
use rustc_span::{Span, Symbol, SyntaxContext, DUMMY_SP};
pub trait SpanlessEq {
fn eq(&self, other: &Self) -> bool;
}
impl<T: ?Sized + SpanlessEq> SpanlessEq for Box<T> {
fn eq(&self, other: &Self) -> bool {
SpanlessEq::eq(&**self, &**other)
}
}
impl<T: SpanlessEq> SpanlessEq for P<T> {
fn eq(&self, other: &Self) -> bool {
SpanlessEq::eq(&**self, &**other)
}
}
impl<T: ?Sized + SpanlessEq> SpanlessEq for Lrc<T> {
fn eq(&self, other: &Self) -> bool {
SpanlessEq::eq(&**self, &**other)
}
}
impl<T: SpanlessEq> SpanlessEq for Option<T> {
fn eq(&self, other: &Self) -> bool {
match (self, other) {
(None, None) => true,
(Some(this), Some(other)) => SpanlessEq::eq(this, other),
_ => false,
}
}
}
impl<T: SpanlessEq> SpanlessEq for [T] {
fn eq(&self, other: &Self) -> bool {
self.len() == other.len() && self.iter().zip(other).all(|(a, b)| SpanlessEq::eq(a, b))
}
}
impl<T: SpanlessEq> SpanlessEq for Vec<T> {
fn eq(&self, other: &Self) -> bool {
<[T] as SpanlessEq>::eq(self, other)
}
}
impl<T: SpanlessEq> SpanlessEq for ThinVec<T> {
fn eq(&self, other: &Self) -> bool {
self.len() == other.len()
&& self
.iter()
.zip(other.iter())
.all(|(a, b)| SpanlessEq::eq(a, b))
}
}
impl<T: SpanlessEq> SpanlessEq for Spanned<T> {
fn eq(&self, other: &Self) -> bool {
SpanlessEq::eq(&self.node, &other.node)
}
}
impl<A: SpanlessEq, B: SpanlessEq> SpanlessEq for (A, B) {
fn eq(&self, other: &Self) -> bool {
SpanlessEq::eq(&self.0, &other.0) && SpanlessEq::eq(&self.1, &other.1)
}
}
impl<A: SpanlessEq, B: SpanlessEq, C: SpanlessEq> SpanlessEq for (A, B, C) {
fn eq(&self, other: &Self) -> bool {
SpanlessEq::eq(&self.0, &other.0)
&& SpanlessEq::eq(&self.1, &other.1)
&& SpanlessEq::eq(&self.2, &other.2)
}
}
macro_rules! spanless_eq_true {
($name:ty) => {
impl SpanlessEq for $name {
fn eq(&self, _other: &Self) -> bool {
true
}
}
};
}
spanless_eq_true!(Span);
spanless_eq_true!(DelimSpan);
spanless_eq_true!(AttrId);
spanless_eq_true!(NodeId);
spanless_eq_true!(SyntaxContext);
spanless_eq_true!(Spacing);
macro_rules! spanless_eq_partial_eq {
($name:ty) => {
impl SpanlessEq for $name {
fn eq(&self, other: &Self) -> bool {
PartialEq::eq(self, other)
}
}
};
}
spanless_eq_partial_eq!(bool);
spanless_eq_partial_eq!(u8);
spanless_eq_partial_eq!(u16);
spanless_eq_partial_eq!(u128);
spanless_eq_partial_eq!(usize);
spanless_eq_partial_eq!(char);
spanless_eq_partial_eq!(String);
spanless_eq_partial_eq!(Symbol);
spanless_eq_partial_eq!(CommentKind);
spanless_eq_partial_eq!(Delimiter);
spanless_eq_partial_eq!(InlineAsmOptions);
spanless_eq_partial_eq!(token::LitKind);
macro_rules! spanless_eq_struct {
{
$($name:ident)::+ $(<$param:ident>)?
$([$field:tt $this:ident $other:ident])*
$(![$ignore:tt])*;
} => {
impl $(<$param: SpanlessEq>)* SpanlessEq for $($name)::+ $(<$param>)* {
fn eq(&self, other: &Self) -> bool {
let $($name)::+ { $($field: $this,)* $($ignore: _,)* } = self;
let $($name)::+ { $($field: $other,)* $($ignore: _,)* } = other;
true $(&& SpanlessEq::eq($this, $other))*
}
}
};
{
$($name:ident)::+ $(<$param:ident>)?
$([$field:tt $this:ident $other:ident])*
$(![$ignore:tt])*;
!$next:tt
$($rest:tt)*
} => {
spanless_eq_struct! {
$($name)::+ $(<$param>)*
$([$field $this $other])*
$(![$ignore])*
![$next];
$($rest)*
}
};
{
$($name:ident)::+ $(<$param:ident>)?
$([$field:tt $this:ident $other:ident])*
$(![$ignore:tt])*;
$next:tt
$($rest:tt)*
} => {
spanless_eq_struct! {
$($name)::+ $(<$param>)*
$([$field $this $other])*
[$next this other]
$(![$ignore])*;
$($rest)*
}
};
}
macro_rules! spanless_eq_enum {
{
$($name:ident)::+;
$([$($variant:ident)::+; $([$field:tt $this:ident $other:ident])* $(![$ignore:tt])*])*
} => {
impl SpanlessEq for $($name)::+ {
fn eq(&self, other: &Self) -> bool {
match self {
$(
$($variant)::+ { .. } => {}
)*
}
#[allow(unreachable_patterns)]
match (self, other) {
$(
(
$($variant)::+ { $($field: $this,)* $($ignore: _,)* },
$($variant)::+ { $($field: $other,)* $($ignore: _,)* },
) => {
true $(&& SpanlessEq::eq($this, $other))*
}
)*
_ => false,
}
}
}
};
{
$($name:ident)::+;
$([$($variant:ident)::+; $($fields:tt)*])*
$next:ident [$([$($named:tt)*])* $(![$ignore:tt])*] (!$i:tt $($field:tt)*)
$($rest:tt)*
} => {
spanless_eq_enum! {
$($name)::+;
$([$($variant)::+; $($fields)*])*
$next [$([$($named)*])* $(![$ignore])* ![$i]] ($($field)*)
$($rest)*
}
};
{
$($name:ident)::+;
$([$($variant:ident)::+; $($fields:tt)*])*
$next:ident [$([$($named:tt)*])* $(![$ignore:tt])*] ($i:tt $($field:tt)*)
$($rest:tt)*
} => {
spanless_eq_enum! {
$($name)::+;
$([$($variant)::+; $($fields)*])*
$next [$([$($named)*])* [$i this other] $(![$ignore])*] ($($field)*)
$($rest)*
}
};
{
$($name:ident)::+;
$([$($variant:ident)::+; $($fields:tt)*])*
$next:ident [$($named:tt)*] ()
$($rest:tt)*
} => {
spanless_eq_enum! {
$($name)::+;
$([$($variant)::+; $($fields)*])*
[$($name)::+::$next; $($named)*]
$($rest)*
}
};
{
$($name:ident)::+;
$([$($variant:ident)::+; $($fields:tt)*])*
$next:ident ($($field:tt)*)
$($rest:tt)*
} => {
spanless_eq_enum! {
$($name)::+;
$([$($variant)::+; $($fields)*])*
$next [] ($($field)*)
$($rest)*
}
};
{
$($name:ident)::+;
$([$($variant:ident)::+; $($fields:tt)*])*
$next:ident
$($rest:tt)*
} => {
spanless_eq_enum! {
$($name)::+;
$([$($variant)::+; $($fields)*])*
[$($name)::+::$next;]
$($rest)*
}
};
}
spanless_eq_struct!(AngleBracketedArgs; span args);
spanless_eq_struct!(AnonConst; id value);
spanless_eq_struct!(Arm; attrs pat guard body span id is_placeholder);
spanless_eq_struct!(AssocConstraint; id ident gen_args kind span);
spanless_eq_struct!(AttrAnnotatedTokenStream; 0);
spanless_eq_struct!(AttrItem; path args tokens);
spanless_eq_struct!(Attribute; kind id style span);
spanless_eq_struct!(AttributesData; attrs tokens);
spanless_eq_struct!(BareFnTy; unsafety ext generic_params decl decl_span);
spanless_eq_struct!(Block; stmts id rules span tokens could_be_bare_literal);
spanless_eq_struct!(Crate; attrs items spans id is_placeholder);
spanless_eq_struct!(EnumDef; variants);
spanless_eq_struct!(Expr; id kind span attrs !tokens);
spanless_eq_struct!(ExprField; attrs id span ident expr is_shorthand is_placeholder);
spanless_eq_struct!(FieldDef; attrs id span vis ident ty is_placeholder);
spanless_eq_struct!(FnDecl; inputs output);
spanless_eq_struct!(FnHeader; constness asyncness unsafety ext);
spanless_eq_struct!(Fn; defaultness generics sig body);
spanless_eq_struct!(FnSig; header decl span);
spanless_eq_struct!(ForeignMod; unsafety abi items);
spanless_eq_struct!(GenericParam; id ident attrs bounds is_placeholder kind !colon_span);
spanless_eq_struct!(Generics; params where_clause span);
spanless_eq_struct!(Impl; defaultness unsafety generics constness polarity of_trait self_ty items);
spanless_eq_struct!(InlineAsm; template template_strs operands clobber_abis options line_spans);
spanless_eq_struct!(InlineAsmSym; id qself path);
spanless_eq_struct!(Item<K>; attrs id span vis ident kind !tokens);
spanless_eq_struct!(Label; ident);
spanless_eq_struct!(Lifetime; id ident);
spanless_eq_struct!(Lit; token kind span);
spanless_eq_struct!(Local; pat ty kind id span attrs !tokens);
spanless_eq_struct!(MacCall; path args prior_type_ascription);
spanless_eq_struct!(MacCallStmt; mac style attrs tokens);
spanless_eq_struct!(MacroDef; body macro_rules);
spanless_eq_struct!(ModSpans; !inner_span !inject_use_span);
spanless_eq_struct!(MutTy; ty mutbl);
spanless_eq_struct!(ParenthesizedArgs; span inputs inputs_span output);
spanless_eq_struct!(Pat; id kind span tokens);
spanless_eq_struct!(PatField; ident pat is_shorthand attrs id span is_placeholder);
spanless_eq_struct!(Path; span segments tokens);
spanless_eq_struct!(PathSegment; ident id args);
spanless_eq_struct!(PolyTraitRef; bound_generic_params trait_ref span);
spanless_eq_struct!(QSelf; ty path_span position);
spanless_eq_struct!(Stmt; id kind span);
spanless_eq_struct!(StrLit; style symbol suffix span symbol_unescaped);
spanless_eq_struct!(StructExpr; qself path fields rest);
spanless_eq_struct!(Token; kind span);
spanless_eq_struct!(Trait; unsafety is_auto generics bounds items);
spanless_eq_struct!(TraitRef; path ref_id);
spanless_eq_struct!(Ty; id kind span tokens);
spanless_eq_struct!(TyAlias; defaultness generics where_clauses !where_predicates_split bounds ty);
spanless_eq_struct!(TyAliasWhereClause; !0 1);
spanless_eq_struct!(UseTree; prefix kind span);
spanless_eq_struct!(Variant; attrs id span !vis ident data disr_expr is_placeholder);
spanless_eq_struct!(Visibility; kind span tokens);
spanless_eq_struct!(WhereBoundPredicate; span bound_generic_params bounded_ty bounds);
spanless_eq_struct!(WhereClause; has_where_token predicates span);
spanless_eq_struct!(WhereEqPredicate; id span lhs_ty rhs_ty);
spanless_eq_struct!(WhereRegionPredicate; span lifetime bounds);
spanless_eq_struct!(token::Lit; kind symbol suffix);
spanless_eq_enum!(AngleBracketedArg; Arg(0) Constraint(0));
spanless_eq_enum!(AssocItemKind; Const(0 1 2) Fn(0) TyAlias(0) MacCall(0));
spanless_eq_enum!(AssocConstraintKind; Equality(term) Bound(bounds));
spanless_eq_enum!(Async; Yes(span closure_id return_impl_trait_id) No);
spanless_eq_enum!(AttrAnnotatedTokenTree; Token(0) Delimited(0 1 2) Attributes(0));
spanless_eq_enum!(AttrStyle; Outer Inner);
spanless_eq_enum!(BinOpKind; Add Sub Mul Div Rem And Or BitXor BitAnd BitOr Shl Shr Eq Lt Le Ne Ge Gt);
spanless_eq_enum!(BindingMode; ByRef(0) ByValue(0));
spanless_eq_enum!(BlockCheckMode; Default Unsafe(0));
spanless_eq_enum!(BorrowKind; Ref Raw);
spanless_eq_enum!(CaptureBy; Value Ref);
spanless_eq_enum!(Const; Yes(0) No);
spanless_eq_enum!(Defaultness; Default(0) Final);
spanless_eq_enum!(Extern; None Implicit Explicit(0));
spanless_eq_enum!(FloatTy; F32 F64);
spanless_eq_enum!(FnRetTy; Default(0) Ty(0));
spanless_eq_enum!(ForeignItemKind; Static(0 1 2) Fn(0) TyAlias(0) MacCall(0));
spanless_eq_enum!(GenericArg; Lifetime(0) Type(0) Const(0));
spanless_eq_enum!(GenericArgs; AngleBracketed(0) Parenthesized(0));
spanless_eq_enum!(GenericBound; Trait(0 1) Outlives(0));
spanless_eq_enum!(GenericParamKind; Lifetime Type(default) Const(ty kw_span default));
spanless_eq_enum!(ImplPolarity; Positive Negative(0));
spanless_eq_enum!(Inline; Yes No);
spanless_eq_enum!(InlineAsmRegOrRegClass; Reg(0) RegClass(0));
spanless_eq_enum!(InlineAsmTemplatePiece; String(0) Placeholder(operand_idx modifier span));
spanless_eq_enum!(IntTy; Isize I8 I16 I32 I64 I128);
spanless_eq_enum!(IsAuto; Yes No);
spanless_eq_enum!(LitFloatType; Suffixed(0) Unsuffixed);
spanless_eq_enum!(LitIntType; Signed(0) Unsigned(0) Unsuffixed);
spanless_eq_enum!(LocalKind; Decl Init(0) InitElse(0 1));
spanless_eq_enum!(MacArgs; Empty Delimited(0 1 2) Eq(0 1));
spanless_eq_enum!(MacArgsEq; Ast(0) Hir(0));
spanless_eq_enum!(MacDelimiter; Parenthesis Bracket Brace);
spanless_eq_enum!(MacStmtStyle; Semicolon Braces NoBraces);
spanless_eq_enum!(ModKind; Loaded(0 1 2) Unloaded);
spanless_eq_enum!(Movability; Static Movable);
spanless_eq_enum!(Mutability; Mut Not);
spanless_eq_enum!(RangeEnd; Included(0) Excluded);
spanless_eq_enum!(RangeLimits; HalfOpen Closed);
spanless_eq_enum!(StmtKind; Local(0) Item(0) Expr(0) Semi(0) Empty MacCall(0));
spanless_eq_enum!(StrStyle; Cooked Raw(0));
spanless_eq_enum!(StructRest; Base(0) Rest(0) None);
spanless_eq_enum!(Term; Ty(0) Const(0));
spanless_eq_enum!(TokenTree; Token(0) Delimited(0 1 2));
spanless_eq_enum!(TraitBoundModifier; None Maybe MaybeConst MaybeConstMaybe);
spanless_eq_enum!(TraitObjectSyntax; Dyn None);
spanless_eq_enum!(UintTy; Usize U8 U16 U32 U64 U128);
spanless_eq_enum!(UnOp; Deref Not Neg);
spanless_eq_enum!(Unsafe; Yes(0) No);
spanless_eq_enum!(UnsafeSource; CompilerGenerated UserProvided);
spanless_eq_enum!(UseTreeKind; Simple(0 1 2) Nested(0) Glob);
spanless_eq_enum!(VariantData; Struct(0 1) Tuple(0 1) Unit(0));
spanless_eq_enum!(VisibilityKind; Public Restricted(path id) Inherited);
spanless_eq_enum!(WherePredicate; BoundPredicate(0) RegionPredicate(0) EqPredicate(0));
spanless_eq_enum!(ExprKind; Box(0) Array(0) ConstBlock(0) Call(0 1)
MethodCall(0 1 2) Tup(0) Binary(0 1 2) Unary(0 1) Lit(0) Cast(0 1) Type(0 1)
Let(0 1 2) If(0 1 2) While(0 1 2) ForLoop(0 1 2 3) Loop(0 1) Match(0 1)
Closure(0 1 2 3 4 5) Block(0 1) Async(0 1 2) Await(0) TryBlock(0)
Assign(0 1 2) AssignOp(0 1 2) Field(0 1) Index(0 1) Underscore Range(0 1 2)
Path(0 1) AddrOf(0 1 2) Break(0 1) Continue(0) Ret(0) InlineAsm(0)
MacCall(0) Struct(0) Repeat(0 1) Paren(0) Try(0) Yield(0) Yeet(0) Err);
spanless_eq_enum!(InlineAsmOperand; In(reg expr) Out(reg late expr)
InOut(reg late expr) SplitInOut(reg late in_expr out_expr) Const(anon_const)
Sym(sym));
spanless_eq_enum!(ItemKind; ExternCrate(0) Use(0) Static(0 1 2) Const(0 1 2)
Fn(0) Mod(0 1) ForeignMod(0) GlobalAsm(0) TyAlias(0) Enum(0 1) Struct(0 1)
Union(0 1) Trait(0) TraitAlias(0 1) Impl(0) MacCall(0) MacroDef(0));
spanless_eq_enum!(LitKind; Str(0 1) ByteStr(0) Byte(0) Char(0) Int(0 1)
Float(0 1) Bool(0) Err(0));
spanless_eq_enum!(PatKind; Wild Ident(0 1 2) Struct(0 1 2 3) TupleStruct(0 1 2)
Or(0) Path(0 1) Tuple(0) Box(0) Ref(0 1) Lit(0) Range(0 1 2) Slice(0) Rest
Paren(0) MacCall(0));
spanless_eq_enum!(TyKind; Slice(0) Array(0 1) Ptr(0) Rptr(0 1) BareFn(0) Never
Tup(0) Path(0 1) TraitObject(0 1) ImplTrait(0 1) Paren(0) Typeof(0) Infer
ImplicitSelf MacCall(0) Err CVarArgs);
impl SpanlessEq for Ident {
fn eq(&self, other: &Self) -> bool {
self.as_str() == other.as_str()
}
}
impl SpanlessEq for RangeSyntax {
fn eq(&self, _other: &Self) -> bool {
match self {
RangeSyntax::DotDotDot | RangeSyntax::DotDotEq => true,
}
}
}
impl SpanlessEq for Param {
fn eq(&self, other: &Self) -> bool {
let Param {
attrs,
ty,
pat,
id,
span: _,
is_placeholder,
} = self;
let Param {
attrs: attrs2,
ty: ty2,
pat: pat2,
id: id2,
span: _,
is_placeholder: is_placeholder2,
} = other;
SpanlessEq::eq(id, id2)
&& SpanlessEq::eq(is_placeholder, is_placeholder2)
&& (matches!(ty.kind, TyKind::Err)
|| matches!(ty2.kind, TyKind::Err)
|| SpanlessEq::eq(attrs, attrs2)
&& SpanlessEq::eq(ty, ty2)
&& SpanlessEq::eq(pat, pat2))
}
}
impl SpanlessEq for TokenKind {
fn eq(&self, other: &Self) -> bool {
match (self, other) {
(TokenKind::Literal(this), TokenKind::Literal(other)) => SpanlessEq::eq(this, other),
(TokenKind::DotDotEq, _) | (TokenKind::DotDotDot, _) => match other {
TokenKind::DotDotEq | TokenKind::DotDotDot => true,
_ => false,
},
(TokenKind::Interpolated(this), TokenKind::Interpolated(other)) => {
match (this.as_ref(), other.as_ref()) {
(Nonterminal::NtExpr(this), Nonterminal::NtExpr(other)) => {
SpanlessEq::eq(this, other)
}
_ => this == other,
}
}
_ => self == other,
}
}
}
impl SpanlessEq for TokenStream {
fn eq(&self, other: &Self) -> bool {
let mut this_trees = self.trees();
let mut other_trees = other.trees();
loop {
let this = match this_trees.next() {
None => return other_trees.next().is_none(),
Some(tree) => tree,
};
let other = match other_trees.next() {
None => return false,
Some(tree) => tree,
};
if SpanlessEq::eq(this, other) {
continue;
}
if let (TokenTree::Token(this), TokenTree::Token(other)) = (this, other) {
if match (&this.kind, &other.kind) {
(TokenKind::Literal(this), TokenKind::Literal(other)) => {
SpanlessEq::eq(this, other)
}
(TokenKind::DocComment(_kind, style, symbol), TokenKind::Pound) => {
doc_comment(*style, *symbol, &mut other_trees)
}
(TokenKind::Pound, TokenKind::DocComment(_kind, style, symbol)) => {
doc_comment(*style, *symbol, &mut this_trees)
}
_ => false,
} {
continue;
}
}
return false;
}
}
}
fn doc_comment<'a>(
style: AttrStyle,
unescaped: Symbol,
trees: &mut impl Iterator<Item = &'a TokenTree>,
) -> bool {
if match style {
AttrStyle::Outer => false,
AttrStyle::Inner => true,
} {
match trees.next() {
Some(TokenTree::Token(Token {
kind: TokenKind::Not,
span: _,
})) => {}
_ => return false,
}
}
let stream = match trees.next() {
Some(TokenTree::Delimited(_span, Delimiter::Bracket, stream)) => stream,
_ => return false,
};
let mut trees = stream.trees();
match trees.next() {
Some(TokenTree::Token(Token {
kind: TokenKind::Ident(symbol, false),
span: _,
})) if *symbol == sym::doc => {}
_ => return false,
}
match trees.next() {
Some(TokenTree::Token(Token {
kind: TokenKind::Eq,
span: _,
})) => {}
_ => return false,
}
match trees.next() {
Some(TokenTree::Token(token)) => {
is_escaped_literal_token(token, unescaped) && trees.next().is_none()
}
_ => false,
}
}
fn is_escaped_literal_token(token: &Token, unescaped: Symbol) -> bool {
match token {
Token {
kind: TokenKind::Literal(lit),
span: _,
} => match Lit::from_lit_token(*lit, DUMMY_SP) {
Ok(lit) => is_escaped_literal(&lit, unescaped),
Err(_) => false,
},
Token {
kind: TokenKind::Interpolated(nonterminal),
span: _,
} => match nonterminal.as_ref() {
Nonterminal::NtExpr(expr) => match &expr.kind {
ExprKind::Lit(lit) => is_escaped_literal(lit, unescaped),
_ => false,
},
_ => false,
},
_ => false,
}
}
fn is_escaped_literal_macro_arg(arg: &MacArgsEq, unescaped: Symbol) -> bool {
match arg {
MacArgsEq::Ast(expr) => match &expr.kind {
ExprKind::Lit(lit) => is_escaped_literal(lit, unescaped),
_ => false,
},
MacArgsEq::Hir(lit) => is_escaped_literal(lit, unescaped),
}
}
fn is_escaped_literal(lit: &Lit, unescaped: Symbol) -> bool {
match lit {
Lit {
token:
token::Lit {
kind: token::LitKind::Str,
symbol: _,
suffix: None,
},
kind: LitKind::Str(symbol, StrStyle::Cooked),
span: _,
} => symbol.as_str().replace('\r', "") == unescaped.as_str().replace('\r', ""),
_ => false,
}
}
impl SpanlessEq for LazyTokenStream {
fn eq(&self, other: &Self) -> bool {
let this = self.create_token_stream();
let other = other.create_token_stream();
SpanlessEq::eq(&this, &other)
}
}
impl SpanlessEq for AttrKind {
fn eq(&self, other: &Self) -> bool {
match (self, other) {
(AttrKind::Normal(item, tokens), AttrKind::Normal(item2, tokens2)) => {
SpanlessEq::eq(item, item2) && SpanlessEq::eq(tokens, tokens2)
}
(AttrKind::DocComment(kind, symbol), AttrKind::DocComment(kind2, symbol2)) => {
SpanlessEq::eq(kind, kind2) && SpanlessEq::eq(symbol, symbol2)
}
(AttrKind::DocComment(kind, unescaped), AttrKind::Normal(item2, _tokens)) => {
match kind {
CommentKind::Line | CommentKind::Block => {}
}
let path = Path::from_ident(Ident::with_dummy_span(sym::doc));
SpanlessEq::eq(&path, &item2.path)
&& match &item2.args {
MacArgs::Empty | MacArgs::Delimited(..) => false,
MacArgs::Eq(_span, token) => {
is_escaped_literal_macro_arg(token, *unescaped)
}
}
}
(AttrKind::Normal(..), AttrKind::DocComment(..)) => SpanlessEq::eq(other, self),
}
}
}

zeroidc/vendor/syn/tests/common/mod.rs vendored Normal file (+28)

@@ -0,0 +1,28 @@
#![allow(dead_code)]
#![allow(clippy::module_name_repetitions, clippy::shadow_unrelated)]
use rayon::ThreadPoolBuilder;
use std::env;
pub mod eq;
pub mod parse;
/// Read the `ABORT_AFTER_FAILURE` environment variable, and parse it.
pub fn abort_after() -> usize {
match env::var("ABORT_AFTER_FAILURE") {
Ok(s) => s.parse().expect("failed to parse ABORT_AFTER_FAILURE"),
Err(_) => usize::max_value(),
}
}
/// Configure Rayon threadpool.
pub fn rayon_init() {
let stack_size = match env::var("RUST_MIN_STACK") {
Ok(s) => s.parse().expect("failed to parse RUST_MIN_STACK"),
Err(_) => 20 * 1024 * 1024,
};
ThreadPoolBuilder::new()
.stack_size(stack_size)
.build_global()
.unwrap();
}


@@ -0,0 +1,48 @@
extern crate rustc_ast;
extern crate rustc_expand;
extern crate rustc_parse as parse;
extern crate rustc_session;
extern crate rustc_span;
use rustc_ast::ast;
use rustc_ast::ptr::P;
use rustc_session::parse::ParseSess;
use rustc_span::source_map::FilePathMapping;
use rustc_span::FileName;
use std::panic;
pub fn librustc_expr(input: &str) -> Option<P<ast::Expr>> {
match panic::catch_unwind(|| {
let sess = ParseSess::new(FilePathMapping::empty());
let e = parse::new_parser_from_source_str(
&sess,
FileName::Custom("test_precedence".to_string()),
input.to_string(),
)
.parse_expr();
match e {
Ok(expr) => Some(expr),
Err(mut diagnostic) => {
diagnostic.emit();
None
}
}
}) {
Ok(Some(e)) => Some(e),
Ok(None) => None,
Err(_) => {
errorf!("librustc panicked\n");
None
}
}
}
pub fn syn_expr(input: &str) -> Option<syn::Expr> {
match syn::parse_str(input) {
Ok(e) => Some(e),
Err(msg) => {
errorf!("syn failed to parse\n{:?}\n", msg);
None
}
}
}

zeroidc/vendor/syn/tests/debug/gen.rs vendored Normal file (+5640)

(File diff suppressed because it is too large.)

zeroidc/vendor/syn/tests/debug/mod.rs vendored Normal file (+125)

@@ -0,0 +1,125 @@
#![allow(
clippy::no_effect_underscore_binding,
clippy::too_many_lines,
clippy::used_underscore_binding
)]
#[rustfmt::skip]
mod gen;
use proc_macro2::{Ident, Literal, TokenStream};
use ref_cast::RefCast;
use std::fmt::{self, Debug};
use std::ops::Deref;
use syn::punctuated::Punctuated;
#[derive(RefCast)]
#[repr(transparent)]
pub struct Lite<T: ?Sized> {
value: T,
}
#[allow(non_snake_case)]
pub fn Lite<T: ?Sized>(value: &T) -> &Lite<T> {
Lite::ref_cast(value)
}
impl<T: ?Sized> Deref for Lite<T> {
type Target = T;
fn deref(&self) -> &Self::Target {
&self.value
}
}
impl Debug for Lite<bool> {
fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
write!(formatter, "{}", self.value)
}
}
impl Debug for Lite<u32> {
fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
write!(formatter, "{}", self.value)
}
}
impl Debug for Lite<usize> {
fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
write!(formatter, "{}", self.value)
}
}
impl Debug for Lite<String> {
fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
write!(formatter, "{:?}", self.value)
}
}
impl Debug for Lite<Ident> {
fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
write!(formatter, "{:?}", self.value.to_string())
}
}
impl Debug for Lite<Literal> {
fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
write!(formatter, "{}", self.value)
}
}
impl Debug for Lite<TokenStream> {
fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
let string = self.value.to_string();
if string.len() <= 80 {
write!(formatter, "TokenStream(`{}`)", self.value)
} else {
formatter
.debug_tuple("TokenStream")
.field(&format_args!("`{}`", string))
.finish()
}
}
}
impl<'a, T> Debug for Lite<&'a T>
where
Lite<T>: Debug,
{
fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
Debug::fmt(Lite(&*self.value), formatter)
}
}
impl<T> Debug for Lite<Box<T>>
where
Lite<T>: Debug,
{
fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
Debug::fmt(Lite(&*self.value), formatter)
}
}
impl<T> Debug for Lite<Vec<T>>
where
Lite<T>: Debug,
{
fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
formatter
.debug_list()
.entries(self.value.iter().map(Lite))
.finish()
}
}
impl<T, P> Debug for Lite<Punctuated<T, P>>
where
Lite<T>: Debug,
{
fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
formatter
.debug_list()
.entries(self.value.iter().map(Lite))
.finish()
}
}

zeroidc/vendor/syn/tests/macros/mod.rs vendored Normal file (+79)

@@ -0,0 +1,79 @@
#![allow(unused_macros, unused_macro_rules)]
#[path = "../debug/mod.rs"]
pub mod debug;
use syn::parse::{Parse, Result};
macro_rules! errorf {
($($tt:tt)*) => {{
use ::std::io::Write;
let stderr = ::std::io::stderr();
write!(stderr.lock(), $($tt)*).unwrap();
}};
}
macro_rules! punctuated {
($($e:expr,)+) => {{
let mut seq = ::syn::punctuated::Punctuated::new();
$(
seq.push($e);
)+
seq
}};
($($e:expr),+) => {
punctuated!($($e,)+)
};
}
macro_rules! snapshot {
($($args:tt)*) => {
snapshot_impl!(() $($args)*)
};
}
macro_rules! snapshot_impl {
(($expr:ident) as $t:ty, @$snapshot:literal) => {
let $expr = crate::macros::Tokens::parse::<$t>($expr).unwrap();
let debug = crate::macros::debug::Lite(&$expr);
if !cfg!(miri) {
insta::assert_debug_snapshot!(debug, @$snapshot);
}
};
(($($expr:tt)*) as $t:ty, @$snapshot:literal) => {{
let syntax_tree = crate::macros::Tokens::parse::<$t>($($expr)*).unwrap();
let debug = crate::macros::debug::Lite(&syntax_tree);
if !cfg!(miri) {
insta::assert_debug_snapshot!(debug, @$snapshot);
}
syntax_tree
}};
(($($expr:tt)*) , @$snapshot:literal) => {{
let syntax_tree = $($expr)*;
let debug = crate::macros::debug::Lite(&syntax_tree);
if !cfg!(miri) {
insta::assert_debug_snapshot!(debug, @$snapshot);
}
syntax_tree
}};
(($($expr:tt)*) $next:tt $($rest:tt)*) => {
snapshot_impl!(($($expr)* $next) $($rest)*)
};
}
pub trait Tokens {
fn parse<T: Parse>(self) -> Result<T>;
}
impl<'a> Tokens for &'a str {
fn parse<T: Parse>(self) -> Result<T> {
syn::parse_str(self)
}
}
impl Tokens for proc_macro2::TokenStream {
fn parse<T: Parse>(self) -> Result<T> {
syn::parse2(self)
}
}


@@ -0,0 +1,5 @@
#![allow(clippy::let_underscore_drop)]
mod regression {
automod::dir!("tests/regression");
}


@@ -0,0 +1,5 @@
#[test]
fn issue1108() {
let data = "impl<x<>>::x for";
let _ = syn::parse_file(data);
}

zeroidc/vendor/syn/tests/repo/mod.rs vendored Normal file (+180)

@@ -0,0 +1,180 @@
#![allow(clippy::manual_assert)]
mod progress;
use self::progress::Progress;
use anyhow::Result;
use flate2::read::GzDecoder;
use std::fs;
use std::path::Path;
use tar::Archive;
use walkdir::DirEntry;
const REVISION: &str = "ee160f2f5e73b6f5954bc33f059c316d9e8582c4";
#[rustfmt::skip]
static EXCLUDE: &[&str] = &[
// TODO: impl ~const T {}
// https://github.com/dtolnay/syn/issues/1051
"src/test/ui/rfc-2632-const-trait-impl/syntax.rs",
// Compile-fail expr parameter in const generic position: f::<1 + 2>()
"src/test/ui/const-generics/early/closing-args-token.rs",
"src/test/ui/const-generics/early/const-expression-parameter.rs",
// Need at least one trait in impl Trait, no such type as impl 'static
"src/test/ui/type-alias-impl-trait/generic_type_does_not_live_long_enough.rs",
// Deprecated anonymous parameter syntax in traits
"src/test/ui/issues/issue-13105.rs",
"src/test/ui/issues/issue-13775.rs",
"src/test/ui/issues/issue-34074.rs",
"src/test/ui/proc-macro/trait-fn-args-2015.rs",
"src/tools/rustfmt/tests/source/trait.rs",
"src/tools/rustfmt/tests/target/trait.rs",
// Placeholder syntax for "throw expressions"
"src/test/pretty/yeet-expr.rs",
"src/test/ui/try-trait/yeet-for-option.rs",
"src/test/ui/try-trait/yeet-for-result.rs",
// Excessive nesting
"src/test/ui/issues/issue-74564-if-expr-stack-overflow.rs",
// Testing rustfmt on invalid syntax
"src/tools/rustfmt/tests/coverage/target/comments.rs",
"src/tools/rustfmt/tests/parser/issue-4126/invalid.rs",
"src/tools/rustfmt/tests/parser/issue_4418.rs",
"src/tools/rustfmt/tests/parser/unclosed-delims/issue_4466.rs",
"src/tools/rustfmt/tests/source/configs/disable_all_formatting/true.rs",
"src/tools/rustfmt/tests/source/configs/spaces_around_ranges/false.rs",
"src/tools/rustfmt/tests/source/configs/spaces_around_ranges/true.rs",
"src/tools/rustfmt/tests/source/type.rs",
"src/tools/rustfmt/tests/target/configs/spaces_around_ranges/false.rs",
"src/tools/rustfmt/tests/target/configs/spaces_around_ranges/true.rs",
"src/tools/rustfmt/tests/target/type.rs",
// Testing compiler diagnostic localization on invalid syntax
"src/test/run-make/translation/basic-translation.rs",
// Clippy lint lists represented as expressions
"src/tools/clippy/clippy_lints/src/lib.deprecated.rs",
"src/tools/clippy/clippy_lints/src/lib.register_all.rs",
"src/tools/clippy/clippy_lints/src/lib.register_cargo.rs",
"src/tools/clippy/clippy_lints/src/lib.register_complexity.rs",
"src/tools/clippy/clippy_lints/src/lib.register_correctness.rs",
"src/tools/clippy/clippy_lints/src/lib.register_internal.rs",
"src/tools/clippy/clippy_lints/src/lib.register_lints.rs",
"src/tools/clippy/clippy_lints/src/lib.register_nursery.rs",
"src/tools/clippy/clippy_lints/src/lib.register_pedantic.rs",
"src/tools/clippy/clippy_lints/src/lib.register_perf.rs",
"src/tools/clippy/clippy_lints/src/lib.register_restriction.rs",
"src/tools/clippy/clippy_lints/src/lib.register_style.rs",
"src/tools/clippy/clippy_lints/src/lib.register_suspicious.rs",
// Not actually test cases
"src/test/rustdoc-ui/test-compile-fail2.rs",
"src/test/rustdoc-ui/test-compile-fail3.rs",
"src/test/ui/json-bom-plus-crlf-multifile-aux.rs",
"src/test/ui/lint/expansion-time-include.rs",
"src/test/ui/macros/auxiliary/macro-comma-support.rs",
"src/test/ui/macros/auxiliary/macro-include-items-expr.rs",
"src/test/ui/macros/include-single-expr-helper.rs",
"src/test/ui/macros/include-single-expr-helper-1.rs",
"src/test/ui/parser/issues/auxiliary/issue-21146-inc.rs",
];
pub fn base_dir_filter(entry: &DirEntry) -> bool {
let path = entry.path();
if path.is_dir() {
return true; // otherwise walkdir does not visit the files
}
if path.extension().map_or(true, |e| e != "rs") {
return false;
}
let mut path_string = path.to_string_lossy();
if cfg!(windows) {
path_string = path_string.replace('\\', "/").into();
}
let path = if let Some(path) = path_string.strip_prefix("tests/rust/") {
path
} else {
panic!("unexpected path in Rust dist: {}", path_string);
};
if path.starts_with("src/test/compile-fail") || path.starts_with("src/test/rustfix") {
return false;
}
if path.starts_with("src/test/ui") {
let stderr_path = entry.path().with_extension("stderr");
if stderr_path.exists() {
// Expected to fail in some way
return false;
}
}
!EXCLUDE.contains(&path)
}
#[allow(dead_code)]
pub fn edition(path: &Path) -> &'static str {
if path.ends_with("dyn-2015-no-warnings-without-lints.rs") {
"2015"
} else {
"2018"
}
}
pub fn clone_rust() {
let needs_clone = match fs::read_to_string("tests/rust/COMMIT") {
Err(_) => true,
Ok(contents) => contents.trim() != REVISION,
};
if needs_clone {
download_and_unpack().unwrap();
}
let mut missing = String::new();
let test_src = Path::new("tests/rust");
for exclude in EXCLUDE {
if !test_src.join(exclude).exists() {
missing += "\ntests/rust/";
missing += exclude;
}
}
if !missing.is_empty() {
panic!("excluded test file does not exist:{}\n", missing);
}
}
fn download_and_unpack() -> Result<()> {
let url = format!(
"https://github.com/rust-lang/rust/archive/{}.tar.gz",
REVISION
);
let response = reqwest::blocking::get(&url)?.error_for_status()?;
let progress = Progress::new(response);
let decoder = GzDecoder::new(progress);
let mut archive = Archive::new(decoder);
let prefix = format!("rust-{}", REVISION);
let tests_rust = Path::new("tests/rust");
if tests_rust.exists() {
fs::remove_dir_all(tests_rust)?;
}
for entry in archive.entries()? {
let mut entry = entry?;
let path = entry.path()?;
if path == Path::new("pax_global_header") {
continue;
}
let relative = path.strip_prefix(&prefix)?;
let out = tests_rust.join(relative);
entry.unpack(&out)?;
}
fs::write("tests/rust/COMMIT", REVISION)?;
Ok(())
}


@@ -0,0 +1,37 @@
use std::io::{Read, Result};
use std::time::{Duration, Instant};
pub struct Progress<R> {
bytes: usize,
tick: Instant,
stream: R,
}
impl<R> Progress<R> {
pub fn new(stream: R) -> Self {
Progress {
bytes: 0,
tick: Instant::now() + Duration::from_millis(2000),
stream,
}
}
}
impl<R: Read> Read for Progress<R> {
fn read(&mut self, buf: &mut [u8]) -> Result<usize> {
let num = self.stream.read(buf)?;
self.bytes += num;
let now = Instant::now();
if now > self.tick {
self.tick = now + Duration::from_millis(500);
errorf!("downloading... {} bytes\n", self.bytes);
}
Ok(num)
}
}
impl<R> Drop for Progress<R> {
fn drop(&mut self) {
errorf!("done ({} bytes)\n", self.bytes);
}
}


@@ -0,0 +1,37 @@
#[macro_use]
mod macros;
use syn::{Expr, Item};
#[test]
fn test_async_fn() {
let input = "async fn process() {}";
snapshot!(input as Item, @r###"
Item::Fn {
vis: Inherited,
sig: Signature {
asyncness: Some,
ident: "process",
generics: Generics,
output: Default,
},
block: Block,
}
"###);
}
#[test]
fn test_async_closure() {
let input = "async || {}";
snapshot!(input as Expr, @r###"
Expr::Closure {
asyncness: Some,
output: Default,
body: Expr::Block {
block: Block,
},
}
"###);
}


@@ -0,0 +1,336 @@
#[macro_use]
mod macros;
use syn::parse::Parser;
use syn::{Attribute, Meta};
#[test]
fn test_meta_item_word() {
let meta = test("#[foo]");
snapshot!(meta, @r###"
Path(Path {
segments: [
PathSegment {
ident: "foo",
arguments: None,
},
],
})
"###);
}
#[test]
fn test_meta_item_name_value() {
let meta = test("#[foo = 5]");
snapshot!(meta, @r###"
Meta::NameValue {
path: Path {
segments: [
PathSegment {
ident: "foo",
arguments: None,
},
],
},
lit: 5,
}
"###);
}
#[test]
fn test_meta_item_bool_value() {
let meta = test("#[foo = true]");
snapshot!(meta, @r###"
Meta::NameValue {
path: Path {
segments: [
PathSegment {
ident: "foo",
arguments: None,
},
],
},
lit: Lit::Bool {
value: true,
},
}
"###);
let meta = test("#[foo = false]");
snapshot!(meta, @r###"
Meta::NameValue {
path: Path {
segments: [
PathSegment {
ident: "foo",
arguments: None,
},
],
},
lit: Lit::Bool {
value: false,
},
}
"###);
}
#[test]
fn test_meta_item_list_lit() {
let meta = test("#[foo(5)]");
snapshot!(meta, @r###"
Meta::List {
path: Path {
segments: [
PathSegment {
ident: "foo",
arguments: None,
},
],
},
nested: [
Lit(5),
],
}
"###);
}
#[test]
fn test_meta_item_list_word() {
let meta = test("#[foo(bar)]");
snapshot!(meta, @r###"
Meta::List {
path: Path {
segments: [
PathSegment {
ident: "foo",
arguments: None,
},
],
},
nested: [
Meta(Path(Path {
segments: [
PathSegment {
ident: "bar",
arguments: None,
},
],
})),
],
}
"###);
}
#[test]
fn test_meta_item_list_name_value() {
let meta = test("#[foo(bar = 5)]");
snapshot!(meta, @r###"
Meta::List {
path: Path {
segments: [
PathSegment {
ident: "foo",
arguments: None,
},
],
},
nested: [
Meta(Meta::NameValue {
path: Path {
segments: [
PathSegment {
ident: "bar",
arguments: None,
},
],
},
lit: 5,
}),
],
}
"###);
}
#[test]
fn test_meta_item_list_bool_value() {
let meta = test("#[foo(bar = true)]");
snapshot!(meta, @r###"
Meta::List {
path: Path {
segments: [
PathSegment {
ident: "foo",
arguments: None,
},
],
},
nested: [
Meta(Meta::NameValue {
path: Path {
segments: [
PathSegment {
ident: "bar",
arguments: None,
},
],
},
lit: Lit::Bool {
value: true,
},
}),
],
}
"###);
}
#[test]
fn test_meta_item_multiple() {
let meta = test("#[foo(word, name = 5, list(name2 = 6), word2)]");
snapshot!(meta, @r###"
Meta::List {
path: Path {
segments: [
PathSegment {
ident: "foo",
arguments: None,
},
],
},
nested: [
Meta(Path(Path {
segments: [
PathSegment {
ident: "word",
arguments: None,
},
],
})),
Meta(Meta::NameValue {
path: Path {
segments: [
PathSegment {
ident: "name",
arguments: None,
},
],
},
lit: 5,
}),
Meta(Meta::List {
path: Path {
segments: [
PathSegment {
ident: "list",
arguments: None,
},
],
},
nested: [
Meta(Meta::NameValue {
path: Path {
segments: [
PathSegment {
ident: "name2",
arguments: None,
},
],
},
lit: 6,
}),
],
}),
Meta(Path(Path {
segments: [
PathSegment {
ident: "word2",
arguments: None,
},
],
})),
],
}
"###);
}
#[test]
fn test_bool_lit() {
let meta = test("#[foo(true)]");
snapshot!(meta, @r###"
Meta::List {
path: Path {
segments: [
PathSegment {
ident: "foo",
arguments: None,
},
],
},
nested: [
Lit(Lit::Bool {
value: true,
}),
],
}
"###);
}
#[test]
fn test_negative_lit() {
let meta = test("#[form(min = -1, max = 200)]");
snapshot!(meta, @r###"
Meta::List {
path: Path {
segments: [
PathSegment {
ident: "form",
arguments: None,
},
],
},
nested: [
Meta(Meta::NameValue {
path: Path {
segments: [
PathSegment {
ident: "min",
arguments: None,
},
],
},
lit: -1,
}),
Meta(Meta::NameValue {
path: Path {
segments: [
PathSegment {
ident: "max",
arguments: None,
},
],
},
lit: 200,
}),
],
}
"###);
}
fn test(input: &str) -> Meta {
let attrs = Attribute::parse_outer.parse_str(input).unwrap();
assert_eq!(attrs.len(), 1);
let attr = attrs.into_iter().next().unwrap();
attr.parse_meta().unwrap()
}


@@ -0,0 +1,894 @@
#![allow(clippy::too_many_lines)]
#[macro_use]
mod macros;
use quote::quote;
use syn::{Data, DeriveInput};
#[test]
fn test_unit() {
let input = quote! {
struct Unit;
};
snapshot!(input as DeriveInput, @r###"
DeriveInput {
vis: Inherited,
ident: "Unit",
generics: Generics,
data: Data::Struct {
fields: Unit,
semi_token: Some,
},
}
"###);
}
#[test]
fn test_struct() {
let input = quote! {
#[derive(Debug, Clone)]
pub struct Item {
pub ident: Ident,
pub attrs: Vec<Attribute>
}
};
snapshot!(input as DeriveInput, @r###"
DeriveInput {
attrs: [
Attribute {
style: Outer,
path: Path {
segments: [
PathSegment {
ident: "derive",
arguments: None,
},
],
},
tokens: TokenStream(`(Debug , Clone)`),
},
],
vis: Visibility::Public,
ident: "Item",
generics: Generics,
data: Data::Struct {
fields: Fields::Named {
named: [
Field {
vis: Visibility::Public,
ident: Some("ident"),
colon_token: Some,
ty: Type::Path {
path: Path {
segments: [
PathSegment {
ident: "Ident",
arguments: None,
},
],
},
},
},
Field {
vis: Visibility::Public,
ident: Some("attrs"),
colon_token: Some,
ty: Type::Path {
path: Path {
segments: [
PathSegment {
ident: "Vec",
arguments: PathArguments::AngleBracketed {
args: [
Type(Type::Path {
path: Path {
segments: [
PathSegment {
ident: "Attribute",
arguments: None,
},
],
},
}),
],
},
},
],
},
},
},
],
},
},
}
"###);
snapshot!(input.attrs[0].parse_meta().unwrap(), @r###"
Meta::List {
path: Path {
segments: [
PathSegment {
ident: "derive",
arguments: None,
},
],
},
nested: [
Meta(Path(Path {
segments: [
PathSegment {
ident: "Debug",
arguments: None,
},
],
})),
Meta(Path(Path {
segments: [
PathSegment {
ident: "Clone",
arguments: None,
},
],
})),
],
}
"###);
}
#[test]
fn test_union() {
let input = quote! {
union MaybeUninit<T> {
uninit: (),
value: T
}
};
snapshot!(input as DeriveInput, @r###"
DeriveInput {
vis: Inherited,
ident: "MaybeUninit",
generics: Generics {
lt_token: Some,
params: [
Type(TypeParam {
ident: "T",
}),
],
gt_token: Some,
},
data: Data::Union {
fields: FieldsNamed {
named: [
Field {
vis: Inherited,
ident: Some("uninit"),
colon_token: Some,
ty: Type::Tuple,
},
Field {
vis: Inherited,
ident: Some("value"),
colon_token: Some,
ty: Type::Path {
path: Path {
segments: [
PathSegment {
ident: "T",
arguments: None,
},
],
},
},
},
],
},
},
}
"###);
}
#[test]
#[cfg(feature = "full")]
fn test_enum() {
let input = quote! {
/// See the std::result module documentation for details.
#[must_use]
pub enum Result<T, E> {
Ok(T),
Err(E),
Surprise = 0isize,
// Smuggling data into a proc_macro_derive,
// in the style of https://github.com/dtolnay/proc-macro-hack
ProcMacroHack = (0, "data").0
}
};
snapshot!(input as DeriveInput, @r###"
DeriveInput {
attrs: [
Attribute {
style: Outer,
path: Path {
segments: [
PathSegment {
ident: "doc",
arguments: None,
},
],
},
tokens: TokenStream(`= r" See the std::result module documentation for details."`),
},
Attribute {
style: Outer,
path: Path {
segments: [
PathSegment {
ident: "must_use",
arguments: None,
},
],
},
tokens: TokenStream(``),
},
],
vis: Visibility::Public,
ident: "Result",
generics: Generics {
lt_token: Some,
params: [
Type(TypeParam {
ident: "T",
}),
Type(TypeParam {
ident: "E",
}),
],
gt_token: Some,
},
data: Data::Enum {
variants: [
Variant {
ident: "Ok",
fields: Fields::Unnamed {
unnamed: [
Field {
vis: Inherited,
ty: Type::Path {
path: Path {
segments: [
PathSegment {
ident: "T",
arguments: None,
},
],
},
},
},
],
},
},
Variant {
ident: "Err",
fields: Fields::Unnamed {
unnamed: [
Field {
vis: Inherited,
ty: Type::Path {
path: Path {
segments: [
PathSegment {
ident: "E",
arguments: None,
},
],
},
},
},
],
},
},
Variant {
ident: "Surprise",
fields: Unit,
discriminant: Some(Expr::Lit {
lit: 0isize,
}),
},
Variant {
ident: "ProcMacroHack",
fields: Unit,
discriminant: Some(Expr::Field {
base: Expr::Tuple {
elems: [
Expr::Lit {
lit: 0,
},
Expr::Lit {
lit: "data",
},
],
},
member: Unnamed(Index {
index: 0,
}),
}),
},
],
},
}
"###);
let meta_items: Vec<_> = input
.attrs
.into_iter()
.map(|attr| attr.parse_meta().unwrap())
.collect();
snapshot!(meta_items, @r###"
[
Meta::NameValue {
path: Path {
segments: [
PathSegment {
ident: "doc",
arguments: None,
},
],
},
lit: " See the std::result module documentation for details.",
},
Path(Path {
segments: [
PathSegment {
ident: "must_use",
arguments: None,
},
],
}),
]
"###);
}
#[test]
fn test_attr_with_path() {
let input = quote! {
#[::attr_args::identity
fn main() { assert_eq!(foo(), "Hello, world!"); }]
struct Dummy;
};
snapshot!(input as DeriveInput, @r###"
DeriveInput {
attrs: [
Attribute {
style: Outer,
path: Path {
leading_colon: Some,
segments: [
PathSegment {
ident: "attr_args",
arguments: None,
},
PathSegment {
ident: "identity",
arguments: None,
},
],
},
tokens: TokenStream(`fn main () { assert_eq ! (foo () , "Hello, world!") ; }`),
},
],
vis: Inherited,
ident: "Dummy",
generics: Generics,
data: Data::Struct {
fields: Unit,
semi_token: Some,
},
}
"###);
assert!(input.attrs[0].parse_meta().is_err());
}
#[test]
fn test_attr_with_non_mod_style_path() {
let input = quote! {
#[inert <T>]
struct S;
};
snapshot!(input as DeriveInput, @r###"
DeriveInput {
attrs: [
Attribute {
style: Outer,
path: Path {
segments: [
PathSegment {
ident: "inert",
arguments: None,
},
],
},
tokens: TokenStream(`< T >`),
},
],
vis: Inherited,
ident: "S",
generics: Generics,
data: Data::Struct {
fields: Unit,
semi_token: Some,
},
}
"###);
assert!(input.attrs[0].parse_meta().is_err());
}
#[test]
fn test_attr_with_mod_style_path_with_self() {
let input = quote! {
#[foo::self]
struct S;
};
snapshot!(input as DeriveInput, @r###"
DeriveInput {
attrs: [
Attribute {
style: Outer,
path: Path {
segments: [
PathSegment {
ident: "foo",
arguments: None,
},
PathSegment {
ident: "self",
arguments: None,
},
],
},
tokens: TokenStream(``),
},
],
vis: Inherited,
ident: "S",
generics: Generics,
data: Data::Struct {
fields: Unit,
semi_token: Some,
},
}
"###);
snapshot!(input.attrs[0].parse_meta().unwrap(), @r###"
Path(Path {
segments: [
PathSegment {
ident: "foo",
arguments: None,
},
PathSegment {
ident: "self",
arguments: None,
},
],
})
"###);
}
#[test]
fn test_pub_restricted() {
// Taken from tests/rust/src/test/ui/resolve/auxiliary/privacy-struct-ctor.rs
let input = quote! {
pub(in m) struct Z(pub(in m::n) u8);
};
snapshot!(input as DeriveInput, @r###"
DeriveInput {
vis: Visibility::Restricted {
in_token: Some,
path: Path {
segments: [
PathSegment {
ident: "m",
arguments: None,
},
],
},
},
ident: "Z",
generics: Generics,
data: Data::Struct {
fields: Fields::Unnamed {
unnamed: [
Field {
vis: Visibility::Restricted {
in_token: Some,
path: Path {
segments: [
PathSegment {
ident: "m",
arguments: None,
},
PathSegment {
ident: "n",
arguments: None,
},
],
},
},
ty: Type::Path {
path: Path {
segments: [
PathSegment {
ident: "u8",
arguments: None,
},
],
},
},
},
],
},
semi_token: Some,
},
}
"###);
}
#[test]
fn test_vis_crate() {
let input = quote! {
crate struct S;
};
snapshot!(input as DeriveInput, @r###"
DeriveInput {
vis: Visibility::Crate,
ident: "S",
generics: Generics,
data: Data::Struct {
fields: Unit,
semi_token: Some,
},
}
"###);
}
#[test]
fn test_pub_restricted_crate() {
let input = quote! {
pub(crate) struct S;
};
snapshot!(input as DeriveInput, @r###"
DeriveInput {
vis: Visibility::Restricted {
path: Path {
segments: [
PathSegment {
ident: "crate",
arguments: None,
},
],
},
},
ident: "S",
generics: Generics,
data: Data::Struct {
fields: Unit,
semi_token: Some,
},
}
"###);
}
#[test]
fn test_pub_restricted_super() {
let input = quote! {
pub(super) struct S;
};
snapshot!(input as DeriveInput, @r###"
DeriveInput {
vis: Visibility::Restricted {
path: Path {
segments: [
PathSegment {
ident: "super",
arguments: None,
},
],
},
},
ident: "S",
generics: Generics,
data: Data::Struct {
fields: Unit,
semi_token: Some,
},
}
"###);
}
#[test]
fn test_pub_restricted_in_super() {
let input = quote! {
pub(in super) struct S;
};
snapshot!(input as DeriveInput, @r###"
DeriveInput {
vis: Visibility::Restricted {
in_token: Some,
path: Path {
segments: [
PathSegment {
ident: "super",
arguments: None,
},
],
},
},
ident: "S",
generics: Generics,
data: Data::Struct {
fields: Unit,
semi_token: Some,
},
}
"###);
}
#[test]
fn test_fields_on_unit_struct() {
let input = quote! {
struct S;
};
snapshot!(input as DeriveInput, @r###"
DeriveInput {
vis: Inherited,
ident: "S",
generics: Generics,
data: Data::Struct {
fields: Unit,
semi_token: Some,
},
}
"###);
let data = match input.data {
Data::Struct(data) => data,
_ => panic!("expected a struct"),
};
assert_eq!(0, data.fields.iter().count());
}
#[test]
fn test_fields_on_named_struct() {
let input = quote! {
struct S {
foo: i32,
pub bar: String,
}
};
snapshot!(input as DeriveInput, @r###"
DeriveInput {
vis: Inherited,
ident: "S",
generics: Generics,
data: Data::Struct {
fields: Fields::Named {
named: [
Field {
vis: Inherited,
ident: Some("foo"),
colon_token: Some,
ty: Type::Path {
path: Path {
segments: [
PathSegment {
ident: "i32",
arguments: None,
},
],
},
},
},
Field {
vis: Visibility::Public,
ident: Some("bar"),
colon_token: Some,
ty: Type::Path {
path: Path {
segments: [
PathSegment {
ident: "String",
arguments: None,
},
],
},
},
},
],
},
},
}
"###);
let data = match input.data {
Data::Struct(data) => data,
_ => panic!("expected a struct"),
};
snapshot!(data.fields.into_iter().collect::<Vec<_>>(), @r###"
[
Field {
vis: Inherited,
ident: Some("foo"),
colon_token: Some,
ty: Type::Path {
path: Path {
segments: [
PathSegment {
ident: "i32",
arguments: None,
},
],
},
},
},
Field {
vis: Visibility::Public,
ident: Some("bar"),
colon_token: Some,
ty: Type::Path {
path: Path {
segments: [
PathSegment {
ident: "String",
arguments: None,
},
],
},
},
},
]
"###);
}
#[test]
fn test_fields_on_tuple_struct() {
let input = quote! {
struct S(i32, pub String);
};
snapshot!(input as DeriveInput, @r###"
DeriveInput {
vis: Inherited,
ident: "S",
generics: Generics,
data: Data::Struct {
fields: Fields::Unnamed {
unnamed: [
Field {
vis: Inherited,
ty: Type::Path {
path: Path {
segments: [
PathSegment {
ident: "i32",
arguments: None,
},
],
},
},
},
Field {
vis: Visibility::Public,
ty: Type::Path {
path: Path {
segments: [
PathSegment {
ident: "String",
arguments: None,
},
],
},
},
},
],
},
semi_token: Some,
},
}
"###);
let data = match input.data {
Data::Struct(data) => data,
_ => panic!("expected a struct"),
};
snapshot!(data.fields.iter().collect::<Vec<_>>(), @r###"
[
Field {
vis: Inherited,
ty: Type::Path {
path: Path {
segments: [
PathSegment {
ident: "i32",
arguments: None,
},
],
},
},
},
Field {
vis: Visibility::Public,
ty: Type::Path {
path: Path {
segments: [
PathSegment {
ident: "String",
arguments: None,
},
],
},
},
},
]
"###);
}
#[test]
fn test_ambiguous_crate() {
let input = quote! {
// The field type is `(crate::X)` not `crate (::X)`.
struct S(crate::X);
};
snapshot!(input as DeriveInput, @r###"
DeriveInput {
vis: Inherited,
ident: "S",
generics: Generics,
data: Data::Struct {
fields: Fields::Unnamed {
unnamed: [
Field {
vis: Inherited,
ty: Type::Path {
path: Path {
segments: [
PathSegment {
ident: "crate",
arguments: None,
},
PathSegment {
ident: "X",
arguments: None,
},
],
},
},
},
],
},
semi_token: Some,
},
}
"###);
}

zeroidc/vendor/syn/tests/test_expr.rs vendored Normal file (+326)

@@ -0,0 +1,326 @@
#[macro_use]
mod macros;
use proc_macro2::{Delimiter, Group, Ident, Punct, Spacing, Span, TokenStream, TokenTree};
use quote::quote;
use std::iter::FromIterator;
use syn::{Expr, ExprRange};
#[test]
fn test_expr_parse() {
let tokens = quote!(..100u32);
snapshot!(tokens as Expr, @r###"
Expr::Range {
limits: HalfOpen,
to: Some(Expr::Lit {
lit: 100u32,
}),
}
"###);
let tokens = quote!(..100u32);
snapshot!(tokens as ExprRange, @r###"
ExprRange {
limits: HalfOpen,
to: Some(Expr::Lit {
lit: 100u32,
}),
}
"###);
}
#[test]
fn test_await() {
// Must not parse as Expr::Field.
let tokens = quote!(fut.await);
snapshot!(tokens as Expr, @r###"
Expr::Await {
base: Expr::Path {
path: Path {
segments: [
PathSegment {
ident: "fut",
arguments: None,
},
],
},
},
}
"###);
}
#[rustfmt::skip]
#[test]
fn test_tuple_multi_index() {
for &input in &[
"tuple.0.0",
"tuple .0.0",
"tuple. 0.0",
"tuple.0 .0",
"tuple.0. 0",
"tuple . 0 . 0",
] {
snapshot!(input as Expr, @r###"
Expr::Field {
base: Expr::Field {
base: Expr::Path {
path: Path {
segments: [
PathSegment {
ident: "tuple",
arguments: None,
},
],
},
},
member: Unnamed(Index {
index: 0,
}),
},
member: Unnamed(Index {
index: 0,
}),
}
"###);
}
for tokens in vec![
quote!(tuple.0.0),
quote!(tuple .0.0),
quote!(tuple. 0.0),
quote!(tuple.0 .0),
quote!(tuple.0. 0),
quote!(tuple . 0 . 0),
] {
snapshot!(tokens as Expr, @r###"
Expr::Field {
base: Expr::Field {
base: Expr::Path {
path: Path {
segments: [
PathSegment {
ident: "tuple",
arguments: None,
},
],
},
},
member: Unnamed(Index {
index: 0,
}),
},
member: Unnamed(Index {
index: 0,
}),
}
"###);
}
}
#[test]
fn test_macro_variable_func() {
// mimics the token stream corresponding to `$fn()`
let tokens = TokenStream::from_iter(vec![
TokenTree::Group(Group::new(Delimiter::None, quote! { f })),
TokenTree::Group(Group::new(Delimiter::Parenthesis, TokenStream::new())),
]);
snapshot!(tokens as Expr, @r###"
Expr::Call {
func: Expr::Group {
expr: Expr::Path {
path: Path {
segments: [
PathSegment {
ident: "f",
arguments: None,
},
],
},
},
},
}
"###);
let tokens = TokenStream::from_iter(vec![
TokenTree::Punct(Punct::new('#', Spacing::Alone)),
TokenTree::Group(Group::new(Delimiter::Bracket, quote! { outside })),
TokenTree::Group(Group::new(Delimiter::None, quote! { #[inside] f })),
TokenTree::Group(Group::new(Delimiter::Parenthesis, TokenStream::new())),
]);
snapshot!(tokens as Expr, @r###"
Expr::Call {
attrs: [
Attribute {
style: Outer,
path: Path {
segments: [
PathSegment {
ident: "outside",
arguments: None,
},
],
},
tokens: TokenStream(``),
},
],
func: Expr::Group {
expr: Expr::Path {
attrs: [
Attribute {
style: Outer,
path: Path {
segments: [
PathSegment {
ident: "inside",
arguments: None,
},
],
},
tokens: TokenStream(``),
},
],
path: Path {
segments: [
PathSegment {
ident: "f",
arguments: None,
},
],
},
},
},
}
"###);
}
#[test]
fn test_macro_variable_macro() {
// mimics the token stream corresponding to `$macro!()`
let tokens = TokenStream::from_iter(vec![
TokenTree::Group(Group::new(Delimiter::None, quote! { m })),
TokenTree::Punct(Punct::new('!', Spacing::Alone)),
TokenTree::Group(Group::new(Delimiter::Parenthesis, TokenStream::new())),
]);
snapshot!(tokens as Expr, @r###"
Expr::Macro {
mac: Macro {
path: Path {
segments: [
PathSegment {
ident: "m",
arguments: None,
},
],
},
delimiter: Paren,
tokens: TokenStream(``),
},
}
"###);
}
#[test]
fn test_macro_variable_struct() {
// mimics the token stream corresponding to `$struct {}`
let tokens = TokenStream::from_iter(vec![
TokenTree::Group(Group::new(Delimiter::None, quote! { S })),
TokenTree::Group(Group::new(Delimiter::Brace, TokenStream::new())),
]);
snapshot!(tokens as Expr, @r###"
Expr::Struct {
path: Path {
segments: [
PathSegment {
ident: "S",
arguments: None,
},
],
},
}
"###);
}
#[test]
fn test_macro_variable_match_arm() {
// mimics the token stream corresponding to `match v { _ => $expr }`
let tokens = TokenStream::from_iter(vec![
TokenTree::Ident(Ident::new("match", Span::call_site())),
TokenTree::Ident(Ident::new("v", Span::call_site())),
TokenTree::Group(Group::new(
Delimiter::Brace,
TokenStream::from_iter(vec![
TokenTree::Punct(Punct::new('_', Spacing::Alone)),
TokenTree::Punct(Punct::new('=', Spacing::Joint)),
TokenTree::Punct(Punct::new('>', Spacing::Alone)),
TokenTree::Group(Group::new(Delimiter::None, quote! { #[a] () })),
]),
)),
]);
snapshot!(tokens as Expr, @r###"
Expr::Match {
expr: Expr::Path {
path: Path {
segments: [
PathSegment {
ident: "v",
arguments: None,
},
],
},
},
arms: [
Arm {
pat: Pat::Wild,
body: Expr::Group {
expr: Expr::Tuple {
attrs: [
Attribute {
style: Outer,
path: Path {
segments: [
PathSegment {
ident: "a",
arguments: None,
},
],
},
tokens: TokenStream(``),
},
],
},
},
},
],
}
"###);
}
// https://github.com/dtolnay/syn/issues/1019
#[test]
fn test_closure_vs_rangefull() {
#[rustfmt::skip] // rustfmt bug: https://github.com/rust-lang/rustfmt/issues/4808
let tokens = quote!(|| .. .method());
snapshot!(tokens as Expr, @r###"
Expr::MethodCall {
receiver: Expr::Closure {
output: Default,
body: Expr::Range {
limits: HalfOpen,
},
},
method: "method",
}
"###);
}
#[test]
fn test_postfix_operator_after_cast() {
syn::parse_str::<Expr>("|| &x as T[0]").unwrap_err();
syn::parse_str::<Expr>("|| () as ()()").unwrap_err();
}


@@ -0,0 +1,285 @@
#![allow(clippy::too_many_lines)]
#[macro_use]
mod macros;
use quote::quote;
use syn::{DeriveInput, ItemFn, TypeParamBound, WhereClause, WherePredicate};
#[test]
fn test_split_for_impl() {
let input = quote! {
struct S<'a, 'b: 'a, #[may_dangle] T: 'a = ()> where T: Debug;
};
snapshot!(input as DeriveInput, @r###"
DeriveInput {
vis: Inherited,
ident: "S",
generics: Generics {
lt_token: Some,
params: [
Lifetime(LifetimeDef {
lifetime: Lifetime {
ident: "a",
},
}),
Lifetime(LifetimeDef {
lifetime: Lifetime {
ident: "b",
},
colon_token: Some,
bounds: [
Lifetime {
ident: "a",
},
],
}),
Type(TypeParam {
attrs: [
Attribute {
style: Outer,
path: Path {
segments: [
PathSegment {
ident: "may_dangle",
arguments: None,
},
],
},
tokens: TokenStream(``),
},
],
ident: "T",
colon_token: Some,
bounds: [
Lifetime(Lifetime {
ident: "a",
}),
],
eq_token: Some,
default: Some(Type::Tuple),
}),
],
gt_token: Some,
where_clause: Some(WhereClause {
predicates: [
Type(PredicateType {
bounded_ty: Type::Path {
path: Path {
segments: [
PathSegment {
ident: "T",
arguments: None,
},
],
},
},
bounds: [
Trait(TraitBound {
modifier: None,
path: Path {
segments: [
PathSegment {
ident: "Debug",
arguments: None,
},
],
},
}),
],
}),
],
}),
},
data: Data::Struct {
fields: Unit,
semi_token: Some,
},
}
"###);
let generics = input.generics;
let (impl_generics, ty_generics, where_clause) = generics.split_for_impl();
let generated = quote! {
impl #impl_generics MyTrait for Test #ty_generics #where_clause {}
};
let expected = quote! {
impl<'a, 'b: 'a, #[may_dangle] T: 'a> MyTrait
for Test<'a, 'b, T>
where
T: Debug
{}
};
assert_eq!(generated.to_string(), expected.to_string());
let turbofish = ty_generics.as_turbofish();
let generated = quote! {
Test #turbofish
};
let expected = quote! {
Test::<'a, 'b, T>
};
assert_eq!(generated.to_string(), expected.to_string());
}
#[test]
fn test_ty_param_bound() {
let tokens = quote!('a);
snapshot!(tokens as TypeParamBound, @r###"
Lifetime(Lifetime {
ident: "a",
})
"###);
let tokens = quote!('_);
snapshot!(tokens as TypeParamBound, @r###"
Lifetime(Lifetime {
ident: "_",
})
"###);
let tokens = quote!(Debug);
snapshot!(tokens as TypeParamBound, @r###"
Trait(TraitBound {
modifier: None,
path: Path {
segments: [
PathSegment {
ident: "Debug",
arguments: None,
},
],
},
})
"###);
let tokens = quote!(?Sized);
snapshot!(tokens as TypeParamBound, @r###"
Trait(TraitBound {
modifier: Maybe,
path: Path {
segments: [
PathSegment {
ident: "Sized",
arguments: None,
},
],
},
})
"###);
}
#[test]
fn test_fn_precedence_in_where_clause() {
// This should parse as two separate bounds, `FnOnce() -> i32` and `Send` - not
// `FnOnce() -> (i32 + Send)`.
let input = quote! {
fn f<G>()
where
G: FnOnce() -> i32 + Send,
{
}
};
snapshot!(input as ItemFn, @r###"
ItemFn {
vis: Inherited,
sig: Signature {
ident: "f",
generics: Generics {
lt_token: Some,
params: [
Type(TypeParam {
ident: "G",
}),
],
gt_token: Some,
where_clause: Some(WhereClause {
predicates: [
Type(PredicateType {
bounded_ty: Type::Path {
path: Path {
segments: [
PathSegment {
ident: "G",
arguments: None,
},
],
},
},
bounds: [
Trait(TraitBound {
modifier: None,
path: Path {
segments: [
PathSegment {
ident: "FnOnce",
arguments: PathArguments::Parenthesized {
output: Type(
Type::Path {
path: Path {
segments: [
PathSegment {
ident: "i32",
arguments: None,
},
],
},
},
),
},
},
],
},
}),
Trait(TraitBound {
modifier: None,
path: Path {
segments: [
PathSegment {
ident: "Send",
arguments: None,
},
],
},
}),
],
}),
],
}),
},
output: Default,
},
block: Block,
}
"###);
let where_clause = input.sig.generics.where_clause.as_ref().unwrap();
assert_eq!(where_clause.predicates.len(), 1);
let predicate = match &where_clause.predicates[0] {
WherePredicate::Type(pred) => pred,
_ => panic!("wrong predicate kind"),
};
assert_eq!(predicate.bounds.len(), 2, "{:#?}", predicate.bounds);
let first_bound = &predicate.bounds[0];
assert_eq!(quote!(#first_bound).to_string(), "FnOnce () -> i32");
let second_bound = &predicate.bounds[1];
assert_eq!(quote!(#second_bound).to_string(), "Send");
}
#[test]
fn test_where_clause_at_end_of_input() {
let input = quote! {
where
};
snapshot!(input as WhereClause, @"WhereClause");
assert_eq!(input.predicates.len(), 0);
}


@@ -0,0 +1,52 @@
#[macro_use]
mod macros;
use proc_macro2::{Delimiter, Group, Literal, Punct, Spacing, TokenStream, TokenTree};
use std::iter::FromIterator;
use syn::Expr;
#[test]
fn test_grouping() {
let tokens: TokenStream = TokenStream::from_iter(vec![
TokenTree::Literal(Literal::i32_suffixed(1)),
TokenTree::Punct(Punct::new('+', Spacing::Alone)),
TokenTree::Group(Group::new(
Delimiter::None,
TokenStream::from_iter(vec![
TokenTree::Literal(Literal::i32_suffixed(2)),
TokenTree::Punct(Punct::new('+', Spacing::Alone)),
TokenTree::Literal(Literal::i32_suffixed(3)),
]),
)),
TokenTree::Punct(Punct::new('*', Spacing::Alone)),
TokenTree::Literal(Literal::i32_suffixed(4)),
]);
assert_eq!(tokens.to_string(), "1i32 + 2i32 + 3i32 * 4i32");
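// the None-delimited group prints without any delimiters, yet it still binds like parentheses: the expression parses as `1i32 + ((2i32 + 3i32) * 4i32)`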
snapshot!(tokens as Expr, @r###"
Expr::Binary {
left: Expr::Lit {
lit: 1i32,
},
op: Add,
right: Expr::Binary {
left: Expr::Group {
expr: Expr::Binary {
left: Expr::Lit {
lit: 2i32,
},
op: Add,
right: Expr::Lit {
lit: 3i32,
},
},
},
op: Mul,
right: Expr::Lit {
lit: 4i32,
},
},
}
"###);
}

85
zeroidc/vendor/syn/tests/test_ident.rs vendored Normal file

@@ -0,0 +1,85 @@
use proc_macro2::{Ident, Span, TokenStream};
use std::str::FromStr;
use syn::Result;
fn parse(s: &str) -> Result<Ident> {
syn::parse2(TokenStream::from_str(s).unwrap())
}
fn new(s: &str) -> Ident {
Ident::new(s, Span::call_site())
}
#[test]
fn ident_parse() {
parse("String").unwrap();
}
#[test]
fn ident_parse_keyword() {
parse("abstract").unwrap_err();
}
#[test]
fn ident_parse_empty() {
parse("").unwrap_err();
}
#[test]
fn ident_parse_lifetime() {
parse("'static").unwrap_err();
}
#[test]
fn ident_parse_underscore() {
parse("_").unwrap_err();
}
#[test]
fn ident_parse_number() {
parse("255").unwrap_err();
}
#[test]
fn ident_parse_invalid() {
parse("a#").unwrap_err();
}
#[test]
fn ident_new() {
new("String");
}
#[test]
fn ident_new_keyword() {
new("abstract");
}
#[test]
#[should_panic(expected = "use Option<Ident>")]
fn ident_new_empty() {
new("");
}
#[test]
#[should_panic(expected = "not a valid Ident")]
fn ident_new_lifetime() {
new("'static");
}
#[test]
fn ident_new_underscore() {
new("_");
}
#[test]
#[should_panic(expected = "use Literal instead")]
fn ident_new_number() {
new("255");
}
#[test]
#[should_panic(expected = "\"a#\" is not a valid Ident")]
fn ident_new_invalid() {
new("a#");
}

336
zeroidc/vendor/syn/tests/test_item.rs vendored Normal file

@@ -0,0 +1,336 @@
#[macro_use]
mod macros;
use proc_macro2::{Delimiter, Group, Ident, Span, TokenStream, TokenTree};
use quote::quote;
use std::iter::FromIterator;
use syn::{Item, ItemTrait};
#[test]
fn test_macro_variable_attr() {
// mimics the token stream corresponding to `$attr fn f() {}`
let tokens = TokenStream::from_iter(vec![
TokenTree::Group(Group::new(Delimiter::None, quote! { #[test] })),
TokenTree::Ident(Ident::new("fn", Span::call_site())),
TokenTree::Ident(Ident::new("f", Span::call_site())),
TokenTree::Group(Group::new(Delimiter::Parenthesis, TokenStream::new())),
TokenTree::Group(Group::new(Delimiter::Brace, TokenStream::new())),
]);
snapshot!(tokens as Item, @r###"
Item::Fn {
attrs: [
Attribute {
style: Outer,
path: Path {
segments: [
PathSegment {
ident: "test",
arguments: None,
},
],
},
tokens: TokenStream(``),
},
],
vis: Inherited,
sig: Signature {
ident: "f",
generics: Generics,
output: Default,
},
block: Block,
}
"###);
}
#[test]
fn test_negative_impl() {
// Rustc parses all of the following.
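// `impl ! {}` impls the never type and `impl !Trait for T {}` is a negative impl; the stranger `impl !Trait {}` and `impl !! {}` forms come back from syn as `Type::Verbatim` self types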
#[cfg(any())]
impl ! {}
let tokens = quote! {
impl ! {}
};
snapshot!(tokens as Item, @r###"
Item::Impl {
generics: Generics,
self_ty: Type::Never,
}
"###);
#[cfg(any())]
#[rustfmt::skip]
impl !Trait {}
let tokens = quote! {
impl !Trait {}
};
snapshot!(tokens as Item, @r###"
Item::Impl {
generics: Generics,
self_ty: Verbatim(`! Trait`),
}
"###);
#[cfg(any())]
impl !Trait for T {}
let tokens = quote! {
impl !Trait for T {}
};
snapshot!(tokens as Item, @r###"
Item::Impl {
generics: Generics,
trait_: Some((
Some,
Path {
segments: [
PathSegment {
ident: "Trait",
arguments: None,
},
],
},
)),
self_ty: Type::Path {
path: Path {
segments: [
PathSegment {
ident: "T",
arguments: None,
},
],
},
},
}
"###);
#[cfg(any())]
#[rustfmt::skip]
impl !! {}
let tokens = quote! {
impl !! {}
};
snapshot!(tokens as Item, @r###"
Item::Impl {
generics: Generics,
self_ty: Verbatim(`! !`),
}
"###);
}
#[test]
fn test_macro_variable_impl() {
// mimics the token stream corresponding to `impl $trait for $ty {}`
let tokens = TokenStream::from_iter(vec![
TokenTree::Ident(Ident::new("impl", Span::call_site())),
TokenTree::Group(Group::new(Delimiter::None, quote!(Trait))),
TokenTree::Ident(Ident::new("for", Span::call_site())),
TokenTree::Group(Group::new(Delimiter::None, quote!(Type))),
TokenTree::Group(Group::new(Delimiter::Brace, TokenStream::new())),
]);
snapshot!(tokens as Item, @r###"
Item::Impl {
generics: Generics,
trait_: Some((
None,
Path {
segments: [
PathSegment {
ident: "Trait",
arguments: None,
},
],
},
)),
self_ty: Type::Group {
elem: Type::Path {
path: Path {
segments: [
PathSegment {
ident: "Type",
arguments: None,
},
],
},
},
},
}
"###);
}
#[test]
fn test_supertraits() {
// Rustc parses all of the following.
#[rustfmt::skip]
let tokens = quote!(trait Trait where {});
snapshot!(tokens as ItemTrait, @r###"
ItemTrait {
vis: Inherited,
ident: "Trait",
generics: Generics {
where_clause: Some(WhereClause),
},
}
"###);
#[rustfmt::skip]
let tokens = quote!(trait Trait: where {});
snapshot!(tokens as ItemTrait, @r###"
ItemTrait {
vis: Inherited,
ident: "Trait",
generics: Generics {
where_clause: Some(WhereClause),
},
colon_token: Some,
}
"###);
#[rustfmt::skip]
let tokens = quote!(trait Trait: Sized where {});
snapshot!(tokens as ItemTrait, @r###"
ItemTrait {
vis: Inherited,
ident: "Trait",
generics: Generics {
where_clause: Some(WhereClause),
},
colon_token: Some,
supertraits: [
Trait(TraitBound {
modifier: None,
path: Path {
segments: [
PathSegment {
ident: "Sized",
arguments: None,
},
],
},
}),
],
}
"###);
#[rustfmt::skip]
let tokens = quote!(trait Trait: Sized + where {});
snapshot!(tokens as ItemTrait, @r###"
ItemTrait {
vis: Inherited,
ident: "Trait",
generics: Generics {
where_clause: Some(WhereClause),
},
colon_token: Some,
supertraits: [
Trait(TraitBound {
modifier: None,
path: Path {
segments: [
PathSegment {
ident: "Sized",
arguments: None,
},
],
},
}),
],
}
"###);
}
#[test]
fn test_type_empty_bounds() {
#[rustfmt::skip]
let tokens = quote! {
trait Foo {
type Bar: ;
}
};
snapshot!(tokens as ItemTrait, @r###"
ItemTrait {
vis: Inherited,
ident: "Foo",
generics: Generics,
items: [
TraitItem::Type {
ident: "Bar",
generics: Generics,
colon_token: Some,
},
],
}
"###);
}
#[test]
fn test_impl_visibility() {
let tokens = quote! {
pub default unsafe impl union {}
};
snapshot!(tokens as Item, @"Verbatim(`pub default unsafe impl union { }`)");
}
#[test]
fn test_impl_type_parameter_defaults() {
#[cfg(any())]
impl<T = ()> () {}
let tokens = quote! {
impl<T = ()> () {}
};
snapshot!(tokens as Item, @r###"
Item::Impl {
generics: Generics {
lt_token: Some,
params: [
Type(TypeParam {
ident: "T",
eq_token: Some,
default: Some(Type::Tuple),
}),
],
gt_token: Some,
},
self_ty: Type::Tuple,
}"###);
}
#[test]
fn test_impl_trait_trailing_plus() {
let tokens = quote! {
fn f() -> impl Sized + {}
};
snapshot!(tokens as Item, @r###"
Item::Fn {
vis: Inherited,
sig: Signature {
ident: "f",
generics: Generics,
output: Type(
Type::ImplTrait {
bounds: [
Trait(TraitBound {
modifier: None,
path: Path {
segments: [
PathSegment {
ident: "Sized",
arguments: None,
},
],
},
}),
],
},
),
},
block: Block,
}
"###);
}


@@ -0,0 +1,49 @@
use syn::punctuated::{Pair, Punctuated};
use syn::Token;
#[macro_use]
mod macros;
macro_rules! check_exact_size_iterator {
($iter:expr) => {{
let iter = $iter;
let size_hint = iter.size_hint();
let len = iter.len();
let count = iter.count();
assert_eq!(len, count);
assert_eq!(size_hint, (count, Some(count)));
}};
}
#[test]
fn pairs() {
let mut p: Punctuated<_, Token![,]> = punctuated!(2, 3, 4);
check_exact_size_iterator!(p.pairs());
check_exact_size_iterator!(p.pairs_mut());
check_exact_size_iterator!(p.into_pairs());
let mut p: Punctuated<_, Token![,]> = punctuated!(2, 3, 4);
assert_eq!(p.pairs().next_back().map(Pair::into_value), Some(&4));
assert_eq!(
p.pairs_mut().next_back().map(Pair::into_value),
Some(&mut 4)
);
assert_eq!(p.into_pairs().next_back().map(Pair::into_value), Some(4));
}
#[test]
fn iter() {
let mut p: Punctuated<_, Token![,]> = punctuated!(2, 3, 4);
check_exact_size_iterator!(p.iter());
check_exact_size_iterator!(p.iter_mut());
check_exact_size_iterator!(p.into_iter());
let mut p: Punctuated<_, Token![,]> = punctuated!(2, 3, 4);
assert_eq!(p.iter().next_back(), Some(&4));
assert_eq!(p.iter_mut().next_back(), Some(&mut 4));
assert_eq!(p.into_iter().next_back(), Some(4));
}

266
zeroidc/vendor/syn/tests/test_lit.rs vendored Normal file

@@ -0,0 +1,266 @@
#![allow(clippy::float_cmp, clippy::non_ascii_literal)]
#[macro_use]
mod macros;
use proc_macro2::{Delimiter, Group, Literal, Span, TokenStream, TokenTree};
use quote::ToTokens;
use std::iter::FromIterator;
use std::str::FromStr;
use syn::{Lit, LitFloat, LitInt, LitStr};
fn lit(s: &str) -> Lit {
match TokenStream::from_str(s)
.unwrap()
.into_iter()
.next()
.unwrap()
{
TokenTree::Literal(lit) => Lit::new(lit),
_ => panic!(),
}
}
#[test]
fn strings() {
fn test_string(s: &str, value: &str) {
match lit(s) {
Lit::Str(lit) => {
assert_eq!(lit.value(), value);
let again = lit.into_token_stream().to_string();
if again != s {
test_string(&again, value);
}
}
wrong => panic!("{:?}", wrong),
}
}
test_string("\"a\"", "a");
test_string("\"\\n\"", "\n");
test_string("\"\\r\"", "\r");
test_string("\"\\t\"", "\t");
test_string("\"🐕\"", "🐕"); // NOTE: This is an emoji
test_string("\"\\\"\"", "\"");
test_string("\"'\"", "'");
test_string("\"\"", "");
test_string("\"\\u{1F415}\"", "\u{1F415}");
test_string("\"\\u{1_2__3_}\"", "\u{123}");
test_string(
"\"contains\nnewlines\\\nescaped newlines\"",
"contains\nnewlinesescaped newlines",
);
test_string("r\"raw\nstring\\\nhere\"", "raw\nstring\\\nhere");
test_string("\"...\"q", "...");
test_string("r\"...\"q", "...");
test_string("r##\"...\"##q", "...");
}
#[test]
fn byte_strings() {
fn test_byte_string(s: &str, value: &[u8]) {
match lit(s) {
Lit::ByteStr(lit) => {
assert_eq!(lit.value(), value);
let again = lit.into_token_stream().to_string();
if again != s {
test_byte_string(&again, value);
}
}
wrong => panic!("{:?}", wrong),
}
}
test_byte_string("b\"a\"", b"a");
test_byte_string("b\"\\n\"", b"\n");
test_byte_string("b\"\\r\"", b"\r");
test_byte_string("b\"\\t\"", b"\t");
test_byte_string("b\"\\\"\"", b"\"");
test_byte_string("b\"'\"", b"'");
test_byte_string("b\"\"", b"");
test_byte_string(
"b\"contains\nnewlines\\\nescaped newlines\"",
b"contains\nnewlinesescaped newlines",
);
test_byte_string("br\"raw\nstring\\\nhere\"", b"raw\nstring\\\nhere");
test_byte_string("b\"...\"q", b"...");
test_byte_string("br\"...\"q", b"...");
test_byte_string("br##\"...\"##q", b"...");
}
#[test]
fn bytes() {
fn test_byte(s: &str, value: u8) {
match lit(s) {
Lit::Byte(lit) => {
assert_eq!(lit.value(), value);
let again = lit.into_token_stream().to_string();
assert_eq!(again, s);
}
wrong => panic!("{:?}", wrong),
}
}
test_byte("b'a'", b'a');
test_byte("b'\\n'", b'\n');
test_byte("b'\\r'", b'\r');
test_byte("b'\\t'", b'\t');
test_byte("b'\\''", b'\'');
test_byte("b'\"'", b'"');
test_byte("b'a'q", b'a');
}
#[test]
fn chars() {
fn test_char(s: &str, value: char) {
match lit(s) {
Lit::Char(lit) => {
assert_eq!(lit.value(), value);
let again = lit.into_token_stream().to_string();
if again != s {
test_char(&again, value);
}
}
wrong => panic!("{:?}", wrong),
}
}
test_char("'a'", 'a');
test_char("'\\n'", '\n');
test_char("'\\r'", '\r');
test_char("'\\t'", '\t');
test_char("'🐕'", '🐕'); // NOTE: This is an emoji
test_char("'\\''", '\'');
test_char("'\"'", '"');
test_char("'\\u{1F415}'", '\u{1F415}');
test_char("'a'q", 'a');
}
#[test]
fn ints() {
fn test_int(s: &str, value: u64, suffix: &str) {
match lit(s) {
Lit::Int(lit) => {
assert_eq!(lit.base10_digits().parse::<u64>().unwrap(), value);
assert_eq!(lit.suffix(), suffix);
let again = lit.into_token_stream().to_string();
if again != s {
test_int(&again, value, suffix);
}
}
wrong => panic!("{:?}", wrong),
}
}
test_int("5", 5, "");
test_int("5u32", 5, "u32");
test_int("0E", 0, "E");
test_int("0ECMA", 0, "ECMA");
test_int("0o0A", 0, "A");
test_int("5_0", 50, "");
test_int("5_____0_____", 50, "");
test_int("0x7f", 127, "");
test_int("0x7F", 127, "");
test_int("0b1001", 9, "");
test_int("0o73", 59, "");
test_int("0x7Fu8", 127, "u8");
test_int("0b1001i8", 9, "i8");
test_int("0o73u32", 59, "u32");
test_int("0x__7___f_", 127, "");
test_int("0x__7___F_", 127, "");
test_int("0b_1_0__01", 9, "");
test_int("0o_7__3", 59, "");
test_int("0x_7F__u8", 127, "u8");
test_int("0b__10__0_1i8", 9, "i8");
test_int("0o__7__________________3u32", 59, "u32");
test_int("0e1\u{5c5}", 0, "e1\u{5c5}");
}
#[test]
fn floats() {
fn test_float(s: &str, value: f64, suffix: &str) {
match lit(s) {
Lit::Float(lit) => {
assert_eq!(lit.base10_digits().parse::<f64>().unwrap(), value);
assert_eq!(lit.suffix(), suffix);
let again = lit.into_token_stream().to_string();
if again != s {
test_float(&again, value, suffix);
}
}
wrong => panic!("{:?}", wrong),
}
}
test_float("5.5", 5.5, "");
test_float("5.5E12", 5.5e12, "");
test_float("5.5e12", 5.5e12, "");
test_float("1.0__3e-12", 1.03e-12, "");
test_float("1.03e+12", 1.03e12, "");
test_float("9e99e99", 9e99, "e99");
test_float("1e_0", 1.0, "");
test_float("0.0ECMA", 0.0, "ECMA");
}
#[test]
fn negative() {
let span = Span::call_site();
assert_eq!("-1", LitInt::new("-1", span).to_string());
assert_eq!("-1i8", LitInt::new("-1i8", span).to_string());
assert_eq!("-1i16", LitInt::new("-1i16", span).to_string());
assert_eq!("-1i32", LitInt::new("-1i32", span).to_string());
assert_eq!("-1i64", LitInt::new("-1i64", span).to_string());
assert_eq!("-1.5", LitFloat::new("-1.5", span).to_string());
assert_eq!("-1.5f32", LitFloat::new("-1.5f32", span).to_string());
assert_eq!("-1.5f64", LitFloat::new("-1.5f64", span).to_string());
}
#[test]
fn suffix() {
fn get_suffix(token: &str) -> String {
let lit = syn::parse_str::<Lit>(token).unwrap();
match lit {
Lit::Str(lit) => lit.suffix().to_owned(),
Lit::ByteStr(lit) => lit.suffix().to_owned(),
Lit::Byte(lit) => lit.suffix().to_owned(),
Lit::Char(lit) => lit.suffix().to_owned(),
Lit::Int(lit) => lit.suffix().to_owned(),
Lit::Float(lit) => lit.suffix().to_owned(),
_ => unimplemented!(),
}
}
assert_eq!(get_suffix("\"\"s"), "s");
assert_eq!(get_suffix("r\"\"r"), "r");
assert_eq!(get_suffix("b\"\"b"), "b");
assert_eq!(get_suffix("br\"\"br"), "br");
assert_eq!(get_suffix("r#\"\"#r"), "r");
assert_eq!(get_suffix("'c'c"), "c");
assert_eq!(get_suffix("b'b'b"), "b");
assert_eq!(get_suffix("1i32"), "i32");
assert_eq!(get_suffix("1_i32"), "i32");
assert_eq!(get_suffix("1.0f32"), "f32");
assert_eq!(get_suffix("1.0_f32"), "f32");
}
#[test]
fn test_deep_group_empty() {
let tokens = TokenStream::from_iter(vec![TokenTree::Group(Group::new(
Delimiter::None,
TokenStream::from_iter(vec![TokenTree::Group(Group::new(
Delimiter::None,
TokenStream::from_iter(vec![TokenTree::Literal(Literal::string("hi"))]),
))]),
))]);
snapshot!(tokens as Lit, @r#""hi""# );
}
#[test]
fn test_error() {
let err = syn::parse_str::<LitStr>("...").unwrap_err();
assert_eq!("expected string literal", err.to_string());
let err = syn::parse_str::<LitStr>("5").unwrap_err();
assert_eq!("expected string literal", err.to_string());
}

378
zeroidc/vendor/syn/tests/test_meta.rs vendored Normal file

@@ -0,0 +1,378 @@
#![allow(clippy::shadow_unrelated, clippy::too_many_lines)]
#[macro_use]
mod macros;
use syn::{Meta, MetaList, MetaNameValue, NestedMeta};
#[test]
fn test_parse_meta_item_word() {
let input = "hello";
snapshot!(input as Meta, @r###"
Path(Path {
segments: [
PathSegment {
ident: "hello",
arguments: None,
},
],
})
"###);
}
#[test]
fn test_parse_meta_name_value() {
let input = "foo = 5";
let (inner, meta) = (input, input);
snapshot!(inner as MetaNameValue, @r###"
MetaNameValue {
path: Path {
segments: [
PathSegment {
ident: "foo",
arguments: None,
},
],
},
lit: 5,
}
"###);
snapshot!(meta as Meta, @r###"
Meta::NameValue {
path: Path {
segments: [
PathSegment {
ident: "foo",
arguments: None,
},
],
},
lit: 5,
}
"###);
assert_eq!(meta, inner.into());
}
#[test]
fn test_parse_meta_name_value_with_keyword() {
let input = "static = 5";
let (inner, meta) = (input, input);
snapshot!(inner as MetaNameValue, @r###"
MetaNameValue {
path: Path {
segments: [
PathSegment {
ident: "static",
arguments: None,
},
],
},
lit: 5,
}
"###);
snapshot!(meta as Meta, @r###"
Meta::NameValue {
path: Path {
segments: [
PathSegment {
ident: "static",
arguments: None,
},
],
},
lit: 5,
}
"###);
assert_eq!(meta, inner.into());
}
#[test]
fn test_parse_meta_name_value_with_bool() {
let input = "true = 5";
let (inner, meta) = (input, input);
snapshot!(inner as MetaNameValue, @r###"
MetaNameValue {
path: Path {
segments: [
PathSegment {
ident: "true",
arguments: None,
},
],
},
lit: 5,
}
"###);
snapshot!(meta as Meta, @r###"
Meta::NameValue {
path: Path {
segments: [
PathSegment {
ident: "true",
arguments: None,
},
],
},
lit: 5,
}
"###);
assert_eq!(meta, inner.into());
}
#[test]
fn test_parse_meta_item_list_lit() {
let input = "foo(5)";
let (inner, meta) = (input, input);
snapshot!(inner as MetaList, @r###"
MetaList {
path: Path {
segments: [
PathSegment {
ident: "foo",
arguments: None,
},
],
},
nested: [
Lit(5),
],
}
"###);
snapshot!(meta as Meta, @r###"
Meta::List {
path: Path {
segments: [
PathSegment {
ident: "foo",
arguments: None,
},
],
},
nested: [
Lit(5),
],
}
"###);
assert_eq!(meta, inner.into());
}
#[test]
fn test_parse_meta_item_multiple() {
let input = "foo(word, name = 5, list(name2 = 6), word2)";
let (inner, meta) = (input, input);
snapshot!(inner as MetaList, @r###"
MetaList {
path: Path {
segments: [
PathSegment {
ident: "foo",
arguments: None,
},
],
},
nested: [
Meta(Path(Path {
segments: [
PathSegment {
ident: "word",
arguments: None,
},
],
})),
Meta(Meta::NameValue {
path: Path {
segments: [
PathSegment {
ident: "name",
arguments: None,
},
],
},
lit: 5,
}),
Meta(Meta::List {
path: Path {
segments: [
PathSegment {
ident: "list",
arguments: None,
},
],
},
nested: [
Meta(Meta::NameValue {
path: Path {
segments: [
PathSegment {
ident: "name2",
arguments: None,
},
],
},
lit: 6,
}),
],
}),
Meta(Path(Path {
segments: [
PathSegment {
ident: "word2",
arguments: None,
},
],
})),
],
}
"###);
snapshot!(meta as Meta, @r###"
Meta::List {
path: Path {
segments: [
PathSegment {
ident: "foo",
arguments: None,
},
],
},
nested: [
Meta(Path(Path {
segments: [
PathSegment {
ident: "word",
arguments: None,
},
],
})),
Meta(Meta::NameValue {
path: Path {
segments: [
PathSegment {
ident: "name",
arguments: None,
},
],
},
lit: 5,
}),
Meta(Meta::List {
path: Path {
segments: [
PathSegment {
ident: "list",
arguments: None,
},
],
},
nested: [
Meta(Meta::NameValue {
path: Path {
segments: [
PathSegment {
ident: "name2",
arguments: None,
},
],
},
lit: 6,
}),
],
}),
Meta(Path(Path {
segments: [
PathSegment {
ident: "word2",
arguments: None,
},
],
})),
],
}
"###);
assert_eq!(meta, inner.into());
}
#[test]
fn test_parse_nested_meta() {
let input = "5";
snapshot!(input as NestedMeta, @"Lit(5)");
let input = "list(name2 = 6)";
snapshot!(input as NestedMeta, @r###"
Meta(Meta::List {
path: Path {
segments: [
PathSegment {
ident: "list",
arguments: None,
},
],
},
nested: [
Meta(Meta::NameValue {
path: Path {
segments: [
PathSegment {
ident: "name2",
arguments: None,
},
],
},
lit: 6,
}),
],
})
"###);
}
#[test]
fn test_parse_path() {
let input = "::serde::Serialize";
snapshot!(input as Meta, @r###"
Path(Path {
leading_colon: Some,
segments: [
PathSegment {
ident: "serde",
arguments: None,
},
PathSegment {
ident: "Serialize",
arguments: None,
},
],
})
"###);
let input = "::serde::Serialize";
snapshot!(input as NestedMeta, @r###"
Meta(Path(Path {
leading_colon: Some,
segments: [
PathSegment {
ident: "serde",
arguments: None,
},
PathSegment {
ident: "Serialize",
arguments: None,
},
],
}))
"###);
}


@@ -0,0 +1,92 @@
#![allow(clippy::non_ascii_literal)]
use proc_macro2::{Delimiter, Group, Punct, Spacing, TokenStream, TokenTree};
use std::iter::FromIterator;
use syn::parse::{discouraged::Speculative, Parse, ParseStream, Parser, Result};
use syn::{parenthesized, Token};
#[test]
#[should_panic(expected = "Fork was not derived from the advancing parse stream")]
fn smuggled_speculative_cursor_between_sources() {
struct BreakRules;
impl Parse for BreakRules {
fn parse(input1: ParseStream) -> Result<Self> {
let nested = |input2: ParseStream| {
input1.advance_to(input2);
Ok(Self)
};
nested.parse_str("")
}
}
syn::parse_str::<BreakRules>("").unwrap();
}
#[test]
#[should_panic(expected = "Fork was not derived from the advancing parse stream")]
fn smuggled_speculative_cursor_between_brackets() {
struct BreakRules;
impl Parse for BreakRules {
fn parse(input: ParseStream) -> Result<Self> {
let a;
let b;
parenthesized!(a in input);
parenthesized!(b in input);
a.advance_to(&b);
Ok(Self)
}
}
syn::parse_str::<BreakRules>("()()").unwrap();
}
#[test]
#[should_panic(expected = "Fork was not derived from the advancing parse stream")]
fn smuggled_speculative_cursor_into_brackets() {
struct BreakRules;
impl Parse for BreakRules {
fn parse(input: ParseStream) -> Result<Self> {
let a;
parenthesized!(a in input);
input.advance_to(&a);
Ok(Self)
}
}
syn::parse_str::<BreakRules>("()").unwrap();
}
#[test]
fn trailing_empty_none_group() {
fn parse(input: ParseStream) -> Result<()> {
input.parse::<Token![+]>()?;
let content;
parenthesized!(content in input);
content.parse::<Token![+]>()?;
Ok(())
}
// `+ ( + <Ø Ø> ) <Ø <Ø Ø> Ø>`
let tokens = TokenStream::from_iter(vec![
TokenTree::Punct(Punct::new('+', Spacing::Alone)),
TokenTree::Group(Group::new(
Delimiter::Parenthesis,
TokenStream::from_iter(vec![
TokenTree::Punct(Punct::new('+', Spacing::Alone)),
TokenTree::Group(Group::new(Delimiter::None, TokenStream::new())),
]),
)),
TokenTree::Group(Group::new(Delimiter::None, TokenStream::new())),
TokenTree::Group(Group::new(
Delimiter::None,
TokenStream::from_iter(vec![TokenTree::Group(Group::new(
Delimiter::None,
TokenStream::new(),
))]),
)),
]);
parse.parse2(tokens).unwrap();
}


@@ -0,0 +1,12 @@
use syn::ext::IdentExt;
use syn::parse::ParseStream;
use syn::{Ident, Token};
#[test]
fn test_peek() {
let _ = |input: ParseStream| {
let _ = input.peek(Ident);
let _ = input.peek(Ident::peek_any);
let _ = input.peek(Token![::]);
};
}

67
zeroidc/vendor/syn/tests/test_pat.rs vendored Normal file

@@ -0,0 +1,67 @@
#[macro_use]
mod macros;
use proc_macro2::{Delimiter, Group, TokenStream, TokenTree};
use quote::quote;
use std::iter::FromIterator;
use syn::{Item, Pat, Stmt};
#[test]
fn test_pat_ident() {
match syn::parse2(quote!(self)).unwrap() {
Pat::Ident(_) => (),
value => panic!("expected PatIdent, got {:?}", value),
}
}
#[test]
fn test_pat_path() {
match syn::parse2(quote!(self::CONST)).unwrap() {
Pat::Path(_) => (),
value => panic!("expected PatPath, got {:?}", value),
}
}
#[test]
fn test_leading_vert() {
// https://github.com/rust-lang/rust/blob/1.43.0/src/test/ui/or-patterns/remove-leading-vert.rs
syn::parse_str::<Item>("fn f() {}").unwrap();
syn::parse_str::<Item>("fn fun1(| A: E) {}").unwrap_err();
syn::parse_str::<Item>("fn fun2(|| A: E) {}").unwrap_err();
syn::parse_str::<Stmt>("let | () = ();").unwrap();
syn::parse_str::<Stmt>("let (| A): E;").unwrap();
syn::parse_str::<Stmt>("let (|| A): (E);").unwrap_err();
syn::parse_str::<Stmt>("let (| A,): (E,);").unwrap();
syn::parse_str::<Stmt>("let [| A]: [E; 1];").unwrap();
syn::parse_str::<Stmt>("let [|| A]: [E; 1];").unwrap_err();
syn::parse_str::<Stmt>("let TS(| A): TS;").unwrap();
syn::parse_str::<Stmt>("let TS(|| A): TS;").unwrap_err();
syn::parse_str::<Stmt>("let NS { f: | A }: NS;").unwrap();
syn::parse_str::<Stmt>("let NS { f: || A }: NS;").unwrap_err();
}
#[test]
fn test_group() {
let group = Group::new(Delimiter::None, quote!(Some(_)));
let tokens = TokenStream::from_iter(vec![TokenTree::Group(group)]);
snapshot!(tokens as Pat, @r###"
Pat::TupleStruct {
path: Path {
segments: [
PathSegment {
ident: "Some",
arguments: None,
},
],
},
pat: PatTuple {
elems: [
Pat::Wild,
],
},
}
"###);
}

126
zeroidc/vendor/syn/tests/test_path.rs vendored Normal file

@@ -0,0 +1,126 @@
#[macro_use]
mod macros;
use proc_macro2::{Delimiter, Group, Ident, Punct, Spacing, Span, TokenStream, TokenTree};
use quote::{quote, ToTokens};
use std::iter::FromIterator;
use syn::{parse_quote, Expr, Type, TypePath};
#[test]
fn parse_interpolated_leading_component() {
// mimics the token stream corresponding to `$mod::rest`
let tokens = TokenStream::from_iter(vec![
TokenTree::Group(Group::new(Delimiter::None, quote! { first })),
TokenTree::Punct(Punct::new(':', Spacing::Joint)),
TokenTree::Punct(Punct::new(':', Spacing::Alone)),
TokenTree::Ident(Ident::new("rest", Span::call_site())),
]);
snapshot!(tokens.clone() as Expr, @r###"
Expr::Path {
path: Path {
segments: [
PathSegment {
ident: "first",
arguments: None,
},
PathSegment {
ident: "rest",
arguments: None,
},
],
},
}
"###);
snapshot!(tokens as Type, @r###"
Type::Path {
path: Path {
segments: [
PathSegment {
ident: "first",
arguments: None,
},
PathSegment {
ident: "rest",
arguments: None,
},
],
},
}
"###);
}
#[test]
fn print_incomplete_qpath() {
// qpath with `as` token
let mut ty: TypePath = parse_quote!(<Self as A>::Q);
snapshot!(ty.to_token_stream(), @r###"
TokenStream(`< Self as A > :: Q`)
"###);
assert!(ty.path.segments.pop().is_some());
snapshot!(ty.to_token_stream(), @r###"
TokenStream(`< Self as A > ::`)
"###);
assert!(ty.path.segments.pop().is_some());
snapshot!(ty.to_token_stream(), @r###"
TokenStream(`< Self >`)
"###);
assert!(ty.path.segments.pop().is_none());
// qpath without `as` token
let mut ty: TypePath = parse_quote!(<Self>::A::B);
snapshot!(ty.to_token_stream(), @r###"
TokenStream(`< Self > :: A :: B`)
"###);
assert!(ty.path.segments.pop().is_some());
snapshot!(ty.to_token_stream(), @r###"
TokenStream(`< Self > :: A ::`)
"###);
assert!(ty.path.segments.pop().is_some());
snapshot!(ty.to_token_stream(), @r###"
TokenStream(`< Self > ::`)
"###);
assert!(ty.path.segments.pop().is_none());
// normal path
let mut ty: TypePath = parse_quote!(Self::A::B);
snapshot!(ty.to_token_stream(), @r###"
TokenStream(`Self :: A :: B`)
"###);
assert!(ty.path.segments.pop().is_some());
snapshot!(ty.to_token_stream(), @r###"
TokenStream(`Self :: A ::`)
"###);
assert!(ty.path.segments.pop().is_some());
snapshot!(ty.to_token_stream(), @r###"
TokenStream(`Self ::`)
"###);
assert!(ty.path.segments.pop().is_some());
snapshot!(ty.to_token_stream(), @r###"
TokenStream(``)
"###);
assert!(ty.path.segments.pop().is_none());
}
#[test]
fn parse_parenthesized_path_arguments_with_disambiguator() {
#[rustfmt::skip]
let tokens = quote!(FnOnce::() -> !);
snapshot!(tokens as Type, @r###"
Type::Path {
path: Path {
segments: [
PathSegment {
ident: "FnOnce",
arguments: PathArguments::Parenthesized {
output: Type(
Type::Never,
),
},
},
],
},
}
"###);
}


@@ -0,0 +1,455 @@
#![cfg(not(syn_disable_nightly_tests))]
#![cfg(not(miri))]
#![recursion_limit = "1024"]
#![feature(rustc_private)]
#![allow(
clippy::explicit_deref_methods,
clippy::manual_assert,
clippy::match_wildcard_for_single_variants,
clippy::too_many_lines
)]
//! The tests in this module do the following:
//!
//! 1. Parse a given expression in both `syn` and `librustc`.
//! 2. Fold over the expression adding brackets around each subexpression (with
//! some complications - see the `syn_brackets` and `librustc_brackets`
//! methods).
//! 3. Serialize the `syn` expression back into a string, and re-parse it with
//! `librustc`.
//! 4. Respan all of the expressions, replacing the spans with the default
//! spans.
//! 5. Compare the expressions with one another; fail if they are not equal.
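//!
//! For example, bracket insertion rewrites `1 + 2 * 3` as (roughly)
//! `((1) + ((2) * (3)))`; if syn and librustc ever disagreed on precedence,
//! the bracketed string would re-parse into a different tree and the
//! comparison in step 5 would fail.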
extern crate rustc_ast;
extern crate rustc_data_structures;
extern crate rustc_span;
use crate::common::eq::SpanlessEq;
use crate::common::parse;
use quote::quote;
use rayon::iter::{IntoParallelIterator, ParallelIterator};
use regex::Regex;
use rustc_ast::ast;
use rustc_ast::ptr::P;
use rustc_span::edition::Edition;
use std::fs;
use std::process;
use std::sync::atomic::{AtomicUsize, Ordering};
use walkdir::{DirEntry, WalkDir};
#[macro_use]
mod macros;
#[allow(dead_code)]
mod common;
mod repo;
/// Test some pre-set expressions chosen by us.
#[test]
fn test_simple_precedence() {
const EXPRS: &[&str] = &[
"1 + 2 * 3 + 4",
"1 + 2 * ( 3 + 4 )",
"{ for i in r { } *some_ptr += 1; }",
"{ loop { break 5; } }",
"{ if true { () }.mthd() }",
"{ for i in unsafe { 20 } { } }",
];
let mut failed = 0;
for input in EXPRS {
let expr = if let Some(expr) = parse::syn_expr(input) {
expr
} else {
failed += 1;
continue;
};
let pf = match test_expressions(Edition::Edition2018, vec![expr]) {
(1, 0) => "passed",
(0, 1) => {
failed += 1;
"failed"
}
_ => unreachable!(),
};
errorf!("=== {}: {}\n", input, pf);
}
if failed > 0 {
panic!("Failed {} tests", failed);
}
}
/// Test expressions from rustc, like in `test_round_trip`.
#[test]
fn test_rustc_precedence() {
common::rayon_init();
repo::clone_rust();
let abort_after = common::abort_after();
if abort_after == 0 {
panic!("Skipping all precedence tests");
}
let passed = AtomicUsize::new(0);
let failed = AtomicUsize::new(0);
// 2018 edition is hard
let edition_regex = Regex::new(r"\b(async|try)[!(]").unwrap();
WalkDir::new("tests/rust")
.sort_by(|a, b| a.file_name().cmp(b.file_name()))
.into_iter()
.filter_entry(repo::base_dir_filter)
.collect::<Result<Vec<DirEntry>, walkdir::Error>>()
.unwrap()
.into_par_iter()
.for_each(|entry| {
let path = entry.path();
if path.is_dir() {
return;
}
let content = fs::read_to_string(path).unwrap();
let content = edition_regex.replace_all(&content, "_$0");
let (l_passed, l_failed) = match syn::parse_file(&content) {
Ok(file) => {
let edition = repo::edition(path).parse().unwrap();
let exprs = collect_exprs(file);
test_expressions(edition, exprs)
}
Err(msg) => {
errorf!("syn failed to parse\n{:?}\n", msg);
(0, 1)
}
};
errorf!(
"=== {}: {} passed | {} failed\n",
path.display(),
l_passed,
l_failed
);
passed.fetch_add(l_passed, Ordering::Relaxed);
let prev_failed = failed.fetch_add(l_failed, Ordering::Relaxed);
if prev_failed + l_failed >= abort_after {
process::exit(1);
}
});
let passed = passed.load(Ordering::Relaxed);
let failed = failed.load(Ordering::Relaxed);
errorf!("\n===== Precedence Test Results =====\n");
errorf!("{} passed | {} failed\n", passed, failed);
if failed > 0 {
panic!("{} failures", failed);
}
}
fn test_expressions(edition: Edition, exprs: Vec<syn::Expr>) -> (usize, usize) {
let mut passed = 0;
let mut failed = 0;
rustc_span::create_session_if_not_set_then(edition, |_| {
for expr in exprs {
let raw = quote!(#expr).to_string();
let librustc_ast = if let Some(e) = librustc_parse_and_rewrite(&raw) {
e
} else {
failed += 1;
errorf!("\nFAIL - librustc failed to parse raw\n");
continue;
};
let syn_expr = syn_brackets(expr);
let syn_ast = if let Some(e) = parse::librustc_expr(&quote!(#syn_expr).to_string()) {
e
} else {
failed += 1;
errorf!("\nFAIL - librustc failed to parse bracketed\n");
continue;
};
if SpanlessEq::eq(&syn_ast, &librustc_ast) {
passed += 1;
} else {
failed += 1;
errorf!("\nFAIL\n{:?}\n!=\n{:?}\n", syn_ast, librustc_ast);
}
}
});
(passed, failed)
}
fn librustc_parse_and_rewrite(input: &str) -> Option<P<ast::Expr>> {
parse::librustc_expr(input).and_then(librustc_brackets)
}
/// Wrap every expression which is not already wrapped in parens with parens, to
/// reveal the precedence of the parsed expressions, and produce a stringified
/// form of the resulting expression.
///
/// This method operates on librustc objects.
fn librustc_brackets(mut librustc_expr: P<ast::Expr>) -> Option<P<ast::Expr>> {
use rustc_ast::ast::{
Attribute, Block, BorrowKind, Expr, ExprField, ExprKind, GenericArg, Local, LocalKind, Pat,
Stmt, StmtKind, StructExpr, StructRest, Ty,
};
use rustc_ast::mut_visit::{noop_visit_generic_arg, noop_visit_local, MutVisitor};
use rustc_data_structures::map_in_place::MapInPlace;
use rustc_data_structures::thin_vec::ThinVec;
use rustc_span::DUMMY_SP;
use std::mem;
use std::ops::DerefMut;
struct BracketsVisitor {
failed: bool,
}
fn flat_map_field<T: MutVisitor>(mut f: ExprField, vis: &mut T) -> Vec<ExprField> {
if f.is_shorthand {
noop_visit_expr(&mut f.expr, vis);
} else {
vis.visit_expr(&mut f.expr);
}
vec![f]
}
fn flat_map_stmt<T: MutVisitor>(stmt: Stmt, vis: &mut T) -> Vec<Stmt> {
let kind = match stmt.kind {
// Don't wrap toplevel expressions in statements.
StmtKind::Expr(mut e) => {
noop_visit_expr(&mut e, vis);
StmtKind::Expr(e)
}
StmtKind::Semi(mut e) => {
noop_visit_expr(&mut e, vis);
StmtKind::Semi(e)
}
s => s,
};
vec![Stmt { kind, ..stmt }]
}
fn noop_visit_expr<T: MutVisitor>(e: &mut Expr, vis: &mut T) {
use rustc_ast::mut_visit::{noop_visit_expr, visit_thin_attrs};
match &mut e.kind {
ExprKind::AddrOf(BorrowKind::Raw, ..) => {}
ExprKind::Struct(expr) => {
let StructExpr {
qself,
path,
fields,
rest,
} = expr.deref_mut();
vis.visit_qself(qself);
vis.visit_path(path);
fields.flat_map_in_place(|field| flat_map_field(field, vis));
if let StructRest::Base(rest) = rest {
vis.visit_expr(rest);
}
vis.visit_id(&mut e.id);
vis.visit_span(&mut e.span);
visit_thin_attrs(&mut e.attrs, vis);
}
_ => noop_visit_expr(e, vis),
}
}
impl MutVisitor for BracketsVisitor {
fn visit_expr(&mut self, e: &mut P<Expr>) {
match e.kind {
ExprKind::ConstBlock(..) => {}
_ => noop_visit_expr(e, self),
}
match e.kind {
ExprKind::If(..) | ExprKind::Block(..) | ExprKind::Let(..) => {}
_ => {
let inner = mem::replace(
e,
P(Expr {
id: ast::DUMMY_NODE_ID,
kind: ExprKind::Err,
span: DUMMY_SP,
attrs: ThinVec::new(),
tokens: None,
}),
);
e.kind = ExprKind::Paren(inner);
}
}
}
fn visit_generic_arg(&mut self, arg: &mut GenericArg) {
match arg {
// Don't wrap unbraced const generic arg as that's invalid syntax.
GenericArg::Const(anon_const) => {
if let ExprKind::Block(..) = &mut anon_const.value.kind {
noop_visit_expr(&mut anon_const.value, self);
}
}
_ => noop_visit_generic_arg(arg, self),
}
}
fn visit_block(&mut self, block: &mut P<Block>) {
self.visit_id(&mut block.id);
block
.stmts
.flat_map_in_place(|stmt| flat_map_stmt(stmt, self));
self.visit_span(&mut block.span);
}
fn visit_local(&mut self, local: &mut P<Local>) {
match local.kind {
LocalKind::InitElse(..) => {}
_ => noop_visit_local(local, self),
}
}
// We don't want to look at expressions that might appear in patterns or
// types yet. We'll look into comparing those in the future. For now
// focus on expressions appearing in other places.
fn visit_pat(&mut self, pat: &mut P<Pat>) {
let _ = pat;
}
fn visit_ty(&mut self, ty: &mut P<Ty>) {
let _ = ty;
}
fn visit_attribute(&mut self, attr: &mut Attribute) {
let _ = attr;
}
}
let mut folder = BracketsVisitor { failed: false };
folder.visit_expr(&mut librustc_expr);
if folder.failed {
None
} else {
Some(librustc_expr)
}
}
/// Wrap every expression which is not already wrapped in parens with parens, to
/// reveal the precedence of the parsed expressions, and produce a stringified
/// form of the resulting expression.
fn syn_brackets(syn_expr: syn::Expr) -> syn::Expr {
use syn::fold::{fold_expr, fold_generic_argument, fold_generic_method_argument, Fold};
use syn::{token, Expr, ExprParen, GenericArgument, GenericMethodArgument, Pat, Stmt, Type};
struct ParenthesizeEveryExpr;
impl Fold for ParenthesizeEveryExpr {
fn fold_expr(&mut self, expr: Expr) -> Expr {
match expr {
Expr::Group(_) => unreachable!(),
Expr::If(..) | Expr::Unsafe(..) | Expr::Block(..) | Expr::Let(..) => {
fold_expr(self, expr)
}
_ => Expr::Paren(ExprParen {
attrs: Vec::new(),
expr: Box::new(fold_expr(self, expr)),
paren_token: token::Paren::default(),
}),
}
}
fn fold_generic_argument(&mut self, arg: GenericArgument) -> GenericArgument {
match arg {
GenericArgument::Const(arg) => GenericArgument::Const(match arg {
Expr::Block(_) => fold_expr(self, arg),
// Don't wrap unbraced const generic arg as that's invalid syntax.
_ => arg,
}),
_ => fold_generic_argument(self, arg),
}
}
fn fold_generic_method_argument(
&mut self,
arg: GenericMethodArgument,
) -> GenericMethodArgument {
match arg {
GenericMethodArgument::Const(arg) => GenericMethodArgument::Const(match arg {
Expr::Block(_) => fold_expr(self, arg),
// Don't wrap unbraced const generic arg as that's invalid syntax.
_ => arg,
}),
_ => fold_generic_method_argument(self, arg),
}
}
fn fold_stmt(&mut self, stmt: Stmt) -> Stmt {
match stmt {
// Don't wrap toplevel expressions in statements.
Stmt::Expr(e) => Stmt::Expr(fold_expr(self, e)),
Stmt::Semi(e, semi) => {
if let Expr::Verbatim(_) = e {
Stmt::Semi(e, semi)
} else {
Stmt::Semi(fold_expr(self, e), semi)
}
}
s => s,
}
}
// We don't want to look at expressions that might appear in patterns or
// types yet. We'll look into comparing those in the future. For now
// focus on expressions appearing in other places.
fn fold_pat(&mut self, pat: Pat) -> Pat {
pat
}
fn fold_type(&mut self, ty: Type) -> Type {
ty
}
}
let mut folder = ParenthesizeEveryExpr;
folder.fold_expr(syn_expr)
}
/// Walk through a crate collecting all expressions we can find in it.
fn collect_exprs(file: syn::File) -> Vec<syn::Expr> {
use syn::fold::Fold;
use syn::punctuated::Punctuated;
use syn::{token, Expr, ExprTuple, Path};
struct CollectExprs(Vec<Expr>);
impl Fold for CollectExprs {
fn fold_expr(&mut self, expr: Expr) -> Expr {
match expr {
Expr::Verbatim(_) => {}
_ => self.0.push(expr),
}
Expr::Tuple(ExprTuple {
attrs: vec![],
elems: Punctuated::new(),
paren_token: token::Paren::default(),
})
}
fn fold_path(&mut self, path: Path) -> Path {
// Skip traversing into const generic path arguments
path
}
}
let mut folder = CollectExprs(vec![]);
folder.fold_file(file);
folder.0
}


@@ -0,0 +1,127 @@
use syn::{parse_quote, FnArg, Receiver, TraitItemMethod};
#[test]
fn test_by_value() {
let TraitItemMethod { sig, .. } = parse_quote! {
fn by_value(self: Self);
};
match sig.receiver() {
Some(FnArg::Typed(_)) => (),
value => panic!("expected FnArg::Typed, got {:?}", value),
}
}
#[test]
fn test_by_mut_value() {
let TraitItemMethod { sig, .. } = parse_quote! {
fn by_mut(mut self: Self);
};
match sig.receiver() {
Some(FnArg::Typed(_)) => (),
value => panic!("expected FnArg::Typed, got {:?}", value),
}
}
#[test]
fn test_by_ref() {
let TraitItemMethod { sig, .. } = parse_quote! {
fn by_ref(self: &Self);
};
match sig.receiver() {
Some(FnArg::Typed(_)) => (),
value => panic!("expected FnArg::Typed, got {:?}", value),
}
}
#[test]
fn test_by_box() {
let TraitItemMethod { sig, .. } = parse_quote! {
fn by_box(self: Box<Self>);
};
match sig.receiver() {
Some(FnArg::Typed(_)) => (),
value => panic!("expected FnArg::Typed, got {:?}", value),
}
}
#[test]
fn test_by_pin() {
let TraitItemMethod { sig, .. } = parse_quote! {
fn by_pin(self: Pin<Self>);
};
match sig.receiver() {
Some(FnArg::Typed(_)) => (),
value => panic!("expected FnArg::Typed, got {:?}", value),
}
}
#[test]
fn test_explicit_type() {
let TraitItemMethod { sig, .. } = parse_quote! {
fn explicit_type(self: Pin<MyType>);
};
match sig.receiver() {
Some(FnArg::Typed(_)) => (),
value => panic!("expected FnArg::Typed, got {:?}", value),
}
}
#[test]
fn test_value_shorthand() {
let TraitItemMethod { sig, .. } = parse_quote! {
fn value_shorthand(self);
};
match sig.receiver() {
Some(FnArg::Receiver(Receiver {
reference: None,
mutability: None,
..
})) => (),
value => panic!("expected FnArg::Receiver without ref/mut, got {:?}", value),
}
}
#[test]
fn test_mut_value_shorthand() {
let TraitItemMethod { sig, .. } = parse_quote! {
fn mut_value_shorthand(mut self);
};
match sig.receiver() {
Some(FnArg::Receiver(Receiver {
reference: None,
mutability: Some(_),
..
})) => (),
value => panic!("expected FnArg::Receiver with mut, got {:?}", value),
}
}
#[test]
fn test_ref_shorthand() {
let TraitItemMethod { sig, .. } = parse_quote! {
fn ref_shorthand(&self);
};
match sig.receiver() {
Some(FnArg::Receiver(Receiver {
reference: Some(_),
mutability: None,
..
})) => (),
value => panic!("expected FnArg::Receiver with ref, got {:?}", value),
}
}
#[test]
fn test_ref_mut_shorthand() {
let TraitItemMethod { sig, .. } = parse_quote! {
fn ref_mut_shorthand(&mut self);
};
match sig.receiver() {
Some(FnArg::Receiver(Receiver {
reference: Some(_),
mutability: Some(_),
..
})) => (),
value => panic!("expected FnArg::Receiver with ref+mut, got {:?}", value),
}
}


@@ -0,0 +1,241 @@
#![cfg(not(syn_disable_nightly_tests))]
#![cfg(not(miri))]
#![recursion_limit = "1024"]
#![feature(rustc_private)]
#![allow(clippy::manual_assert)]
extern crate rustc_ast;
extern crate rustc_data_structures;
extern crate rustc_error_messages;
extern crate rustc_errors;
extern crate rustc_expand;
extern crate rustc_parse as parse;
extern crate rustc_session;
extern crate rustc_span;
use crate::common::eq::SpanlessEq;
use quote::quote;
use rayon::iter::{IntoParallelIterator, ParallelIterator};
use rustc_ast::ast::{
AngleBracketedArg, AngleBracketedArgs, Crate, GenericArg, GenericParamKind, Generics,
WhereClause,
};
use rustc_ast::mut_visit::{self, MutVisitor};
use rustc_error_messages::{DiagnosticMessage, FluentArgs, LazyFallbackBundle};
use rustc_errors::{Diagnostic, PResult};
use rustc_session::parse::ParseSess;
use rustc_span::source_map::FilePathMapping;
use rustc_span::FileName;
use std::fs;
use std::panic;
use std::path::Path;
use std::process;
use std::sync::atomic::{AtomicUsize, Ordering};
use std::time::Instant;
use walkdir::{DirEntry, WalkDir};
#[macro_use]
mod macros;
#[allow(dead_code)]
mod common;
mod repo;
#[test]
fn test_round_trip() {
common::rayon_init();
repo::clone_rust();
let abort_after = common::abort_after();
if abort_after == 0 {
panic!("Skipping all round_trip tests");
}
let failed = AtomicUsize::new(0);
WalkDir::new("tests/rust")
.sort_by(|a, b| a.file_name().cmp(b.file_name()))
.into_iter()
.filter_entry(repo::base_dir_filter)
.collect::<Result<Vec<DirEntry>, walkdir::Error>>()
.unwrap()
.into_par_iter()
.for_each(|entry| {
let path = entry.path();
if !path.is_dir() {
test(path, &failed, abort_after);
}
});
let failed = failed.load(Ordering::Relaxed);
if failed > 0 {
panic!("{} failures", failed);
}
}
fn test(path: &Path, failed: &AtomicUsize, abort_after: usize) {
let content = fs::read_to_string(path).unwrap();
let start = Instant::now();
let (krate, elapsed) = match syn::parse_file(&content) {
Ok(krate) => (krate, start.elapsed()),
Err(msg) => {
errorf!("=== {}: syn failed to parse\n{:?}\n", path.display(), msg);
let prev_failed = failed.fetch_add(1, Ordering::Relaxed);
if prev_failed + 1 >= abort_after {
process::exit(1);
}
return;
}
};
let back = quote!(#krate).to_string();
let edition = repo::edition(path).parse().unwrap();
rustc_span::create_session_if_not_set_then(edition, |_| {
let equal = match panic::catch_unwind(|| {
let sess = ParseSess::new(FilePathMapping::empty());
let before = match librustc_parse(content, &sess) {
Ok(before) => before,
Err(diagnostic) => {
errorf!(
"=== {}: ignore - librustc failed to parse original content: {}\n",
path.display(),
translate_message(&diagnostic),
);
diagnostic.cancel();
return Err(true);
}
};
let after = match librustc_parse(back, &sess) {
Ok(after) => after,
Err(mut diagnostic) => {
errorf!("=== {}: librustc failed to parse", path.display());
diagnostic.emit();
return Err(false);
}
};
Ok((before, after))
}) {
Err(_) => {
errorf!("=== {}: ignoring librustc panic\n", path.display());
true
}
Ok(Err(equal)) => equal,
Ok(Ok((mut before, mut after))) => {
normalize(&mut before);
normalize(&mut after);
if SpanlessEq::eq(&before, &after) {
errorf!(
"=== {}: pass in {}ms\n",
path.display(),
elapsed.as_secs() * 1000 + u64::from(elapsed.subsec_nanos()) / 1_000_000
);
true
} else {
errorf!(
"=== {}: FAIL\nbefore: {:#?}\nafter: {:#?}\n",
path.display(),
before,
after,
);
false
}
}
};
if !equal {
let prev_failed = failed.fetch_add(1, Ordering::Relaxed);
if prev_failed + 1 >= abort_after {
process::exit(1);
}
}
});
}
fn librustc_parse(content: String, sess: &ParseSess) -> PResult<Crate> {
static COUNTER: AtomicUsize = AtomicUsize::new(0);
let counter = COUNTER.fetch_add(1, Ordering::Relaxed);
let name = FileName::Custom(format!("test_round_trip{}", counter));
parse::parse_crate_from_source_str(name, content, sess)
}
fn translate_message(diagnostic: &Diagnostic) -> String {
thread_local! {
static FLUENT_BUNDLE: LazyFallbackBundle = {
let resources = rustc_error_messages::DEFAULT_LOCALE_RESOURCES;
let with_directionality_markers = false;
rustc_error_messages::fallback_fluent_bundle(resources, with_directionality_markers)
};
}
let message = &diagnostic.message[0].0;
let args = diagnostic.args().iter().cloned().collect::<FluentArgs>();
let (identifier, attr) = match message {
DiagnosticMessage::Str(msg) => return msg.clone(),
DiagnosticMessage::FluentIdentifier(identifier, attr) => (identifier, attr),
};
FLUENT_BUNDLE.with(|fluent_bundle| {
let message = fluent_bundle
.get_message(identifier)
.expect("missing diagnostic in fluent bundle");
let value = match attr {
Some(attr) => message
.get_attribute(attr)
.expect("missing attribute in fluent message")
.value(),
None => message.value().expect("missing value in fluent message"),
};
let mut err = Vec::new();
let translated = fluent_bundle.format_pattern(value, Some(&args), &mut err);
assert!(err.is_empty());
translated.into_owned()
})
}
fn normalize(krate: &mut Crate) {
struct NormalizeVisitor;
impl MutVisitor for NormalizeVisitor {
fn visit_angle_bracketed_parameter_data(&mut self, e: &mut AngleBracketedArgs) {
#[derive(Ord, PartialOrd, Eq, PartialEq)]
enum Group {
Lifetimes,
TypesAndConsts,
Constraints,
}
e.args.sort_by_key(|arg| match arg {
AngleBracketedArg::Arg(arg) => match arg {
GenericArg::Lifetime(_) => Group::Lifetimes,
GenericArg::Type(_) | GenericArg::Const(_) => Group::TypesAndConsts,
},
AngleBracketedArg::Constraint(_) => Group::Constraints,
});
mut_visit::noop_visit_angle_bracketed_parameter_data(e, self);
}
fn visit_generics(&mut self, e: &mut Generics) {
#[derive(Ord, PartialOrd, Eq, PartialEq)]
enum Group {
Lifetimes,
TypesAndConsts,
}
e.params.sort_by_key(|param| match param.kind {
GenericParamKind::Lifetime => Group::Lifetimes,
GenericParamKind::Type { .. } | GenericParamKind::Const { .. } => {
Group::TypesAndConsts
}
});
mut_visit::noop_visit_generics(e, self);
}
fn visit_where_clause(&mut self, e: &mut WhereClause) {
if e.predicates.is_empty() {
e.has_where_token = false;
}
}
}
NormalizeVisitor.visit_crate(krate);
}


@@ -0,0 +1,59 @@
#[macro_use]
mod macros;
#[test]
fn test_basic() {
let content = "#!/usr/bin/env rustx\nfn main() {}";
let file = syn::parse_file(content).unwrap();
snapshot!(file, @r###"
File {
shebang: Some("#!/usr/bin/env rustx"),
items: [
Item::Fn {
vis: Inherited,
sig: Signature {
ident: "main",
generics: Generics,
output: Default,
},
block: Block,
},
],
}
"###);
}
#[test]
fn test_comment() {
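// `#!` only introduces a shebang when it is not followed (ignoring comments and whitespace) by `[`; here `[allow(dead_code)]` follows, so this parses as the inner attribute `#![allow(dead_code)]` with no shebang, as the snapshot shows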
let content = "#!//am/i/a/comment\n[allow(dead_code)] fn main() {}";
let file = syn::parse_file(content).unwrap();
snapshot!(file, @r###"
File {
attrs: [
Attribute {
style: Inner,
path: Path {
segments: [
PathSegment {
ident: "allow",
arguments: None,
},
],
},
tokens: TokenStream(`(dead_code)`),
},
],
items: [
Item::Fn {
vis: Inherited,
sig: Signature {
ident: "main",
generics: Generics,
output: Default,
},
block: Block,
},
],
}
"###);
}


@@ -0,0 +1,45 @@
macro_rules! should_parse {
($name:ident, { $($in:tt)* }) => {
#[test]
fn $name() {
// Make sure we can parse the file!
syn::parse_file(stringify!($($in)*)).unwrap();
}
}
}
should_parse!(generic_associated_type, {
impl Foo {
type Item = &'a i32;
fn foo<'a>(&'a self) -> Self::Item<'a> {}
}
});
#[rustfmt::skip]
should_parse!(const_generics_use, {
type X = Foo<5>;
type Y = Foo<"foo">;
type Z = Foo<X>;
type W = Foo<{ X + 10 }>;
});
should_parse!(trailing_plus_type, {
type A = Box<Foo>;
type A = Box<Foo + 'a>;
type A = Box<'a + Foo>;
});
should_parse!(generic_associated_type_where, {
trait Foo {
type Item;
fn foo<T>(&self, t: T) -> Self::Item<T>;
}
});
should_parse!(match_with_block_expr, {
fn main() {
match false {
_ => {}.a(),
}
}
});

29
zeroidc/vendor/syn/tests/test_size.rs vendored Normal file

@@ -0,0 +1,29 @@
#![cfg(target_pointer_width = "64")]
use std::mem;
use syn::{Expr, Item, Lit, Pat, Type};
#[test]
fn test_expr_size() {
assert_eq!(mem::size_of::<Expr>(), 280);
}
#[test]
fn test_item_size() {
assert_eq!(mem::size_of::<Item>(), 344);
}
#[test]
fn test_type_size() {
assert_eq!(mem::size_of::<Type>(), 304);
}
#[test]
fn test_pat_size() {
assert_eq!(mem::size_of::<Pat>(), 144);
}
#[test]
fn test_lit_size() {
assert_eq!(mem::size_of::<Lit>(), 40);
}

93
zeroidc/vendor/syn/tests/test_stmt.rs vendored Normal file

@@ -0,0 +1,93 @@
#![allow(clippy::non_ascii_literal)]
#[macro_use]
mod macros;
use proc_macro2::{Delimiter, Group, Ident, Span, TokenStream, TokenTree};
use quote::quote;
use std::iter::FromIterator;
use syn::Stmt;
#[test]
fn test_raw_operator() {
let stmt = syn::parse_str::<Stmt>("let _ = &raw const x;").unwrap();
snapshot!(stmt, @r###"
Local(Local {
pat: Pat::Wild,
init: Some(Verbatim(`& raw const x`)),
})
"###);
}
#[test]
fn test_raw_variable() {
let stmt = syn::parse_str::<Stmt>("let _ = &raw;").unwrap();
snapshot!(stmt, @r###"
Local(Local {
pat: Pat::Wild,
init: Some(Expr::Reference {
expr: Expr::Path {
path: Path {
segments: [
PathSegment {
ident: "raw",
arguments: None,
},
],
},
},
}),
})
"###);
}
#[test]
fn test_raw_invalid() {
assert!(syn::parse_str::<Stmt>("let _ = &raw x;").is_err());
}
#[test]
fn test_none_group() {
// <Ø async fn f() {} Ø>
let tokens = TokenStream::from_iter(vec![TokenTree::Group(Group::new(
Delimiter::None,
TokenStream::from_iter(vec![
TokenTree::Ident(Ident::new("async", Span::call_site())),
TokenTree::Ident(Ident::new("fn", Span::call_site())),
TokenTree::Ident(Ident::new("f", Span::call_site())),
TokenTree::Group(Group::new(Delimiter::Parenthesis, TokenStream::new())),
TokenTree::Group(Group::new(Delimiter::Brace, TokenStream::new())),
]),
))]);
snapshot!(tokens as Stmt, @r###"
Item(Item::Fn {
vis: Inherited,
sig: Signature {
asyncness: Some,
ident: "f",
generics: Generics,
output: Default,
},
block: Block,
})
"###);
}
#[test]
fn test_let_dot_dot() {
let tokens = quote! {
let .. = 10;
};
snapshot!(tokens as Stmt, @r###"
Local(Local {
pat: Pat::Rest,
init: Some(Expr::Lit {
lit: 10,
}),
})
"###);
}


@@ -0,0 +1,30 @@
#[macro_use]
mod macros;
use proc_macro2::TokenStream;
use quote::quote;
use syn::Lit;
#[test]
fn test_struct() {
let input = "
#[derive(Debug, Clone)]
pub struct Item {
pub ident: Ident,
pub attrs: Vec<Attribute>,
}
";
snapshot!(input as TokenStream, @r###"
TokenStream(
`# [derive (Debug , Clone)] pub struct Item { pub ident : Ident , pub attrs : Vec < Attribute >, }`,
)
"###);
}
#[test]
fn test_literal_mangling() {
let code = "0_4";
let parsed: Lit = syn::parse_str(code).unwrap();
assert_eq!(code, quote!(#parsed).to_string());
}

352
zeroidc/vendor/syn/tests/test_ty.rs vendored Normal file

@@ -0,0 +1,352 @@
#[macro_use]
mod macros;
use proc_macro2::{Delimiter, Group, Ident, Punct, Spacing, Span, TokenStream, TokenTree};
use quote::quote;
use std::iter::FromIterator;
use syn::Type;
#[test]
fn test_mut_self() {
syn::parse_str::<Type>("fn(mut self)").unwrap();
syn::parse_str::<Type>("fn(mut self,)").unwrap();
syn::parse_str::<Type>("fn(mut self: ())").unwrap();
syn::parse_str::<Type>("fn(mut self: ...)").unwrap_err();
syn::parse_str::<Type>("fn(mut self: mut self)").unwrap_err();
syn::parse_str::<Type>("fn(mut self::T)").unwrap_err();
}
#[test]
fn test_macro_variable_type() {
// mimics the token stream corresponding to `$ty<T>`
let tokens = TokenStream::from_iter(vec![
TokenTree::Group(Group::new(Delimiter::None, quote! { ty })),
TokenTree::Punct(Punct::new('<', Spacing::Alone)),
TokenTree::Ident(Ident::new("T", Span::call_site())),
TokenTree::Punct(Punct::new('>', Spacing::Alone)),
]);
snapshot!(tokens as Type, @r###"
Type::Path {
path: Path {
segments: [
PathSegment {
ident: "ty",
arguments: PathArguments::AngleBracketed {
args: [
Type(Type::Path {
path: Path {
segments: [
PathSegment {
ident: "T",
arguments: None,
},
],
},
}),
],
},
},
],
},
}
"###);
// mimics the token stream corresponding to `$ty::<T>`
let tokens = TokenStream::from_iter(vec![
TokenTree::Group(Group::new(Delimiter::None, quote! { ty })),
TokenTree::Punct(Punct::new(':', Spacing::Joint)),
TokenTree::Punct(Punct::new(':', Spacing::Alone)),
TokenTree::Punct(Punct::new('<', Spacing::Alone)),
TokenTree::Ident(Ident::new("T", Span::call_site())),
TokenTree::Punct(Punct::new('>', Spacing::Alone)),
]);
snapshot!(tokens as Type, @r###"
Type::Path {
path: Path {
segments: [
PathSegment {
ident: "ty",
arguments: PathArguments::AngleBracketed {
colon2_token: Some,
args: [
Type(Type::Path {
path: Path {
segments: [
PathSegment {
ident: "T",
arguments: None,
},
],
},
}),
],
},
},
],
},
}
"###);
}
#[test]
fn test_group_angle_brackets() {
// mimics the token stream corresponding to `Option<$ty>`
let tokens = TokenStream::from_iter(vec![
TokenTree::Ident(Ident::new("Option", Span::call_site())),
TokenTree::Punct(Punct::new('<', Spacing::Alone)),
TokenTree::Group(Group::new(Delimiter::None, quote! { Vec<u8> })),
TokenTree::Punct(Punct::new('>', Spacing::Alone)),
]);
snapshot!(tokens as Type, @r###"
Type::Path {
path: Path {
segments: [
PathSegment {
ident: "Option",
arguments: PathArguments::AngleBracketed {
args: [
Type(Type::Group {
elem: Type::Path {
path: Path {
segments: [
PathSegment {
ident: "Vec",
arguments: PathArguments::AngleBracketed {
args: [
Type(Type::Path {
path: Path {
segments: [
PathSegment {
ident: "u8",
arguments: None,
},
],
},
}),
],
},
},
],
},
},
}),
],
},
},
],
},
}
"###);
}
#[test]
fn test_group_colons() {
// mimics the token stream corresponding to `$ty::Item`
let tokens = TokenStream::from_iter(vec![
TokenTree::Group(Group::new(Delimiter::None, quote! { Vec<u8> })),
TokenTree::Punct(Punct::new(':', Spacing::Joint)),
TokenTree::Punct(Punct::new(':', Spacing::Alone)),
TokenTree::Ident(Ident::new("Item", Span::call_site())),
]);
snapshot!(tokens as Type, @r###"
Type::Path {
path: Path {
segments: [
PathSegment {
ident: "Vec",
arguments: PathArguments::AngleBracketed {
args: [
Type(Type::Path {
path: Path {
segments: [
PathSegment {
ident: "u8",
arguments: None,
},
],
},
}),
],
},
},
PathSegment {
ident: "Item",
arguments: None,
},
],
},
}
"###);
let tokens = TokenStream::from_iter(vec![
TokenTree::Group(Group::new(Delimiter::None, quote! { [T] })),
TokenTree::Punct(Punct::new(':', Spacing::Joint)),
TokenTree::Punct(Punct::new(':', Spacing::Alone)),
TokenTree::Ident(Ident::new("Element", Span::call_site())),
]);
snapshot!(tokens as Type, @r###"
Type::Path {
qself: Some(QSelf {
ty: Type::Slice {
elem: Type::Path {
path: Path {
segments: [
PathSegment {
ident: "T",
arguments: None,
},
],
},
},
},
position: 0,
}),
path: Path {
leading_colon: Some,
segments: [
PathSegment {
ident: "Element",
arguments: None,
},
],
},
}
"###);
}
#[test]
fn test_trait_object() {
let tokens = quote!(dyn for<'a> Trait<'a> + 'static);
snapshot!(tokens as Type, @r###"
Type::TraitObject {
dyn_token: Some,
bounds: [
Trait(TraitBound {
modifier: None,
lifetimes: Some(BoundLifetimes {
lifetimes: [
LifetimeDef {
lifetime: Lifetime {
ident: "a",
},
},
],
}),
path: Path {
segments: [
PathSegment {
ident: "Trait",
arguments: PathArguments::AngleBracketed {
args: [
Lifetime(Lifetime {
ident: "a",
}),
],
},
},
],
},
}),
Lifetime(Lifetime {
ident: "static",
}),
],
}
"###);
let tokens = quote!(dyn 'a + Trait);
snapshot!(tokens as Type, @r###"
Type::TraitObject {
dyn_token: Some,
bounds: [
Lifetime(Lifetime {
ident: "a",
}),
Trait(TraitBound {
modifier: None,
path: Path {
segments: [
PathSegment {
ident: "Trait",
arguments: None,
},
],
},
}),
],
}
"###);
// None of the following are valid Rust types.
syn::parse_str::<Type>("for<'a> dyn Trait<'a>").unwrap_err();
syn::parse_str::<Type>("dyn for<'a> 'a + Trait").unwrap_err();
}
#[test]
fn test_trailing_plus() {
#[rustfmt::skip]
let tokens = quote!(impl Trait +);
snapshot!(tokens as Type, @r###"
Type::ImplTrait {
bounds: [
Trait(TraitBound {
modifier: None,
path: Path {
segments: [
PathSegment {
ident: "Trait",
arguments: None,
},
],
},
}),
],
}
"###);
#[rustfmt::skip]
let tokens = quote!(dyn Trait +);
snapshot!(tokens as Type, @r###"
Type::TraitObject {
dyn_token: Some,
bounds: [
Trait(TraitBound {
modifier: None,
path: Path {
segments: [
PathSegment {
ident: "Trait",
arguments: None,
},
],
},
}),
],
}
"###);
#[rustfmt::skip]
let tokens = quote!(Trait +);
snapshot!(tokens as Type, @r###"
Type::TraitObject {
bounds: [
Trait(TraitBound {
modifier: None,
path: Path {
segments: [
PathSegment {
ident: "Trait",
arguments: None,
},
],
},
}),
],
}
"###);
}
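
Several of the tests above hand-build token streams containing `Delimiter::None` groups to mimic what a declarative macro produces when a `ty` metavariable is spliced back into code. The sketch below shows that real-world scenario alongside a direct `syn::Type` parse; the macro and alias names are illustrative and assume syn 1.x with default parsing features, not anything taken from syn itself:

macro_rules! iterator_item {
    ($t:ty) => {
        // `$t` is re-emitted wrapped in an invisible (None-delimited) group,
        // so a downstream parser sees the captured type as one opaque unit
        // followed by `::Item` -- the situation test_group_colons simulates.
        type Alias = <$t as IntoIterator>::Item;
    };
}

iterator_item!(Vec<u8>);

fn main() {
    // Outside of macro expansion, the same shapes can be parsed directly.
    let ty: syn::Type = syn::parse_str("Option<Vec<u8>>").unwrap();
    assert!(matches!(ty, syn::Type::Path(_)));
    let _x: Alias = 0u8; // `Alias` resolves to `u8` via the expansion above.
}
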

148
zeroidc/vendor/syn/tests/test_visibility.rs vendored Normal file
View File

@@ -0,0 +1,148 @@
#[macro_use]
mod macros;
use proc_macro2::{Delimiter, Group, Ident, Punct, Spacing, Span, TokenStream, TokenTree};
use std::iter::FromIterator;
use syn::parse::{Parse, ParseStream};
use syn::{DeriveInput, Result, Visibility};
#[derive(Debug)]
struct VisRest {
vis: Visibility,
rest: TokenStream,
}
impl Parse for VisRest {
fn parse(input: ParseStream) -> Result<Self> {
Ok(VisRest {
vis: input.parse()?,
rest: input.parse()?,
})
}
}
macro_rules! assert_vis_parse {
($input:expr, Ok($p:pat)) => {
assert_vis_parse!($input, Ok($p) + "");
};
($input:expr, Ok($p:pat) + $rest:expr) => {
let expected = $rest.parse::<TokenStream>().unwrap();
let parse: VisRest = syn::parse_str($input).unwrap();
match parse.vis {
$p => {}
_ => panic!("Expected {}, got {:?}", stringify!($p), parse.vis),
}
// NOTE: Round-trips through `to_string` to avoid potential whitespace
// diffs.
assert_eq!(parse.rest.to_string(), expected.to_string());
};
($input:expr, Err) => {
syn::parse2::<VisRest>($input.parse().unwrap()).unwrap_err();
};
}
#[test]
fn test_pub() {
assert_vis_parse!("pub", Ok(Visibility::Public(_)));
}
#[test]
fn test_crate() {
assert_vis_parse!("crate", Ok(Visibility::Crate(_)));
}
#[test]
fn test_inherited() {
assert_vis_parse!("", Ok(Visibility::Inherited));
}
#[test]
fn test_in() {
assert_vis_parse!("pub(in foo::bar)", Ok(Visibility::Restricted(_)));
}
#[test]
fn test_pub_crate() {
assert_vis_parse!("pub(crate)", Ok(Visibility::Restricted(_)));
}
#[test]
fn test_pub_self() {
assert_vis_parse!("pub(self)", Ok(Visibility::Restricted(_)));
}
#[test]
fn test_pub_super() {
assert_vis_parse!("pub(super)", Ok(Visibility::Restricted(_)));
}
#[test]
fn test_missing_in() {
assert_vis_parse!("pub(foo::bar)", Ok(Visibility::Public(_)) + "(foo::bar)");
}
#[test]
fn test_missing_in_path() {
assert_vis_parse!("pub(in)", Err);
}
#[test]
fn test_crate_path() {
assert_vis_parse!(
"pub(crate::A, crate::B)",
Ok(Visibility::Public(_)) + "(crate::A, crate::B)"
);
}
#[test]
fn test_junk_after_in() {
assert_vis_parse!("pub(in some::path @@garbage)", Err);
}
#[test]
fn test_empty_group_vis() {
// mimics `struct S { $vis $field: () }` where $vis is empty
let tokens = TokenStream::from_iter(vec![
TokenTree::Ident(Ident::new("struct", Span::call_site())),
TokenTree::Ident(Ident::new("S", Span::call_site())),
TokenTree::Group(Group::new(
Delimiter::Brace,
TokenStream::from_iter(vec![
TokenTree::Group(Group::new(Delimiter::None, TokenStream::new())),
TokenTree::Group(Group::new(
Delimiter::None,
TokenStream::from_iter(vec![TokenTree::Ident(Ident::new(
"f",
Span::call_site(),
))]),
)),
TokenTree::Punct(Punct::new(':', Spacing::Alone)),
TokenTree::Group(Group::new(Delimiter::Parenthesis, TokenStream::new())),
]),
)),
]);
snapshot!(tokens as DeriveInput, @r###"
DeriveInput {
vis: Inherited,
ident: "S",
generics: Generics,
data: Data::Struct {
fields: Fields::Named {
named: [
Field {
vis: Inherited,
ident: Some("f"),
colon_token: Some,
ty: Type::Tuple,
},
],
},
},
}
"###);
}
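
`VisRest` demonstrates a common syn pattern: parse an optional visibility prefix, then capture whatever follows as a raw `TokenStream`. A self-contained sketch of the same pattern, assuming syn 1.x and proc-macro2 (the type and field names here are illustrative, not part of the vendored file):

use proc_macro2::TokenStream;
use syn::parse::{Parse, ParseStream};
use syn::{Result, Visibility};

struct VisThenRest {
    vis: Visibility,
    rest: TokenStream,
}

impl Parse for VisThenRest {
    fn parse(input: ParseStream) -> Result<Self> {
        Ok(VisThenRest {
            // `Visibility`'s parser accepts an empty prefix and yields
            // `Visibility::Inherited`, so a missing `pub` is not an error.
            vis: input.parse()?,
            // Everything left over is captured verbatim as tokens.
            rest: input.parse()?,
        })
    }
}

fn main() {
    let parsed: VisThenRest = syn::parse_str("pub(crate) fn f() {}").unwrap();
    assert!(matches!(parsed.vis, Visibility::Restricted(_)));
    assert!(!parsed.rest.is_empty());
}

Capturing the remainder as a `TokenStream` rather than concrete syntax keeps the parser tolerant of arbitrary trailing items, which is why the tests above compare it via `to_string()`.
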

33
zeroidc/vendor/syn/tests/zzz_stable.rs vendored Normal file
View File

@@ -0,0 +1,33 @@
#![cfg(syn_disable_nightly_tests)]
use std::io::{self, Write};
use termcolor::{Color, ColorChoice, ColorSpec, StandardStream, WriteColor};
const MSG: &str = "\
‖ WARNING:
‖ This is not a nightly compiler, so not all tests were able to
‖ run. Syn includes tests that compare Syn's parser against the
‖ compiler's parser, which requires access to unstable librustc
‖ data structures and a nightly compiler.
";
#[test]
fn notice() -> io::Result<()> {
let header = "WARNING";
let index_of_header = MSG.find(header).unwrap();
let before = &MSG[..index_of_header];
let after = &MSG[index_of_header + header.len()..];
let mut stderr = StandardStream::stderr(ColorChoice::Auto);
stderr.set_color(ColorSpec::new().set_fg(Some(Color::Yellow)))?;
write!(&mut stderr, "{}", before)?;
stderr.set_color(ColorSpec::new().set_bold(true).set_fg(Some(Color::Yellow)))?;
write!(&mut stderr, "{}", header)?;
stderr.set_color(ColorSpec::new().set_fg(Some(Color::Yellow)))?;
write!(&mut stderr, "{}", after)?;
stderr.reset()?;
Ok(())
}
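
The `#![cfg(syn_disable_nightly_tests)]` gate means this warning only compiles when that cfg is passed to rustc. The sketch below is a hypothetical build.rs showing one common way such a cfg can be wired up by probing the toolchain; it is an illustration of the general Cargo mechanism, not necessarily how syn or this vendored copy actually sets it:

use std::env;
use std::process::Command;

fn main() {
    // Ask the compiler named by $RUSTC (falling back to `rustc`) for its
    // version string, and enable the cfg when it is not a nightly toolchain.
    let rustc = env::var("RUSTC").unwrap_or_else(|_| "rustc".to_string());
    let is_nightly = Command::new(rustc)
        .arg("--version")
        .output()
        .ok()
        .and_then(|o| String::from_utf8(o.stdout).ok())
        .map(|v| v.contains("nightly"))
        .unwrap_or(false);
    if !is_nightly {
        // Cargo forwards this to rustc as `--cfg syn_disable_nightly_tests`
        // for every target in the package, including integration tests.
        println!("cargo:rustc-cfg=syn_disable_nightly_tests");
    }
}

Setting `RUSTFLAGS="--cfg syn_disable_nightly_tests"` when running `cargo test` should achieve the same effect without a build script.
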