Format with rustfmt 0.4.2
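
Mechanical reformatting only; no functional changes intended. The exact invocation is not recorded in this commit, but assuming rustfmt 0.4.2 is installed as the cargo subcommand, a run along these lines reproduces it:

    cargo fmt --all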
diff --git a/src/attr.rs b/src/attr.rs
index 600302f..b63b302 100644
--- a/src/attr.rs
+++ b/src/attr.rs
@@ -133,7 +133,7 @@
_ => return None,
};
if g.delimiter() != Delimiter::Parenthesis {
- return None
+ return None;
}
let tokens = g.stream().clone().into_iter().collect::<Vec<_>>();
let nested = match list_of_nested_meta_items_from_tokens(&tokens) {
@@ -153,10 +153,10 @@
_ => return None,
};
if a.spacing() != Spacing::Alone {
- return None
+ return None;
}
if a.op() != '=' {
- return None
+ return None;
}
match *b {
@@ -167,21 +167,17 @@
lit: Lit::new(l.clone()),
}))
}
- TokenTree::Term(ref term) => {
- match term.as_str() {
- v @ "true" | v @ "false" => {
- Some(Meta::NameValue(MetaNameValue {
- ident: ident,
- eq_token: Token![=]([a.span()]),
- lit: Lit::Bool(LitBool {
- value: v == "true",
- span: b.span(),
- }),
- }))
- },
- _ => None ,
- }
- }
+ TokenTree::Term(ref term) => match term.as_str() {
+ v @ "true" | v @ "false" => Some(Meta::NameValue(MetaNameValue {
+ ident: ident,
+ eq_token: Token![=]([a.span()]),
+ lit: Lit::Bool(LitBool {
+ value: v == "true",
+ span: b.span(),
+ }),
+ })),
+ _ => None,
+ },
_ => None,
}
}
@@ -204,13 +200,13 @@
let ident = Ident::new(sym.as_str(), sym.span());
if tts.len() >= 3 {
if let Some(meta) = Attribute::extract_name_value(ident, &tts[1], &tts[2]) {
- return Some((NestedMeta::Meta(meta), &tts[3..]))
+ return Some((NestedMeta::Meta(meta), &tts[3..]));
}
}
if tts.len() >= 2 {
if let Some(meta) = Attribute::extract_meta_list(ident, &tts[1]) {
- return Some((NestedMeta::Meta(meta), &tts[2..]))
+ return Some((NestedMeta::Meta(meta), &tts[2..]));
}
}
@@ -233,10 +229,10 @@
None
} else if let TokenTree::Op(ref op) = tts[0] {
if op.spacing() != Spacing::Alone {
- return None
+ return None;
}
if op.op() != ',' {
- return None
+ return None;
}
let tok = Token![,]([op.span()]);
tts = &tts[1..];
@@ -401,8 +397,8 @@
use super::*;
use buffer::Cursor;
use parse_error;
+ use proc_macro2::{Literal, Op, Spacing, Span, TokenTree};
use synom::PResult;
- use proc_macro2::{Literal, Spacing, Span, TokenTree, Op};
fn eq(span: Span) -> TokenTree {
let mut op = Op::new('=', Spacing::Alone);
diff --git a/src/buffer.rs b/src/buffer.rs
index 13ef844..a4af7bc 100644
--- a/src/buffer.rs
+++ b/src/buffer.rs
@@ -130,10 +130,10 @@
#[cfg(feature = "proc-macro")]
use proc_macro as pm;
use proc_macro2::{Delimiter, Literal, Span, Term, TokenStream};
-use proc_macro2::{Group, TokenTree, Op};
+use proc_macro2::{Group, Op, TokenTree};
-use std::ptr;
use std::marker::PhantomData;
+use std::ptr;
#[cfg(synom_verbose_trace)]
use std::fmt::{self, Debug};
diff --git a/src/error.rs b/src/error.rs
index 23d2a1a..619af10 100644
--- a/src/error.rs
+++ b/src/error.rs
@@ -6,8 +6,8 @@
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-use std::error::Error;
use buffer::Cursor;
+use std::error::Error;
use std::fmt::{self, Display};
/// The result of a `Synom` parser.
diff --git a/src/expr.rs b/src/expr.rs
index e770fde..d554fbc 100644
--- a/src/expr.rs
+++ b/src/expr.rs
@@ -7,14 +7,14 @@
// except according to those terms.
use super::*;
-use punctuated::Punctuated;
use proc_macro2::{Span, TokenStream};
+use punctuated::Punctuated;
#[cfg(feature = "extra-traits")]
use std::hash::{Hash, Hasher};
-#[cfg(feature = "extra-traits")]
-use tt::TokenStreamHelper;
#[cfg(feature = "full")]
use std::mem;
+#[cfg(feature = "extra-traits")]
+use tt::TokenStreamHelper;
ast_enum_of_structs! {
/// A Rust expression.
@@ -1020,13 +1020,13 @@
#[cfg(feature = "full")]
use path::parsing::ty_no_eq_after;
- #[cfg(feature = "full")]
- use proc_macro2::TokenStream;
- use synom::Synom;
use buffer::Cursor;
#[cfg(feature = "full")]
use parse_error;
+ #[cfg(feature = "full")]
+ use proc_macro2::TokenStream;
use synom::PResult;
+ use synom::Synom;
// When we're parsing expressions which occur before blocks, like in an if
// statement's condition, we cannot parse a struct literal.
@@ -2818,8 +2818,8 @@
use super::*;
#[cfg(feature = "full")]
use attr::FilterAttrs;
- use quote::{ToTokens, Tokens};
use proc_macro2::Literal;
+ use quote::{ToTokens, Tokens};
// If the given expression is a bare `ExprStruct`, wraps it in parenthesis
// before appending it to `Tokens`.
diff --git a/src/gen_helper.rs b/src/gen_helper.rs
index f7f64a9..fe00c8f 100644
--- a/src/gen_helper.rs
+++ b/src/gen_helper.rs
@@ -8,9 +8,9 @@
#[cfg(feature = "fold")]
pub mod fold {
- use punctuated::{Pair, Punctuated};
use fold::Fold;
use proc_macro2::Span;
+ use punctuated::{Pair, Punctuated};
pub trait FoldHelper {
type Item;
@@ -84,10 +84,7 @@
use proc_macro2::Span;
use visit::Visit;
- pub fn tokens_helper<'ast, V: Visit<'ast> + ?Sized, S: Spans>(
- visitor: &mut V,
- spans: &'ast S,
- ) {
+ pub fn tokens_helper<'ast, V: Visit<'ast> + ?Sized, S: Spans>(visitor: &mut V, spans: &'ast S) {
spans.visit(visitor);
}
diff --git a/src/generics.rs b/src/generics.rs
index 23c6958..919b5fe 100644
--- a/src/generics.rs
+++ b/src/generics.rs
@@ -486,8 +486,8 @@
pub mod parsing {
use super::*;
- use synom::Synom;
use punctuated::Pair;
+ use synom::Synom;
impl Synom for Generics {
named!(parse -> Self, map!(
diff --git a/src/ident.rs b/src/ident.rs
index 620bfc7..2d86ea8 100644
--- a/src/ident.rs
+++ b/src/ident.rs
@@ -239,10 +239,10 @@
#[cfg(feature = "parsing")]
pub mod parsing {
use super::*;
- use synom::Synom;
use buffer::Cursor;
use parse_error;
use synom::PResult;
+ use synom::Synom;
impl Synom for Ident {
fn parse(input: Cursor) -> PResult<Self> {
@@ -266,12 +266,7 @@
_ => {}
}
- Ok((
- Ident {
- term: term,
- },
- rest,
- ))
+ Ok((Ident { term: term }, rest))
}
fn description() -> Option<&'static str> {
diff --git a/src/item.rs b/src/item.rs
index 5e0f234..beb5a9e 100644
--- a/src/item.rs
+++ b/src/item.rs
@@ -8,14 +8,14 @@
use super::*;
use derive::{Data, DeriveInput};
-use punctuated::Punctuated;
use proc_macro2::TokenStream;
+use punctuated::Punctuated;
use token::{Brace, Paren};
#[cfg(feature = "extra-traits")]
-use tt::TokenStreamHelper;
-#[cfg(feature = "extra-traits")]
use std::hash::{Hash, Hasher};
+#[cfg(feature = "extra-traits")]
+use tt::TokenStreamHelper;
ast_enum_of_structs! {
/// Things that can appear directly inside of a module or scope.
diff --git a/src/lib.rs b/src/lib.rs
index 60cf297..91bf0db 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -259,13 +259,17 @@
// Syn types in rustdoc of other crates get linked to here.
#![doc(html_root_url = "https://docs.rs/syn/0.13.2")]
-#![cfg_attr(feature = "cargo-clippy",
- allow(const_static_lifetime, doc_markdown, large_enum_variant, match_bool,
- redundant_closure, needless_pass_by_value, redundant_field_names))]
+#![cfg_attr(
+ feature = "cargo-clippy",
+ allow(
+ const_static_lifetime, doc_markdown, large_enum_variant, match_bool, redundant_closure,
+ needless_pass_by_value, redundant_field_names
+ )
+)]
-extern crate proc_macro2;
#[cfg(feature = "proc-macro")]
extern crate proc_macro;
+extern crate proc_macro2;
extern crate unicode_xid;
#[cfg(feature = "printing")]
@@ -296,11 +300,11 @@
#[cfg(any(feature = "full", feature = "derive"))]
mod expr;
#[cfg(any(feature = "full", feature = "derive"))]
-pub use expr::{Expr, ExprReference, ExprArray, ExprAssign, ExprAssignOp, ExprBinary, ExprBlock,
- ExprBox, ExprBreak, ExprCall, ExprCast, ExprCatch, ExprClosure, ExprContinue,
- ExprField, ExprForLoop, ExprGroup, ExprIf, ExprIfLet, ExprInPlace, ExprIndex,
- ExprLit, ExprLoop, ExprMacro, ExprMatch, ExprMethodCall, ExprParen, ExprPath,
- ExprRange, ExprRepeat, ExprReturn, ExprStruct, ExprTry, ExprTuple, ExprType,
+pub use expr::{Expr, ExprArray, ExprAssign, ExprAssignOp, ExprBinary, ExprBlock, ExprBox,
+ ExprBreak, ExprCall, ExprCast, ExprCatch, ExprClosure, ExprContinue, ExprField,
+ ExprForLoop, ExprGroup, ExprIf, ExprIfLet, ExprInPlace, ExprIndex, ExprLit,
+ ExprLoop, ExprMacro, ExprMatch, ExprMethodCall, ExprParen, ExprPath, ExprRange,
+ ExprReference, ExprRepeat, ExprReturn, ExprStruct, ExprTry, ExprTuple, ExprType,
ExprUnary, ExprUnsafe, ExprVerbatim, ExprWhile, ExprWhileLet, ExprYield, Index,
Member};
@@ -374,17 +378,17 @@
#[cfg(any(feature = "full", feature = "derive"))]
mod path;
+#[cfg(all(any(feature = "full", feature = "derive"), feature = "printing"))]
+pub use path::PathTokens;
#[cfg(any(feature = "full", feature = "derive"))]
pub use path::{AngleBracketedGenericArguments, Binding, GenericArgument,
ParenthesizedGenericArguments, Path, PathArguments, PathSegment, QSelf};
-#[cfg(all(any(feature = "full", feature = "derive"), feature = "printing"))]
-pub use path::PathTokens;
#[cfg(feature = "parsing")]
pub mod buffer;
+pub mod punctuated;
#[cfg(feature = "parsing")]
pub mod synom;
-pub mod punctuated;
#[cfg(any(feature = "full", feature = "derive"))]
mod tt;
@@ -432,7 +436,6 @@
#[cfg(feature = "visit")]
pub mod visit;
-
/// Syntax tree traversal to mutate an exclusive borrow of a syntax tree in
/// place.
///
@@ -517,7 +520,7 @@
////////////////////////////////////////////////////////////////////////////////
#[cfg(feature = "parsing")]
-use synom::{Synom, Parser};
+use synom::{Parser, Synom};
#[cfg(feature = "parsing")]
mod error;
@@ -602,11 +605,9 @@
T: Synom,
{
let parser = T::parse;
- parser.parse2(tokens).map_err(|err| {
- match T::description() {
- Some(s) => ParseError::new(format!("failed to parse {}: {}", s, err)),
- None => err,
- }
+ parser.parse2(tokens).map_err(|err| match T::description() {
+ Some(s) => ParseError::new(format!("failed to parse {}: {}", s, err)),
+ None => err,
})
}
diff --git a/src/lifetime.rs b/src/lifetime.rs
index 276e2d5..7e3f231 100644
--- a/src/lifetime.rs
+++ b/src/lifetime.rs
@@ -113,10 +113,10 @@
#[cfg(feature = "parsing")]
pub mod parsing {
use super::*;
- use synom::Synom;
use buffer::Cursor;
use parse_error;
use synom::PResult;
+ use synom::Synom;
impl Synom for Lifetime {
fn parse(input: Cursor) -> PResult<Self> {
@@ -128,12 +128,7 @@
return parse_error();
}
- Ok((
- Lifetime {
- term: term,
- },
- rest,
- ))
+ Ok((Lifetime { term: term }, rest))
}
fn description() -> Option<&'static str> {
diff --git a/src/lit.rs b/src/lit.rs
index 6785f88..5096b1b 100644
--- a/src/lit.rs
+++ b/src/lit.rs
@@ -111,9 +111,7 @@
pub fn new(value: &str, span: Span) -> Self {
let mut lit = Literal::string(value);
lit.set_span(span);
- LitStr {
- token: lit,
- }
+ LitStr { token: lit }
}
pub fn value(&self) -> String {
@@ -136,7 +134,10 @@
// Token stream with every span replaced by the given one.
fn respan_token_stream(stream: TokenStream, span: Span) -> TokenStream {
- stream.into_iter().map(|token| respan_token_tree(token, span)).collect()
+ stream
+ .into_iter()
+ .map(|token| respan_token_tree(token, span))
+ .collect()
}
// Token tree with every span replaced by the given one.
@@ -336,7 +337,7 @@
self.$field.to_string().hash(state);
}
}
- }
+ };
}
impl LitVerbatim {
@@ -412,10 +413,10 @@
#[cfg(feature = "parsing")]
pub mod parsing {
use super::*;
- use synom::Synom;
use buffer::Cursor;
use parse_error;
use synom::PResult;
+ use synom::Synom;
impl Synom for Lit {
fn parse(input: Cursor) -> PResult<Self> {
@@ -559,9 +560,9 @@
mod value {
use super::*;
+ use proc_macro2::TokenStream;
use std::char;
use std::ops::{Index, RangeFrom};
- use proc_macro2::TokenStream;
impl Lit {
/// Interpret a Syn literal from a proc-macro2 literal.
@@ -579,42 +580,20 @@
let value = token.to_string();
match value::byte(&value, 0) {
- b'"' | b'r' => {
- return Lit::Str(LitStr {
- token: token,
- })
- }
+ b'"' | b'r' => return Lit::Str(LitStr { token: token }),
b'b' => match value::byte(&value, 1) {
- b'"' | b'r' => {
- return Lit::ByteStr(LitByteStr {
- token: token,
- })
- }
- b'\'' => {
- return Lit::Byte(LitByte {
- token: token,
- })
- }
+ b'"' | b'r' => return Lit::ByteStr(LitByteStr { token: token }),
+ b'\'' => return Lit::Byte(LitByte { token: token }),
_ => {}
},
- b'\'' => {
- return Lit::Char(LitChar {
- token: token,
- })
- }
+ b'\'' => return Lit::Char(LitChar { token: token }),
b'0'...b'9' => if number_is_int(&value) {
- return Lit::Int(LitInt {
- token: token,
- });
+ return Lit::Int(LitInt { token: token });
} else if number_is_float(&value) {
- return Lit::Float(LitFloat {
- token: token,
- });
+ return Lit::Float(LitFloat { token: token });
} else {
// number overflow
- return Lit::Verbatim(LitVerbatim {
- token: token,
- });
+ return Lit::Verbatim(LitVerbatim { token: token });
},
_ => if value == "true" || value == "false" {
return Lit::Bool(LitBool {
diff --git a/src/parse_quote.rs b/src/parse_quote.rs
index 9fe7ef4..ec8ef5a 100644
--- a/src/parse_quote.rs
+++ b/src/parse_quote.rs
@@ -88,9 +88,9 @@
////////////////////////////////////////////////////////////////////////////////
// Can parse any type that implements Synom.
-use synom::{Synom, Parser, PResult};
use buffer::Cursor;
use proc_macro2::TokenStream;
+use synom::{PResult, Parser, Synom};
// Not public API.
#[doc(hidden)]
@@ -105,7 +105,7 @@
Err(err) => match T::description() {
Some(s) => panic!("failed to parse {}: {}", s, err),
None => panic!("{}", err),
- }
+ },
}
}
@@ -116,7 +116,10 @@
fn description() -> Option<&'static str>;
}
-impl<T> ParseQuote for T where T: Synom {
+impl<T> ParseQuote for T
+where
+ T: Synom,
+{
fn parse(input: Cursor) -> PResult<Self> {
<T as Synom>::parse(input)
}
diff --git a/src/parsers.rs b/src/parsers.rs
index 4738502..b592e1f 100644
--- a/src/parsers.rs
+++ b/src/parsers.rs
@@ -723,7 +723,7 @@
($i:expr,) => {{
let _ = $i;
$crate::parse_error()
- }}
+ }};
}
/// Run a series of parsers and produce all of the results in a tuple.
diff --git a/src/path.rs b/src/path.rs
index d5b7238..39edd11 100644
--- a/src/path.rs
+++ b/src/path.rs
@@ -6,8 +6,8 @@
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-use punctuated::Punctuated;
use super::*;
+use punctuated::Punctuated;
ast_struct! {
/// A path at which a named item is exported: `std::collections::HashMap`.
diff --git a/src/punctuated.rs b/src/punctuated.rs
index c87b639..ac6492a 100644
--- a/src/punctuated.rs
+++ b/src/punctuated.rs
@@ -27,6 +27,8 @@
//! ^^^^^ ~~~~~ ^^^^
//! ```
+#[cfg(feature = "extra-traits")]
+use std::fmt::{self, Debug};
#[cfg(any(feature = "full", feature = "derive"))]
use std::iter;
use std::iter::FromIterator;
@@ -34,15 +36,13 @@
use std::option;
use std::slice;
use std::vec;
-#[cfg(feature = "extra-traits")]
-use std::fmt::{self, Debug};
#[cfg(feature = "parsing")]
-use synom::{Synom, PResult};
-#[cfg(feature = "parsing")]
use buffer::Cursor;
#[cfg(feature = "parsing")]
use parse_error;
+#[cfg(feature = "parsing")]
+use synom::{PResult, Synom};
/// A punctuated sequence of syntax tree nodes of type `T` separated by
/// punctuation of type `P`.
@@ -60,7 +60,10 @@
impl<T, P> Punctuated<T, P> {
/// Creates an empty punctuated sequence.
pub fn new() -> Punctuated<T, P> {
- Punctuated { inner: Vec::new(), last: None }
+ Punctuated {
+ inner: Vec::new(),
+ last: None,
+ }
}
/// Determines whether this punctuated sequence is empty, meaning it
@@ -85,10 +88,10 @@
/// Borrows the last punctuated pair in this sequence.
pub fn last(&self) -> Option<Pair<&T, &P>> {
if self.last.is_some() {
- self.last.as_ref()
- .map(|t| Pair::End(t.as_ref()))
+ self.last.as_ref().map(|t| Pair::End(t.as_ref()))
} else {
- self.inner.last()
+ self.inner
+ .last()
.map(|&(ref t, ref d)| Pair::Punctuated(t, d))
}
}
@@ -96,10 +99,10 @@
/// Mutably borrows the last punctuated pair in this sequence.
pub fn last_mut(&mut self) -> Option<Pair<&mut T, &mut P>> {
if self.last.is_some() {
- self.last.as_mut()
- .map(|t| Pair::End(t.as_mut()))
+ self.last.as_mut().map(|t| Pair::End(t.as_mut()))
} else {
- self.inner.last_mut()
+ self.inner
+ .last_mut()
.map(|&mut (ref mut t, ref mut d)| Pair::Punctuated(t, d))
}
}
@@ -351,7 +354,9 @@
type Item = Pair<&'a T, &'a P>;
fn next(&mut self) -> Option<Self::Item> {
- self.inner.next().map(|&(ref t, ref p)| Pair::Punctuated(t, p))
+ self.inner
+ .next()
+ .map(|&(ref t, ref p)| Pair::Punctuated(t, p))
.or_else(|| self.last.next().map(Pair::End))
}
}
@@ -370,7 +375,9 @@
type Item = Pair<&'a mut T, &'a mut P>;
fn next(&mut self) -> Option<Self::Item> {
- self.inner.next().map(|&mut (ref mut t, ref mut p)| Pair::Punctuated(t, p))
+ self.inner
+ .next()
+ .map(|&mut (ref mut t, ref mut p)| Pair::Punctuated(t, p))
.or_else(|| self.last.next().map(Pair::End))
}
}
@@ -389,7 +396,9 @@
type Item = Pair<T, P>;
fn next(&mut self) -> Option<Self::Item> {
- self.inner.next().map(|(t, p)| Pair::Punctuated(t, p))
+ self.inner
+ .next()
+ .map(|(t, p)| Pair::Punctuated(t, p))
.or_else(|| self.last.next().map(Pair::End))
}
}
@@ -408,7 +417,10 @@
type Item = T;
fn next(&mut self) -> Option<Self::Item> {
- self.inner.next().map(|pair| pair.0).or_else(|| self.last.next())
+ self.inner
+ .next()
+ .map(|pair| pair.0)
+ .or_else(|| self.last.next())
}
}
@@ -449,7 +461,10 @@
type Item = &'a T;
fn next(&mut self) -> Option<Self::Item> {
- self.inner.next().map(|pair| &pair.0).or_else(|| self.last.next())
+ self.inner
+ .next()
+ .map(|pair| &pair.0)
+ .or_else(|| self.last.next())
}
}
@@ -479,7 +494,10 @@
type Item = &'a mut T;
fn next(&mut self) -> Option<Self::Item> {
- self.inner.next().map(|pair| &mut pair.0).or_else(|| self.last.next())
+ self.inner
+ .next()
+ .map(|pair| &mut pair.0)
+ .or_else(|| self.last.next())
}
}
@@ -552,7 +570,7 @@
if index == self.len() - 1 {
match self.last {
Some(ref t) => t,
- None => &self.inner[index].0
+ None => &self.inner[index].0,
}
} else {
&self.inner[index].0
@@ -565,7 +583,7 @@
if index == self.len() - 1 {
match self.last {
Some(ref mut t) => t,
- None => &mut self.inner[index].0
+ None => &mut self.inner[index].0,
}
} else {
&mut self.inner[index].0
@@ -612,10 +630,7 @@
{
/// Parse **zero or more** syntax tree nodes using the given parser with
/// punctuation in between and **no trailing** punctuation.
- pub fn parse_separated_with(
- input: Cursor,
- parse: fn(Cursor) -> PResult<T>,
- ) -> PResult<Self> {
+ pub fn parse_separated_with(input: Cursor, parse: fn(Cursor) -> PResult<T>) -> PResult<Self> {
Self::parse(input, parse, false)
}
@@ -633,10 +648,7 @@
/// Parse **zero or more** syntax tree nodes using the given parser with
/// punctuation in between and **optional trailing** punctuation.
- pub fn parse_terminated_with(
- input: Cursor,
- parse: fn(Cursor) -> PResult<T>,
- ) -> PResult<Self> {
+ pub fn parse_terminated_with(input: Cursor, parse: fn(Cursor) -> PResult<T>) -> PResult<Self> {
Self::parse(input, parse, true)
}
diff --git a/src/synom.rs b/src/synom.rs
index 63de9a0..0185090 100644
--- a/src/synom.rs
+++ b/src/synom.rs
@@ -248,7 +248,10 @@
}
}
-impl<F, T> Parser for F where F: FnOnce(Cursor) -> PResult<T> {
+impl<F, T> Parser for F
+where
+ F: FnOnce(Cursor) -> PResult<T>,
+{
type Output = T;
fn parse2(self, tokens: proc_macro2::TokenStream) -> Result<T, ParseError> {
diff --git a/src/token.rs b/src/token.rs
index 9107156..10fc75a 100644
--- a/src/token.rs
+++ b/src/token.rs
@@ -119,7 +119,7 @@
}
macro_rules! token_punct_def {
- (#[$doc:meta] $s:tt pub struct $name:ident/$len:tt) => {
+ (#[$doc:meta] $s:tt pub struct $name:ident / $len:tt) => {
#[cfg_attr(feature = "clone-impls", derive(Copy, Clone))]
#[$doc]
///
@@ -161,8 +161,10 @@
#[cfg(feature = "extra-traits")]
impl ::std::hash::Hash for $name {
fn hash<H>(&self, _state: &mut H)
- where H: ::std::hash::Hasher
- {}
+ where
+ H: ::std::hash::Hasher,
+ {
+ }
}
impl From<Span> for $name {
@@ -170,7 +172,7 @@
$name([span; $len])
}
}
- }
+ };
}
macro_rules! token_punct_parser {
@@ -192,7 +194,7 @@
Some(concat!("`", $s, "`"))
}
}
- }
+ };
}
macro_rules! token_keyword {
@@ -232,8 +234,10 @@
#[cfg(feature = "extra-traits")]
impl ::std::hash::Hash for $name {
fn hash<H>(&self, _state: &mut H)
- where H: ::std::hash::Hasher
- {}
+ where
+ H: ::std::hash::Hasher,
+ {
+ }
}
#[cfg(feature = "printing")]
@@ -259,7 +263,7 @@
$name(span)
}
}
- }
+ };
}
macro_rules! token_delimiter {
@@ -294,23 +298,28 @@
#[cfg(feature = "extra-traits")]
impl ::std::hash::Hash for $name {
fn hash<H>(&self, _state: &mut H)
- where H: ::std::hash::Hasher
- {}
+ where
+ H: ::std::hash::Hasher,
+ {
+ }
}
impl $name {
#[cfg(feature = "printing")]
- pub fn surround<F>(&self,
- tokens: &mut ::quote::Tokens,
- f: F)
- where F: FnOnce(&mut ::quote::Tokens)
+ pub fn surround<F>(&self, tokens: &mut ::quote::Tokens, f: F)
+ where
+ F: FnOnce(&mut ::quote::Tokens),
{
printing::delim($s, &self.0, tokens, f);
}
#[cfg(feature = "parsing")]
- pub fn parse<F, R>(tokens: $crate::buffer::Cursor, f: F) -> $crate::synom::PResult<($name, R)>
- where F: FnOnce($crate::buffer::Cursor) -> $crate::synom::PResult<R>
+ pub fn parse<F, R>(
+ tokens: $crate::buffer::Cursor,
+ f: F,
+ ) -> $crate::synom::PResult<($name, R)>
+ where
+ F: FnOnce($crate::buffer::Cursor) -> $crate::synom::PResult<R>,
{
parsing::delim($s, tokens, $name, f)
}
@@ -321,7 +330,7 @@
$name(span)
}
}
- }
+ };
}
token_punct_def! {
@@ -340,11 +349,9 @@
impl ::Synom for Underscore {
fn parse(input: ::buffer::Cursor) -> ::synom::PResult<Underscore> {
match input.term() {
- Some((term, rest)) if term.as_str() == "_" => {
- Ok((Underscore([term.span()]), rest))
- }
+ Some((term, rest)) if term.as_str() == "_" => Ok((Underscore([term.span()]), rest)),
Some(_) => ::parse_error(),
- None => parsing::punct("_", input, Underscore)
+ None => parsing::punct("_", input, Underscore),
}
}
@@ -764,7 +771,7 @@
#[cfg(feature = "printing")]
mod printing {
- use proc_macro2::{Delimiter, Spacing, Span, Term, Op, Group};
+ use proc_macro2::{Delimiter, Group, Op, Spacing, Span, Term};
use quote::Tokens;
pub fn punct(s: &str, spans: &[Span], tokens: &mut Tokens) {
diff --git a/src/tt.rs b/src/tt.rs
index 5fb95ba..2443dce 100644
--- a/src/tt.rs
+++ b/src/tt.rs
@@ -32,7 +32,7 @@
Delimiter::None => return parse_error(),
};
- return Ok(((delimiter, g.stream().clone()), rest))
+ return Ok(((delimiter, g.stream().clone()), rest));
}
parse_error()
}
@@ -41,7 +41,7 @@
pub fn braced(input: Cursor) -> PResult<(Brace, TokenStream)> {
if let Some((TokenTree::Group(g), rest)) = input.token_tree() {
if g.delimiter() == Delimiter::Brace {
- return Ok(((Brace(g.span()), g.stream().clone()), rest))
+ return Ok(((Brace(g.span()), g.stream().clone()), rest));
}
}
parse_error()
@@ -51,7 +51,7 @@
pub fn parenthesized(input: Cursor) -> PResult<(Paren, TokenStream)> {
if let Some((TokenTree::Group(g), rest)) = input.token_tree() {
if g.delimiter() == Delimiter::Parenthesis {
- return Ok(((Paren(g.span()), g.stream().clone()), rest))
+ return Ok(((Paren(g.span()), g.stream().clone()), rest));
}
}
parse_error()
diff --git a/src/ty.rs b/src/ty.rs
index 1809a65..fa3900a 100644
--- a/src/ty.rs
+++ b/src/ty.rs
@@ -6,9 +6,9 @@
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-use punctuated::Punctuated;
use super::*;
use proc_macro2::TokenStream;
+use punctuated::Punctuated;
#[cfg(feature = "extra-traits")]
use std::hash::{Hash, Hasher};
#[cfg(feature = "extra-traits")]
@@ -249,8 +249,8 @@
#[cfg(feature = "parsing")]
pub mod parsing {
use super::*;
- use synom::Synom;
use path::parsing::qpath;
+ use synom::Synom;
impl Synom for Type {
named!(parse -> Self, call!(ambig_ty, true));
diff --git a/tests/test_derive_input.rs b/tests/test_derive_input.rs
index 7057996..553871b 100644
--- a/tests/test_derive_input.rs
+++ b/tests/test_derive_input.rs
@@ -71,18 +71,17 @@
vis: Visibility::Public(VisPublic {
pub_token: Default::default(),
}),
- attrs: vec![
- Attribute {
- bracket_token: Default::default(),
- pound_token: Default::default(),
- style: AttrStyle::Outer,
- path: "derive".into(),
- tts: TokenStream::from_iter(vec![
- delimited(Parenthesis, vec![word("Debug"), op(','), word("Clone")]),
- ]),
- is_sugared_doc: false,
- },
- ],
+ attrs: vec![Attribute {
+ bracket_token: Default::default(),
+ pound_token: Default::default(),
+ style: AttrStyle::Outer,
+ path: "derive".into(),
+ tts: TokenStream::from_iter(vec![delimited(
+ Parenthesis,
+ vec![word("Debug"), op(','), word("Clone")],
+ )]),
+ is_sugared_doc: false,
+ }],
generics: Generics::default(),
data: Data::Struct(DataStruct {
semi_token: None,
@@ -113,24 +112,22 @@
qself: None,
path: Path {
leading_colon: None,
- segments: punctuated![
- PathSegment {
- ident: "Vec".into(),
- arguments: PathArguments::AngleBracketed(
- AngleBracketedGenericArguments {
- colon2_token: None,
- lt_token: Default::default(),
- args: punctuated![
- GenericArgument::Type(Type::from(TypePath {
- qself: None,
- path: "Attribute".into(),
- })),
- ],
- gt_token: Default::default(),
- },
- ),
- },
- ],
+ segments: punctuated![PathSegment {
+ ident: "Vec".into(),
+ arguments: PathArguments::AngleBracketed(
+ AngleBracketedGenericArguments {
+ colon2_token: None,
+ lt_token: Default::default(),
+ args: punctuated![GenericArgument::Type(Type::from(
+ TypePath {
+ qself: None,
+ path: "Attribute".into(),
+ }
+ )),],
+ gt_token: Default::default(),
+ },
+ ),
+ },],
},
}.into(),
},
@@ -233,18 +230,16 @@
attrs: Vec::new(),
fields: Fields::Unnamed(FieldsUnnamed {
paren_token: Default::default(),
- unnamed: punctuated![
- Field {
- colon_token: None,
- ident: None,
- vis: Visibility::Inherited,
- attrs: Vec::new(),
- ty: TypePath {
- qself: None,
- path: "T".into(),
- }.into(),
- },
- ],
+ unnamed: punctuated![Field {
+ colon_token: None,
+ ident: None,
+ vis: Visibility::Inherited,
+ attrs: Vec::new(),
+ ty: TypePath {
+ qself: None,
+ path: "T".into(),
+ }.into(),
+ },],
}),
discriminant: None,
},
@@ -253,18 +248,16 @@
attrs: Vec::new(),
fields: Fields::Unnamed(FieldsUnnamed {
paren_token: Default::default(),
- unnamed: punctuated![
- Field {
- ident: None,
- colon_token: None,
- vis: Visibility::Inherited,
- attrs: Vec::new(),
- ty: TypePath {
- qself: None,
- path: "E".into(),
- }.into(),
- },
- ],
+ unnamed: punctuated![Field {
+ ident: None,
+ colon_token: None,
+ vis: Visibility::Inherited,
+ attrs: Vec::new(),
+ ty: TypePath {
+ qself: None,
+ path: "E".into(),
+ }.into(),
+ },],
}),
discriminant: None,
},
@@ -356,43 +349,41 @@
let expected = DeriveInput {
ident: "Dummy".into(),
vis: Visibility::Inherited,
- attrs: vec![
- Attribute {
- bracket_token: Default::default(),
- pound_token: Default::default(),
- style: AttrStyle::Outer,
- path: Path {
- leading_colon: Some(Default::default()),
- segments: punctuated![
- PathSegment::from("attr_args"),
- PathSegment::from("identity"),
- ],
- },
- tts: TokenStream::from_iter(vec![
- word("fn"),
- word("main"),
- delimited(Parenthesis, vec![]),
- delimited(
- Brace,
- vec![
- word("assert_eq"),
- op('!'),
- delimited(
- Parenthesis,
- vec![
- word("foo"),
- delimited(Parenthesis, vec![]),
- op(','),
- lit(Literal::string("Hello, world!")),
- ],
- ),
- op(';'),
- ],
- ),
- ]),
- is_sugared_doc: false,
+ attrs: vec![Attribute {
+ bracket_token: Default::default(),
+ pound_token: Default::default(),
+ style: AttrStyle::Outer,
+ path: Path {
+ leading_colon: Some(Default::default()),
+ segments: punctuated![
+ PathSegment::from("attr_args"),
+ PathSegment::from("identity"),
+ ],
},
- ],
+ tts: TokenStream::from_iter(vec![
+ word("fn"),
+ word("main"),
+ delimited(Parenthesis, vec![]),
+ delimited(
+ Brace,
+ vec![
+ word("assert_eq"),
+ op('!'),
+ delimited(
+ Parenthesis,
+ vec![
+ word("foo"),
+ delimited(Parenthesis, vec![]),
+ op(','),
+ lit(Literal::string("Hello, world!")),
+ ],
+ ),
+ op(';'),
+ ],
+ ),
+ ]),
+ is_sugared_doc: false,
+ }],
generics: Generics::default(),
data: Data::Struct(DataStruct {
fields: Fields::Unit,
@@ -418,19 +409,17 @@
let expected = DeriveInput {
ident: "S".into(),
vis: Visibility::Inherited,
- attrs: vec![
- Attribute {
- bracket_token: Default::default(),
- pound_token: Default::default(),
- style: AttrStyle::Outer,
- path: Path {
- leading_colon: None,
- segments: punctuated![PathSegment::from("inert")],
- },
- tts: TokenStream::from_iter(vec![op('<'), word("T"), op('>')]),
- is_sugared_doc: false,
+ attrs: vec![Attribute {
+ bracket_token: Default::default(),
+ pound_token: Default::default(),
+ style: AttrStyle::Outer,
+ path: Path {
+ leading_colon: None,
+ segments: punctuated![PathSegment::from("inert")],
},
- ],
+ tts: TokenStream::from_iter(vec![op('<'), word("T"), op('>')]),
+ is_sugared_doc: false,
+ }],
generics: Generics::default(),
data: Data::Struct(DataStruct {
fields: Fields::Unit,
@@ -456,19 +445,17 @@
let expected = DeriveInput {
ident: "S".into(),
vis: Visibility::Inherited,
- attrs: vec![
- Attribute {
- bracket_token: Default::default(),
- pound_token: Default::default(),
- style: AttrStyle::Outer,
- path: Path {
- leading_colon: None,
- segments: punctuated![PathSegment::from("foo"), PathSegment::from("self")],
- },
- tts: TokenStream::empty(),
- is_sugared_doc: false,
+ attrs: vec![Attribute {
+ bracket_token: Default::default(),
+ pound_token: Default::default(),
+ style: AttrStyle::Outer,
+ path: Path {
+ leading_colon: None,
+ segments: punctuated![PathSegment::from("foo"), PathSegment::from("self")],
},
- ],
+ tts: TokenStream::empty(),
+ is_sugared_doc: false,
+ }],
generics: Generics::default(),
data: Data::Struct(DataStruct {
fields: Fields::Unit,
@@ -504,29 +491,24 @@
data: Data::Struct(DataStruct {
fields: Fields::Unnamed(FieldsUnnamed {
paren_token: Default::default(),
- unnamed: punctuated![
- Field {
- ident: None,
- vis: Visibility::Restricted(VisRestricted {
- path: Box::new(Path {
- leading_colon: None,
- segments: punctuated![
- PathSegment::from("m"),
- PathSegment::from("n")
- ],
- }),
- in_token: Some(Default::default()),
- paren_token: Default::default(),
- pub_token: Default::default(),
+ unnamed: punctuated![Field {
+ ident: None,
+ vis: Visibility::Restricted(VisRestricted {
+ path: Box::new(Path {
+ leading_colon: None,
+ segments: punctuated![PathSegment::from("m"), PathSegment::from("n")],
}),
- colon_token: None,
- attrs: vec![],
- ty: TypePath {
- qself: None,
- path: "u8".into(),
- }.into(),
- },
- ],
+ in_token: Some(Default::default()),
+ paren_token: Default::default(),
+ pub_token: Default::default(),
+ }),
+ colon_token: None,
+ attrs: vec![],
+ ty: TypePath {
+ qself: None,
+ path: "u8".into(),
+ }.into(),
+ },],
}),
semi_token: Some(Default::default()),
struct_token: Default::default(),
diff --git a/tests/test_generics.rs b/tests/test_generics.rs
index d4ca16f..bdeb171 100644
--- a/tests/test_generics.rs
+++ b/tests/test_generics.rs
@@ -43,20 +43,19 @@
colon_token: Some(token::Colon::default()),
}),
GenericParam::Type(TypeParam {
- attrs: vec![
- Attribute {
- bracket_token: Default::default(),
- pound_token: Default::default(),
- style: AttrStyle::Outer,
- path: "may_dangle".into(),
- tts: TokenStream::empty(),
- is_sugared_doc: false,
- },
- ],
+ attrs: vec![Attribute {
+ bracket_token: Default::default(),
+ pound_token: Default::default(),
+ style: AttrStyle::Outer,
+ path: "may_dangle".into(),
+ tts: TokenStream::empty(),
+ is_sugared_doc: false,
+ }],
ident: "T".into(),
- bounds: punctuated![
- TypeParamBound::Lifetime(Lifetime::new("'a", Span::call_site())),
- ],
+ bounds: punctuated![TypeParamBound::Lifetime(Lifetime::new(
+ "'a",
+ Span::call_site()
+ )),],
default: Some(
TypeTuple {
elems: Default::default(),
@@ -69,24 +68,20 @@
],
where_clause: Some(WhereClause {
where_token: Default::default(),
- predicates: punctuated![
- WherePredicate::Type(PredicateType {
+ predicates: punctuated![WherePredicate::Type(PredicateType {
+ lifetimes: None,
+ colon_token: Default::default(),
+ bounded_ty: TypePath {
+ qself: None,
+ path: "T".into(),
+ }.into(),
+ bounds: punctuated![TypeParamBound::Trait(TraitBound {
+ paren_token: None,
+ modifier: TraitBoundModifier::None,
lifetimes: None,
- colon_token: Default::default(),
- bounded_ty: TypePath {
- qself: None,
- path: "T".into(),
- }.into(),
- bounds: punctuated![
- TypeParamBound::Trait(TraitBound {
- paren_token: None,
- modifier: TraitBoundModifier::None,
- lifetimes: None,
- path: "Debug".into(),
- }),
- ],
- }),
- ],
+ path: "Debug".into(),
+ }),],
+ }),],
}),
};
diff --git a/tests/test_ident.rs b/tests/test_ident.rs
index 44cd86e..7a88f53 100644
--- a/tests/test_ident.rs
+++ b/tests/test_ident.rs
@@ -11,8 +11,8 @@
use proc_macro2::{Span, TokenStream};
use std::str::FromStr;
-use syn::Ident;
use syn::synom::ParseError;
+use syn::Ident;
fn parse(s: &str) -> Result<Ident, ParseError> {
syn::parse2(TokenStream::from_str(s).unwrap())
diff --git a/tests/test_meta_item.rs b/tests/test_meta_item.rs
index 22a38fb..55eefb8 100644
--- a/tests/test_meta_item.rs
+++ b/tests/test_meta_item.rs
@@ -96,15 +96,13 @@
MetaList {
ident: "foo".into(),
paren_token: Default::default(),
- nested: punctuated![
- NestedMeta::Meta(
- MetaNameValue {
- ident: "bar".into(),
- eq_token: Default::default(),
- lit: lit(Literal::i32_unsuffixed(5)),
- }.into(),
- ),
- ],
+ nested: punctuated![NestedMeta::Meta(
+ MetaNameValue {
+ ident: "bar".into(),
+ eq_token: Default::default(),
+ lit: lit(Literal::i32_unsuffixed(5)),
+ }.into(),
+ ),],
},
)
}
@@ -116,18 +114,16 @@
MetaList {
ident: "foo".into(),
paren_token: Default::default(),
- nested: punctuated![
- NestedMeta::Meta(
- MetaNameValue {
- ident: "bar".into(),
- eq_token: Default::default(),
- lit: Lit::Bool(LitBool {
- value: true,
- span: Span::call_site()
- }),
- }.into(),
- ),
- ],
+ nested: punctuated![NestedMeta::Meta(
+ MetaNameValue {
+ ident: "bar".into(),
+ eq_token: Default::default(),
+ lit: Lit::Bool(LitBool {
+ value: true,
+ span: Span::call_site()
+ }),
+ }.into(),
+ ),],
},
)
}
@@ -152,15 +148,13 @@
MetaList {
ident: "list".into(),
paren_token: Default::default(),
- nested: punctuated![
- NestedMeta::Meta(
- MetaNameValue {
- ident: "name2".into(),
- eq_token: Default::default(),
- lit: lit(Literal::i32_unsuffixed(6)),
- }.into(),
- ),
- ],
+ nested: punctuated![NestedMeta::Meta(
+ MetaNameValue {
+ ident: "name2".into(),
+ eq_token: Default::default(),
+ lit: lit(Literal::i32_unsuffixed(6)),
+ }.into(),
+ ),],
}.into(),
),
NestedMeta::Meta(Meta::Word("word2".into())),
diff --git a/tests/test_precedence.rs b/tests/test_precedence.rs
index b3f90d2..1c4814d 100644
--- a/tests/test_precedence.rs
+++ b/tests/test_precedence.rs
@@ -211,8 +211,8 @@
use syntax::ast::{Expr, ExprKind, Field, Mac, Pat, Stmt, StmtKind, Ty};
use syntax::ext::quote::rt::DUMMY_SP;
use syntax::fold::{self, Folder};
- use syntax::util::ThinVec;
use syntax::util::small_vector::SmallVector;
+ use syntax::util::ThinVec;
fn expr(node: ExprKind) -> P<Expr> {
P(Expr {