Format with rustfmt 0.7.0
diff --git a/src/attr.rs b/src/attr.rs
index 6f50e0a..2d0f244 100644
--- a/src/attr.rs
+++ b/src/attr.rs
@@ -74,8 +74,10 @@
 #[cfg(feature = "extra-traits")]
 impl PartialEq for Attribute {
     fn eq(&self, other: &Self) -> bool {
-        self.style == other.style && self.pound_token == other.pound_token
-            && self.bracket_token == other.bracket_token && self.path == other.path
+        self.style == other.style
+            && self.pound_token == other.pound_token
+            && self.bracket_token == other.bracket_token
+            && self.path == other.path
             && TokenStreamHelper(&self.tts) == TokenStreamHelper(&other.tts)
             && self.is_sugared_doc == other.is_sugared_doc
     }
diff --git a/src/buffer.rs b/src/buffer.rs
index d695c77..3ca7f7f 100644
--- a/src/buffer.rs
+++ b/src/buffer.rs
@@ -129,7 +129,7 @@
 
 #[cfg(feature = "proc-macro")]
 use proc_macro as pm;
-use proc_macro2::{Delimiter, Literal, Span, Ident, TokenStream};
+use proc_macro2::{Delimiter, Ident, Literal, Span, TokenStream};
 use proc_macro2::{Group, Punct, TokenTree};
 
 use std::marker::PhantomData;
diff --git a/src/derive.rs b/src/derive.rs
index ba60f8d..e1cb110 100644
--- a/src/derive.rs
+++ b/src/derive.rs
@@ -163,8 +163,8 @@
 mod printing {
     use super::*;
     use attr::FilterAttrs;
-    use quote::ToTokens;
     use proc_macro2::TokenStream;
+    use quote::ToTokens;
 
     impl ToTokens for DeriveInput {
         fn to_tokens(&self, tokens: &mut TokenStream) {
diff --git a/src/file.rs b/src/file.rs
index 99a311d..9b5b11f 100644
--- a/src/file.rs
+++ b/src/file.rs
@@ -111,8 +111,8 @@
 mod printing {
     use super::*;
     use attr::FilterAttrs;
-    use quote::{ToTokens, TokenStreamExt};
     use proc_macro2::TokenStream;
+    use quote::{ToTokens, TokenStreamExt};
 
     impl ToTokens for File {
         fn to_tokens(&self, tokens: &mut TokenStream) {
diff --git a/src/generics.rs b/src/generics.rs
index 4c2858d..970e1ff 100644
--- a/src/generics.rs
+++ b/src/generics.rs
@@ -753,8 +753,8 @@
 mod printing {
     use super::*;
     use attr::FilterAttrs;
-    use quote::{ToTokens, TokenStreamExt};
     use proc_macro2::TokenStream;
+    use quote::{ToTokens, TokenStreamExt};
 
     impl ToTokens for Generics {
         fn to_tokens(&self, tokens: &mut TokenStream) {
diff --git a/src/item.rs b/src/item.rs
index 226e102..3f1ad79 100644
--- a/src/item.rs
+++ b/src/item.rs
@@ -248,8 +248,11 @@
 #[cfg(feature = "extra-traits")]
 impl PartialEq for ItemMacro2 {
     fn eq(&self, other: &Self) -> bool {
-        self.attrs == other.attrs && self.vis == other.vis && self.macro_token == other.macro_token
-            && self.ident == other.ident && self.paren_token == other.paren_token
+        self.attrs == other.attrs
+            && self.vis == other.vis
+            && self.macro_token == other.macro_token
+            && self.ident == other.ident
+            && self.paren_token == other.paren_token
             && TokenStreamHelper(&self.args) == TokenStreamHelper(&other.args)
             && self.brace_token == other.brace_token
             && TokenStreamHelper(&self.body) == TokenStreamHelper(&other.body)
@@ -1536,8 +1539,8 @@
 mod printing {
     use super::*;
     use attr::FilterAttrs;
-    use quote::{ToTokens, TokenStreamExt};
     use proc_macro2::TokenStream;
+    use quote::{ToTokens, TokenStreamExt};
 
     impl ToTokens for ItemExternCrate {
         fn to_tokens(&self, tokens: &mut TokenStream) {
diff --git a/src/lib.rs b/src/lib.rs
index df82a52..a36ca6f 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -260,7 +260,6 @@
 // Syn types in rustdoc of other crates get linked to here.
 #![doc(html_root_url = "https://docs.rs/syn/0.13.11")]
 #![cfg_attr(feature = "cargo-clippy", deny(clippy, clippy_pedantic))]
-
 // Ignored clippy lints.
 #![cfg_attr(
     feature = "cargo-clippy",
@@ -273,14 +272,8 @@
 #![cfg_attr(
     feature = "cargo-clippy",
     allow(
-        cast_possible_truncation,
-        cast_possible_wrap,
-        items_after_statements,
-        similar_names,
-        single_match_else,
-        stutter,
-        unseparated_literal_suffix,
-        use_self,
+        cast_possible_truncation, cast_possible_wrap, items_after_statements, similar_names,
+        single_match_else, stutter, unseparated_literal_suffix, use_self
     )
 )]
 
diff --git a/src/lifetime.rs b/src/lifetime.rs
index bf8147b..74f4f68 100644
--- a/src/lifetime.rs
+++ b/src/lifetime.rs
@@ -10,7 +10,7 @@
 use std::fmt::{self, Display};
 use std::hash::{Hash, Hasher};
 
-use proc_macro2::{Span, Ident};
+use proc_macro2::{Ident, Span};
 use unicode_xid::UnicodeXID;
 
 use token::Apostrophe;
@@ -139,8 +139,8 @@
 #[cfg(feature = "printing")]
 mod printing {
     use super::*;
-    use quote::ToTokens;
     use proc_macro2::TokenStream;
+    use quote::ToTokens;
 
     impl ToTokens for Lifetime {
         fn to_tokens(&self, tokens: &mut TokenStream) {
diff --git a/src/lit.rs b/src/lit.rs
index 76df32a..b4debdb 100644
--- a/src/lit.rs
+++ b/src/lit.rs
@@ -506,8 +506,8 @@
 #[cfg(feature = "printing")]
 mod printing {
     use super::*;
-    use quote::{ToTokens, TokenStreamExt};
     use proc_macro2::TokenStream;
+    use quote::{ToTokens, TokenStreamExt};
 
     impl ToTokens for LitStr {
         fn to_tokens(&self, tokens: &mut TokenStream) {
diff --git a/src/mac.rs b/src/mac.rs
index a486d7c..a9219fe 100644
--- a/src/mac.rs
+++ b/src/mac.rs
@@ -46,7 +46,8 @@
 #[cfg(feature = "extra-traits")]
 impl PartialEq for Macro {
     fn eq(&self, other: &Self) -> bool {
-        self.path == other.path && self.bang_token == other.bang_token
+        self.path == other.path
+            && self.bang_token == other.bang_token
             && self.delimiter == other.delimiter
             && TokenStreamHelper(&self.tts) == TokenStreamHelper(&other.tts)
     }
@@ -93,8 +94,8 @@
 #[cfg(feature = "printing")]
 mod printing {
     use super::*;
-    use quote::ToTokens;
     use proc_macro2::TokenStream;
+    use quote::ToTokens;
 
     impl ToTokens for Macro {
         fn to_tokens(&self, tokens: &mut TokenStream) {
diff --git a/src/op.rs b/src/op.rs
index 95ba33c..a5188d0 100644
--- a/src/op.rs
+++ b/src/op.rs
@@ -174,8 +174,8 @@
 #[cfg(feature = "printing")]
 mod printing {
     use super::*;
-    use quote::ToTokens;
     use proc_macro2::TokenStream;
+    use quote::ToTokens;
 
     impl ToTokens for BinOp {
         fn to_tokens(&self, tokens: &mut TokenStream) {
diff --git a/src/path.rs b/src/path.rs
index cded435..19c1f05 100644
--- a/src/path.rs
+++ b/src/path.rs
@@ -418,8 +418,8 @@
 #[cfg(feature = "printing")]
 mod printing {
     use super::*;
-    use quote::ToTokens;
     use proc_macro2::TokenStream;
+    use quote::ToTokens;
 
     impl ToTokens for Path {
         fn to_tokens(&self, tokens: &mut TokenStream) {
diff --git a/src/punctuated.rs b/src/punctuated.rs
index 8f4144f..a06abb5 100644
--- a/src/punctuated.rs
+++ b/src/punctuated.rs
@@ -762,8 +762,8 @@
 #[cfg(feature = "printing")]
 mod printing {
     use super::*;
-    use quote::{ToTokens, TokenStreamExt};
     use proc_macro2::TokenStream;
+    use quote::{ToTokens, TokenStreamExt};
 
     impl<T, P> ToTokens for Punctuated<T, P>
     where
diff --git a/src/synom.rs b/src/synom.rs
index 28444f3..a018ff3 100644
--- a/src/synom.rs
+++ b/src/synom.rs
@@ -154,8 +154,8 @@
 use proc_macro;
 use proc_macro2;
 
-pub use error::{PResult, ParseError};
 use error::parse_error;
+pub use error::{PResult, ParseError};
 
 use buffer::{Cursor, TokenBuffer};
 
@@ -215,12 +215,12 @@
 }
 
 impl Synom for proc_macro2::Ident {
-	fn parse(input: Cursor) -> PResult<Self> {
-		let (term, rest) = match input.ident() {
-			Some(term) => term,
-			_ => return parse_error(),
-		};
-		match &term.to_string()[..] {
+    fn parse(input: Cursor) -> PResult<Self> {
+        let (term, rest) = match input.ident() {
+            Some(term) => term,
+            _ => return parse_error(),
+        };
+        match &term.to_string()[..] {
 			"_"
 			// From https://doc.rust-lang.org/grammar.html#keywords
 			| "abstract" | "alignof" | "as" | "become" | "box" | "break" | "const"
@@ -233,12 +233,12 @@
 			_ => {}
 		}
 
-		Ok((term, rest))
-	}
+        Ok((term, rest))
+    }
 
-	fn description() -> Option<&'static str> {
-		Some("identifier")
-	}
+    fn description() -> Option<&'static str> {
+        Some("identifier")
+    }
 }
 
 /// Parser that can parse Rust tokens into a particular syntax tree node.
diff --git a/src/token.rs b/src/token.rs
index 97f33f1..37a0b35 100644
--- a/src/token.rs
+++ b/src/token.rs
@@ -97,7 +97,7 @@
 //! # fn main() {}
 //! ```
 
-use proc_macro2::{Span, Ident};
+use proc_macro2::{Ident, Span};
 
 macro_rules! tokens {
     (
@@ -119,7 +119,7 @@
 }
 
 macro_rules! token_punct_def {
-    (#[$doc:meta] pub struct $name:ident / $len:tt) => {
+    (#[$doc:meta]pub struct $name:ident / $len:tt) => {
         #[cfg_attr(feature = "clone-impls", derive(Copy, Clone))]
         #[$doc]
         ///
@@ -400,7 +400,7 @@
                     ::parse_error()
                 }
             }
-            None => ::parse_error()
+            None => ::parse_error(),
         }
     }
 
@@ -788,7 +788,7 @@
                         *slot = op.span();
                         tokens = rest;
                     } else {
-                        return parse_error()
+                        return parse_error();
                     }
                 }
                 _ => return parse_error(),
@@ -797,11 +797,7 @@
         Ok((new(T::from_spans(&spans)), tokens))
     }
 
-    pub fn keyword<'a, T>(
-        keyword: &str,
-        tokens: Cursor<'a>,
-        new: fn(Span) -> T,
-    ) -> PResult<'a, T> {
+    pub fn keyword<'a, T>(keyword: &str, tokens: Cursor<'a>, new: fn(Span) -> T) -> PResult<'a, T> {
         if let Some((term, rest)) = tokens.ident() {
             if term == keyword {
                 return Ok((new(term.span()), rest));
@@ -844,7 +840,7 @@
 
 #[cfg(feature = "printing")]
 mod printing {
-    use proc_macro2::{Delimiter, Group, Punct, Spacing, Span, Ident, TokenStream};
+    use proc_macro2::{Delimiter, Group, Ident, Punct, Spacing, Span, TokenStream};
     use quote::TokenStreamExt;
 
     pub fn punct(s: &str, spans: &[Span], tokens: &mut TokenStream) {
diff --git a/src/ty.rs b/src/ty.rs
index 6b9cd3d..f916505 100644
--- a/src/ty.rs
+++ b/src/ty.rs
@@ -646,8 +646,8 @@
 #[cfg(feature = "printing")]
 mod printing {
     use super::*;
-    use quote::ToTokens;
     use proc_macro2::TokenStream;
+    use quote::ToTokens;
 
     impl ToTokens for TypeSlice {
         fn to_tokens(&self, tokens: &mut TokenStream) {
diff --git a/tests/common/mod.rs b/tests/common/mod.rs
index 508389f..5bffa7b 100644
--- a/tests/common/mod.rs
+++ b/tests/common/mod.rs
@@ -25,7 +25,7 @@
         Ok(s) => s,
         Err(_) => {
             env::set_var("RUST_MIN_STACK", 16000000.to_string());
-            return
+            return;
         }
     };
     let min_stack_value: usize = min_stack_value
diff --git a/tests/test_expr.rs b/tests/test_expr.rs
index 304ebfb..4b5bf8c 100644
--- a/tests/test_expr.rs
+++ b/tests/test_expr.rs
@@ -8,10 +8,10 @@
 
 #![cfg(feature = "extra-traits")]
 
-extern crate syn;
 extern crate proc_macro2;
-use syn::*;
+extern crate syn;
 use proc_macro2::*;
+use syn::*;
 
 macro_rules! assert_let {
     ($p:pat = $e:expr) => {
diff --git a/tests/test_generics.rs b/tests/test_generics.rs
index 2c15e18..9783b13 100644
--- a/tests/test_generics.rs
+++ b/tests/test_generics.rs
@@ -16,7 +16,7 @@
 extern crate quote;
 
 extern crate proc_macro2;
-use proc_macro2::{Span, TokenStream, Ident};
+use proc_macro2::{Ident, Span, TokenStream};
 
 #[macro_use]
 mod macros;
@@ -153,7 +153,13 @@
 #[test]
 fn test_fn_precedence_in_where_clause() {
     // This should parse as two separate bounds, `FnOnce() -> i32` and `Send` - not `FnOnce() -> (i32 + Send)`.
-    let sig = quote!(fn f<G>() where G: FnOnce() -> i32 + Send {});
+    let sig = quote! {
+        fn f<G>()
+        where
+            G: FnOnce() -> i32 + Send,
+        {
+        }
+    };
     let fun = common::parse::syn::<ItemFn>(sig.into());
     let where_clause = fun.decl.generics.where_clause.as_ref().unwrap();
     assert_eq!(where_clause.predicates.len(), 1);
diff --git a/tests/test_ident.rs b/tests/test_ident.rs
index cb0b2f4..73187d3 100644
--- a/tests/test_ident.rs
+++ b/tests/test_ident.rs
@@ -9,7 +9,7 @@
 extern crate proc_macro2;
 extern crate syn;
 
-use proc_macro2::{Span, TokenStream, Ident};
+use proc_macro2::{Ident, Span, TokenStream};
 use std::str::FromStr;
 use syn::synom::ParseError;
 
diff --git a/tests/test_meta_item.rs b/tests/test_meta_item.rs
index 1c2d4e3..a45e36e 100644
--- a/tests/test_meta_item.rs
+++ b/tests/test_meta_item.rs
@@ -11,7 +11,7 @@
 extern crate proc_macro2;
 extern crate syn;
 
-use proc_macro2::{Literal, Span, TokenStream, Ident};
+use proc_macro2::{Ident, Literal, Span, TokenStream};
 use syn::buffer::TokenBuffer;
 use syn::*;
 
diff --git a/tests/test_precedence.rs b/tests/test_precedence.rs
index 1c4814d..1066b92 100644
--- a/tests/test_precedence.rs
+++ b/tests/test_precedence.rs
@@ -110,7 +110,8 @@
             }
 
             // Our version of `libsyntax` can't parse this test
-            if path.to_str()
+            if path
+                .to_str()
                 .unwrap()
                 .ends_with("optional_comma_in_match_arm.rs")
             {