Format with rustfmt 0.4.1
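
Mechanical reformatting only; no functional change is intended. The diff
touches src/lib.rs, src/stable.rs, src/strnom.rs, src/unstable.rs, and
tests/test.rs. Assuming "rustfmt 0.4.1" refers to the rustfmt-nightly
distribution on crates.io of that era, the result should be roughly
reproducible with something like:

    # install the matching rustfmt release, then reformat the whole crate
    cargo install rustfmt-nightly --vers 0.4.1 --force
    cargo fmt

(The exact invocation is an assumption; only the rustfmt version in the
title is taken from the commit itself.)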
diff --git a/src/lib.rs b/src/lib.rs
index 490ec7a..7ec1a73 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -25,7 +25,6 @@
 
 // Proc-macro2 types in rustdoc of other crates get linked to here.
 #![doc(html_root_url = "https://docs.rs/proc-macro2/0.3.1")]
-
 #![cfg_attr(feature = "nightly", feature(proc_macro))]
 
 #[cfg(feature = "proc-macro")]
@@ -83,8 +82,9 @@
     type Err = LexError;
 
     fn from_str(src: &str) -> Result<TokenStream, LexError> {
-        let e = src.parse().map_err(|e| {
-            LexError { inner: e, _marker: marker::PhantomData }
+        let e = src.parse().map_err(|e| LexError {
+            inner: e,
+            _marker: marker::PhantomData,
         })?;
         Ok(TokenStream::_new(e))
     }
@@ -219,14 +219,20 @@
 
     #[cfg(procmacro2_semver_exempt)]
     pub fn start(&self) -> LineColumn {
-        let imp::LineColumn{ line, column } = self.inner.start();
-        LineColumn { line: line, column: column }
+        let imp::LineColumn { line, column } = self.inner.start();
+        LineColumn {
+            line: line,
+            column: column,
+        }
     }
 
     #[cfg(procmacro2_semver_exempt)]
     pub fn end(&self) -> LineColumn {
-        let imp::LineColumn{ line, column } = self.inner.end();
-        LineColumn { line: line, column: column }
+        let imp::LineColumn { line, column } = self.inner.end();
+        LineColumn {
+            line: line,
+            column: column,
+        }
     }
 
     #[cfg(procmacro2_semver_exempt)]
@@ -559,16 +565,15 @@
     use std::marker;
     use std::rc::Rc;
 
-    use imp;
-    use TokenTree;
     pub use TokenStream;
+    use TokenTree;
+    use imp;
 
     pub struct IntoIter {
         inner: imp::TokenTreeIter,
         _marker: marker::PhantomData<Rc<()>>,
     }
 
-
     impl Iterator for IntoIter {
         type Item = TokenTree;
 
diff --git a/src/stable.rs b/src/stable.rs
index b4b7eae..b04e4ca 100644
--- a/src/stable.rs
+++ b/src/stable.rs
@@ -13,10 +13,10 @@
 use std::str::FromStr;
 use std::vec;
 
+use strnom::{block_comment, skip_whitespace, whitespace, word_break, Cursor, PResult};
 use unicode_xid::UnicodeXID;
-use strnom::{Cursor, PResult, skip_whitespace, block_comment, whitespace, word_break};
 
-use {TokenTree, Delimiter, Spacing, Group, Op};
+use {Delimiter, Group, Op, Spacing, TokenTree};
 
 #[derive(Clone, Debug)]
 pub struct TokenStream {
@@ -52,9 +52,7 @@
 
 #[cfg(not(procmacro2_semver_exempt))]
 fn get_cursor(src: &str) -> Cursor {
-    Cursor {
-        rest: src,
-    }
+    Cursor { rest: src }
 }
 
 impl FromStr for TokenStream {
@@ -124,18 +122,23 @@
 #[cfg(feature = "proc-macro")]
 impl From<::proc_macro::TokenStream> for TokenStream {
     fn from(inner: ::proc_macro::TokenStream) -> TokenStream {
-        inner.to_string().parse().expect("compiler token stream parse failed")
+        inner
+            .to_string()
+            .parse()
+            .expect("compiler token stream parse failed")
     }
 }
 
 #[cfg(feature = "proc-macro")]
 impl From<TokenStream> for ::proc_macro::TokenStream {
     fn from(inner: TokenStream) -> ::proc_macro::TokenStream {
-        inner.to_string().parse().expect("failed to parse to compiler tokens")
+        inner
+            .to_string()
+            .parse()
+            .expect("failed to parse to compiler tokens")
     }
 }
 
-
 impl From<TokenTree> for TokenStream {
     fn from(tree: TokenTree) -> TokenStream {
         TokenStream { inner: vec![tree] }
@@ -143,7 +146,7 @@
 }
 
 impl iter::FromIterator<TokenTree> for TokenStream {
-    fn from_iter<I: IntoIterator<Item=TokenTree>>(streams: I) -> Self {
+    fn from_iter<I: IntoIterator<Item = TokenTree>>(streams: I) -> Self {
         let mut v = Vec::new();
 
         for token in streams.into_iter() {
@@ -242,16 +245,19 @@
 #[cfg(procmacro2_semver_exempt)]
 impl FileInfo {
     fn offset_line_column(&self, offset: usize) -> LineColumn {
-        assert!(self.span_within(Span { lo: offset as u32, hi: offset as u32 }));
+        assert!(self.span_within(Span {
+            lo: offset as u32,
+            hi: offset as u32
+        }));
         let offset = offset - self.span.lo as usize;
         match self.lines.binary_search(&offset) {
             Ok(found) => LineColumn {
                 line: found + 1,
-                column: 0
+                column: 0,
             },
             Err(idx) => LineColumn {
                 line: idx,
-                column: offset - self.lines[idx - 1]
+                column: offset - self.lines[idx - 1],
             },
         }
     }
@@ -292,7 +298,10 @@
         let lines = lines_offsets(src);
         let lo = self.next_start_pos();
         // XXX(nika): Shouild we bother doing a checked cast or checked add here?
-        let span = Span { lo: lo, hi: lo + (src.len() as u32) };
+        let span = Span {
+            lo: lo,
+            hi: lo + (src.len() as u32),
+        };
 
         self.files.push(FileInfo {
             name: name.to_owned(),
@@ -412,9 +421,7 @@
         SYMBOLS.with(|interner| {
             let interner = interner.borrow();
             let s = interner.get(self.intern);
-            unsafe {
-                &*(s as *const str)
-            }
+            unsafe { &*(s as *const str) }
         })
     }
 }
@@ -447,19 +454,20 @@
         }
     }
 
-   fn intern(&mut self, s: &str) -> usize {
+    fn intern(&mut self, s: &str) -> usize {
         if let Some(&idx) = self.string_to_index.get(s) {
-            return idx
+            return idx;
         }
         let s = Rc::new(s.to_string());
         self.index_to_string.push(s.clone());
-        self.string_to_index.insert(MyRc(s), self.index_to_string.len() - 1);
+        self.string_to_index
+            .insert(MyRc(s), self.index_to_string.len() - 1);
         self.index_to_string.len() - 1
     }
 
-   fn get(&self, idx: usize) -> &str {
-       &self.index_to_string[idx]
-   }
+    fn get(&self, idx: usize) -> &str {
+        &self.index_to_string[idx]
+    }
 }
 
 #[derive(Clone, Debug)]
@@ -489,7 +497,7 @@
                 b'\r' => escaped.push_str(r"\r"),
                 b'"' => escaped.push_str("\\\""),
                 b'\\' => escaped.push_str("\\\\"),
-                b'\x20' ... b'\x7E' => escaped.push(*b as char),
+                b'\x20'...b'\x7E' => escaped.push(*b as char),
                 _ => escaped.push_str(&format!("\\x{:02X}", b)),
             }
         }
@@ -576,7 +584,9 @@
 
 impl<'a> From<&'a str> for Literal {
     fn from(t: &'a str) -> Literal {
-        let mut s = t.chars().flat_map(|c| c.escape_default()).collect::<String>();
+        let mut s = t.chars()
+            .flat_map(|c| c.escape_default())
+            .collect::<String>();
         s.push('"');
         s.insert(0, '"');
         Literal(s)
@@ -605,10 +615,7 @@
     let lo = input.off;
     let (input, mut token) = token_kind(input)?;
     let hi = input.off;
-    token.set_span(::Span::_new(Span {
-        lo: lo,
-        hi: hi,
-    }));
+    token.set_span(::Span::_new(Span { lo: lo, hi: hi }));
     Ok((input, token))
 }
 
@@ -677,19 +684,20 @@
     } else if a == "_" {
         Ok((input.advance(end), Op::new('_', Spacing::Alone).into()))
     } else {
-        Ok((input.advance(end), ::Term::new(a, ::Span::call_site()).into()))
+        Ok((
+            input.advance(end),
+            ::Term::new(a, ::Span::call_site()).into(),
+        ))
     }
 }
 
 // From https://github.com/rust-lang/rust/blob/master/src/libsyntax_pos/symbol.rs
 static KEYWORDS: &'static [&'static str] = &[
-    "abstract", "alignof", "as", "become", "box", "break", "const", "continue",
-    "crate", "do", "else", "enum", "extern", "false", "final", "fn", "for",
-    "if", "impl", "in", "let", "loop", "macro", "match", "mod", "move", "mut",
-    "offsetof", "override", "priv", "proc", "pub", "pure", "ref", "return",
-    "self", "Self", "sizeof", "static", "struct", "super", "trait", "true",
-    "type", "typeof", "unsafe", "unsized", "use", "virtual", "where", "while",
-    "yield",
+    "abstract", "alignof", "as", "become", "box", "break", "const", "continue", "crate", "do",
+    "else", "enum", "extern", "false", "final", "fn", "for", "if", "impl", "in", "let", "loop",
+    "macro", "match", "mod", "move", "mut", "offsetof", "override", "priv", "proc", "pub", "pure",
+    "ref", "return", "self", "Self", "sizeof", "static", "struct", "super", "trait", "true",
+    "type", "typeof", "unsafe", "unsized", "use", "virtual", "where", "while", "yield",
 ];
 
 fn literal(input: Cursor) -> PResult<::Literal> {
@@ -700,7 +708,10 @@
             let start = input.len() - input_no_ws.len();
             let len = input_no_ws.len() - a.len();
             let end = start + len;
-            Ok((a, ::Literal::_new(Literal(input.rest[start..end].to_string()))))
+            Ok((
+                a,
+                ::Literal::_new(Literal(input.rest[start..end].to_string())),
+            ))
         }
         Err(LexError) => Err(LexError),
     }
@@ -751,37 +762,30 @@
                     break;
                 }
             }
-            '\\' => {
-                match chars.next() {
-                    Some((_, 'x')) => {
-                        if !backslash_x_char(&mut chars) {
-                            break
-                        }
+            '\\' => match chars.next() {
+                Some((_, 'x')) => {
+                    if !backslash_x_char(&mut chars) {
+                        break;
                     }
-                    Some((_, 'n')) |
-                    Some((_, 'r')) |
-                    Some((_, 't')) |
-                    Some((_, '\\')) |
-                    Some((_, '\'')) |
-                    Some((_, '"')) |
-                    Some((_, '0')) => {}
-                    Some((_, 'u')) => {
-                        if !backslash_u(&mut chars) {
-                            break
-                        }
-                    }
-                    Some((_, '\n')) | Some((_, '\r')) => {
-                        while let Some(&(_, ch)) = chars.peek() {
-                            if ch.is_whitespace() {
-                                chars.next();
-                            } else {
-                                break;
-                            }
-                        }
-                    }
-                    _ => break,
                 }
-            }
+                Some((_, 'n')) | Some((_, 'r')) | Some((_, 't')) | Some((_, '\\'))
+                | Some((_, '\'')) | Some((_, '"')) | Some((_, '0')) => {}
+                Some((_, 'u')) => {
+                    if !backslash_u(&mut chars) {
+                        break;
+                    }
+                }
+                Some((_, '\n')) | Some((_, '\r')) => {
+                    while let Some(&(_, ch)) = chars.peek() {
+                        if ch.is_whitespace() {
+                            chars.next();
+                        } else {
+                            break;
+                        }
+                    }
+                }
+                _ => break,
+            },
             _ch => {}
         }
     }
@@ -815,35 +819,27 @@
                     break;
                 }
             }
-            b'\\' => {
-                match bytes.next() {
-                    Some((_, b'x')) => {
-                        if !backslash_x_byte(&mut bytes) {
-                            break
-                        }
-                    }
-                    Some((_, b'n')) |
-                    Some((_, b'r')) |
-                    Some((_, b't')) |
-                    Some((_, b'\\')) |
-                    Some((_, b'0')) |
-                    Some((_, b'\'')) |
-                    Some((_, b'"'))  => {}
-                    Some((newline, b'\n')) |
-                    Some((newline, b'\r')) => {
-                        let rest = input.advance(newline + 1);
-                        for (offset, ch) in rest.char_indices() {
-                            if !ch.is_whitespace() {
-                                input = rest.advance(offset);
-                                bytes = input.bytes().enumerate();
-                                continue 'outer;
-                            }
-                        }
+            b'\\' => match bytes.next() {
+                Some((_, b'x')) => {
+                    if !backslash_x_byte(&mut bytes) {
                         break;
                     }
-                    _ => break,
                 }
-            }
+                Some((_, b'n')) | Some((_, b'r')) | Some((_, b't')) | Some((_, b'\\'))
+                | Some((_, b'0')) | Some((_, b'\'')) | Some((_, b'"')) => {}
+                Some((newline, b'\n')) | Some((newline, b'\r')) => {
+                    let rest = input.advance(newline + 1);
+                    for (offset, ch) in rest.char_indices() {
+                        if !ch.is_whitespace() {
+                            input = rest.advance(offset);
+                            bytes = input.bytes().enumerate();
+                            continue 'outer;
+                        }
+                    }
+                    break;
+                }
+                _ => break,
+            },
             b if b < 0x80 => {}
             _ => break,
         }
@@ -868,7 +864,7 @@
         match ch {
             '"' if input.advance(byte_offset + 1).starts_with(&input.rest[..n]) => {
                 let rest = input.advance(byte_offset + 1 + n);
-                return Ok((rest, ()))
+                return Ok((rest, ()));
             }
             '\r' => {}
             _ => {}
@@ -888,19 +884,12 @@
 fn cooked_byte(input: Cursor) -> PResult<()> {
     let mut bytes = input.bytes().enumerate();
     let ok = match bytes.next().map(|(_, b)| b) {
-        Some(b'\\') => {
-            match bytes.next().map(|(_, b)| b) {
-                Some(b'x') => backslash_x_byte(&mut bytes),
-                Some(b'n') |
-                Some(b'r') |
-                Some(b't') |
-                Some(b'\\') |
-                Some(b'0') |
-                Some(b'\'') |
-                Some(b'"') => true,
-                _ => false,
-            }
-        }
+        Some(b'\\') => match bytes.next().map(|(_, b)| b) {
+            Some(b'x') => backslash_x_byte(&mut bytes),
+            Some(b'n') | Some(b'r') | Some(b't') | Some(b'\\') | Some(b'0') | Some(b'\'')
+            | Some(b'"') => true,
+            _ => false,
+        },
         b => b.is_some(),
     };
     if ok {
@@ -929,20 +918,14 @@
 fn cooked_char(input: Cursor) -> PResult<()> {
     let mut chars = input.char_indices();
     let ok = match chars.next().map(|(_, ch)| ch) {
-        Some('\\') => {
-            match chars.next().map(|(_, ch)| ch) {
-                Some('x') => backslash_x_char(&mut chars),
-                Some('u') => backslash_u(&mut chars),
-                Some('n') |
-                Some('r') |
-                Some('t') |
-                Some('\\') |
-                Some('0') |
-                Some('\'') |
-                Some('"') => true,
-                _ => false,
+        Some('\\') => match chars.next().map(|(_, ch)| ch) {
+            Some('x') => backslash_x_char(&mut chars),
+            Some('u') => backslash_u(&mut chars),
+            Some('n') | Some('r') | Some('t') | Some('\\') | Some('0') | Some('\'') | Some('"') => {
+                true
             }
-        }
+            _ => false,
+        },
         ch => ch.is_some(),
     };
     if ok {
@@ -968,7 +951,8 @@
 }
 
 fn backslash_x_char<I>(chars: &mut I) -> bool
-    where I: Iterator<Item = (usize, char)>
+where
+    I: Iterator<Item = (usize, char)>,
 {
     next_ch!(chars @ '0'...'7');
     next_ch!(chars @ '0'...'9' | 'a'...'f' | 'A'...'F');
@@ -976,7 +960,8 @@
 }
 
 fn backslash_x_byte<I>(chars: &mut I) -> bool
-    where I: Iterator<Item = (usize, u8)>
+where
+    I: Iterator<Item = (usize, u8)>,
 {
     next_ch!(chars @ b'0'...b'9' | b'a'...b'f' | b'A'...b'F');
     next_ch!(chars @ b'0'...b'9' | b'a'...b'f' | b'A'...b'F');
@@ -984,7 +969,8 @@
 }
 
 fn backslash_u<I>(chars: &mut I) -> bool
-    where I: Iterator<Item = (usize, char)>
+where
+    I: Iterator<Item = (usize, char)>,
 {
     next_ch!(chars @ '{');
     next_ch!(chars @ '0'...'9' | 'a'...'f' | 'A'...'F');
@@ -1027,9 +1013,11 @@
                     break;
                 }
                 chars.next();
-                if chars.peek()
-                       .map(|&ch| ch == '.' || UnicodeXID::is_xid_start(ch))
-                       .unwrap_or(false) {
+                if chars
+                    .peek()
+                    .map(|&ch| ch == '.' || UnicodeXID::is_xid_start(ch))
+                    .unwrap_or(false)
+                {
                     return Err(LexError);
                 }
                 len += 1;
@@ -1084,18 +1072,7 @@
 fn int(input: Cursor) -> PResult<()> {
     let (rest, ()) = digits(input)?;
     for suffix in &[
-        "isize",
-        "i8",
-        "i16",
-        "i32",
-        "i64",
-        "i128",
-        "usize",
-        "u8",
-        "u16",
-        "u32",
-        "u64",
-        "u128",
+        "isize", "i8", "i16", "i32", "i64", "i128", "usize", "u8", "u16", "u32", "u64", "u128"
     ] {
         if rest.starts_with(suffix) {
             return word_break(rest.advance(suffix.len()));
diff --git a/src/strnom.rs b/src/strnom.rs
index dbac5c9..1fddcd0 100644
--- a/src/strnom.rs
+++ b/src/strnom.rs
@@ -1,6 +1,6 @@
 //! Adapted from [`nom`](https://github.com/Geal/nom).
 
-use std::str::{Chars, CharIndices, Bytes};
+use std::str::{Bytes, CharIndices, Chars};
 
 use unicode_xid::UnicodeXID;
 
@@ -73,8 +73,9 @@
     while i < bytes.len() {
         let s = input.advance(i);
         if bytes[i] == b'/' {
-            if s.starts_with("//") && (!s.starts_with("///") || s.starts_with("////")) &&
-               !s.starts_with("//!") {
+            if s.starts_with("//") && (!s.starts_with("///") || s.starts_with("////"))
+                && !s.starts_with("//!")
+            {
                 if let Some(len) = s.find('\n') {
                     i += len + 1;
                     continue;
@@ -82,9 +83,10 @@
                 break;
             } else if s.starts_with("/**/") {
                 i += 4;
-                continue
-            } else if s.starts_with("/*") && (!s.starts_with("/**") || s.starts_with("/***")) &&
-                      !s.starts_with("/*!") {
+                continue;
+            } else if s.starts_with("/*") && (!s.starts_with("/**") || s.starts_with("/***"))
+                && !s.starts_with("/*!")
+            {
                 let (_, com) = block_comment(s)?;
                 i += com.len();
                 continue;
@@ -104,11 +106,7 @@
                 }
             }
         }
-        return if i > 0 {
-            Ok((s, ()))
-        } else {
-            Err(LexError)
-        };
+        return if i > 0 { Ok((s, ())) } else { Err(LexError) };
     }
     Ok((input.advance(input.len()), ()))
 }
@@ -270,7 +268,7 @@
         } else {
             match $i.find('\n') {
                 Some(i) => Ok(($i.advance(i), &$i.rest[..i])),
-                None => Ok(($i.advance($i.len()), ""))
+                None => Ok(($i.advance($i.len()), "")),
             }
         }
     }};
@@ -395,7 +393,7 @@
 macro_rules! many0 {
     ($i:expr, $f:expr) => {{
         let ret;
-        let mut res   = ::std::vec::Vec::new();
+        let mut res = ::std::vec::Vec::new();
         let mut input = $i;
 
         loop {
diff --git a/src/unstable.rs b/src/unstable.rs
index e01e1f0..49d1d2c 100644
--- a/src/unstable.rs
+++ b/src/unstable.rs
@@ -7,7 +7,7 @@
 
 use proc_macro;
 
-use {TokenTree, Delimiter, Spacing, Group, Op};
+use {Delimiter, Group, Op, Spacing, TokenTree};
 
 #[derive(Clone)]
 pub struct TokenStream(proc_macro::TokenStream);
@@ -71,19 +71,20 @@
                 };
                 (tt.span(), proc_macro::TokenNode::Op(tt.op(), kind))
             }
-            TokenTree::Term(tt) => {
-                (tt.span(), proc_macro::TokenNode::Term(tt.inner.0))
-            }
-            TokenTree::Literal(tt) => {
-                (tt.span(), proc_macro::TokenNode::Literal(tt.inner.0))
-            }
+            TokenTree::Term(tt) => (tt.span(), proc_macro::TokenNode::Term(tt.inner.0)),
+            TokenTree::Literal(tt) => (tt.span(), proc_macro::TokenNode::Literal(tt.inner.0)),
         };
-        TokenStream(proc_macro::TokenTree { span: span.inner.0, kind }.into())
+        TokenStream(
+            proc_macro::TokenTree {
+                span: span.inner.0,
+                kind,
+            }.into(),
+        )
     }
 }
 
 impl iter::FromIterator<TokenTree> for TokenStream {
-    fn from_iter<I: IntoIterator<Item=TokenTree>>(streams: I) -> Self {
+    fn from_iter<I: IntoIterator<Item = TokenTree>>(streams: I) -> Self {
         let streams = streams.into_iter().map(TokenStream::from);
         TokenStream(streams.collect::<proc_macro::TokenStream>())
     }
@@ -140,9 +141,7 @@
                 o.span = span;
                 o.into()
             }
-            proc_macro::TokenNode::Term(s) => {
-                ::Term::_new(Term(s), span).into()
-            }
+            proc_macro::TokenNode::Term(s) => ::Term::_new(Term(s), span).into(),
             proc_macro::TokenNode::Literal(l) => {
                 let mut l = ::Literal::_new(Literal(l));
                 l.span = span;
@@ -244,12 +243,12 @@
     }
 
     pub fn start(&self) -> LineColumn {
-        let proc_macro::LineColumn{ line, column } = self.0.start();
+        let proc_macro::LineColumn { line, column } = self.0.start();
         LineColumn { line, column }
     }
 
     pub fn end(&self) -> LineColumn {
-        let proc_macro::LineColumn{ line, column } = self.0.end();
+        let proc_macro::LineColumn { line, column } = self.0.end();
         LineColumn { line, column }
     }
 
diff --git a/tests/test.rs b/tests/test.rs
index 23ff04d..50322a1 100644
--- a/tests/test.rs
+++ b/tests/test.rs
@@ -2,7 +2,7 @@
 
 use std::str::{self, FromStr};
 
-use proc_macro2::{Term, Literal, TokenStream, Span, TokenTree};
+use proc_macro2::{Literal, Span, Term, TokenStream, TokenTree};
 
 #[test]
 fn symbols() {
@@ -29,11 +29,14 @@
     roundtrip("a");
     roundtrip("<<");
     roundtrip("<<=");
-    roundtrip("
+    roundtrip(
+        "
         /// a
         wut
-    ");
-    roundtrip("
+    ",
+    );
+    roundtrip(
+        "
         1
         1.0
         1f32
@@ -47,7 +50,8 @@
         9
         0
         0xffffffffffffffffffffffffffffffff
-    ");
+    ",
+    );
     roundtrip("'a");
     roundtrip("'static");
     roundtrip("'\\u{10__FFFF}'");
@@ -80,10 +84,7 @@
         check_spans_internal(ts, &mut lines);
     }
 
-    fn check_spans_internal(
-        ts: TokenStream,
-        lines: &mut &[(usize, usize, usize, usize)],
-    ) {
+    fn check_spans_internal(ts: TokenStream, lines: &mut &[(usize, usize, usize, usize)]) {
         for i in ts {
             if let Some((&(sline, scol, eline, ecol), rest)) = lines.split_first() {
                 *lines = rest;
@@ -106,19 +107,22 @@
         }
     }
 
-    check_spans("\
+    check_spans(
+        "\
 /// This is a document comment
 testing 123
 {
   testing 234
-}", &[
-    (1, 0, 1, 30),
-    (2, 0, 2, 7),
-    (2, 8, 2, 11),
-    (3, 0, 5, 1),
-    (4, 2, 4, 9),
-    (4, 10, 4, 13),
-]);
+}",
+        &[
+            (1, 0, 1, 30),
+            (2, 0, 2, 7),
+            (2, 8, 2, 11),
+            (3, 0, 5, 1),
+            (4, 2, 4, 9),
+            (4, 10, 4, 13),
+        ],
+    );
 }
 
 #[cfg(procmacro2_semver_exempt)]
@@ -139,13 +143,22 @@
 #[cfg(procmacro2_semver_exempt)]
 #[test]
 fn span_join() {
-    let source1 =
-        "aaa\nbbb".parse::<TokenStream>().unwrap().into_iter().collect::<Vec<_>>();
-    let source2 =
-        "ccc\nddd".parse::<TokenStream>().unwrap().into_iter().collect::<Vec<_>>();
+    let source1 = "aaa\nbbb"
+        .parse::<TokenStream>()
+        .unwrap()
+        .into_iter()
+        .collect::<Vec<_>>();
+    let source2 = "ccc\nddd"
+        .parse::<TokenStream>()
+        .unwrap()
+        .into_iter()
+        .collect::<Vec<_>>();
 
     assert!(source1[0].span().source_file() != source2[0].span().source_file());
-    assert_eq!(source1[0].span().source_file(), source1[1].span().source_file());
+    assert_eq!(
+        source1[0].span().source_file(),
+        source1[1].span().source_file()
+    );
 
     let joined1 = source1[0].span().join(source1[1].span());
     let joined2 = source1[0].span().join(source2[0].span());
@@ -159,7 +172,10 @@
     assert_eq!(end.line, 2);
     assert_eq!(end.column, 3);
 
-    assert_eq!(joined1.unwrap().source_file(), source1[0].span().source_file());
+    assert_eq!(
+        joined1.unwrap().source_file(),
+        source1[0].span().source_file()
+    );
 }
 
 #[test]