Pare down the Synom trait
I would like to make it clearer that parsing a string is second-class
functionality compared to parsing tokens.
diff --git a/Cargo.toml b/Cargo.toml
index 523af02..c9ac607 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -27,6 +27,7 @@
synom = { version = "0.11", path = "synom" }
[dev-dependencies]
+error-chain = "0.10"
syntex_pos = "0.59"
syntex_syntax = "0.59"
tempdir = "0.3.5"
diff --git a/src/file.rs b/src/file.rs
new file mode 100644
index 0000000..0cc1428
--- /dev/null
+++ b/src/file.rs
@@ -0,0 +1,46 @@
+use super::*;
+
+ast_struct! {
+ pub struct File {
+ pub shebang: Option<String>,
+ pub attrs: Vec<Attribute>,
+ pub items: Vec<Item>,
+ }
+}
+
+#[cfg(feature = "parsing")]
+pub mod parsing {
+ use super::*;
+
+ use synom::Synom;
+
+ impl Synom for File {
+ named!(parse -> Self, do_parse!(
+ attrs: many0!(call!(Attribute::parse_inner)) >>
+ items: many0!(syn!(Item)) >>
+ (File {
+ shebang: None,
+ attrs: attrs,
+ items: items,
+ })
+ ));
+
+ fn description() -> Option<&'static str> {
+ Some("file")
+ }
+ }
+}
+
+#[cfg(feature = "printing")]
+mod printing {
+ use super::*;
+ use attr::FilterAttrs;
+ use quote::{Tokens, ToTokens};
+
+ impl ToTokens for File {
+ fn to_tokens(&self, tokens: &mut Tokens) {
+ tokens.append_all(self.attrs.inner());
+ tokens.append_all(&self.items);
+ }
+ }
+}
diff --git a/src/fold.rs b/src/fold.rs
index b39557a..99b66cb 100644
--- a/src/fold.rs
+++ b/src/fold.rs
@@ -13,7 +13,7 @@
/// method's default implementation recursively visits the substructure of the
/// input via the `noop_fold` methods, which perform an "identity fold", that
/// is, they return the same structure that they are given (for example the
-/// `fold_crate` method by default calls `fold::noop_fold_crate`).
+/// `fold_file` method by default calls `fold::noop_fold_file`).
///
/// If you want to ensure that your code handles every variant explicitly, you
/// need to override each method and monitor future changes to `Folder` in case
@@ -90,8 +90,8 @@
}
#[cfg(feature = "full")]
- fn fold_crate(&mut self, _crate: Crate) -> Crate {
- noop_fold_crate(self, _crate)
+ fn fold_file(&mut self, file: File) -> File {
+ noop_fold_file(self, file)
}
#[cfg(feature = "full")]
fn fold_item(&mut self, item: Item) -> Item {
@@ -564,15 +564,14 @@
}
#[cfg(feature = "full")]
-pub fn noop_fold_crate<F: ?Sized + Folder>(folder: &mut F,
- krate: Crate)
- -> Crate {
- Crate {
- attrs: krate.attrs.lift(|a| folder.fold_attribute(a)),
- items: krate.items.lift(|i| folder.fold_item(i)),
- ..krate
+pub fn noop_fold_file<F: ?Sized + Folder>(folder: &mut F,
+ file: File)
+ -> File {
+ File {
+ attrs: file.attrs.lift(|a| folder.fold_attribute(a)),
+ items: file.items.lift(|i| folder.fold_item(i)),
+ ..file
}
-
}
#[cfg(feature = "full")]
diff --git a/src/krate.rs b/src/krate.rs
deleted file mode 100644
index 7239bc3..0000000
--- a/src/krate.rs
+++ /dev/null
@@ -1,69 +0,0 @@
-use super::*;
-
-ast_struct! {
- pub struct Crate {
- pub shebang: Option<String>,
- pub attrs: Vec<Attribute>,
- pub items: Vec<Item>,
- }
-}
-
-#[cfg(feature = "parsing")]
-pub mod parsing {
- use super::*;
-
- use synom::{Synom, ParseError};
-
- impl Synom for Crate {
- named!(parse -> Self, do_parse!(
- attrs: many0!(call!(Attribute::parse_inner)) >>
- items: many0!(syn!(Item)) >>
- (Crate {
- shebang: None,
- attrs: attrs,
- items: items,
- })
- ));
-
- fn description() -> Option<&'static str> {
- Some("crate")
- }
-
- fn parse_str_all(mut input: &str) -> Result<Self, ParseError> {
- // Strip the BOM if it is present
- const BOM: &'static str = "\u{feff}";
- if input.starts_with(BOM) {
- input = &input[BOM.len()..];
- }
-
- let mut shebang = None;
- if input.starts_with("#!") && !input.starts_with("#![") {
- if let Some(idx) = input.find('\n') {
- shebang = Some(input[..idx].to_string());
- input = &input[idx..];
- } else {
- shebang = Some(input.to_string());
- input = "";
- }
- }
-
- let mut krate: Crate = Self::parse_all(input.parse()?)?;
- krate.shebang = shebang;
- Ok(krate)
- }
- }
-}
-
-#[cfg(feature = "printing")]
-mod printing {
- use super::*;
- use attr::FilterAttrs;
- use quote::{Tokens, ToTokens};
-
- impl ToTokens for Crate {
- fn to_tokens(&self, tokens: &mut Tokens) {
- tokens.append_all(self.attrs.inner());
- tokens.append_all(&self.items);
- }
- }
-}
diff --git a/src/lib.rs b/src/lib.rs
index a19581d..92cb9b2 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -2,6 +2,7 @@
#![cfg_attr(feature = "cargo-clippy", allow(large_enum_variant))]
+extern crate proc_macro;
extern crate proc_macro2;
extern crate unicode_xid;
@@ -62,9 +63,9 @@
ArgSelf, ArgCaptured};
#[cfg(feature = "full")]
-mod krate;
+mod file;
#[cfg(feature = "full")]
-pub use krate::Crate;
+pub use file::File;
mod lifetime;
pub use lifetime::Lifetime;
@@ -100,45 +101,177 @@
#[cfg(feature = "fold")]
pub mod fold;
+////////////////////////////////////////////////////////////////////////////////
+
#[cfg(feature = "parsing")]
-mod parsing {
- use std::str::FromStr;
+pub use synom::ParseError;
- use super::*;
- use synom::{Synom, ParseError};
- use proc_macro2::TokenStream;
+#[cfg(feature = "parsing")]
+use synom::{Synom, SynomBuffer};
- macro_rules! traits {
- ($($ty:ident,)*) => ($(
- impl From<TokenStream> for $ty {
- fn from(stream: TokenStream) -> $ty {
- $ty::parse_all_unwrap(stream)
- }
+/// Parse tokens of source code into the chosen syn data type.
+///
+/// This is preferred over parsing a string because tokens are able to preserve
+/// information about where in the user's code they were originally written (the
+/// "span" of the token), possibly allowing the compiler to produce better error
+/// messages.
+///
+/// # Examples
+///
+/// ```rust,ignore
+/// extern crate proc_macro;
+/// use proc_macro::TokenStream;
+///
+/// extern crate syn;
+///
+/// #[macro_use]
+/// extern crate quote;
+///
+/// use syn::DeriveInput;
+///
+/// #[proc_macro_derive(MyMacro)]
+/// pub fn my_macro(input: TokenStream) -> TokenStream {
+/// // Parse the tokens into a syntax tree
+/// let ast: DeriveInput = syn::parse(input).unwrap();
+///
+/// // Build the output, possibly using quasi-quotation
+/// let expanded = quote! {
+/// /* ... */
+/// };
+///
+/// // Parse back to a token stream and return it
+/// expanded.parse().unwrap()
+/// }
+/// ```
+#[cfg(feature = "parsing")]
+pub fn parse<T>(tokens: proc_macro::TokenStream) -> Result<T, ParseError>
+ where T: Synom,
+{
+ _parse(tokens.into())
+}
+
+#[cfg(feature = "parsing")]
+fn _parse<T>(tokens: proc_macro2::TokenStream) -> Result<T, ParseError>
+ where T: Synom,
+{
+ let buf = SynomBuffer::new(tokens);
+ let result = T::parse(buf.begin());
+ let err = match result {
+ Ok((rest, t)) => {
+ if rest.eof() {
+ return Ok(t);
+ } else if rest == buf.begin() {
+ // parsed nothing
+ ParseError::new("failed to parse anything")
+ } else {
+ ParseError::new("failed to parse all tokens")
}
+ }
+ Err(err) => err,
+ };
+ match T::description() {
+ Some(s) => Err(ParseError::new(format!("parsing {}: {}", s, err))),
+ None => Err(err),
+ }
+}
- impl FromStr for $ty {
- type Err = ParseError;
+/// Parse a string of Rust code into the chosen syn data type.
+///
+/// # Examples
+///
+/// ```rust
+/// extern crate syn;
+/// #
+/// # #[macro_use]
+/// # extern crate error_chain;
+///
+/// use syn::Expr;
+/// #
+/// # error_chain! {
+/// # foreign_links {
+/// # Syn(syn::ParseError);
+/// # }
+/// # }
+///
+/// fn run() -> Result<()> {
+/// let code = "assert_eq!(u8::max_value(), 255)";
+/// let expr = syn::parse_str::<Expr>(code)?;
+/// println!("{:#?}", expr);
+/// Ok(())
+/// }
+/// #
+/// # fn main() { run().unwrap() }
+/// ```
+#[cfg(feature = "parsing")]
+pub fn parse_str<T: Synom>(s: &str) -> Result<T, ParseError> {
+ _parse(s.parse()?)
+}
- fn from_str(s: &str) -> Result<Self, Self::Err> {
- $ty::parse_str_all(s)
- }
- }
- )*)
+// FIXME the name parse_file makes it sound like you might pass in a path to a
+// file, rather than the content.
+/// Parse the content of a file of Rust code.
+///
+/// This is different from `syn::parse_str::<File>(content)` in two ways:
+///
+/// - It discards a leading byte order mark `\u{FEFF}` if the file has one.
+/// - It preserves the shebang line of the file, such as `#!/usr/bin/env rustx`.
+///
+/// If present, either of these would be an error using `parse_str`.
+///
+/// # Examples
+///
+/// ```rust,no_run
+/// extern crate syn;
+/// #
+/// # #[macro_use]
+/// # extern crate error_chain;
+///
+/// use std::fs::File;
+/// use std::io::Read;
+/// #
+/// # error_chain! {
+/// # foreign_links {
+/// # Io(std::io::Error);
+/// # Syn(syn::ParseError);
+/// # }
+/// # }
+///
+/// fn run() -> Result<()> {
+/// let mut file = File::open("path/to/code.rs")?;
+/// let mut content = String::new();
+/// file.read_to_string(&mut content)?;
+///
+/// let ast = syn::parse_file(&content)?;
+/// if let Some(shebang) = ast.shebang {
+/// println!("{}", shebang);
+/// }
+/// println!("{} items", ast.items.len());
+///
+/// Ok(())
+/// }
+/// #
+/// # fn main() { run().unwrap() }
+/// ```
+#[cfg(all(feature = "parsing", feature = "full"))]
+pub fn parse_file(mut content: &str) -> Result<File, ParseError> {
+ // Strip the BOM if it is present
+ const BOM: &'static str = "\u{feff}";
+ if content.starts_with(BOM) {
+ content = &content[BOM.len()..];
}
- traits! {
- DeriveInput,
- TyParamBound,
- Ident,
- WhereClause,
- Ty,
- Lit,
+ let mut shebang = None;
+ if content.starts_with("#!") && !content.starts_with("#![") {
+ if let Some(idx) = content.find('\n') {
+ shebang = Some(content[..idx].to_string());
+ content = &content[idx..];
+ } else {
+ shebang = Some(content.to_string());
+ content = "";
+ }
}
- #[cfg(feature = "full")]
- traits! {
- Expr,
- Item,
- Crate,
- }
+ let mut file: File = parse_str(content)?;
+ file.shebang = shebang;
+ Ok(file)
}
diff --git a/src/visit.rs b/src/visit.rs
index 5542c45..550afb7 100644
--- a/src/visit.rs
+++ b/src/visit.rs
@@ -82,8 +82,8 @@
}
#[cfg(feature = "full")]
- fn visit_crate(&mut self, _crate: &Crate) {
- walk_crate(self, _crate);
+ fn visit_file(&mut self, file: &File) {
+ walk_file(self, file);
}
#[cfg(feature = "full")]
fn visit_item(&mut self, item: &Item) {
@@ -330,9 +330,9 @@
}
#[cfg(feature = "full")]
-pub fn walk_crate<V: Visitor>(visitor: &mut V, _crate: &Crate) {
- walk_list!(visitor, visit_attribute, &_crate.attrs);
- walk_list!(visitor, visit_item, &_crate.items);
+pub fn walk_file<V: Visitor>(visitor: &mut V, file: &File) {
+ walk_list!(visitor, visit_attribute, &file.attrs);
+ walk_list!(visitor, visit_item, &file.items);
}
#[cfg(feature = "full")]
diff --git a/synom/src/lib.rs b/synom/src/lib.rs
index e257b5d..396260e 100644
--- a/synom/src/lib.rs
+++ b/synom/src/lib.rs
@@ -21,8 +21,9 @@
//! For our use case, this strategy is a huge improvement in usability,
//! correctness, and compile time over nom's `ws!` strategy.
-extern crate unicode_xid;
+extern crate proc_macro;
extern crate proc_macro2;
+extern crate unicode_xid;
#[cfg(feature = "printing")]
extern crate quote;
@@ -34,8 +35,6 @@
use std::error::Error;
use std::fmt;
-use proc_macro2::LexError;
-
#[cfg(feature = "parsing")]
#[doc(hidden)]
pub mod helper;
@@ -63,38 +62,6 @@
fn description() -> Option<&'static str> {
None
}
-
- fn parse_all(input: TokenStream) -> Result<Self, ParseError> {
- let buf = SynomBuffer::new(input);
- let descr = Self::description().unwrap_or("unnamed parser");
- let err = match Self::parse(buf.begin()) {
- Ok((rest, t)) => {
- if rest.eof() {
- return Ok(t)
- } else if rest == buf.begin() {
- // parsed nothing
- format!("parsed no input while parsing {}", descr)
- } else {
- // Partially parsed the output. Print the input which remained.
- format!("unparsed tokens after parsing {}:\n{}",
- descr, rest.token_stream())
- }
- }
- Err(ref err) => format!("{} while parsing {}", err.description(), descr),
- };
- Err(ParseError(Some(err)))
- }
-
- fn parse_str_all(input: &str) -> Result<Self, ParseError> {
- Self::parse_all(input.parse()?)
- }
-
- fn parse_all_unwrap(input: TokenStream) -> Self {
- // TODO: eventually try to provide super nice error messages here as
- // this is what most users will hit. Hopefully the compiler will give us
- // an interface one day to give an extra-good error message here.
- Self::parse_all(input).unwrap()
- }
}
#[derive(Debug)]
@@ -115,12 +82,26 @@
}
}
-impl From<LexError> for ParseError {
- fn from(_: LexError) -> ParseError {
+impl From<proc_macro2::LexError> for ParseError {
+ fn from(_: proc_macro2::LexError) -> ParseError {
ParseError(Some("error while lexing input string".to_owned()))
}
}
+impl From<proc_macro::LexError> for ParseError {
+ fn from(_: proc_macro::LexError) -> ParseError {
+ ParseError(Some("error while lexing input string".to_owned()))
+ }
+}
+
+impl ParseError {
+ // For syn use only. Not public API.
+ #[doc(hidden)]
+ pub fn new<T: Into<String>>(msg: T) -> Self {
+ ParseError(Some(msg.into()))
+ }
+}
+
impl Synom for TokenStream {
fn parse(input: Cursor) -> PResult<Self> {
Ok((Cursor::empty(), input.token_stream()))
diff --git a/tests/common/mod.rs b/tests/common/mod.rs
index fcb8c61..5ffeb84 100644
--- a/tests/common/mod.rs
+++ b/tests/common/mod.rs
@@ -1,9 +1,12 @@
+#![allow(dead_code)]
+
extern crate walkdir;
-use walkdir::DirEntry;
use std::env;
use std::u32;
+use self::walkdir::DirEntry;
+
macro_rules! errorf {
($($tt:tt)*) => {
{
diff --git a/tests/common/parse.rs b/tests/common/parse.rs
index 15c2faf..a0c54e5 100644
--- a/tests/common/parse.rs
+++ b/tests/common/parse.rs
@@ -1,4 +1,6 @@
+extern crate proc_macro2;
extern crate syn;
+extern crate synom;
extern crate syntex_syntax;
use self::syntex_syntax::ast;
@@ -8,6 +10,8 @@
use std::panic;
+use self::synom::{Synom, SynomBuffer};
+
pub fn syntex_expr(input: &str) -> Option<P<ast::Expr>> {
match panic::catch_unwind(|| {
let sess = ParseSess::new(FilePathMapping::empty());
@@ -37,7 +41,7 @@
}
pub fn syn_expr(input: &str) -> Option<syn::Expr> {
- match input.parse::<syn::Expr>() {
+ match syn::parse_str(input) {
Ok(e) => Some(e),
Err(msg) => {
errorf!("syn failed to parse\n{:?}\n", msg);
@@ -45,3 +49,20 @@
}
}
}
+
+pub fn syn<T: Synom>(tokens: proc_macro2::TokenStream) -> T {
+ let buf = SynomBuffer::new(tokens);
+ let result = T::parse(buf.begin());
+ match result {
+ Ok((rest, t)) => {
+ if rest.eof() {
+ t
+ } else if rest == buf.begin() {
+ panic!("failed to parse anything")
+ } else {
+ panic!("failed to parse all tokens")
+ }
+ }
+ Err(err) => panic!("failed to parse: {}", err),
+ }
+}
diff --git a/tests/common/respan.rs b/tests/common/respan.rs
index c9d12f5..0d5eeb3 100644
--- a/tests/common/respan.rs
+++ b/tests/common/respan.rs
@@ -1,19 +1,22 @@
+extern crate syntex_syntax;
+extern crate syntex_pos;
+
use std::rc::Rc;
-use syntex_syntax::ast::{Attribute, Expr, ExprKind, Field, FnDecl, FunctionRetTy, ImplItem,
+use self::syntex_syntax::ast::{Attribute, Expr, ExprKind, Field, FnDecl, FunctionRetTy, ImplItem,
ImplItemKind, ItemKind, Mac, MetaItem, MetaItemKind, MethodSig,
NestedMetaItem, NestedMetaItemKind, TraitItem, TraitItemKind, TyParam,
Visibility};
-use syntex_syntax::codemap::{self, Spanned};
-use syntex_syntax::fold::{self, Folder};
-use syntex_syntax::parse::token::{Lit, Token};
-use syntex_syntax::ptr::P;
-use syntex_syntax::symbol::Symbol;
-use syntex_syntax::tokenstream::{Delimited, TokenTree};
-use syntex_syntax::util::move_map::MoveMap;
-use syntex_syntax::util::small_vector::SmallVector;
+use self::syntex_syntax::codemap::{self, Spanned};
+use self::syntex_syntax::fold::{self, Folder};
+use self::syntex_syntax::parse::token::{Lit, Token};
+use self::syntex_syntax::ptr::P;
+use self::syntex_syntax::symbol::Symbol;
+use self::syntex_syntax::tokenstream::{Delimited, TokenTree};
+use self::syntex_syntax::util::move_map::MoveMap;
+use self::syntex_syntax::util::small_vector::SmallVector;
-use syntex_pos::{Span, DUMMY_SP};
-use syntex_syntax::ast;
+use self::syntex_pos::{Span, DUMMY_SP};
+use self::syntex_syntax::ast;
struct Respanner;
diff --git a/tests/test_derive_input.rs b/tests/test_derive_input.rs
index 1437e3b..bd56825 100644
--- a/tests/test_derive_input.rs
+++ b/tests/test_derive_input.rs
@@ -51,7 +51,7 @@
}),
};
- assert_eq!(expected, raw.parse().unwrap());
+ assert_eq!(expected, syn::parse_str(raw).unwrap());
}
#[test]
@@ -136,7 +136,7 @@
}),
};
- let actual = raw.parse().unwrap();
+ let actual = syn::parse_str(raw).unwrap();
assert_eq!(expected, actual);
@@ -309,7 +309,7 @@
}),
};
- let actual = raw.parse().unwrap();
+ let actual = syn::parse_str(raw).unwrap();
assert_eq!(expected, actual);
@@ -378,7 +378,7 @@
}),
};
- let actual = raw.parse().unwrap();
+ let actual = syn::parse_str(raw).unwrap();
assert_eq!(expected, actual);
@@ -420,7 +420,7 @@
}),
};
- let actual = raw.parse().unwrap();
+ let actual = syn::parse_str(raw).unwrap();
assert_eq!(expected, actual);
@@ -459,7 +459,7 @@
}),
};
- let actual = raw.parse().unwrap();
+ let actual = syn::parse_str(raw).unwrap();
assert_eq!(expected, actual);
@@ -510,7 +510,7 @@
}),
};
- let actual = raw.parse().unwrap();
+ let actual = syn::parse_str(raw).unwrap();
assert_eq!(expected, actual);
}
@@ -537,7 +537,7 @@
}),
};
- let actual = raw.parse().unwrap();
+ let actual = syn::parse_str(raw).unwrap();
assert_eq!(expected, actual);
}
@@ -565,7 +565,7 @@
}),
};
- let actual = raw.parse().unwrap();
+ let actual = syn::parse_str(raw).unwrap();
assert_eq!(expected, actual);
}
@@ -593,7 +593,7 @@
}),
};
- let actual = raw.parse().unwrap();
+ let actual = syn::parse_str(raw).unwrap();
assert_eq!(expected, actual);
}
diff --git a/tests/test_expr.rs b/tests/test_expr.rs
index 0e4b574..14c0cae 100644
--- a/tests/test_expr.rs
+++ b/tests/test_expr.rs
@@ -86,7 +86,7 @@
}
"#;
- let actual = raw.parse::<Crate>().unwrap();
+ let actual: File = syn::parse_str(raw).unwrap();
assert_let!(ItemKind::Struct(ItemStruct { ref ident, .. }) = actual.items[0].node; {
assert_eq!(ident, "catch");
diff --git a/tests/test_generics.rs b/tests/test_generics.rs
index f743b78..148b879 100644
--- a/tests/test_generics.rs
+++ b/tests/test_generics.rs
@@ -9,6 +9,8 @@
extern crate proc_macro2;
use proc_macro2::Term;
+mod common;
+
#[test]
fn test_split_for_impl() {
// <'a, 'b: 'a, #[may_dangle] T: 'a = ()> where T: Debug
@@ -96,7 +98,7 @@
fn test_ty_param_bound() {
let tokens = quote!('a);
let expected = TyParamBound::Region(Lifetime::new(Term::intern("'a"), Span::default()));
- assert_eq!(expected, tokens.to_string().parse().unwrap());
+ assert_eq!(expected, common::parse::syn::<TyParamBound>(tokens.into()));
let tokens = quote!(Debug);
let expected = TyParamBound::Trait(
@@ -105,7 +107,7 @@
trait_ref: "Debug".into(),
},
TraitBoundModifier::None);
- assert_eq!(expected, tokens.to_string().parse().unwrap());
+ assert_eq!(expected, common::parse::syn::<TyParamBound>(tokens.into()));
let tokens = quote!(?Sized);
let expected = TyParamBound::Trait(
@@ -114,5 +116,5 @@
trait_ref: "Sized".into(),
},
TraitBoundModifier::Maybe(Default::default()));
- assert_eq!(expected, tokens.to_string().parse().unwrap());
+ assert_eq!(expected, common::parse::syn::<TyParamBound>(tokens.into()));
}
diff --git a/tests/test_grouping.rs b/tests/test_grouping.rs
index f41f9ce..d010462 100644
--- a/tests/test_grouping.rs
+++ b/tests/test_grouping.rs
@@ -4,11 +4,13 @@
use syn::{Expr, ExprKind, ExprGroup, ExprBinary, Lit, LitKind, BinOp};
extern crate synom;
-use synom::{tokens, Synom};
+use synom::tokens;
extern crate proc_macro2;
use proc_macro2::*;
+mod common;
+
fn tt(k: TokenNode) -> TokenTree {
TokenTree {
span: Span::default(),
@@ -43,7 +45,7 @@
assert_eq!(raw.to_string(), "1i32 + 2i32 + 3i32 * 4i32");
- assert_eq!(Expr::parse_all(raw).unwrap(), expr(ExprBinary {
+ assert_eq!(common::parse::syn::<Expr>(raw), expr(ExprBinary {
left: Box::new(lit(Literal::i32(1))),
op: BinOp::Add(tokens::Add::default()),
right: Box::new(expr(ExprBinary {
@@ -77,7 +79,7 @@
assert_eq!(raw.to_string(), "1i32 + 2i32 + 3i32 * 4i32");
- assert_eq!(Expr::parse_all(raw).unwrap(), expr(ExprBinary {
+ assert_eq!(common::parse::syn::<Expr>(raw.into()), expr(ExprBinary {
left: Box::new(expr(ExprBinary {
left: Box::new(lit(Literal::i32(1))),
op: BinOp::Add(tokens::Add::default()),
diff --git a/tests/test_precedence.rs b/tests/test_precedence.rs
index a97f622..a5128fc 100644
--- a/tests/test_precedence.rs
+++ b/tests/test_precedence.rs
@@ -95,9 +95,9 @@
let mut content = String::new();
file.read_to_string(&mut content).unwrap();
- let (l_passed, l_failed) = match content.parse::<syn::Crate>() {
- Ok(krate) => {
- let exprs = collect_exprs(krate);
+ let (l_passed, l_failed) = match syn::parse_file(&content) {
+ Ok(file) => {
+ let exprs = collect_exprs(file);
test_expressions(exprs)
}
Err(msg) => {
@@ -351,7 +351,7 @@
}
/// Walk through a crate collecting all expressions we can find in it.
-fn collect_exprs(krate: syn::Crate) -> Vec<syn::Expr> {
+fn collect_exprs(file: syn::File) -> Vec<syn::Expr> {
use synom::delimited::Delimited;
use syn::*;
use syn::fold::*;
@@ -373,6 +373,6 @@
}
let mut folder = CollectExprsFolder(vec![]);
- folder.fold_crate(krate);
+ folder.fold_file(file);
folder.0
}
diff --git a/tests/test_round_trip.rs b/tests/test_round_trip.rs
index c07d5f9..6bab10f 100644
--- a/tests/test_round_trip.rs
+++ b/tests/test_round_trip.rs
@@ -47,7 +47,7 @@
file.read_to_string(&mut content).unwrap();
let start = Instant::now();
- let (krate, elapsed) = match content.parse::<syn::Crate>() {
+ let (krate, elapsed) = match syn::parse_file(&content) {
Ok(krate) => (krate, start.elapsed()),
Err(msg) => {
errorf!("syn failed to parse\n{:?}\n", msg);
diff --git a/tests/test_token_trees.rs b/tests/test_token_trees.rs
index de9eb0b..d644d52 100644
--- a/tests/test_token_trees.rs
+++ b/tests/test_token_trees.rs
@@ -90,6 +90,6 @@
#[test]
fn test_literal_mangling() {
let raw = "0_4";
- let parsed = raw.parse::<Lit>().unwrap();
+ let parsed: Lit = syn::parse_str(raw).unwrap();
assert_eq!(raw, quote!(#parsed).to_string());
}