Merge pull request #189 from dtolnay/suffix
Accept arbitrary lit suffixes
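
A literal followed by any identifier-like suffix now lexes as a single token; validating the suffix is left to downstream consumers such as the compiler. A minimal sketch of the observable behavior, mirroring the `token_count` helper added in the new test below (this assumes the crate being changed is proc-macro2):

    use proc_macro2::TokenStream;

    fn token_count(p: &str) -> usize {
        // Lex the input with the fallback lexer and count top-level tokens.
        p.parse::<TokenStream>().unwrap().into_iter().count()
    }

    fn main() {
        // An arbitrary suffix such as `u256` is now part of the literal: one token.
        assert_eq!(token_count("999u256"), 1);
        // String literals may carry a suffix too.
        assert_eq!(token_count("\"\"s"), 1);
        // An `r#` raw identifier is not accepted as a suffix, so per the new test
        // this still lexes as three tokens.
        assert_eq!(token_count("999r#u256"), 3);
    }
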
diff --git a/src/fallback.rs b/src/fallback.rs
index 5590480..1bd5848 100644
--- a/src/fallback.rs
+++ b/src/fallback.rs
@@ -881,14 +881,27 @@
}
fn symbol(input: Cursor) -> PResult<TokenTree> {
- let mut chars = input.char_indices();
-
let raw = input.starts_with("r#");
- if raw {
- chars.next();
- chars.next();
+ let rest = input.advance((raw as usize) << 1);
+
+ let (rest, sym) = symbol_not_raw(rest)?;
+
+ if !raw {
+ let ident = crate::Ident::new(sym, crate::Span::call_site());
+ return Ok((rest, ident.into()));
}
+ if sym == "_" {
+ return Err(LexError);
+ }
+
+ let ident = crate::Ident::_new_raw(sym, crate::Span::call_site());
+ Ok((rest, ident.into()))
+}
+
+fn symbol_not_raw(input: Cursor) -> PResult<&str> {
+ let mut chars = input.char_indices();
+
match chars.next() {
Some((_, ch)) if is_ident_start(ch) => {}
_ => return Err(LexError),
@@ -902,17 +915,7 @@
}
}
- let a = &input.rest[..end];
- if a == "r#_" {
- Err(LexError)
- } else {
- let ident = if raw {
- crate::Ident::_new_raw(&a[2..], crate::Span::call_site())
- } else {
- crate::Ident::new(a, crate::Span::call_site())
- };
- Ok((input.advance(end), ident.into()))
- }
+ Ok((input.advance(end), &input.rest[..end]))
}
fn literal(input: Cursor) -> PResult<Literal> {
@@ -952,10 +955,12 @@
) => { |_| () }
));
-named!(quoted_string -> (), delimited!(
- punct!("\""),
- cooked_string,
- tag!("\"")
+named!(quoted_string -> (), do_parse!(
+ punct!("\"") >>
+ cooked_string >>
+ tag!("\"") >>
+ option!(symbol_not_raw) >>
+ (())
));
fn cooked_string(input: Cursor) -> PResult<()> {
@@ -1193,10 +1198,10 @@
}
fn float(input: Cursor) -> PResult<()> {
- let (rest, ()) = float_digits(input)?;
- for suffix in &["f32", "f64"] {
- if rest.starts_with(suffix) {
- return word_break(rest.advance(suffix.len()));
+ let (mut rest, ()) = float_digits(input)?;
+ if let Some(ch) = rest.chars().next() {
+ if is_ident_start(ch) {
+ rest = symbol_not_raw(rest)?.0;
}
}
word_break(rest)
@@ -1225,7 +1230,7 @@
chars.next();
if chars
.peek()
- .map(|&ch| ch == '.' || UnicodeXID::is_xid_start(ch))
+ .map(|&ch| ch == '.' || is_ident_start(ch))
.unwrap_or(false)
{
return Err(LexError);
@@ -1280,12 +1285,10 @@
}
fn int(input: Cursor) -> PResult<()> {
- let (rest, ()) = digits(input)?;
- for suffix in &[
- "isize", "i8", "i16", "i32", "i64", "i128", "usize", "u8", "u16", "u32", "u64", "u128",
- ] {
- if rest.starts_with(suffix) {
- return word_break(rest.advance(suffix.len()));
+ let (mut rest, ()) = digits(input)?;
+ if let Some(ch) = rest.chars().next() {
+ if is_ident_start(ch) {
+ rest = symbol_not_raw(rest)?.0;
}
}
word_break(rest)
diff --git a/tests/test.rs b/tests/test.rs
index 8ab975c..7528388 100644
--- a/tests/test.rs
+++ b/tests/test.rs
@@ -97,6 +97,22 @@
}
#[test]
+fn literal_suffix() {
+ fn token_count(p: &str) -> usize {
+ p.parse::<TokenStream>().unwrap().into_iter().count()
+ }
+
+ assert_eq!(token_count("999u256"), 1);
+ assert_eq!(token_count("999r#u256"), 3);
+ assert_eq!(token_count("1."), 1);
+ assert_eq!(token_count("1.f32"), 3);
+ assert_eq!(token_count("1.0_0"), 1);
+ assert_eq!(token_count("1._0"), 3);
+ assert_eq!(token_count("1._m"), 3);
+ assert_eq!(token_count("\"\"s"), 1);
+}
+
+#[test]
fn roundtrip() {
fn roundtrip(p: &str) {
println!("parse: {}", p);
@@ -123,6 +139,9 @@
9
0
0xffffffffffffffffffffffffffffffff
+ 1x
+ 1u80
+ 1f320
",
);
roundtrip("'a");
@@ -139,9 +158,6 @@
panic!("should have failed to parse: {}\n{:#?}", p, s);
}
}
- fail("1x");
- fail("1u80");
- fail("1f320");
fail("' static");
fail("r#1");
fail("r#_");