Update to rust-lang/rust's proc_macro API
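
Renames this crate's API to match the current shape of proc_macro in
rust-lang/rust:

  * `Symbol` is now `Term`, constructed with `Term::intern` instead of
    the `From<&str>` / `From<String>` impls.
  * `TokenKind` is now `TokenNode`; the `Sequence` variant is `Group`
    and `Word` is `Term`.
  * `OpKind` is now `Spacing`.
  * `TokenIter` is now `TokenTreeIter`.
  * `Literal` constructors take typed values (`integer(i64)`,
    `float(f64)`, per-width `u8`..`i64` / `f32` / `f64`, plus `string`
    and `character`) rather than pre-formatted strings; the blanket
    `From<T>` impls are removed. `byte_char`, `doccomment`, and
    `raw_string` are kept but marked as not present upstream yet.
  * `From<TokenNode> for TokenTree` is added so a bare node can be
    lifted into a tree with a default span.

A minimal sketch of the renamed API (names taken from this diff; the
standalone `main` is only for illustration):

    extern crate proc_macro2;

    use proc_macro2::{Literal, Spacing, Term, TokenNode, TokenTree};

    fn main() {
        // Identifiers are interned explicitly now.
        let ident = Term::intern("foo");
        assert_eq!(ident.as_str(), "foo");

        // A bare node can be lifted into a tree with a default span.
        let tree = TokenTree::from(TokenNode::Term(ident));

        // Literal constructors are typed instead of string-based.
        let s = Literal::string("hello");
        let n = Literal::integer(42);
        let x = Literal::float(1.5);
        assert_eq!(s.to_string(), "\"hello\"");

        // Operators carry a Spacing instead of an OpKind.
        let plus = TokenNode::Op('+', Spacing::Alone);
        let _ = (tree, n, x, plus);
    }
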
diff --git a/src/lib.rs b/src/lib.rs
index d019555..746897d 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -65,10 +65,10 @@
impl IntoIterator for TokenStream {
type Item = TokenTree;
- type IntoIter = TokenIter;
+ type IntoIter = TokenTreeIter;
- fn into_iter(self) -> TokenIter {
- TokenIter(self.0.into_iter())
+ fn into_iter(self) -> TokenTreeIter {
+ TokenTreeIter(self.0.into_iter())
}
}
@@ -100,7 +100,13 @@
#[derive(Clone, Debug)]
pub struct TokenTree {
pub span: Span,
- pub kind: TokenKind,
+ pub kind: TokenNode,
+}
+
+impl From<TokenNode> for TokenTree {
+ fn from(kind: TokenNode) -> TokenTree {
+ TokenTree { span: Span::default(), kind: kind }
+ }
}
impl fmt::Display for TokenTree {
@@ -110,10 +116,10 @@
}
#[derive(Clone, Debug)]
-pub enum TokenKind {
- Sequence(Delimiter, TokenStream),
- Word(Symbol),
- Op(char, OpKind),
+pub enum TokenNode {
+ Group(Delimiter, TokenStream),
+ Term(Term),
+ Op(char, Spacing),
Literal(Literal),
}
@@ -126,28 +132,20 @@
}
#[derive(Copy, Clone)]
-pub struct Symbol(imp::Symbol);
+pub struct Term(imp::Term);
-impl<'a> From<&'a str> for Symbol {
- fn from(string: &'a str) -> Symbol {
- Symbol(string.into())
+impl Term {
+ pub fn intern(string: &str) -> Term {
+ Term(string.into())
}
-}
-impl From<String> for Symbol {
- fn from(string: String) -> Symbol {
- Symbol(string[..].into())
- }
-}
-
-impl Symbol {
pub fn as_str(&self) -> &str {
&self.0
}
}
-#[derive(Copy, Clone, Debug, Eq, PartialEq)]
-pub enum OpKind {
+#[derive(Copy, Clone, Debug)]
+pub enum Spacing {
Alone,
Joint,
}
@@ -155,27 +153,59 @@
#[derive(Clone)]
pub struct Literal(imp::Literal);
+macro_rules! int_literals {
+ ($($kind:ident,)*) => ($(
+ pub fn $kind(n: $kind) -> Literal {
+ Literal(n.into())
+ }
+ )*)
+}
+
impl Literal {
- pub fn byte_char(b: u8) -> Literal {
- Literal(imp::Literal::byte_char(b))
+ pub fn integer(s: i64) -> Literal {
+ Literal(imp::Literal::integer(s))
+ }
+
+ int_literals! {
+ u8, u16, u32, u64, /*usize*/
+ i8, i16, i32, i64, /*isize,*/
+ }
+
+ pub fn float(f: f64) -> Literal {
+ Literal(imp::Literal::float(f))
+ }
+
+ pub fn f64(f: f64) -> Literal {
+ Literal(f.into())
+ }
+
+ pub fn f32(f: f32) -> Literal {
+ Literal(f.into())
+ }
+
+ pub fn string(string: &str) -> Literal {
+ Literal(string.into())
+ }
+
+ pub fn character(ch: char) -> Literal {
+ Literal(ch.into())
}
pub fn byte_string(s: &[u8]) -> Literal {
Literal(imp::Literal::byte_string(s))
}
+ // =======================================================================
+ // Not present upstream in proc_macro yet
+
+ pub fn byte_char(b: u8) -> Literal {
+ Literal(imp::Literal::byte_char(b))
+ }
+
pub fn doccomment(s: &str) -> Literal {
Literal(imp::Literal::doccomment(s))
}
- pub fn float(s: &str) -> Literal {
- Literal(imp::Literal::float(s))
- }
-
- pub fn integer(s: &str) -> Literal {
- Literal(imp::Literal::integer(s))
- }
-
pub fn raw_string(s: &str, pounds: usize) -> Literal {
Literal(imp::Literal::raw_string(s, pounds))
}
@@ -185,25 +215,9 @@
}
}
-macro_rules! froms {
- ($($t:ty,)*) => {$(
- impl<'a> From<$t> for Literal {
- fn from(t: $t) -> Literal {
- Literal(t.into())
- }
- }
- )*}
-}
+pub struct TokenTreeIter(imp::TokenTreeIter);
-froms! {
- u8, u16, u32, u64, usize,
- i8, i16, i32, i64, isize,
- f32, f64, char, &'a str,
-}
-
-pub struct TokenIter(imp::TokenIter);
-
-impl Iterator for TokenIter {
+impl Iterator for TokenTreeIter {
type Item = TokenTree;
fn next(&mut self) -> Option<TokenTree> {
@@ -214,8 +228,8 @@
forward_fmt!(Debug for LexError);
forward_fmt!(Debug for Literal);
forward_fmt!(Debug for Span);
-forward_fmt!(Debug for Symbol);
-forward_fmt!(Debug for TokenIter);
+forward_fmt!(Debug for Term);
+forward_fmt!(Debug for TokenTreeIter);
forward_fmt!(Debug for TokenStream);
forward_fmt!(Display for Literal);
forward_fmt!(Display for TokenStream);
diff --git a/src/stable.rs b/src/stable.rs
index a6daf84..0d5cdca 100644
--- a/src/stable.rs
+++ b/src/stable.rs
@@ -14,7 +14,7 @@
use unicode_xid::UnicodeXID;
use strnom::{PResult, skip_whitespace, block_comment, whitespace, word_break};
-use {TokenTree, TokenKind, Delimiter, OpKind};
+use {TokenTree, TokenNode, Delimiter, Spacing};
#[derive(Clone, Debug)]
pub struct TokenStream {
@@ -60,7 +60,7 @@
}
joint = false;
match tt.kind {
- TokenKind::Sequence(delim, ref stream) => {
+ TokenNode::Group(delim, ref stream) => {
let (start, end) = match delim {
Delimiter::Parenthesis => ("(", ")"),
Delimiter::Brace => ("{", "}"),
@@ -73,15 +73,15 @@
write!(f, "{} {} {}", start, stream, end)?
}
}
- TokenKind::Word(ref sym) => write!(f, "{}", sym.as_str())?,
- TokenKind::Op(ch, ref op) => {
+ TokenNode::Term(ref sym) => write!(f, "{}", sym.as_str())?,
+ TokenNode::Op(ch, ref op) => {
write!(f, "{}", ch)?;
match *op {
- OpKind::Alone => {}
- OpKind::Joint => joint = true,
+ Spacing::Alone => {}
+ Spacing::Joint => joint = true,
}
}
- TokenKind::Literal(ref literal) => {
+ TokenNode::Literal(ref literal) => {
write!(f, "{}", literal)?;
// handle comments
if (literal.0).0.starts_with("/") {
@@ -126,13 +126,13 @@
}
}
-pub type TokenIter = vec::IntoIter<TokenTree>;
+pub type TokenTreeIter = vec::IntoIter<TokenTree>;
impl IntoIterator for TokenStream {
type Item = TokenTree;
- type IntoIter = TokenIter;
+ type IntoIter = TokenTreeIter;
- fn into_iter(self) -> TokenIter {
+ fn into_iter(self) -> TokenTreeIter {
self.inner.into_iter()
}
}
@@ -147,23 +147,23 @@
}
#[derive(Copy, Clone)]
-pub struct Symbol {
+pub struct Term {
intern: usize,
not_send_sync: PhantomData<*const ()>,
}
thread_local!(static SYMBOLS: RefCell<Interner> = RefCell::new(Interner::new()));
-impl<'a> From<&'a str> for Symbol {
- fn from(string: &'a str) -> Symbol {
- Symbol {
+impl<'a> From<&'a str> for Term {
+ fn from(string: &'a str) -> Term {
+ Term {
intern: SYMBOLS.with(|s| s.borrow_mut().intern(string)),
not_send_sync: PhantomData,
}
}
}
-impl ops::Deref for Symbol {
+impl ops::Deref for Term {
type Target = str;
fn deref(&self) -> &str {
@@ -177,9 +177,9 @@
}
}
-impl fmt::Debug for Symbol {
+impl fmt::Debug for Term {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
- f.debug_tuple("Symbol").field(&&**self).finish()
+ f.debug_tuple("Term").field(&&**self).finish()
}
}
@@ -259,11 +259,11 @@
Literal(s.to_string())
}
- pub fn float(s: &str) -> Literal {
+ pub fn float(s: f64) -> Literal {
Literal(s.to_string())
}
- pub fn integer(s: &str) -> Literal {
+ pub fn integer(s: i64) -> Literal {
Literal(s.to_string())
}
@@ -346,21 +346,21 @@
));
named!(token_tree -> TokenTree,
- map!(token_kind, |s: TokenKind| {
+ map!(token_kind, |s: TokenNode| {
TokenTree {
span: ::Span(Span),
kind: s,
}
}));
-named!(token_kind -> TokenKind, alt!(
- map!(delimited, |(d, s)| TokenKind::Sequence(d, s))
+named!(token_kind -> TokenNode, alt!(
+ map!(delimited, |(d, s)| TokenNode::Group(d, s))
|
- map!(literal, TokenKind::Literal) // must be before symbol
+ map!(literal, TokenNode::Literal) // must be before symbol
|
- map!(symbol, TokenKind::Word)
+ map!(symbol, TokenNode::Term)
|
- map!(op, |(op, kind)| TokenKind::Op(op, kind))
+ map!(op, |(op, kind)| TokenNode::Op(op, kind))
));
named!(delimited -> (Delimiter, ::TokenStream), alt!(
@@ -383,7 +383,7 @@
) => { |ts| (Delimiter::Brace, ts) }
));
-fn symbol(mut input: &str) -> PResult<::Symbol> {
+fn symbol(mut input: &str) -> PResult<::Term> {
input = skip_whitespace(input);
let mut chars = input.char_indices();
@@ -409,7 +409,7 @@
if lifetime && &input[..end] != "'static" && KEYWORDS.contains(&&input[1..end]) {
Err(LexError)
} else {
- Ok((&input[end..], input[..end].into()))
+ Ok((&input[end..], ::Term::intern(&input[..end])))
}
}
@@ -894,13 +894,13 @@
keyword!("false") => { |_| () }
));
-fn op(input: &str) -> PResult<(char, OpKind)> {
+fn op(input: &str) -> PResult<(char, Spacing)> {
let input = skip_whitespace(input);
match op_char(input) {
Ok((rest, ch)) => {
let kind = match op_char(rest) {
- Ok(_) => OpKind::Joint,
- Err(LexError) => OpKind::Alone,
+ Ok(_) => Spacing::Joint,
+ Err(LexError) => Spacing::Alone,
};
Ok((rest, (ch, kind)))
}
diff --git a/src/unstable.rs b/src/unstable.rs
index bf097fb..bf57085 100644
--- a/src/unstable.rs
+++ b/src/unstable.rs
@@ -6,7 +6,7 @@
use proc_macro;
-use {TokenTree, TokenKind, Delimiter, OpKind};
+use {TokenTree, TokenNode, Delimiter, Spacing};
#[derive(Clone)]
pub struct TokenStream(proc_macro::TokenStream);
@@ -55,27 +55,27 @@
TokenStream(proc_macro::TokenTree {
span: (tree.span.0).0,
kind: match tree.kind {
- TokenKind::Sequence(delim, s) => {
+ TokenNode::Group(delim, s) => {
let delim = match delim {
Delimiter::Parenthesis => proc_macro::Delimiter::Parenthesis,
Delimiter::Bracket => proc_macro::Delimiter::Bracket,
Delimiter::Brace => proc_macro::Delimiter::Brace,
Delimiter::None => proc_macro::Delimiter::None,
};
- proc_macro::TokenKind::Sequence(delim, (s.0).0)
+ proc_macro::TokenNode::Group(delim, (s.0).0)
}
- TokenKind::Op(ch, kind) => {
+ TokenNode::Op(ch, kind) => {
let kind = match kind {
- OpKind::Joint => proc_macro::OpKind::Joint,
- OpKind::Alone => proc_macro::OpKind::Alone,
+ Spacing::Joint => proc_macro::Spacing::Joint,
+ Spacing::Alone => proc_macro::Spacing::Alone,
};
- proc_macro::TokenKind::Op(ch, kind)
+ proc_macro::TokenNode::Op(ch, kind)
}
- TokenKind::Word(s) => {
- proc_macro::TokenKind::Word((s.0).0)
+ TokenNode::Term(s) => {
+ proc_macro::TokenNode::Term((s.0).0)
}
- TokenKind::Literal(l) => {
- proc_macro::TokenKind::Literal((l.0).0)
+ TokenNode::Literal(l) => {
+ proc_macro::TokenNode::Literal((l.0).0)
}
},
}.into())
@@ -103,18 +103,18 @@
}
}
-pub struct TokenIter(proc_macro::TokenIter);
+pub struct TokenTreeIter(proc_macro::TokenTreeIter);
impl IntoIterator for TokenStream {
type Item = TokenTree;
- type IntoIter = TokenIter;
+ type IntoIter = TokenTreeIter;
- fn into_iter(self) -> TokenIter {
- TokenIter(self.0.into_iter())
+ fn into_iter(self) -> TokenTreeIter {
+ TokenTreeIter(self.0.into_iter())
}
}
-impl Iterator for TokenIter {
+impl Iterator for TokenTreeIter {
type Item = TokenTree;
fn next(&mut self) -> Option<TokenTree> {
@@ -125,27 +125,27 @@
Some(TokenTree {
span: ::Span(Span(token.span)),
kind: match token.kind {
- proc_macro::TokenKind::Sequence(delim, s) => {
+ proc_macro::TokenNode::Group(delim, s) => {
let delim = match delim {
proc_macro::Delimiter::Parenthesis => Delimiter::Parenthesis,
proc_macro::Delimiter::Bracket => Delimiter::Bracket,
proc_macro::Delimiter::Brace => Delimiter::Brace,
proc_macro::Delimiter::None => Delimiter::None,
};
- TokenKind::Sequence(delim, ::TokenStream(TokenStream(s)))
+ TokenNode::Group(delim, ::TokenStream(TokenStream(s)))
}
- proc_macro::TokenKind::Op(ch, kind) => {
+ proc_macro::TokenNode::Op(ch, kind) => {
let kind = match kind {
- proc_macro::OpKind::Joint => OpKind::Joint,
- proc_macro::OpKind::Alone => OpKind::Alone,
+ proc_macro::Spacing::Joint => Spacing::Joint,
+ proc_macro::Spacing::Alone => Spacing::Alone,
};
- TokenKind::Op(ch, kind)
+ TokenNode::Op(ch, kind)
}
- proc_macro::TokenKind::Word(s) => {
- TokenKind::Word(::Symbol(Symbol(s)))
+ proc_macro::TokenNode::Term(s) => {
+ TokenNode::Term(::Term(Term(s)))
}
- proc_macro::TokenKind::Literal(l) => {
- TokenKind::Literal(::Literal(Literal(l)))
+ proc_macro::TokenNode::Literal(l) => {
+ TokenNode::Literal(::Literal(Literal(l)))
}
},
})
@@ -156,9 +156,9 @@
}
}
-impl fmt::Debug for TokenIter {
+impl fmt::Debug for TokenTreeIter {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
- f.debug_struct("TokenIter").finish()
+ f.debug_struct("TokenTreeIter").finish()
}
}
@@ -179,23 +179,23 @@
}
#[derive(Copy, Clone)]
-pub struct Symbol(proc_macro::Symbol);
+pub struct Term(proc_macro::Term);
-impl<'a> From<&'a str> for Symbol {
- fn from(string: &'a str) -> Symbol {
- Symbol(string.into())
+impl<'a> From<&'a str> for Term {
+ fn from(string: &'a str) -> Term {
+ Term(proc_macro::Term::intern(string))
}
}
-impl ops::Deref for Symbol {
+impl ops::Deref for Term {
type Target = str;
fn deref(&self) -> &str {
- &self.0
+ self.0.as_str()
}
}
-impl fmt::Debug for Symbol {
+impl fmt::Debug for Term {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
(**self).fmt(f)
}
@@ -240,12 +240,12 @@
Literal(to_literal(s))
}
- pub fn float(s: &str) -> Literal {
- Literal(to_literal(s))
+ pub fn float(s: f64) -> Literal {
+ Literal(to_literal(&s.to_string()))
}
- pub fn integer(s: &str) -> Literal {
- Literal(to_literal(s))
+ pub fn integer(s: i64) -> Literal {
+ Literal(to_literal(&s.to_string()))
}
pub fn raw_string(s: &str, pounds: usize) -> Literal {
@@ -284,7 +284,7 @@
fn to_literal(s: &str) -> proc_macro::Literal {
let stream = s.parse::<proc_macro::TokenStream>().unwrap();
match stream.into_iter().next().unwrap().kind {
- proc_macro::TokenKind::Literal(l) => l,
+ proc_macro::TokenNode::Literal(l) => l,
_ => unreachable!(),
}
}
@@ -308,8 +308,7 @@
($($t:ident,)*) => {$(
impl From<$t> for Literal {
fn from(t: $t) -> Literal {
- // TODO: remove this `as f32` when fixed upstream
- Literal(proc_macro::Literal::$t(t as f32))
+ Literal(proc_macro::Literal::$t(t))
}
}
)*}
diff --git a/tests/compile-fail/symbol_send.rs b/tests/compile-fail/symbol_send.rs
index 64727fc..19da4e7 100644
--- a/tests/compile-fail/symbol_send.rs
+++ b/tests/compile-fail/symbol_send.rs
@@ -1,9 +1,9 @@
extern crate proc_macro2;
-use proc_macro2::Symbol;
+use proc_macro2::Term;
fn assert_send<T: Send>() {}
fn main() {
- assert_send::<Symbol>(); //~ the trait bound `*const (): std::marker::Send` is not satisfied in `proc_macro2::Symbol`
+ assert_send::<Term>(); //~ the trait bound `*const (): std::marker::Send` is not satisfied in `proc_macro2::Term`
}
diff --git a/tests/compile-fail/symbol_sync.rs b/tests/compile-fail/symbol_sync.rs
index ede06d1..68c1289 100644
--- a/tests/compile-fail/symbol_sync.rs
+++ b/tests/compile-fail/symbol_sync.rs
@@ -1,9 +1,9 @@
extern crate proc_macro2;
-use proc_macro2::Symbol;
+use proc_macro2::Term;
fn assert_sync<T: Sync>() {}
fn main() {
- assert_sync::<Symbol>(); //~ the trait bound `*const (): std::marker::Sync` is not satisfied in `proc_macro2::Symbol`
+ assert_sync::<Term>(); //~ the trait bound `*const (): std::marker::Sync` is not satisfied in `proc_macro2::Term`
}
diff --git a/tests/test.rs b/tests/test.rs
index 0f81816..4d6831b 100644
--- a/tests/test.rs
+++ b/tests/test.rs
@@ -1,17 +1,17 @@
extern crate proc_macro2;
-use proc_macro2::{Symbol, Literal, TokenStream};
+use proc_macro2::{Term, Literal, TokenStream};
#[test]
fn symbols() {
- assert_eq!(Symbol::from("foo").as_str(), "foo");
- assert_eq!(Symbol::from("bar").as_str(), "bar");
+ assert_eq!(Term::intern("foo").as_str(), "foo");
+ assert_eq!(Term::intern("bar").as_str(), "bar");
}
#[test]
fn literals() {
- assert_eq!(Literal::from("foo").to_string(), "\"foo\"");
- assert_eq!(Literal::from("\"").to_string(), "\"\\\"\"");
+ assert_eq!(Literal::string("foo").to_string(), "\"foo\"");
+ assert_eq!(Literal::string("\"").to_string(), "\"\\\"\"");
}
#[test]