Skip to content

Commit 7d493bd

Browse files
Committed: "Add LazyTokenStream."
Parent: e42836b · Commit: 7d493bd

File tree

13 files changed

+151
-89
lines changed

13 files changed

+151
-89
lines changed

src/libproc_macro/lib.rs

Lines changed: 28 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -42,6 +42,7 @@
4242
#![feature(staged_api)]
4343
#![feature(lang_items)]
4444

45+
#[macro_use]
4546
extern crate syntax;
4647
extern crate syntax_pos;
4748

@@ -50,7 +51,8 @@ use std::str::FromStr;
5051

5152
use syntax::ast;
5253
use syntax::errors::DiagnosticBuilder;
53-
use syntax::parse::{self, token};
54+
use syntax::parse::{self, token, parse_stream_from_source_str};
55+
use syntax::print::pprust;
5456
use syntax::symbol;
5557
use syntax::tokenstream;
5658
use syntax_pos::DUMMY_SP;
@@ -337,8 +339,18 @@ impl Iterator for TokenIter {
337339
type Item = TokenTree;
338340

339341
fn next(&mut self) -> Option<TokenTree> {
340-
self.next.take().or_else(|| self.cursor.next_as_stream())
341-
.map(|next| TokenTree::from_raw(next, &mut self.next))
342+
loop {
343+
let next =
344+
unwrap_or!(self.next.take().or_else(|| self.cursor.next_as_stream()), return None);
345+
let tree = TokenTree::from_raw(next, &mut self.next);
346+
if tree.span.0 == DUMMY_SP {
347+
if let TokenKind::Sequence(Delimiter::None, stream) = tree.kind {
348+
self.cursor.insert(stream.0);
349+
continue
350+
}
351+
}
352+
return Some(tree);
353+
}
342354
}
343355
}
344356

@@ -449,7 +461,14 @@ impl TokenTree {
449461
Ident(ident) | Lifetime(ident) => TokenKind::Word(Symbol(ident.name)),
450462
Literal(..) | DocComment(..) => TokenKind::Literal(self::Literal(token)),
451463

452-
Interpolated(..) => unimplemented!(),
464+
Interpolated(ref nt) => __internal::with_sess(|(sess, _)| {
465+
TokenKind::Sequence(Delimiter::None, TokenStream(nt.1.force(|| {
466+
// FIXME(jseyfried): Avoid this pretty-print + reparse hack
467+
let name = "<macro expansion>".to_owned();
468+
let source = pprust::token_to_string(&token);
469+
parse_stream_from_source_str(name, source, sess, Some(span))
470+
})))
471+
}),
453472

454473
OpenDelim(..) | CloseDelim(..) => unreachable!(),
455474
Whitespace | Comment | Shebang(..) | Eof => unreachable!(),
@@ -530,20 +549,21 @@ pub mod __internal {
530549
pub use self::quote::{Quoter, __rt};
531550

532551
use std::cell::Cell;
533-
use std::rc::Rc;
534552

535553
use syntax::ast;
536554
use syntax::ext::base::ExtCtxt;
537555
use syntax::ext::hygiene::Mark;
538556
use syntax::ptr::P;
539-
use syntax::parse::{self, token, ParseSess};
557+
use syntax::parse::{self, ParseSess};
558+
use syntax::parse::token::{self, Token};
540559
use syntax::tokenstream;
560+
use syntax_pos::DUMMY_SP;
541561

542562
use super::{TokenStream, LexError};
543563

544564
pub fn new_token_stream(item: P<ast::Item>) -> TokenStream {
545-
let (span, token) = (item.span, token::Interpolated(Rc::new(token::NtItem(item))));
546-
TokenStream(tokenstream::TokenTree::Token(span, token).into())
565+
let token = Token::interpolated(token::NtItem(item));
566+
TokenStream(tokenstream::TokenTree::Token(DUMMY_SP, token).into())
547567
}
548568

549569
pub fn token_stream_wrap(inner: tokenstream::TokenStream) -> TokenStream {

src/libsyntax/attr.rs

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1057,7 +1057,7 @@ impl MetaItem {
10571057
{
10581058
let (mut span, name) = match tokens.next() {
10591059
Some(TokenTree::Token(span, Token::Ident(ident))) => (span, ident.name),
1060-
Some(TokenTree::Token(_, Token::Interpolated(ref nt))) => match **nt {
1060+
Some(TokenTree::Token(_, Token::Interpolated(ref nt))) => match nt.0 {
10611061
token::Nonterminal::NtIdent(ident) => (ident.span, ident.node.name),
10621062
token::Nonterminal::NtMeta(ref meta) => return Some(meta.clone()),
10631063
_ => return None,
@@ -1229,7 +1229,7 @@ impl LitKind {
12291229
match token {
12301230
Token::Ident(ident) if ident.name == "true" => Some(LitKind::Bool(true)),
12311231
Token::Ident(ident) if ident.name == "false" => Some(LitKind::Bool(false)),
1232-
Token::Interpolated(ref nt) => match **nt {
1232+
Token::Interpolated(ref nt) => match nt.0 {
12331233
token::NtExpr(ref v) => match v.node {
12341234
ExprKind::Lit(ref lit) => Some(lit.node.clone()),
12351235
_ => None,

src/libsyntax/ext/base.rs

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -215,7 +215,7 @@ impl<F> TTMacroExpander for F
215215
impl Folder for AvoidInterpolatedIdents {
216216
fn fold_tt(&mut self, tt: tokenstream::TokenTree) -> tokenstream::TokenTree {
217217
if let tokenstream::TokenTree::Token(_, token::Interpolated(ref nt)) = tt {
218-
if let token::NtIdent(ident) = **nt {
218+
if let token::NtIdent(ident) = nt.0 {
219219
return tokenstream::TokenTree::Token(ident.span, token::Ident(ident.node));
220220
}
221221
}

src/libsyntax/ext/expand.rs

Lines changed: 10 additions & 30 deletions
Original file line numberDiff line numberDiff line change
@@ -21,15 +21,15 @@ use ext::placeholders::{placeholder, PlaceholderExpander};
2121
use feature_gate::{self, Features, is_builtin_attr};
2222
use fold;
2323
use fold::*;
24-
use parse::{filemap_to_stream, ParseSess, DirectoryOwnership, PResult, token};
24+
use parse::{DirectoryOwnership, PResult};
25+
use parse::token::{self, Token};
2526
use parse::parser::Parser;
26-
use print::pprust;
2727
use ptr::P;
2828
use std_inject;
2929
use symbol::Symbol;
3030
use symbol::keywords;
3131
use syntax_pos::{Span, DUMMY_SP};
32-
use tokenstream::TokenStream;
32+
use tokenstream::{TokenStream, TokenTree};
3333
use util::small_vector::SmallVector;
3434
use visit::Visitor;
3535

@@ -427,11 +427,13 @@ impl<'a, 'b> MacroExpander<'a, 'b> {
427427
kind.expect_from_annotatables(items)
428428
}
429429
SyntaxExtension::AttrProcMacro(ref mac) => {
430-
let item_toks = stream_for_item(&item, self.cx.parse_sess);
431-
432-
let span = Span { ctxt: self.cx.backtrace(), ..attr.span };
433-
let tok_result = mac.expand(self.cx, attr.span, attr.tokens, item_toks);
434-
self.parse_expansion(tok_result, kind, &attr.path, span)
430+
let item_tok = TokenTree::Token(DUMMY_SP, Token::interpolated(match item {
431+
Annotatable::Item(item) => token::NtItem(item),
432+
Annotatable::TraitItem(item) => token::NtTraitItem(item.unwrap()),
433+
Annotatable::ImplItem(item) => token::NtImplItem(item.unwrap()),
434+
})).into();
435+
let tok_result = mac.expand(self.cx, attr.span, attr.tokens, item_tok);
436+
self.parse_expansion(tok_result, kind, &attr.path, attr.span)
435437
}
436438
SyntaxExtension::ProcMacroDerive(..) | SyntaxExtension::BuiltinDerive(..) => {
437439
self.cx.span_err(attr.span, &format!("`{}` is a derive mode", attr.path));
@@ -769,28 +771,6 @@ pub fn find_attr_invoc(attrs: &mut Vec<ast::Attribute>) -> Option<ast::Attribute
769771
.map(|i| attrs.remove(i))
770772
}
771773

772-
// These are pretty nasty. Ideally, we would keep the tokens around, linked from
773-
// the AST. However, we don't so we need to create new ones. Since the item might
774-
// have come from a macro expansion (possibly only in part), we can't use the
775-
// existing codemap.
776-
//
777-
// Therefore, we must use the pretty printer (yuck) to turn the AST node into a
778-
// string, which we then re-tokenise (double yuck), but first we have to patch
779-
// the pretty-printed string on to the end of the existing codemap (infinity-yuck).
780-
fn stream_for_item(item: &Annotatable, parse_sess: &ParseSess) -> TokenStream {
781-
let text = match *item {
782-
Annotatable::Item(ref i) => pprust::item_to_string(i),
783-
Annotatable::TraitItem(ref ti) => pprust::trait_item_to_string(ti),
784-
Annotatable::ImplItem(ref ii) => pprust::impl_item_to_string(ii),
785-
};
786-
string_to_stream(text, parse_sess, item.span())
787-
}
788-
789-
fn string_to_stream(text: String, parse_sess: &ParseSess, span: Span) -> TokenStream {
790-
let filename = String::from("<macro expansion>");
791-
filemap_to_stream(parse_sess, parse_sess.codemap().new_filemap(filename, text), Some(span))
792-
}
793-
794774
impl<'a, 'b> Folder for InvocationCollector<'a, 'b> {
795775
fn fold_expr(&mut self, expr: P<ast::Expr>) -> P<ast::Expr> {
796776
let mut expr = self.cfg.configure_expr(expr).unwrap();

src/libsyntax/ext/quote.rs

Lines changed: 18 additions & 18 deletions
Original file line numberDiff line numberDiff line change
@@ -30,9 +30,9 @@ pub mod rt {
3030
use ast;
3131
use codemap::Spanned;
3232
use ext::base::ExtCtxt;
33-
use parse::{self, token, classify};
33+
use parse::{self, classify};
34+
use parse::token::{self, Token};
3435
use ptr::P;
35-
use std::rc::Rc;
3636
use symbol::Symbol;
3737

3838
use tokenstream::{self, TokenTree, TokenStream};
@@ -82,70 +82,70 @@ pub mod rt {
8282
impl ToTokens for ast::Path {
8383
fn to_tokens(&self, _cx: &ExtCtxt) -> Vec<TokenTree> {
8484
let nt = token::NtPath(self.clone());
85-
vec![TokenTree::Token(DUMMY_SP, token::Interpolated(Rc::new(nt)))]
85+
vec![TokenTree::Token(DUMMY_SP, Token::interpolated(nt))]
8686
}
8787
}
8888

8989
impl ToTokens for ast::Ty {
9090
fn to_tokens(&self, _cx: &ExtCtxt) -> Vec<TokenTree> {
9191
let nt = token::NtTy(P(self.clone()));
92-
vec![TokenTree::Token(self.span, token::Interpolated(Rc::new(nt)))]
92+
vec![TokenTree::Token(self.span, Token::interpolated(nt))]
9393
}
9494
}
9595

9696
impl ToTokens for ast::Block {
9797
fn to_tokens(&self, _cx: &ExtCtxt) -> Vec<TokenTree> {
9898
let nt = token::NtBlock(P(self.clone()));
99-
vec![TokenTree::Token(self.span, token::Interpolated(Rc::new(nt)))]
99+
vec![TokenTree::Token(self.span, Token::interpolated(nt))]
100100
}
101101
}
102102

103103
impl ToTokens for ast::Generics {
104104
fn to_tokens(&self, _cx: &ExtCtxt) -> Vec<TokenTree> {
105105
let nt = token::NtGenerics(self.clone());
106-
vec![TokenTree::Token(DUMMY_SP, token::Interpolated(Rc::new(nt)))]
106+
vec![TokenTree::Token(DUMMY_SP, Token::interpolated(nt))]
107107
}
108108
}
109109

110110
impl ToTokens for ast::WhereClause {
111111
fn to_tokens(&self, _cx: &ExtCtxt) -> Vec<TokenTree> {
112112
let nt = token::NtWhereClause(self.clone());
113-
vec![TokenTree::Token(DUMMY_SP, token::Interpolated(Rc::new(nt)))]
113+
vec![TokenTree::Token(DUMMY_SP, Token::interpolated(nt))]
114114
}
115115
}
116116

117117
impl ToTokens for P<ast::Item> {
118118
fn to_tokens(&self, _cx: &ExtCtxt) -> Vec<TokenTree> {
119119
let nt = token::NtItem(self.clone());
120-
vec![TokenTree::Token(self.span, token::Interpolated(Rc::new(nt)))]
120+
vec![TokenTree::Token(self.span, Token::interpolated(nt))]
121121
}
122122
}
123123

124124
impl ToTokens for ast::ImplItem {
125125
fn to_tokens(&self, _cx: &ExtCtxt) -> Vec<TokenTree> {
126126
let nt = token::NtImplItem(self.clone());
127-
vec![TokenTree::Token(self.span, token::Interpolated(Rc::new(nt)))]
127+
vec![TokenTree::Token(self.span, Token::interpolated(nt))]
128128
}
129129
}
130130

131131
impl ToTokens for P<ast::ImplItem> {
132132
fn to_tokens(&self, _cx: &ExtCtxt) -> Vec<TokenTree> {
133133
let nt = token::NtImplItem((**self).clone());
134-
vec![TokenTree::Token(self.span, token::Interpolated(Rc::new(nt)))]
134+
vec![TokenTree::Token(self.span, Token::interpolated(nt))]
135135
}
136136
}
137137

138138
impl ToTokens for ast::TraitItem {
139139
fn to_tokens(&self, _cx: &ExtCtxt) -> Vec<TokenTree> {
140140
let nt = token::NtTraitItem(self.clone());
141-
vec![TokenTree::Token(self.span, token::Interpolated(Rc::new(nt)))]
141+
vec![TokenTree::Token(self.span, Token::interpolated(nt))]
142142
}
143143
}
144144

145145
impl ToTokens for ast::Stmt {
146146
fn to_tokens(&self, _cx: &ExtCtxt) -> Vec<TokenTree> {
147147
let nt = token::NtStmt(self.clone());
148-
let mut tts = vec![TokenTree::Token(self.span, token::Interpolated(Rc::new(nt)))];
148+
let mut tts = vec![TokenTree::Token(self.span, Token::interpolated(nt))];
149149

150150
// Some statements require a trailing semicolon.
151151
if classify::stmt_ends_with_semi(&self.node) {
@@ -159,35 +159,35 @@ pub mod rt {
159159
impl ToTokens for P<ast::Expr> {
160160
fn to_tokens(&self, _cx: &ExtCtxt) -> Vec<TokenTree> {
161161
let nt = token::NtExpr(self.clone());
162-
vec![TokenTree::Token(self.span, token::Interpolated(Rc::new(nt)))]
162+
vec![TokenTree::Token(self.span, Token::interpolated(nt))]
163163
}
164164
}
165165

166166
impl ToTokens for P<ast::Pat> {
167167
fn to_tokens(&self, _cx: &ExtCtxt) -> Vec<TokenTree> {
168168
let nt = token::NtPat(self.clone());
169-
vec![TokenTree::Token(self.span, token::Interpolated(Rc::new(nt)))]
169+
vec![TokenTree::Token(self.span, Token::interpolated(nt))]
170170
}
171171
}
172172

173173
impl ToTokens for ast::Arm {
174174
fn to_tokens(&self, _cx: &ExtCtxt) -> Vec<TokenTree> {
175175
let nt = token::NtArm(self.clone());
176-
vec![TokenTree::Token(DUMMY_SP, token::Interpolated(Rc::new(nt)))]
176+
vec![TokenTree::Token(DUMMY_SP, Token::interpolated(nt))]
177177
}
178178
}
179179

180180
impl ToTokens for ast::Arg {
181181
fn to_tokens(&self, _cx: &ExtCtxt) -> Vec<TokenTree> {
182182
let nt = token::NtArg(self.clone());
183-
vec![TokenTree::Token(DUMMY_SP, token::Interpolated(Rc::new(nt)))]
183+
vec![TokenTree::Token(DUMMY_SP, Token::interpolated(nt))]
184184
}
185185
}
186186

187187
impl ToTokens for P<ast::Block> {
188188
fn to_tokens(&self, _cx: &ExtCtxt) -> Vec<TokenTree> {
189189
let nt = token::NtBlock(self.clone());
190-
vec![TokenTree::Token(DUMMY_SP, token::Interpolated(Rc::new(nt)))]
190+
vec![TokenTree::Token(DUMMY_SP, Token::interpolated(nt))]
191191
}
192192
}
193193

@@ -215,7 +215,7 @@ pub mod rt {
215215
impl ToTokens for ast::MetaItem {
216216
fn to_tokens(&self, _cx: &ExtCtxt) -> Vec<TokenTree> {
217217
let nt = token::NtMeta(self.clone());
218-
vec![TokenTree::Token(DUMMY_SP, token::Interpolated(Rc::new(nt)))]
218+
vec![TokenTree::Token(DUMMY_SP, Token::interpolated(nt))]
219219
}
220220
}
221221

src/libsyntax/ext/tt/transcribe.rs

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -156,7 +156,7 @@ pub fn transcribe(cx: &ExtCtxt,
156156
result.push(tt.clone().into());
157157
} else {
158158
sp.ctxt = sp.ctxt.apply_mark(cx.current_expansion.mark);
159-
let token = TokenTree::Token(sp, token::Interpolated(nt.clone()));
159+
let token = TokenTree::Token(sp, Token::interpolated((**nt).clone()));
160160
result.push(token.into());
161161
}
162162
} else {

src/libsyntax/fold.rs

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -22,7 +22,7 @@ use ast::*;
2222
use ast;
2323
use syntax_pos::Span;
2424
use codemap::{Spanned, respan};
25-
use parse::token;
25+
use parse::token::{self, Token};
2626
use ptr::P;
2727
use symbol::keywords;
2828
use tokenstream::*;
@@ -586,7 +586,7 @@ pub fn noop_fold_token<T: Folder>(t: token::Token, fld: &mut T) -> token::Token
586586
Ok(nt) => nt,
587587
Err(nt) => (*nt).clone(),
588588
};
589-
token::Interpolated(Rc::new(fld.fold_interpolated(nt)))
589+
Token::interpolated(fld.fold_interpolated(nt.0))
590590
}
591591
_ => t
592592
}

src/libsyntax/parse/attr.rs

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -151,7 +151,7 @@ impl<'a> Parser<'a> {
151151

152152
pub fn parse_path_and_tokens(&mut self) -> PResult<'a, (ast::Path, TokenStream)> {
153153
let meta = match self.token {
154-
token::Interpolated(ref nt) => match **nt {
154+
token::Interpolated(ref nt) => match nt.0 {
155155
Nonterminal::NtMeta(ref meta) => Some(meta.clone()),
156156
_ => None,
157157
},
@@ -223,7 +223,7 @@ impl<'a> Parser<'a> {
223223
/// meta_item_inner : (meta_item | UNSUFFIXED_LIT) (',' meta_item_inner)? ;
224224
pub fn parse_meta_item(&mut self) -> PResult<'a, ast::MetaItem> {
225225
let nt_meta = match self.token {
226-
token::Interpolated(ref nt) => match **nt {
226+
token::Interpolated(ref nt) => match nt.0 {
227227
token::NtMeta(ref e) => Some(e.clone()),
228228
_ => None,
229229
},

0 commit comments

Comments (0)