
Commit b8d6686

Factor out inner current Earley item loop.
Change multiple functions to be non-public. Change nameize to accept an iterator so as to avoid an allocation.
1 parent 27c0986 commit b8d6686
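
For context on the last point in the commit message: before this change the matched values were collected into a temporary Vec just so nameize could index into it; accepting an iterator lets the caller stream the values instead. A minimal standalone sketch of that pattern (hypothetical names and types, not the actual macro_parser.rs code):

use std::collections::HashMap;

// Stand-in for the real Rc<NamedMatch> values; purely illustrative.
type Match = String;

// Before: the caller must collect into a slice so the function can index it.
fn bind_from_slice(names: &[&str], matches: &[Match]) -> HashMap<String, Match> {
    let mut out = HashMap::new();
    for (i, name) in names.iter().enumerate() {
        out.insert(name.to_string(), matches[i].clone());
    }
    out
}

// After: accept any iterator, so the caller can stream the values
// (compare the `nameize(ms, eof_eis[0].matches.iter_mut().map(...))` call in
// the diff below) without building an intermediate Vec first.
fn bind_from_iter<I: Iterator<Item = Match>>(names: &[&str], mut matches: I)
                                             -> HashMap<String, Match> {
    let mut out = HashMap::new();
    for name in names {
        out.insert(name.to_string(), matches.next().expect("one match per name"));
    }
    out
}

fn main() {
    let produced = vec!["x".to_string(), "y".to_string()];
    // No up-front collect() is needed for the iterator version.
    let bound = bind_from_iter(&["a", "b"], produced.into_iter());
    assert_eq!(bound["a"], "x");
}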

File tree

1 file changed: +153 -146 lines changed


src/libsyntax/ext/tt/macro_parser.rs

+153 -146
@@ -130,7 +130,7 @@ struct MatcherTtFrame {
 }
 
 #[derive(Clone)]
-pub struct MatcherPos {
+struct MatcherPos {
     stack: Vec<MatcherTtFrame>,
     top_elts: TokenTreeOrTokenTreeVec,
     sep: Option<Token>,
@@ -162,14 +162,13 @@ pub fn count_names(ms: &[TokenTree]) -> usize {
     })
 }
 
-pub fn initial_matcher_pos(ms: Vec<TokenTree>, sep: Option<Token>, lo: BytePos)
-                            -> Box<MatcherPos> {
+fn initial_matcher_pos(ms: Vec<TokenTree>, lo: BytePos) -> Box<MatcherPos> {
     let match_idx_hi = count_names(&ms[..]);
-    let matches: Vec<_> = (0..match_idx_hi).map(|_| Vec::new()).collect();
+    let matches = create_matches(match_idx_hi);
     Box::new(MatcherPos {
         stack: vec![],
         top_elts: TtSeq(ms),
-        sep: sep,
+        sep: None,
         idx: 0,
         up: None,
         matches: matches,
@@ -202,26 +201,25 @@ pub enum NamedMatch {
     MatchedNonterminal(Rc<Nonterminal>)
 }
 
-fn nameize(ms: &[TokenTree], res: &[Rc<NamedMatch>]) -> NamedParseResult {
-    fn n_rec(m: &TokenTree, res: &[Rc<NamedMatch>],
-             ret_val: &mut HashMap<Ident, Rc<NamedMatch>>, idx: &mut usize)
+fn nameize<I: Iterator<Item=Rc<NamedMatch>>>(ms: &[TokenTree], mut res: I) -> NamedParseResult {
+    fn n_rec<I: Iterator<Item=Rc<NamedMatch>>>(m: &TokenTree, mut res: &mut I,
+             ret_val: &mut HashMap<Ident, Rc<NamedMatch>>)
              -> Result<(), (syntax_pos::Span, String)> {
         match *m {
             TokenTree::Sequence(_, ref seq) => {
                 for next_m in &seq.tts {
-                    n_rec(next_m, res, ret_val, idx)?
+                    n_rec(next_m, res.by_ref(), ret_val)?
                 }
             }
             TokenTree::Delimited(_, ref delim) => {
                 for next_m in &delim.tts {
-                    n_rec(next_m, res, ret_val, idx)?;
+                    n_rec(next_m, res.by_ref(), ret_val)?;
                 }
             }
             TokenTree::Token(sp, MatchNt(bind_name, _)) => {
                 match ret_val.entry(bind_name) {
                     Vacant(spot) => {
-                        spot.insert(res[*idx].clone());
-                        *idx += 1;
+                        spot.insert(res.next().unwrap());
                     }
                     Occupied(..) => {
                         return Err((sp, format!("duplicated bind name: {}", bind_name)))
@@ -238,9 +236,8 @@ fn nameize(ms: &[TokenTree], res: &[Rc<NamedMatch>]) -> NamedParseResult {
     }
 
     let mut ret_val = HashMap::new();
-    let mut idx = 0;
     for m in ms {
-        match n_rec(m, res, &mut ret_val, &mut idx) {
+        match n_rec(m, res.by_ref(), &mut ret_val) {
             Ok(_) => {},
             Err((sp, msg)) => return Error(sp, msg),
         }
@@ -266,9 +263,8 @@ pub fn parse_failure_msg(tok: Token) -> String {
     }
 }
 
-/// Perform a token equality check, ignoring syntax context (that is, an
-/// unhygienic comparison)
-pub fn token_name_eq(t1 : &Token, t2 : &Token) -> bool {
+/// Perform a token equality check, ignoring syntax context (that is, an unhygienic comparison)
+fn token_name_eq(t1 : &Token, t2 : &Token) -> bool {
     match (t1,t2) {
         (&token::Ident(id1),&token::Ident(id2))
         | (&token::Lifetime(id1),&token::Lifetime(id2)) =>
@@ -277,154 +273,165 @@ pub fn token_name_eq(t1 : &Token, t2 : &Token) -> bool {
     }
 }
 
-pub fn parse(sess: &ParseSess, rdr: TtReader, ms: &[TokenTree]) -> NamedParseResult {
-    let mut parser = Parser::new_with_doc_flag(sess, Box::new(rdr), true);
-    let mut cur_eis = SmallVector::one(initial_matcher_pos(ms.to_owned(), None, parser.span.lo));
-
-    loop {
-        let mut bb_eis = Vec::new(); // black-box parsed by parser.rs
-        let mut next_eis = Vec::new(); // or proceed normally
-        let mut eof_eis = Vec::new();
+fn create_matches(len: usize) -> Vec<Vec<Rc<NamedMatch>>> {
+    (0..len).into_iter().map(|_| Vec::new()).collect()
+}
 
-        // for each Earley item
-        while let Some(mut ei) = cur_eis.pop() {
-            // When unzipped trees end, remove them
-            while ei.idx >= ei.top_elts.len() {
-                match ei.stack.pop() {
-                    Some(MatcherTtFrame { elts, idx }) => {
-                        ei.top_elts = elts;
-                        ei.idx = idx + 1;
-                    }
-                    None => break
+fn inner_parse_loop(cur_eis: &mut SmallVector<Box<MatcherPos>>,
+                    next_eis: &mut Vec<Box<MatcherPos>>,
+                    eof_eis: &mut Vec<Box<MatcherPos>>,
+                    bb_eis: &mut Vec<Box<MatcherPos>>,
+                    token: &Token, span: &syntax_pos::Span) -> ParseResult<()> {
+    while let Some(mut ei) = cur_eis.pop() {
+        // When unzipped trees end, remove them
+        while ei.idx >= ei.top_elts.len() {
+            match ei.stack.pop() {
+                Some(MatcherTtFrame { elts, idx }) => {
+                    ei.top_elts = elts;
+                    ei.idx = idx + 1;
                 }
+                None => break
             }
+        }
 
-            let idx = ei.idx;
-            let len = ei.top_elts.len();
-
-            /* at end of sequence */
-            if idx >= len {
-                // can't move out of `match`es, so:
-                if ei.up.is_some() {
-                    // hack: a matcher sequence is repeating iff it has a
-                    // parent (the top level is just a container)
-
-                    // disregard separator, try to go up
-                    // (remove this condition to make trailing seps ok)
-                    if idx == len {
-                        // pop from the matcher position
-
-                        let mut new_pos = ei.up.clone().unwrap();
-
-                        // update matches (the MBE "parse tree") by appending
-                        // each tree as a subtree.
-
-                        // I bet this is a perf problem: we're preemptively
-                        // doing a lot of array work that will get thrown away
-                        // most of the time.
-
-                        // Only touch the binders we have actually bound
-                        for idx in ei.match_lo..ei.match_hi {
-                            let sub = ei.matches[idx].clone();
-                            new_pos.matches[idx]
-                                .push(Rc::new(MatchedSeq(sub, mk_sp(ei.sp_lo,
-                                                                    parser.span.hi))));
-                        }
-
-                        new_pos.match_cur = ei.match_hi;
-                        new_pos.idx += 1;
-                        cur_eis.push(new_pos);
+        let idx = ei.idx;
+        let len = ei.top_elts.len();
+
+        // at end of sequence
+        if idx >= len {
+            // We are repeating iff there is a parent
+            if ei.up.is_some() {
+                // Disregarding the separator, add the "up" case to the tokens that should be
+                // examined.
+                // (remove this condition to make trailing seps ok)
+                if idx == len {
+                    let mut new_pos = ei.up.clone().unwrap();
+
+                    // update matches (the MBE "parse tree") by appending
+                    // each tree as a subtree.
+
+                    // I bet this is a perf problem: we're preemptively
+                    // doing a lot of array work that will get thrown away
+                    // most of the time.
+
+                    // Only touch the binders we have actually bound
+                    for idx in ei.match_lo..ei.match_hi {
+                        let sub = ei.matches[idx].clone();
+                        new_pos.matches[idx]
+                            .push(Rc::new(MatchedSeq(sub, mk_sp(ei.sp_lo,
+                                                                span.hi))));
                     }
 
-                    // can we go around again?
-
-                    // Check if we need a separator
-                    if idx == len && ei.sep.is_some() {
-                        if ei.sep.as_ref().map(|ref sep| token_name_eq(&parser.token, sep))
-                                .unwrap_or(false) {
-                            // i'm conflicted about whether this should be hygienic.... though in
-                            // this case, if the separators are never legal idents, it shouldn't
-                            // matter.
-                            // ei.match_cur = ei.match_lo;
-                            ei.idx += 1;
-                            next_eis.push(ei);
-                        }
-                    } else { // we don't need a separator
-                        ei.match_cur = ei.match_lo;
-                        ei.idx = 0;
-                        cur_eis.push(ei);
-                    }
-                } else {
-                    eof_eis.push(ei);
+                    new_pos.match_cur = ei.match_hi;
+                    new_pos.idx += 1;
+                    cur_eis.push(new_pos);
                 }
-            } else {
-                match ei.top_elts.get_tt(idx) {
-                    /* need to descend into sequence */
-                    TokenTree::Sequence(sp, seq) => {
-                        if seq.op == tokenstream::KleeneOp::ZeroOrMore {
-                            let mut new_ei = ei.clone();
-                            new_ei.match_cur += seq.num_captures;
-                            new_ei.idx += 1;
-                            //we specifically matched zero repeats.
-                            for idx in ei.match_cur..ei.match_cur + seq.num_captures {
-                                new_ei.matches[idx].push(Rc::new(MatchedSeq(vec![], sp)));
-                            }
-
-                            cur_eis.push(new_ei);
-                        }
 
-                        let matches: Vec<_> = (0..ei.matches.len())
-                            .map(|_| Vec::new()).collect();
-                        cur_eis.push(Box::new(MatcherPos {
-                            stack: vec![],
-                            sep: seq.separator.clone(),
-                            idx: 0,
-                            matches: matches,
-                            match_lo: ei.match_cur,
-                            match_cur: ei.match_cur,
-                            match_hi: ei.match_cur + seq.num_captures,
-                            up: Some(ei),
-                            sp_lo: sp.lo,
-                            top_elts: Tt(TokenTree::Sequence(sp, seq)),
-                        }));
+                // Check if we need a separator
+                if idx == len && ei.sep.is_some() {
+                    // We have a separator, and it is the current token.
+                    if ei.sep.as_ref().map(|ref sep| token_name_eq(&token, sep)).unwrap_or(false) {
+                        ei.idx += 1;
+                        next_eis.push(ei);
                     }
-                    TokenTree::Token(_, MatchNt(..)) => {
-                        // Built-in nonterminals never start with these tokens,
-                        // so we can eliminate them from consideration.
-                        match parser.token {
-                            token::CloseDelim(_) => {},
-                            _ => bb_eis.push(ei),
+                } else { // we don't need a separator
+                    ei.match_cur = ei.match_lo;
+                    ei.idx = 0;
+                    cur_eis.push(ei);
+                }
+            } else {
+                // We aren't repeating, so we must be potentially at the end of the input.
+                eof_eis.push(ei);
+            }
+        } else {
+            match ei.top_elts.get_tt(idx) {
+                /* need to descend into sequence */
+                TokenTree::Sequence(sp, seq) => {
+                    if seq.op == tokenstream::KleeneOp::ZeroOrMore {
+                        // Examine the case where there are 0 matches of this sequence
+                        let mut new_ei = ei.clone();
+                        new_ei.match_cur += seq.num_captures;
+                        new_ei.idx += 1;
+                        for idx in ei.match_cur..ei.match_cur + seq.num_captures {
+                            new_ei.matches[idx].push(Rc::new(MatchedSeq(vec![], sp)));
                         }
+                        cur_eis.push(new_ei);
                     }
-                    TokenTree::Token(sp, SubstNt(..)) => {
-                        return Error(sp, "missing fragment specifier".to_string())
-                    }
-                    seq @ TokenTree::Delimited(..) | seq @ TokenTree::Token(_, DocComment(..)) => {
-                        let lower_elts = mem::replace(&mut ei.top_elts, Tt(seq));
-                        let idx = ei.idx;
-                        ei.stack.push(MatcherTtFrame {
-                            elts: lower_elts,
-                            idx: idx,
-                        });
-                        ei.idx = 0;
-                        cur_eis.push(ei);
+
+                    // Examine the case where there is at least one match of this sequence
+                    let matches = create_matches(ei.matches.len());
+                    cur_eis.push(Box::new(MatcherPos {
+                        stack: vec![],
+                        sep: seq.separator.clone(),
+                        idx: 0,
+                        matches: matches,
+                        match_lo: ei.match_cur,
+                        match_cur: ei.match_cur,
+                        match_hi: ei.match_cur + seq.num_captures,
+                        up: Some(ei),
+                        sp_lo: sp.lo,
+                        top_elts: Tt(TokenTree::Sequence(sp, seq)),
+                    }));
+                }
+                TokenTree::Token(_, MatchNt(..)) => {
+                    // Built-in nonterminals never start with these tokens,
+                    // so we can eliminate them from consideration.
+                    match *token {
+                        token::CloseDelim(_) => {},
+                        _ => bb_eis.push(ei),
                     }
-                    TokenTree::Token(_, ref t) => {
-                        if token_name_eq(t, &parser.token) {
-                            ei.idx += 1;
-                            next_eis.push(ei);
-                        }
+                }
+                TokenTree::Token(sp, SubstNt(..)) => {
+                    return Error(sp, "missing fragment specifier".to_string())
+                }
+                seq @ TokenTree::Delimited(..) | seq @ TokenTree::Token(_, DocComment(..)) => {
+                    let lower_elts = mem::replace(&mut ei.top_elts, Tt(seq));
+                    let idx = ei.idx;
+                    ei.stack.push(MatcherTtFrame {
+                        elts: lower_elts,
+                        idx: idx,
+                    });
+                    ei.idx = 0;
+                    cur_eis.push(ei);
+                }
+                TokenTree::Token(_, ref t) => {
+                    if token_name_eq(t, &token) {
+                        ei.idx += 1;
+                        next_eis.push(ei);
                     }
                 }
             }
         }
+    }
+
+    Success(())
+}
+
+pub fn parse(sess: &ParseSess, rdr: TtReader, ms: &[TokenTree]) -> NamedParseResult {
+    let mut parser = Parser::new_with_doc_flag(sess, Box::new(rdr), true);
+    let mut cur_eis = SmallVector::one(initial_matcher_pos(ms.to_owned(), parser.span.lo));
+
+    loop {
+        let mut bb_eis = Vec::new(); // black-box parsed by parser.rs
+        let mut next_eis = Vec::new(); // or proceed normally
+
+        // FIXME: Use SmallVector since in the successful case we will only have one
+        let mut eof_eis = Vec::new();
+
+        match inner_parse_loop(&mut cur_eis, &mut next_eis, &mut eof_eis, &mut bb_eis,
+                               &parser.token, &parser.span) {
+            Success(_) => {},
+            Failure(sp, tok) => return Failure(sp, tok),
+            Error(sp, msg) => return Error(sp, msg),
+        }
+
+        // inner parse loop handled all cur_eis, so it's empty
+        assert!(cur_eis.is_empty());
 
         /* error messages here could be improved with links to orig. rules */
         if token_name_eq(&parser.token, &token::Eof) {
             if eof_eis.len() == 1 {
-                let v = eof_eis[0].matches.iter_mut()
-                    .map(|dv| dv.pop().unwrap()).collect::<Vec<_>>();
-                return nameize(ms, &v[..]);
+                return nameize(ms, eof_eis[0].matches.iter_mut().map(|mut dv| dv.pop().unwrap()));
             } else if eof_eis.len() > 1 {
                 return Error(parser.span, "ambiguity: multiple successful parses".to_string());
             } else {
473480
}
474481
}
475482

476-
pub fn parse_nt<'a>(p: &mut Parser<'a>, sp: Span, name: &str) -> Nonterminal {
483+
fn parse_nt<'a>(p: &mut Parser<'a>, sp: Span, name: &str) -> Nonterminal {
477484
match name {
478485
"tt" => {
479486
p.quote_depth += 1; //but in theory, non-quoted tts might be useful

0 commit comments
