Commit 1634873

Merge pull request #950 from camelid/short-label-cmd
Add a one-word label command
2 parents bbc0d1c + f820999

2 files changed: +82 -20 lines changed


parser/src/command/relabel.rs

Lines changed: 72 additions & 20 deletions
@@ -5,7 +5,7 @@
 //! The grammar is as follows:
 //!
 //! ```text
-//! Command: `@bot modify labels:? to? <label-list>.`
+//! Command: `@bot modify labels:? to? <label-list>.` or `@bot label:? <label-list>.`
 //!
 //! <label-list>:
 //! - <label-delta>
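
In terms of actual bot invocations (per the updated grammar above and the new tests added below), the following comment forms should now all be accepted, the last two being the new one-word form:

```text
@rustbot modify labels to: +T-compiler -T-lang bug
@rustbot label: +T-compiler -T-lang bug
@rustbot label +T-compiler -T-lang bug
```

As with the longer command, `@rustbot label to ...` is rejected with `ParseError::MisleadingTo`, which the new tests exercise.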
@@ -127,29 +127,33 @@ fn delta_empty() {
 impl RelabelCommand {
     pub fn parse<'a>(input: &mut Tokenizer<'a>) -> Result<Option<Self>, Error<'a>> {
         let mut toks = input.clone();
-        if let Some(Token::Word("modify")) = toks.next_token()? {
-            // continue
-        } else {
-            return Ok(None);
-        }
-        if let Some(Token::Word("labels")) = toks.next_token()? {
+
+        if toks.eat_token(Token::Word("modify"))? {
+            if toks.eat_token(Token::Word("labels"))? {
+                if toks.eat_token(Token::Colon)? {
+                    // ate the colon
+                } else if toks.eat_token(Token::Word("to"))? {
+                    // optionally eat the colon after to, e.g.:
+                    // @rustbot modify labels to: -S-waiting-on-author, +S-waiting-on-review
+                    toks.eat_token(Token::Colon)?;
+                } else {
+                    // It's okay if there's no to or colon, we can just eat labels
+                    // afterwards.
+                }
+                // continue
+                {} // FIXME(rustfmt#4506): this is needed to get rustfmt to indent the comment correctly
+            } else {
+                return Ok(None);
+            }
+        } else if toks.eat_token(Token::Word("label"))? {
+            // optionally eat a colon
+            toks.eat_token(Token::Colon)?;
             // continue
+            {} // FIXME(rustfmt#4506): this is needed to get rustfmt to indent the comment correctly
         } else {
             return Ok(None);
         }
-        if let Some(Token::Colon) = toks.peek_token()? {
-            toks.next_token()?;
-        } else if let Some(Token::Word("to")) = toks.peek_token()? {
-            toks.next_token()?;
-            // optionally eat the colon after to, e.g.:
-            // @rustbot modify labels to: -S-waiting-on-author, +S-waiting-on-review
-            if let Ok(Some(Token::Colon)) = toks.peek_token() {
-                toks.next_token()?;
-            }
-        } else {
-            // It's okay if there's no to or colon, we can just eat labels
-            // afterwards.
-        }
+
         if let Some(Token::Word("to")) = toks.peek_token()? {
             return Err(toks.error(ParseError::MisleadingTo));
         }
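
The rewrite above is driven by the new `Tokenizer::eat_token` helper (added to `parser/src/token.rs` below): each explicit peek-then-consume sequence collapses into a single call that consumes the token only if it matches and returns whether it did. A minimal before/after sketch, using only calls that appear in this diff:

```rust
// Before: peek, and advance only when the peeked token matched.
if let Some(Token::Colon) = toks.peek_token()? {
    toks.next_token()?;
}

// After: eat_token performs the peek and the conditional advance,
// and reports (as Ok(bool)) whether it consumed the token.
toks.eat_token(Token::Colon)?;
```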
@@ -238,3 +242,51 @@ fn parse_to_colon() {
         ]))
     );
 }
+
+#[test]
+fn parse_shorter_command() {
+    assert_eq!(
+        parse("label +T-compiler -T-lang bug"),
+        Ok(Some(vec![
+            LabelDelta::Add(Label("T-compiler".into())),
+            LabelDelta::Remove(Label("T-lang".into())),
+            LabelDelta::Add(Label("bug".into())),
+        ]))
+    );
+}
+
+#[test]
+fn parse_shorter_command_with_colon() {
+    assert_eq!(
+        parse("label: +T-compiler -T-lang bug"),
+        Ok(Some(vec![
+            LabelDelta::Add(Label("T-compiler".into())),
+            LabelDelta::Remove(Label("T-lang".into())),
+            LabelDelta::Add(Label("bug".into())),
+        ]))
+    );
+}
+
+#[test]
+fn parse_shorter_command_with_to() {
+    assert_eq!(
+        parse("label to +T-compiler -T-lang bug")
+            .unwrap_err()
+            .source()
+            .unwrap()
+            .downcast_ref(),
+        Some(&ParseError::MisleadingTo)
+    );
+}
+
+#[test]
+fn parse_shorter_command_with_to_colon() {
+    assert_eq!(
+        parse("label to: +T-compiler -T-lang bug")
+            .unwrap_err()
+            .source()
+            .unwrap()
+            .downcast_ref(),
+        Some(&ParseError::MisleadingTo)
+    );
+}

parser/src/token.rs

Lines changed: 10 additions & 0 deletions
@@ -209,6 +209,16 @@ impl<'a> Tokenizer<'a> {
         }
         Ok(Some(Token::Word(&self.str_from(start))))
     }
+
+    pub fn eat_token(&mut self, token: Token<'a>) -> Result<bool, Error<'a>> {
+        match self.peek_token()? {
+            Some(next_tok) if next_tok == token => {
+                self.next_token()?;
+                Ok(true)
+            }
+            _ => Ok(false),
+        }
+    }
 }
 
 #[cfg(test)]

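As a rough standalone illustration of what `eat_token` does (peek at the next token, consume it only when it equals the expected one, and report whether anything was consumed), here is an analogue built on `std::iter::Peekable`; the `eat` function and the word-splitting setup below are hypothetical and not part of triagebot:

```rust
use std::iter::Peekable;

// Illustrative analogue of Tokenizer::eat_token (not triagebot code):
// consume the next item only if it equals `expected`, and say whether we did.
fn eat<I, T>(iter: &mut Peekable<I>, expected: &T) -> bool
where
    I: Iterator<Item = T>,
    T: PartialEq,
{
    if iter.peek() == Some(expected) {
        iter.next();
        true
    } else {
        false
    }
}

fn main() {
    let mut words = "label : +T-compiler".split_whitespace().peekable();
    assert!(eat(&mut words, &"label"));
    assert!(eat(&mut words, &":")); // the optional colon is consumed here
    assert!(!eat(&mut words, &"to")); // no "to" follows, so nothing is consumed
    assert_eq!(words.next(), Some("+T-compiler")); // the label list remains
}
```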