get unit tests to pass

This commit is contained in:
Niko Matsakis 2015-07-24 22:48:41 -04:00
parent 92d64f854b
commit 0e19272c01
6 changed files with 26 additions and 21 deletions

View File

@@ -16,26 +16,25 @@ grammar;
let actual = expand_macros(grammar).unwrap();
let expected = parser::parse_grammar(r#"
let expected = parser::parse_grammar(r##"
grammar;
Ids = `Comma<"Id">`;
`Comma<"Id">`: Vec<`"Id"`> =
<v:`(<"Id"> ",")*`> <e:`"Id"?`> =>
v.into_iter().chain(e.into_iter()).collect();
`Comma<"Id">`: Vec<#"Id"#> =
<v:`(<"Id"> ",")*`> <e:`"Id"?`> => v.into_iter().chain(e.into_iter()).collect();
`"Id"?`: ::std::option::Option<`"Id"`> = {
`"Id"?`: ::std::option::Option<#"Id"#> = {
"Id" => Some(<>);
=> None;
};
`(<"Id"> ",")*`: ::std::vec::Vec<``(<"Id"> ",")``> = {
`(<"Id"> ",")*`: ::std::vec::Vec<#`(<"Id"> ",")`#> = {
=> vec![];
<v:`(<"Id"> ",")*`> <e:`(<"Id"> ",")`> => { let mut v = v; v.push(e); v };
};
`(<"Id"> ",")`: `"Id"` = <"Id"> "," => (<>);
"#).unwrap();
`(<"Id"> ",")`: #"Id"# = <"Id"> "," => (<>);
"##).unwrap();
compare(actual, expected);
}

View File

@@ -53,7 +53,7 @@ fn unknown_nonterminal_in_repeat_question() {
fn repeated_macro_arg() {
check_err(
"multiple macro arguments declared with the name `Y`",
r#"grammar; >>>X<Y,Y> <<<= "foo";"#);
r#"grammar; >>>X<Y,Y><<< = "foo";"#);
}
#[test]
@@ -74,7 +74,7 @@ fn named_symbols() {
fn bad_assoc_type() {
check_err(
r#"associated type `Foo` not recognized"#,
r#"grammar; extern token { type >>>Foo <<<= i32; enum Tok { } }"#);
r#"grammar; extern token { type >>>Foo<<< = i32; enum Tok { } }"#);
}
#[test]
@@ -82,7 +82,7 @@ fn dup_assoc_type() {
check_err(
r#"associated type `Location` already specified"#,
r#"grammar; extern token { type Location = i32;
type >>>Location <<<= u32;
type >>>Location<<< = u32;
enum Tok { } }"#);
}
@@ -97,5 +97,5 @@ fn lookahead_without_loc_type() {
fn multiple_extern_token() {
check_err(
r#"multiple extern token definitions are not permitted"#,
r#"grammar; extern token { enum Tok { } } >>>extern token <<<{ enum Tok { } }"#);
r#"grammar; extern token { enum Tok { } } >>>extern token<<< { enum Tok { } }"#);
}

View File

@@ -86,8 +86,8 @@ Alternative: Alternative =
Action: ActionKind = {
"=>@L" => ActionKind::Lookahead;
"=>@R" => ActionKind::Lookbehind;
<c:"=>"> => ActionKind::User(c.to_string());
<c:"=>?"> => ActionKind::Fallible(c.to_string());
<c:"=>"> => ActionKind::User(strip(c).to_string());
<c:"=>?"> => ActionKind::Fallible(strip(c).to_string());
};
Cond: Condition =
@@ -159,8 +159,8 @@ pub TypeRef: TypeRef = {
"(" <Comma<TypeRef>> ")" =>
TypeRef::Tuple(<>);
<e:Escape> =>? {
panic!("parse escape symbol")
"#" <Symbol> "#" => {
TypeRef::OfSymbol(<>.kind)
};
"&" <l:Lifetime?> <m:"mut"?> <t:TypeRef> =>
@@ -321,6 +321,7 @@ extern token {
"=>@L" => Tok::EqualsGreaterThanLookahead(..),
"=>@R" => Tok::EqualsGreaterThanLookbehind(..),
">" => Tok::GreaterThan(..),
"#" => Tok::Hash(..),
"{" => Tok::LeftBrace(..),
"[" => Tok::LeftBracket(..),
"(" => Tok::LeftParen(..),

View File

@@ -15,8 +15,8 @@ pub fn parse_grammar<'input>(input: &'input str)
lrgrammar::parse_Grammar(input, tokenizer)
}
pub fn parse_pattern<'input>(input: &'input str, offset: usize)
-> Result<Pattern<TypeRef>, ParseError<'input>>
fn parse_pattern<'input>(input: &'input str, offset: usize)
-> Result<Pattern<TypeRef>, ParseError<'input>>
{
let tokenizer = tok::Tokenizer::new(input, offset);
lrgrammar::parse_Pattern(input, tokenizer)
@@ -26,6 +26,6 @@ pub fn parse_pattern<'input>(input: &'input str, offset: usize)
pub fn parse_type_ref<'input>(input: &'input str)
-> Result<TypeRef, ParseError<'input>>
{
let tokenizer = tok::Tokenizer::new(input);
let tokenizer = tok::Tokenizer::new(input, 0);
lrgrammar::parse_TypeRef(input, tokenizer)
}

View File

@@ -65,6 +65,7 @@ pub enum Tok<'input> {
EqualsGreaterThanQuestionCode(&'input str),
EqualsGreaterThanLookahead,
EqualsGreaterThanLookbehind,
Hash,
GreaterThan,
LeftBrace,
LeftBracket,
@@ -184,6 +185,10 @@ impl<'input> Tokenizer<'input> {
}
}
}
Some((idx0, '#')) => {
self.bump();
Some(Ok((idx0, Hash, idx0+1)))
}
Some((idx0, '>')) => {
self.bump();
Some(Ok((idx0, GreaterThan, idx0+1)))

View File

@@ -8,7 +8,7 @@ fn test(input: &str,
// for spans, and because it applies also to r#XXX# style strings:
let input = input.replace("$", "\n");
let tokenizer = Tokenizer::new(&input);
let tokenizer = Tokenizer::new(&input, 0);
let len = expected.len();
for (token, (expected_span, expected_tok)) in tokenizer.zip(expected.into_iter()) {
println!("token: {:?}", token);
@@ -17,7 +17,7 @@ fn test(input: &str,
assert_eq!(Ok((expected_start, expected_tok, expected_end)), token);
}
let tokenizer = Tokenizer::new(&input);
let tokenizer = Tokenizer::new(&input, 0);
assert_eq!(None, tokenizer.skip(len).next());
}