mirror of https://github.com/fluencelabs/lalrpop
synced 2025-03-31 07:21:04 +00:00
add an interesting test for an S/R conflict and patch up LALRPOP tokenizer
commit bd006f8c27
parent 5890485b20
@@ -141,3 +141,45 @@ grammar;
     // run some random tests
     random_test(&grammar, &states, nt("S"));
 }
+
+#[test]
+fn shift_reduce_conflict1() {
+    // This grammar gets a shift-reduce conflict because if the input
+    // is "&" (*) "L", then we see two possibilities, and we must decide
+    // between them:
+    //
+    // "&" (*) "L" E
+    //  |       |  |
+    //  +-------+--+
+    //          |
+    //          E
+    //
+    // or
+    //
+    // "&" (*) "L"
+    //  |       |
+    //  |       OPT_L E
+    //  |       |     |
+    //  +-------+-----+
+    //          |
+    //          E
+    //
+    // to some extent this may be a false conflict, in that inlined
+    // rules would address it, but it's an interesting one for
+    // producing a useful error message.
+
+    let grammar = normalized_grammar(r#"
+grammar;
+    extern token { enum Tok { } }
+    E: () = {
+        "L";
+        "&" OPT_L E;
+    };
+    OPT_L: () = {
+        ;
+        "L";
+    };
+"#);
+
+    assert!(build_states(&grammar, nt("E")).is_err());
+}
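To make the conflict concrete: after the parser has consumed "&" and the lookahead is "L", two moves are justifiable, and the test asserts that state construction fails rather than silently picking one. Below is a minimal, self-contained sketch of those two competing actions. It is not LALRPOP's API; `Action` and `actions_on_lookahead_l` are made-up names for illustration only.

// Sketch of the decision point in the grammar from the test above:
//   E     = "L" | "&" OPT_L E ;
//   OPT_L = ()  | "L" ;

#[derive(Debug, PartialEq)]
enum Action {
    Shift,                // consume "L" now, as OPT_L's own "L"
    Reduce(&'static str), // reduce by the named production first
}

// Both actions are justified by lookahead "L": shifting treats "L" as
// OPT_L = "L"; reducing OPT_L = () first treats the same "L" as the
// start of the trailing E.
fn actions_on_lookahead_l() -> Vec<Action> {
    vec![Action::Shift, Action::Reduce("OPT_L = ()")]
}

fn main() {
    let actions = actions_on_lookahead_l();
    assert_eq!(actions.len(), 2); // two valid actions in one state: a conflict
    println!("{:?}", actions);
}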
@@ -26,6 +26,7 @@ pub enum Tok<'input> {
     If,
     Mut,
     Pub,
+    Type,
     Token,
 
     // Special keywords: these are accompanied by a series of
@@ -94,6 +95,7 @@ const KEYWORDS: &'static [(&'static str, Tok<'static>)] = &[
     ("mut", Mut),
     ("pub", Pub),
     ("token", Token),
+    ("type", Type),
 ];
 
 impl<'input> Tokenizer<'input> {
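The tokenizer half of the patch registers "type" twice: as a `Tok` variant and as an entry in the `KEYWORDS` table. Below is a minimal, self-contained sketch of how such a table is typically consulted when a scanned word must be classified as keyword or identifier. This is an assumption about the scheme, not the real LALRPOP tokenizer: the real `Tok<'input>` carries a lifetime parameter that the sketch drops, and `classify` is a made-up helper.

// Simplified stand-in for Tok<'input>; Id represents "not a keyword".
#[derive(Clone, Copy, Debug, PartialEq)]
enum Tok {
    If,
    Mut,
    Pub,
    Type,
    Token,
    Id,
}

// Mirrors the shape of the KEYWORDS table the diff extends with ("type", Type).
const KEYWORDS: &[(&str, Tok)] = &[
    ("if", Tok::If),
    ("mut", Tok::Mut),
    ("pub", Tok::Pub),
    ("token", Tok::Token),
    ("type", Tok::Type),
];

// A word is a keyword if it appears in the table, an identifier otherwise.
fn classify(word: &str) -> Tok {
    KEYWORDS
        .iter()
        .find(|&&(kw, _)| kw == word)
        .map(|&(_, tok)| tok)
        .unwrap_or(Tok::Id)
}

fn main() {
    assert_eq!(classify("type"), Tok::Type); // recognized after this patch
    assert_eq!(classify("typed"), Tok::Id);  // non-keywords stay identifiers
}

With a table-driven scheme like this, adding a keyword is a two-line change (one enum variant, one table entry), which is exactly the shape of the diff above.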