Add a Session object and thread some logging through LALRPOP.

Niko Matsakis 2016-02-01 05:23:51 -05:00
parent 20b2ee8e56
commit c7aeb106ad
9 changed files with 173 additions and 38 deletions
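For orientation, here is a rough usage sketch of the public API as it stands after this commit, based on the re-exports added to lib.rs below. It is only an illustration, not code from the commit, and the grammar file path is hypothetical.

// Driving LALRPOP from a build script after this commit (sketch).
extern crate lalrpop;

fn main() {
    // Same behaviour as before the commit: rebuild only stale .lalrpop files,
    // logging at the default Informative level.
    lalrpop::process_root().unwrap();

    // Or configure the new Session explicitly:
    let mut session = lalrpop::Session::new();
    session.set_force_build();                      // skip the needs_rebuild check
    session.set_log_level(lalrpop::Level::Verbose); // phase progress and timings
    lalrpop::process_file(&session, "src/grammar.lalrpop").unwrap();
}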


@@ -18,6 +18,7 @@ diff = "0.1"
itertools = "0.3"
unicode-xid = "0.0.2"
petgraph = "0.1.11"
time = "0.1"
[dev-dependencies]
rand = "0.3"


@@ -8,6 +8,7 @@ use lr1;
use normalize;
use parser;
use rust::RustWrite;
use session::Session;
use tok;
use self::filetext::FileText;
@@ -21,23 +22,33 @@ mod action;
mod filetext;
pub fn process_root() -> io::Result<()> {
process_dir(try!(current_dir()), false)
let session = Session::new();
process_dir(&session, try!(current_dir()))
}
pub fn process_root_unconditionally() -> io::Result<()> {
process_dir(try!(current_dir()), true)
let mut session = Session::new();
session.set_force_build();
process_dir(&session, try!(current_dir()))
}
fn process_dir<P:AsRef<Path>>(root_dir: P, force_build: bool) -> io::Result<()> {
fn process_dir<P:AsRef<Path>>(session: &Session, root_dir: P) -> io::Result<()> {
let lalrpop_files = try!(lalrpop_files(root_dir));
for lalrpop_file in lalrpop_files {
let rs_file = lalrpop_file.with_extension("rs");
if force_build || try!(needs_rebuild(&lalrpop_file, &rs_file)) {
try!(remove_old_file(&rs_file));
let grammar = try!(parse_and_normalize_grammar(lalrpop_file));
try!(emit_recursive_ascent(&rs_file, &grammar));
try!(make_read_only(&rs_file));
}
try!(process_file(session, lalrpop_file));
}
Ok(())
}
pub fn process_file<P:AsRef<Path>>(session: &Session, lalrpop_file: P) -> io::Result<()> {
let lalrpop_file: &Path = lalrpop_file.as_ref();
let rs_file = lalrpop_file.with_extension("rs");
if session.force_build() || try!(needs_rebuild(&lalrpop_file, &rs_file)) {
log!(session, Informative, "processing file `{}`", lalrpop_file.to_string_lossy());
try!(remove_old_file(&rs_file));
let grammar = try!(parse_and_normalize_grammar(&session, lalrpop_file));
try!(emit_recursive_ascent(&session, &rs_file, &grammar));
try!(make_read_only(&rs_file));
}
Ok(())
}
@@ -48,7 +59,7 @@ fn remove_old_file(rs_file: &Path) -> io::Result<()> {
Err(e) => {
// Unix reports NotFound, Windows PermissionDenied!
match e.kind() {
io::ErrorKind::NotFound | io::ErrorKind::PermissionDenied=> Ok(()),
io::ErrorKind::NotFound | io::ErrorKind::PermissionDenied => Ok(()),
_ => Err(e),
}
}
@@ -129,7 +140,8 @@ fn lalrpop_files<P:AsRef<Path>>(root_dir: P) -> io::Result<Vec<PathBuf>> {
Ok(result)
}
fn parse_and_normalize_grammar(path: PathBuf) -> io::Result<r::Grammar> {
fn parse_and_normalize_grammar(session: &Session, path: &Path) -> io::Result<r::Grammar> {
let path = path.to_path_buf();
let input = try!(FileText::from_path(path));
let grammar = match parser::parse_grammar(input.text()) {
@@ -184,7 +196,7 @@ fn parse_and_normalize_grammar(path: PathBuf) -> io::Result<r::Grammar> {
}
};
match normalize::normalize(grammar) {
match normalize::normalize(session, grammar) {
Ok(grammar) => Ok(grammar),
Err(error) => {
report_error(&input,
@@ -211,7 +223,10 @@ fn emit_uses<W:Write>(grammar: &r::Grammar,
rust.write_uses("", grammar)
}
fn emit_recursive_ascent(output_path: &Path, grammar: &r::Grammar) -> io::Result<()>
fn emit_recursive_ascent(session: &Session,
output_path: &Path,
grammar: &r::Grammar)
-> io::Result<()>
{
let output_file = try!(fs::File::create(output_path));
let mut rust = RustWrite::new(output_file);
@@ -258,7 +273,9 @@ fn emit_recursive_ascent(output_path: &Path, grammar: &r::Grammar) -> io::Result
// where to stop!
assert_eq!(grammar.productions_for(start_nt).len(), 1);
let states = match lr1::build_states(&grammar, start_nt) {
log!(session, Verbose, "Building states for public nonterminal `{}`", user_nt);
let states = match lr1::build_states(session, &grammar, start_nt) {
Ok(states) => states,
Err(error) => {
try!(lr1::report_error(&mut io::stdout(), &grammar, &error));


@@ -14,14 +14,17 @@ extern crate lalrpop_util;
extern crate petgraph;
extern crate regex;
extern crate itertools;
extern crate time;
extern crate unicode_xid;
#[cfg(test)]
extern crate rand;
// rust exports a macro that others use, so hoist it early.
// hoist the modules that define macros up earlier
#[macro_use]
mod rust;
#[macro_use]
mod log;
mod build;
mod grammar;
@@ -30,6 +33,7 @@ mod lr1;
mod normalize;
mod parser;
mod kernel_set;
mod session;
mod tok;
mod util;
@@ -38,3 +42,7 @@ mod util;
pub use build::process_root;
pub use build::process_root_unconditionally;
pub use build::process_file;
pub use log::Level;
pub use log::Log;
pub use session::Session;
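The module reordering above is load-bearing: in Rust 2015, a macro_rules! macro is only visible to modules declared after the #[macro_use] module that defines it, which is why the macro-defining modules get hoisted ahead of the modules that invoke them. A minimal sketch of the constraint:

// Rust 2015 macro visibility is textual: log! is only in scope for
// modules declared after this point.
#[macro_use]
mod log;    // defines log!
mod build;  // can now call log!(session, ...)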

lalrpop/src/log.rs (new file, 43 lines)

@@ -0,0 +1,43 @@
#[derive(Clone)]
pub struct Log {
level: Level,
}
#[derive(Clone, PartialOrd, Ord, PartialEq, Eq)]
pub enum Level {
/// No updates unless an error arises.
Taciturn,
/// Timing and minimal progress.
Informative,
/// More details, but still stuff an end-user is likely to understand.
Verbose,
/// Everything you could ever want and then some more.
Debug,
}
impl Log {
pub fn new(level: Level) -> Log {
Log { level: level }
}
pub fn set_level(&mut self, level: Level) {
self.level = level;
}
pub fn log<M>(&self, level: Level, message: M)
where M: FnOnce() -> String
{
if self.level >= level {
println!("{}", message());
}
}
}
macro_rules! log {
($session:expr, $level:ident, $($args:expr),*) => {
$session.log(::log::Level::$level, || ::std::fmt::format(format_args!($($args),*)))
}
}
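One thing worth noting about this file: log! passes its message as a closure, so the string is only formatted when the configured level is at least the requested one (the derived Ord makes Taciturn < Informative < Verbose < Debug). As an illustration, a call like the one added in the LR(1) builder below expands roughly to:

// Approximate expansion of
//     log!(self.session, Debug, "on {:?} to state {:?}", symbol, next_state)
// The closure runs, and the string is formatted, only when the session's
// level is >= Debug.
self.session.log(::log::Level::Debug,
                 || ::std::fmt::format(format_args!("on {:?} to state {:?}",
                                                    symbol, next_state)));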


@@ -1,6 +1,7 @@
//! Core LR(1) state construction algorithm.
use kernel_set;
use session::Session;
use grammar::repr::*;
use lr1::first;
use lr1::{Action, Lookahead, Item, Items, State, StateIndex, TableConstructionError};
@@ -9,23 +10,26 @@ use util::{map, Multimap, Set};
#[cfg(test)] mod test;
pub fn build_lr1_states<'grammar>(grammar: &'grammar Grammar,
pub fn build_lr1_states<'grammar>(session: &Session,
grammar: &'grammar Grammar,
start: NonterminalString)
-> Result<Vec<State<'grammar>>,
TableConstructionError<'grammar>>
{
let lr1 = LR1::new(grammar);
let lr1 = LR1::new(session, grammar);
lr1.build_states(start)
}
struct LR1<'grammar> {
struct LR1<'session, 'grammar> {
session: &'session Session,
grammar: &'grammar Grammar,
first_sets: first::FirstSets,
}
impl<'grammar> LR1<'grammar> {
fn new(grammar: &'grammar Grammar) -> LR1 {
impl<'session, 'grammar> LR1<'session, 'grammar> {
fn new(session: &'session Session, grammar: &'grammar Grammar) -> Self {
LR1 {
session: session,
grammar: grammar,
first_sets: first::FirstSets::new(grammar),
}
@@ -44,6 +48,9 @@ impl<'grammar> LR1<'grammar> {
while let Some(items) = kernel_set.next() {
let index = StateIndex(states.len());
log!(self.session, Debug, "Building state {:?} with {} items",
index, items.vec.len());
let mut this_state = State { index: index, items: items.clone(),
tokens: map(), gotos: map() };
@@ -58,6 +65,8 @@ impl<'grammar> LR1<'grammar> {
for (symbol, items) in transitions.into_iter() {
let items = self.transitive_closure(items);
let next_state = kernel_set.add_state(items);
log!(self.session, Debug, "on {:?} to state {:?}",
symbol, next_state);
match symbol {
Symbol::Terminal(s) => {


@@ -3,6 +3,7 @@
use itertools::Itertools;
use lr1::core;
use grammar::repr::*;
use session::Session;
use std::rc::Rc;
use util::{map, Map};
use util::map::Entry;
@@ -28,12 +29,13 @@ struct LALR1State<'grammar> {
gotos: Map<NonterminalString, StateIndex>,
}
pub fn lalr_states<'grammar>(grammar: &'grammar Grammar,
pub fn lalr_states<'grammar>(session: &Session,
grammar: &'grammar Grammar,
start: NonterminalString)
-> Result<Vec<State<'grammar>>, TableConstructionError<'grammar>>
{
// First build the LR(1) states
let lr_states = try!(core::build_lr1_states(grammar, start));
let lr_states = try!(core::build_lr1_states(session, grammar, start));
collapse_to_lalr_states(&lr_states)
}


@@ -1,6 +1,7 @@
//! Naive LR(1) generation algorithm.
use kernel_set;
use session::Session;
use grammar::repr::*;
use std::fmt::{Debug, Formatter, Error};
use std::rc::Rc;
@@ -67,13 +68,14 @@ pub struct TableConstructionError<'grammar> {
conflict: Action<'grammar>,
}
pub fn build_states<'grammar>(grammar: &'grammar Grammar,
pub fn build_states<'grammar>(session: &Session,
grammar: &'grammar Grammar,
start: NonterminalString)
-> Result<Vec<State<'grammar>>, TableConstructionError<'grammar>>
{
match grammar.algorithm {
Algorithm::LR1 => core::build_lr1_states(grammar, start),
Algorithm::LALR1 => la0::lalr_states(grammar, start),
Algorithm::LR1 => core::build_lr1_states(session, grammar, start),
Algorithm::LALR1 => la0::lalr_states(session, grammar, start),
}
}


@@ -6,6 +6,7 @@
use grammar::parse_tree as pt;
use grammar::repr as r;
use session::Session;
pub type NormResult<T> = Result<T, NormError>;
@@ -24,8 +25,8 @@ macro_rules! return_err {
}
}
pub fn normalize(grammar: pt::Grammar) -> NormResult<r::Grammar> {
normalize_helper(grammar, true)
pub fn normalize(session: &Session, grammar: pt::Grammar) -> NormResult<r::Grammar> {
normalize_helper(session, grammar, true)
}
/// for unit tests, it is convenient to skip the validation step
@@ -34,18 +35,36 @@ pub fn normalize_without_validating(grammar: pt::Grammar) -> NormResult<r::Gramm
normalize_helper(grammar, false)
}
fn normalize_helper(grammar: pt::Grammar, validate: bool) -> NormResult<r::Grammar> {
let grammar = try!(lower_helper(grammar, validate));
inline::inline(grammar)
macro_rules! profile {
($session:expr, $phase_name:expr, $action:expr) => {
{
log!($session, Verbose, "Phase `{}` begun", $phase_name);
let time_stamp = $crate::time::precise_time_s();
let result = $action;
log!($session, Verbose, "Phase `{}` completed in {} seconds",
$phase_name, $crate::time::precise_time_s() - time_stamp);
result
}
}
}
fn lower_helper(grammar: pt::Grammar, validate: bool) -> NormResult<r::Grammar> {
if validate { try!(prevalidate::validate(&grammar)); }
let grammar = try!(resolve::resolve(grammar));
let grammar = try!(macro_expand::expand_macros(grammar));
let grammar = try!(token_check::validate(grammar));
let types = try!(tyinfer::infer_types(&grammar));
lower::lower(grammar, types)
fn normalize_helper(session: &Session,
grammar: pt::Grammar,
validate: bool)
-> NormResult<r::Grammar> {
let grammar = try!(lower_helper(session, grammar, validate));
let grammar = profile!(session, "Inlining", try!(inline::inline(grammar)));
Ok(grammar)
}
fn lower_helper(session: &Session, grammar: pt::Grammar, validate: bool) -> NormResult<r::Grammar> {
profile!(session, "Grammar validation", if validate { try!(prevalidate::validate(&grammar)); });
let grammar = profile!(session, "Grammar resolution", try!(resolve::resolve(grammar)));
let grammar = profile!(session, "Macro expansion", try!(macro_expand::expand_macros(grammar)));
let grammar = profile!(session, "Token check", try!(token_check::validate(grammar)));
let types = profile!(session, "Infer types", try!(tyinfer::infer_types(&grammar)));
let grammar = profile!(session, "Lowering", try!(lower::lower(grammar, types)));
Ok(grammar)
}
// These are executed *IN ORDER*:
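The profile! macro defined above is what pulls in the new time = "0.1" dependency: it brackets a phase with Verbose-level log lines and measures wall-clock time via time::precise_time_s(). As an illustration (with $crate:: resolving to the lalrpop crate root), one of the phases expands roughly to:

// Approximate expansion of
//     let grammar = profile!(session, "Macro expansion",
//                            try!(macro_expand::expand_macros(grammar)));
let grammar = {
    log!(session, Verbose, "Phase `{}` begun", "Macro expansion");
    let time_stamp = ::time::precise_time_s();
    let result = try!(macro_expand::expand_macros(grammar));
    log!(session, Verbose, "Phase `{}` completed in {} seconds",
         "Macro expansion", ::time::precise_time_s() - time_stamp);
    result
};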

lalrpop/src/session.rs (new file, 34 lines)

@@ -0,0 +1,34 @@
use log::{Log, Level};
#[derive(Clone)]
pub struct Session {
log: Log,
force_build: bool,
}
impl Session {
pub fn new() -> Session {
Session {
log: Log::new(Level::Informative),
force_build: false,
}
}
pub fn set_force_build(&mut self) {
self.force_build = true;
}
pub fn set_log_level(&mut self, level: Level) {
self.log.set_level(level);
}
pub fn force_build(&self) -> bool {
self.force_build
}
pub fn log<M>(&self, level: Level, message: M)
where M: FnOnce() -> String
{
self.log.log(level, message)
}
}
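Since Session::log simply delegates to the inner Log, the log! macro (which only requires a .log(Level, FnOnce() -> String) method on its first argument) works with either type. A minimal crate-internal sketch; the state index and item count are made-up example values:

// Crate-internal sketch (log! is not exported from the crate).
let mut session = Session::new();       // defaults to Level::Informative
session.set_log_level(Level::Debug);    // also show per-state LR(1) detail
log!(session, Debug, "Building state {:?} with {} items", 7, 12);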