diff --git a/build.rs b/build.rs index cdd1b4ddd..350c41793 100644 --- a/build.rs +++ b/build.rs @@ -27,7 +27,7 @@ fn main() { add_dependency(exh_file); let exh_out = Path::new(&out_dir).join("choices.rs"); - telamon_gen::process_file(&Path::new(exh_file), &exh_out, cfg!(feature="format_exh")); + telamon_gen::process_file(&Path::new(exh_file), &exh_out, cfg!(feature="format_exh")).unwrap(); if cfg!(feature="cuda") { compile_link_cuda(); } if cfg!(feature = "mppa") { diff --git a/examples/common.rs b/examples/common.rs deleted file mode 100644 index 1336e1bc3..000000000 --- a/examples/common.rs +++ /dev/null @@ -1,35 +0,0 @@ -/// Function shared among examples. -use itertools::Itertools; -use telamon::device::Context; -use telamon::{explorer, ir}; -use telamon::search_space::SearchSpace; -use std; - -/// Generates the code for the best candidate in the search space. -pub fn gen_best<'a>(search_space: Vec, - context: &'a Context, - out: &str) { - let conf = explorer::Config::read(); - let begin_time = std::time::Instant::now(); - let best_opt = explorer::find_best(&conf, context, search_space); - let duration = std::time::Instant::now() - begin_time; - warn!("Search completed in {}s", duration.as_secs()); - match best_opt { - Some(best) => { - let mut file = std::fs::File::create(out).unwrap(); - context.device().gen_code(&best, &mut file) - } - None => println!("Did not find any well suited candidate before timeout"), - } -} - -/// Generate a name for the output file. 
-pub fn file_name(name: &str, - _: ir::Type, - sizes: &[i32], - instantiated: bool) -> String { - const PATH: &str = "examples/out/"; - std::fs::create_dir_all(PATH).unwrap(); - let sizes = sizes.iter().format_with("", |i, f| f(&format_args!("_{}", i))); - format!("{}{}_{}{}.c", PATH, name, instantiated, sizes) -} diff --git a/examples/sgemm_low.rs b/examples/sgemm_low.rs index 46abecce7..c78e12780 100644 --- a/examples/sgemm_low.rs +++ b/examples/sgemm_low.rs @@ -5,7 +5,36 @@ extern crate itertools; extern crate log; extern crate rayon; -mod common; +mod common { + /// Generates the code for the best candidate in the search space. + pub fn gen_best<'a>(search_space: Vec, + context: &'a Context, + out: &str) { + let conf = explorer::Config::read(); + let begin_time = std::time::Instant::now(); + let best_opt = explorer::find_best(&conf, context, search_space); + let duration = std::time::Instant::now() - begin_time; + warn!("Search completed in {}s", duration.as_secs()); + match best_opt { + Some(best) => { + let mut file = std::fs::File::create(out).unwrap(); + context.device().gen_code(&best, &mut file) + } + None => println!("Did not find any well suited candidate before timeout"), + } + } + + /// Generate a name for the output file. 
+ pub fn file_name(name: &str, + _: ir::Type, + sizes: &[i32], + instantiated: bool) -> String { + const PATH: &str = "examples/out/"; + std::fs::create_dir_all(PATH).unwrap(); + let sizes = sizes.iter().format_with("", |i, f| f(&format_args!("_{}", i))); + format!("{}{}_{}{}.c", PATH, name, instantiated, sizes) + } +} #[allow(unused_imports)] use telamon::{explorer, helper, ir}; diff --git a/telamon-gen/Cargo.toml b/telamon-gen/Cargo.toml index 5642ce826..3ab05cf2b 100644 --- a/telamon-gen/Cargo.toml +++ b/telamon-gen/Cargo.toml @@ -11,6 +11,13 @@ lex = [] doc = false name = "cli_gen" +[[bench]] +name = "lexer" +harness = false + +[dev-dependencies] +criterion = "0.2" + [build-dependencies] lalrpop = "0.14" cc = "1.0.12" diff --git a/telamon-gen/benches/lexer.rs b/telamon-gen/benches/lexer.rs new file mode 100644 index 000000000..91c9ad4ce --- /dev/null +++ b/telamon-gen/benches/lexer.rs @@ -0,0 +1,29 @@ +#[macro_use] +extern crate criterion; +extern crate telamon_gen; + +use criterion::Criterion; + +use telamon_gen::lexer; + +use std::fs; +use std::ffi::OsStr; + +fn criterion_benchmark(c: &mut Criterion) { + let entries = fs::read_dir("cc_tests/src/").unwrap(); + for entry in entries { + if let Ok(entry) = entry { + if entry.path().extension().eq(&Some(OsStr::new("exh"))) { + let path = entry.path(); + let mut input = fs::File::open(&path).unwrap(); + let mut name = String::from("lexer "); + name.push_str(path.file_stem().unwrap().to_str().unwrap()); + + c.bench_function(&name, move |b| b.iter(|| lexer::Lexer::new(&mut input))); + } + } + } +} + +criterion_group!(benches, criterion_benchmark); +criterion_main!(benches); diff --git a/telamon-gen/build.rs b/telamon-gen/build.rs index 3c0d29088..9b8e72ca4 100644 --- a/telamon-gen/build.rs +++ b/telamon-gen/build.rs @@ -6,22 +6,26 @@ extern crate lalrpop; fn add_dependency(dep: &str) { println!("cargo:rerun-if-changed={}", dep); } fn main() { - // Compile the lexer.(`LEX="flex" cargo build --features "lex"`) + // 
Regenerate the lexer.(`LEX="flex" cargo build --features "lex"`) #[cfg(feature = "lex")] { use std::{env,process::Command}; + // Generate the lexer . + add_dependency("src/poc.l"); let bin = env::var("LEX").unwrap_or(String::from("flex")); Command::new(bin) - .arg("-oexh.c") + .arg("-osrc/exh.c") .arg("src/exh.l") .status() .expect("failed to execute Flex's process"); } + // Compile the lexer . cc::Build::new() - .file("exh.c") + .file("src/exh.c") + .include("src") .flag("-Wno-unused-parameter") .flag("-Wno-unused-variable") .flag_if_supported("-Wno-unused-function") diff --git a/telamon-gen/cc_tests/build.rs b/telamon-gen/cc_tests/build.rs index b15331bb8..69664118d 100644 --- a/telamon-gen/cc_tests/build.rs +++ b/telamon-gen/cc_tests/build.rs @@ -14,6 +14,6 @@ fn main() { let file_name = src_path.file_name().unwrap(); println!("cargo:rerun-if-changed={}", file_name.to_str().unwrap()); let dst_path = Path::new(&out_dir).join(&file_name).with_extension("rs"); - telamon_gen::process_file(&src_path, &dst_path, !cfg!(feature="noformat_exh")); + telamon_gen::process_file(&src_path, &dst_path, !cfg!(feature="noformat_exh")).unwrap(); } } diff --git a/telamon-gen/src/bin/cli_gen.rs b/telamon-gen/src/bin/cli_gen.rs index b79abb9ea..8d1eb450c 100644 --- a/telamon-gen/src/bin/cli_gen.rs +++ b/telamon-gen/src/bin/cli_gen.rs @@ -2,7 +2,18 @@ extern crate telamon_gen; extern crate env_logger; +use std::process; +use std::path::Path; + fn main() { env_logger::init(); - telamon_gen::process(&mut std::io::stdin(), &mut std::io::stdout(), true); + if let Err(process_error) = telamon_gen::process( + &mut std::io::stdin(), + &mut std::io::stdout(), + true, + &Path::new("exh") + ) { + eprintln!("error: {}", process_error); + process::exit(-1); + } } diff --git a/telamon-gen/src/error.rs b/telamon-gen/src/error.rs new file mode 100644 index 000000000..eda7919ae --- /dev/null +++ b/telamon-gen/src/error.rs @@ -0,0 +1,103 @@ +use super::lexer; +use super::lalrpop_util::*; + +use 
std::{path, fmt};
+use std::error::Error;
+
+#[derive(Debug)]
+pub enum Cause {
+    /// Lalrpop
+    Parse(ParseError),
+    /// Will be replaced by field for Ast [...]
+    Other,
+}
+
+#[derive(Debug)]
+pub struct ProcessError<'a> {
+    /// Display of filename.
+    pub path: path::Display<'a>,
+    /// Position of lexeme.
+    pub span: Option,
+    cause: Cause,
+}
+
+impl <'a>From<(path::Display<'a>,
+               ParseError
+              )> for ProcessError<'a> {
+    fn from((path, parse): (path::Display<'a>,
+                            ParseError
+                           )) -> Self {
+        match parse {
+            ParseError::InvalidToken { location }
+                => ProcessError {
+                    path: path,
+                    span: Some(lexer::Span { leg: location, ..Default::default() }),
+                    cause: Cause::Parse(parse),
+                },
+            ParseError::UnrecognizedToken { token: None, .. }
+                => ProcessError {
+                    path: path,
+                    span: None,
+                    cause: Cause::Parse(parse),
+                },
+            ParseError::UnrecognizedToken { token: Some((l, .., e)), .. } |
+            ParseError::ExtraToken { token: (l, .., e) } |
+            ParseError::User { error: lexer::LexicalError::UnexpectedToken(l, .., e) } |
+            ParseError::User { error: lexer::LexicalError::InvalidToken(l, .., e) }
+                => ProcessError {
+                    path: path,
+                    span: Some(lexer::Span { leg: l, end: Some(e) }),
+                    cause: Cause::Parse(parse),
+                },
+        }
+    }
+}
+
+impl <'a> fmt::Display for ProcessError<'a> {
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        match self {
+            ProcessError { path, span, cause: Cause::Parse(
+                ParseError::UnrecognizedToken {
+                    token: Some((_, ref token, _)), .. 
+ }), ..} | + ProcessError { path, span, cause: Cause::Parse(ParseError::ExtraToken { + token: (_, ref token, _) + }), ..} | + ProcessError { path, span, cause: Cause::Parse( + ParseError::User { + error: lexer::LexicalError::UnexpectedToken(_, ref token, _) + }), ..} | + ProcessError { path, span, cause: Cause::Parse( + ParseError::User { + error: lexer::LexicalError::InvalidToken(_, ref token, _) + }), ..} => { + if let Some(span) = span { + write!(f, "{}, {} -> {}", token, span, path) + } else { + write!(f, "{} -> {}", token, path) + } + }, + _ => Ok(()), + } + } +} + +impl <'a>Error for ProcessError<'a> { + fn description(&self) -> &str { + "Process error" + } + + fn cause(&self) -> Option<&Error> { + if let Cause::Parse(ref parse) = self.cause { + parse.cause() + } else { + None + } + } +} diff --git a/telamon-gen/exh.c b/telamon-gen/src/exh.c similarity index 82% rename from telamon-gen/exh.c rename to telamon-gen/src/exh.c index 530b75460..a6d680fd0 100644 --- a/telamon-gen/exh.c +++ b/telamon-gen/src/exh.c @@ -1,6 +1,6 @@ -#line 2 "exh.c" +#line 2 "src/exh.c" -#line 4 "exh.c" +#line 4 "src/exh.c" #define YY_INT_ALIGNED short int @@ -180,8 +180,27 @@ typedef size_t yy_size_t; #define EOB_ACT_END_OF_FILE 1 #define EOB_ACT_LAST_MATCH 2 - #define YY_LESS_LINENO(n) - #define YY_LINENO_REWIND_TO(ptr) + /* Note: We specifically omit the test for yy_rule_can_match_eol because it requires + * access to the local variable yy_act. Since yyless() is a macro, it would break + * existing scanners that call yyless() from OUTSIDE yylex. + * One obvious solution it to make yy_act a global. I tried that, and saw + * a 5% performance hit in a non-yylineno scanner, because yy_act is + * normally declared as a register variable-- so it is not worth it. 
+ */ + #define YY_LESS_LINENO(n) \ + do { \ + int yyl;\ + for ( yyl = n; yyl < yyleng; ++yyl )\ + if ( yytext[yyl] == '\n' )\ + --yylineno;\ + }while(0) + #define YY_LINENO_REWIND_TO(dst) \ + do {\ + const char *p;\ + for ( p = yy_cp-1; p >= (dst); --p)\ + if ( *p == '\n' )\ + --yylineno;\ + }while(0) /* Return all but the first "n" matched characters back to the input stream. */ #define yyless(n) \ @@ -351,8 +370,8 @@ static void yy_fatal_error (yyconst char msg[] ,yyscan_t yyscanner ); *yy_cp = '\0'; \ yyg->yy_c_buf_p = yy_cp; -#define YY_NUM_RULES 68 -#define YY_END_OF_BUFFER 69 +#define YY_NUM_RULES 70 +#define YY_END_OF_BUFFER 71 /* This struct is not used in this scanner, but its presence is necessary. */ struct yy_trans_info @@ -362,34 +381,34 @@ struct yy_trans_info }; static yyconst flex_int16_t yy_accept[256] = { 0, - 0, 0, 0, 0, 6, 6, 69, 67, 7, 7, - 67, 67, 67, 67, 45, 46, 44, 67, 61, 43, - 51, 56, 50, 65, 64, 64, 64, 64, 64, 64, - 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, - 64, 64, 47, 3, 68, 3, 6, 5, 7, 55, - 0, 63, 62, 49, 60, 1, 7, 53, 54, 52, - 65, 66, 64, 64, 64, 64, 64, 64, 64, 64, - 64, 64, 64, 64, 64, 64, 13, 14, 64, 64, - 64, 64, 37, 64, 64, 64, 64, 64, 64, 64, - 64, 48, 2, 6, 7, 4, 64, 64, 64, 64, - - 64, 64, 64, 57, 64, 64, 64, 64, 64, 64, - 64, 64, 16, 64, 15, 64, 64, 64, 36, 64, - 19, 64, 64, 64, 64, 64, 64, 7, 64, 64, - 64, 25, 64, 64, 64, 11, 64, 64, 64, 23, - 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, - 64, 64, 41, 64, 64, 21, 64, 8, 64, 64, - 64, 64, 42, 64, 64, 64, 64, 64, 64, 64, - 64, 64, 64, 64, 64, 64, 64, 20, 64, 64, - 64, 64, 10, 64, 12, 64, 64, 64, 64, 64, - 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, - - 64, 64, 9, 64, 64, 64, 28, 64, 64, 64, - 64, 64, 64, 17, 33, 64, 64, 22, 64, 64, - 64, 39, 64, 64, 24, 64, 64, 31, 27, 40, - 18, 38, 64, 64, 64, 64, 64, 30, 64, 26, - 58, 64, 34, 64, 64, 64, 32, 64, 64, 29, - 64, 64, 59, 35, 0 + 0, 0, 0, 0, 7, 7, 71, 69, 9, 8, + 69, 69, 69, 69, 47, 48, 46, 69, 63, 45, + 53, 58, 52, 67, 66, 66, 66, 66, 
66, 66, + 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, + 66, 66, 49, 4, 2, 4, 7, 6, 9, 57, + 0, 65, 64, 51, 62, 1, 9, 55, 56, 54, + 67, 68, 66, 66, 66, 66, 66, 66, 66, 66, + 66, 66, 66, 66, 66, 66, 15, 16, 66, 66, + 66, 66, 39, 66, 66, 66, 66, 66, 66, 66, + 66, 50, 3, 7, 9, 5, 66, 66, 66, 66, + + 66, 66, 66, 59, 66, 66, 66, 66, 66, 66, + 66, 66, 18, 66, 17, 66, 66, 66, 38, 66, + 21, 66, 66, 66, 66, 66, 66, 9, 66, 66, + 66, 27, 66, 66, 66, 13, 66, 66, 66, 25, + 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, + 66, 66, 43, 66, 66, 23, 66, 10, 66, 66, + 66, 66, 44, 66, 66, 66, 66, 66, 66, 66, + 66, 66, 66, 66, 66, 66, 66, 22, 66, 66, + 66, 66, 12, 66, 14, 66, 66, 66, 66, 66, + 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, + + 66, 66, 11, 66, 66, 66, 30, 66, 66, 66, + 66, 66, 66, 19, 35, 66, 66, 24, 66, 66, + 66, 41, 66, 66, 26, 66, 66, 33, 29, 42, + 20, 40, 66, 66, 66, 66, 66, 32, 66, 28, + 60, 66, 36, 66, 66, 66, 34, 66, 66, 31, + 66, 66, 61, 37, 0 } ; static yyconst flex_int32_t yy_ec[256] = @@ -435,35 +454,35 @@ static yyconst flex_int32_t yy_meta[47] = static yyconst flex_int16_t yy_base[265] = { 0, - 0, 0, 44, 45, 285, 284, 286, 289, 47, 49, - 268, 279, 0, 276, 289, 289, 289, 264, 43, 289, - 264, 263, 262, 43, 34, 257, 242, 35, 242, 38, - 0, 254, 41, 233, 36, 247, 231, 246, 42, 232, - 248, 240, 221, 289, 289, 253, 0, 289, 67, 289, - 260, 289, 0, 289, 289, 289, 251, 289, 289, 289, - 58, 0, 0, 239, 233, 221, 221, 218, 232, 218, - 50, 224, 217, 219, 221, 232, 211, 0, 225, 217, - 205, 207, 0, 211, 42, 205, 60, 211, 56, 54, - 218, 289, 289, 0, 0, 289, 222, 220, 211, 214, - - 204, 208, 206, 0, 202, 195, 212, 199, 205, 61, - 205, 56, 0, 209, 0, 188, 186, 201, 0, 186, - 0, 191, 196, 197, 180, 200, 185, 0, 178, 178, - 177, 0, 175, 180, 178, 0, 187, 179, 190, 0, - 184, 163, 169, 186, 184, 169, 174, 173, 163, 175, - 174, 171, 0, 172, 160, 0, 160, 0, 149, 168, - 167, 162, 0, 158, 150, 148, 151, 152, 63, 145, - 162, 158, 144, 142, 140, 139, 153, 0, 139, 156, - 142, 136, 0, 139, 0, 131, 131, 
145, 148, 143, - 122, 131, 135, 130, 138, 137, 126, 122, 121, 133, - - 118, 123, 0, 115, 118, 128, 0, 120, 111, 114, - 111, 109, 107, 107, 0, 119, 115, 0, 117, 117, - 116, 0, 115, 101, 0, 98, 112, 0, 0, 0, - 0, 0, 113, 106, 94, 93, 94, 0, 104, 0, - 0, 81, 0, 83, 74, 64, 0, 71, 74, 0, - 75, 56, 0, 0, 289, 103, 107, 111, 113, 115, - 92, 119, 123, 127 + 0, 0, 44, 45, 281, 280, 282, 285, 279, 285, + 263, 274, 0, 271, 285, 285, 285, 259, 39, 285, + 259, 258, 257, 37, 26, 252, 237, 34, 237, 32, + 0, 249, 37, 228, 37, 242, 226, 241, 39, 227, + 243, 235, 216, 285, 285, 248, 0, 285, 258, 285, + 254, 285, 0, 285, 285, 285, 245, 285, 285, 285, + 54, 0, 0, 233, 227, 215, 215, 212, 226, 212, + 41, 218, 211, 213, 215, 226, 205, 0, 219, 211, + 199, 201, 0, 205, 41, 199, 53, 205, 40, 47, + 212, 285, 285, 0, 0, 285, 216, 214, 205, 208, + + 198, 202, 200, 0, 196, 189, 206, 193, 199, 60, + 199, 55, 0, 203, 0, 182, 180, 195, 0, 180, + 0, 185, 190, 191, 174, 194, 179, 0, 172, 172, + 171, 0, 169, 174, 172, 0, 181, 173, 184, 0, + 178, 157, 163, 180, 178, 163, 168, 167, 157, 169, + 168, 165, 0, 166, 154, 0, 154, 0, 143, 162, + 161, 156, 0, 152, 144, 142, 145, 146, 62, 139, + 156, 152, 138, 136, 134, 133, 147, 0, 133, 150, + 136, 130, 0, 133, 0, 125, 125, 139, 142, 137, + 116, 125, 129, 124, 132, 131, 120, 116, 115, 127, + + 112, 117, 0, 109, 112, 122, 0, 114, 105, 108, + 105, 103, 101, 101, 0, 113, 109, 0, 111, 111, + 110, 0, 109, 95, 0, 92, 106, 0, 0, 0, + 0, 0, 105, 95, 80, 72, 63, 0, 74, 0, + 0, 54, 0, 59, 57, 57, 0, 65, 67, 0, + 68, 50, 0, 0, 285, 102, 106, 110, 112, 114, + 63, 118, 122, 126 } ; static yyconst flex_int16_t yy_def[265] = @@ -499,88 +518,96 @@ static yyconst flex_int16_t yy_def[265] = 255, 255, 255, 255 } ; -static yyconst flex_int16_t yy_nxt[336] = +static yyconst flex_int16_t yy_nxt[332] = { 0, 8, 9, 10, 11, 12, 13, 14, 15, 16, 8, 17, 18, 19, 8, 20, 21, 22, 23, 24, 8, 25, 26, 27, 28, 29, 30, 31, 32, 33, 31, 31, 31, 34, 35, 36, 31, 37, 38, 39, 40, - 31, 41, 42, 31, 31, 
43, 45, 45, 49, 49, - 49, 49, 56, 46, 46, 57, 61, 64, 72, 69, - 81, 61, 61, 70, 76, 65, 86, 66, 49, 49, - 82, 61, 73, 104, 77, 74, 61, 61, 117, 78, - 79, 120, 87, 118, 123, 125, 88, 141, 144, 190, - 105, 126, 121, 145, 63, 254, 124, 253, 252, 251, - - 142, 250, 191, 44, 44, 44, 44, 47, 47, 47, - 47, 51, 249, 51, 51, 53, 53, 62, 62, 94, - 248, 94, 94, 95, 247, 95, 95, 128, 246, 128, - 128, 245, 244, 243, 242, 241, 240, 239, 238, 237, - 236, 235, 234, 233, 232, 231, 230, 229, 228, 227, - 226, 225, 224, 223, 222, 221, 220, 219, 218, 217, - 216, 215, 214, 213, 212, 211, 210, 209, 208, 207, - 206, 205, 204, 203, 202, 201, 200, 199, 198, 197, - 196, 195, 194, 193, 192, 189, 188, 187, 186, 185, - 184, 183, 182, 181, 180, 179, 178, 177, 176, 175, - - 174, 173, 172, 171, 170, 169, 168, 167, 166, 165, - 164, 163, 162, 161, 160, 159, 158, 157, 156, 155, - 154, 153, 152, 151, 150, 149, 148, 147, 146, 143, - 140, 139, 138, 137, 136, 135, 134, 133, 132, 131, - 130, 129, 127, 122, 119, 116, 115, 114, 113, 112, - 111, 110, 109, 108, 107, 106, 103, 102, 101, 100, - 99, 98, 97, 96, 52, 93, 92, 91, 90, 89, - 85, 84, 83, 80, 75, 71, 68, 67, 60, 59, - 58, 55, 54, 52, 50, 255, 48, 48, 7, 255, + 31, 41, 42, 31, 31, 43, 45, 45, 56, 64, + 61, 57, 72, 46, 46, 61, 61, 65, 69, 66, + 76, 81, 70, 86, 104, 63, 73, 61, 123, 74, + 77, 82, 61, 61, 120, 78, 79, 117, 125, 87, + 124, 105, 118, 88, 126, 121, 141, 144, 190, 254, + 253, 252, 145, 251, 250, 249, 248, 247, 246, 142, + + 245, 191, 44, 44, 44, 44, 47, 47, 47, 47, + 51, 244, 51, 51, 53, 53, 62, 62, 94, 243, + 94, 94, 95, 242, 95, 95, 128, 241, 128, 128, + 240, 239, 238, 237, 236, 235, 234, 233, 232, 231, + 230, 229, 228, 227, 226, 225, 224, 223, 222, 221, + 220, 219, 218, 217, 216, 215, 214, 213, 212, 211, + 210, 209, 208, 207, 206, 205, 204, 203, 202, 201, + 200, 199, 198, 197, 196, 195, 194, 193, 192, 189, + 188, 187, 186, 185, 184, 183, 182, 181, 180, 179, + 178, 177, 176, 175, 174, 173, 172, 171, 170, 169, + + 168, 167, 166, 165, 164, 
163, 162, 161, 160, 159, + 158, 157, 156, 155, 154, 153, 152, 151, 150, 149, + 148, 147, 146, 143, 140, 139, 138, 137, 136, 135, + 134, 133, 132, 131, 130, 129, 127, 122, 119, 116, + 115, 114, 113, 112, 111, 110, 109, 108, 107, 106, + 103, 102, 101, 100, 99, 98, 97, 96, 52, 49, + 93, 92, 91, 90, 89, 85, 84, 83, 80, 75, + 71, 68, 67, 60, 59, 58, 55, 54, 52, 50, + 49, 255, 48, 48, 7, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, - 255, 255, 255, 255, 255 + 255 } ; -static yyconst flex_int16_t yy_chk[336] = +static yyconst flex_int16_t yy_chk[332] = { 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, - 1, 1, 1, 1, 1, 1, 3, 4, 9, 9, - 10, 10, 19, 3, 4, 19, 24, 25, 30, 28, - 35, 24, 24, 28, 33, 25, 39, 25, 49, 49, - 35, 61, 30, 71, 33, 30, 61, 61, 85, 33, - 33, 87, 39, 85, 89, 90, 39, 110, 112, 169, - 71, 90, 87, 112, 261, 252, 89, 251, 249, 248, - - 110, 246, 169, 256, 256, 256, 256, 257, 257, 257, - 257, 258, 245, 258, 258, 259, 259, 260, 260, 262, - 244, 262, 262, 263, 242, 263, 263, 264, 239, 264, - 264, 237, 236, 235, 234, 233, 227, 226, 224, 223, - 221, 220, 219, 217, 216, 214, 213, 212, 211, 210, - 209, 208, 206, 205, 204, 202, 201, 200, 199, 198, - 197, 196, 195, 194, 193, 192, 191, 190, 189, 188, - 187, 186, 184, 182, 181, 180, 179, 177, 176, 175, - 174, 173, 172, 171, 170, 168, 167, 166, 165, 164, - 162, 161, 160, 159, 157, 155, 154, 152, 151, 150, - - 149, 148, 147, 146, 145, 144, 143, 142, 141, 139, - 138, 137, 135, 134, 133, 131, 130, 129, 127, 126, - 125, 124, 123, 122, 120, 118, 117, 116, 114, 111, - 109, 108, 107, 106, 105, 103, 102, 101, 100, 99, - 98, 97, 91, 88, 86, 84, 82, 81, 80, 79, - 77, 76, 75, 74, 73, 72, 70, 69, 68, 67, - 66, 65, 64, 57, 51, 46, 43, 42, 41, 40, - 38, 37, 36, 34, 32, 29, 27, 26, 
23, 22, - 21, 18, 14, 12, 11, 7, 6, 5, 255, 255, + 1, 1, 1, 1, 1, 1, 3, 4, 19, 25, + 24, 19, 30, 3, 4, 24, 24, 25, 28, 25, + 33, 35, 28, 39, 71, 261, 30, 61, 89, 30, + 33, 35, 61, 61, 87, 33, 33, 85, 90, 39, + 89, 71, 85, 39, 90, 87, 110, 112, 169, 252, + 251, 249, 112, 248, 246, 245, 244, 242, 239, 110, + + 237, 169, 256, 256, 256, 256, 257, 257, 257, 257, + 258, 236, 258, 258, 259, 259, 260, 260, 262, 235, + 262, 262, 263, 234, 263, 263, 264, 233, 264, 264, + 227, 226, 224, 223, 221, 220, 219, 217, 216, 214, + 213, 212, 211, 210, 209, 208, 206, 205, 204, 202, + 201, 200, 199, 198, 197, 196, 195, 194, 193, 192, + 191, 190, 189, 188, 187, 186, 184, 182, 181, 180, + 179, 177, 176, 175, 174, 173, 172, 171, 170, 168, + 167, 166, 165, 164, 162, 161, 160, 159, 157, 155, + 154, 152, 151, 150, 149, 148, 147, 146, 145, 144, + + 143, 142, 141, 139, 138, 137, 135, 134, 133, 131, + 130, 129, 127, 126, 125, 124, 123, 122, 120, 118, + 117, 116, 114, 111, 109, 108, 107, 106, 105, 103, + 102, 101, 100, 99, 98, 97, 91, 88, 86, 84, + 82, 81, 80, 79, 77, 76, 75, 74, 73, 72, + 70, 69, 68, 67, 66, 65, 64, 57, 51, 49, + 46, 43, 42, 41, 40, 38, 37, 36, 34, 32, + 29, 27, 26, 23, 22, 21, 18, 14, 12, 11, + 9, 7, 6, 5, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, - 255, 255, 255, 255, 255 + 255 } ; +/* Table of booleans, true if rule could match eol. */ +static yyconst flex_int32_t yy_rule_can_match_eol[71] = + { 0, +0, 1, 0, 0, 0, 1, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, }; + /* The intent behind this definition is that it'll catch * any uses of REJECT which flex missed. 
*/ @@ -589,103 +616,37 @@ static yyconst flex_int16_t yy_chk[336] = #define YY_MORE_ADJ 0 #define YY_RESTORE_YY_MORE_OFFSET #line 1 "src/exh.l" +/* 19 Reentrant C Scanners § https://westes.github.io/flex/manual/Reentrant.html#Reentrant */ +/* https://westes.github.io/flex/manual/I-get-an-error-about-undefined-yywrap_0028_0029_002e.html#I-get-an-error-about-undefined-yywrap_0028_0029_002e */ +/* number of the current line § https://westes.github.io/flex/manual/Options-Affecting-Scanner-Behavior.html#index-yylineno */ +/* 19.4.6 Extra Data § https://westes.github.io/flex/manual/Extra-Data.html */ + + +#line 14 "src/exh.l" + #include "expression.h" + + typedef struct Pos { + unsigned int line; + unsigned int column; + } Pos; + + typedef int Data; + + // + typedef struct Span { + Pos leg; + Pos end; + Data data; + } Span; + #define YY_USER_ACTION { \ + yyextra.leg = yyextra.end; \ + yyextra.end.line = yylineno; \ + yyextra.end.column += yyleng; \ + } -#line 8 "src/exh.l" - #include - - union yylval { - int val; - } yylval; - - enum token { - VALUEIDENT, - CHOICEIDENT, - VAR, - DOC, - CMPOP, - INVALIDTOKEN, - CODE, - COUNTERKIND, - BOOL, - COUNTERVISIBILITY, - AND, - TRIGGER, - WHEN, - ALIAS, - COUNTER, - DEFINE, - ENUM, - EQUAL, - FORALL, - IN, - IS, - NOT, - REQUIRE, - REQUIRES, - VALUE, - END, - SYMMETRIC, - ANTISYMMETRIC, - ARROW, - COLON, - COMMA, - LPAREN, - RPAREN, - BITOR, - OR, - SETDEFKEY, - SET, - SUBSETOF, - SETIDENT, - BASE, - DISJOINT, - QUOTIENT, - OF, - DIVIDE, - }; - - // Indicates whether a counter sums or adds. - enum counter_kind { - ADD, - MUL, - }; - - // Indicates how a counter exposes how its maximum value. - // The variants are ordered by increasing amount of information available. - enum counter_visibility { - // Only the minimal value is computed and stored. - NOMAX, - // Both the min and max are stored, but only the min is exposed. - HIDDENMAX, - // Both the min and the max value are exposed. 
- FULL, - }; - - enum cmp_op { - LT, - GT, - LEQ, - GEQ, - EQ, - NEQ, - }; - - enum set_def_key { - ITEMTYPE, - IDTYPE, - ITEMGETTER, - IDGETTER, - ITER, - FROMSUPERSET, - PREFIX, - NEWOBJS, - REVERSE, - ADDTOSET, - }; -/* https://github.com/a-haas/Compilation/blob/6fe46a65d64decdb648feba790bb7b1d2980ca0a/yacc/setparser.l */ -/* 6 Patterns § http://westes.github.io/flex/manual/Patterns.html#Patterns */ -#line 689 "exh.c" +/* Patterns documentation: http://westes.github.io/flex/manual/Patterns.html#Patterns */ +#line 650 "src/exh.c" #define INITIAL 0 #define C_COMMENT 1 @@ -699,9 +660,7 @@ static yyconst flex_int16_t yy_chk[336] = #include #endif -#ifndef YY_EXTRA_TYPE -#define YY_EXTRA_TYPE void * -#endif +#define YY_EXTRA_TYPE Span /* Holds the entire state of the reentrant scanner. */ struct yyguts_t @@ -939,10 +898,10 @@ YY_DECL } { -#line 180 "src/exh.l" +#line 115 "src/exh.l" -#line 946 "exh.c" +#line 905 "src/exh.c" while ( 1 ) /* loops until end-of-file is reached */ { @@ -975,7 +934,7 @@ YY_DECL yy_current_state = yy_nxt[yy_base[yy_current_state] + (unsigned int) yy_c]; ++yy_cp; } - while ( yy_base[yy_current_state] != 289 ); + while ( yy_base[yy_current_state] != 285 ); yy_find_action: yy_act = yy_accept[yy_current_state]; @@ -988,6 +947,18 @@ YY_DECL YY_DO_BEFORE_ACTION; + if ( yy_act != YY_END_OF_BUFFER && yy_rule_can_match_eol[yy_act] ) + { + yy_size_t yyl; + for ( yyl = 0; yyl < yyleng; ++yyl ) + if ( yytext[yyl] == '\n' ) + + do{ yylineno++; + yycolumn=0; + }while(0) +; + } + do_action: /* This label is used only to access EOF actions. 
*/ switch ( yy_act ) @@ -1001,353 +972,364 @@ YY_DECL case 1: YY_RULE_SETUP -#line 182 "src/exh.l" +#line 117 "src/exh.l" { BEGIN(C_COMMENT); } YY_BREAK case 2: +/* rule 2 can match eol */ YY_RULE_SETUP -#line 183 "src/exh.l" -{ BEGIN(INITIAL); } +#line 118 "src/exh.l" +{ yyextra.end.line += 1; } YY_BREAK case 3: YY_RULE_SETUP -#line 184 "src/exh.l" -{} +#line 119 "src/exh.l" +{ BEGIN(INITIAL); } YY_BREAK case 4: YY_RULE_SETUP -#line 186 "src/exh.l" -{ BEGIN(LINE_DOC); } +#line 120 "src/exh.l" +{ } YY_BREAK case 5: -/* rule 5 can match eol */ YY_RULE_SETUP -#line 187 "src/exh.l" -{ BEGIN(INITIAL); } +#line 122 "src/exh.l" +{ yyextra.end.column -= 3; BEGIN(LINE_DOC); } YY_BREAK case 6: +/* rule 6 can match eol */ YY_RULE_SETUP -#line 188 "src/exh.l" -{ return DOC; } +#line 123 "src/exh.l" +{ yyextra.end.column = 0; BEGIN(INITIAL); } YY_BREAK case 7: -/* rule 7 can match eol */ YY_RULE_SETUP -#line 190 "src/exh.l" -{} +#line 124 "src/exh.l" +{ yyextra.end.column += 3; return DOC; } YY_BREAK case 8: +/* rule 8 can match eol */ YY_RULE_SETUP -#line 192 "src/exh.l" -{ return ALIAS; } +#line 126 "src/exh.l" +{ yyextra.end.column = 0; } YY_BREAK case 9: YY_RULE_SETUP -#line 193 "src/exh.l" -{ return COUNTER; } +#line 128 "src/exh.l" +{} YY_BREAK case 10: YY_RULE_SETUP -#line 194 "src/exh.l" -{ return DEFINE; } +#line 130 "src/exh.l" +{ return ALIAS; } YY_BREAK case 11: YY_RULE_SETUP -#line 195 "src/exh.l" -{ return ENUM; } +#line 131 "src/exh.l" +{ return COUNTER; } YY_BREAK case 12: YY_RULE_SETUP -#line 196 "src/exh.l" -{ return FORALL; } +#line 132 "src/exh.l" +{ return DEFINE; } YY_BREAK case 13: YY_RULE_SETUP -#line 197 "src/exh.l" -{ return IN; } +#line 133 "src/exh.l" +{ return ENUM; } YY_BREAK case 14: YY_RULE_SETUP -#line 198 "src/exh.l" -{ return IS; } +#line 134 "src/exh.l" +{ return FORALL; } YY_BREAK case 15: YY_RULE_SETUP -#line 199 "src/exh.l" -{ return NOT; } +#line 135 "src/exh.l" +{ return IN; } YY_BREAK case 16: YY_RULE_SETUP -#line 200 "src/exh.l" 
-{yylval.val = MUL; return COUNTERKIND; } +#line 136 "src/exh.l" +{ return IS; } YY_BREAK case 17: YY_RULE_SETUP -#line 201 "src/exh.l" -{ return REQUIRE; } +#line 137 "src/exh.l" +{ return NOT; } YY_BREAK case 18: YY_RULE_SETUP -#line 202 "src/exh.l" -{ return REQUIRES; } +#line 138 "src/exh.l" +{ yyextra.data = MUL; return COUNTERKIND; } YY_BREAK case 19: YY_RULE_SETUP -#line 203 "src/exh.l" -{yylval.val = ADD; return COUNTERKIND; } +#line 139 "src/exh.l" +{ return REQUIRE; } YY_BREAK case 20: YY_RULE_SETUP -#line 204 "src/exh.l" -{ return VALUE; } +#line 140 "src/exh.l" +{ return REQUIRES; } YY_BREAK case 21: YY_RULE_SETUP -#line 205 "src/exh.l" -{ return WHEN; } +#line 141 "src/exh.l" +{ yyextra.data = ADD; return COUNTERKIND; } YY_BREAK case 22: YY_RULE_SETUP -#line 206 "src/exh.l" -{ return TRIGGER; } +#line 142 "src/exh.l" +{ return VALUE; } YY_BREAK case 23: YY_RULE_SETUP -#line 207 "src/exh.l" -{yylval.val = NOMAX; return COUNTERVISIBILITY; } +#line 143 "src/exh.l" +{ return WHEN; } YY_BREAK case 24: YY_RULE_SETUP -#line 208 "src/exh.l" -{yylval.val = HIDDENMAX; return COUNTERVISIBILITY; } +#line 144 "src/exh.l" +{ return TRIGGER; } YY_BREAK case 25: YY_RULE_SETUP -#line 209 "src/exh.l" -{ return BASE; } +#line 145 "src/exh.l" +{ yyextra.data = NOMAX; return COUNTERVISIBILITY; } YY_BREAK case 26: YY_RULE_SETUP -#line 211 "src/exh.l" -{yylval.val = ITEMTYPE; return SETDEFKEY; } +#line 146 "src/exh.l" +{ yyextra.data = HIDDENMAX; return COUNTERVISIBILITY; } YY_BREAK case 27: YY_RULE_SETUP -#line 212 "src/exh.l" -{yylval.val = NEWOBJS; return SETDEFKEY; } +#line 147 "src/exh.l" +{ return BASE; } YY_BREAK case 28: YY_RULE_SETUP -#line 213 "src/exh.l" -{yylval.val = IDTYPE; return SETDEFKEY; } +#line 149 "src/exh.l" +{ yyextra.data = ITEMTYPE; return SETDEFKEY; } YY_BREAK case 29: YY_RULE_SETUP -#line 214 "src/exh.l" -{yylval.val = ITEMGETTER; return SETDEFKEY; } +#line 150 "src/exh.l" +{ yyextra.data = NEWOBJS; return SETDEFKEY; } YY_BREAK case 30: 
YY_RULE_SETUP -#line 215 "src/exh.l" -{yylval.val = IDGETTER; return SETDEFKEY; } +#line 151 "src/exh.l" +{ yyextra.data = IDTYPE; return SETDEFKEY; } YY_BREAK case 31: YY_RULE_SETUP -#line 216 "src/exh.l" -{yylval.val = ITER; return SETDEFKEY; } +#line 152 "src/exh.l" +{ yyextra.data = ITEMGETTER; return SETDEFKEY; } YY_BREAK case 32: YY_RULE_SETUP -#line 217 "src/exh.l" -{yylval.val = PREFIX; return SETDEFKEY; } +#line 153 "src/exh.l" +{ yyextra.data = IDGETTER; return SETDEFKEY; } YY_BREAK case 33: YY_RULE_SETUP -#line 218 "src/exh.l" -{yylval.val = REVERSE; return SETDEFKEY; } +#line 154 "src/exh.l" +{ yyextra.data = ITER; return SETDEFKEY; } YY_BREAK case 34: YY_RULE_SETUP -#line 219 "src/exh.l" -{yylval.val = ADDTOSET; return SETDEFKEY; } +#line 155 "src/exh.l" +{ yyextra.data = PREFIX; return SETDEFKEY; } YY_BREAK case 35: YY_RULE_SETUP -#line 220 "src/exh.l" -{yylval.val = FROMSUPERSET; return SETDEFKEY; } +#line 156 "src/exh.l" +{ yyextra.data = REVERSE; return SETDEFKEY; } YY_BREAK case 36: YY_RULE_SETUP -#line 221 "src/exh.l" -{ return SET; } +#line 157 "src/exh.l" +{ yyextra.data = ADDTOSET; return SETDEFKEY; } YY_BREAK case 37: YY_RULE_SETUP -#line 222 "src/exh.l" -{ return OF; } +#line 158 "src/exh.l" +{ yyextra.data = FROMSUPERSET; return SETDEFKEY; } YY_BREAK case 38: YY_RULE_SETUP -#line 223 "src/exh.l" -{ return SUBSETOF; } +#line 159 "src/exh.l" +{ return SET; } YY_BREAK case 39: YY_RULE_SETUP -#line 224 "src/exh.l" -{ return DISJOINT; } +#line 160 "src/exh.l" +{ return OF; } YY_BREAK case 40: YY_RULE_SETUP -#line 225 "src/exh.l" -{ return QUOTIENT; } +#line 161 "src/exh.l" +{ return SUBSETOF; } YY_BREAK case 41: YY_RULE_SETUP -#line 226 "src/exh.l" -{yylval.val = 1; return BOOL; } +#line 162 "src/exh.l" +{ return DISJOINT; } YY_BREAK case 42: YY_RULE_SETUP -#line 227 "src/exh.l" -{yylval.val = 0; return BOOL; } +#line 163 "src/exh.l" +{ return QUOTIENT; } YY_BREAK case 43: YY_RULE_SETUP -#line 229 "src/exh.l" -{ return COLON; } +#line 164 
"src/exh.l" +{ yyextra.data = 1; return BOOL; } YY_BREAK case 44: YY_RULE_SETUP -#line 230 "src/exh.l" -{ return COMMA; } +#line 165 "src/exh.l" +{ yyextra.data = 0; return BOOL; } YY_BREAK case 45: YY_RULE_SETUP -#line 231 "src/exh.l" -{ return LPAREN; } +#line 167 "src/exh.l" +{ return COLON; } YY_BREAK case 46: YY_RULE_SETUP -#line 232 "src/exh.l" -{ return RPAREN; } +#line 168 "src/exh.l" +{ return COMMA; } YY_BREAK case 47: YY_RULE_SETUP -#line 233 "src/exh.l" -{ return BITOR; } +#line 169 "src/exh.l" +{ return LPAREN; } YY_BREAK case 48: YY_RULE_SETUP -#line 234 "src/exh.l" -{ return OR; } +#line 170 "src/exh.l" +{ return RPAREN; } YY_BREAK case 49: YY_RULE_SETUP -#line 235 "src/exh.l" -{ return AND; } +#line 171 "src/exh.l" +{ return BITOR; } YY_BREAK case 50: YY_RULE_SETUP -#line 236 "src/exh.l" -{yylval.val = GT; return CMPOP; } +#line 172 "src/exh.l" +{ return OR; } YY_BREAK case 51: YY_RULE_SETUP -#line 237 "src/exh.l" -{yylval.val = LT; return CMPOP; } +#line 173 "src/exh.l" +{ return AND; } YY_BREAK case 52: YY_RULE_SETUP -#line 238 "src/exh.l" -{yylval.val = GEQ; return CMPOP; } +#line 174 "src/exh.l" +{ yyextra.data = GT; return CMPOP; } YY_BREAK case 53: YY_RULE_SETUP -#line 239 "src/exh.l" -{yylval.val = LEQ; return CMPOP; } +#line 175 "src/exh.l" +{ yyextra.data = LT; return CMPOP; } YY_BREAK case 54: YY_RULE_SETUP -#line 240 "src/exh.l" -{yylval.val = EQ; return CMPOP; } +#line 176 "src/exh.l" +{ yyextra.data = GEQ; return CMPOP; } YY_BREAK case 55: YY_RULE_SETUP -#line 241 "src/exh.l" -{yylval.val = NEQ; return CMPOP; } +#line 177 "src/exh.l" +{ yyextra.data = LEQ; return CMPOP; } YY_BREAK case 56: YY_RULE_SETUP -#line 242 "src/exh.l" -{ return EQUAL; } +#line 178 "src/exh.l" +{ yyextra.data = EQ; return CMPOP; } YY_BREAK case 57: YY_RULE_SETUP -#line 243 "src/exh.l" -{ return END; } +#line 179 "src/exh.l" +{ yyextra.data = NEQ; return CMPOP; } YY_BREAK case 58: YY_RULE_SETUP -#line 244 "src/exh.l" -{ return SYMMETRIC; } +#line 180 "src/exh.l" 
+{ return EQUAL; } YY_BREAK case 59: YY_RULE_SETUP -#line 245 "src/exh.l" -{ return ANTISYMMETRIC; } +#line 181 "src/exh.l" +{ return END; } YY_BREAK case 60: YY_RULE_SETUP -#line 246 "src/exh.l" -{ return ARROW; } +#line 182 "src/exh.l" +{ return SYMMETRIC; } YY_BREAK case 61: YY_RULE_SETUP -#line 247 "src/exh.l" -{ return DIVIDE; } +#line 183 "src/exh.l" +{ return ANTISYMMETRIC; } YY_BREAK case 62: YY_RULE_SETUP -#line 249 "src/exh.l" -{ yytext++; return VAR; } +#line 184 "src/exh.l" +{ return ARROW; } YY_BREAK case 63: YY_RULE_SETUP -#line 250 "src/exh.l" -{ yytext++; return CODE; } +#line 185 "src/exh.l" +{ return DIVIDE; } YY_BREAK case 64: YY_RULE_SETUP -#line 251 "src/exh.l" -{ return CHOICEIDENT; } +#line 187 "src/exh.l" +{ yytext++; return VAR; } YY_BREAK case 65: YY_RULE_SETUP -#line 252 "src/exh.l" -{ return VALUEIDENT; } +#line 188 "src/exh.l" +{ yytext++; return CODE; } YY_BREAK case 66: YY_RULE_SETUP -#line 253 "src/exh.l" +#line 189 "src/exh.l" +{ return CHOICEIDENT; } + YY_BREAK +case 67: +YY_RULE_SETUP +#line 190 "src/exh.l" +{ return VALUEIDENT; } + YY_BREAK +case 68: +YY_RULE_SETUP +#line 191 "src/exh.l" { return SETIDENT; } YY_BREAK case YY_STATE_EOF(INITIAL): case YY_STATE_EOF(C_COMMENT): case YY_STATE_EOF(LINE_DOC): -#line 254 "src/exh.l" +#line 192 "src/exh.l" { return EOF; } YY_BREAK -case 67: +case 69: YY_RULE_SETUP -#line 255 "src/exh.l" +#line 193 "src/exh.l" { return INVALIDTOKEN; } YY_BREAK -case 68: +case 70: YY_RULE_SETUP -#line 256 "src/exh.l" +#line 194 "src/exh.l" ECHO; YY_BREAK -#line 1351 "exh.c" +#line 1333 "src/exh.c" case YY_END_OF_BUFFER: { @@ -1712,6 +1694,10 @@ static int yy_get_next_buffer (yyscan_t yyscanner) *--yy_cp = (char) c; + if ( c == '\n' ){ + --yylineno; + } + yyg->yytext_ptr = yy_bp; yyg->yy_hold_char = *yy_cp; yyg->yy_c_buf_p = yy_cp; @@ -1788,6 +1774,13 @@ static int yy_get_next_buffer (yyscan_t yyscanner) *yyg->yy_c_buf_p = '\0'; /* preserve yytext */ yyg->yy_hold_char = *++yyg->yy_c_buf_p; + if ( c == '\n' ) 
+ + do{ yylineno++; + yycolumn=0; + }while(0) +; + return c; } #endif /* ifndef YY_NO_INPUT */ @@ -2498,7 +2491,7 @@ void yyfree (void * ptr , yyscan_t yyscanner) #define YYTABLES_NAME "yytables" -#line 256 "src/exh.l" +#line 194 "src/exh.l" diff --git a/telamon-gen/src/exh.l b/telamon-gen/src/exh.l index cd24618cc..7658017b1 100644 --- a/telamon-gen/src/exh.l +++ b/telamon-gen/src/exh.l @@ -1,108 +1,45 @@ +/* 19 Reentrant C Scanners § https://westes.github.io/flex/manual/Reentrant.html#Reentrant */ %option reentrant +/* https://westes.github.io/flex/manual/I-get-an-error-about-undefined-yywrap_0028_0029_002e.html#I-get-an-error-about-undefined-yywrap_0028_0029_002e */ %option noyywrap +/* number of the current line § https://westes.github.io/flex/manual/Options-Affecting-Scanner-Behavior.html#index-yylineno */ +%option yylineno +/* 19.4.6 Extra Data § https://westes.github.io/flex/manual/Extra-Data.html */ +%option extra-type="Span" %x C_COMMENT %x LINE_DOC %{ - #include - - union yylval { - int val; - } yylval; - - enum token { - VALUEIDENT, - CHOICEIDENT, - VAR, - DOC, - CMPOP, - INVALIDTOKEN, - CODE, - COUNTERKIND, - BOOL, - COUNTERVISIBILITY, - AND, - TRIGGER, - WHEN, - ALIAS, - COUNTER, - DEFINE, - ENUM, - EQUAL, - FORALL, - IN, - IS, - NOT, - REQUIRE, - REQUIRES, - VALUE, - END, - SYMMETRIC, - ANTISYMMETRIC, - ARROW, - COLON, - COMMA, - LPAREN, - RPAREN, - BITOR, - OR, - SETDEFKEY, - SET, - SUBSETOF, - SETIDENT, - BASE, - DISJOINT, - QUOTIENT, - OF, - DIVIDE, - }; - - // Indicates whether a counter sums or adds. - enum counter_kind { - ADD, - MUL, - }; - - // Indicates how a counter exposes how its maximum value. - // The variants are ordered by increasing amount of information available. - enum counter_visibility { - // Only the minimal value is computed and stored. - NOMAX, - // Both the min and max are stored, but only the min is exposed. - HIDDENMAX, - // Both the min and the max value are exposed. 
- FULL, - }; - - enum cmp_op { - LT, - GT, - LEQ, - GEQ, - EQ, - NEQ, - }; - - enum set_def_key { - ITEMTYPE, - IDTYPE, - ITEMGETTER, - IDGETTER, - ITER, - FROMSUPERSET, - PREFIX, - NEWOBJS, - REVERSE, - ADDTOSET, - }; + #include "expression.h" + + typedef struct Pos { + unsigned int line; + unsigned int column; + } Pos; + + typedef int Data; + + // + typedef struct Span { + Pos leg; + Pos end; + Data data; + } Span; + + #define YY_USER_ACTION { \ + yyextra.leg = yyextra.end; \ + yyextra.end.line = yylineno; \ + yyextra.end.column += yyleng; \ + } + %} num [0-9] alpha [a-zA-Z_] alpha_num {alpha}|{num} -whitespace [ \t\r\n]+ +whitespace [ \t\r]+ comment "//"[^/\n][^\n]*|"//" blank {comment}|{whitespace} @@ -178,12 +115,15 @@ code \"[^\n\"]*\" %% {c_comment_beg} { BEGIN(C_COMMENT); } +[\n] { yyextra.end.line += 1; } {c_comment_end} { BEGIN(INITIAL); } -. {} +. { } + +{doc} { yyextra.end.column -= 3; BEGIN(LINE_DOC); } +\n { yyextra.end.column = 0; BEGIN(INITIAL); } +[^\n]* { yyextra.end.column += 3; return DOC; } -{doc} { BEGIN(LINE_DOC); } -\n { BEGIN(INITIAL); } -[^\n]* { return DOC; } +[\n] { yyextra.end.column = 0; } {blank} {} @@ -195,34 +135,34 @@ code \"[^\n\"]*\" {in} { return IN; } {is} { return IS; } {not} { return NOT; } -{product} {yylval.val = MUL; return COUNTERKIND; } +{product} { yyextra.data = MUL; return COUNTERKIND; } {require} { return REQUIRE; } {requires} { return REQUIRES; } -{sum} {yylval.val = ADD; return COUNTERKIND; } +{sum} { yyextra.data = ADD; return COUNTERKIND; } {value} { return VALUE; } {when} { return WHEN; } {trigger} { return TRIGGER; } -{half} {yylval.val = NOMAX; return COUNTERVISIBILITY; } -{hidden} {yylval.val = HIDDENMAX; return COUNTERVISIBILITY; } +{half} { yyextra.data = NOMAX; return COUNTERVISIBILITY; } +{hidden} { yyextra.data = HIDDENMAX; return COUNTERVISIBILITY; } {base} { return BASE; } -{item_type} {yylval.val = ITEMTYPE; return SETDEFKEY; } -{new_objs} {yylval.val = NEWOBJS; return SETDEFKEY; } -{id_type} 
{yylval.val = IDTYPE; return SETDEFKEY; } -{item_getter} {yylval.val = ITEMGETTER; return SETDEFKEY; } -{id_getter} {yylval.val = IDGETTER; return SETDEFKEY; } -{iter} {yylval.val = ITER; return SETDEFKEY; } -{prefix} {yylval.val = PREFIX; return SETDEFKEY; } -{reverse} {yylval.val = REVERSE; return SETDEFKEY; } -{add_to_set} {yylval.val = ADDTOSET; return SETDEFKEY; } -{from_superset} {yylval.val = FROMSUPERSET; return SETDEFKEY; } +{item_type} { yyextra.data = ITEMTYPE; return SETDEFKEY; } +{new_objs} { yyextra.data = NEWOBJS; return SETDEFKEY; } +{id_type} { yyextra.data = IDTYPE; return SETDEFKEY; } +{item_getter} { yyextra.data = ITEMGETTER; return SETDEFKEY; } +{id_getter} { yyextra.data = IDGETTER; return SETDEFKEY; } +{iter} { yyextra.data = ITER; return SETDEFKEY; } +{prefix} { yyextra.data = PREFIX; return SETDEFKEY; } +{reverse} { yyextra.data = REVERSE; return SETDEFKEY; } +{add_to_set} { yyextra.data = ADDTOSET; return SETDEFKEY; } +{from_superset} { yyextra.data = FROMSUPERSET; return SETDEFKEY; } {set} { return SET; } {of} { return OF; } {subsetof} { return SUBSETOF; } {disjoint} { return DISJOINT; } {quotient} { return QUOTIENT; } -{true} {yylval.val = 1; return BOOL; } -{false} {yylval.val = 0; return BOOL; } +{true} { yyextra.data = 1; return BOOL; } +{false} { yyextra.data = 0; return BOOL; } {colon} { return COLON; } {comma} { return COMMA; } @@ -231,12 +171,12 @@ code \"[^\n\"]*\" {bit_or} { return BITOR; } {or} { return OR; } {and} { return AND; } -{gt} {yylval.val = GT; return CMPOP; } -{lt} {yylval.val = LT; return CMPOP; } -{ge} {yylval.val = GEQ; return CMPOP; } -{le} {yylval.val = LEQ; return CMPOP; } -{equals} {yylval.val = EQ; return CMPOP; } -{not_equals} {yylval.val = NEQ; return CMPOP; } +{gt} { yyextra.data = GT; return CMPOP; } +{lt} { yyextra.data = LT; return CMPOP; } +{ge} { yyextra.data = GEQ; return CMPOP; } +{le} { yyextra.data = LEQ; return CMPOP; } +{equals} { yyextra.data = EQ; return CMPOP; } +{not_equals} { yyextra.data 
= NEQ; return CMPOP; } {equal} { return EQUAL; } {end} { return END; } {symmetric} { return SYMMETRIC; } diff --git a/telamon-gen/src/expression.h b/telamon-gen/src/expression.h new file mode 100644 index 000000000..a2cb68bf6 --- /dev/null +++ b/telamon-gen/src/expression.h @@ -0,0 +1,90 @@ +#ifndef __EXPRESSION_H__ +#define __EXPRESSION_H__ + +enum token { + VALUEIDENT, + CHOICEIDENT, + VAR, + DOC, + CMPOP, + INVALIDTOKEN, + CODE, + COUNTERKIND, + BOOL, + COUNTERVISIBILITY, + AND, + TRIGGER, + WHEN, + ALIAS, + COUNTER, + DEFINE, + ENUM, + EQUAL, + FORALL, + IN, + IS, + NOT, + REQUIRE, + REQUIRES, + VALUE, + END, + SYMMETRIC, + ANTISYMMETRIC, + ARROW, + COLON, + COMMA, + LPAREN, + RPAREN, + BITOR, + OR, + SETDEFKEY, + SET, + SUBSETOF, + SETIDENT, + BASE, + DISJOINT, + QUOTIENT, + OF, + DIVIDE, +}; + +// Indicates whether a counter sums or adds. +enum counter_kind { + ADD, + MUL, +}; + +// Indicates how a counter exposes how its maximum value. +// The variants are ordered by increasing amount of information available. +enum counter_visibility { + // Only the minimal value is computed and stored. + NOMAX, + // Both the min and max are stored, but only the min is exposed. + HIDDENMAX, + // Both the min and the max value are exposed. 
+ FULL, +}; + +enum cmp_op { + LT, + GT, + LEQ, + GEQ, + EQ, + NEQ, +}; + +enum set_def_key { + ITEMTYPE, + IDTYPE, + ITEMGETTER, + IDGETTER, + ITER, + FROMSUPERSET, + PREFIX, + NEWOBJS, + REVERSE, + ADDTOSET, +}; + +#endif // __EXPRESSION_H__ diff --git a/telamon-gen/src/lexer/ffi.rs b/telamon-gen/src/lexer/ffi.rs index f4b652220..fa3599e1f 100644 --- a/telamon-gen/src/lexer/ffi.rs +++ b/telamon-gen/src/lexer/ffi.rs @@ -2,22 +2,73 @@ use ::libc; use ::ir; -/// https://westes.github.io/flex/manual/About-yyscan_005ft.html +use std::fmt; + +/// A [yyscan](https://westes.github.io/flex/manual/About-yyscan_005ft.html) type is the internal +/// representation of a [yylex_init](https://westes.github.io/flex/manual/Init-and-Destroy-Functions.html) structure. pub type YyScan = *const libc::c_void; +/// State per character. pub type YyBufferState = *const libc::c_void; +/// Unsigned integer type used to represent the sizes f/lex. pub type YySize = libc::size_t; +/// A sequence's row/column position #[derive(Copy, Clone)] #[repr(C)] pub union YyLval { - pub val: libc::c_int, + /// Indicate a comparison operators. pub cmp_op: ir::CmpOp, pub boolean: bool, + /// Indicates whether a counter sums or adds. pub counter_kind: ir::CounterKind, + /// Indicates how a counter exposes how its maximum value. 
pub counter_visibility: ir::CounterVisibility, pub set_def_key: ir::SetDefKey, } +/// A sequence's row/column position +#[derive(Default, Copy, Clone, Debug, PartialEq)] +#[repr(C)] +pub struct Position { + pub line: libc::c_uint, + pub column: libc::c_uint, +} + +impl fmt::Display for Position { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + write!(f, "line {}, column {}", self.line, self.column) + } +} + +/// A double sequence's row/column position +#[derive(Default, Copy, Clone, Debug, PartialEq)] +pub struct Span { + pub leg: Position, + pub end: Option, +} + +impl fmt::Display for Span { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + if let Some(end) = self.end { + write!(f, "between {} and {}", self.leg, end) + } else { + write!(f, "at {}", self.leg) + } + } +} + +/// A F/lex's token with a span. +#[derive(Copy, Clone, Debug)] +#[repr(C)] +pub struct Spanned { + pub leg: Position, + pub end: Position, + /// Spanned data + pub data: Y, +} + +pub type YyExtraType = Spanned; + #[derive(Copy, Clone, Debug)] #[repr(C)] pub enum YyToken { @@ -65,18 +116,19 @@ pub enum YyToken { Quotient, Of, Divide, + /// End-of-File EOF = libc::EOF as _, } extern { - pub static yylval: YyLval; - pub fn yylex_init(scanner: *const YyScan) -> libc::c_int; pub fn yy_scan_string(yy_str: *const libc::c_char, yyscanner: YyScan) -> YyBufferState; pub fn yy_scan_buffer(base: *const libc::c_char, size: YySize, yyscanner: YyScan) -> YyBufferState; pub fn yy_scan_bytes(base: *const libc::c_char, len: libc::c_int, yyscanner: YyScan) -> YyBufferState; + pub fn yyget_extra(yyscanner: YyScan) -> YyExtraType; pub fn yylex(yyscanner: YyScan) -> YyToken; pub fn yyget_text(yyscanner: YyScan) -> *mut libc::c_char; + pub fn yyset_lineno(line_number: libc::c_int, yyscanner: YyScan) -> libc::c_int; pub fn yy_delete_buffer(b: YyBufferState, yyscanner: YyScan); pub fn yylex_destroy(yyscanner: YyScan) -> libc::c_int; } diff --git a/telamon-gen/src/lexer/mod.rs 
b/telamon-gen/src/lexer/mod.rs index 767316a06..dad3f89d9 100644 --- a/telamon-gen/src/lexer/mod.rs +++ b/telamon-gen/src/lexer/mod.rs @@ -1,9 +1,12 @@ -/// Tokens from the textual representation of constraints. - +/// This lexer is a application of +/// [Writing a custom lexer](https://github.com/lalrpop/lalrpop/blob/master/doc/src/lexer_tutorial/index.md)'s +/// documentation. This includes a Spanned definition and a Iterator. mod ffi; mod token; use std::{io,ptr}; +use std::error::Error; +use std::fmt; pub use self::token::Token; @@ -14,16 +17,56 @@ use std::ffi::CStr; use self::ffi::{ YyScan, YyBufferState, + YyExtraType, yylex_init, yy_scan_bytes, + yyset_lineno, yy_delete_buffer, yylex_destroy, yylex, YyToken, - yylval, yyget_text, + yyget_extra, }; +pub use self::ffi::{Position, Span}; + +#[derive(Debug, PartialEq)] +pub enum LexicalError { + InvalidToken(Position, Token, Position), + UnexpectedToken(Position, Token, Position), +} + +impl Error for LexicalError { + fn description(&self) -> &str { + match self { + LexicalError::InvalidToken(..) => "invalid token", + LexicalError::UnexpectedToken(..) => "expected expression", + } + } +} + +impl fmt::Display for LexicalError { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + match self { + LexicalError::UnexpectedToken(leg, tok, end) | + LexicalError::InvalidToken(leg, tok, end) => { + write!(f, "{}, found '{:?}' between {}:{}", + self.description(), + tok, + leg, + end + ) + }, + } + } +} + +/// The alias Spanned is a definition of the stream format. +/// The parser will accept an iterator where each item +/// in the stream has the following structure. 
+pub type Spanned = Result<(Pos, Tok, Pos), Err>; + pub struct Lexer { scanner: YyScan, buffer: YyBufferState, @@ -39,14 +82,27 @@ impl Lexer { } impl From> for Lexer { - fn from(buffer:Vec) -> Self { + fn from(buffer: Vec) -> Self { unsafe { let scanner: YyScan = ptr::null(); - yylex_init(&scanner); // https://westes.github.io/flex/manual/Init-and-Destroy-Functions.html#index-yylex_005finit + // The function [yylex_init](https://westes.github.io/flex/manual/Init-and-Destroy-Functions.html#index-yylex_005finit) + // innitializes the scanner. + yylex_init(&scanner); + + // scans len bytes starting at location bytes. + let buffer: YyBufferState = yy_scan_bytes(buffer.as_ptr() as *const _, buffer.len() as _, scanner); + + // Issue [flex/60](https://github.com/westes/flex/issues/60) + // yylineno should be set. + // The function [yyset_lineno](https://westes.github.io/flex/manual/Reentrant-Functions.html#index-yyset_005flineno) + // sets the current line number. + yyset_lineno(0, scanner); Lexer { scanner: scanner, - buffer: yy_scan_bytes(buffer.as_ptr() as *const _, buffer.len() as _, scanner), // https://westes.github.io/flex/manual/Multiple-Input-Buffers.html + // The function [yy_scan_bytes](https://westes.github.io/flex/manual/Multiple-Input-Buffers.html) + // scans len bytes starting at location bytes. + buffer: buffer, } } } @@ -55,52 +111,65 @@ impl From> for Lexer { impl Drop for Lexer { fn drop(&mut self) { unsafe { - yy_delete_buffer(self.buffer, self.scanner); // https://westes.github.io/flex/manual/Multiple-Input-Buffers.html - yylex_destroy(self.scanner); // https://westes.github.io/flex/manual/Init-and-Destroy-Functions.html#index-yylex_005finit + // The function [yy_delete_buffer](https://westes.github.io/flex/manual/Multiple-Input-Buffers.html) + // clears the current contents of a buffer using. 
+ yy_delete_buffer(self.buffer, self.scanner); + // The function [yylex_destroy](https://westes.github.io/flex/manual/Init-and-Destroy-Functions.html#index-yylex_005finit) + // frees the resources used by the scanner. + yylex_destroy(self.scanner); } } } +/// the Lalrpop Iterator is a exh implementation:for lexer. impl Iterator for Lexer { - type Item = Token; + type Item = Spanned; fn next(&mut self) -> Option { unsafe { - match yylex(self.scanner) { + // The function [yylex](https://westes.github.io/flex/manual/Generated-Scanner.html) + // returns statement in one of the actions, the scanner may then + // be called again and it will resume scanning where it left off. + let code: YyToken = yylex(self.scanner); + // The accessor function [yyget_extra](https://westes.github.io/flex/manual/Extra-Data.html) + // returns a extra copy. + let extra: YyExtraType = yyget_extra(self.scanner); + + match code { YyToken::InvalidToken => { let out = yyget_text(self.scanner); CStr::from_ptr(out) .to_str().ok() - .and_then(|s: &str| Some(Token::InvalidToken(s.to_owned()))) + .and_then(|s: &str| Some(Err(LexicalError::InvalidToken(extra.leg, Token::InvalidToken(s.to_owned()), extra.end)))) }, YyToken::ChoiceIdent => { let out = yyget_text(self.scanner); CStr::from_ptr(out) .to_str().ok() - .and_then(|s: &str| Some(Token::ChoiceIdent(s.to_owned()))) + .and_then(|s: &str| Some(Ok((extra.leg, Token::ChoiceIdent(s.to_owned()), extra.end)))) }, YyToken::SetIdent => { let out = yyget_text(self.scanner); CStr::from_ptr(out) .to_str().ok() - .and_then(|s: &str| Some(Token::SetIdent(s.to_owned()))) + .and_then(|s: &str| Some(Ok((extra.leg, Token::SetIdent(s.to_owned()), extra.end)))) }, YyToken::ValueIdent => { let out = yyget_text(self.scanner); CStr::from_ptr(out) .to_str().ok() - .and_then(|s: &str| Some(Token::ValueIdent(s.to_owned()))) + .and_then(|s: &str| Some(Ok((extra.leg, Token::ValueIdent(s.to_owned()), extra.end)))) }, YyToken::Var => { let out = yyget_text(self.scanner); 
CStr::from_ptr(out) .to_str().ok() - .and_then(|s: &str| Some(Token::Var(s.to_owned()))) + .and_then(|s: &str| Some(Ok((extra.leg, Token::Var(s.to_owned()), extra.end)))) }, YyToken::Code => { let out = yyget_text(self.scanner); @@ -109,52 +178,53 @@ impl Iterator for Lexer { *out.offset(len as _) = b'\0' as _; CStr::from_ptr(out) .to_str().ok() - .and_then(|s: &str| Some(Token::Code(s.to_owned()))) + .and_then(|s: &str| Some(Ok((extra.leg, Token::Code(s.to_owned()), extra.end)))) }, YyToken::Doc => { let out = yyget_text(self.scanner); CStr::from_ptr(out) .to_str().ok() - .and_then(|s: &str| Some(Token::Doc(s.to_owned()))) + .and_then(|s: &str| Some(Ok((extra.leg, Token::Doc(s.to_owned()), extra.end)))) }, - YyToken::Alias => Some(Token::Alias), - YyToken::Counter => Some(Token::Counter), - YyToken::Define => Some(Token::Define), - YyToken::Enum => Some(Token::Enum), - YyToken::Forall => Some(Token::Forall), - YyToken::In => Some(Token::In), - YyToken::Is => Some(Token::Is), - YyToken::Not => Some(Token::Not), - YyToken::Require => Some(Token::Require), - YyToken::Requires => Some(Token::Requires), - YyToken::CounterKind => Some(Token::CounterKind(yylval.counter_kind)), - YyToken::Value => Some(Token::Value), - YyToken::When => Some(Token::When), - YyToken::Trigger => Some(Token::Trigger), - YyToken::CounterVisibility => Some(Token::CounterVisibility(yylval.counter_visibility)), - YyToken::Base => Some(Token::Base), - YyToken::SetDefkey => Some(Token::SetDefKey(yylval.set_def_key)), - YyToken::Set => Some(Token::Set), - YyToken::SubsetOf => Some(Token::SubsetOf), - YyToken::Disjoint => Some(Token::Disjoint), - YyToken::Quotient => Some(Token::Quotient), - YyToken::Of => Some(Token::Of), - YyToken::Bool => Some(Token::Bool(yylval.boolean)), - YyToken::Colon => Some(Token::Colon), - YyToken::Comma => Some(Token::Comma), - YyToken::LParen => Some(Token::LParen), - YyToken::RParen => Some(Token::RParen), - YyToken::BitOr => Some(Token::BitOr), - YyToken::Or => 
Some(Token::Or), - YyToken::And => Some(Token::And), - YyToken::CmpOp => Some(Token::CmpOp(yylval.cmp_op)), - YyToken::Equal => Some(Token::Equal), - YyToken::End => Some(Token::End), - YyToken::Symmetric => Some(Token::Symmetric), - YyToken::AntiSymmetric => Some(Token::AntiSymmetric), - YyToken::Arrow => Some(Token::Arrow), - YyToken::Divide => Some(Token::Divide), + YyToken::Alias => Some(Ok((extra.leg, Token::Alias, extra.end))), + YyToken::Counter => Some(Ok((extra.leg, Token::Counter, extra.end))), + YyToken::Define => Some(Ok((extra.leg, Token::Define, extra.end))), + YyToken::Enum => Some(Ok((extra.leg, Token::Enum, extra.end))), + YyToken::Forall => Some(Ok((extra.leg, Token::Forall, extra.end))), + YyToken::In => Some(Ok((extra.leg, Token::In, extra.end))), + YyToken::Is => Some(Ok((extra.leg, Token::Is, extra.end))), + YyToken::Not => Some(Ok((extra.leg, Token::Not, extra.end))), + YyToken::Require => Some(Ok((extra.leg, Token::Require, extra.end))), + YyToken::Requires => Some(Ok((extra.leg, Token::Requires, extra.end))), + YyToken::CounterKind => Some(Ok((extra.leg, Token::CounterKind(extra.data.counter_kind), extra.end))), + YyToken::Value => Some(Ok((extra.leg, Token::Value, extra.end))), + YyToken::When => Some(Ok((extra.leg, Token::When, extra.end))), + YyToken::Trigger => Some(Ok((extra.leg, Token::Trigger, extra.end))), + YyToken::CounterVisibility => Some(Ok((extra.leg, Token::CounterVisibility(extra.data.counter_visibility), extra.end))), + YyToken::Base => Some(Ok((extra.leg, Token::Base, extra.end))), + YyToken::SetDefkey => Some(Ok((extra.leg, Token::SetDefKey(extra.data.set_def_key), extra.end))), + YyToken::Set => Some(Ok((extra.leg, Token::Set, extra.end))), + YyToken::SubsetOf => Some(Ok((extra.leg, Token::SubsetOf, extra.end))), + YyToken::Disjoint => Some(Ok((extra.leg, Token::Disjoint, extra.end))), + YyToken::Quotient => Some(Ok((extra.leg, Token::Quotient, extra.end))), + YyToken::Of => Some(Ok((extra.leg, Token::Of, extra.end))), + 
YyToken::Bool => Some(Ok((extra.leg, Token::Bool(extra.data.boolean), extra.end))), + YyToken::Colon => Some(Ok((extra.leg, Token::Colon, extra.end))), + YyToken::Comma => Some(Ok((extra.leg, Token::Comma, extra.end))), + YyToken::LParen => Some(Ok((extra.leg, Token::LParen, extra.end))), + YyToken::RParen => Some(Ok((extra.leg, Token::RParen, extra.end))), + YyToken::BitOr => Some(Ok((extra.leg, Token::BitOr, extra.end))), + YyToken::Or => Some(Ok((extra.leg, Token::Or, extra.end))), + YyToken::And => Some(Ok((extra.leg, Token::And, extra.end))), + YyToken::CmpOp => Some(Ok((extra.leg, Token::CmpOp(extra.data.cmp_op), extra.end))), + YyToken::Equal => Some(Ok((extra.leg, Token::Equal, extra.end))), + YyToken::End => Some(Ok((extra.leg, Token::End, extra.end))), + YyToken::Symmetric => Some(Ok((extra.leg, Token::Symmetric, extra.end))), + YyToken::AntiSymmetric => Some(Ok((extra.leg, Token::AntiSymmetric, extra.end))), + YyToken::Arrow => Some(Ok((extra.leg, Token::Arrow, extra.end))), + YyToken::Divide => Some(Ok((extra.leg, Token::Divide, extra.end))), + // Return None to signal EOF.for a reached end of the string. YyToken::EOF => None, } } diff --git a/telamon-gen/src/lexer/token.rs b/telamon-gen/src/lexer/token.rs index b2f80d999..9507cf9f6 100644 --- a/telamon-gen/src/lexer/token.rs +++ b/telamon-gen/src/lexer/token.rs @@ -1,12 +1,21 @@ +/// Tokens from the textual representation of constraints. 
use ir; -#[derive(Debug, PartialEq)] +use std::fmt; + +#[derive(Debug, Clone, PartialEq)] pub enum Token { - ValueIdent(String), ChoiceIdent(String), Var(String), Doc(String), CmpOp(ir::CmpOp), - InvalidToken(String), Code(String), CounterKind(ir::CounterKind), Bool(bool), + InvalidToken(String), ValueIdent(String), ChoiceIdent(String), Var(String), Doc(String), CmpOp(ir::CmpOp), + Code(String), CounterKind(ir::CounterKind), Bool(bool), CounterVisibility(ir::CounterVisibility), And, Trigger, When, Alias, Counter, Define, Enum, Equal, Forall, In, Is, Not, Require, Requires, Value, End, Symmetric, AntiSymmetric, Arrow, Colon, Comma, LParen, RParen, BitOr, Or, SetDefKey(ir::SetDefKey), Set, SubsetOf, SetIdent(String), Base, Disjoint, Quotient, Of, Divide, } + +impl fmt::Display for Token { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + write!(f, "{:?}", self) + } +} diff --git a/telamon-gen/src/lib.rs b/telamon-gen/src/lib.rs index 145e101ab..99fd6f9fb 100644 --- a/telamon-gen/src/lib.rs +++ b/telamon-gen/src/lib.rs @@ -13,16 +13,20 @@ extern crate telamon_utils as utils; extern crate topological_sort; extern crate libc; +extern crate lalrpop_util; + mod ast; mod constraint; mod flat_filter; pub mod ir; pub mod lexer; -generated_file!(parser); +generated_file!(pub parser); mod print; mod truth_table; +pub mod error; use std::{fs, io, path}; + use utils::*; /// Converts a choice name to a rust type name. @@ -45,19 +49,30 @@ fn to_type_name(name: &str) -> String { } /// Process a file and stores the result in an other file. 
-pub fn process_file(input_path: &path::Path, output_path: &path::Path, format: bool) { +pub fn process_file<'a>( + input_path: &'a path::Path, + output_path: &path::Path, + format: bool +) -> Result<(), error::ProcessError<'a>> { let mut input = fs::File::open(path::Path::new(input_path)).unwrap(); let mut output = fs::File::create(path::Path::new(output_path)).unwrap(); let input_path_str = input_path.to_string_lossy(); info!("compiling {} to {}", input_path_str, output_path.to_string_lossy()); - process(&mut input, &mut output, format); + process(&mut input, &mut output, format, input_path) } /// Parses a constraint description file. -pub fn process(input: &mut io::Read, output: &mut T, format: bool) { +pub fn process<'a, T: io::Write>( + input: &mut io::Read, + output: &mut T, + format: bool, + input_path: &'a path::Path +) -> Result<(), error::ProcessError<'a>> { // Parse and check the input. let tokens = lexer::Lexer::new(input); - let ast = parser::parse_ast(tokens).unwrap(); + let ast: ast::Ast = + parser::parse_ast(tokens) + .map_err(|c| error::ProcessError::from((input_path.display(), c)))?; let (mut ir_desc, constraints) = ast.type_check(); debug!("constraints: {:?}", constraints); // Generate flat filters. @@ -106,6 +121,7 @@ pub fn process(input: &mut io::Read, output: &mut T, format: bool) } else { write!(output, "{}", code).unwrap(); } + Ok(()) } // TODO(cleanup): avoid name conflicts in the printer diff --git a/telamon-gen/src/parser.lalrpop b/telamon-gen/src/parser.lalrpop index 81b6961aa..9cfc9f0c8 100644 --- a/telamon-gen/src/parser.lalrpop +++ b/telamon-gen/src/parser.lalrpop @@ -1,7 +1,9 @@ //! Lalrpop parser for constraints description. 
use ast; use ir; -use lexer::Token; + +use lexer::*; + use utils::*; grammar; @@ -158,6 +160,9 @@ list: Vec = { }; extern { + type Location = Position; + type Error = LexicalError; + enum Token { choice_ident => Token::ChoiceIdent(), val_ident => Token::ValueIdent(), diff --git a/telamon-gen/tests/lexer.rs b/telamon-gen/tests/lexer.rs index 66d1b8f84..0152a8d9d 100644 --- a/telamon-gen/tests/lexer.rs +++ b/telamon-gen/tests/lexer.rs @@ -1,270 +1,483 @@ extern crate telamon_gen; -use telamon_gen::lexer::{Lexer,Token}; +use telamon_gen::lexer::*; use telamon_gen::ir::{CounterKind, CounterVisibility, SetDefKey, CmpOp}; #[test] fn initial() { // Invalid's Token - assert_eq!(Lexer::from(b"!".to_vec()).collect::>(), vec![ - Token::InvalidToken(String::from("!")), + assert_eq!(Lexer::from(b"!".to_vec()).collect::>(), vec![ + Err(LexicalError::InvalidToken( + Position::default(), + Token::InvalidToken(String::from("!")), + Position { column: 1, ..Default::default() } + )), ]); + // ChoiceIdent's Token - assert_eq!(Lexer::from(b"az_09".to_vec()).collect::>(), vec![ - Token::ChoiceIdent(String::from("az_09")) + assert_eq!(Lexer::from(b"az_09".to_vec()).collect::>(), vec![ + Ok((Position::default(), + Token::ChoiceIdent(String::from("az_09")), + Position { column: 5, ..Default::default() } + )), ]); // SetIdent's Token - assert_eq!(Lexer::from(b"Az_09".to_vec()).collect::>(), vec![ - Token::SetIdent(String::from("Az_09")) + assert_eq!(Lexer::from(b"Az_09".to_vec()).collect::>(), vec![ + Ok((Position::default(), + Token::SetIdent(String::from("Az_09")), + Position { column: 5, ..Default::default() } + )), ]); // ValueIdent's Token - assert_eq!(Lexer::from(b"AZ_09".to_vec()).collect::>(), vec![ - Token::ValueIdent(String::from("AZ_09")) + assert_eq!(Lexer::from(b"AZ_09".to_vec()).collect::>(), vec![ + Ok((Position::default(), + Token::ValueIdent(String::from("AZ_09")), + Position { column: 5, ..Default::default() } + )), ]); // Var's Token - 
assert_eq!(Lexer::from(b"$vV".to_vec()).collect::>(), vec![ - Token::Var(String::from("vV")), + assert_eq!(Lexer::from(b"$vV".to_vec()).collect::>(), vec![ + Ok((Position::default(), + Token::Var(String::from("vV")), + Position { column: 3, ..Default::default() } + )), ]); // Code's Token - assert_eq!(Lexer::from(b"\"ir::...\"".to_vec()).collect::>(), vec![ - Token::Code(String::from("ir::...")), + assert_eq!(Lexer::from(b"\"ir::...\"".to_vec()).collect::>(), vec![ + Ok((Position::default(), + Token::Code(String::from("ir::...")), + Position { column: 9, ..Default::default() } + )), ]); // Alias's Token - assert_eq!(Lexer::from(b"alias".to_vec()).collect::>(), vec![ - Token::Alias + assert_eq!(Lexer::from(b"alias".to_vec()).collect::>(), vec![ + Ok((Position::default(), + Token::Alias, + Position { column: 5, ..Default::default() } + )), ]); // Counter's Token - assert_eq!(Lexer::from(b"counter".to_vec()).collect::>(), vec![ - Token::Counter + assert_eq!(Lexer::from(b"counter".to_vec()).collect::>(), vec![ + Ok((Position::default(), + Token::Counter, + Position { column: 7, ..Default::default() } + )), ]); // Define's Token - assert_eq!(Lexer::from(b"define".to_vec()).collect::>(), vec![ - Token::Define + assert_eq!(Lexer::from(b"define".to_vec()).collect::>(), vec![ + Ok((Position::default(), + Token::Define, + Position { column: 6, ..Default::default() } + )), ]); // Enum's Token - assert_eq!(Lexer::from(b"enum".to_vec()).collect::>(), vec![ - Token::Enum + assert_eq!(Lexer::from(b"enum".to_vec()).collect::>(), vec![ + Ok((Position::default(), + Token::Enum, + Position { column: 4, ..Default::default() } + )), ]); // Forall's Token - assert_eq!(Lexer::from(b"forall".to_vec()).collect::>(), vec![ - Token::Forall + assert_eq!(Lexer::from(b"forall".to_vec()).collect::>(), vec![ + Ok((Position::default(), + Token::Forall, + Position { column: 6, ..Default::default() } + )), ]); // In's Token - assert_eq!(Lexer::from(b"in".to_vec()).collect::>(), vec![ - Token::In + 
assert_eq!(Lexer::from(b"in".to_vec()).collect::>(), vec![ + Ok((Position::default(), + Token::In, + Position { column: 2, ..Default::default() } + )), ]); // Is's Token - assert_eq!(Lexer::from(b"is".to_vec()).collect::>(), vec![ - Token::Is + assert_eq!(Lexer::from(b"is".to_vec()).collect::>(), vec![ + Ok((Position::default(), + Token::Is, + Position { column: 2, ..Default::default() } + )), ]); // Not's Token - assert_eq!(Lexer::from(b"not".to_vec()).collect::>(), vec![ - Token::Not + assert_eq!(Lexer::from(b"not".to_vec()).collect::>(), vec![ + Ok((Position::default(), + Token::Not, + Position { column: 3, ..Default::default() } + )), ]); // Require's Token - assert_eq!(Lexer::from(b"require".to_vec()).collect::>(), vec![ - Token::Require + assert_eq!(Lexer::from(b"require".to_vec()).collect::>(), vec![ + Ok((Position::default(), + Token::Require, + Position { column: 7, ..Default::default() } + )), ]); // Mul's CounterKind Token - assert_eq!(Lexer::from(b"mul".to_vec()).collect::>(), vec![ - Token::CounterKind(CounterKind::Mul) + assert_eq!(Lexer::from(b"mul".to_vec()).collect::>(), vec![ + Ok((Position::default(), + Token::CounterKind(CounterKind::Mul), + Position { column: 3, ..Default::default() } + )), ]); // Sum's CounterKind Token - assert_eq!(Lexer::from(b"sum".to_vec()).collect::>(), vec![ - Token::CounterKind(CounterKind::Add) + assert_eq!(Lexer::from(b"sum".to_vec()).collect::>(), vec![ + Ok((Position::default(), + Token::CounterKind(CounterKind::Add), + Position { column: 3, ..Default::default() } + )), ]); // Value's Token - assert_eq!(Lexer::from(b"value".to_vec()).collect::>(), vec![ - Token::Value + assert_eq!(Lexer::from(b"value".to_vec()).collect::>(), vec![ + Ok((Position::default(), + Token::Value, + Position { column: 5, ..Default::default() } + )), ]); // When's Token - assert_eq!(Lexer::from(b"when".to_vec()).collect::>(), vec![ - Token::When + assert_eq!(Lexer::from(b"when".to_vec()).collect::>(), vec![ + Ok((Position::default(), + 
Token::When, + Position { column: 4, ..Default::default() } + )), ]); // Trigger's Token - assert_eq!(Lexer::from(b"trigger".to_vec()).collect::>(), vec![ - Token::Trigger + assert_eq!(Lexer::from(b"trigger".to_vec()).collect::>(), vec![ + Ok((Position::default(), + Token::Trigger, + Position { column: 7, ..Default::default() } + )), ]); // NoMax's CounterVisibility Token - assert_eq!(Lexer::from(b"half".to_vec()).collect::>(), vec![ - Token::CounterVisibility(CounterVisibility::NoMax) + assert_eq!(Lexer::from(b"half".to_vec()).collect::>(), vec![ + Ok((Position::default(), + Token::CounterVisibility(CounterVisibility::NoMax), + Position { column: 4, ..Default::default() } + )), ]); // HiddenMax's CounterVisibility Token - assert_eq!(Lexer::from(b"internal".to_vec()).collect::>(), vec![ - Token::CounterVisibility(CounterVisibility::HiddenMax) + assert_eq!(Lexer::from(b"internal".to_vec()).collect::>(), vec![ + Ok((Position::default(), + Token::CounterVisibility(CounterVisibility::HiddenMax), + Position { column: 8, ..Default::default() } + )), ]); // Base's Token - assert_eq!(Lexer::from(b"base".to_vec()).collect::>(), vec![ - Token::Base + assert_eq!(Lexer::from(b"base".to_vec()).collect::>(), vec![ + Ok((Position::default(), + Token::Base, + Position { column: 4, ..Default::default() } + )), ]); // item_type's SetDefKey Token - assert_eq!(Lexer::from(b"item_type".to_vec()).collect::>(), vec![ - Token::SetDefKey(SetDefKey::ItemType) + assert_eq!(Lexer::from(b"item_type".to_vec()).collect::>(), vec![ + Ok((Position::default(), + Token::SetDefKey(SetDefKey::ItemType), + Position { column: 9, ..Default::default() } + )), ]); // NewObjs's SetDefKey Token - assert_eq!(Lexer::from(b"new_objs".to_vec()).collect::>(), vec![ - Token::SetDefKey(SetDefKey::NewObjs) + assert_eq!(Lexer::from(b"new_objs".to_vec()).collect::>(), vec![ + Ok((Position::default(), + Token::SetDefKey(SetDefKey::NewObjs), + Position { column: 8, ..Default::default() } + )), ]); // IdType's SetDefKey 
Token - assert_eq!(Lexer::from(b"id_type".to_vec()).collect::>(), vec![ - Token::SetDefKey(SetDefKey::IdType) + assert_eq!(Lexer::from(b"id_type".to_vec()).collect::>(), vec![ + Ok((Position::default(), + Token::SetDefKey(SetDefKey::IdType), + Position { column: 7, ..Default::default() } + )), ]); // ItemGetter's SetDefKey Token - assert_eq!(Lexer::from(b"item_getter".to_vec()).collect::>(), vec![ - Token::SetDefKey(SetDefKey::ItemGetter) + assert_eq!(Lexer::from(b"item_getter".to_vec()).collect::>(), vec![ + Ok((Position::default(), + Token::SetDefKey(SetDefKey::ItemGetter), + Position { column: 11, ..Default::default() } + )), ]); // IdGetter's SetDefKey Token - assert_eq!(Lexer::from(b"id_getter".to_vec()).collect::>(), vec![ - Token::SetDefKey(SetDefKey::IdGetter) + assert_eq!(Lexer::from(b"id_getter".to_vec()).collect::>(), vec![ + Ok((Position::default(), + Token::SetDefKey(SetDefKey::IdGetter), + Position { column: 9, ..Default::default() } + )), ]); // Iter's SetDefKey Token - assert_eq!(Lexer::from(b"iterator".to_vec()).collect::>(), vec![ - Token::SetDefKey(SetDefKey::Iter) + assert_eq!(Lexer::from(b"iterator".to_vec()).collect::>(), vec![ + Ok((Position::default(), + Token::SetDefKey(SetDefKey::Iter), + Position { column: 8, ..Default::default() } + )), ]); // Prefix's SetDefKey Token - assert_eq!(Lexer::from(b"var_prefix".to_vec()).collect::>(), vec![ - Token::SetDefKey(SetDefKey::Prefix) + assert_eq!(Lexer::from(b"var_prefix".to_vec()).collect::>(), vec![ + Ok((Position::default(), + Token::SetDefKey(SetDefKey::Prefix), + Position { column: 10, ..Default::default() } + )), ]); // Reverse's SetDefKey Token - assert_eq!(Lexer::from(b"reverse".to_vec()).collect::>(), vec![ - Token::SetDefKey(SetDefKey::Reverse) + assert_eq!(Lexer::from(b"reverse".to_vec()).collect::>(), vec![ + Ok((Position::default(), + Token::SetDefKey(SetDefKey::Reverse), + Position { column: 7, ..Default::default() } + )), ]); // AddToSet's SetDefKey Token - 
assert_eq!(Lexer::from(b"add_to_set".to_vec()).collect::>(), vec![ - Token::SetDefKey(SetDefKey::AddToSet) + assert_eq!(Lexer::from(b"add_to_set".to_vec()).collect::>(), vec![ + Ok((Position::default(), + Token::SetDefKey(SetDefKey::AddToSet), + Position { column: 10, ..Default::default() } + )), ]); // FromSuperset's SetDefKey Token - assert_eq!(Lexer::from(b"from_superset".to_vec()).collect::>(), vec![ - Token::SetDefKey(SetDefKey::FromSuperset) + assert_eq!(Lexer::from(b"from_superset".to_vec()).collect::>(), vec![ + Ok((Position::default(), + Token::SetDefKey(SetDefKey::FromSuperset), + Position { column: 13, ..Default::default() } + )), ]); // Set's Token - assert_eq!(Lexer::from(b"set".to_vec()).collect::>(), vec![ - Token::Set + assert_eq!(Lexer::from(b"set".to_vec()).collect::>(), vec![ + Ok((Position::default(), + Token::Set, + Position { column: 3, ..Default::default() } + )), ]); // SubsetOf's Token - assert_eq!(Lexer::from(b"subsetof".to_vec()).collect::>(), vec![ - Token::SubsetOf + assert_eq!(Lexer::from(b"subsetof".to_vec()).collect::>(), vec![ + Ok((Position::default(), + Token::SubsetOf, + Position { column: 8, ..Default::default() } + )), ]); // Disjoint's Token - assert_eq!(Lexer::from(b"disjoint".to_vec()).collect::>(), vec![ - Token::Disjoint + assert_eq!(Lexer::from(b"disjoint".to_vec()).collect::>(), vec![ + Ok((Position::default(), + Token::Disjoint, + Position { column: 8, ..Default::default() } + )), ]); // Quotient's Token - assert_eq!(Lexer::from(b"quotient".to_vec()).collect::>(), vec![ - Token::Quotient + assert_eq!(Lexer::from(b"quotient".to_vec()).collect::>(), vec![ + Ok((Position::default(), + Token::Quotient, + Position { column: 8, ..Default::default() } + )), ]); // Of's Token - assert_eq!(Lexer::from(b"of".to_vec()).collect::>(), vec![ - Token::Of + assert_eq!(Lexer::from(b"of".to_vec()).collect::>(), vec![ + Ok((Position::default(), + Token::Of, + Position { column: 2, ..Default::default() } + )), ]); // False's Bool Token - 
assert_eq!(Lexer::from(b"false".to_vec()).collect::>(), vec![ - Token::Bool(false) + assert_eq!(Lexer::from(b"false".to_vec()).collect::>(), vec![ + Ok((Position::default(), + Token::Bool(false), + Position { column: 5, ..Default::default() } + )), ]); // True's Bool Token - assert_eq!(Lexer::from(b"true".to_vec()).collect::>(), vec![ - Token::Bool(true) + assert_eq!(Lexer::from(b"true".to_vec()).collect::>(), vec![ + Ok((Position::default(), + Token::Bool(true), + Position { column: 4, ..Default::default() } + )), ]); // Colon's Token - assert_eq!(Lexer::from(b":".to_vec()).collect::>(), vec![ - Token::Colon + assert_eq!(Lexer::from(b":".to_vec()).collect::>(), vec![ + Ok((Position::default(), + Token::Colon, + Position { column: 1, ..Default::default() } + )), ]); // Comma's Token - assert_eq!(Lexer::from(b",".to_vec()).collect::>(), vec![ - Token::Comma + assert_eq!(Lexer::from(b",".to_vec()).collect::>(), vec![ + Ok((Position::default(), + Token::Comma, + Position { column: 1, ..Default::default() } + )), ]); // LParen's Token - assert_eq!(Lexer::from(b"(".to_vec()).collect::>(), vec![ - Token::LParen + assert_eq!(Lexer::from(b"(".to_vec()).collect::>(), vec![ + Ok((Position::default(), + Token::LParen, + Position { column: 1, ..Default::default() } + )), ]); // RParen's Token - assert_eq!(Lexer::from(b")".to_vec()).collect::>(), vec![ - Token::RParen + assert_eq!(Lexer::from(b")".to_vec()).collect::>(), vec![ + Ok((Position::default(), + Token::RParen, + Position { column: 1, ..Default::default() } + )), ]); // Bitor's Token - assert_eq!(Lexer::from(b"|".to_vec()).collect::>(), vec![ - Token::BitOr + assert_eq!(Lexer::from(b"|".to_vec()).collect::>(), vec![ + Ok((Position::default(), + Token::BitOr, + Position { column: 1, ..Default::default() } + )), ]); // Or's Token - assert_eq!(Lexer::from(b"||".to_vec()).collect::>(), vec![ - Token::Or + assert_eq!(Lexer::from(b"||".to_vec()).collect::>(), vec![ + Ok((Position::default(), + Token::Or, + Position { column: 
2, ..Default::default() } + )), ]); // And's Token - assert_eq!(Lexer::from(b"&&".to_vec()).collect::>(), vec![ - Token::And + assert_eq!(Lexer::from(b"&&".to_vec()).collect::>(), vec![ + Ok((Position::default(), + Token::And, + Position { column: 2, ..Default::default() } + )), ]); // Gt's CmpOp Token - assert_eq!(Lexer::from(b">".to_vec()).collect::>(), vec![ - Token::CmpOp(CmpOp::Gt) + assert_eq!(Lexer::from(b">".to_vec()).collect::>(), vec![ + Ok((Position::default(), + Token::CmpOp(CmpOp::Gt), + Position { column: 1, ..Default::default() } + )), ]); // Lt's CmpOp Token - assert_eq!(Lexer::from(b"<".to_vec()).collect::>(), vec![ - Token::CmpOp(CmpOp::Lt) + assert_eq!(Lexer::from(b"<".to_vec()).collect::>(), vec![ + Ok((Position::default(), + Token::CmpOp(CmpOp::Lt), + Position { column: 1, ..Default::default() } + )), ]); // Ge's CmpOp Token - assert_eq!(Lexer::from(b">=".to_vec()).collect::>(), vec![ - Token::CmpOp(CmpOp::Geq) + assert_eq!(Lexer::from(b">=".to_vec()).collect::>(), vec![ + Ok((Position::default(), + Token::CmpOp(CmpOp::Geq), + Position { column: 2, ..Default::default() } + )), ]); // Le's CmpOp Token - assert_eq!(Lexer::from(b"<=".to_vec()).collect::>(), vec![ - Token::CmpOp(CmpOp::Leq) + assert_eq!(Lexer::from(b"<=".to_vec()).collect::>(), vec![ + Ok((Position::default(), + Token::CmpOp(CmpOp::Leq), + Position { column: 2, ..Default::default() } + )), ]); // Eq's CmpOp Token - assert_eq!(Lexer::from(b"==".to_vec()).collect::>(), vec![ - Token::CmpOp(CmpOp::Eq) + assert_eq!(Lexer::from(b"==".to_vec()).collect::>(), vec![ + Ok((Position::default(), + Token::CmpOp(CmpOp::Eq), + Position { column: 2, ..Default::default() } + )), ]); // Neq's CmpOp Token - assert_eq!(Lexer::from(b"!=".to_vec()).collect::>(), vec![ - Token::CmpOp(CmpOp::Neq) + assert_eq!(Lexer::from(b"!=".to_vec()).collect::>(), vec![ + Ok((Position::default(), + Token::CmpOp(CmpOp::Neq), + Position { column: 2, ..Default::default() } + )), ]); // Equal's Token - 
assert_eq!(Lexer::from(b"=".to_vec()).collect::>(), vec![ - Token::Equal + assert_eq!(Lexer::from(b"=".to_vec()).collect::>(), vec![ + Ok((Position::default(), + Token::Equal, + Position { column: 1, ..Default::default() } + )), ]); // End's Token - assert_eq!(Lexer::from(b"end".to_vec()).collect::>(), vec![ - Token::End + assert_eq!(Lexer::from(b"end".to_vec()).collect::>(), vec![ + Ok((Position::default(), + Token::End, + Position { column: 3, ..Default::default() } + )), ]); // Symmetric's Token - assert_eq!(Lexer::from(b"symmetric".to_vec()).collect::>(), vec![ - Token::Symmetric + assert_eq!(Lexer::from(b"symmetric".to_vec()).collect::>(), vec![ + Ok((Position::default(), + Token::Symmetric, + Position { column: 9, ..Default::default() } + )), ]); // AntiSymmetric's Token - assert_eq!(Lexer::from(b"antisymmetric".to_vec()).collect::>(), vec![ - Token::AntiSymmetric + assert_eq!(Lexer::from(b"antisymmetric".to_vec()).collect::>(), vec![ + Ok((Position::default(), + Token::AntiSymmetric, + Position { column: 13, ..Default::default() } + )), ]); // Arrow's Token - assert_eq!(Lexer::from(b"->".to_vec()).collect::>(), vec![ - Token::Arrow + assert_eq!(Lexer::from(b"->".to_vec()).collect::>(), vec![ + Ok((Position::default(), + Token::Arrow, + Position { column: 2, ..Default::default() } + )), ]); // Divide's Token - assert_eq!(Lexer::from(b"/".to_vec()).collect::>(), vec![ - Token::Divide + assert_eq!(Lexer::from(b"/".to_vec()).collect::>(), vec![ + Ok((Position::default(), + Token::Divide, + Position { column: 1, ..Default::default() } + )), ]); } #[test] fn comment_mode() { // C_COMMENT's Token - assert_eq!(Lexer::from(b"/* comment */ ".to_vec()).collect::>(), vec![]); - assert_eq!(Lexer::from(b"/* comment \n comment */ ".to_vec()).collect::>(), vec![]); + assert_eq!(Lexer::from(b"/* comment */ ".to_vec()).collect::>(), vec![]); + assert_eq!(Lexer::from(b"/* comment \n comment */ ".to_vec()).collect::>(), vec![]); + + assert_eq!(Lexer::from(b"| /* comment */ 
|".to_vec()).collect::>(), vec![ + Ok((Position::default(), + Token::BitOr, + Position { column: 1, ..Default::default() } + )), + Ok((Position { column: 16, ..Default::default() }, + Token::BitOr, + Position { column: 17, ..Default::default() } + )), + ]); + assert_eq!(Lexer::from(b"| /* comment \n comment */ |".to_vec()).collect::>(), vec![ + Ok((Position::default(), + Token::BitOr, + Position { column: 1, ..Default::default() } + )), + Ok((Position { column: 26, line: 1 }, + Token::BitOr, + Position { column: 27, line: 1 } + )), + ]); } #[test] fn doc_mode() { // Outer Line Doc's Token - assert_eq!(Lexer::from(b"/// comment".to_vec()).collect::>(), vec![ - Token::Doc(String::from(" comment")) + assert_eq!(Lexer::from(b"/// comment ".to_vec()).collect::>(), vec![ + Ok((Position { column: 0, ..Default::default() }, + Token::Doc(String::from(" comment ")), + Position { column: 12, ..Default::default() } + )), + ]); + assert_eq!(Lexer::from(b" /// comment ".to_vec()).collect::>(), vec![ + Ok((Position { column: 1, ..Default::default() }, + Token::Doc(String::from(" comment ")), + Position { column: 13, ..Default::default() } + )), ]); // Outer Line MultiDoc's Token - assert_eq!(Lexer::from(b"/// comment \n /// comment".to_vec()).collect::>(), vec![ - Token::Doc(String::from(" comment ")), - Token::Doc(String::from(" comment")) + assert_eq!(Lexer::from(b"/// comment \n /// comment ".to_vec()).collect::>(), vec![ + Ok((Position { column: 0, ..Default::default() }, + Token::Doc(String::from(" comment ")), + Position { column: 12, ..Default::default() }, + )), + Ok((Position { column: 1, line: 1 }, + Token::Doc(String::from(" comment ")), + Position { column: 13, line: 1 } + )), ]); // Line Comment Doc's Token - assert_eq!(Lexer::from(b"// comment".to_vec()).collect::>(), vec![ - ]); + assert_eq!(Lexer::from(b"// comment".to_vec()).collect::>(), vec![]); // Line Comment MultiDoc's Token - assert_eq!(Lexer::from(b"// comment \n // comment".to_vec()).collect::>(), vec![ - 
]); + assert_eq!(Lexer::from(b"// comment \n // comment".to_vec()).collect::>(), vec![]); } diff --git a/telamon-gen/tests/parser.rs b/telamon-gen/tests/parser.rs new file mode 100644 index 000000000..f200096fa --- /dev/null +++ b/telamon-gen/tests/parser.rs @@ -0,0 +1,31 @@ +extern crate telamon_gen; +extern crate lalrpop_util; + +use telamon_gen::lexer::{Lexer, Token, LexicalError, Position}; +use telamon_gen::parser; +use telamon_gen::error; + +use lalrpop_util::ParseError; + +use std::path::Path; + +#[test] +fn invalid_token() { + assert_eq!(parser::parse_ast(Lexer::from(b"!".to_vec())).err(), Some( + ParseError::User { + error: LexicalError::InvalidToken( + Position::default(), + Token::InvalidToken(String::from("!")), + Position { column: 1, ..Default::default() } + ), + } + )); + + assert_eq!(format!("{}", + parser::parse_ast(Lexer::from(b"!".to_vec())) + .map_err(|c| + error::ProcessError::from( + (Path::new("exh").display(), c))) + .err().unwrap()), + "InvalidToken(\"!\"), between line 0, column 0 and line 0, column 1 -> exh"); +} diff --git a/telamon-utils/src/lib.rs b/telamon-utils/src/lib.rs index 3b0aec896..173fed533 100644 --- a/telamon-utils/src/lib.rs +++ b/telamon-utils/src/lib.rs @@ -173,6 +173,12 @@ macro_rules! generated_file { mod $name { include!(concat!(env!("OUT_DIR"), "/", stringify!($name), ".rs")); } + }; + (pub $name:ident) => { + #[cfg_attr(feature = "cargo-clippy", allow(clippy))] + pub mod $name { + include!(concat!(env!("OUT_DIR"), "/", stringify!($name), ".rs")); + } } }