feature: add compilation level to tokenize command
parent 9857863220
commit 9225114658
@@ -1,3 +1,4 @@
+use colored::Colorize;
 use serde::Serialize;
 
 use crate::{
@@ -11,12 +12,49 @@ use std::io::{self, BufRead};
 #[derive(Serialize)]
 enum TokenizeResult {
     Ok(Vec<Token>),
-    SemanticError(Vec<Token>, MistiError),
-    SyntaxError(Vec<Token>, MistiError),
-    LexError(MistiError),
+    MixedErr(Vec<Token>, MistiError),
+    Err(MistiError),
 }
 
-pub fn tokenize_command(_options: Vec<String>) -> Result<(), ()> {
+pub fn tokenize_command(arguments: Vec<String>) -> Result<(), ()> {
+    let report_level = if arguments.is_empty() {
+        2
+    } else {
+        if arguments.len() != 2 {
+            eprintln!("{}", compile_help());
+            eprintln!("{}: {}", "error".on_red(), "Invalid number of arguments");
+            return Err(());
+        }
+
+        if arguments[0] != "-l" {
+            eprintln!("{}", compile_help());
+            eprintln!("{}: {}", "error".on_red(), "Invalid command argument");
+            return Err(());
+        }
+
+        let new_level = match arguments[1].parse() {
+            Ok(v) => v,
+            Err(_) => {
+                eprintln!("{}", compile_help());
+                eprintln!(
+                    "{}: {} {}",
+                    "error".on_red(),
+                    "The LEVEL argument is not a number: ",
+                    arguments[1]
+                );
+                return Err(());
+            }
+        };
+
+        if new_level < 0 || new_level > 2 {
+            eprintln!("{}", compile_help());
+            eprintln!("{}: {}", "error".on_red(), "LEVEL must be 0, 1 or 2");
+            return Err(());
+        }
+
+        new_level
+    };
+
     // Get the input from stdin
     let stdin = io::stdin();
 
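
Since invalid `-l LEVEL` arguments are rejected before stdin is ever read, the new validation can be exercised in isolation. Below is a minimal sketch of what such a check could look like, assuming `tokenize_command` is reachable from a test module in the same file; the module path and test names are illustrative, not part of this commit.

#[cfg(test)]
mod level_argument_tests {
    // Assumed import; the diff does not show the module that owns tokenize_command.
    use super::tokenize_command;

    #[test]
    fn rejects_invalid_level_arguments() {
        // Wrong flag name: rejected before stdin is touched.
        assert!(tokenize_command(vec!["-x".into(), "1".into()]).is_err());
        // Missing LEVEL value (exactly two arguments are required).
        assert!(tokenize_command(vec!["-l".into()]).is_err());
        // LEVEL is not a number.
        assert!(tokenize_command(vec!["-l".into(), "two".into()]).is_err());
        // LEVEL out of range: only 0, 1 or 2 are accepted.
        assert!(tokenize_command(vec!["-l".into(), "5".into()]).is_err());
    }
}
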
@@ -32,20 +70,50 @@ pub fn tokenize_command(_options: Vec<String>) -> Result<(), ()> {
     }
 
     let input_code = lines.join("\n");
 
     let tokens = get_tokens(&input_code);
 
-    let result = match tokens {
-        Ok(tokens) => {
-            let ast_result = build_ast(&tokens);
-            match ast_result {
-                Ok(ast) => match semantic::check_semantics(&ast) {
-                    Ok(()) => TokenizeResult::Ok(tokens),
-                    Err(error) => TokenizeResult::SemanticError(tokens, error),
-                },
-                Err(error) => TokenizeResult::SyntaxError(tokens, error),
-            }
-        }
-        Err(error) => TokenizeResult::LexError(error),
+    let tokens = match (tokens, report_level) {
+        (Ok(t), 0) => {
+            // If the caller requested only lexic analysis, stop here and return
+            let output_value = TokenizeResult::Ok(t);
+            let json = serde_json::to_string(&output_value).unwrap();
+            println!("{}", json);
+            return Ok(());
+        }
+        (Ok(t), _) => t,
+        (Err(misti_error), _) => {
+            let output_value = TokenizeResult::Err(misti_error);
+            let json = serde_json::to_string(&output_value).unwrap();
+            println!("{}", json);
+            return Ok(());
+        }
+    };
+
+    let ast = build_ast(&tokens);
+
+    let ast = match (ast, report_level) {
+        (Ok(_), 1) => {
+            // If the caller requested only syntax analysis, stop here and return
+
+            let output_value = TokenizeResult::Ok(tokens);
+            let json = serde_json::to_string(&output_value).unwrap();
+            println!("{}", json);
+            return Ok(());
+        }
+        (Ok(a), _) => a,
+        (Err(misti_error), _) => {
+            let output_value = TokenizeResult::MixedErr(tokens, misti_error);
+            let json = serde_json::to_string(&output_value).unwrap();
+            println!("{}", json);
+            return Ok(());
+        }
+    };
+
+    let result = match semantic::check_semantics(&ast) {
+        Ok(()) => TokenizeResult::Ok(tokens),
+        Err(error) => TokenizeResult::MixedErr(tokens, error),
     };
 
     let json = serde_json::to_string(&result).unwrap();
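
Every early return above serializes a `TokenizeResult` with `serde_json` and prints it. Assuming serde's default externally tagged enum representation (no `#[serde(...)]` attribute appears in this diff), the emitted JSON would be shaped roughly as in this standalone sketch; `Token` and `MistiError` here are simplified stand-ins, not the crate's real definitions.

use serde::Serialize;

// Stand-ins for the crate's real types (assumptions for illustration only).
#[derive(Serialize)]
struct Token {
    value: String,
}

#[derive(Serialize)]
struct MistiError {
    error_offset: usize,
}

// Same variant names as the enum updated in this commit.
#[derive(Serialize)]
enum TokenizeResult {
    Ok(Vec<Token>),
    MixedErr(Vec<Token>, MistiError),
    Err(MistiError),
}

fn main() {
    let ok = TokenizeResult::Ok(vec![Token { value: "val".into() }]);
    let mixed = TokenizeResult::MixedErr(
        vec![Token { value: "val".into() }],
        MistiError { error_offset: 3 },
    );
    let err = TokenizeResult::Err(MistiError { error_offset: 3 });

    // Externally tagged: the variant name becomes the top-level JSON key.
    // {"Ok":[{"value":"val"}]}
    println!("{}", serde_json::to_string(&ok).unwrap());
    // {"MixedErr":[[{"value":"val"}],{"error_offset":3}]}
    println!("{}", serde_json::to_string(&mixed).unwrap());
    // {"Err":{"error_offset":3}}
    println!("{}", serde_json::to_string(&err).unwrap());
}
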
@@ -53,3 +121,25 @@ pub fn tokenize_command(_options: Vec<String>) -> Result<(), ()> {
 
     Ok(())
 }
+
+fn compile_help() -> String {
+    format!(
+        r#"Tokenize the code from stdin.
+
+The tokenization has 3 levels:
+Level 0: Perform only lexical analysis
+Level 1: Performs syntactic analysis
+Level 2: Performs semantic analysis
+
+If lexical analysis fails, a lexical error is returned.
+If syntax analysis fails, tokens from lexical analysis and a syntax error is returned.
+If semantic analysis fails, tokens from lexical analysis and a syntax error is returned.
+If the process succeedes, only tokens are returned.
+
+Usage:
+
+  `thp tokenize -l LEVEL`   Tokenizes THP code from stdin up to LEVEL
+  `thp tokenize`            Tokenizes THP code from stdin up to level 2
+"#,
+    )
+}
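
The help text describes the three levels in terms of which phases run: level 0 stops after lexing, level 1 after parsing, and level 2 runs semantic analysis as well. The gating idiom used in `tokenize_command` can be summarized in a self-contained sketch; the phase functions below are placeholders, not the crate's `get_tokens` / `build_ast` / `semantic::check_semantics`.

// Placeholder phases standing in for the real lexer, parser and semantic checker.
fn lex(src: &str) -> Result<Vec<String>, String> {
    Ok(src.split_whitespace().map(str::to_string).collect())
}

fn parse(tokens: &[String]) -> Result<usize, String> {
    Ok(tokens.len())
}

fn check(_ast: &usize) -> Result<(), String> {
    Ok(())
}

/// Run the pipeline up to `level` (0 = lexical, 1 = syntactic, 2 = semantic),
/// mirroring the match-on-(result, level) early returns in tokenize_command.
fn run_up_to(level: u8, src: &str) -> Result<(), String> {
    let tokens = match (lex(src), level) {
        (Ok(_), 0) => return Ok(()), // level 0: stop after lexical analysis
        (Ok(t), _) => t,
        (Err(e), _) => return Err(e),
    };

    let ast = match (parse(&tokens), level) {
        (Ok(_), 1) => return Ok(()), // level 1: stop after syntactic analysis
        (Ok(a), _) => a,
        (Err(e), _) => return Err(e),
    };

    check(&ast) // level 2: run semantic analysis too
}

fn main() {
    assert!(run_up_to(0, "val x = 1").is_ok());
    assert!(run_up_to(2, "val x = 1").is_ok());
}

In the actual command each early return also serializes and prints the partial result, as shown in the hunk above.
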
@@ -350,7 +350,6 @@ impl SemanticCheck for Expression<'_> {
 #[cfg(test)]
 mod tests {
     use crate::{
-        error_handling::MistiError,
         lexic::token::Token,
         semantic::{impls::SemanticCheck, std::populate, symbol_table::SymbolTable},
         syntax::ast::{
@@ -380,12 +379,11 @@ mod tests {
         let output = expr.check_semantics(&scope);
         match output {
             Ok(_) => panic!("Expected an error"),
-            Err(MistiError::Semantic(err)) => {
-                assert_eq!(err.reason, "Cannot find `print` in this scope.");
-                assert_eq!(err.error_start, 0);
-                assert_eq!(err.error_end, 5);
+            Err(err) => {
+                //assert_eq!(err.reason, "Cannot find `print` in this scope.");
+                assert_eq!(err.error_offset, 0);
+                //assert_eq!(err.error_end, 5);
             }
-            Err(e) => panic!("Expected a Semantic error, got {:?}", e),
         }
     }
 
@@ -413,12 +411,11 @@ mod tests {
 
         match expr.check_semantics(&scope) {
             Ok(_) => panic!("Expected semantic error, got ok"),
-            Err(MistiError::Semantic(e)) => {
-                assert_eq!(e.reason, "Expected a String, got Value(\"Int\")");
-                assert_eq!(e.error_start, 6);
-                assert_eq!(e.error_end, 9);
+            Err(e) => {
+                //assert_eq!(e.reason, "Expected a String, got Value(\"Int\")");
+                assert_eq!(e.error_offset, 6);
+                //assert_eq!(e.error_end, 9);
             }
-            Err(e) => panic!("Expected semantic error, got {:?}", e),
         }
     }
 
@@ -444,12 +441,11 @@ mod tests {
 
         match expr.check_semantics(&scope) {
             Ok(_) => panic!("Expected semantic error, got ok"),
-            Err(MistiError::Semantic(e)) => {
-                assert_eq!(e.reason, "Expected 1 arguments, got 0");
-                assert_eq!(e.error_start, 5);
-                assert_eq!(e.error_end, 7);
+            Err(e) => {
+                //assert_eq!(e.reason, "Expected 1 arguments, got 0");
+                assert_eq!(e.error_offset, 5);
+                //assert_eq!(e.error_end, 7);
             }
-            Err(e) => panic!("Expected semantic error, got {:?}", e),
         }
     }
 
@@ -481,12 +477,11 @@ mod tests {
 
         match expr.check_semantics(&scope) {
             Ok(_) => panic!("Expected semantic error, got ok"),
-            Err(MistiError::Semantic(e)) => {
-                assert_eq!(e.reason, "Expected 1 arguments, got 2");
-                assert_eq!(e.error_start, 5);
-                assert_eq!(e.error_end, 15);
+            Err(e) => {
+                //assert_eq!(e.reason, "Expected 1 arguments, got 2");
+                assert_eq!(e.error_offset, 5);
+                //assert_eq!(e.error_end, 15);
             }
-            Err(e) => panic!("Expected semantic error, got {:?}", e),
         }
     }
 }
@@ -227,7 +227,6 @@ impl Typed for Expression<'_> {
 #[cfg(test)]
 mod tests {
     use crate::{
-        error_handling::MistiError,
         lexic::token::Token,
         semantic::{
            std::populate,
@@ -268,10 +267,10 @@ mod tests {
         let expr_type = expr.get_type(&scope);
         match expr_type {
             Ok(_) => panic!("Expected an error"),
-            Err(MistiError::Semantic(err)) => {
-                assert_eq!(err.error_start, 0);
-                assert_eq!(err.error_end, 5);
-                assert_eq!(err.reason, "Cannot find `print` in this scope.");
+            Err(err) => {
+                assert_eq!(err.error_offset, 0);
+                // assert_eq!(err.error_end, 5);
+                // assert_eq!(err.reason, "Cannot find `print` in this scope.");
             }
             Err(e) => panic!("Expected a semantic error, got {:?}", e),
         }
@@ -325,10 +324,10 @@ mod tests {
 
         match fn_call.get_type(&scope) {
             Ok(v) => panic!("Expected an error, got {:?}", v),
-            Err(MistiError::Semantic(e)) => {
-                assert_eq!(e.error_start, 0);
-                assert_eq!(e.error_end, 5);
-                assert_eq!(e.reason, "Expected `print` to be a function");
+            Err(e) => {
+                assert_eq!(e.error_offset, 0);
+                // assert_eq!(e.error_end, 5);
+                //assert_eq!(e.reason, "Expected `print` to be a function");
             }
             Err(e) => panic!("Expected a semantic error, got {:?}", e),
         }
@@ -354,10 +353,10 @@ mod tests {
 
         match fn_call.get_type(&scope) {
             Ok(v) => panic!("Expected an error, got {:?}", v),
-            Err(MistiError::Semantic(e)) => {
-                assert_eq!(e.error_start, 0);
-                assert_eq!(e.error_end, 5);
-                assert_eq!(e.reason, "Cannot find `print` in this scope.");
+            Err(e) => {
+                assert_eq!(e.error_offset, 0);
+                //assert_eq!(e.error_end, 5);
+                //assert_eq!(e.reason, "Cannot find `print` in this scope.");
             }
             Err(e) => panic!("Expected a semantic error, got {:?}", e),
         }
@@ -215,7 +215,7 @@ impl<'a> Parseable<'a> for FunctionDeclaration<'a> {
 
 #[cfg(test)]
 mod tests {
-    use crate::lexic::get_tokens;
+    use crate::{error_handling::error_messages::SYNTAX_INCOMPLETE_BLOCK, lexic::get_tokens};
 
     use super::*;
 
@@ -358,7 +358,7 @@ mod tests {
 
         match fun_decl {
             Err(ParsingError::Err(err)) => {
-                assert_eq!(err.error_code, SYNTAX_INVALID_FUNCTION_DECLARATION);
+                assert_eq!(err.error_code, SYNTAX_INCOMPLETE_BLOCK);
                 assert_eq!(err.error_offset, 9);
             }
             _ => panic!("Expected an error: {:?}", fun_decl),