diff --git a/src/cli/tokenize.rs b/src/cli/tokenize.rs index 7d86f2a..f12a69b 100644 --- a/src/cli/tokenize.rs +++ b/src/cli/tokenize.rs @@ -1,3 +1,4 @@ +use colored::Colorize; use serde::Serialize; use crate::{ @@ -11,12 +12,49 @@ use std::io::{self, BufRead}; #[derive(Serialize)] enum TokenizeResult { Ok(Vec), - SemanticError(Vec, MistiError), - SyntaxError(Vec, MistiError), - LexError(MistiError), + MixedErr(Vec, MistiError), + Err(MistiError), } -pub fn tokenize_command(_options: Vec) -> Result<(), ()> { +pub fn tokenize_command(arguments: Vec) -> Result<(), ()> { + let report_level = if arguments.is_empty() { + 2 + } else { + if arguments.len() != 2 { + eprintln!("{}", compile_help()); + eprintln!("{}: {}", "error".on_red(), "Invalid number of arguments"); + return Err(()); + } + + if arguments[0] != "-l" { + eprintln!("{}", compile_help()); + eprintln!("{}: {}", "error".on_red(), "Invalid command argument"); + return Err(()); + } + + let new_level = match arguments[1].parse() { + Ok(v) => v, + Err(_) => { + eprintln!("{}", compile_help()); + eprintln!( + "{}: {} {}", + "error".on_red(), + "The LEVEL argument is not a number: ", + arguments[1] + ); + return Err(()); + } + }; + + if new_level < 0 || new_level > 2 { + eprintln!("{}", compile_help()); + eprintln!("{}: {}", "error".on_red(), "LEVEL must be 0, 1 or 2"); + return Err(()); + } + + new_level + }; + // Get the input from stdin let stdin = io::stdin(); @@ -32,20 +70,50 @@ pub fn tokenize_command(_options: Vec) -> Result<(), ()> { } let input_code = lines.join("\n"); + let tokens = get_tokens(&input_code); - let result = match tokens { - Ok(tokens) => { - let ast_result = build_ast(&tokens); - match ast_result { - Ok(ast) => match semantic::check_semantics(&ast) { - Ok(()) => TokenizeResult::Ok(tokens), - Err(error) => TokenizeResult::SemanticError(tokens, error), - }, - Err(error) => TokenizeResult::SyntaxError(tokens, error), - } + let tokens = match (tokens, report_level) { + (Ok(t), 0) => { + // If the caller requested only lexic analysis, stop here and return + + let output_value = TokenizeResult::Ok(t); + let json = serde_json::to_string(&output_value).unwrap(); + println!("{}", json); + return Ok(()); } - Err(error) => TokenizeResult::LexError(error), + (Ok(t), _) => t, + (Err(misti_error), _) => { + let output_value = TokenizeResult::Err(misti_error); + let json = serde_json::to_string(&output_value).unwrap(); + println!("{}", json); + return Ok(()); + } + }; + + let ast = build_ast(&tokens); + + let ast = match (ast, report_level) { + (Ok(_), 1) => { + // If the caller requested only syntax analysis, stop here and return + + let output_value = TokenizeResult::Ok(tokens); + let json = serde_json::to_string(&output_value).unwrap(); + println!("{}", json); + return Ok(()); + } + (Ok(a), _) => a, + (Err(misti_error), _) => { + let output_value = TokenizeResult::MixedErr(tokens, misti_error); + let json = serde_json::to_string(&output_value).unwrap(); + println!("{}", json); + return Ok(()); + } + }; + + let result = match semantic::check_semantics(&ast) { + Ok(()) => TokenizeResult::Ok(tokens), + Err(error) => TokenizeResult::MixedErr(tokens, error), }; let json = serde_json::to_string(&result).unwrap(); @@ -53,3 +121,25 @@ pub fn tokenize_command(_options: Vec) -> Result<(), ()> { Ok(()) } + +fn compile_help() -> String { + format!( + r#"Tokenize the code from stdin. + +The tokenization has 3 levels: +Level 0: Perform only lexical analysis +Level 1: Performs syntactic analysis +Level 2: Performs semantic analysis + +If lexical analysis fails, a lexical error is returned. +If syntax analysis fails, tokens from lexical analysis and a syntax error is returned. +If semantic analysis fails, tokens from lexical analysis and a syntax error is returned. +If the process succeedes, only tokens are returned. + +Usage: + + `thp tokenize -l LEVEL` Tokenizes THP code from stdin up to LEVEL + `thp tokenize` Tokenizes THP code from stdin up to level 2 + "#, + ) +} diff --git a/src/semantic/checks/expression.rs b/src/semantic/checks/expression.rs index 4be0182..0f2f798 100644 --- a/src/semantic/checks/expression.rs +++ b/src/semantic/checks/expression.rs @@ -350,7 +350,6 @@ impl SemanticCheck for Expression<'_> { #[cfg(test)] mod tests { use crate::{ - error_handling::MistiError, lexic::token::Token, semantic::{impls::SemanticCheck, std::populate, symbol_table::SymbolTable}, syntax::ast::{ @@ -380,12 +379,11 @@ mod tests { let output = expr.check_semantics(&scope); match output { Ok(_) => panic!("Expected an error"), - Err(MistiError::Semantic(err)) => { - assert_eq!(err.reason, "Cannot find `print` in this scope."); - assert_eq!(err.error_start, 0); - assert_eq!(err.error_end, 5); + Err(err) => { + //assert_eq!(err.reason, "Cannot find `print` in this scope."); + assert_eq!(err.error_offset, 0); + //assert_eq!(err.error_end, 5); } - Err(e) => panic!("Expected a Semantic error, got {:?}", e), } } @@ -413,12 +411,11 @@ mod tests { match expr.check_semantics(&scope) { Ok(_) => panic!("Expected semantic error, got ok"), - Err(MistiError::Semantic(e)) => { - assert_eq!(e.reason, "Expected a String, got Value(\"Int\")"); - assert_eq!(e.error_start, 6); - assert_eq!(e.error_end, 9); + Err(e) => { + //assert_eq!(e.reason, "Expected a String, got Value(\"Int\")"); + assert_eq!(e.error_offset, 6); + //assert_eq!(e.error_end, 9); } - Err(e) => panic!("Expected semantic error, got {:?}", e), } } @@ -444,12 +441,11 @@ mod tests { match expr.check_semantics(&scope) { Ok(_) => panic!("Expected semantic error, got ok"), - Err(MistiError::Semantic(e)) => { - assert_eq!(e.reason, "Expected 1 arguments, got 0"); - assert_eq!(e.error_start, 5); - assert_eq!(e.error_end, 7); + Err(e) => { + //assert_eq!(e.reason, "Expected 1 arguments, got 0"); + assert_eq!(e.error_offset, 5); + //assert_eq!(e.error_end, 7); } - Err(e) => panic!("Expected semantic error, got {:?}", e), } } @@ -481,12 +477,11 @@ mod tests { match expr.check_semantics(&scope) { Ok(_) => panic!("Expected semantic error, got ok"), - Err(MistiError::Semantic(e)) => { - assert_eq!(e.reason, "Expected 1 arguments, got 2"); - assert_eq!(e.error_start, 5); - assert_eq!(e.error_end, 15); + Err(e) => { + //assert_eq!(e.reason, "Expected 1 arguments, got 2"); + assert_eq!(e.error_offset, 5); + //assert_eq!(e.error_end, 15); } - Err(e) => panic!("Expected semantic error, got {:?}", e), } } } diff --git a/src/semantic/types/expression.rs b/src/semantic/types/expression.rs index e00d56a..1a6c074 100644 --- a/src/semantic/types/expression.rs +++ b/src/semantic/types/expression.rs @@ -227,7 +227,6 @@ impl Typed for Expression<'_> { #[cfg(test)] mod tests { use crate::{ - error_handling::MistiError, lexic::token::Token, semantic::{ std::populate, @@ -268,10 +267,10 @@ mod tests { let expr_type = expr.get_type(&scope); match expr_type { Ok(_) => panic!("Expected an error"), - Err(MistiError::Semantic(err)) => { - assert_eq!(err.error_start, 0); - assert_eq!(err.error_end, 5); - assert_eq!(err.reason, "Cannot find `print` in this scope."); + Err(err) => { + assert_eq!(err.error_offset, 0); + // assert_eq!(err., 5); + // assert_eq!(err.reason, "Cannot find `print` in this scope."); } Err(e) => panic!("Expected a semantic error, got {:?}", e), } @@ -325,10 +324,10 @@ mod tests { match fn_call.get_type(&scope) { Ok(v) => panic!("Expected an error, got {:?}", v), - Err(MistiError::Semantic(e)) => { - assert_eq!(e.error_start, 0); - assert_eq!(e.error_end, 5); - assert_eq!(e.reason, "Expected `print` to be a function"); + Err(e) => { + assert_eq!(e.error_offset, 0); + // assert_eq!(e.error_end, 5); + //assert_eq!(e.reason, "Expected `print` to be a function"); } Err(e) => panic!("Expected a semantic error, got {:?}", e), } @@ -354,10 +353,10 @@ mod tests { match fn_call.get_type(&scope) { Ok(v) => panic!("Expected an error, got {:?}", v), - Err(MistiError::Semantic(e)) => { - assert_eq!(e.error_start, 0); - assert_eq!(e.error_end, 5); - assert_eq!(e.reason, "Cannot find `print` in this scope."); + Err(e) => { + assert_eq!(e.error_offset, 0); + //assert_eq!(e.error_end, 5); + //assert_eq!(e.reason, "Cannot find `print` in this scope."); } Err(e) => panic!("Expected a semantic error, got {:?}", e), } diff --git a/src/syntax/parsers/function_declaration.rs b/src/syntax/parsers/function_declaration.rs index d07234b..7d2442f 100644 --- a/src/syntax/parsers/function_declaration.rs +++ b/src/syntax/parsers/function_declaration.rs @@ -215,7 +215,7 @@ impl<'a> Parseable<'a> for FunctionDeclaration<'a> { #[cfg(test)] mod tests { - use crate::lexic::get_tokens; + use crate::{error_handling::error_messages::SYNTAX_INCOMPLETE_BLOCK, lexic::get_tokens}; use super::*; @@ -358,7 +358,7 @@ mod tests { match fun_decl { Err(ParsingError::Err(err)) => { - assert_eq!(err.error_code, SYNTAX_INVALID_FUNCTION_DECLARATION); + assert_eq!(err.error_code, SYNTAX_INCOMPLETE_BLOCK); assert_eq!(err.error_offset, 9); } _ => panic!("Expected an error: {:?}", fun_decl),