From f1b15db509ef2c0f237057421ca43e2b78eb85fd Mon Sep 17 00:00:00 2001
From: Araozu
Date: Thu, 1 Aug 2024 20:34:08 -0500
Subject: [PATCH] feat: Add semantic errors to tokenize command output

---
 src/cli/tokenize.rs | 12 ++++++++----
 1 file changed, 8 insertions(+), 4 deletions(-)

diff --git a/src/cli/tokenize.rs b/src/cli/tokenize.rs
index 0691e5b..8fb3706 100644
--- a/src/cli/tokenize.rs
+++ b/src/cli/tokenize.rs
@@ -1,15 +1,14 @@
 use serde::Serialize;
 
 use crate::{
-    error_handling::MistiError,
-    lexic::{get_tokens, token::Token},
-    syntax::build_ast,
+    error_handling::MistiError, lexic::{get_tokens, token::Token}, semantic, syntax::build_ast
 };
 use std::io::{self, BufRead};
 
 #[derive(Serialize)]
 enum TokenizeResult {
     Ok(Vec<Token>),
+    SyntaxOnly(Vec<Token>, MistiError),
     TokensOnly(Vec<Token>, MistiError),
     Err(MistiError),
 }
@@ -36,7 +35,12 @@ pub fn tokenize_command(_options: Vec<String>) -> Result<(), ()> {
         Ok(tokens) => {
             let ast_result = build_ast(&tokens);
             match ast_result {
-                Ok(_) => TokenizeResult::Ok(tokens),
+                Ok(ast) => {
+                    match semantic::check_semantics(&ast) {
+                        Ok(()) => TokenizeResult::Ok(tokens),
+                        Err(error) => TokenizeResult::SyntaxOnly(tokens, error)
+                    }
+                },
                 Err(error) => TokenizeResult::TokensOnly(tokens, error),
             }
         }