diff --git a/src/syntax/functions/params_list.rs b/src/syntax/functions/params_list.rs index ecb238e..9ded90c 100644 --- a/src/syntax/functions/params_list.rs +++ b/src/syntax/functions/params_list.rs @@ -4,7 +4,10 @@ use crate::{ ErrorContainer, ErrorLabel, }, lexic::token::{Token, TokenType}, - syntax::{utils::parse_token_type, ParsingError, ParsingResult}, + syntax::{ + utils::{parse_token_type, Tokenizer}, + ParsingError, ParsingResult, + }, }; use super::super::{ @@ -93,14 +96,15 @@ pub fn parse_params_list(tokens: &Vec, pos: usize) -> ParsingResult, pos: usize) -> ParsingResult { + assert_eq!(SYNTAX_INCOMPLETE_PARAMETER_LIST, err.error_code); + assert_eq!(err.error_offset, 4); + + let label = &err.labels[0]; + assert_eq!(label.message, "The parameter list starts here"); + assert_eq!(label.start, 3); + assert_eq!(label.end, 4); + + let label = &err.labels[1]; + assert_eq!( + label.message, + "The code ends here without closing the parameter list" + ); + assert_eq!(label.start, 4); + assert_eq!(label.end, 5); + } + _ => panic!("Expected a ParsingError::Err"), + } + } + + #[test] + fn should_fail_on_invalid_params_closing() { + let tokens = get_tokens(&String::from(" ( &")).unwrap(); + let result = parse_params_list(&tokens, 0); + + match result { + Err(ParsingError::Err(err)) => { + assert_eq!(SYNTAX_INCOMPLETE_PARAMETER_LIST, err.error_code); + assert_eq!(err.error_offset, 7); + + let label = &err.labels[0]; + assert_eq!(label.message, "The parameter list starts here"); + assert_eq!(label.start, 3); + assert_eq!(label.end, 4); + + let label = &err.labels[1]; + assert_eq!(label.message, "Expected a closing paren `)` here"); + assert_eq!(label.start, 7); + assert_eq!(label.end, 8); + } + _ => panic!("Expected a ParsingError::Err"), + } + } + + #[test] + fn should_fail_on_invalid_params_closing_2() { + let tokens = get_tokens(&String::from(" ( Int i &")).unwrap(); + let result = parse_params_list(&tokens, 0); + + match result { + Err(ParsingError::Err(err)) => { 
+                assert_eq!(SYNTAX_INCOMPLETE_PARAMETER_LIST, err.error_code);
+                assert_eq!(err.error_offset, 13);
+
+                let label = &err.labels[0];
+                assert_eq!(label.message, "The parameter list starts here");
+                assert_eq!(label.start, 3);
+                assert_eq!(label.end, 4);
+
+                let label = &err.labels[1];
+                assert_eq!(label.message, "Expected a closing paren `)` here");
+                assert_eq!(label.start, 13);
+                assert_eq!(label.end, 14);
+            }
+            _ => panic!("Expected a ParsingError::Err"),
+        }
+    }
+}
+
+#[cfg(test)]
+mod params_tests {
+    use super::*;
+    use crate::lexic::get_tokens;
+
+    #[test]
+    fn should_fail_on_missing_identifier() {
+        let tokens = get_tokens(&String::from(" Int ")).unwrap();
+        let result = parse_param_definition(&tokens, 0);
+
+        match result {
+            Err(ParsingError::Err(err)) => {
+                assert_eq!(SYNTAX_INVALID_PARAMETER_DECLARATION, err.error_code);
+                assert_eq!(err.error_offset, 5);
+
+                let label = &err.labels[0];
+                assert_eq!(label.message, "Expected an identifier after this datatype");
+                assert_eq!(label.start, 2);
+                assert_eq!(label.end, 5);
+            }
+            _ => panic!("Expected a ParsingError::Err"),
+        }
+    }
+
+    #[test]
+    fn should_fail_on_wrong_identifier() {
+        let tokens = get_tokens(&String::from(" Int 322")).unwrap();
+        let result = parse_param_definition(&tokens, 0);
+
+        match result {
+            Err(ParsingError::Err(err)) => {
+                assert_eq!(SYNTAX_INVALID_PARAMETER_DECLARATION, err.error_code);
+                assert_eq!(err.error_offset, 6);
+
+                let label = &err.labels[0];
+                assert_eq!(
+                    label.message,
+                    "Expected an identifier here, found this instead"
+                );
+                assert_eq!(label.start, 6);
+                assert_eq!(label.end, 9);
+            }
+            _ => panic!("Expected a ParsingError::Err"),
+        }
+    }
 }
diff --git a/src/syntax/utils.rs b/src/syntax/utils.rs
index f5a1044..9681b4b 100644
--- a/src/syntax/utils.rs
+++ b/src/syntax/utils.rs
@@ -4,6 +4,7 @@ use super::{ParsingError, ParsingResult};
 
 pub trait Tokenizer {
     fn get_significant<'a>(&'a self, index: usize) -> Option<(&'a Token, usize)>;
+    fn code_position_from_idx(&self, idx: usize) -> usize;
 }
 
 impl Tokenizer for Vec<Token> {
@@ -30,6 +31,30 @@ impl Tokenizer for Vec<Token> {
             }
         }
     }
+
+    /// Returns the position in the code from the token idx.
+    ///
+    /// If the token at `idx` exists, returns `tokens[idx].position`.
+    ///
+    /// Otherwise returns `tokens[idx - 1].get_end_position()`
+    fn code_position_from_idx(&self, idx: usize) -> usize {
+        // try to get the token at idx
+        match self.get(idx) {
+            Some(t) if t.token_type == TokenType::EOF => {
+                // If idx points at EOF, return the end position of the previous token
+                // This shouldnt fail
+                self[idx - 1].get_end_position()
+            }
+            Some(t) => t.position,
+            None => {
+                // this should never happen.
+                // the token stream always ends with an EOF token,
+                // and the parser should never be able to go
+                // to a position after that EOF token
+                unreachable!("Compiler error: Tried to get an out of bound token. This means that somewhere a token beyond EOF was requested.")
+            }
+        }
+    }
 }
 
 /// Expects the token at `pos` to be an operator of value `operator`. Doesn't ignore whitespace or newlines