Use val/var again instead of let/mut
parent 8a039ffc64
commit b0606195f5
@@ -41,7 +41,7 @@
 - [ ] Parse bindings and function declarations as top level constructs
 - [ ] Parse function declaration arguments (`Type id`)
 - [ ] Parse function return datatype (`fun f() -> Type`)
-- [ ] Return parsing to variables to var/val
+- [x] Return parsing to variables to var/val
 - [ ] Write tests


@@ -4,8 +4,8 @@ use crate::lexic::{token::Token, utils, LexResult};
 /// Checks if a String is a keyword, and returns its TokenType
 fn str_is_keyword(s: &String) -> Option<TokenType> {
     match s.as_str() {
-        "let" => Some(TokenType::LET),
-        "mut" => Some(TokenType::MUT),
+        "val" => Some(TokenType::VAL),
+        "var" => Some(TokenType::VAR),
         "fun" => Some(TokenType::FUN),
         _ => None,
     }
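For context, here is a minimal standalone sketch of how a keyword table like this is typically consulted once a word has been scanned. The enum and the fallback-to-identifier behaviour below are simplified stand-ins for illustration, not the crate's actual types; only the `val`/`var`/`fun` entries come from this diff.

```rust
// Standalone sketch (not the crate's real types): a scanned word is first
// checked against the keyword table, otherwise treated as an identifier.
// The Identifier fallback is an assumption, not taken from this diff.
#[derive(Debug, PartialEq)]
enum TokenType {
    VAL,
    VAR,
    FUN,
    Identifier,
}

fn str_is_keyword(s: &str) -> Option<TokenType> {
    match s {
        "val" => Some(TokenType::VAL),
        "var" => Some(TokenType::VAR),
        "fun" => Some(TokenType::FUN),
        _ => None,
    }
}

fn classify(word: &str) -> TokenType {
    // Anything not in the keyword table falls back to an identifier.
    str_is_keyword(word).unwrap_or(TokenType::Identifier)
}

fn main() {
    assert_eq!(TokenType::VAL, classify("val"));
    assert_eq!(TokenType::VAR, classify("var"));
    // "let" and "mut" are removed from the table by this commit.
    assert_eq!(TokenType::Identifier, classify("let"));
    println!("keyword lookup sketch ok");
}
```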
@@ -141,23 +141,23 @@ mod tests {
     // Should scan keywords
     #[test]
     fn test_4() {
-        let input = str_to_vec("mut");
+        let input = str_to_vec("var");
         let start_pos = 0;
         if let LexResult::Some(token, next) = scan(*input.get(0).unwrap(), &input, start_pos) {
             assert_eq!(3, next);
-            assert_eq!(TokenType::MUT, token.token_type);
-            assert_eq!("mut", token.value);
+            assert_eq!(TokenType::VAR, token.token_type);
+            assert_eq!("var", token.value);
             assert_eq!(0, token.position);
         } else {
             panic!()
         }

-        let input = str_to_vec("let");
+        let input = str_to_vec("val");
         let start_pos = 0;
         if let LexResult::Some(token, next) = scan(*input.get(0).unwrap(), &input, start_pos) {
             assert_eq!(3, next);
-            assert_eq!(TokenType::LET, token.token_type);
-            assert_eq!("let", token.value);
+            assert_eq!(TokenType::VAL, token.token_type);
+            assert_eq!("val", token.value);
             assert_eq!(0, token.position);
         } else {
             panic!()
@@ -16,8 +16,8 @@ pub enum TokenType {
     Comma,
     INDENT,
     DEDENT,
-    LET,
-    MUT,
+    VAL,
+    VAR,
     EOF,
     FUN,
 }
@@ -32,7 +32,7 @@ impl SemanticCheck for TopLevelDeclaration<'_> {
                     error_start: binding.identifier.position,
                     error_end: binding.identifier.get_end_position(),
                     reason: format!(
-                        "Duplicated function: A function with name {} was already defined",
+                        "Duplicated: A symbol with name {} was already defined",
                         binding_name
                     ),
                 };
@@ -72,7 +72,7 @@ impl SemanticCheck for TopLevelDeclaration<'_> {
                     error_start: function.identifier.position,
                     error_end: function.identifier.get_end_position(),
                     reason: format!(
-                        "Duplicated function: A function with name {} was already defined",
+                        "Duplicated: A symbol with name {} was already defined",
                         function_name
                     ),
                 };
@@ -14,16 +14,15 @@ pub fn try_parse<'a>(tokens: &'a Vec<Token>, pos: usize) -> ParsingResult<Bindin
      * let keyword
      */
    let (is_mutable, binding_token, next_pos) = {
-        let let_token = parse_token_type(tokens, current_pos, TokenType::LET);
-        match let_token {
-            Ok((let_token, next_let)) => {
-                let mut_token = parse_token_type(tokens, next_let, TokenType::MUT);
-                match mut_token {
-                    Ok((_mut_token, next_mut)) => (true, let_token, next_mut),
-                    _ => (false, let_token, next_let),
+        match parse_token_type(tokens, current_pos, TokenType::VAL) {
+            Ok((val_token, next_pos)) => (false, val_token, next_pos),
+            _ => {
+                // If VAL is not found, search for VAR
+                match parse_token_type(tokens, current_pos, TokenType::VAR) {
+                    Ok((var_token, next_pos)) => (true, var_token, next_pos),
+                    _ => return Err(ParsingError::Unmatched),
                 }
             }
-            _ => return Err(ParsingError::Unmatched),
         }
    };
    current_pos = next_pos;
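The rewritten block first tries to read a `VAL` token and, failing that, a `VAR` token: `val` produces an immutable binding, `var` a mutable one, and anything else yields `ParsingError::Unmatched` so other parsers can take over. Below is a minimal sketch of that decision, with the two `parse_token_type` calls collapsed into one match and simplified stand-in types (not the crate's real API).

```rust
// Simplified stand-ins for the crate's TokenType / ParsingError, for illustration only.
#[derive(Debug, Clone, Copy, PartialEq)]
enum TokenType {
    VAL,
    VAR,
    Identifier,
}

#[derive(Debug, PartialEq)]
enum ParsingError {
    Unmatched,
}

/// Mirrors the new logic: VAL => immutable binding, VAR => mutable binding,
/// anything else => Unmatched (so the caller can try other constructs).
fn binding_mutability(first_token: TokenType) -> Result<bool, ParsingError> {
    match first_token {
        TokenType::VAL => Ok(false), // `val x = ...` is immutable
        TokenType::VAR => Ok(true),  // `var x = ...` is mutable
        _ => Err(ParsingError::Unmatched),
    }
}

fn main() {
    assert_eq!(Ok(false), binding_mutability(TokenType::VAL));
    assert_eq!(Ok(true), binding_mutability(TokenType::VAR));
    assert_eq!(Err(ParsingError::Unmatched), binding_mutability(TokenType::Identifier));
}
```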
@@ -112,7 +111,7 @@ mod tests {

     #[test]
     fn should_parse_val_binding() {
-        let tokens = get_tokens(&String::from("let identifier = 20")).unwrap();
+        let tokens = get_tokens(&String::from("val identifier = 20")).unwrap();
         let Ok((binding, _)) = try_parse(&tokens, 0) else {
             panic!()
         };
@@ -122,11 +121,11 @@ mod tests {

     #[test]
     fn should_parse_val() {
-        let tokens = get_tokens(&String::from("let")).unwrap();
-        let (token, _) = parse_token_type(&tokens, 0, TokenType::LET).unwrap();
+        let tokens = get_tokens(&String::from("val")).unwrap();
+        let (token, _) = parse_token_type(&tokens, 0, TokenType::VAL).unwrap();

-        assert_eq!(TokenType::LET, token.token_type);
-        assert_eq!("let", token.value);
+        assert_eq!(TokenType::VAL, token.token_type);
+        assert_eq!("val", token.value);
     }

     #[test]
@@ -168,8 +167,8 @@ mod tests {

     #[test]
     fn should_return_correct_error() {
-        let tokens = get_tokens(&String::from("let")).unwrap();
-        assert_eq!(TokenType::LET, tokens[0].token_type);
+        let tokens = get_tokens(&String::from("val")).unwrap();
+        assert_eq!(TokenType::VAL, tokens[0].token_type);
         assert_eq!(0, tokens[0].position);
         let binding = try_parse(&tokens, 0);

@@ -184,8 +183,8 @@ mod tests {

     #[test]
     fn should_return_error_when_identifier_is_wrong() {
-        let tokens = get_tokens(&String::from("let 322")).unwrap();
-        assert_eq!(TokenType::LET, tokens[0].token_type);
+        let tokens = get_tokens(&String::from("val 322")).unwrap();
+        assert_eq!(TokenType::VAL, tokens[0].token_type);
         assert_eq!(0, tokens[0].position);
         let binding = try_parse(&tokens, 0);

@@ -197,7 +196,7 @@ mod tests {
             _ => panic!("Error expected"),
         }

-        let tokens = get_tokens(&String::from("let \"hello\"")).unwrap();
+        let tokens = get_tokens(&String::from("val \"hello\"")).unwrap();
         let binding = try_parse(&tokens, 0);

         match binding {
@@ -211,7 +210,7 @@ mod tests {

     #[test]
     fn should_return_error_when_equal_op_is_wrong() {
-        let tokens = get_tokens(&String::from("let id \"error\"")).unwrap();
+        let tokens = get_tokens(&String::from("val id \"error\"")).unwrap();
         let binding = try_parse(&tokens, 0);

         match binding {
@@ -16,6 +16,7 @@ simple function calls, and then implement other features top down
 ```ebnf
 top level statement = expression
                     | function declaration
+                    | binding
 ```


@@ -81,6 +81,22 @@ fn next_construct<'a>(
         _ => {}
     }

+    // Try to parse a binding
+    match binding::try_parse(tokens, current_pos) {
+        Ok((binding, next_pos)) => return Ok((TopLevelDeclaration::Binding(binding), next_pos)),
+        Err(ParsingError::Err(err)) => return Err(ParsingError::Err(err)),
+        _ => {}
+    }
+
+    // Try to parse an expression
+    match expression::try_parse(tokens, current_pos) {
+        Ok((expression, next_pos)) => {
+            return Ok((TopLevelDeclaration::Expression(expression), next_pos))
+        }
+        Err(ParsingError::Err(err)) => return Err(ParsingError::Err(err)),
+        _ => {}
+    }
+
     // No top level construct was found, return unmatched
     Err(ParsingError::Unmatched)
 }
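The added block follows the same pattern as the rest of the parser: try each top level construct in order, return on the first match, propagate hard errors, and keep falling through on `Unmatched`. A self-contained sketch of that pattern, with toy parsers standing in for `binding::try_parse` and `expression::try_parse` (none of the names below are the crate's real API):

```rust
// Toy stand-ins: each "parser" either matches, fails without consuming
// input (Unmatched), or reports a hard error (Err).
#[derive(Debug)]
enum ParsingError {
    Unmatched,
    Err(String),
}

type ParsingResult<T> = Result<(T, usize), ParsingError>;

fn try_binding(input: &str, pos: usize) -> ParsingResult<String> {
    if input[pos..].starts_with("val") || input[pos..].starts_with("var") {
        Ok((format!("Binding({})", &input[pos..]), input.len()))
    } else {
        Err(ParsingError::Unmatched)
    }
}

fn try_expression(input: &str, pos: usize) -> ParsingResult<String> {
    Ok((format!("Expression({})", &input[pos..]), input.len()))
}

/// Same shape as the new `next_construct` block: bindings are tried before
/// expressions, hard errors are propagated, Unmatched falls through.
fn next_construct(input: &str, pos: usize) -> ParsingResult<String> {
    match try_binding(input, pos) {
        Ok(ok) => return Ok(ok),
        Err(ParsingError::Err(e)) => return Err(ParsingError::Err(e)),
        _ => {}
    }
    match try_expression(input, pos) {
        Ok(ok) => return Ok(ok),
        Err(ParsingError::Err(e)) => return Err(ParsingError::Err(e)),
        _ => {}
    }
    Err(ParsingError::Unmatched)
}

fn main() {
    println!("{:?}", next_construct("val x = 20", 0)); // parsed as a binding
    println!("{:?}", next_construct("f()", 0));        // falls through to expression
}
```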
@@ -49,7 +49,7 @@ mod tests {

     #[test]
     fn should_parse_binding() {
-        let input = String::from("let identifier = 20");
+        let input = String::from("val identifier = 20");
         let tokens = crate::lexic::get_tokens(&input).unwrap();
         let statement = try_parse(&tokens, 0);
