Compare commits
2 Commits
8a039ffc64
...
f60992c303
Author | SHA1 | Date | |
---|---|---|---|
f60992c303 | |||
b0606195f5 |
@ -40,8 +40,8 @@
|
|||||||
- [ ] Formally define the top level constructs
|
- [ ] Formally define the top level constructs
|
||||||
- [ ] Parse bindings and function declarations as top level constructs
|
- [ ] Parse bindings and function declarations as top level constructs
|
||||||
- [ ] Parse function declaration arguments (`Type id`)
|
- [ ] Parse function declaration arguments (`Type id`)
|
||||||
- [ ] Parse function return datatype (`fun f() -> Type`)
|
- [x] Parse function return datatype (`fun f() -> Type`)
|
||||||
- [ ] Return variable parsing to var/val
|
- [x] Return variable parsing to var/val
|
||||||
- [ ] Write tests
|
- [ ] Write tests
|
||||||
|
|
||||||
|
|
||||||
|
@ -4,8 +4,8 @@ use crate::lexic::{token::Token, utils, LexResult};
|
|||||||
/// Checks if a String is a keyword, and returns its TokenType
|
/// Checks if a String is a keyword, and returns its TokenType
|
||||||
fn str_is_keyword(s: &String) -> Option<TokenType> {
|
fn str_is_keyword(s: &String) -> Option<TokenType> {
|
||||||
match s.as_str() {
|
match s.as_str() {
|
||||||
"let" => Some(TokenType::LET),
|
"val" => Some(TokenType::VAL),
|
||||||
"mut" => Some(TokenType::MUT),
|
"var" => Some(TokenType::VAR),
|
||||||
"fun" => Some(TokenType::FUN),
|
"fun" => Some(TokenType::FUN),
|
||||||
_ => None,
|
_ => None,
|
||||||
}
|
}
|
||||||
@ -141,23 +141,23 @@ mod tests {
|
|||||||
// Should scan keywords
|
// Should scan keywords
|
||||||
#[test]
|
#[test]
|
||||||
fn test_4() {
|
fn test_4() {
|
||||||
let input = str_to_vec("mut");
|
let input = str_to_vec("var");
|
||||||
let start_pos = 0;
|
let start_pos = 0;
|
||||||
if let LexResult::Some(token, next) = scan(*input.get(0).unwrap(), &input, start_pos) {
|
if let LexResult::Some(token, next) = scan(*input.get(0).unwrap(), &input, start_pos) {
|
||||||
assert_eq!(3, next);
|
assert_eq!(3, next);
|
||||||
assert_eq!(TokenType::MUT, token.token_type);
|
assert_eq!(TokenType::VAR, token.token_type);
|
||||||
assert_eq!("mut", token.value);
|
assert_eq!("var", token.value);
|
||||||
assert_eq!(0, token.position);
|
assert_eq!(0, token.position);
|
||||||
} else {
|
} else {
|
||||||
panic!()
|
panic!()
|
||||||
}
|
}
|
||||||
|
|
||||||
let input = str_to_vec("let");
|
let input = str_to_vec("val");
|
||||||
let start_pos = 0;
|
let start_pos = 0;
|
||||||
if let LexResult::Some(token, next) = scan(*input.get(0).unwrap(), &input, start_pos) {
|
if let LexResult::Some(token, next) = scan(*input.get(0).unwrap(), &input, start_pos) {
|
||||||
assert_eq!(3, next);
|
assert_eq!(3, next);
|
||||||
assert_eq!(TokenType::LET, token.token_type);
|
assert_eq!(TokenType::VAL, token.token_type);
|
||||||
assert_eq!("let", token.value);
|
assert_eq!("val", token.value);
|
||||||
assert_eq!(0, token.position);
|
assert_eq!(0, token.position);
|
||||||
} else {
|
} else {
|
||||||
panic!()
|
panic!()
|
||||||
|
@ -16,13 +16,13 @@ pub enum TokenType {
|
|||||||
Comma,
|
Comma,
|
||||||
INDENT,
|
INDENT,
|
||||||
DEDENT,
|
DEDENT,
|
||||||
LET,
|
VAL,
|
||||||
MUT,
|
VAR,
|
||||||
EOF,
|
EOF,
|
||||||
FUN,
|
FUN,
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Debug, Clone)]
|
#[derive(Debug, Clone, PartialEq)]
|
||||||
pub struct Token {
|
pub struct Token {
|
||||||
pub token_type: TokenType,
|
pub token_type: TokenType,
|
||||||
// The token as a raw string
|
// The token as a raw string
|
||||||
|
@ -32,7 +32,7 @@ impl SemanticCheck for TopLevelDeclaration<'_> {
|
|||||||
error_start: binding.identifier.position,
|
error_start: binding.identifier.position,
|
||||||
error_end: binding.identifier.get_end_position(),
|
error_end: binding.identifier.get_end_position(),
|
||||||
reason: format!(
|
reason: format!(
|
||||||
"Duplicated function: A function with name {} was already defined",
|
"Duplicated: A symbol with name {} was already defined",
|
||||||
binding_name
|
binding_name
|
||||||
),
|
),
|
||||||
};
|
};
|
||||||
@ -72,7 +72,7 @@ impl SemanticCheck for TopLevelDeclaration<'_> {
|
|||||||
error_start: function.identifier.position,
|
error_start: function.identifier.position,
|
||||||
error_end: function.identifier.get_end_position(),
|
error_end: function.identifier.get_end_position(),
|
||||||
reason: format!(
|
reason: format!(
|
||||||
"Duplicated function: A function with name {} was already defined",
|
"Duplicated: A symbol with name {} was already defined",
|
||||||
function_name
|
function_name
|
||||||
),
|
),
|
||||||
};
|
};
|
||||||
|
@ -20,6 +20,7 @@ pub enum TopLevelDeclaration<'a> {
|
|||||||
#[derive(Debug)]
|
#[derive(Debug)]
|
||||||
pub struct FunctionDeclaration<'a> {
|
pub struct FunctionDeclaration<'a> {
|
||||||
pub identifier: &'a Token,
|
pub identifier: &'a Token,
|
||||||
|
pub return_type: Option<&'a Token>,
|
||||||
pub params_list: Box<ParamsList>,
|
pub params_list: Box<ParamsList>,
|
||||||
pub block: Box<Block<'a>>,
|
pub block: Box<Block<'a>>,
|
||||||
}
|
}
|
||||||
|
@ -14,17 +14,16 @@ pub fn try_parse<'a>(tokens: &'a Vec<Token>, pos: usize) -> ParsingResult<Bindin
|
|||||||
* let keyword
|
* let keyword
|
||||||
*/
|
*/
|
||||||
let (is_mutable, binding_token, next_pos) = {
|
let (is_mutable, binding_token, next_pos) = {
|
||||||
let let_token = parse_token_type(tokens, current_pos, TokenType::LET);
|
match parse_token_type(tokens, current_pos, TokenType::VAL) {
|
||||||
match let_token {
|
Ok((val_token, next_pos)) => (false, val_token, next_pos),
|
||||||
Ok((let_token, next_let)) => {
|
_ => {
|
||||||
let mut_token = parse_token_type(tokens, next_let, TokenType::MUT);
|
// If VAL is not found, search for VAR
|
||||||
match mut_token {
|
match parse_token_type(tokens, current_pos, TokenType::VAR) {
|
||||||
Ok((_mut_token, next_mut)) => (true, let_token, next_mut),
|
Ok((var_token, next_pos)) => (true, var_token, next_pos),
|
||||||
_ => (false, let_token, next_let),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
_ => return Err(ParsingError::Unmatched),
|
_ => return Err(ParsingError::Unmatched),
|
||||||
}
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
};
|
};
|
||||||
current_pos = next_pos;
|
current_pos = next_pos;
|
||||||
|
|
||||||
@ -112,7 +111,7 @@ mod tests {
|
|||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn should_parse_val_binding() {
|
fn should_parse_val_binding() {
|
||||||
let tokens = get_tokens(&String::from("let identifier = 20")).unwrap();
|
let tokens = get_tokens(&String::from("val identifier = 20")).unwrap();
|
||||||
let Ok((binding, _)) = try_parse(&tokens, 0) else {
|
let Ok((binding, _)) = try_parse(&tokens, 0) else {
|
||||||
panic!()
|
panic!()
|
||||||
};
|
};
|
||||||
@ -122,11 +121,11 @@ mod tests {
|
|||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn should_parse_val() {
|
fn should_parse_val() {
|
||||||
let tokens = get_tokens(&String::from("let")).unwrap();
|
let tokens = get_tokens(&String::from("val")).unwrap();
|
||||||
let (token, _) = parse_token_type(&tokens, 0, TokenType::LET).unwrap();
|
let (token, _) = parse_token_type(&tokens, 0, TokenType::VAL).unwrap();
|
||||||
|
|
||||||
assert_eq!(TokenType::LET, token.token_type);
|
assert_eq!(TokenType::VAL, token.token_type);
|
||||||
assert_eq!("let", token.value);
|
assert_eq!("val", token.value);
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
@ -168,8 +167,8 @@ mod tests {
|
|||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn should_return_correct_error() {
|
fn should_return_correct_error() {
|
||||||
let tokens = get_tokens(&String::from("let")).unwrap();
|
let tokens = get_tokens(&String::from("val")).unwrap();
|
||||||
assert_eq!(TokenType::LET, tokens[0].token_type);
|
assert_eq!(TokenType::VAL, tokens[0].token_type);
|
||||||
assert_eq!(0, tokens[0].position);
|
assert_eq!(0, tokens[0].position);
|
||||||
let binding = try_parse(&tokens, 0);
|
let binding = try_parse(&tokens, 0);
|
||||||
|
|
||||||
@ -184,8 +183,8 @@ mod tests {
|
|||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn should_return_error_when_identifier_is_wrong() {
|
fn should_return_error_when_identifier_is_wrong() {
|
||||||
let tokens = get_tokens(&String::from("let 322")).unwrap();
|
let tokens = get_tokens(&String::from("val 322")).unwrap();
|
||||||
assert_eq!(TokenType::LET, tokens[0].token_type);
|
assert_eq!(TokenType::VAL, tokens[0].token_type);
|
||||||
assert_eq!(0, tokens[0].position);
|
assert_eq!(0, tokens[0].position);
|
||||||
let binding = try_parse(&tokens, 0);
|
let binding = try_parse(&tokens, 0);
|
||||||
|
|
||||||
@ -197,7 +196,7 @@ mod tests {
|
|||||||
_ => panic!("Error expected"),
|
_ => panic!("Error expected"),
|
||||||
}
|
}
|
||||||
|
|
||||||
let tokens = get_tokens(&String::from("let \"hello\"")).unwrap();
|
let tokens = get_tokens(&String::from("val \"hello\"")).unwrap();
|
||||||
let binding = try_parse(&tokens, 0);
|
let binding = try_parse(&tokens, 0);
|
||||||
|
|
||||||
match binding {
|
match binding {
|
||||||
@ -211,7 +210,7 @@ mod tests {
|
|||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn should_return_error_when_equal_op_is_wrong() {
|
fn should_return_error_when_equal_op_is_wrong() {
|
||||||
let tokens = get_tokens(&String::from("let id \"error\"")).unwrap();
|
let tokens = get_tokens(&String::from("val id \"error\"")).unwrap();
|
||||||
let binding = try_parse(&tokens, 0);
|
let binding = try_parse(&tokens, 0);
|
||||||
|
|
||||||
match binding {
|
match binding {
|
||||||
|
@ -1,7 +1,7 @@
|
|||||||
use crate::{
|
use crate::{
|
||||||
error_handling::SyntaxError,
|
error_handling::SyntaxError,
|
||||||
lexic::token::{Token, TokenType},
|
lexic::token::{Token, TokenType},
|
||||||
syntax::{ParsingError, ParsingResult},
|
syntax::{utils::try_operator, ParsingError, ParsingResult},
|
||||||
};
|
};
|
||||||
|
|
||||||
use super::{
|
use super::{
|
||||||
@ -9,6 +9,13 @@ use super::{
|
|||||||
params_list::parse_params_list,
|
params_list::parse_params_list,
|
||||||
};
|
};
|
||||||
|
|
||||||
|
/*
|
||||||
|
function declaration = "fun", identifier, params list, return type?, block;
|
||||||
|
|
||||||
|
params list = "(", ")";
|
||||||
|
|
||||||
|
return type = "->", datatype;
|
||||||
|
*/
|
||||||
pub fn try_parse<'a>(tokens: &'a Vec<Token>, pos: usize) -> ParsingResult<FunctionDeclaration> {
|
pub fn try_parse<'a>(tokens: &'a Vec<Token>, pos: usize) -> ParsingResult<FunctionDeclaration> {
|
||||||
let mut current_pos = pos;
|
let mut current_pos = pos;
|
||||||
|
|
||||||
@ -19,6 +26,7 @@ pub fn try_parse<'a>(tokens: &'a Vec<Token>, pos: usize) -> ParsingResult<Functi
|
|||||||
};
|
};
|
||||||
current_pos = next_pos;
|
current_pos = next_pos;
|
||||||
|
|
||||||
|
// identifier
|
||||||
let (identifier, next_pos) = match parse_token_type(tokens, current_pos, TokenType::Identifier)
|
let (identifier, next_pos) = match parse_token_type(tokens, current_pos, TokenType::Identifier)
|
||||||
{
|
{
|
||||||
Ok((id, next)) => (id, next),
|
Ok((id, next)) => (id, next),
|
||||||
@ -40,19 +48,20 @@ pub fn try_parse<'a>(tokens: &'a Vec<Token>, pos: usize) -> ParsingResult<Functi
|
|||||||
};
|
};
|
||||||
current_pos = next_pos;
|
current_pos = next_pos;
|
||||||
|
|
||||||
|
// Params list
|
||||||
let (params_list, next_pos) = match parse_params_list(tokens, current_pos) {
|
let (params_list, next_pos) = match parse_params_list(tokens, current_pos) {
|
||||||
Ok((params, next_pos)) => (params, next_pos),
|
Ok((params, next_pos)) => (params, next_pos),
|
||||||
Err(ParsingError::Err(err)) => return Err(ParsingError::Err(err)),
|
Err(ParsingError::Err(err)) => return Err(ParsingError::Err(err)),
|
||||||
Err(ParsingError::Mismatch(wrong_token)) => {
|
Err(ParsingError::Mismatch(wrong_token)) => {
|
||||||
return Err(ParsingError::Err(SyntaxError {
|
return Err(ParsingError::Err(SyntaxError {
|
||||||
reason: String::from("Expected an opening paren afted the function identifier."),
|
reason: String::from("Expected an opening paren after the function identifier."),
|
||||||
error_start: wrong_token.position,
|
error_start: wrong_token.position,
|
||||||
error_end: wrong_token.get_end_position(),
|
error_end: wrong_token.get_end_position(),
|
||||||
}));
|
}));
|
||||||
}
|
}
|
||||||
Err(ParsingError::Unmatched) => {
|
Err(ParsingError::Unmatched) => {
|
||||||
return Err(ParsingError::Err(SyntaxError {
|
return Err(ParsingError::Err(SyntaxError {
|
||||||
reason: String::from("Expected an opening paren afted the function identifier."),
|
reason: String::from("Expected an opening paren after the function identifier."),
|
||||||
error_start: identifier.position,
|
error_start: identifier.position,
|
||||||
error_end: identifier.get_end_position(),
|
error_end: identifier.get_end_position(),
|
||||||
}));
|
}));
|
||||||
@ -60,6 +69,37 @@ pub fn try_parse<'a>(tokens: &'a Vec<Token>, pos: usize) -> ParsingResult<Functi
|
|||||||
};
|
};
|
||||||
current_pos = next_pos;
|
current_pos = next_pos;
|
||||||
|
|
||||||
|
|
||||||
|
// Try to parse a return type
|
||||||
|
let (return_type, next_pos) = 'return_label: {
|
||||||
|
let (arrow_op, next_pos) = match try_operator(tokens, current_pos, "->".into()) {
|
||||||
|
Ok((op, next)) => (op, next),
|
||||||
|
_ => break 'return_label (None, current_pos),
|
||||||
|
};
|
||||||
|
|
||||||
|
// At this point the '->' operator was matched, so we expect a datatype
|
||||||
|
match parse_token_type(tokens, next_pos, TokenType::Datatype) {
|
||||||
|
Ok((t, next)) => (Some(t), next),
|
||||||
|
Err(ParsingError::Err(err)) => return Err(ParsingError::Err(err)),
|
||||||
|
Err(ParsingError::Mismatch(wrong_token)) => {
|
||||||
|
return Err(ParsingError::Err(SyntaxError {
|
||||||
|
reason: String::from("Expected a datatype after the arrow operator."),
|
||||||
|
error_start: wrong_token.position,
|
||||||
|
error_end: wrong_token.get_end_position(),
|
||||||
|
}));
|
||||||
|
}
|
||||||
|
Err(ParsingError::Unmatched) => {
|
||||||
|
return Err(ParsingError::Err(SyntaxError {
|
||||||
|
reason: String::from("Expected a datatype after the arrow operator."),
|
||||||
|
error_start: arrow_op.position,
|
||||||
|
error_end: arrow_op.get_end_position(),
|
||||||
|
}));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
};
|
||||||
|
current_pos = next_pos;
|
||||||
|
|
||||||
|
// Function body (block)
|
||||||
let (block, next_pos) = match parse_block(tokens, current_pos) {
|
let (block, next_pos) = match parse_block(tokens, current_pos) {
|
||||||
Ok((block, next_pos)) => (block, next_pos),
|
Ok((block, next_pos)) => (block, next_pos),
|
||||||
Err(ParsingError::Err(error)) => {
|
Err(ParsingError::Err(error)) => {
|
||||||
@ -86,6 +126,7 @@ pub fn try_parse<'a>(tokens: &'a Vec<Token>, pos: usize) -> ParsingResult<Functi
|
|||||||
Ok((
|
Ok((
|
||||||
FunctionDeclaration {
|
FunctionDeclaration {
|
||||||
identifier: &identifier,
|
identifier: &identifier,
|
||||||
|
return_type,
|
||||||
params_list: Box::new(params_list),
|
params_list: Box::new(params_list),
|
||||||
block: Box::new(block),
|
block: Box::new(block),
|
||||||
},
|
},
|
||||||
@ -93,6 +134,7 @@ pub fn try_parse<'a>(tokens: &'a Vec<Token>, pos: usize) -> ParsingResult<Functi
|
|||||||
))
|
))
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
#[cfg(test)]
|
#[cfg(test)]
|
||||||
mod tests {
|
mod tests {
|
||||||
use crate::lexic::get_tokens;
|
use crate::lexic::get_tokens;
|
||||||
@ -150,7 +192,7 @@ mod tests {
|
|||||||
Err(ParsingError::Err(err)) => {
|
Err(ParsingError::Err(err)) => {
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
err.reason,
|
err.reason,
|
||||||
"Expected an opening paren afted the function identifier."
|
"Expected an opening paren after the function identifier."
|
||||||
);
|
);
|
||||||
assert_eq!(err.error_start, 7);
|
assert_eq!(err.error_start, 7);
|
||||||
assert_eq!(err.error_end, 8);
|
assert_eq!(err.error_end, 8);
|
||||||
@ -164,7 +206,7 @@ mod tests {
|
|||||||
Err(ParsingError::Err(err)) => {
|
Err(ParsingError::Err(err)) => {
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
err.reason,
|
err.reason,
|
||||||
"Expected an opening paren afted the function identifier."
|
"Expected an opening paren after the function identifier."
|
||||||
);
|
);
|
||||||
assert_eq!(err.error_start, 4);
|
assert_eq!(err.error_start, 4);
|
||||||
assert_eq!(err.error_end, 6);
|
assert_eq!(err.error_end, 6);
|
||||||
@ -304,6 +346,46 @@ mod tests {
|
|||||||
let (function_declaration, _) = try_parse(&tokens, 0).unwrap();
|
let (function_declaration, _) = try_parse(&tokens, 0).unwrap();
|
||||||
|
|
||||||
assert_eq!(function_declaration.identifier.value, String::from("id"));
|
assert_eq!(function_declaration.identifier.value, String::from("id"));
|
||||||
|
assert_eq!(function_declaration.return_type, None);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn should_parse_return_type() {
|
||||||
|
let tokens = get_tokens(&String::from("fun id() -> String {}")).unwrap();
|
||||||
|
let (function_declaration, _) = try_parse(&tokens, 0).unwrap();
|
||||||
|
|
||||||
|
assert_eq!(function_declaration.identifier.value, String::from("id"));
|
||||||
|
assert_eq!(function_declaration.return_type.unwrap().value, String::from("String"));
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn should_throw_error_on_return_type_1() {
|
||||||
|
let tokens = get_tokens(&String::from("fun id() -> {}")).unwrap();
|
||||||
|
let fun_decl = try_parse(&tokens, 0);
|
||||||
|
|
||||||
|
match fun_decl {
|
||||||
|
Err(ParsingError::Err(err)) => {
|
||||||
|
assert_eq!(err.reason, "Expected a datatype after the arrow operator.");
|
||||||
|
assert_eq!(err.error_start, 12);
|
||||||
|
assert_eq!(err.error_end, 13);
|
||||||
|
}
|
||||||
|
_ => panic!("Expected an error: {:?}", fun_decl),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn should_throw_error_on_return_type_2() {
|
||||||
|
let tokens = get_tokens(&String::from("fun id() -> ")).unwrap();
|
||||||
|
let fun_decl = try_parse(&tokens, 0);
|
||||||
|
|
||||||
|
match fun_decl {
|
||||||
|
Err(ParsingError::Err(err)) => {
|
||||||
|
assert_eq!(err.reason, "Expected a datatype after the arrow operator.");
|
||||||
|
assert_eq!(err.error_start, 9);
|
||||||
|
assert_eq!(err.error_end, 11);
|
||||||
|
}
|
||||||
|
_ => panic!("Expected an error: {:?}", fun_decl),
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -16,6 +16,7 @@ simple function calls, and then implement other features top down
|
|||||||
```ebnf
|
```ebnf
|
||||||
top level statement = expression
|
top level statement = expression
|
||||||
| function declaration
|
| function declaration
|
||||||
|
| binding
|
||||||
```
|
```
|
||||||
|
|
||||||
|
|
||||||
|
@ -81,6 +81,22 @@ fn next_construct<'a>(
|
|||||||
_ => {}
|
_ => {}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Try to parse a binding
|
||||||
|
match binding::try_parse(tokens, current_pos) {
|
||||||
|
Ok((binding, next_pos)) => return Ok((TopLevelDeclaration::Binding(binding), next_pos)),
|
||||||
|
Err(ParsingError::Err(err)) => return Err(ParsingError::Err(err)),
|
||||||
|
_ => {}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Try to parse an expression
|
||||||
|
match expression::try_parse(tokens, current_pos) {
|
||||||
|
Ok((expression, next_pos)) => {
|
||||||
|
return Ok((TopLevelDeclaration::Expression(expression), next_pos))
|
||||||
|
}
|
||||||
|
Err(ParsingError::Err(err)) => return Err(ParsingError::Err(err)),
|
||||||
|
_ => {}
|
||||||
|
}
|
||||||
|
|
||||||
// No top level construct was found, return unmatched
|
// No top level construct was found, return unmatched
|
||||||
Err(ParsingError::Unmatched)
|
Err(ParsingError::Unmatched)
|
||||||
}
|
}
|
||||||
|
@ -49,7 +49,7 @@ mod tests {
|
|||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn should_parse_binding() {
|
fn should_parse_binding() {
|
||||||
let input = String::from("let identifier = 20");
|
let input = String::from("val identifier = 20");
|
||||||
let tokens = crate::lexic::get_tokens(&input).unwrap();
|
let tokens = crate::lexic::get_tokens(&input).unwrap();
|
||||||
let statement = try_parse(&tokens, 0);
|
let statement = try_parse(&tokens, 0);
|
||||||
|
|
||||||
|
@ -75,3 +75,5 @@ pub fn parse_token_type(
|
|||||||
None => Err(ParsingError::Unmatched),
|
None => Err(ParsingError::Unmatched),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
|
Loading…
Reference in New Issue
Block a user