Refactor last change

master
Araozu 2023-09-10 11:39:52 -05:00
parent 8b80dad589
commit 19474eb85e
4 changed files with 64 additions and 47 deletions

View File

@@ -8,12 +8,13 @@ impl Transpilable for FunctionDeclaration {
}
}
#[cfg(test)]
mod tests {
use super::*;
use crate::{lexic::get_tokens, syntax::{construct_ast, ast::TopLevelConstruct}};
use crate::{
lexic::get_tokens,
syntax::{ast::TopLevelConstruct, construct_ast},
};
#[test]
fn should_transpile() {
@@ -28,8 +29,7 @@ mod tests {
let transpiled = fun_decl.transpile();
assert_eq!("function id() {}", transpiled);
},
}
}
}
}
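Only the tail of `should_transpile` is visible in the hunks above; the lexing and parsing steps in between are collapsed. Purely as a reading aid, a test like this could be assembled from the imports shown, with the hypothetical parts flagged below: the source snippet, `construct_ast`'s exact signature, and the field holding the module's top-level constructs are assumptions, not code from this commit.

use crate::{
    lexic::get_tokens,
    syntax::{ast::TopLevelConstruct, construct_ast},
};

#[test]
fn should_transpile() {
    // Hypothetical source text for an empty function declaration.
    let input = String::from("fun id() {}");

    // get_tokens(&String) -> Result<Vec<Token>, MistiError>, per the lexer file below.
    let tokens = get_tokens(&input).unwrap();

    // Assumed: construct_ast consumes the tokens and yields a module value.
    let ast = construct_ast(&tokens).unwrap();

    // Assumed field and variant names; only the final match arm body is taken from the diff.
    match ast.bindings.get(0) {
        Some(TopLevelConstruct::FunctionDeclaration(fun_decl)) => {
            let transpiled = fun_decl.transpile();
            assert_eq!("function id() {}", transpiled);
        }
        _ => panic!("expected a function declaration"),
    }
}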

View File

@@ -2,9 +2,9 @@ use crate::syntax::ast::ModuleAST;
mod binding;
mod expression;
mod function_declaration;
mod module_ast;
mod top_level_construct;
mod function_declaration;
/// Trait that the AST and its nodes implement to support transformation to PHP
trait Transpilable {

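Judging from the test in the first file and the doc comment above, the trait's job is to turn each AST node into a PHP string. A minimal sketch, assuming `transpile` returns a `String` and inventing the node's fields (only the expected output `function id() {}` is confirmed by the test):

/// Trait that the AST and its nodes implement to support transformation to PHP
trait Transpilable {
    fn transpile(&self) -> String;
}

// Hypothetical node shape; the real FunctionDeclaration fields are not shown in this diff.
struct FunctionDeclaration {
    identifier: String,
}

impl Transpilable for FunctionDeclaration {
    fn transpile(&self) -> String {
        // Empty body for now, matching the test's expected `function id() {}`.
        format!("function {}() {{}}", self.identifier)
    }
}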
View File

@@ -41,16 +41,14 @@ pub fn get_tokens(input: &String) -> Result<Vec<Token>, MistiError> {
let chars: Vec<char> = input.chars().into_iter().collect();
let mut results = Vec::new();
let mut current_pos: usize = 0;
let mut indentation_stack = Vec::<usize>::new();
let mut indentation_stack = vec![0];
// Used to emit INDENT & DEDENT tokens
let mut at_new_line = false;
while has_input(&chars, current_pos) {
match next_token(&chars, current_pos, &mut indentation_stack, at_new_line) {
LexResult::Some(token, next_pos) => {
if token.token_type == TokenType::NewLine {
at_new_line = true;
}
at_new_line = token.token_type == TokenType::NewLine;
results.push(token);
current_pos = next_pos;
@@ -73,20 +71,61 @@ pub fn get_tokens(input: &String) -> Result<Vec<Token>, MistiError> {
}
/// Scans a single token from `chars`, starting from `current_pos`
fn next_token(chars: &Chars, current_pos: usize, indentation_stack: &mut Vec<usize>, at_new_line: bool) -> LexResult {
let next_char = peek(chars, current_pos);
fn next_token(
chars: &Chars,
current_pos: usize,
indentation_stack: &mut Vec<usize>,
at_new_line: bool,
) -> LexResult {
let mut current_pos = current_pos;
// If EOF is reached return nothing but the current position
if next_char == '\0' {
// Handle whitespace
if peek(chars, current_pos) == ' ' {
if at_new_line {
return handle_indentation(chars, current_pos, indentation_stack);
} else {
// Consume whitespace
current_pos += 1;
while peek(chars, current_pos) == ' ' {
current_pos += 1;
}
}
}
// If EOF is reached return only the current position
if peek(chars, current_pos) == '\0' {
return LexResult::None(current_pos);
}
// Handle whitespace recursively.
if next_char == ' ' && !at_new_line {
return next_token(chars, current_pos + 1, indentation_stack, false);
let next_char = peek(chars, current_pos);
// Scanners
None.or_else(|| scanner::number(next_char, chars, current_pos))
.or_else(|| scanner::identifier(next_char, chars, current_pos))
.or_else(|| scanner::datatype(next_char, chars, current_pos))
.or_else(|| scanner::string(next_char, chars, current_pos))
.or_else(|| scanner::new_comment(next_char, chars, current_pos))
.or_else(|| scanner::operator(next_char, chars, current_pos))
.or_else(|| scanner::grouping_sign(next_char, chars, current_pos))
.or_else(|| scanner::new_line(next_char, chars, current_pos))
.unwrap_or_else(|| {
let error = LexError {
position: current_pos,
reason: format!(
"Unrecognized character `{}` (escaped: `{}`)",
next_char,
next_char.escape_default().to_string(),
),
};
LexResult::Err(error)
})
}
// When whitespace is found at the start of the line, emit INDENT/DEDENT
else if next_char == ' ' && at_new_line {
fn handle_indentation(
chars: &Chars,
current_pos: usize,
indentation_stack: &mut Vec<usize>,
) -> LexResult {
// Count the number of spaces
let mut spaces = 0;
let mut sub_pos = current_pos;
@@ -111,28 +150,6 @@ fn next_token(chars: &Chars, current_pos: usize, indentation_stack: &mut Vec<usi
}
}
// Scanners
None.or_else(|| scanner::number(next_char, chars, current_pos))
.or_else(|| scanner::identifier(next_char, chars, current_pos))
.or_else(|| scanner::datatype(next_char, chars, current_pos))
.or_else(|| scanner::string(next_char, chars, current_pos))
.or_else(|| scanner::new_comment(next_char, chars, current_pos))
.or_else(|| scanner::operator(next_char, chars, current_pos))
.or_else(|| scanner::grouping_sign(next_char, chars, current_pos))
.or_else(|| scanner::new_line(next_char, chars, current_pos))
.unwrap_or_else(|| {
let error = LexError {
position: current_pos,
reason: format!(
"Unrecognized character `{}` (escaped: `{}`)",
next_char,
next_char.escape_default().to_string(),
),
};
LexResult::Err(error)
})
}
/// Returns the char at `pos`
fn peek(input: &Chars, pos: usize) -> char {
let result = input.get(pos).unwrap_or(&'\0');
@@ -151,7 +168,7 @@ mod tests {
/// Should return an EOF token if the input has no tokens
#[test]
fn test1() {
fn should_emit_eof() {
let input = String::from("");
let tokens = get_tokens(&input).unwrap();
// 1 semicolon and 1 EOF token
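The seeded `vec![0]` and the new `handle_indentation` follow the usual indentation-stack technique (as in Python's tokenizer): the stack holds the widths of the currently open blocks, starting at a base level of 0, and the space count at the start of each line is compared against its top. The emission logic itself is collapsed in this diff, so the standalone sketch below uses simplified, hypothetical token values rather than the project's LexResult/Token types.

/// Simplified stand-ins for the lexer's INDENT and DEDENT tokens.
#[derive(Debug, PartialEq)]
enum IndentToken {
    Indent,
    Dedent,
}

/// Compare the spaces at the start of a line against the indentation stack,
/// pushing/popping levels and emitting the corresponding tokens.
fn handle_indentation(spaces: usize, stack: &mut Vec<usize>) -> Vec<IndentToken> {
    let mut tokens = Vec::new();

    if spaces > *stack.last().unwrap() {
        // Deeper than the enclosing block: open exactly one new level.
        stack.push(spaces);
        tokens.push(IndentToken::Indent);
    } else {
        // Shallower: close every level deeper than `spaces`.
        while spaces < *stack.last().unwrap() {
            stack.pop();
            tokens.push(IndentToken::Dedent);
        }
    }
    tokens
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn emits_indent_and_dedent_pairs() {
        // Base level 0, mirroring the `vec![0]` initialization above.
        let mut stack = vec![0];
        assert_eq!(handle_indentation(4, &mut stack), vec![IndentToken::Indent]);
        assert_eq!(handle_indentation(8, &mut stack), vec![IndentToken::Indent]);
        assert_eq!(
            handle_indentation(0, &mut stack),
            vec![IndentToken::Dedent, IndentToken::Dedent]
        );
    }
}

Seeding the stack with 0 is what lets the final dedent back to column zero pop cleanly without ever emptying the stack.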

View File

@@ -120,9 +120,9 @@ pub fn try_parse<'a>(tokens: &'a Vec<Token>, pos: usize) -> Option<SyntaxResult>
})
};
Some(SyntaxResult::Ok(
super::ast::TopLevelConstruct::Binding(binding)
))
Some(SyntaxResult::Ok(super::ast::TopLevelConstruct::Binding(
binding,
)))
}
/// Expects the token at `pos` to be of type `token_type`
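The closing doc comment describes a token-type check whose body falls outside this hunk. A minimal sketch of what such a helper could look like, assuming it hands back the matching token as an Option; the name, signature, and return type here are guesses, not code from the repository:

/// Expects the token at `pos` to be of type `token_type`
fn try_token_type(tokens: &Vec<Token>, pos: usize, token_type: TokenType) -> Option<&Token> {
    tokens
        .get(pos)
        .filter(|token| token.token_type == token_type)
}

A parser step can chain checks like this and bail out early as soon as one of them returns None.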