refactor: reenable file compilation

master
Araozu 2024-08-02 08:41:25 -05:00
parent f1b15db509
commit d5f2176fa7
4 changed files with 43 additions and 41 deletions

View File

@@ -1,7 +1,10 @@
use serde::Serialize;
use crate::{
error_handling::MistiError, lexic::{get_tokens, token::Token}, semantic, syntax::build_ast
error_handling::MistiError,
lexic::{get_tokens, token::Token},
semantic,
syntax::build_ast,
};
use std::io::{self, BufRead};
@@ -35,11 +38,9 @@ pub fn tokenize_command(_options: Vec<String>) -> Result<(), ()> {
Ok(tokens) => {
let ast_result = build_ast(&tokens);
match ast_result {
Ok(ast) => {
match semantic::check_semantics(&ast) {
Ok(ast) => match semantic::check_semantics(&ast) {
Ok(()) => TokenizeResult::Ok(tokens),
Err(error) => TokenizeResult::SyntaxOnly(tokens, error)
}
Err(error) => TokenizeResult::SyntaxOnly(tokens, error),
},
Err(error) => TokenizeResult::TokensOnly(tokens, error),
}
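For readers of this hunk, the TokenizeResult variants built above imply an enum roughly shaped as below. This is a sketch inferred from the match arms only, not the crate's actual definition; the payload types (Vec<Token> and MistiError, both imported at the top of this file) are assumptions.

    // Sketch inferred from the arms above; the real definition lives
    // elsewhere in this file/crate and may differ.
    enum TokenizeResult {
        Ok(Vec<Token>),                     // lexing, parsing and semantic checks all succeeded
        SyntaxOnly(Vec<Token>, MistiError), // AST was built, but semantic analysis failed
        TokensOnly(Vec<Token>, MistiError), // tokens exist, but the AST could not be built
    }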

View File

@@ -8,8 +8,3 @@ pub trait Transpilable {
/// Transforms this struct into PHP
fn transpile(&self) -> String;
}
/// Transforms an AST to its representation in PHP
pub fn codegen<'a>(ast: &'a impl Transpilable) -> String {
ast.transpile()
}
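With the free codegen helper removed, callers now invoke the trait method directly. A minimal sketch of an implementor and its call site, using a made-up node type purely for illustration:

    // Hypothetical AST node, for illustration only.
    struct EchoStatement {
        value: String,
    }

    impl Transpilable for EchoStatement {
        // Transforms this struct into PHP
        fn transpile(&self) -> String {
            format!("echo {};", self.value)
        }
    }

    // Where code previously called `codegen(&node)`, it now calls:
    // let php: String = node.transpile();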

View File

@@ -1,8 +1,9 @@
use colored::*;
use std::{fs, path::Path};
use crate::lexic::token::Token;
use crate::{codegen, error_handling::PrintableError, lexic, syntax};
use crate::codegen::Transpilable;
use crate::php_ast::transformers::PHPTransformable;
use crate::{error_handling::PrintableError, lexic, syntax};
pub fn compile_file(input: &String) -> Result<(), ()> {
let input_path = Path::new(input);
@@ -58,49 +59,50 @@ pub fn compile_file(input: &String) -> Result<(), ()> {
}
}
/// THP source code goes in, PHP code or an error comes out
/// Full pipeline from THP source code to PHP output
fn compile(input: &String) -> Result<String, String> {
let tokens = lexic::get_tokens(input);
let tokens = match tokens {
Ok(tokens) => tokens,
//
// Lexical analysis
//
let tokens = match lexic::get_tokens(input) {
Ok(t) => t,
Err(error) => {
let chars: Vec<char> = input.chars().into_iter().collect();
return Err(format!(
"{}:\n{}",
"syntax error".on_red(),
error.get_error_str(&chars)
));
return Err(error.get_error_str(&chars));
}
};
build_ast(input, tokens)
}
/// Executes Syntax analysis, and for now, Semantic analysis and Code generation.
///
/// Prints the generated code to stdout
fn build_ast(input: &String, tokens: Vec<Token>) -> Result<String, String> {
let ast = syntax::build_ast(&tokens);
let ast = match ast {
//
// Syntax analysis
//
let ast = match syntax::build_ast(&tokens) {
Ok(ast) => ast,
Err(reason) => {
let chars: Vec<char> = input.chars().into_iter().collect();
let error = format!("{}: {}", "error".on_red(), reason.get_error_str(&chars));
return Err(error);
return Err(reason.get_error_str(&chars));
}
};
match crate::semantic::check_semantics(&ast) {
//
// Semantic analysis
//
let res1 = crate::semantic::check_semantics(&ast);
match res1 {
Ok(_) => {}
Err(reason) => {
let chars: Vec<char> = input.chars().into_iter().collect();
let error = format!("{}: {}", "error".on_red(), reason.get_error_str(&chars));
return Err(error);
}
};
}
Err("Code generation disabled: rewriting into PHP AST".into())
// Ok(codegen::codegen(&ast))
//
// Intermediate representation (THP -> PHP ast)
//
let php_ast = ast.into_php_ast();
//
// Codegen
//
Ok(php_ast.transpile())
}
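Read end to end, the rewritten compile/build_ast pair threads one value through five stages: tokens, THP AST, semantic check, PHP AST, PHP source. A condensed sketch of that happy path, with the per-stage error formatting from the hunk above elided and the same crate imports assumed:

    // Condensed data flow only; the real code returns the formatted
    // error strings shown in the diff instead of these placeholders.
    fn compile_sketch(input: &String) -> Result<String, String> {
        let tokens = lexic::get_tokens(input).map_err(|_| "lexical error".to_string())?;
        let ast = syntax::build_ast(&tokens).map_err(|_| "syntax error".to_string())?;
        crate::semantic::check_semantics(&ast).map_err(|_| "semantic error".to_string())?;
        let php_ast = ast.into_php_ast(); // THP AST -> PHP AST (PHPTransformable)
        Ok(php_ast.transpile())           // PHP AST -> PHP source (Transpilable)
    }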

View File

@@ -11,13 +11,15 @@ impl Tokenizer for Vec<Token> {
fn get_significant<'a>(&'a self, index: usize) -> Option<(&'a Token, usize)> {
let mut current_pos = index;
// Ignore all whitespace and newlines
// Ignore all whitespace, newlines and comments
loop {
match self.get(current_pos) {
Some(token) => {
if token.token_type == TokenType::INDENT
|| token.token_type == TokenType::DEDENT
|| token.token_type == TokenType::NewLine
|| token.token_type == TokenType::Comment
|| token.token_type == TokenType::MultilineComment
{
current_pos += 1;
} else {
@@ -44,7 +46,7 @@ pub fn try_operator(tokens: &Vec<Token>, pos: usize, operator: String) -> Parsin
/// Expects the token at `pos` to be of type `token_type`, and returns the token and the next position.
///
/// Ignores all whitespace and newlines.
/// Ignores all whitespace, newlines and comments.
///
/// Only returns: Ok, Unmatched, Mismatched
pub fn parse_token_type(
@@ -59,6 +61,8 @@ pub fn parse_token_type(
if t.token_type == TokenType::INDENT
|| t.token_type == TokenType::DEDENT
|| t.token_type == TokenType::NewLine
|| t.token_type == TokenType::Comment
|| t.token_type == TokenType::MultilineComment
{
current_pos += 1;
} else {
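The same five ignorable token kinds are now listed in both get_significant and parse_token_type. A shared predicate would keep the two skip lists in sync if more trivia kinds appear later; this is only a suggestion sketched here, not something this commit introduces, and the helper name is made up:

    // Hypothetical helper, not part of this commit.
    fn is_ignorable(token_type: &TokenType) -> bool {
        matches!(
            token_type,
            TokenType::INDENT
                | TokenType::DEDENT
                | TokenType::NewLine
                | TokenType::Comment
                | TokenType::MultilineComment
        )
    }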