feat: receive error list in parser root

Fernando Araoz 2025-01-27 18:52:12 -05:00
parent db8756a1d5
commit b1ada55ff2
3 changed files with 27 additions and 16 deletions
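At a high level, the parser root (Module.init) now takes the same *std.ArrayList(errors.ErrorData) that the lexer's tokenize already receives, so one caller-owned list collects diagnostics from both phases. A minimal caller-side sketch of that wiring, mirroring the updated tests and REPL below; the import paths and the "322" input are assumptions for illustration, not part of this commit:

    const std = @import("std");
    // assumed project imports, following the names used in the diffs below
    const errors = @import("errors.zig");
    const lexic = @import("lexic.zig");
    const syntax = @import("syntax.zig");

    test "lexer and parser share one error list" {
        const input = "322"; // hypothetical source line
        var error_list = std.ArrayList(errors.ErrorData).init(std.testing.allocator);
        defer error_list.deinit();

        // lexer errors land in error_list
        const tokens = try lexic.tokenize(input, std.testing.allocator, &error_list);
        defer tokens.deinit();

        // parser errors now land in the same list
        var module: syntax.Module = undefined;
        try module.init(&tokens, 0, std.testing.allocator, &error_list);
        defer module.deinit();
    }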

View File

@@ -41,6 +41,7 @@ pub fn tokenize(
             break;
         }
+        // FIXME: should defer deinit, otherwise we leak memory?
         var current_error: errors.ErrorData = undefined;

         // attempt to lex a number
@@ -50,6 +51,8 @@ pub fn tokenize(
             // add to list of errors
             try err_arrl.append(current_error);
+            // FIXME: should deinit current_error now that its been allocated, otherwise we leak memory?
+
             // ignore everything until whitespace and loop
             current_pos = ignore_until_whitespace(input, actual_next_pos);
             continue;
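The two FIXMEs above point at the same ownership question: each ErrorData appended to err_arrl was built with an allocator, so someone must eventually release it. A hedged cleanup sketch, assuming (as the FIXMEs do) that ErrorData.deinit() frees exactly what init() allocated and that the owner of the list is responsible for its elements:

    // caller-side cleanup sketch (assumption: ErrorData.deinit() releases what init() allocated)
    var error_list = std.ArrayList(errors.ErrorData).init(alloc);
    defer {
        for (error_list.items) |*e| e.deinit();
        error_list.deinit();
    }

    const tokens = try lexic.tokenize(input, alloc, &error_list);
    defer tokens.deinit();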

View File

@@ -27,7 +27,7 @@ pub const Module = struct {
         tokens: *const TokenStream,
         pos: usize,
         allocator: std.mem.Allocator,
-        error_target: *errors.ErrorData,
+        err_arrl: *std.ArrayList(errors.ErrorData),
     ) ParseError!void {
         var arrl = std.ArrayList(*statement.Statement).init(allocator);
         errdefer arrl.deinit();
@@ -48,12 +48,15 @@ pub const Module = struct {
                 switch (e) {
                     error.Unmatched => {
                         // create the error value
+                        var error_target: errors.ErrorData = undefined;
                         try error_target.init(
                             "No statement found",
                             current_pos,
                             current_pos + 1,
                             allocator,
                         );
+                        defer error_target.deinit();
+                        try err_arrl.append(error_target);
                         return error.Unmatched;
                     },
                     else => return e,
@@ -90,11 +93,8 @@ test "should parse a single statement" {
     const tokens = try lexic.tokenize(input, std.testing.allocator, &error_list);
     defer tokens.deinit();

-    const error_target = try std.testing.allocator.create(errors.ErrorData);
-    defer std.testing.allocator.destroy(error_target);
     var module: Module = undefined;
-    _ = try module.init(&tokens, 0, std.testing.allocator, error_target);
+    _ = try module.init(&tokens, 0, std.testing.allocator, &error_list);
     defer module.deinit();
 }
@@ -106,11 +106,8 @@ test "should clean memory if a statement parsing fails after one item has been i
     const tokens = try lexic.tokenize(input, std.testing.allocator, &error_list);
     defer tokens.deinit();

-    const error_target = try std.testing.allocator.create(errors.ErrorData);
-    defer std.testing.allocator.destroy(error_target);
     var module: Module = undefined;
-    _ = module.init(&tokens, 0, std.testing.allocator, error_target) catch {
+    _ = module.init(&tokens, 0, std.testing.allocator, &error_list) catch {
         return;
     };
     defer module.deinit();
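With the new parameter, a failed parse leaves its diagnostic ("No statement found") in err_arrl and still returns error.Unmatched, so callers decide how to surface it. A hedged sketch of that pattern, assuming tokens and error_list are set up as in the tests above:

    var module: Module = undefined;
    module.init(&tokens, 0, std.testing.allocator, &error_list) catch |e| switch (e) {
        // the "No statement found" ErrorData is now available in error_list
        error.Unmatched => return,
        else => return e,
    };
    defer module.deinit();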

View File

@@ -96,15 +96,26 @@ fn repl() !void {
             }
         }

-        // Print errors
-        for (error_array.items) |e| {
-            var err = e;
-            const err_str = try err.get_error_str(line, "repl", alloc);
-            try stdout.print("\n{s}\n", .{err_str});
-            try bw.flush();
-            alloc.free(err_str);
+        // Print errors and continue, if any
+        if (error_array.items.len > 0) {
+            for (error_array.items) |e| {
+                var err = e;
+                const err_str = try err.get_error_str(line, "repl", alloc);
+                try stdout.print("\n{s}\n", .{err_str});
+                try bw.flush();
+                alloc.free(err_str);
+            }
+            continue;
         }

+        std.debug.print("should be syntax analizing the tokens...\n", .{});
+
+        //
+        // Syntax analysis
+        //
+        var ast: syntax.Module = undefined;
+        try ast.init(&tokens, 0, alloc, &error_array);
+
         // next repl line
     }
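For now the REPL only prints lexer errors and then hands the same error_array to the parser; parser failures are not reported yet (only the std.debug.print placeholder). A hedged sketch of how the existing reporting loop could also cover parser failures inside the REPL loop, reusing only calls that already appear above and assuming Module.deinit() behaves as in the parser tests (all parse errors are treated the same here for brevity):

    //
    // Syntax analysis (sketch)
    //
    var ast: syntax.Module = undefined;
    ast.init(&tokens, 0, alloc, &error_array) catch {
        // Module.init appended its diagnostics to error_array; print them like lexer errors
        for (error_array.items) |e| {
            var err = e;
            const err_str = try err.get_error_str(line, "repl", alloc);
            try stdout.print("\n{s}\n", .{err_str});
            try bw.flush();
            alloc.free(err_str);
        }
        continue;
    };
    defer ast.deinit();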