refactor: lexer serialization

Fernando Araoz 2025-01-22 19:34:27 -05:00
parent 890ac41cbe
commit 32aa8b1de7
2 changed files with 20 additions and 45 deletions

View File

@@ -2,62 +2,37 @@ const std = @import("std");
 const errors = @import("errors");
 const lexic = @import("lexic");
 
 const LexResult = struct {
     tokens: std.ArrayList(lexic.Token),
     error_array: std.ArrayList(errors.ErrorData),
 };
 
 pub fn tokenize_to_json() !void {
     // setup stdin, stdout and allocators
-    var gpa = std.heap.GeneralPurposeAllocator(.{}){};
-    const alloc = gpa.allocator();
+    var arena = std.heap.ArenaAllocator.init(std.heap.page_allocator);
+    defer arena.deinit();
+    const alloc = arena.allocator();
 
+    // Setup buffered stdout once
     const stdout_file = std.io.getStdOut().writer();
     var bw = std.io.bufferedWriter(stdout_file);
     const stdout = bw.writer();
 
-    // read up to 8192 bytes from stdin until EOF
-    var buffer: [8192]u8 = undefined;
-    const stdin = std.io.getStdIn();
-    const bytes_read = try stdin.readAll(&buffer);
+    // Read all stdin
+    var stdin_buf = std.ArrayList(u8).init(alloc);
+    defer stdin_buf.deinit();
+    // 16MB, why would anyone ever have source code bigger than that??
+    const max_file_size = 16 * 1024 * 1024;
+    try std.io.getStdIn().reader().readAllArrayList(&stdin_buf, max_file_size);
 
-    const bytes = buffer[0..bytes_read];
-    // tokenize
+    // Tokenize
     var error_array = std.ArrayList(errors.ErrorData).init(alloc);
     defer error_array.deinit();
-    const tokens = try lexic.tokenize(bytes, alloc, &error_array);
+    const tokens = try lexic.tokenize(stdin_buf.items, alloc, &error_array);
     defer tokens.deinit();
 
-    // serialize & print json to stdout
-    var json_arrl = std.ArrayList(u8).init(alloc);
-    defer json_arrl.deinit();
-    var json_writer = json_arrl.writer();
-    try json_writer.writeAll("{\"errors\":[");
-    const errors_len = error_array.items.len - 1;
+    // Write JSON directly to stdout
+    try stdout.writeAll("{\"errors\":[");
     for (error_array.items, 0..) |err, idx| {
-        try err.write_json(alloc, json_writer);
-        // write a comma only if there are items left
-        if (idx < errors_len) {
-            try json_writer.writeAll(",");
-        }
+        try err.write_json(alloc, stdout);
+        if (idx < error_array.items.len - 1) try stdout.writeAll(",");
     }
-    try json_writer.writeAll("],\"tokens\":");
-    // write tokens as JSON
-    const tokens_json = try std.json.stringifyAlloc(alloc, tokens.items, .{});
-    defer alloc.free(tokens_json);
-    try json_writer.writeAll(tokens_json);
-    try json_writer.writeAll("}");
-    try stdout.print("{s}", .{json_arrl.items});
+    try stdout.writeAll("],\"tokens\":");
+    try std.json.stringify(tokens.items, .{}, stdout);
+    try stdout.writeAll("}");
     try bw.flush();
-    // the end
 }
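
Taken together, the function after this change reads roughly as follows. This is a sketch assembled from the added and unchanged lines of the diff above; `errors` and `lexic` are the project's own modules, and the `LexResult` struct is omitted since the diff leaves it untouched.

const std = @import("std");
const errors = @import("errors");
const lexic = @import("lexic");

pub fn tokenize_to_json() !void {
    // One arena for the whole run; everything is released when it deinits
    var arena = std.heap.ArenaAllocator.init(std.heap.page_allocator);
    defer arena.deinit();
    const alloc = arena.allocator();

    // Buffered stdout, flushed once at the end
    const stdout_file = std.io.getStdOut().writer();
    var bw = std.io.bufferedWriter(stdout_file);
    const stdout = bw.writer();

    // Read all of stdin (up to 16MB) into a growable buffer
    var stdin_buf = std.ArrayList(u8).init(alloc);
    defer stdin_buf.deinit();
    const max_file_size = 16 * 1024 * 1024;
    try std.io.getStdIn().reader().readAllArrayList(&stdin_buf, max_file_size);

    // Tokenize the whole input
    var error_array = std.ArrayList(errors.ErrorData).init(alloc);
    defer error_array.deinit();
    const tokens = try lexic.tokenize(stdin_buf.items, alloc, &error_array);
    defer tokens.deinit();

    // Stream the JSON straight to buffered stdout instead of building it in memory
    try stdout.writeAll("{\"errors\":[");
    for (error_array.items, 0..) |err, idx| {
        try err.write_json(alloc, stdout);
        if (idx < error_array.items.len - 1) try stdout.writeAll(",");
    }
    try stdout.writeAll("],\"tokens\":");
    try std.json.stringify(tokens.items, .{}, stdout);
    try stdout.writeAll("}");
    try bw.flush();
}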

View File

@@ -154,7 +154,7 @@ pub const ErrorData = struct {
     // - Display message
 
     /// Writes this error as a JSON to the writer
-    pub fn write_json(self: ErrorData, alloc: std.mem.Allocator, writer: std.ArrayList(u8).Writer) !void {
+    pub fn write_json(self: ErrorData, alloc: std.mem.Allocator, writer: anytype) !void {
        // get this as JSON
        const json_str = try std.json.stringifyAlloc(alloc, .{
            .reason = self.reason,