From 1bd463998cc9acdb60870cd2c274477f1fb8c712 Mon Sep 17 00:00:00 2001
From: Araozu
Date: Fri, 29 Nov 2024 06:13:02 -0500
Subject: [PATCH] chore: update files

---
 CHANGELOG.md           |  8 +++++++-
 src/01_lexic/root.zig  | 13 +++++++++----
 src/01_lexic/token.zig |  3 +--
 src/main.zig           | 10 ++--------
 4 files changed, 19 insertions(+), 15 deletions(-)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index 99ae05c..e3e6abf 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -24,7 +24,13 @@ Now in Zig!
 
 ## v0.0.1
 
-- [x] Lex integers & floating point numbers
+- [x] Lex numbers
+- [x] Lex identifiers
+- [x] Lex datatypes
+- [x] Lex operators
+- [x] Lex single line comments
+- [x] Lex strings
+- [x] Lex grouping signs
 - [ ]
 
 
diff --git a/src/01_lexic/root.zig b/src/01_lexic/root.zig
index 48ecdb2..2b16d38 100644
--- a/src/01_lexic/root.zig
+++ b/src/01_lexic/root.zig
@@ -13,12 +13,13 @@ const punctuation = @import("./punctiation.zig");
 const TokenType = token.TokenType;
 const Token = token.Token;
 
-pub fn tokenize(input: []const u8, alloc: std.mem.Allocator) !void {
+/// Creates an array list of tokens. The caller is responsible for
+/// calling `deinit` to free the array list.
+pub fn tokenize(input: []const u8, alloc: std.mem.Allocator) !std.ArrayList(Token) {
     const input_len = input.len;
     var current_pos: usize = 0;
 
     var tokens = std.ArrayList(Token).init(alloc);
-    defer tokens.deinit();
 
     while (current_pos < input_len) {
         const actual_next_pos = ignore_whitespace(input, current_pos);
@@ -98,6 +99,8 @@ pub fn tokenize(input: []const u8, alloc: std.mem.Allocator) !void {
             break;
         }
     }
+
+    return tokens;
 }
 
 /// Ignores all whitespace on `input` since `start`
@@ -117,10 +120,12 @@ pub fn ignore_whitespace(input: []const u8, start: usize) usize {
 
 test "should insert 1 item" {
     const input = "322";
-    try tokenize(input, std.testing.allocator);
+    const arrl = try tokenize(input, std.testing.allocator);
+    arrl.deinit();
 }
 
 test "should insert 2 item" {
     const input = "322 644";
-    try tokenize(input, std.testing.allocator);
+    const arrl = try tokenize(input, std.testing.allocator);
+    arrl.deinit();
 }
diff --git a/src/01_lexic/token.zig b/src/01_lexic/token.zig
index 7a667e2..bebd2ad 100644
--- a/src/01_lexic/token.zig
+++ b/src/01_lexic/token.zig
@@ -16,8 +16,7 @@ pub const TokenType = enum {
     // punctiation that carries special meaning
     Comma,
     Newline,
-    // Others
-    Keyword,
+    // Each keyword will have its own token
 };
 
 pub const Token = struct {
diff --git a/src/main.zig b/src/main.zig
index 4d4f8d3..374dcde 100644
--- a/src/main.zig
+++ b/src/main.zig
@@ -27,14 +27,8 @@ fn repl() !void {
     var gpa = std.heap.GeneralPurposeAllocator(.{}){};
     const alloc = gpa.allocator();
 
-    try lexic.tokenize(line, alloc);
+    const tokens = try lexic.tokenize(line, alloc);
+    defer tokens.deinit();
 
     try bw.flush();
 }
-
-test "simple test" {
-    var list = std.ArrayList(i32).init(std.testing.allocator);
-    defer list.deinit(); // try commenting this out and see if zig detects the memory leak!
-    try list.append(42);
-    try std.testing.expectEqual(@as(i32, 42), list.pop());
-}
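
The root.zig change turns `tokenize` from a fire-and-forget call into one that hands ownership of the token list to the caller. Below is a minimal usage sketch of that contract, not part of the patch itself: the relative import path and the token-count assertion are assumptions for illustration (the project may expose the lexer as a named module in build.zig), though the existing "should insert 2 item" test suggests "322 644" lexes into two tokens.

const std = @import("std");
// Assumed relative import; the real project may instead register the
// lexer as a module in build.zig.
const lexic = @import("01_lexic/root.zig");

test "caller owns the token list returned by tokenize" {
    const input = "322 644";

    // tokenize no longer deinits the list itself; the returned
    // ArrayList(Token) is owned by the caller and must be freed.
    const tokens = try lexic.tokenize(input, std.testing.allocator);
    defer tokens.deinit();

    // Illustrative assertion: two integer literals should yield two tokens.
    try std.testing.expectEqual(@as(usize, 2), tokens.items.len);
}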