chore: update files

This commit is contained in:
Araozu 2024-11-29 06:13:02 -05:00
parent b71cfe4370
commit 1bd463998c
4 changed files with 19 additions and 15 deletions

View File

@ -24,7 +24,13 @@ Now in Zig!
## v0.0.1 ## v0.0.1
- [x] Lex integers & floating point numbers - [x] Lex numbers
- [x] Lex identifier
- [x] Lex datatypes
- [x] Lex operators
- [x] Lex single line comments
- [x] Lex strings
- [x] Lex grouping signs
- [ ] - [ ]

View File

@ -13,12 +13,13 @@ const punctuation = @import("./punctiation.zig");
const TokenType = token.TokenType; const TokenType = token.TokenType;
const Token = token.Token; const Token = token.Token;
pub fn tokenize(input: []const u8, alloc: std.mem.Allocator) !void { // Creates an array list of tokens. The caller is responsible for
// calling `deinit` to free the array list
pub fn tokenize(input: []const u8, alloc: std.mem.Allocator) !std.ArrayList(Token) {
const input_len = input.len; const input_len = input.len;
var current_pos: usize = 0; var current_pos: usize = 0;
var tokens = std.ArrayList(Token).init(alloc); var tokens = std.ArrayList(Token).init(alloc);
defer tokens.deinit();
while (current_pos < input_len) { while (current_pos < input_len) {
const actual_next_pos = ignore_whitespace(input, current_pos); const actual_next_pos = ignore_whitespace(input, current_pos);
@ -98,6 +99,8 @@ pub fn tokenize(input: []const u8, alloc: std.mem.Allocator) !void {
break; break;
} }
} }
return tokens;
} }
/// Ignores all whitespace on `input` starting at `start` /// Ignores all whitespace on `input` starting at `start`
@ -117,10 +120,12 @@ pub fn ignore_whitespace(input: []const u8, start: usize) usize {
test "should insert 1 item" { test "should insert 1 item" {
const input = "322"; const input = "322";
try tokenize(input, std.testing.allocator); const arrl = try tokenize(input, std.testing.allocator);
arrl.deinit();
} }
test "should insert 2 item" { test "should insert 2 item" {
const input = "322 644"; const input = "322 644";
try tokenize(input, std.testing.allocator); const arrl = try tokenize(input, std.testing.allocator);
arrl.deinit();
} }

View File

@ -16,8 +16,7 @@ pub const TokenType = enum {
// punctiation that carries special meaning // punctiation that carries special meaning
Comma, Comma,
Newline, Newline,
// Others // Each keyword will have its own token
Keyword,
}; };
pub const Token = struct { pub const Token = struct {

View File

@ -27,14 +27,8 @@ fn repl() !void {
var gpa = std.heap.GeneralPurposeAllocator(.{}){}; var gpa = std.heap.GeneralPurposeAllocator(.{}){};
const alloc = gpa.allocator(); const alloc = gpa.allocator();
try lexic.tokenize(line, alloc); const tokens = try lexic.tokenize(line, alloc);
defer tokens.deinit();
try bw.flush(); try bw.flush();
} }
test "simple test" {
var list = std.ArrayList(i32).init(std.testing.allocator);
defer list.deinit(); // try commenting this out and see if zig detects the memory leak!
try list.append(42);
try std.testing.expectEqual(@as(i32, 42), list.pop());
}