fix: memory leak when the lexer fails after lexing one or more items

This commit is contained in:
Fernando Araoz 2024-12-22 06:25:34 -05:00
parent f5c6e8556d
commit 62b92f59b4

View File

@ -20,6 +20,7 @@ pub fn tokenize(input: []const u8, alloc: std.mem.Allocator) !std.ArrayList(Toke
var current_pos: usize = 0; var current_pos: usize = 0;
var tokens = std.ArrayList(Token).init(alloc); var tokens = std.ArrayList(Token).init(alloc);
errdefer tokens.deinit();
while (current_pos < input_len) { while (current_pos < input_len) {
const actual_next_pos = ignore_whitespace(input, current_pos); const actual_next_pos = ignore_whitespace(input, current_pos);
@ -137,3 +138,18 @@ test "should insert 2 item" {
const arrl = try tokenize(input, std.testing.allocator); const arrl = try tokenize(input, std.testing.allocator);
arrl.deinit(); arrl.deinit();
} }
// Regression test for the errdefer added to `tokenize`: lexing "322" succeeds
// (one token is appended), then the unterminated string `"hello` makes the
// lexer fail with error.IncompleteString. The errdefer must free the partially
// filled list, and std.testing.allocator will report the leak if it doesn't.
//
// `expectError` asserts in one step that the call returns exactly
// error.IncompleteString — replacing the previous catch/switch where the
// final `arrl.deinit()` was unreachable (the `try expect(false)` above it
// returned first, leaking the list on an unexpected success).
test "should insert an item, fail, and not leak" {
    const input = "322 \"hello";
    try std.testing.expectError(error.IncompleteString, tokenize(input, std.testing.allocator));
}