refactor: tests of lexer and parser

commit 8425f621eb
parent d600c575f3
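The tests below stop passing an allocator and an ad-hoc error list into tokenize and instead hand it a single *context.CompilerContext. The context's real definition is not part of this diff; what follows is a minimal, hypothetical sketch of the shape the tests appear to rely on (an allocator field plus an errors list of errors.ErrorData), written purely as an assumption:

// Hypothetical sketch only: the actual CompilerContext lives in the `context`
// module and may differ. Field names are inferred from how the tests use it
// (ctx.allocator, ctx.errors.items[i].reason / .start_position / .end_position).
const std = @import("std");
const errors = @import("errors");

pub const CompilerContext = struct {
    allocator: std.mem.Allocator,
    errors: std.ArrayList(errors.ErrorData),

    pub fn init(allocator: std.mem.Allocator) CompilerContext {
        return .{
            .allocator = allocator,
            .errors = std.ArrayList(errors.ErrorData).init(allocator),
        };
    }

    pub fn deinit(self: *CompilerContext) void {
        // Assumes ErrorData owns heap data and exposes deinit(), as the old
        // test teardown (`for (error_list.items) |*i| i.deinit();`) suggests.
        for (self.errors.items) |*err| err.deinit();
        self.errors.deinit();
    }
};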
@@ -24,13 +24,12 @@ const LexError = token.LexError;
 /// found while lexing. The caller is responsible for freeing it.
 pub fn tokenize(
     input: []const u8,
-    alloc: std.mem.Allocator,
     ctx: *context.CompilerContext,
 ) !std.ArrayList(Token) {
     const input_len = input.len;
     var current_pos: usize = 0;
 
-    var tokens = std.ArrayList(Token).init(alloc);
+    var tokens = std.ArrayList(Token).init(ctx.allocator);
     errdefer tokens.deinit();
 
     while (current_pos < input_len) {
@@ -189,30 +188,27 @@ test {
 }
 
 test "should insert 1 item" {
+    var ctx = context.CompilerContext.init(std.testing.allocator);
+    defer ctx.deinit();
     const input = "322";
-    var error_list = std.ArrayList(errors.ErrorData).init(std.testing.allocator);
-    defer error_list.deinit();
-    const arrl = try tokenize(input, std.testing.allocator, &error_list);
+    const arrl = try tokenize(input, &ctx);
     arrl.deinit();
 }
 
 test "should insert 2 item" {
+    var ctx = context.CompilerContext.init(std.testing.allocator);
+    defer ctx.deinit();
     const input = "322 644";
-    var error_list = std.ArrayList(errors.ErrorData).init(std.testing.allocator);
-    defer error_list.deinit();
-    const arrl = try tokenize(input, std.testing.allocator, &error_list);
+    const arrl = try tokenize(input, &ctx);
     arrl.deinit();
 }
 
 test "should insert an item, fail, and not leak" {
+    var ctx = context.CompilerContext.init(std.testing.allocator);
+    defer ctx.deinit();
     const input = "322 \"hello";
-    var error_list = std.ArrayList(errors.ErrorData).init(std.testing.allocator);
-    defer error_list.deinit();
-    defer for (error_list.items) |*i| {
-        i.deinit();
-    };
 
-    const arrl = tokenize(input, std.testing.allocator, &error_list) catch |e| switch (e) {
+    const arrl = tokenize(input, &ctx) catch |e| switch (e) {
         else => {
             try std.testing.expect(false);
             return;
@@ -222,25 +218,25 @@ test "should insert an item, fail, and not leak" {
 }
 
 test "shouldnt leak" {
+    var ctx = context.CompilerContext.init(std.testing.allocator);
+    defer ctx.deinit();
     const input = "";
-    var error_list = std.ArrayList(errors.ErrorData).init(std.testing.allocator);
-    defer error_list.deinit();
-    const arrl = try tokenize(input, std.testing.allocator, &error_list);
+    const arrl = try tokenize(input, &ctx);
     arrl.deinit();
 }
 
 test "should handle recoverable errors" {
+    var ctx = context.CompilerContext.init(std.testing.allocator);
+    defer ctx.deinit();
+
     const input = "322 0b 644";
-    var error_list = std.ArrayList(errors.ErrorData).init(std.testing.allocator);
-    defer error_list.deinit();
-    defer for (error_list.items) |*err| err.deinit();
-    const arrl = try tokenize(input, std.testing.allocator, &error_list);
+    const arrl = try tokenize(input, &ctx);
     defer arrl.deinit();
 
-    try std.testing.expectEqual(@as(usize, 1), error_list.items.len);
+    try std.testing.expectEqual(@as(usize, 1), ctx.errors.items.len);
     try std.testing.expectEqual(@as(usize, 2), arrl.items.len);
 
-    try std.testing.expectEqualStrings("Incomplete number", error_list.items[0].reason);
-    try std.testing.expectEqual(@as(usize, 4), error_list.items[0].start_position);
-    try std.testing.expectEqual(@as(usize, 6), error_list.items[0].end_position);
+    try std.testing.expectEqualStrings("Incomplete number", ctx.errors.items[0].reason);
+    try std.testing.expectEqual(@as(usize, 4), ctx.errors.items[0].start_position);
+    try std.testing.expectEqual(@as(usize, 6), ctx.errors.items[0].end_position);
 }
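For reference, this is how a non-test caller would presumably drive the refactored lexer; a minimal sketch, assuming the lexic and context modules are wired up as in the tests above (anything not shown in the diff, such as the main entry point, is illustrative only):

const std = @import("std");
const lexic = @import("lexic");
const context = @import("context");

pub fn main() !void {
    var gpa = std.heap.GeneralPurposeAllocator(.{}){};
    defer _ = gpa.deinit();

    // The context now carries both the allocator and the recoverable errors.
    var ctx = context.CompilerContext.init(gpa.allocator());
    defer ctx.deinit();

    const tokens = try lexic.tokenize("var answer = 322", &ctx);
    defer tokens.deinit();

    // Recoverable lexing errors are read from the context instead of a
    // separately threaded error list.
    for (ctx.errors.items) |err| {
        std.debug.print("lex error: {s} ({d}..{d})\n", .{
            err.reason, err.start_position, err.end_position,
        });
    }
}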
@@ -1,6 +1,8 @@
 const std = @import("std");
 const lexic = @import("lexic");
 const errors = @import("errors");
+const context = @import("context");
 
 const Token = lexic.Token;
 const TokenType = lexic.TokenType;
+
@@ -27,10 +29,12 @@ pub const Expression = union(enum) {
 };
 
 test "should parse expression" {
+    var ctx = context.CompilerContext.init(std.testing.allocator);
+    defer ctx.deinit();
     const input = "322";
     var error_list = std.ArrayList(errors.ErrorData).init(std.testing.allocator);
     defer error_list.deinit();
-    const tokens = try lexic.tokenize(input, std.testing.allocator, &error_list);
+    const tokens = try lexic.tokenize(input, &ctx);
     defer tokens.deinit();
 
     var expr: Expression = undefined;
@@ -43,10 +47,12 @@ test "should parse expression" {
 }
 
 test "should fail on non expression" {
+    var ctx = context.CompilerContext.init(std.testing.allocator);
+    defer ctx.deinit();
     const input = "identifier";
     var error_list = std.ArrayList(errors.ErrorData).init(std.testing.allocator);
     defer error_list.deinit();
-    const tokens = try lexic.tokenize(input, std.testing.allocator, &error_list);
+    const tokens = try lexic.tokenize(input, &ctx);
     defer tokens.deinit();
 
     var expr: Expression = undefined;
@@ -1,6 +1,7 @@
 const std = @import("std");
 const lexic = @import("lexic");
 const errors = @import("errors");
+const context = @import("context");
 
 const expression = @import("./expression.zig");
 const variable = @import("./variable.zig");
@@ -92,10 +93,12 @@ test {
 }
 
 test "should parse a single statement" {
+    var ctx = context.CompilerContext.init(std.testing.allocator);
+    defer ctx.deinit();
     const input = "var my_variable = 322";
     var error_list = std.ArrayList(errors.ErrorData).init(std.testing.allocator);
     defer error_list.deinit();
-    const tokens = try lexic.tokenize(input, std.testing.allocator, &error_list);
+    const tokens = try lexic.tokenize(input, &ctx);
     defer tokens.deinit();
 
     var module: Module = undefined;
@@ -105,10 +108,12 @@ test "should parse a single statement" {
 }
 
 test "should clean memory if a statement parsing fails after one item has been inserted" {
+    var ctx = context.CompilerContext.init(std.testing.allocator);
+    defer ctx.deinit();
     const input = "var my_variable = 322 unrelated()";
     var error_list = std.ArrayList(errors.ErrorData).init(std.testing.allocator);
     defer error_list.deinit();
-    const tokens = try lexic.tokenize(input, std.testing.allocator, &error_list);
+    const tokens = try lexic.tokenize(input, &ctx);
     defer tokens.deinit();
 
     var module: Module = undefined;
@@ -5,6 +5,7 @@ const types = @import("./types.zig");
 const utils = @import("./utils.zig");
 const variable = @import("./variable.zig");
 const errors = @import("errors");
+const context = @import("context");
 
 const TokenStream = types.TokenStream;
 const ParseError = types.ParseError;
@@ -55,10 +56,12 @@ pub const Statement = struct {
 };
 
 test "should parse a variable declaration statement" {
+    var ctx = context.CompilerContext.init(std.testing.allocator);
+    defer ctx.deinit();
     const input = "var my_variable = 322";
     var error_list = std.ArrayList(errors.ErrorData).init(std.testing.allocator);
     defer error_list.deinit();
-    const tokens = try lexic.tokenize(input, std.testing.allocator, &error_list);
+    const tokens = try lexic.tokenize(input, &ctx);
     defer tokens.deinit();
 
     var statement: Statement = undefined;
@@ -74,10 +77,12 @@ test "should parse a variable declaration statement" {
 }
 
 test "should fail on other constructs" {
+    var ctx = context.CompilerContext.init(std.testing.allocator);
+    defer ctx.deinit();
     const input = "a_function_call(322)";
     var error_list = std.ArrayList(errors.ErrorData).init(std.testing.allocator);
     defer error_list.deinit();
-    const tokens = try lexic.tokenize(input, std.testing.allocator, &error_list);
+    const tokens = try lexic.tokenize(input, &ctx);
     defer tokens.deinit();
 
     var statement: Statement = undefined;
@@ -4,6 +4,7 @@ const expression = @import("expression.zig");
 const types = @import("./types.zig");
 const utils = @import("./utils.zig");
 const errors = @import("errors");
+const context = @import("context");
 
 const TokenStream = types.TokenStream;
 const ParseError = types.ParseError;
@@ -88,10 +89,12 @@ pub const VariableBinding = struct {
 };
 
 test "should parse a minimal var" {
+    var ctx = context.CompilerContext.init(std.testing.allocator);
+    defer ctx.deinit();
     const input = "var my_variable = 322";
     var error_list = std.ArrayList(errors.ErrorData).init(std.testing.allocator);
     defer error_list.deinit();
-    const tokens = try lexic.tokenize(input, std.testing.allocator, &error_list);
+    const tokens = try lexic.tokenize(input, &ctx);
     defer tokens.deinit();
 
     var binding: VariableBinding = undefined;
@@ -110,10 +113,12 @@ test "should parse a minimal var" {
 }
 
 test "should return null if stream doesnt start with var" {
+    var ctx = context.CompilerContext.init(std.testing.allocator);
+    defer ctx.deinit();
     const input = "different_token_stream()";
     var error_list = std.ArrayList(errors.ErrorData).init(std.testing.allocator);
     defer error_list.deinit();
-    const tokens = try lexic.tokenize(input, std.testing.allocator, &error_list);
+    const tokens = try lexic.tokenize(input, &ctx);
     defer tokens.deinit();
 
     var binding: VariableBinding = undefined;
@@ -123,10 +128,12 @@ test "should return null if stream doesnt start with var" {
 }
 
 test "should fail if the identifier is missing" {
+    var ctx = context.CompilerContext.init(std.testing.allocator);
+    defer ctx.deinit();
     const input = "var ";
     var error_list = std.ArrayList(errors.ErrorData).init(std.testing.allocator);
     defer error_list.deinit();
-    const tokens = try lexic.tokenize(input, std.testing.allocator, &error_list);
+    const tokens = try lexic.tokenize(input, &ctx);
     defer tokens.deinit();
 
     var error_data: errors.ErrorData = undefined;
@@ -148,10 +155,12 @@ test "should fail if the identifier is missing" {
 }
 
 test "should fail if there is not an identifier after var" {
+    var ctx = context.CompilerContext.init(std.testing.allocator);
+    defer ctx.deinit();
     const input = "var 322";
     var error_list = std.ArrayList(errors.ErrorData).init(std.testing.allocator);
     defer error_list.deinit();
-    const tokens = try lexic.tokenize(input, std.testing.allocator, &error_list);
+    const tokens = try lexic.tokenize(input, &ctx);
     defer tokens.deinit();
 
     var binding: VariableBinding = undefined;
@@ -164,10 +173,12 @@ test "should fail if there is not an identifier after var" {
 }
 
 test "should fail if the equal sign is missing" {
+    var ctx = context.CompilerContext.init(std.testing.allocator);
+    defer ctx.deinit();
     const input = "var my_id ";
     var error_list = std.ArrayList(errors.ErrorData).init(std.testing.allocator);
     defer error_list.deinit();
-    const tokens = try lexic.tokenize(input, std.testing.allocator, &error_list);
+    const tokens = try lexic.tokenize(input, &ctx);
     defer tokens.deinit();
 
     var binding: VariableBinding = undefined;
@@ -180,10 +191,12 @@ test "should fail if the equal sign is missing" {
 }
 
 test "should fail if the equal sign is not found" {
+    var ctx = context.CompilerContext.init(std.testing.allocator);
+    defer ctx.deinit();
     const input = "var my_id is string";
     var error_list = std.ArrayList(errors.ErrorData).init(std.testing.allocator);
     defer error_list.deinit();
-    const tokens = try lexic.tokenize(input, std.testing.allocator, &error_list);
+    const tokens = try lexic.tokenize(input, &ctx);
     defer tokens.deinit();
 
     var binding: VariableBinding = undefined;
@@ -196,10 +209,12 @@ test "should fail if the equal sign is not found" {
 }
 
 test "should fail if the expression parsing fails" {
+    var ctx = context.CompilerContext.init(std.testing.allocator);
+    defer ctx.deinit();
     const input = "var my_id = ehhh";
     var error_list = std.ArrayList(errors.ErrorData).init(std.testing.allocator);
     defer error_list.deinit();
-    const tokens = try lexic.tokenize(input, std.testing.allocator, &error_list);
+    const tokens = try lexic.tokenize(input, &ctx);
     defer tokens.deinit();
 
     var binding: VariableBinding = undefined;