refactor: move expression parsing to its own file
This commit is contained in:
parent
095aebcad5
commit
362c591b4b
47
src/02_syntax/expression.zig
Normal file
47
src/02_syntax/expression.zig
Normal file
@ -0,0 +1,47 @@
|
||||
const std = @import("std");
|
||||
const lexic = @import("lexic");
|
||||
const Token = lexic.Token;
|
||||
const TokenType = lexic.TokenType;
|
||||
const ParseError = @import("./types.zig").ParseError;
|
||||
|
||||
/// A parsed expression node. Currently only integer literals are supported.
const Expression = union(enum) {
    /// Points at the backing token inside the caller's token list.
    /// The token list must outlive this Expression.
    number: *const Token,

    /// Attempts to parse an expression from a token stream.
    ///
    /// Returns `ParseError.Unmatched` when the token at `pos` is not an
    /// integer literal. Asserts (debug-only) that `pos` is in bounds.
    fn parse(tokens: *const std.ArrayList(Token), pos: usize) ParseError!@This() {
        std.debug.assert(pos < tokens.items.len);

        // Take the address of the token *inside* the list, not of a local
        // copy: `const t = tokens.items[pos];` followed by `.number = &t`
        // would return a pointer to this frame's stack memory, which
        // dangles as soon as parse() returns.
        const t = &tokens.items[pos];
        if (t.token_type != TokenType.Int) {
            return ParseError.Unmatched;
        }

        return .{ .number = t };
    }
};
|
||||
|
||||
// Happy path: a lone integer literal should parse into a `.number` node
// that points at the original token.
test "should parse expression" {
    const tokens = try lexic.tokenize("322", std.testing.allocator);
    defer tokens.deinit();

    const parsed = try Expression.parse(&tokens, 0);
    try std.testing.expectEqualDeep(TokenType.Int, parsed.number.token_type);
    try std.testing.expectEqualDeep("322", parsed.number.value);
}
|
||||
|
||||
// Negative path: an identifier token is not an expression, so parse()
// must fail with `ParseError.Unmatched`.
test "should fail on non expression" {
    const input = "identifier";
    const tokens = try lexic.tokenize(input, std.testing.allocator);
    defer tokens.deinit();

    if (Expression.parse(&tokens, 0)) |expr| {
        // Reaching this branch means parse() wrongly succeeded.
        std.debug.print("v: {s}", .{expr.number.value});
        try std.testing.expect(false);
    } else |err| {
        try std.testing.expectEqual(ParseError.Unmatched, err);
    }
}
|
@ -1,14 +1,9 @@
|
||||
const std = @import("std");
|
||||
const lexic = @import("lexic");
|
||||
const expression = @import("./expression.zig");
|
||||
const Token = lexic.Token;
|
||||
const TokenType = lexic.TokenType;
|
||||
|
||||
const TokenStream = std.ArrayList(Token);
|
||||
|
||||
/// Errors produced while parsing the token stream.
const ParseError = error{
    /// The tokens at the current position did not match the rule attempted.
    Unmatched,
    /// Generic, unrecoverable parsing failure.
    Error,
};
|
||||
const ParseError = @import("./types.zig").ParseError;
|
||||
|
||||
const Statement = union(enum) {
|
||||
VariableBinding: u8,
|
||||
@ -18,39 +13,11 @@ const VariableBinding = struct {
|
||||
is_mutable: bool,
|
||||
datatype: ?*Token,
|
||||
identifier: *Token,
|
||||
expression: Expression,
|
||||
expression: expression.Expression,
|
||||
|
||||
/// TODO: parse a variable binding from a token stream.
/// NOTE(review): stub — a function with a non-void return type (`!@This()`)
/// and an empty body will not compile; needs an implementation (or a
/// `@compileError`/`error.Error` placeholder) before this file builds.
fn parse() !@This() {}
|
||||
};
|
||||
|
||||
/// A parsed expression node. Currently only integer literals are supported.
const Expression = union(enum) {
    /// Points at the backing token inside the caller's token list.
    /// The token list must outlive this Expression.
    number: *const Token,

    /// Attempts to parse an expression from a token stream.
    ///
    /// Returns `ParseError.Unmatched` when the token at `pos` is not an
    /// integer literal. Asserts (debug-only) that `pos` is in bounds.
    fn parse(tokens: *const TokenStream, pos: usize) ParseError!@This() {
        std.debug.assert(pos < tokens.items.len);

        // Address the token inside the list rather than a local copy:
        // `const t = tokens.items[pos];` + `.number = &t` would hand the
        // caller a pointer to stack memory that dangles on return.
        const t = &tokens.items[pos];
        if (t.token_type != TokenType.Int) {
            return ParseError.Unmatched;
        }

        return .{ .number = t };
    }
};
|
||||
|
||||
// Reference every nested declaration so that `test` blocks inside
// referenced types (and imported files) are included by `zig test`.
test {
    std.testing.refAllDecls(@This());
}
|
||||
|
||||
// Happy path: a lone integer literal should parse into a `.number` node
// that points at the original token.
test "should parse expression" {
    const tokens = try lexic.tokenize("322", std.testing.allocator);
    defer tokens.deinit();

    const parsed = try Expression.parse(&tokens, 0);
    try std.testing.expectEqualDeep(TokenType.Int, parsed.number.token_type);
    try std.testing.expectEqualDeep("322", parsed.number.value);
}
|
||||
|
4
src/02_syntax/types.zig
Normal file
4
src/02_syntax/types.zig
Normal file
@ -0,0 +1,4 @@
|
||||
/// Errors shared by all parsers in the syntax stage.
pub const ParseError = error{
    /// The tokens at the current position did not match the rule attempted.
    Unmatched,
    /// Generic, unrecoverable parsing failure.
    Error,
};
|
Loading…
Reference in New Issue
Block a user