diff --git a/src/01_lexic/root.zig b/src/01_lexic/root.zig
index 8baff06..b3c026c 100644
--- a/src/01_lexic/root.zig
+++ b/src/01_lexic/root.zig
@@ -10,8 +10,8 @@
 const string = @import("string.zig");
 const grouping = @import("grouping.zig");
 const punctuation = @import("punctiation.zig");
-const TokenType = token.TokenType;
-const Token = token.Token;
 
 /// Creates an array list of tokens. The caller is responsible of
 /// calling `deinit` to free the array list
diff --git a/src/02_syntax/root.zig b/src/02_syntax/root.zig
index c12b911..deffc7e 100644
--- a/src/02_syntax/root.zig
+++ b/src/02_syntax/root.zig
@@ -1,5 +1,64 @@
 const std = @import("std");
+const lexic = @import("lexic");
+const Token = lexic.Token;
+const TokenType = lexic.TokenType;
+
+const TokenStream = std.ArrayList(Token);
+
+const ParseError = error{
+    Unmatched,
+    Error,
+};
+
+const Statement = union(enum) {
+    VariableBinding: u8,
+};
+
+const VariableBinding = struct {
+    is_mutable: bool,
+    datatype: ?*Token,
+    identifier: *Token,
+    expression: Expression,
+
+    // TODO: implement. An empty body is a compile error for a
+    // non-void return type once this function is analyzed.
+    fn parse() !@This() {
+        return error.Error;
+    }
+};
+
+const Expression = union(enum) {
+    number: *const Token,
+
+    /// Attempts to parse an expression from a token stream.
+    /// Returns `ParseError.Unmatched` if the token at `pos` is not an Int.
+    /// Asserts that `pos` is in bounds.
+    fn parse(tokens: *const TokenStream, pos: usize) ParseError!@This() {
+        std.debug.assert(pos < tokens.items.len);
+
+        // Point into the list's backing array: taking the address of
+        // a local copy would return a dangling pointer.
+        const t = &tokens.items[pos];
+        if (t.token_type != TokenType.Int) {
+            return ParseError.Unmatched;
+        }
+
+        return .{
+            .number = t,
+        };
+    }
+};
 
 test {
     std.testing.refAllDecls(@This());
 }
+
+test "should parse expression" {
+    const input = "322";
+    const tokens = try lexic.tokenize(input, std.testing.allocator);
+    defer tokens.deinit();
+
+    const expr = try Expression.parse(&tokens, 0);
+    try std.testing.expectEqualDeep("322", expr.number.value);
+    try std.testing.expectEqualDeep(TokenType.Int, expr.number.token_type);
+}