Compare commits
5 commits: 095aebcad5 ... e48e36bea8

SHA1:
e48e36bea8
fd0459aef1
55231e986e
44bd9fc5ad
362c591b4b
@@ -92,6 +92,8 @@ pub fn build(b: *std.Build) void {
     const files = [_][]const u8{
         "src/01_lexic/root.zig",
         "src/02_syntax/root.zig",
+        "src/02_syntax/variable.zig",
+        "src/02_syntax/expression.zig",
     };

     for (files) |file| {
         const file_unit_test = b.addTest(.{
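For orientation, here is a sketch of how this loop plugs into the build graph: each listed file becomes its own test artifact, so the two new syntax modules are unit-tested independently. This is a hypothetical reconstruction assuming a Zig 0.12/0.13-era std.Build API, not the repository's exact build script.

// Hypothetical sketch, not the repository's exact build.zig: each file in
// `files` becomes its own test compilation, wired into a shared "test" step.
const std = @import("std");

pub fn build(b: *std.Build) void {
    const test_step = b.step("test", "Run unit tests");

    const files = [_][]const u8{
        "src/01_lexic/root.zig",
        "src/02_syntax/root.zig",
        "src/02_syntax/variable.zig",
        "src/02_syntax/expression.zig",
    };

    for (files) |file| {
        const file_unit_test = b.addTest(.{
            .root_source_file = b.path(file),
        });
        test_step.dependOn(&b.addRunArtifact(file_unit_test).step);
    }
}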
@@ -25,8 +25,13 @@ pub fn lex(input: []const u8, start: usize) LexError!?LexReturn {
         final_pos = new_pos;
     }

+    const value = input[start..final_pos];
+
+    // check for keywords
+    const new_token_type = if (utils.try_keyword("var", value)) TokenType.K_Var else TokenType.Identifier;
+
     return .{
-        Token.init(input[start..final_pos], TokenType.Identifier, start),
+        Token.init(value, new_token_type, start),
         final_pos,
     };
 }
@@ -38,6 +43,7 @@ test "should lex single letter" {
     if (output) |tuple| {
         const t = tuple[0];
         try std.testing.expectEqualDeep("a", t.value);
+        try std.testing.expectEqual(TokenType.Identifier, t.token_type);
     } else {
         try std.testing.expect(false);
     }
@@ -50,6 +56,7 @@ test "should lex single underscore" {
     if (output) |tuple| {
         const t = tuple[0];
         try std.testing.expectEqualDeep("_", t.value);
+        try std.testing.expectEqual(TokenType.Identifier, t.token_type);
     } else {
         try std.testing.expect(false);
     }
@@ -62,6 +69,7 @@ test "should lex identifier 1" {
     if (output) |tuple| {
         const t = tuple[0];
         try std.testing.expectEqualDeep("abc", t.value);
+        try std.testing.expectEqual(TokenType.Identifier, t.token_type);
     } else {
         try std.testing.expect(false);
     }
@@ -74,6 +82,7 @@ test "should lex identifier 2" {
     if (output) |tuple| {
         const t = tuple[0];
         try std.testing.expectEqualDeep("snake_case", t.value);
+        try std.testing.expectEqual(TokenType.Identifier, t.token_type);
     } else {
         try std.testing.expect(false);
     }
@@ -86,6 +95,7 @@ test "should lex identifier 3" {
     if (output) |tuple| {
         const t = tuple[0];
         try std.testing.expectEqualDeep("camelCase", t.value);
+        try std.testing.expectEqual(TokenType.Identifier, t.token_type);
     } else {
         try std.testing.expect(false);
     }
@@ -98,6 +108,7 @@ test "should lex identifier 4" {
     if (output) |tuple| {
         const t = tuple[0];
         try std.testing.expectEqualDeep("identifier_number_3", t.value);
+        try std.testing.expectEqual(TokenType.Identifier, t.token_type);
     } else {
         try std.testing.expect(false);
     }
@@ -109,3 +120,16 @@ test "shouldnt lex datatype" {

     try std.testing.expect(output == null);
 }
+
+test "should lex var keyword" {
+    const input = "var";
+    const output = try lex(input, 0);
+
+    if (output) |tuple| {
+        const t = tuple[0];
+        try std.testing.expectEqualDeep("var", t.value);
+        try std.testing.expectEqual(TokenType.K_Var, t.token_type);
+    } else {
+        try std.testing.expect(false);
+    }
+}
@@ -25,6 +25,11 @@ pub fn tokenize(input: []const u8, alloc: std.mem.Allocator) !std.ArrayList(Token) {
         const actual_next_pos = ignore_whitespace(input, current_pos);
         assert(current_pos <= actual_next_pos);

+        // if after processing whitespace we reach eof, exit
+        if (actual_next_pos == input_len) {
+            break;
+        }
+
         // attempt to lex a number
         if (try number.lex(input, input_len, actual_next_pos)) |tuple| {
             assert(tuple[1] > current_pos);
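The added guard matters because skipping whitespace can land the cursor exactly on end-of-input; without the break, the tokenizer would ask the sub-lexers for a token at EOF. A minimal standalone sketch of that boundary, using a hypothetical helper with the same shape as ignore_whitespace:

const std = @import("std");

// Hypothetical stand-in for the tokenizer's whitespace skipper.
fn ignore_whitespace(input: []const u8, pos: usize) usize {
    var p = pos;
    while (p < input.len and input[p] == ' ') : (p += 1) {}
    return p;
}

test "whitespace skipping can land exactly on EOF" {
    const input = "abc   ";
    // The cursor ends up at input.len; the tokenize loop must break here.
    try std.testing.expectEqual(input.len, ignore_whitespace(input, 3));
}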
@@ -17,6 +17,7 @@ pub const TokenType = enum {
     Comma,
     Newline,
     // Each keyword will have its own token
+    K_Var,
 };

 pub const Token = struct {
@@ -1,3 +1,4 @@
+const std = @import("std");
 const token = @import("./token.zig");
 const LexError = token.LexError;
 const LexReturn = token.LexReturn;
@@ -96,3 +97,7 @@ pub fn lex_many(

     return current_pos;
 }
+
+pub inline fn try_keyword(comptime expected: []const u8, actual: []const u8) bool {
+    return std.mem.eql(u8, expected, actual);
+}
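Together with the identifier hunk above, this is the whole keyword mechanism: scan an identifier as usual, then compare the scanned slice against each known keyword and reclassify the token type on a match. A standalone sketch with stand-in types (only `var` exists so far in the diff):

const std = @import("std");

// Stand-in enum; the real TokenType lives in token.zig.
const TokenType = enum { Identifier, K_Var };

inline fn try_keyword(comptime expected: []const u8, actual: []const u8) bool {
    return std.mem.eql(u8, expected, actual);
}

// Mirrors the reclassification added to the identifier lexer.
fn classify(value: []const u8) TokenType {
    return if (try_keyword("var", value)) TokenType.K_Var else TokenType.Identifier;
}

test "classifies keywords after identifier scanning" {
    try std.testing.expectEqual(TokenType.K_Var, classify("var"));
    try std.testing.expectEqual(TokenType.Identifier, classify("variable"));
}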
src/02_syntax/expression.zig (new file, 51 lines)
@@ -0,0 +1,51 @@
+const std = @import("std");
+const lexic = @import("lexic");
+const Token = lexic.Token;
+const TokenType = lexic.TokenType;
+const ParseError = @import("./types.zig").ParseError;
+
+pub const Expression = union(enum) {
+    number: *const Token,
+
+    /// Attempts to parse an expression from a token stream.
+    ///
+    /// Receives a pointer to the memory for initialization
+    pub fn init(target: *Expression, tokens: *const std.ArrayList(Token), pos: usize) error{Unmatched}!void {
+        std.debug.assert(pos < tokens.items.len);
+
+        const t = tokens.items[pos];
+        if (t.token_type != TokenType.Int) {
+            return error.Unmatched;
+        }
+
+        target.* = .{
+            .number = &t,
+        };
+    }
+};
+
+test "should parse expression" {
+    const input = "322";
+    const tokens = try lexic.tokenize(input, std.testing.allocator);
+    defer tokens.deinit();
+
+    var expr: Expression = undefined;
+    try expr.init(&tokens, 0);
+    try std.testing.expectEqualDeep("322", expr.number.value);
+    try std.testing.expectEqualDeep(TokenType.Int, expr.number.token_type);
+}
+
+test "should fail on non expression" {
+    const input = "identifier";
+    const tokens = try lexic.tokenize(input, std.testing.allocator);
+    defer tokens.deinit();
+
+    var expr: Expression = undefined;
+    expr.init(&tokens, 0) catch |err| {
+        try std.testing.expectEqual(ParseError.Unmatched, err);
+        return;
+    };
+
+    std.debug.print("v: {s}", .{expr.number.value});
+    try std.testing.expect(false);
+}
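One caveat in this new file: `init` copies the token into the local `t` and then stores `&t`, a pointer to stack memory that dangles once `init` returns; the tests only pass because that memory happens to still hold the value when read. A sketch of a safer variant (stand-in types, assuming the token list outlives the expression) borrows the list's own storage instead:

const std = @import("std");

// Stand-in types for the sketch; the real ones live in the lexic module.
const TokenType = enum { Int, Identifier };
const Token = struct { value: []const u8, token_type: TokenType };

const Expression = union(enum) {
    number: *const Token,

    // Sketch: point at the ArrayList's buffer, not at a local copy, so the
    // pointer stays valid for as long as `tokens` lives.
    pub fn init(target: *Expression, tokens: *const std.ArrayList(Token), pos: usize) error{Unmatched}!void {
        std.debug.assert(pos < tokens.items.len);

        const t = &tokens.items[pos];
        if (t.token_type != TokenType.Int) {
            return error.Unmatched;
        }

        target.* = .{ .number = t };
    }
};

test "number pointer refers to the token list, not a stack copy" {
    var tokens = std.ArrayList(Token).init(std.testing.allocator);
    defer tokens.deinit();
    try tokens.append(.{ .value = "322", .token_type = TokenType.Int });

    var expr: Expression = undefined;
    try expr.init(&tokens, 0);
    try std.testing.expect(expr.number == &tokens.items[0]);
}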
@@ -1,56 +1,23 @@
 const std = @import("std");
 const lexic = @import("lexic");
+const expression = @import("./expression.zig");
+const variable = @import("./variable.zig");
+const types = @import("./types.zig");

 const Token = lexic.Token;
 const TokenType = lexic.TokenType;
+const ParseError = types.ParseError;
-const TokenStream = std.ArrayList(Token);
-
-const ParseError = error{
-    Unmatched,
-    Error,
-};

 const Statement = union(enum) {
     VariableBinding: u8,
-};

-const VariableBinding = struct {
-    is_mutable: bool,
-    datatype: ?*Token,
-    identifier: *Token,
-    expression: Expression,
-
-    fn parse() !@This() {}
-};
-
-const Expression = union(enum) {
-    number: *const Token,
-
-    /// Attempts to parse an expression from a token stream.
-    fn parse(tokens: *const TokenStream, pos: usize) ParseError!@This() {
-        std.debug.assert(pos < tokens.items.len);
-
-        const t = tokens.items[pos];
-        if (t.token_type != TokenType.Int) {
-            return ParseError.Unmatched;
-        }
-
-        return .{
-            .number = &t,
-        };
+    fn parse(tokens: *const std.ArrayList(Token), pos: usize) ParseError!@This() {
+        _ = tokens;
+        _ = pos;
+        return ParseError.Error;
     }
 };

 test {
     std.testing.refAllDecls(@This());
 }
-
-test "should parse expression" {
-    const input = "322";
-    const tokens = try lexic.tokenize(input, std.testing.allocator);
-    defer tokens.deinit();
-
-    const expr = try Expression.parse(&tokens, 0);
-    try std.testing.expectEqualDeep("322", expr.number.value);
-    try std.testing.expectEqualDeep(TokenType.Int, expr.number.token_type);
-}
src/02_syntax/types.zig (new file, 17 lines)
@@ -0,0 +1,17 @@
+const std = @import("std");
+const lexic = @import("lexic");
+
+/// Respresents a failure of parsing.
+pub const ParseError = error{
+    /// The parse operation failed, but it is recoverable.
+    /// Other parsers should be considered.
+    Unmatched,
+    /// The parse operation parsed after a point of no return.
+    /// For example, a `var` keyword was found, but then no identifier
+    /// The parsing should stop
+    Error,
+    /// OOM. Fatal error, blows up everything
+    OutOfMemory,
+};
+
+pub const TokenStream = std.ArrayList(lexic.Token);
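The Unmatched/Error split is what lets parsers compose: Unmatched means this alternative did not apply and the next one should be tried, while Error means the parser committed and then failed, so parsing should abort. A standalone sketch of that dispatch, with hypothetical parser names not taken from the repository:

const std = @import("std");

const ParseError = error{ Unmatched, Error, OutOfMemory };

// A toy parser that only matches the exact text "var".
fn parse_var(input: []const u8) ParseError!void {
    if (!std.mem.eql(u8, "var", input)) return ParseError.Unmatched;
}

// Unmatched is recoverable: fall through to another alternative.
// Error and OutOfMemory propagate and abort the whole parse.
fn parse_statement(input: []const u8) ParseError![]const u8 {
    parse_var(input) catch |err| switch (err) {
        ParseError.Unmatched => return "something else",
        else => return err,
    };
    return "var binding";
}

test "Unmatched falls through, a match commits" {
    try std.testing.expectEqualStrings("var binding", try parse_statement("var"));
    try std.testing.expectEqualStrings("something else", try parse_statement("322"));
}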
src/02_syntax/utils.zig (new file, 21 lines)
@@ -0,0 +1,21 @@
+const std = @import("std");
+const lexic = @import("lexic");
+
+/// Expects that the given token `t` has type `value`.
+/// If it fails returns `error.Unmatched`, otherwise
+/// returns the same token passed (`t`)
+pub inline fn expect_token_type(comptime value: lexic.TokenType, t: *lexic.Token) error{Unmatched}!*lexic.Token {
+    if (t.token_type == value) {
+        return t;
+    } else {
+        return error.Unmatched;
+    }
+}
+
+pub inline fn expect_operator(comptime value: []const u8, t: *lexic.Token) error{Unmatched}!*lexic.Token {
+    if (t.token_type == lexic.TokenType.Operator and std.mem.eql(u8, value, t.value)) {
+        return t;
+    } else {
+        return error.Unmatched;
+    }
+}
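Both helpers follow a validate-and-pass-through shape: on success the same token pointer comes back, so a check composes with `try` or `catch` in a single expression. A standalone sketch with stand-in types, not the repository's:

const std = @import("std");

// Stand-ins for lexic.TokenType and lexic.Token.
const TokenType = enum { Identifier, Operator };
const Token = struct { value: []const u8, token_type: TokenType };

// Validate a property and hand the same pointer back on success.
inline fn expect_token_type(comptime value: TokenType, t: *const Token) error{Unmatched}!*const Token {
    if (t.token_type == value) {
        return t;
    } else {
        return error.Unmatched;
    }
}

test "returns the token on match, Unmatched otherwise" {
    const t = Token{ .value = "x", .token_type = TokenType.Identifier };
    const same = try expect_token_type(TokenType.Identifier, &t);
    try std.testing.expect(same == &t);
    try std.testing.expectError(error.Unmatched, expect_token_type(TokenType.Operator, &t));
}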
src/02_syntax/variable.zig (new file, 162 lines)
@@ -0,0 +1,162 @@
+const std = @import("std");
+const lexic = @import("lexic");
+const expression = @import("expression.zig");
+const types = @import("./types.zig");
+const utils = @import("./utils.zig");
+
+const TokenStream = types.TokenStream;
+const ParseError = types.ParseError;
+
+const VariableBinding = struct {
+    is_mutable: bool,
+    datatype: ?*lexic.Token,
+    identifier: *lexic.Token,
+    expression: *expression.Expression,
+    alloc: std.mem.Allocator,
+
+    /// Parses a variable binding
+    fn init(target: *VariableBinding, tokens: *const TokenStream, pos: usize, allocator: std.mem.Allocator) ParseError!void {
+        std.debug.assert(pos < tokens.items.len);
+
+        // try to parse a var keyword
+        const var_keyword = try utils.expect_token_type(lexic.TokenType.K_Var, &tokens.items[pos]);
+        _ = var_keyword;
+
+        // check there is still input
+        if (pos + 1 >= tokens.items.len) {
+            // return error
+            return ParseError.Error;
+        }
+
+        // try to parse an identifier
+        const identifier = utils.expect_token_type(lexic.TokenType.Identifier, &tokens.items[pos + 1]) catch {
+            return ParseError.Error;
+        };
+
+        // parse equal sign
+        if (pos + 2 >= tokens.items.len) return ParseError.Error;
+        const equal_sign = utils.expect_operator("=", &tokens.items[pos + 2]) catch {
+            return ParseError.Error;
+        };
+        _ = equal_sign;
+
+        // parse expression
+        if (pos + 3 >= tokens.items.len) return ParseError.Error;
+        var exp = allocator.create(expression.Expression) catch {
+            return ParseError.Error;
+        };
+        errdefer allocator.destroy(exp);
+        exp.init(tokens, pos + 3) catch {
+            return ParseError.Error;
+        };
+
+        // return
+        target.* = .{
+            .is_mutable = true,
+            .datatype = null,
+            .identifier = identifier,
+            .expression = exp,
+            .alloc = allocator,
+        };
+    }
+
+    fn deinit(self: *VariableBinding) void {
+        self.alloc.destroy(self.expression);
+    }
+};
+
+test "should parse a minimal var" {
+    const input = "var my_variable = 322";
+    const tokens = try lexic.tokenize(input, std.testing.allocator);
+    defer tokens.deinit();
+
+    var binding: VariableBinding = undefined;
+    try binding.init(&tokens, 0, std.testing.allocator);
+    defer binding.deinit();
+
+    try std.testing.expectEqual(true, binding.is_mutable);
+}
+
+test "should fail is it doesnt start with var" {
+    const input = "different_token_stream()";
+    const tokens = try lexic.tokenize(input, std.testing.allocator);
+    defer tokens.deinit();
+
+    var binding: VariableBinding = undefined;
+    binding.init(&tokens, 0, std.testing.allocator) catch |err| {
+        try std.testing.expectEqual(ParseError.Unmatched, err);
+        return;
+    };
+
+    try std.testing.expect(false);
+}
+
+test "should fail if the idenfier is missing" {
+    const input = "var ";
+    const tokens = try lexic.tokenize(input, std.testing.allocator);
+    defer tokens.deinit();
+
+    var binding: VariableBinding = undefined;
+    binding.init(&tokens, 0, std.testing.allocator) catch |err| {
+        try std.testing.expectEqual(ParseError.Error, err);
+        return;
+    };
+
+    try std.testing.expect(false);
+}
+
+test "should fail if there is not an identifier after var" {
+    const input = "var 322";
+    const tokens = try lexic.tokenize(input, std.testing.allocator);
+    defer tokens.deinit();
+
+    var binding: VariableBinding = undefined;
+    binding.init(&tokens, 0, std.testing.allocator) catch |err| {
+        try std.testing.expectEqual(ParseError.Error, err);
+        return;
+    };
+
+    try std.testing.expect(false);
+}
+
+test "should fail if the equal sign is missing" {
+    const input = "var my_id ";
+    const tokens = try lexic.tokenize(input, std.testing.allocator);
+    defer tokens.deinit();
+
+    var binding: VariableBinding = undefined;
+    binding.init(&tokens, 0, std.testing.allocator) catch |err| {
+        try std.testing.expectEqual(ParseError.Error, err);
+        return;
+    };
+
+    try std.testing.expect(false);
+}
+
+test "should fail if the equal sign is not found" {
+    const input = "var my_id is string";
+    const tokens = try lexic.tokenize(input, std.testing.allocator);
+    defer tokens.deinit();
+
+    var binding: VariableBinding = undefined;
+    binding.init(&tokens, 0, std.testing.allocator) catch |err| {
+        try std.testing.expectEqual(ParseError.Error, err);
+        return;
+    };
+
+    try std.testing.expect(false);
+}
+
+test "should fail if the expression parsing fails" {
+    const input = "var my_id = ehhh";
+    const tokens = try lexic.tokenize(input, std.testing.allocator);
+    defer tokens.deinit();
+
+    var binding: VariableBinding = undefined;
+    binding.init(&tokens, 0, std.testing.allocator) catch |err| {
+        try std.testing.expectEqual(ParseError.Error, err);
+        return;
+    };
+
+    try std.testing.expect(false);
+}
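A detail worth noting in `init` above: the expression node is heap-allocated, and `errdefer allocator.destroy(exp)` reclaims it if any later step fails, which is why the failure tests run leak-free under std.testing.allocator. A standalone sketch of the pattern, with a hypothetical function not taken from the repository:

const std = @import("std");

// Hypothetical sketch of the create/errdefer pattern used by init above:
// allocate a child node, and let errdefer reclaim it on any later failure.
fn make_boxed(allocator: std.mem.Allocator, fail: bool) error{ Error, OutOfMemory }!*u32 {
    const boxed = try allocator.create(u32);
    errdefer allocator.destroy(boxed); // runs only if an error is returned below this point
    if (fail) return error.Error;
    boxed.* = 42;
    return boxed;
}

test "errdefer frees the allocation on the failure path" {
    const alloc = std.testing.allocator; // fails the test on any leak
    try std.testing.expectError(error.Error, make_boxed(alloc, true));

    const v = try make_boxed(alloc, false);
    defer alloc.destroy(v);
    try std.testing.expectEqual(@as(u32, 42), v.*);
}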