feat: lex many productions
parent 26f5fed321
commit d6a83ff46c
@@ -5,15 +5,53 @@ const token = @import("./token.zig");
 const TokenType = token.TokenType;
 const Token = token.Token;
 
-pub fn tokenize(input: []const u8) !void {
+pub fn tokenize(input: []const u8, alloc: std.mem.Allocator) !void {
     const input_len = input.len;
-    const next_token = try number.lex(input, input_len, 0);
+    var current_pos: usize = 0;
 
-    if (next_token) |tuple| {
-        const t = tuple[0];
+    var tokens = std.ArrayList(Token).init(alloc);
+    defer tokens.deinit();
 
-        std.debug.print("{s}\n", .{t.value});
-    } else {
-        std.debug.print("no token found :c", .{});
+    while (current_pos < input_len) {
+        const actual_next_pos = ignore_whitespace(input, current_pos);
+
+        const next_token = try number.lex(input, input_len, actual_next_pos);
+        if (next_token) |tuple| {
+            const t = tuple[0];
+            current_pos = tuple[1];
+
+            try tokens.append(t);
+        } else {
+            // no lexer matched
+            std.debug.print("unmatched args: anytype:c\n", .{});
+            break;
+        }
     }
+
+    std.debug.print("array list len: {d}", .{tokens.items.len});
+}
+
+/// Ignores all whitespace from usize,
+/// and returns the position where whitespace ends.
+///
+/// Whitespace is: tabs, spaces
+pub fn ignore_whitespace(input: []const u8, start: usize) usize {
+    const cap = input.len;
+    var pos = start;
+
+    while (pos < cap and (input[pos] == ' ' or input[pos] == '\t')) {
+        pos += 1;
+    }
+
+    return pos;
+}
+
+test "should insert 1 item" {
+    const input = "322";
+    try tokenize(input, std.testing.allocator);
+}
+
+test "should insert 2 item" {
+    const input = "322 644";
+    try tokenize(input, std.testing.allocator);
 }
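The new loop depends on a lexer contract that this diff only implies: number.lex takes the input, its length, and a start position, then returns either null (no match) or a tuple of the matched token and the position just past it. Below is a minimal sketch of that contract, assuming a digit-only stand-in for the real number module and a Token with a value slice; both shapes are inferred from the call sites (tuple[0], tuple[1], t.value), not confirmed by this commit.

const std = @import("std");

const Token = struct {
    value: []const u8,
};

/// Hypothetical stand-in for number.lex, matching the call shape in the
/// diff: null when nothing matches at `start`, otherwise the token plus
/// the position right after it.
fn lex(input: []const u8, cap: usize, start: usize) !?struct { Token, usize } {
    var pos = start;
    // Consume a run of ASCII digits (assumed matching rule, for
    // illustration only).
    while (pos < cap and input[pos] >= '0' and input[pos] <= '9') : (pos += 1) {}
    if (pos == start) return null; // no digits at `start`
    return .{ Token{ .value = input[start..pos] }, pos };
}

test "lex a single number" {
    const result = try lex("322 644", 7, 0);
    const tuple = result.?;
    try std.testing.expectEqualStrings("322", tuple[0].value);
    try std.testing.expectEqual(@as(usize, 3), tuple[1]);
}

Returning the next position alongside the token is what lets tokenize advance current_pos without re-scanning the input.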
@@ -24,7 +24,10 @@ fn repl() !void {
     defer std.heap.page_allocator.free(bare_line);
     const line = std.mem.trim(u8, bare_line, "\r");
 
-    try lexic.tokenize(line);
+    var gpa = std.heap.GeneralPurposeAllocator(.{}){};
+    const alloc = gpa.allocator();
+
+    try lexic.tokenize(line, alloc);
 
     try bw.flush();
 }
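For reference, std.heap.GeneralPurposeAllocator is normally created once, handed out via allocator(), and torn down with a leak-checking deinit. A standalone sketch of that lifecycle, independent of the repl code above (the ArrayList use is illustrative, not part of this commit):

const std = @import("std");

pub fn main() !void {
    var gpa = std.heap.GeneralPurposeAllocator(.{}){};
    // deinit reports whether any allocation leaked; discard or assert on it.
    defer _ = gpa.deinit();
    const alloc = gpa.allocator();

    // Any allocator-backed structure works the same way as the token list
    // in tokenize: init with the allocator, deinit to release.
    var items = std.ArrayList(u8).init(alloc);
    defer items.deinit();
    try items.append('a');
    std.debug.print("len: {d}\n", .{items.items.len});
}

Note that std.testing.allocator, which the new tests pass to tokenize, performs the same leak check automatically when each test ends.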