feat: use the new thp-zig compiler
This commit is contained in:
parent a1641046cb
commit c7419a837a

Jenkinsfile (vendored): 4 changed lines
@@ -5,14 +5,14 @@ pipeline {
         stage('Build') {
             agent {
                 docker {
-                    image 'node:22'
+                    image 'node:22-alpine'
                     reuseNode true
                 }
             }
             steps {
                 sh 'npm i -g pnpm'
                 sh 'pnpm i'
-                sh 'THP_BINARY=/var/bin/thp pnpm build'
+                sh 'THP_BINARY=/var/bin/thp-zig pnpm build'
             }
         }
         stage('Deploy') {
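The build stage now runs inside a node:22-alpine image and points THP_BINARY at the new thp-zig binary. On the application side that path is read through import.meta.env, as the highlighter changes below show. A minimal sketch of that lookup, assuming a Vite-style build where .env variables surface on import.meta.env; the helper name is hypothetical and not part of this commit:

```ts
// Hypothetical helper, mirroring the guard used in native_lex further down.
function resolve_thp_binary(): string | null {
    const binary = import.meta.env.THP_BINARY;
    if (!binary) {
        console.error("THP_BINARY not set in .env");
        return null;
    }
    return binary;
}
```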
@@ -1,13 +1,55 @@
 import { spawn } from "node:child_process";
 import { leftTrimDedent } from "../components/utils";
 import { HighlightLevel } from "./types";
-import type {
-    ErrorLabel,
-    MistiErr,
-    Token,
-    TokenizeResult,
-    TokenType,
-} from "./types";
+/**
+ * Output of running the thp-zig compiler
+ * with the lex option
+ */
+export interface THPZigOutput {
+    errors: ZigError[]
+    tokens: ZigToken[]
+}
+
+export interface ZigError {
+    reason: string
+    help?: string
+    start_position: number
+    end_position: number
+    labels: ZigErrorLabel[]
+}
+
+export interface ZigErrorLabel {
+    message: string
+    start: number
+    end: number
+}
+
+export interface ZigToken {
+    value: string
+    token_type: TokenType
+    start_pos: number
+}
+
+type TokenType =
+    | "Int"
+    | "Float"
+    | "Identifier"
+    | "Datatype"
+    | "Operator"
+    | "Comment"
+    | "String"
+    | "LeftParen"
+    | "RightParen"
+    | "LeftBracket"
+    | "RightBracket"
+    | "LeftBrace"
+    | "RightBrace"
+    | "Comma"
+    | "Newline"
+    | "K_Var"
+
+
 
 const error_classes =
     "underline underline-offset-4 decoration-wavy decoration-red-500";
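To make the shape of these types concrete, here is an illustrative payload and how it maps onto THPZigOutput. The field names come from the interfaces above, but the exact JSON emitted by `thp-zig lex` is an assumption inferred from them, and the sample values are made up:

```ts
// Illustrative only: the wire format of `thp-zig lex` is assumed to be a JSON
// object with `errors` and `tokens`, matching the interfaces in this commit.
const sample_output = `{
  "errors": [
    {
      "reason": "Unexpected character",
      "start_position": 8,
      "end_position": 9,
      "labels": [{ "message": "this character is not allowed here", "start": 8, "end": 9 }]
    }
  ],
  "tokens": [
    { "value": "var", "token_type": "K_Var", "start_pos": 0 },
    { "value": "x", "token_type": "Identifier", "start_pos": 4 }
  ]
}`;

const parsed: THPZigOutput = JSON.parse(sample_output);
console.log(parsed.tokens[0]!.token_type); // "K_Var"
```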
@@ -29,7 +71,7 @@ export async function native_highlighter(
         let result = await native_lex(formatted_code, level);
         return highlight_syntax(formatted_code, result);
     } catch (error) {
-        return compiler_error(formatted_code, error as MistiErr);
+        return compiler_error(formatted_code, error);
     }
 }
 
@@ -40,34 +82,13 @@ export async function native_highlighter(
  * - The tokens as a list of <span /> elements
  * - An error message, if any
  */
-function highlight_syntax(
-    code: string,
-    result: TokenizeResult,
-): [string, string | null] {
-    if (result.Ok) {
-        const tokens_html = render_tokens(code, result.Ok);
-
-        return [tokens_html, null];
-    } else if (result.MixedErr) {
-        const [tokens, errors] = result.MixedErr;
-        // TODO: Implement error rendering, based on the new error schema
-
-        const tokens_html = render_tokens(code, tokens, errors.labels);
-        return [tokens_html, `error code ${errors.error_code}`];
-    } else if (result.Err) {
-        // TODO: Implement error rendering, based on the new error schema
-
-        return [code, `lexical error ${result.Err.error_code}`];
-    } else {
-        console.error(result);
-        throw new Error(
-            "Web page error: The compiler returned a case that wasn't handled.",
-        );
-    }
+function highlight_syntax(code: string, result: THPZigOutput): [string, string | null] {
+    const tokens_html = render_tokens(code, result.tokens, result.errors);
+    return [tokens_html, null];
 }
 
 /** A fatal error with the THP compiler */
-function compiler_error(code: string, error: MistiErr): [string, string] {
+function compiler_error(code: string, error: any): [string, string] {
     console.log(error);
     return [code, "Fatal compiler error"];
 }
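A usage sketch with made-up values (not from this commit): highlight_syntax now takes the thp-zig output directly, and lexer errors are surfaced only through the underlines that render_tokens draws, never through the second tuple slot:

```ts
// Hypothetical call site. The token and source values are invented for illustration.
const output: THPZigOutput = {
    errors: [],
    tokens: [{ value: "42", token_type: "Int", start_pos: 0 }],
};
const [html, err] = highlight_syntax("42", output);
// html holds the <span /> markup produced by render_tokens; err is always null here.
```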
@@ -83,23 +104,23 @@ function compiler_error(code: string, error: MistiErr): [string, string] {
  */
 function render_tokens(
     input: string,
-    tokens: Array<Token>,
-    error_labels: Array<ErrorLabel> = [],
+    tokens: Array<ZigToken>,
+    error_labels: Array<ZigError> = [],
 ): string {
     const input_chars = input.split("");
     let output = "";
 
     // Collects all the token ranges in all error labels
     const error_ranges: Array<[number, number]> = error_labels.map((l) => [
-        l.start,
-        l.end,
+        l.start_position,
+        l.end_position,
     ]);
 
     let current_pos = 0;
     for (let i = 0; i < tokens.length; i += 1) {
         const t = tokens[i]!;
-        const token_start = t.position;
-        const token_end = t.position + t.value.length;
+        const token_start = t.start_pos;
+        const token_end = t.start_pos + t.value.length;
 
         // check if the current token is in any error label
         let is_errored = false;
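The test that sets is_errored sits outside this hunk; only its renamed inputs are visible here. As a sketch of the kind of interval check those inputs feed, under the assumption that a token is marked when its range intersects any error range (the helper below is hypothetical, not part of the diff):

```ts
// Sketch only: the real check is not shown in this commit.
function overlaps(
    token_start: number,
    token_end: number,
    ranges: Array<[number, number]>,
): boolean {
    // A token [token_start, token_end) intersects an error range [start, end)
    // when the two half-open intervals overlap.
    return ranges.some(([start, end]) => token_start < end && token_end > start);
}
```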
@@ -145,7 +166,7 @@ function render_tokens(
             // get the line number of the label
             const [line_number, col_number] = absolute_to_line_column(
                 input,
-                label.start,
+                label.start_position,
             );
             let spaces_len = col_number - 1;
             if (spaces_len < 0) {
@@ -156,7 +177,7 @@ function render_tokens(
             lines.splice(
                 line_number + offset,
                 0,
-                create_inline_error_message(spaces, label.message),
+                create_inline_error_message(spaces, label.reason),
             );
             offset += 1;
         }
@@ -215,11 +236,13 @@ function process_token_value_and_end(
 ): [string, number] {
     let token_value = value;
     let new_end = first_end;
-    if (token_type === "MultilineComment") {
-        token_value = `/*${token_value}*/`;
-        new_end += 4;
-    } else if (token_type === "String") {
-        token_value = `"${token_value}"`;
+    //if (token_type === "MultilineComment") {
+    //    token_value = `/*${token_value}*/`;
+    //    new_end += 4;
+    //} else if (token_type === "String") {
+    //    token_value = `"${token_value}"`;
+    if (token_type === "String") {
+        token_value = `${token_value}`;
         new_end += 2;
     }
 
@@ -287,43 +310,25 @@ function translate_token_type(tt: TokenType, value: string): string {
         case "String":
             return "string";
         case "Comment":
-        case "MultilineComment":
             return "comment";
         // keywords:
-        case "VAL":
-        case "VAR":
-        case "FUN":
-        case "IF":
-        case "ELSE":
-        case "FOR":
-        case "IN":
-        case "WHILE":
-        case "MATCH":
-        case "CASE":
+        case "K_Var":
             return "keyword";
         default:
             return tt;
     }
 }
 
-const native_lex = (code: string, level: HighlightLevel) =>
-    new Promise<TokenizeResult>((resolve, reject) => {
+const native_lex = (code: string, _level: HighlightLevel) =>
+    new Promise<THPZigOutput>((resolve, reject) => {
         // Get binary path from .env
         const binary = import.meta.env.THP_BINARY;
         if (!binary) {
             console.error("THP_BINARY not set in .env");
-            resolve({
-                Err: {
-                    error_code: 0,
-                    error_offset: 0,
-                    labels: [],
-                    note: null,
-                    help: null,
-                }
-            })
+            resolve({ errors: [], tokens: [] })
         }
 
-        const subprocess = spawn(binary, ["tokenize", "-l", level.toString()]);
+        const subprocess = spawn(binary, ["lex"]);
         let response = "";
         let error = "";
 
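The diff ends before the body of native_lex finishes, so how the subprocess is fed and read is not visible here. A hedged sketch of one way the remainder could work, assuming the source code is piped over stdin and that `thp-zig lex` prints a JSON document matching THPZigOutput on stdout; the helper name is hypothetical and not from this commit:

```ts
import { spawn } from "node:child_process";

// Hypothetical helper sketching the truncated remainder of native_lex.
function lex_with_thp_zig(binary: string, code: string): Promise<THPZigOutput> {
    return new Promise((resolve, reject) => {
        const subprocess = spawn(binary, ["lex"]);
        let response = "";
        let error = "";

        // Accumulate compiler output and diagnostics.
        subprocess.stdout.on("data", (chunk) => { response += chunk; });
        subprocess.stderr.on("data", (chunk) => { error += chunk; });

        subprocess.on("error", (err) => reject(err));
        subprocess.on("close", (exit_code) => {
            if (exit_code === 0) {
                // Assumption: stdout carries JSON in the THPZigOutput shape.
                resolve(JSON.parse(response) as THPZigOutput);
            } else {
                reject(new Error(error));
            }
        });

        // Assumption: the code to lex is sent over stdin.
        subprocess.stdin.write(code);
        subprocess.stdin.end();
    });
}
```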