dusk: better IR.generate and Ast.parse return types

This commit is contained in:
Ali Chraghi 2023-03-27 15:03:40 +03:30 committed by Stephen Gutekanst
parent b086bdee3a
commit 059411fa97
7 changed files with 317 additions and 377 deletions

View file

@ -2,7 +2,7 @@ const std = @import("std");
const Parser = @import("Parser.zig");
const Token = @import("Token.zig");
const Tokenizer = @import("Tokenizer.zig");
const ErrorMsg = @import("main.zig").ErrorMsg;
const ErrorList = @import("ErrorList.zig");
const Extension = @import("main.zig").Extension;
const Ast = @This();
@ -14,21 +14,18 @@ source: [:0]const u8,
tokens: TokenList.Slice,
nodes: NodeList.Slice,
extra: []const Index,
errors: ErrorList,
pub fn deinit(tree: *Ast, allocator: std.mem.Allocator) void {
tree.tokens.deinit(allocator);
tree.nodes.deinit(allocator);
allocator.free(tree.extra);
tree.errors.deinit();
tree.* = undefined;
}
pub const ParseResult = union(enum) {
errors: []ErrorMsg,
tree: Ast,
};
/// parses a TranslationUnit (WGSL Program)
pub fn parse(allocator: std.mem.Allocator, source: [:0]const u8) !ParseResult {
pub fn parse(allocator: std.mem.Allocator, source: [:0]const u8) error{OutOfMemory}!Ast {
var p = Parser{
.allocator = allocator,
.source = source,
@ -53,26 +50,29 @@ pub fn parse(allocator: std.mem.Allocator, source: [:0]const u8) !ParseResult {
.nodes = .{},
.extra = .{},
.scratch = .{},
.errors = .{},
.errors = try ErrorList.init(allocator),
.extensions = Extension.Array.initFill(false),
};
defer p.deinit();
defer p.scratch.deinit(allocator);
errdefer {
p.tokens.deinit(allocator);
p.nodes.deinit(allocator);
p.extra.deinit(allocator);
p.errors.deinit();
}
// TODO: make sure tokens:nodes ratio is right
const estimated_node_count = (p.tokens.len + 2) / 2;
try p.nodes.ensureTotalCapacity(allocator, estimated_node_count);
_ = try p.translationUnit() orelse {
return .{ .errors = try p.errors.toOwnedSlice(allocator) };
};
try p.translationUnit();
return .{
.tree = .{
.source = source,
.tokens = p.tokens.toOwnedSlice(),
.nodes = p.nodes.toOwnedSlice(),
.extra = try p.extra.toOwnedSlice(allocator),
},
.source = source,
.tokens = p.tokens.toOwnedSlice(),
.nodes = p.nodes.toOwnedSlice(),
.extra = try p.extra.toOwnedSlice(allocator),
.errors = p.errors,
};
}

View file

@ -2,7 +2,7 @@ const std = @import("std");
const Ast = @import("Ast.zig");
const Token = @import("Token.zig");
const IR = @import("IR.zig");
const ErrorMsg = @import("main.zig").ErrorMsg;
const ErrorList = @import("ErrorList.zig");
const AstGen = @This();
allocator: std.mem.Allocator,
@ -11,7 +11,7 @@ instructions: std.ArrayListUnmanaged(IR.Inst) = .{},
refs: std.ArrayListUnmanaged(IR.Inst.Ref) = .{},
strings: std.ArrayListUnmanaged(u8) = .{},
scratch: std.ArrayListUnmanaged(IR.Inst.Ref) = .{},
errors: std.ArrayListUnmanaged(ErrorMsg) = .{},
errors: ErrorList,
scope_pool: std.heap.MemoryPool(Scope),
pub const Scope = struct {
@ -27,16 +27,6 @@ pub const Scope = struct {
};
};
pub fn deinit(self: *AstGen) void {
self.instructions.deinit(self.allocator);
self.refs.deinit(self.allocator);
self.strings.deinit(self.allocator);
self.scratch.deinit(self.allocator);
self.scope_pool.deinit();
for (self.errors.items) |*err_msg| err_msg.deinit(self.allocator);
self.errors.deinit(self.allocator);
}
pub fn genTranslationUnit(self: *AstGen) !u32 {
const global_decls = self.tree.spanToList(0);
@ -46,7 +36,10 @@ pub fn genTranslationUnit(self: *AstGen) !u32 {
var root_scope = try self.scope_pool.create();
root_scope.* = .{ .tag = .root, .parent = null };
try self.scanDecls(root_scope, global_decls);
self.scanDecls(root_scope, global_decls) catch |err| switch (err) {
error.AnalysisFail => return try self.addRefList(self.scratch.items[scratch_top..]),
error.OutOfMemory => return error.OutOfMemory,
};
for (global_decls) |node| {
const global = self.genDecl(root_scope, node) catch |err| switch (err) {
@ -56,10 +49,6 @@ pub fn genTranslationUnit(self: *AstGen) !u32 {
try self.scratch.append(self.allocator, global);
}
if (self.errors.items.len > 0) {
return error.AnalysisFail;
}
return try self.addRefList(self.scratch.items[scratch_top..]);
}
@ -72,7 +61,7 @@ pub fn scanDecls(self: *AstGen, scope: *Scope, decls: []const Ast.Index) !void {
// TODO
// if (Token.isReserved(name)) {
// try self.addError(
// try self.errors.add(
// loc,
// "the name '{s}' has been reserved",
// .{name},
@ -83,12 +72,11 @@ pub fn scanDecls(self: *AstGen, scope: *Scope, decls: []const Ast.Index) !void {
var name_iter = scope.decls.keyIterator();
while (name_iter.next()) |node| {
if (std.mem.eql(u8, self.declNameLoc(node.*).?.slice(self.tree.source), name)) {
try self.addError(
try self.errors.add(
loc,
"redeclaration of '{s}'",
.{name},
try ErrorMsg.Note.create(
self.allocator,
try self.errors.createNote(
self.declNameLoc(node.*).?,
"other declaration here",
.{},
@ -130,7 +118,7 @@ pub fn declRef(self: *AstGen, scope: *Scope, loc: Token.Loc) !IR.Inst.Ref {
s = scope.parent orelse break;
}
try self.addError(
try self.errors.add(
loc,
"use of undeclared identifier '{s}'",
.{name},
@ -208,7 +196,7 @@ pub fn genStruct(self: *AstGen, scope: *Scope, node: Ast.Index) !IR.Inst.Ref {
switch (member_type_ref) {
.bool_type, .i32_type, .u32_type, .f32_type, .f16_type => {},
.sampler_type, .comparison_sampler_type, .external_sampled_texture_type => {
try self.addError(
try self.errors.add(
member_loc,
"invalid struct member type '{s}'",
.{member_type_name.slice(self.tree.source)},
@ -221,7 +209,7 @@ pub fn genStruct(self: *AstGen, scope: *Scope, node: Ast.Index) !IR.Inst.Ref {
.vector_type, .matrix_type, .atomic_type, .struct_decl => {},
.array_type => {
if (self.instructions.items[member_type_ref.toIndex().?].data.array_type.size == .none and i + 1 != member_list.len) {
try self.addError(
try self.errors.add(
member_loc,
"struct member with runtime-sized array type, must be the last member of the structure",
.{},
@ -236,7 +224,7 @@ pub fn genStruct(self: *AstGen, scope: *Scope, node: Ast.Index) !IR.Inst.Ref {
.storage_texture_type,
.depth_texture_type,
=> {
try self.addError(
try self.errors.add(
member_loc,
"invalid struct member type '{s}'",
.{member_type_name.slice(self.tree.source)},
@ -320,7 +308,7 @@ pub fn addInst(self: *AstGen, inst: IR.Inst) error{OutOfMemory}!IR.Inst.Index {
// // ((lir == .construct and lir.construct == .vector) and (rir == .construct and rir.construct == .vector)) or
// // ((lir == .construct and lir.construct == .matrix) and (rir == .construct and rir.construct == .matrix));
// // if (!is_valid_op) {
// // try self.addError(
// // try self.errors.add(
// // loc,
// // "invalid operation with '{s}' and '{s}'",
// // .{ @tagName(std.meta.activeTag(lir)), @tagName(std.meta.activeTag(rir)) },
@ -356,7 +344,7 @@ pub fn addInst(self: *AstGen, inst: IR.Inst) error{OutOfMemory}!IR.Inst.Index {
// // !std.mem.endsWith(u8, str, "f") and
// // !std.mem.endsWith(u8, str, "h"))
// // {
// // try self.addError(
// // try self.errors.add(
// // loc,
// // "number literal cannot have leading 0",
// // .{str},
@ -409,7 +397,7 @@ pub fn genType(self: *AstGen, scope: *Scope, node: Ast.Index) error{ AnalysisFai
.struct_decl,
=> return decl_ref,
.global_variable_decl => {
try self.addError(
try self.errors.add(
node_loc,
"'{s}' is not a type",
.{node_loc.slice(self.tree.source)},
@ -449,12 +437,11 @@ pub fn genSampledTextureType(self: *AstGen, scope: *Scope, node: Ast.Index) !IR.
.comparison_sampler_type,
.external_sampled_texture_type,
=> {
try self.addError(
try self.errors.add(
self.tree.tokenLoc(self.tree.nodeToken(component_type_node)),
"invalid sampled texture component type",
.{},
try ErrorMsg.Note.create(
self.allocator,
try self.errors.createNote(
null,
"must be 'i32', 'u32' or 'f32'",
.{},
@ -475,12 +462,11 @@ pub fn genSampledTextureType(self: *AstGen, scope: *Scope, node: Ast.Index) !IR.
.depth_texture_type,
.struct_decl,
=> {
try self.addError(
try self.errors.add(
self.tree.tokenLoc(self.tree.nodeToken(component_type_node)),
"invalid sampled texture component type",
.{},
try ErrorMsg.Note.create(
self.allocator,
try self.errors.createNote(
null,
"must be 'i32', 'u32' or 'f32'",
.{},
@ -531,12 +517,11 @@ pub fn genMultigenSampledTextureType(self: *AstGen, scope: *Scope, node: Ast.Ind
.comparison_sampler_type,
.external_sampled_texture_type,
=> {
try self.addError(
try self.errors.add(
self.tree.tokenLoc(self.tree.nodeToken(component_type_node)),
"invalid multisampled texture component type",
.{},
try ErrorMsg.Note.create(
self.allocator,
try self.errors.createNote(
null,
"must be 'i32', 'u32' or 'f32'",
.{},
@ -557,12 +542,11 @@ pub fn genMultigenSampledTextureType(self: *AstGen, scope: *Scope, node: Ast.Ind
.depth_texture_type,
.struct_decl,
=> {
try self.addError(
try self.errors.add(
self.tree.tokenLoc(self.tree.nodeToken(component_type_node)),
"invalid multisampled texture component type",
.{},
try ErrorMsg.Note.create(
self.allocator,
try self.errors.createNote(
null,
"must be 'i32', 'u32' or 'f32'",
.{},
@ -602,12 +586,11 @@ pub fn genStorageTextureType(self: *AstGen, node: Ast.Index) !IR.Inst.Ref {
const access_mode = switch (access_mode_full) {
.write => IR.Inst.StorageTextureType.AccessMode.write,
else => {
try self.addError(
try self.errors.add(
access_mode_loc,
"invalid access mode",
.{},
try ErrorMsg.Note.create(
self.allocator,
try self.errors.createNote(
null,
"only 'write' is allowed",
.{},
@ -704,12 +687,11 @@ pub fn genVectorType(self: *AstGen, scope: *Scope, node: Ast.Index) !IR.Inst.Ref
switch (component_type_ref) {
.bool_type, .i32_type, .u32_type, .f32_type, .f16_type => {},
.sampler_type, .comparison_sampler_type, .external_sampled_texture_type => {
try self.addError(
try self.errors.add(
self.tree.tokenLoc(self.tree.nodeToken(component_type_node)),
"invalid vector component type",
.{},
try ErrorMsg.Note.create(
self.allocator,
try self.errors.createNote(
null,
"must be 'i32', 'u32', 'f32', 'f16' or 'bool'",
.{},
@ -730,12 +712,11 @@ pub fn genVectorType(self: *AstGen, scope: *Scope, node: Ast.Index) !IR.Inst.Ref
.depth_texture_type,
.struct_decl,
=> {
try self.addError(
try self.errors.add(
self.tree.tokenLoc(self.tree.nodeToken(component_type_node)),
"invalid vector component type",
.{},
try ErrorMsg.Note.create(
self.allocator,
try self.errors.createNote(
null,
"must be 'i32', 'u32', 'f32', 'f16' or 'bool'",
.{},
@ -784,12 +765,11 @@ pub fn genMatrixType(self: *AstGen, scope: *Scope, node: Ast.Index) !IR.Inst.Ref
.comparison_sampler_type,
.external_sampled_texture_type,
=> {
try self.addError(
try self.errors.add(
self.tree.tokenLoc(self.tree.nodeToken(component_type_node)),
"invalid matrix component type",
.{},
try ErrorMsg.Note.create(
self.allocator,
try self.errors.createNote(
null,
"must be 'f32' or 'f16'",
.{},
@ -810,12 +790,11 @@ pub fn genMatrixType(self: *AstGen, scope: *Scope, node: Ast.Index) !IR.Inst.Ref
.depth_texture_type,
.struct_decl,
=> {
try self.addError(
try self.errors.add(
self.tree.tokenLoc(self.tree.nodeToken(component_type_node)),
"invalid matrix component type",
.{},
try ErrorMsg.Note.create(
self.allocator,
try self.errors.createNote(
null,
"must be 'f32' or 'f16'",
.{},
@ -870,12 +849,11 @@ pub fn genAtomicType(self: *AstGen, scope: *Scope, node: Ast.Index) !IR.Inst.Ref
.comparison_sampler_type,
.external_sampled_texture_type,
=> {
try self.addError(
try self.errors.add(
self.tree.tokenLoc(self.tree.nodeToken(component_type_node)),
"invalid atomic component type",
.{},
try ErrorMsg.Note.create(
self.allocator,
try self.errors.createNote(
null,
"must be 'i32' or 'u32'",
.{},
@ -896,12 +874,11 @@ pub fn genAtomicType(self: *AstGen, scope: *Scope, node: Ast.Index) !IR.Inst.Ref
.depth_texture_type,
.struct_decl,
=> {
try self.addError(
try self.errors.add(
self.tree.tokenLoc(self.tree.nodeToken(component_type_node)),
"invalid atomic component type",
.{},
try ErrorMsg.Note.create(
self.allocator,
try self.errors.createNote(
null,
"must be 'i32' or 'u32'",
.{},
@ -939,7 +916,7 @@ pub fn genArrayType(self: *AstGen, scope: *Scope, node: Ast.Index) !IR.Inst.Ref
.comparison_sampler_type,
.external_sampled_texture_type,
=> {
try self.addError(
try self.errors.add(
self.tree.tokenLoc(self.tree.nodeToken(component_type_node)),
"invalid array component type",
.{},
@ -956,7 +933,7 @@ pub fn genArrayType(self: *AstGen, scope: *Scope, node: Ast.Index) !IR.Inst.Ref
=> {},
.array_type => {
if (self.instructions.items[component_type_ref.toIndex().?].data.array_type.size == .none) {
try self.addError(
try self.errors.add(
self.tree.tokenLoc(self.tree.nodeToken(component_type_node)),
"array componet type can not be a runtime-sized array",
.{},
@ -971,7 +948,7 @@ pub fn genArrayType(self: *AstGen, scope: *Scope, node: Ast.Index) !IR.Inst.Ref
.storage_texture_type,
.depth_texture_type,
=> {
try self.addError(
try self.errors.add(
self.tree.tokenLoc(self.tree.nodeToken(component_type_node)),
"invalid array component type",
.{},
@ -1016,14 +993,3 @@ pub fn declNameLoc(self: *AstGen, node: Ast.Index) ?Token.Loc {
};
return self.tree.tokenLoc(token);
}
pub fn addError(
self: *AstGen,
loc: Token.Loc,
comptime format: []const u8,
args: anytype,
note: ?ErrorMsg.Note,
) !void {
const err_msg = try ErrorMsg.create(self.allocator, loc, format, args, note);
try self.errors.append(self.allocator, err_msg);
}

131
libs/dusk/src/ErrorList.zig Normal file
View file

@ -0,0 +1,131 @@
const std = @import("std");
const Token = @import("Token.zig");
pub const ErrorList = @This();
pub const ErrorMsg = struct {
    /// Source location the diagnostic points at.
    loc: Token.Loc,
    /// Formatted message text; memory is owned by the ErrorList arena.
    msg: []const u8,
    /// Optional supplementary note attached to this error.
    note: ?Note = null,

    pub const Note = struct {
        /// Optional location; a null loc prints the note without a source excerpt.
        loc: ?Token.Loc = null,
        /// Formatted note text; memory is owned by the ErrorList arena.
        msg: []const u8,
    };
};
arena: std.heap.ArenaAllocator,
list: std.ArrayListUnmanaged(ErrorMsg) = .{},
/// Creates an empty ErrorList whose messages and notes are allocated
/// from an arena backed by `allocator`. Free everything with `deinit`.
pub fn init(allocator: std.mem.Allocator) !ErrorList {
    return .{
        .arena = std.heap.ArenaAllocator.init(allocator),
    };
}
/// Releases every stored message and note in one shot by tearing down
/// the arena, then poisons the list against further use.
pub fn deinit(self: *ErrorList) void {
    self.arena.deinit();
    self.* = undefined;
}
/// Formats `format` with `args` into arena-owned memory and appends the
/// resulting diagnostic (with optional `note`) to the list.
/// Returns error.OutOfMemory if allocation fails.
pub fn add(
    self: *ErrorList,
    loc: Token.Loc,
    comptime format: []const u8,
    args: anytype,
    note: ?ErrorMsg.Note,
) !void {
    // Explicit result type on the literal so field names and types are
    // checked right here, not deferred to coercion at the append site.
    const err_msg: ErrorMsg = .{
        .loc = loc,
        // `format` is already a comptime parameter; no `comptime` keyword
        // is needed at the call site.
        .msg = try std.fmt.allocPrint(self.arena.allocator(), format, args),
        .note = note,
    };
    try self.list.append(self.arena.allocator(), err_msg);
}
/// Builds a formatted note (arena-owned) that can later be attached to
/// an error via `add`. A null `loc` yields a note without a source excerpt.
pub fn createNote(
    self: *ErrorList,
    loc: ?Token.Loc,
    comptime format: []const u8,
    args: anytype,
) !ErrorMsg.Note {
    const text = try std.fmt.allocPrint(self.arena.allocator(), format, args);
    return ErrorMsg.Note{
        .loc = loc,
        .msg = text,
    };
}
/// Pretty-prints every collected diagnostic to stderr in a
/// 'file:line:column error: MSG' style, each followed by a highlighted
/// source excerpt and, when present, a cyan 'note:' with its own excerpt.
/// ANSI colors are used only when stderr supports escape codes.
pub fn print(self: ErrorList, source: []const u8, file_path: ?[]const u8) !void {
    const stderr = std.io.getStdErr();
    // Buffer all writes; a single flush at the end keeps output atomic-ish.
    var bw = std.io.bufferedWriter(stderr.writer());
    const b = bw.writer();
    const term = if (stderr.supportsAnsiEscapeCodes())
        std.debug.TTY.Config{ .escape_codes = {} }
    else
        std.debug.TTY.Config{ .no_color = {} };

    for (self.list.items) |*err| {
        const loc_extra = err.loc.extraInfo(source);

        // 'file:line:column error: MSG'
        try term.setColor(b, .Bold);
        try b.print("{?s}:{d}:{d} ", .{ file_path, loc_extra.line, loc_extra.col });
        try term.setColor(b, .Red);
        try b.writeAll("error: ");
        try term.setColor(b, .Reset);
        try term.setColor(b, .Bold);
        try b.writeAll(err.msg);
        try b.writeByte('\n');
        try printCode(b, term, source, err.loc);

        // note
        if (err.note) |note| {
            // Only notes that carry a location get a 'file:line:col' prefix.
            if (note.loc) |note_loc| {
                const note_loc_extra = note_loc.extraInfo(source);
                try term.setColor(b, .Reset);
                try term.setColor(b, .Bold);
                try b.print("{?s}:{d}:{d} ", .{ file_path, note_loc_extra.line, note_loc_extra.col });
            }
            try term.setColor(b, .Cyan);
            try b.writeAll("note: ");
            try term.setColor(b, .Reset);
            try term.setColor(b, .Bold);
            try b.writeAll(note.msg);
            try b.writeByte('\n');
            // ...and a source excerpt, again only when a location exists.
            if (note.loc) |note_loc| {
                try printCode(b, term, source, note_loc);
            }
        }
        try term.setColor(b, .Reset);
    }
    try bw.flush();
}
/// Prints the source line containing `loc` with the offending span in
/// green, then a '^~~~' pointer line aligned beneath the span.
fn printCode(writer: anytype, term: std.debug.TTY.Config, source: []const u8, loc: Token.Loc) !void {
    const loc_extra = loc.extraInfo(source);
    try term.setColor(writer, .Dim);
    try writer.print("{d} │ ", .{loc_extra.line});
    try term.setColor(writer, .Reset);
    try writer.writeAll(source[loc_extra.line_start..loc.start]);
    try term.setColor(writer, .Green);
    try writer.writeAll(source[loc.start..loc.end]);
    try term.setColor(writer, .Reset);
    try writer.writeAll(source[loc.end..loc_extra.line_end]);
    try writer.writeByte('\n');

    // location pointer: pad past the line-number gutter ('N │ ' — the +3
    // covers the space, bar, space) plus the columns before the span.
    const line_number_len = (std.math.log10(loc_extra.line) + 1) + 3;
    try writer.writeByteNTimes(
        ' ',
        line_number_len + (loc_extra.col - 1),
    );
    try term.setColor(writer, .Bold);
    try term.setColor(writer, .Green);
    try writer.writeByte('^');
    // Saturating subtraction: a zero-length loc (e.g. an EOF token) would
    // otherwise underflow usize here and trip a safety panic.
    try writer.writeByteNTimes('~', (loc.end - loc.start) -| 1);
    try writer.writeByte('\n');
}

View file

@ -1,7 +1,7 @@
const std = @import("std");
const AstGen = @import("AstGen.zig");
const Ast = @import("Ast.zig");
const ErrorMsg = @import("main.zig").ErrorMsg;
const ErrorList = @import("ErrorList.zig");
const IR = @This();
allocator: std.mem.Allocator,
@ -9,38 +9,43 @@ globals_index: u32,
instructions: []const Inst,
refs: []const Inst.Ref,
strings: []const u8,
errors: ErrorList,
pub fn deinit(self: IR) void {
pub fn deinit(self: *IR) void {
self.allocator.free(self.instructions);
self.allocator.free(self.refs);
self.allocator.free(self.strings);
self.errors.deinit();
self.* = undefined;
}
pub const AstGenResult = union(enum) {
ir: IR,
errors: []ErrorMsg,
};
pub fn generate(allocator: std.mem.Allocator, tree: *const Ast) !AstGenResult {
pub fn generate(allocator: std.mem.Allocator, tree: *const Ast) error{OutOfMemory}!IR {
var astgen = AstGen{
.allocator = allocator,
.tree = tree,
.errors = try ErrorList.init(allocator),
.scope_pool = std.heap.MemoryPool(AstGen.Scope).init(allocator),
};
defer astgen.deinit();
defer {
astgen.scope_pool.deinit();
astgen.scratch.deinit(allocator);
}
errdefer {
astgen.instructions.deinit(allocator);
astgen.refs.deinit(allocator);
astgen.strings.deinit(allocator);
}
const globals_index = astgen.genTranslationUnit() catch |err| switch (err) {
error.AnalysisFail => return .{ .errors = try astgen.errors.toOwnedSlice(allocator) },
error.OutOfMemory => return error.OutOfMemory,
};
const globals_index = try astgen.genTranslationUnit();
return .{ .ir = .{
return .{
.allocator = allocator,
.globals_index = globals_index,
.instructions = try astgen.instructions.toOwnedSlice(allocator),
.refs = try astgen.refs.toOwnedSlice(allocator),
.strings = try astgen.strings.toOwnedSlice(allocator),
} };
.errors = astgen.errors,
};
}
pub fn getStr(self: IR, index: u32) []const u8 {

View file

@ -3,7 +3,7 @@ const std = @import("std");
const Ast = @import("Ast.zig");
const Token = @import("Token.zig");
const Extension = @import("main.zig").Extension;
const ErrorMsg = @import("main.zig").ErrorMsg;
const ErrorList = @import("ErrorList.zig");
const fieldNames = std.meta.fieldNames;
const Parser = @This();
@ -14,19 +14,10 @@ tokens: std.MultiArrayList(Token),
nodes: std.MultiArrayList(Ast.Node),
extra: std.ArrayListUnmanaged(Ast.Index),
scratch: std.ArrayListUnmanaged(Ast.Index),
errors: std.ArrayListUnmanaged(ErrorMsg),
errors: ErrorList,
extensions: Extension.Array,
pub fn deinit(p: *Parser) void {
p.tokens.deinit(p.allocator);
p.nodes.deinit(p.allocator);
p.extra.deinit(p.allocator);
p.scratch.deinit(p.allocator);
for (p.errors.items) |*err_msg| err_msg.deinit(p.allocator);
p.errors.deinit(p.allocator);
}
pub fn translationUnit(p: *Parser) !?Ast.Index {
pub fn translationUnit(p: *Parser) !void {
const root = try p.addNode(.{ .tag = .span, .main_token = undefined });
while (try p.globalDirectiveRecoverable()) |ext| {
@ -38,15 +29,9 @@ pub fn translationUnit(p: *Parser) !?Ast.Index {
try p.scratch.append(p.allocator, decl);
}
if (p.errors.items.len > 0) {
return null;
}
try p.extra.appendSlice(p.allocator, p.scratch.items);
p.nodes.items(.lhs)[root] = @intCast(Ast.Index, p.extra.items.len - p.scratch.items.len);
p.nodes.items(.rhs)[root] = @intCast(Ast.Index, p.extra.items.len);
return root;
}
pub fn globalDirectiveRecoverable(p: *Parser) !?Extension {
@ -55,7 +40,7 @@ pub fn globalDirectiveRecoverable(p: *Parser) !?Extension {
p.findNextGlobalDirective();
return null;
},
else => return err,
error.OutOfMemory => error.OutOfMemory,
};
}
@ -63,7 +48,7 @@ pub fn globalDirective(p: *Parser) !?Extension {
_ = p.eatToken(.k_enable) orelse return null;
const ext_token = try p.expectToken(.ident);
const ext = std.meta.stringToEnum(Extension, p.getToken(.loc, ext_token).slice(p.source)) orelse {
try p.addError(p.getToken(.loc, ext_token), "invalid extension", .{}, null);
try p.errors.add(p.getToken(.loc, ext_token), "invalid extension", .{}, null);
return error.Parsing;
};
return ext;
@ -75,7 +60,7 @@ pub fn expectGlobalDeclRecoverable(p: *Parser) !?Ast.Index {
p.findNextGlobalDecl();
return null;
},
else => return err,
error.OutOfMemory => error.OutOfMemory,
};
}
@ -100,7 +85,7 @@ pub fn expectGlobalDecl(p: *Parser) !Ast.Index {
return node;
}
try p.addError(
try p.errors.add(
p.peekToken(.loc, 0),
"expected global declaration, found '{s}'",
.{p.peekToken(.tag, 0).symbol()},
@ -126,12 +111,11 @@ pub fn attribute(p: *Parser) !?Ast.Index {
const ident_tok = try p.expectToken(.ident);
const str = p.getToken(.loc, ident_tok).slice(p.source);
const tag = std.meta.stringToEnum(Ast.Attribute, str) orelse {
try p.addError(
try p.errors.add(
p.getToken(.loc, ident_tok),
"unknown attribute '{s}'",
.{p.getToken(.loc, ident_tok).slice(p.source)},
try ErrorMsg.Note.create(
p.allocator,
try p.errors.createNote(
null,
"valid options are [{s}]",
.{fieldNames(Ast.Attribute)},
@ -165,7 +149,7 @@ pub fn attribute(p: *Parser) !?Ast.Index {
} else {
node.tag = .attr_one_arg;
node.lhs = try p.expression() orelse {
try p.addError(
try p.errors.add(
p.peekToken(.loc, 0),
"expected expression, but found '{s}'",
.{p.peekToken(.tag, 0).symbol()},
@ -183,20 +167,20 @@ pub fn attribute(p: *Parser) !?Ast.Index {
node.tag = .attr_workgroup_size;
var workgroup_size = Ast.Node.WorkgroupSize{
.x = try p.expression() orelse {
try p.addError(p.peekToken(.loc, 0), "expected workgroup_size x parameter", .{}, null);
try p.errors.add(p.peekToken(.loc, 0), "expected workgroup_size x parameter", .{}, null);
return error.Parsing;
},
};
if (p.eatToken(.comma) != null and p.peekToken(.tag, 0) != .paren_right) {
workgroup_size.y = try p.expression() orelse {
try p.addError(p.peekToken(.loc, 0), "expected workgroup_size y parameter", .{}, null);
try p.errors.add(p.peekToken(.loc, 0), "expected workgroup_size y parameter", .{}, null);
return error.Parsing;
};
if (p.eatToken(.comma) != null and p.peekToken(.tag, 0) != .paren_right) {
workgroup_size.z = try p.expression() orelse {
try p.addError(p.peekToken(.loc, 0), "expected workgroup_size z parameter", .{}, null);
try p.errors.add(p.peekToken(.loc, 0), "expected workgroup_size z parameter", .{}, null);
return error.Parsing;
};
@ -233,12 +217,11 @@ pub fn expectBuiltinValue(p: *Parser) !Ast.Index {
if (std.meta.stringToEnum(Ast.BuiltinValue, str)) |_| return token;
}
try p.addError(
try p.errors.add(
p.getToken(.loc, token),
"unknown builtin value name '{s}'",
.{p.getToken(.loc, token).slice(p.source)},
try ErrorMsg.Note.create(
p.allocator,
try p.errors.createNote(
null,
"valid options are [{s}]",
.{fieldNames(Ast.BuiltinValue)},
@ -254,12 +237,11 @@ pub fn expectInterpolationType(p: *Parser) !Ast.Index {
if (std.meta.stringToEnum(Ast.InterpolationType, str)) |_| return token;
}
try p.addError(
try p.errors.add(
p.getToken(.loc, token),
"unknown interpolation type name '{s}'",
.{p.getToken(.loc, token).slice(p.source)},
try ErrorMsg.Note.create(
p.allocator,
try p.errors.createNote(
null,
"valid options are [{s}]",
.{fieldNames(Ast.InterpolationType)},
@ -275,12 +257,11 @@ pub fn expectInterpolationSample(p: *Parser) !Ast.Index {
if (std.meta.stringToEnum(Ast.InterpolationSample, str)) |_| return token;
}
try p.addError(
try p.errors.add(
p.getToken(.loc, token),
"unknown interpolation sample name '{s}'",
.{p.getToken(.loc, token).slice(p.source)},
try ErrorMsg.Note.create(
p.allocator,
try p.errors.createNote(
null,
"valid options are [{s}]",
.{fieldNames(Ast.InterpolationSample)},
@ -314,7 +295,7 @@ pub fn globalVarDecl(p: *Parser, attrs: ?Ast.Index) !?Ast.Index {
var initializer = Ast.null_index;
if (p.eatToken(.equal)) |_| {
initializer = try p.expression() orelse {
try p.addError(
try p.errors.add(
p.peekToken(.loc, 0),
"expected initializer expression, found '{s}'",
.{p.peekToken(.tag, 0).symbol()},
@ -350,7 +331,7 @@ pub fn globalConstDecl(p: *Parser) !?Ast.Index {
_ = try p.expectToken(.equal);
const initializer = try p.expression() orelse {
try p.addError(
try p.errors.add(
p.peekToken(.loc, 0),
"expected initializer expression, found '{s}'",
.{p.peekToken(.tag, 0).symbol()},
@ -380,7 +361,7 @@ pub fn globalOverrideDecl(p: *Parser, attrs: ?Ast.Index) !?Ast.Index {
var initializer = Ast.null_index;
if (p.eatToken(.equal)) |_| {
initializer = try p.expression() orelse {
try p.addError(
try p.errors.add(
p.peekToken(.loc, 0),
"expected initializer expression, found '{s}'",
.{p.peekToken(.tag, 0).symbol()},
@ -425,7 +406,7 @@ pub fn structDecl(p: *Parser) !?Ast.Index {
const attrs = try p.attributeList();
const member = try p.structMember(attrs) orelse {
if (attrs != null) {
try p.addError(
try p.errors.add(
p.peekToken(.loc, 0),
"expected struct member, found '{s}'",
.{p.peekToken(.tag, 0).symbol()},
@ -466,7 +447,7 @@ pub fn structMember(p: *Parser, attrs: ?Ast.Index) !?Ast.Index {
pub fn constAssert(p: *Parser) !?Ast.Index {
const main_token = p.eatToken(.k_const_assert) orelse return null;
const expr = try p.expression() orelse {
try p.addError(
try p.errors.add(
p.peekToken(.loc, 0),
"expected expression, found '{s}'",
.{p.peekToken(.tag, 0).symbol()},
@ -497,7 +478,7 @@ pub fn functionDecl(p: *Parser, attrs: ?Ast.Index) !?Ast.Index {
}
const body = try p.block() orelse {
try p.addError(
try p.errors.add(
p.peekToken(.loc, 0),
"expected function body, found '{s}'",
.{p.peekToken(.tag, 0).symbol()},
@ -527,7 +508,7 @@ pub fn parameterList(p: *Parser) !?Ast.Index {
const attrs = try p.attributeList();
const param = try p.parameter(attrs) orelse {
if (attrs != null) {
try p.addError(
try p.errors.add(
p.peekToken(.loc, 0),
"expected function parameter, found '{s}'",
.{p.peekToken(.tag, 0).symbol()},
@ -568,7 +549,7 @@ pub fn statementRecoverable(p: *Parser) !?Ast.Index {
else => continue,
}
},
else => return err,
error.OutOfMemory => error.OutOfMemory,
};
}
}
@ -608,7 +589,7 @@ pub fn statement(p: *Parser) !?Ast.Index {
pub fn expectBlock(p: *Parser) error{ OutOfMemory, Parsing }!Ast.Index {
return try p.block() orelse {
try p.addError(
try p.errors.add(
p.peekToken(.loc, 0),
"expected block statement, found '{s}'",
.{p.peekToken(.tag, 0).symbol()},
@ -629,7 +610,7 @@ pub fn block(p: *Parser) error{ OutOfMemory, Parsing }!?Ast.Index {
const stmt = try p.statementRecoverable() orelse {
if (p.peekToken(.tag, 0) == .brace_right) break;
failed = true;
try p.addError(
try p.errors.add(
p.peekToken(.loc, 0),
"expected statement, found '{s}'",
.{p.peekToken(.tag, 0).symbol()},
@ -659,7 +640,7 @@ pub fn breakIfStatement(p: *Parser) !?Ast.Index {
const main_token = p.advanceToken();
_ = p.advanceToken();
const cond = try p.expression() orelse {
try p.addError(
try p.errors.add(
p.peekToken(.loc, 0),
"expected condition expression, found '{s}'",
.{p.peekToken(.tag, 0).symbol()},
@ -735,7 +716,7 @@ pub fn ifStatement(p: *Parser) !?Ast.Index {
const main_token = p.eatToken(.k_if) orelse return null;
const cond = try p.expression() orelse {
try p.addError(
try p.errors.add(
p.peekToken(.loc, 0),
"expected condition expression, found '{s}'",
.{p.peekToken(.tag, 0).symbol()},
@ -744,7 +725,7 @@ pub fn ifStatement(p: *Parser) !?Ast.Index {
return error.Parsing;
};
const body = try p.block() orelse {
try p.addError(
try p.errors.add(
p.peekToken(.loc, 0),
"expected if body block, found '{s}'",
.{p.peekToken(.tag, 0).symbol()},
@ -770,7 +751,7 @@ pub fn ifStatement(p: *Parser) !?Ast.Index {
}
const else_body = try p.block() orelse {
try p.addError(
try p.errors.add(
p.peekToken(.loc, 0),
"expected else body block, found '{s}'",
.{p.peekToken(.tag, 0).symbol()},
@ -819,7 +800,7 @@ pub fn switchStatement(p: *Parser) !?Ast.Index {
const main_token = p.eatToken(.k_switch) orelse return null;
const expr = try p.expression() orelse {
try p.addError(
try p.errors.add(
p.peekToken(.loc, 0),
"expected condition expression, found '{s}'",
.{p.peekToken(.tag, 0).symbol()},
@ -903,7 +884,7 @@ pub fn varStatement(p: *Parser) !?Ast.Index {
var initializer = Ast.null_index;
if (p.eatToken(.equal)) |_| {
initializer = try p.expression() orelse {
try p.addError(
try p.errors.add(
p.peekToken(.loc, 0),
"expected initializer expression, found '{s}'",
.{p.peekToken(.tag, 0).symbol()},
@ -936,7 +917,7 @@ pub fn varStatement(p: *Parser) !?Ast.Index {
_ = try p.expectToken(.equal);
const initializer = try p.expression() orelse {
try p.addError(
try p.errors.add(
p.peekToken(.loc, 0),
"expected initializer expression, found '{s}'",
.{p.peekToken(.tag, 0).symbol()},
@ -960,7 +941,7 @@ pub fn varUpdateStatement(p: *Parser) !?Ast.Index {
if (p.eatToken(.underscore)) |_| {
const equal_token = try p.expectToken(.equal);
const expr = try p.expression() orelse {
try p.addError(
try p.errors.add(
p.peekToken(.loc, 0),
"expected expression, found '{s}'",
.{p.peekToken(.tag, 0).symbol()},
@ -996,7 +977,7 @@ pub fn varUpdateStatement(p: *Parser) !?Ast.Index {
.shift_left_equal,
=> {
const expr = try p.expression() orelse {
try p.addError(
try p.errors.add(
p.peekToken(.loc, 0),
"expected expression, found '{s}'",
.{p.peekToken(.tag, 0).symbol()},
@ -1012,7 +993,7 @@ pub fn varUpdateStatement(p: *Parser) !?Ast.Index {
});
},
else => {
try p.addError(
try p.errors.add(
p.getToken(.loc, op_token),
"invalid assignment operator '{s}'",
.{p.getToken(.tag, op_token).symbol()},
@ -1029,7 +1010,7 @@ pub fn varUpdateStatement(p: *Parser) !?Ast.Index {
pub fn whileStatement(p: *Parser) !?Ast.Index {
const main_token = p.eatToken(.k_while) orelse return null;
const cond = try p.expression() orelse {
try p.addError(
try p.errors.add(
p.peekToken(.loc, 0),
"expected condition expression, found '{s}'",
.{p.peekToken(.tag, 0).symbol()},
@ -1048,7 +1029,7 @@ pub fn whileStatement(p: *Parser) !?Ast.Index {
pub fn expectTypeSpecifier(p: *Parser) error{ OutOfMemory, Parsing }!Ast.Index {
return try p.typeSpecifier() orelse {
try p.addError(
try p.errors.add(
p.peekToken(.loc, 0),
"expected type sepecifier, found '{s}'",
.{p.peekToken(.tag, 0).symbol()},
@ -1131,7 +1112,7 @@ pub fn typeSpecifierWithoutIdent(p: *Parser) !?Ast.Index {
var size = Ast.null_index;
if (p.eatToken(.comma)) |_| {
size = try p.elementCountExpr() orelse {
try p.addError(
try p.errors.add(
p.peekToken(.loc, 0),
"expected array size expression, found '{s}'",
.{p.peekToken(.tag, 0).symbol()},
@ -1267,12 +1248,11 @@ pub fn expectAddressSpace(p: *Parser) !Ast.Index {
if (std.meta.stringToEnum(Ast.AddressSpace, str)) |_| return token;
}
try p.addError(
try p.errors.add(
p.getToken(.loc, token),
"unknown address space '{s}'",
.{p.getToken(.loc, token).slice(p.source)},
try ErrorMsg.Note.create(
p.allocator,
try p.errors.createNote(
null,
"valid options are [{s}]",
.{fieldNames(Ast.AddressSpace)},
@ -1288,12 +1268,11 @@ pub fn expectAccessMode(p: *Parser) !Ast.Index {
if (std.meta.stringToEnum(Ast.AccessMode, str)) |_| return token;
}
try p.addError(
try p.errors.add(
p.getToken(.loc, token),
"unknown access mode '{s}'",
.{p.getToken(.loc, token).slice(p.source)},
try ErrorMsg.Note.create(
p.allocator,
try p.errors.createNote(
null,
"valid options are [{s}]",
.{fieldNames(Ast.AccessMode)},
@ -1309,12 +1288,11 @@ pub fn expectTexelFormat(p: *Parser) !Ast.Index {
if (std.meta.stringToEnum(Ast.TexelFormat, str)) |_| return token;
}
try p.addError(
try p.errors.add(
p.getToken(.loc, token),
"unknown address space '{s}'",
.{p.getToken(.loc, token).slice(p.source)},
try ErrorMsg.Note.create(
p.allocator,
try p.errors.createNote(
null,
"valid options are [{s}]",
.{fieldNames(Ast.TexelFormat)},
@ -1326,7 +1304,7 @@ pub fn expectTexelFormat(p: *Parser) !Ast.Index {
pub fn expectParenExpr(p: *Parser) !Ast.Index {
_ = try p.expectToken(.paren_left);
const expr = try p.expression() orelse {
try p.addError(
try p.errors.add(
p.peekToken(.loc, 0),
"unable to parse expression '{s}'",
.{p.peekToken(.tag, 0).symbol()},
@ -1365,7 +1343,7 @@ pub fn callExpr(p: *Parser) !?Ast.Index {
.array_type,
=> lhs = type_node,
else => {
try p.addError(
try p.errors.add(
p.getToken(.loc, main_token),
"type '{s}' can not be constructed",
.{p.getToken(.tag, main_token).symbol()},
@ -1418,7 +1396,7 @@ pub fn lhsExpression(p: *Parser) !?Ast.Index {
if (p.eatToken(.paren_left)) |_| {
const expr = try p.lhsExpression() orelse {
try p.addError(
try p.errors.add(
p.peekToken(.loc, 0),
"expected lhs expression, found '{s}'",
.{p.peekToken(.tag, 0).symbol()},
@ -1435,7 +1413,7 @@ pub fn lhsExpression(p: *Parser) !?Ast.Index {
.tag = .deref,
.main_token = star_token,
.lhs = try p.lhsExpression() orelse {
try p.addError(
try p.errors.add(
p.peekToken(.loc, 0),
"expected lhs expression, found '{s}'",
.{p.peekToken(.tag, 0).symbol()},
@ -1451,7 +1429,7 @@ pub fn lhsExpression(p: *Parser) !?Ast.Index {
.tag = .addr_of,
.main_token = addr_of_token,
.lhs = try p.lhsExpression() orelse {
try p.addError(
try p.errors.add(
p.peekToken(.loc, 0),
"expected lhs expression, found '{s}'",
.{p.peekToken(.tag, 0).symbol()},
@ -1524,7 +1502,7 @@ pub fn unaryExpr(p: *Parser) error{ OutOfMemory, Parsing }!?Ast.Index {
_ = p.advanceToken();
const expr = try p.unaryExpr() orelse {
try p.addError(
try p.errors.add(
p.peekToken(.loc, 0),
"unable to parse right side of '{s}' expression",
.{p.getToken(.tag, op_token).symbol()},
@ -1555,7 +1533,7 @@ pub fn expectRelationalExpr(p: *Parser, lhs_unary: Ast.Index) !Ast.Index {
_ = p.advanceToken();
const rhs_unary = try p.unaryExpr() orelse {
try p.addError(
try p.errors.add(
p.peekToken(.loc, 0),
"unable to parse right side of '{s}' expression",
.{p.getToken(.tag, op_token).symbol()},
@ -1586,7 +1564,7 @@ pub fn expectShortCircuitExpr(p: *Parser, lhs_relational: Ast.Index) !Ast.Index
_ = p.advanceToken();
const rhs_unary = try p.unaryExpr() orelse {
try p.addError(
try p.errors.add(
p.peekToken(.loc, 0),
"unable to parse right side of '{s}' expression",
.{p.getToken(.tag, op_token).symbol()},
@ -1620,7 +1598,7 @@ pub fn bitwiseExpr(p: *Parser, lhs: Ast.Index) !?Ast.Index {
var lhs_result = lhs;
while (true) {
const rhs = try p.unaryExpr() orelse {
try p.addError(
try p.errors.add(
p.peekToken(.loc, 0),
"unable to parse right side of '{s}' expression",
.{p.getToken(.tag, op_token).symbol()},
@ -1650,7 +1628,7 @@ pub fn expectShiftExpr(p: *Parser, lhs: Ast.Index) !Ast.Index {
_ = p.advanceToken();
const rhs = try p.unaryExpr() orelse {
try p.addError(
try p.errors.add(
p.peekToken(.loc, 0),
"unable to parse right side of '{s}' expression",
.{p.getToken(.tag, op_token).symbol()},
@ -1683,7 +1661,7 @@ pub fn expectAdditiveExpr(p: *Parser, lhs_mul: Ast.Index) !Ast.Index {
};
_ = p.advanceToken();
const unary = try p.unaryExpr() orelse {
try p.addError(
try p.errors.add(
p.peekToken(.loc, 0),
"unable to parse right side of '{s}' expression",
.{p.getToken(.tag, op_token).symbol()},
@ -1713,7 +1691,7 @@ pub fn expectMultiplicativeExpr(p: *Parser, lhs_unary: Ast.Index) !Ast.Index {
};
_ = p.advanceToken();
const rhs = try p.unaryExpr() orelse {
try p.addError(
try p.errors.add(
p.peekToken(.loc, 0),
"unable to parse right side of '{s}' expression",
.{p.peekToken(.tag, 0).symbol()},
@ -1742,7 +1720,7 @@ pub fn componentOrSwizzleSpecifier(p: *Parser, prefix: Ast.Index) !Ast.Index {
});
} else if (p.eatToken(.bracket_left)) |bracket_left_token| {
const index_expr = try p.expression() orelse {
try p.addError(
try p.errors.add(
p.peekToken(.loc, 0),
"expected expression, but found '{s}'",
.{p.peekToken(.tag, 0).symbol()},
@ -1843,17 +1821,6 @@ fn findNextStmt(p: *Parser) void {
}
}
pub fn addError(
p: *Parser,
loc: Token.Loc,
comptime format: []const u8,
args: anytype,
note: ?ErrorMsg.Note,
) !void {
const err_msg = try ErrorMsg.create(p.allocator, loc, format, args, note);
try p.errors.append(p.allocator, err_msg);
}
fn listToSpan(p: *Parser, list: []const Ast.Index) !Ast.Index {
try p.extra.appendSlice(p.allocator, list);
return p.addNode(.{
@ -1911,7 +1878,7 @@ pub fn expectToken(p: *Parser, tag: Token.Tag) !Ast.Index {
const token = p.advanceToken();
if (p.getToken(.tag, token) == tag) return token;
try p.addError(
try p.errors.add(
p.getToken(.loc, token),
"expected '{s}', but found '{s}'",
.{ tag.symbol(), p.getToken(.tag, token).symbol() },

View file

@ -5,57 +5,10 @@ pub const IR = @import("IR.zig");
pub const Parser = @import("Parser.zig");
pub const Token = @import("Token.zig");
pub const Tokenizer = @import("Tokenizer.zig");
pub const ErrorList = @import("ErrorList.zig");
pub const Extension = enum {
f16,
pub const Array = std.enums.EnumArray(Extension, bool);
};
pub const ErrorMsg = struct {
loc: Token.Loc,
msg: []const u8,
note: ?Note = null,
pub const Note = struct {
loc: ?Token.Loc = null,
msg: []const u8,
pub fn create(
allocator: std.mem.Allocator,
loc: ?Token.Loc,
comptime format: []const u8,
args: anytype,
) !Note {
return .{
.loc = loc,
.msg = try std.fmt.allocPrint(allocator, comptime format, args),
};
}
pub fn deinit(note: *Note, allocator: std.mem.Allocator) void {
allocator.free(note.msg);
note.* = undefined;
}
};
pub fn create(
allocator: std.mem.Allocator,
loc: Token.Loc,
comptime format: []const u8,
args: anytype,
note: ?Note,
) !ErrorMsg {
return .{
.loc = loc,
.msg = try std.fmt.allocPrint(allocator, comptime format, args),
.note = note,
};
}
pub fn deinit(err_msg: *ErrorMsg, allocator: std.mem.Allocator) void {
if (err_msg.note) |*note| note.*.deinit(allocator);
allocator.free(err_msg.msg);
err_msg.* = undefined;
}
};

View file

@ -11,164 +11,82 @@ fn sdkPath(comptime suffix: []const u8) []const u8 {
};
}
// TODO: move this to cli/main.zig
pub fn printErrors(errors: []dusk.ErrorMsg, source: []const u8, file_path: ?[]const u8) !void {
var bw = std.io.bufferedWriter(std.io.getStdErr().writer());
const b = bw.writer();
const term = std.debug.TTY.Config{ .escape_codes = {} };
for (errors) |*err| {
defer err.deinit(allocator);
const loc_extra = err.loc.extraInfo(source);
// 'file:line:column error: <MSG>'
try term.setColor(b, .Bold);
try b.print("{?s}:{d}:{d} ", .{ file_path, loc_extra.line, loc_extra.col });
try term.setColor(b, .Red);
try b.writeAll("error: ");
try term.setColor(b, .Reset);
try term.setColor(b, .Bold);
try b.writeAll(err.msg);
try b.writeByte('\n');
try printCode(b, term, source, err.loc);
// note
if (err.note) |note| {
if (note.loc) |note_loc| {
const note_loc_extra = note_loc.extraInfo(source);
try term.setColor(b, .Reset);
try term.setColor(b, .Bold);
try b.print("{?s}:{d}:{d} ", .{ file_path, note_loc_extra.line, note_loc_extra.col });
}
try term.setColor(b, .Cyan);
try b.writeAll("note: ");
try term.setColor(b, .Reset);
try term.setColor(b, .Bold);
try b.writeAll(note.msg);
try b.writeByte('\n');
if (note.loc) |note_loc| {
try printCode(b, term, source, note_loc);
}
}
try term.setColor(b, .Reset);
}
try bw.flush();
}
fn printCode(writer: anytype, term: std.debug.TTY.Config, source: []const u8, loc: dusk.Token.Loc) !void {
const loc_extra = loc.extraInfo(source);
try term.setColor(writer, .Dim);
try writer.print("{d} │ ", .{loc_extra.line});
try term.setColor(writer, .Reset);
try writer.writeAll(source[loc_extra.line_start..loc.start]);
try term.setColor(writer, .Green);
try writer.writeAll(source[loc.start..loc.end]);
try term.setColor(writer, .Reset);
try writer.writeAll(source[loc.end..loc_extra.line_end]);
try writer.writeByte('\n');
// location pointer
const line_number_len = (std.math.log10(loc_extra.line) + 1) + 3;
try writer.writeByteNTimes(
' ',
line_number_len + (loc_extra.col - 1),
);
try term.setColor(writer, .Bold);
try term.setColor(writer, .Green);
try writer.writeByte('^');
try writer.writeByteNTimes('~', loc.end - loc.start - 1);
try writer.writeByte('\n');
}
fn expectIR(source: [:0]const u8) !dusk.IR {
var res = try dusk.Ast.parse(allocator, source);
switch (res) {
.tree => |*tree| {
defer tree.deinit(allocator);
switch (try dusk.IR.generate(allocator, tree)) {
.ir => |ir| return ir,
.errors => |err_msgs| {
try printErrors(err_msgs, source, null);
allocator.free(err_msgs);
return error.ExpectedIR;
},
}
},
.errors => |err_msgs| {
try printErrors(err_msgs, source, null);
allocator.free(err_msgs);
return error.Parsing;
},
var tree = try dusk.Ast.parse(allocator, source);
defer tree.deinit(allocator);
if (tree.errors.list.items.len > 0) {
try tree.errors.print(source, null);
return error.Parsing;
}
var ir = try dusk.IR.generate(allocator, &tree);
errdefer ir.deinit();
if (ir.errors.list.items.len > 0) {
try ir.errors.print(source, null);
return error.ExpectedIR;
}
return ir;
}
fn expectError(source: [:0]const u8, err: dusk.ErrorMsg) !void {
var gpa = std.heap.GeneralPurposeAllocator(.{ .stack_trace_frames = 12 }){};
const all = gpa.allocator();
defer _ = gpa.deinit();
var res = try dusk.Ast.parse(all, source);
const err_list = switch (res) {
.tree => |*tree| blk: {
defer tree.deinit(all);
switch (try dusk.IR.generate(all, tree)) {
.ir => |*ir| {
ir.deinit();
return error.ExpectedError;
},
.errors => |err_msgs| break :blk err_msgs,
}
fn expectError(source: [:0]const u8, err: dusk.ErrorList.ErrorMsg) !void {
var tree = try dusk.Ast.parse(allocator, source);
defer tree.deinit(allocator);
var err_list = tree.errors;
var ir: ?dusk.IR = null;
defer if (ir != null) ir.?.deinit();
if (err_list.list.items.len == 0) {
ir = try dusk.IR.generate(allocator, &tree);
err_list = ir.?.errors;
if (err_list.list.items.len == 0) {
return error.ExpectedError;
},
.errors => |err_msgs| err_msgs,
};
defer {
for (err_list) |*err_msg| err_msg.deinit(all);
all.free(err_list);
}
}
const first_error = err_list.list.items[0];
{
errdefer {
std.debug.print(
"\n\x1b[31mexpected error({d}..{d}):\n{s}\n\x1b[32mactual error({d}..{d}):\n{s}\n\x1b[0m",
.{
err.loc.start, err.loc.end, err.msg,
err_list[0].loc.start, err_list[0].loc.end, err_list[0].msg,
first_error.loc.start, first_error.loc.end, first_error.msg,
},
);
}
try expect(std.mem.eql(u8, err.msg, err_list[0].msg));
try expect(err_list[0].loc.start == err.loc.start);
try expect(err_list[0].loc.end == err.loc.end);
try expect(std.mem.eql(u8, err.msg, first_error.msg));
try expect(first_error.loc.start == err.loc.start);
try expect(first_error.loc.end == err.loc.end);
}
if (err_list[0].note) |_| {
if (first_error.note) |_| {
errdefer {
std.debug.print(
"\n\x1b[31mexpected note msg:\n{s}\n\x1b[32mactual note msg:\n{s}\n\x1b[0m",
.{ err.note.?.msg, err_list[0].note.?.msg },
.{ err.note.?.msg, first_error.note.?.msg },
);
}
if (err.note == null) {
std.debug.print("\x1b[31mnote missed: {s}\x1b[0m\n", .{err_list[0].note.?.msg});
std.debug.print("\x1b[31mnote missed: {s}\x1b[0m\n", .{first_error.note.?.msg});
return error.NoteMissed;
}
try expect(std.mem.eql(u8, err.note.?.msg, err_list[0].note.?.msg));
if (err_list[0].note.?.loc) |_| {
try expect(std.mem.eql(u8, err.note.?.msg, first_error.note.?.msg));
if (first_error.note.?.loc) |_| {
errdefer {
std.debug.print(
"\n\x1b[31mexpected note loc: {d}..{d}\n\x1b[32mactual note loc: {d}..{d}\n\x1b[0m",
.{
err.note.?.loc.?.start, err.note.?.loc.?.end,
err_list[0].note.?.loc.?.start, err_list[0].note.?.loc.?.end,
first_error.note.?.loc.?.start, first_error.note.?.loc.?.end,
},
);
}
try expect(err_list[0].note.?.loc.?.start == err.note.?.loc.?.start);
try expect(err_list[0].note.?.loc.?.end == err.note.?.loc.?.end);
try expect(first_error.note.?.loc.?.start == err.note.?.loc.?.start);
try expect(first_error.note.?.loc.?.end == err.note.?.loc.?.end);
}
}
}