dusk: better IR.generate and Ast.parse return types

This commit is contained in:
Ali Chraghi 2023-03-27 15:03:40 +03:30 committed by Stephen Gutekanst
parent b086bdee3a
commit 059411fa97
7 changed files with 317 additions and 377 deletions

View file

@ -2,7 +2,7 @@ const std = @import("std");
const Parser = @import("Parser.zig"); const Parser = @import("Parser.zig");
const Token = @import("Token.zig"); const Token = @import("Token.zig");
const Tokenizer = @import("Tokenizer.zig"); const Tokenizer = @import("Tokenizer.zig");
const ErrorMsg = @import("main.zig").ErrorMsg; const ErrorList = @import("ErrorList.zig");
const Extension = @import("main.zig").Extension; const Extension = @import("main.zig").Extension;
const Ast = @This(); const Ast = @This();
@ -14,21 +14,18 @@ source: [:0]const u8,
tokens: TokenList.Slice, tokens: TokenList.Slice,
nodes: NodeList.Slice, nodes: NodeList.Slice,
extra: []const Index, extra: []const Index,
errors: ErrorList,
pub fn deinit(tree: *Ast, allocator: std.mem.Allocator) void { pub fn deinit(tree: *Ast, allocator: std.mem.Allocator) void {
tree.tokens.deinit(allocator); tree.tokens.deinit(allocator);
tree.nodes.deinit(allocator); tree.nodes.deinit(allocator);
allocator.free(tree.extra); allocator.free(tree.extra);
tree.errors.deinit();
tree.* = undefined; tree.* = undefined;
} }
pub const ParseResult = union(enum) {
errors: []ErrorMsg,
tree: Ast,
};
/// parses a TranslationUnit (WGSL Program) /// parses a TranslationUnit (WGSL Program)
pub fn parse(allocator: std.mem.Allocator, source: [:0]const u8) !ParseResult { pub fn parse(allocator: std.mem.Allocator, source: [:0]const u8) error{OutOfMemory}!Ast {
var p = Parser{ var p = Parser{
.allocator = allocator, .allocator = allocator,
.source = source, .source = source,
@ -53,26 +50,29 @@ pub fn parse(allocator: std.mem.Allocator, source: [:0]const u8) !ParseResult {
.nodes = .{}, .nodes = .{},
.extra = .{}, .extra = .{},
.scratch = .{}, .scratch = .{},
.errors = .{}, .errors = try ErrorList.init(allocator),
.extensions = Extension.Array.initFill(false), .extensions = Extension.Array.initFill(false),
}; };
defer p.deinit(); defer p.scratch.deinit(allocator);
errdefer {
p.tokens.deinit(allocator);
p.nodes.deinit(allocator);
p.extra.deinit(allocator);
p.errors.deinit();
}
// TODO: make sure tokens:nodes ratio is right // TODO: make sure tokens:nodes ratio is right
const estimated_node_count = (p.tokens.len + 2) / 2; const estimated_node_count = (p.tokens.len + 2) / 2;
try p.nodes.ensureTotalCapacity(allocator, estimated_node_count); try p.nodes.ensureTotalCapacity(allocator, estimated_node_count);
_ = try p.translationUnit() orelse { try p.translationUnit();
return .{ .errors = try p.errors.toOwnedSlice(allocator) };
};
return .{ return .{
.tree = .{ .source = source,
.source = source, .tokens = p.tokens.toOwnedSlice(),
.tokens = p.tokens.toOwnedSlice(), .nodes = p.nodes.toOwnedSlice(),
.nodes = p.nodes.toOwnedSlice(), .extra = try p.extra.toOwnedSlice(allocator),
.extra = try p.extra.toOwnedSlice(allocator), .errors = p.errors,
},
}; };
} }

View file

@ -2,7 +2,7 @@ const std = @import("std");
const Ast = @import("Ast.zig"); const Ast = @import("Ast.zig");
const Token = @import("Token.zig"); const Token = @import("Token.zig");
const IR = @import("IR.zig"); const IR = @import("IR.zig");
const ErrorMsg = @import("main.zig").ErrorMsg; const ErrorList = @import("ErrorList.zig");
const AstGen = @This(); const AstGen = @This();
allocator: std.mem.Allocator, allocator: std.mem.Allocator,
@ -11,7 +11,7 @@ instructions: std.ArrayListUnmanaged(IR.Inst) = .{},
refs: std.ArrayListUnmanaged(IR.Inst.Ref) = .{}, refs: std.ArrayListUnmanaged(IR.Inst.Ref) = .{},
strings: std.ArrayListUnmanaged(u8) = .{}, strings: std.ArrayListUnmanaged(u8) = .{},
scratch: std.ArrayListUnmanaged(IR.Inst.Ref) = .{}, scratch: std.ArrayListUnmanaged(IR.Inst.Ref) = .{},
errors: std.ArrayListUnmanaged(ErrorMsg) = .{}, errors: ErrorList,
scope_pool: std.heap.MemoryPool(Scope), scope_pool: std.heap.MemoryPool(Scope),
pub const Scope = struct { pub const Scope = struct {
@ -27,16 +27,6 @@ pub const Scope = struct {
}; };
}; };
pub fn deinit(self: *AstGen) void {
self.instructions.deinit(self.allocator);
self.refs.deinit(self.allocator);
self.strings.deinit(self.allocator);
self.scratch.deinit(self.allocator);
self.scope_pool.deinit();
for (self.errors.items) |*err_msg| err_msg.deinit(self.allocator);
self.errors.deinit(self.allocator);
}
pub fn genTranslationUnit(self: *AstGen) !u32 { pub fn genTranslationUnit(self: *AstGen) !u32 {
const global_decls = self.tree.spanToList(0); const global_decls = self.tree.spanToList(0);
@ -46,7 +36,10 @@ pub fn genTranslationUnit(self: *AstGen) !u32 {
var root_scope = try self.scope_pool.create(); var root_scope = try self.scope_pool.create();
root_scope.* = .{ .tag = .root, .parent = null }; root_scope.* = .{ .tag = .root, .parent = null };
try self.scanDecls(root_scope, global_decls); self.scanDecls(root_scope, global_decls) catch |err| switch (err) {
error.AnalysisFail => return try self.addRefList(self.scratch.items[scratch_top..]),
error.OutOfMemory => return error.OutOfMemory,
};
for (global_decls) |node| { for (global_decls) |node| {
const global = self.genDecl(root_scope, node) catch |err| switch (err) { const global = self.genDecl(root_scope, node) catch |err| switch (err) {
@ -56,10 +49,6 @@ pub fn genTranslationUnit(self: *AstGen) !u32 {
try self.scratch.append(self.allocator, global); try self.scratch.append(self.allocator, global);
} }
if (self.errors.items.len > 0) {
return error.AnalysisFail;
}
return try self.addRefList(self.scratch.items[scratch_top..]); return try self.addRefList(self.scratch.items[scratch_top..]);
} }
@ -72,7 +61,7 @@ pub fn scanDecls(self: *AstGen, scope: *Scope, decls: []const Ast.Index) !void {
// TODO // TODO
// if (Token.isReserved(name)) { // if (Token.isReserved(name)) {
// try self.addError( // try self.errors.add(
// loc, // loc,
// "the name '{s}' has ben reserved", // "the name '{s}' has ben reserved",
// .{name}, // .{name},
@ -83,12 +72,11 @@ pub fn scanDecls(self: *AstGen, scope: *Scope, decls: []const Ast.Index) !void {
var name_iter = scope.decls.keyIterator(); var name_iter = scope.decls.keyIterator();
while (name_iter.next()) |node| { while (name_iter.next()) |node| {
if (std.mem.eql(u8, self.declNameLoc(node.*).?.slice(self.tree.source), name)) { if (std.mem.eql(u8, self.declNameLoc(node.*).?.slice(self.tree.source), name)) {
try self.addError( try self.errors.add(
loc, loc,
"redeclaration of '{s}'", "redeclaration of '{s}'",
.{name}, .{name},
try ErrorMsg.Note.create( try self.errors.createNote(
self.allocator,
self.declNameLoc(node.*).?, self.declNameLoc(node.*).?,
"other declaration here", "other declaration here",
.{}, .{},
@ -130,7 +118,7 @@ pub fn declRef(self: *AstGen, scope: *Scope, loc: Token.Loc) !IR.Inst.Ref {
s = scope.parent orelse break; s = scope.parent orelse break;
} }
try self.addError( try self.errors.add(
loc, loc,
"use of undeclared identifier '{s}'", "use of undeclared identifier '{s}'",
.{name}, .{name},
@ -208,7 +196,7 @@ pub fn genStruct(self: *AstGen, scope: *Scope, node: Ast.Index) !IR.Inst.Ref {
switch (member_type_ref) { switch (member_type_ref) {
.bool_type, .i32_type, .u32_type, .f32_type, .f16_type => {}, .bool_type, .i32_type, .u32_type, .f32_type, .f16_type => {},
.sampler_type, .comparison_sampler_type, .external_sampled_texture_type => { .sampler_type, .comparison_sampler_type, .external_sampled_texture_type => {
try self.addError( try self.errors.add(
member_loc, member_loc,
"invalid struct member type '{s}'", "invalid struct member type '{s}'",
.{member_type_name.slice(self.tree.source)}, .{member_type_name.slice(self.tree.source)},
@ -221,7 +209,7 @@ pub fn genStruct(self: *AstGen, scope: *Scope, node: Ast.Index) !IR.Inst.Ref {
.vector_type, .matrix_type, .atomic_type, .struct_decl => {}, .vector_type, .matrix_type, .atomic_type, .struct_decl => {},
.array_type => { .array_type => {
if (self.instructions.items[member_type_ref.toIndex().?].data.array_type.size == .none and i + 1 != member_list.len) { if (self.instructions.items[member_type_ref.toIndex().?].data.array_type.size == .none and i + 1 != member_list.len) {
try self.addError( try self.errors.add(
member_loc, member_loc,
"struct member with runtime-sized array type, must be the last member of the structure", "struct member with runtime-sized array type, must be the last member of the structure",
.{}, .{},
@ -236,7 +224,7 @@ pub fn genStruct(self: *AstGen, scope: *Scope, node: Ast.Index) !IR.Inst.Ref {
.storage_texture_type, .storage_texture_type,
.depth_texture_type, .depth_texture_type,
=> { => {
try self.addError( try self.errors.add(
member_loc, member_loc,
"invalid struct member type '{s}'", "invalid struct member type '{s}'",
.{member_type_name.slice(self.tree.source)}, .{member_type_name.slice(self.tree.source)},
@ -320,7 +308,7 @@ pub fn addInst(self: *AstGen, inst: IR.Inst) error{OutOfMemory}!IR.Inst.Index {
// // ((lir == .construct and lir.construct == .vector) and (rir == .construct and rir.construct == .vector)) or // // ((lir == .construct and lir.construct == .vector) and (rir == .construct and rir.construct == .vector)) or
// // ((lir == .construct and lir.construct == .matrix) and (rir == .construct and rir.construct == .matrix)); // // ((lir == .construct and lir.construct == .matrix) and (rir == .construct and rir.construct == .matrix));
// // if (!is_valid_op) { // // if (!is_valid_op) {
// // try self.addError( // // try self.errors.add(
// // loc, // // loc,
// // "invalid operation with '{s}' and '{s}'", // // "invalid operation with '{s}' and '{s}'",
// // .{ @tagName(std.meta.activeTag(lir)), @tagName(std.meta.activeTag(rir)) }, // // .{ @tagName(std.meta.activeTag(lir)), @tagName(std.meta.activeTag(rir)) },
@ -356,7 +344,7 @@ pub fn addInst(self: *AstGen, inst: IR.Inst) error{OutOfMemory}!IR.Inst.Index {
// // !std.mem.endsWith(u8, str, "f") and // // !std.mem.endsWith(u8, str, "f") and
// // !std.mem.endsWith(u8, str, "h")) // // !std.mem.endsWith(u8, str, "h"))
// // { // // {
// // try self.addError( // // try self.errors.add(
// // loc, // // loc,
// // "number literal cannot have leading 0", // // "number literal cannot have leading 0",
// // .{str}, // // .{str},
@ -409,7 +397,7 @@ pub fn genType(self: *AstGen, scope: *Scope, node: Ast.Index) error{ AnalysisFai
.struct_decl, .struct_decl,
=> return decl_ref, => return decl_ref,
.global_variable_decl => { .global_variable_decl => {
try self.addError( try self.errors.add(
node_loc, node_loc,
"'{s}' is not a type", "'{s}' is not a type",
.{node_loc.slice(self.tree.source)}, .{node_loc.slice(self.tree.source)},
@ -449,12 +437,11 @@ pub fn genSampledTextureType(self: *AstGen, scope: *Scope, node: Ast.Index) !IR.
.comparison_sampler_type, .comparison_sampler_type,
.external_sampled_texture_type, .external_sampled_texture_type,
=> { => {
try self.addError( try self.errors.add(
self.tree.tokenLoc(self.tree.nodeToken(component_type_node)), self.tree.tokenLoc(self.tree.nodeToken(component_type_node)),
"invalid sampled texture component type", "invalid sampled texture component type",
.{}, .{},
try ErrorMsg.Note.create( try self.errors.createNote(
self.allocator,
null, null,
"must be 'i32', 'u32' or 'f32'", "must be 'i32', 'u32' or 'f32'",
.{}, .{},
@ -475,12 +462,11 @@ pub fn genSampledTextureType(self: *AstGen, scope: *Scope, node: Ast.Index) !IR.
.depth_texture_type, .depth_texture_type,
.struct_decl, .struct_decl,
=> { => {
try self.addError( try self.errors.add(
self.tree.tokenLoc(self.tree.nodeToken(component_type_node)), self.tree.tokenLoc(self.tree.nodeToken(component_type_node)),
"invalid sampled texture component type", "invalid sampled texture component type",
.{}, .{},
try ErrorMsg.Note.create( try self.errors.createNote(
self.allocator,
null, null,
"must be 'i32', 'u32' or 'f32'", "must be 'i32', 'u32' or 'f32'",
.{}, .{},
@ -531,12 +517,11 @@ pub fn genMultigenSampledTextureType(self: *AstGen, scope: *Scope, node: Ast.Ind
.comparison_sampler_type, .comparison_sampler_type,
.external_sampled_texture_type, .external_sampled_texture_type,
=> { => {
try self.addError( try self.errors.add(
self.tree.tokenLoc(self.tree.nodeToken(component_type_node)), self.tree.tokenLoc(self.tree.nodeToken(component_type_node)),
"invalid multisampled texture component type", "invalid multisampled texture component type",
.{}, .{},
try ErrorMsg.Note.create( try self.errors.createNote(
self.allocator,
null, null,
"must be 'i32', 'u32' or 'f32'", "must be 'i32', 'u32' or 'f32'",
.{}, .{},
@ -557,12 +542,11 @@ pub fn genMultigenSampledTextureType(self: *AstGen, scope: *Scope, node: Ast.Ind
.depth_texture_type, .depth_texture_type,
.struct_decl, .struct_decl,
=> { => {
try self.addError( try self.errors.add(
self.tree.tokenLoc(self.tree.nodeToken(component_type_node)), self.tree.tokenLoc(self.tree.nodeToken(component_type_node)),
"invalid multisampled texture component type", "invalid multisampled texture component type",
.{}, .{},
try ErrorMsg.Note.create( try self.errors.createNote(
self.allocator,
null, null,
"must be 'i32', 'u32' or 'f32'", "must be 'i32', 'u32' or 'f32'",
.{}, .{},
@ -602,12 +586,11 @@ pub fn genStorageTextureType(self: *AstGen, node: Ast.Index) !IR.Inst.Ref {
const access_mode = switch (access_mode_full) { const access_mode = switch (access_mode_full) {
.write => IR.Inst.StorageTextureType.AccessMode.write, .write => IR.Inst.StorageTextureType.AccessMode.write,
else => { else => {
try self.addError( try self.errors.add(
access_mode_loc, access_mode_loc,
"invalid access mode", "invalid access mode",
.{}, .{},
try ErrorMsg.Note.create( try self.errors.createNote(
self.allocator,
null, null,
"only 'write' is allowed", "only 'write' is allowed",
.{}, .{},
@ -704,12 +687,11 @@ pub fn genVectorType(self: *AstGen, scope: *Scope, node: Ast.Index) !IR.Inst.Ref
switch (component_type_ref) { switch (component_type_ref) {
.bool_type, .i32_type, .u32_type, .f32_type, .f16_type => {}, .bool_type, .i32_type, .u32_type, .f32_type, .f16_type => {},
.sampler_type, .comparison_sampler_type, .external_sampled_texture_type => { .sampler_type, .comparison_sampler_type, .external_sampled_texture_type => {
try self.addError( try self.errors.add(
self.tree.tokenLoc(self.tree.nodeToken(component_type_node)), self.tree.tokenLoc(self.tree.nodeToken(component_type_node)),
"invalid vector component type", "invalid vector component type",
.{}, .{},
try ErrorMsg.Note.create( try self.errors.createNote(
self.allocator,
null, null,
"must be 'i32', 'u32', 'f32', 'f16' or 'bool'", "must be 'i32', 'u32', 'f32', 'f16' or 'bool'",
.{}, .{},
@ -730,12 +712,11 @@ pub fn genVectorType(self: *AstGen, scope: *Scope, node: Ast.Index) !IR.Inst.Ref
.depth_texture_type, .depth_texture_type,
.struct_decl, .struct_decl,
=> { => {
try self.addError( try self.errors.add(
self.tree.tokenLoc(self.tree.nodeToken(component_type_node)), self.tree.tokenLoc(self.tree.nodeToken(component_type_node)),
"invalid vector component type", "invalid vector component type",
.{}, .{},
try ErrorMsg.Note.create( try self.errors.createNote(
self.allocator,
null, null,
"must be 'i32', 'u32', 'f32', 'f16' or 'bool'", "must be 'i32', 'u32', 'f32', 'f16' or 'bool'",
.{}, .{},
@ -784,12 +765,11 @@ pub fn genMatrixType(self: *AstGen, scope: *Scope, node: Ast.Index) !IR.Inst.Ref
.comparison_sampler_type, .comparison_sampler_type,
.external_sampled_texture_type, .external_sampled_texture_type,
=> { => {
try self.addError( try self.errors.add(
self.tree.tokenLoc(self.tree.nodeToken(component_type_node)), self.tree.tokenLoc(self.tree.nodeToken(component_type_node)),
"invalid matrix component type", "invalid matrix component type",
.{}, .{},
try ErrorMsg.Note.create( try self.errors.createNote(
self.allocator,
null, null,
"must be 'f32' or 'f16'", "must be 'f32' or 'f16'",
.{}, .{},
@ -810,12 +790,11 @@ pub fn genMatrixType(self: *AstGen, scope: *Scope, node: Ast.Index) !IR.Inst.Ref
.depth_texture_type, .depth_texture_type,
.struct_decl, .struct_decl,
=> { => {
try self.addError( try self.errors.add(
self.tree.tokenLoc(self.tree.nodeToken(component_type_node)), self.tree.tokenLoc(self.tree.nodeToken(component_type_node)),
"invalid matrix component type", "invalid matrix component type",
.{}, .{},
try ErrorMsg.Note.create( try self.errors.createNote(
self.allocator,
null, null,
"must be 'f32' or 'f16'", "must be 'f32' or 'f16'",
.{}, .{},
@ -870,12 +849,11 @@ pub fn genAtomicType(self: *AstGen, scope: *Scope, node: Ast.Index) !IR.Inst.Ref
.comparison_sampler_type, .comparison_sampler_type,
.external_sampled_texture_type, .external_sampled_texture_type,
=> { => {
try self.addError( try self.errors.add(
self.tree.tokenLoc(self.tree.nodeToken(component_type_node)), self.tree.tokenLoc(self.tree.nodeToken(component_type_node)),
"invalid atomic component type", "invalid atomic component type",
.{}, .{},
try ErrorMsg.Note.create( try self.errors.createNote(
self.allocator,
null, null,
"must be 'i32' or 'u32'", "must be 'i32' or 'u32'",
.{}, .{},
@ -896,12 +874,11 @@ pub fn genAtomicType(self: *AstGen, scope: *Scope, node: Ast.Index) !IR.Inst.Ref
.depth_texture_type, .depth_texture_type,
.struct_decl, .struct_decl,
=> { => {
try self.addError( try self.errors.add(
self.tree.tokenLoc(self.tree.nodeToken(component_type_node)), self.tree.tokenLoc(self.tree.nodeToken(component_type_node)),
"invalid atomic component type", "invalid atomic component type",
.{}, .{},
try ErrorMsg.Note.create( try self.errors.createNote(
self.allocator,
null, null,
"must be 'i32' or 'u32'", "must be 'i32' or 'u32'",
.{}, .{},
@ -939,7 +916,7 @@ pub fn genArrayType(self: *AstGen, scope: *Scope, node: Ast.Index) !IR.Inst.Ref
.comparison_sampler_type, .comparison_sampler_type,
.external_sampled_texture_type, .external_sampled_texture_type,
=> { => {
try self.addError( try self.errors.add(
self.tree.tokenLoc(self.tree.nodeToken(component_type_node)), self.tree.tokenLoc(self.tree.nodeToken(component_type_node)),
"invalid array component type", "invalid array component type",
.{}, .{},
@ -956,7 +933,7 @@ pub fn genArrayType(self: *AstGen, scope: *Scope, node: Ast.Index) !IR.Inst.Ref
=> {}, => {},
.array_type => { .array_type => {
if (self.instructions.items[component_type_ref.toIndex().?].data.array_type.size == .none) { if (self.instructions.items[component_type_ref.toIndex().?].data.array_type.size == .none) {
try self.addError( try self.errors.add(
self.tree.tokenLoc(self.tree.nodeToken(component_type_node)), self.tree.tokenLoc(self.tree.nodeToken(component_type_node)),
"array componet type can not be a runtime-sized array", "array componet type can not be a runtime-sized array",
.{}, .{},
@ -971,7 +948,7 @@ pub fn genArrayType(self: *AstGen, scope: *Scope, node: Ast.Index) !IR.Inst.Ref
.storage_texture_type, .storage_texture_type,
.depth_texture_type, .depth_texture_type,
=> { => {
try self.addError( try self.errors.add(
self.tree.tokenLoc(self.tree.nodeToken(component_type_node)), self.tree.tokenLoc(self.tree.nodeToken(component_type_node)),
"invalid array component type", "invalid array component type",
.{}, .{},
@ -1016,14 +993,3 @@ pub fn declNameLoc(self: *AstGen, node: Ast.Index) ?Token.Loc {
}; };
return self.tree.tokenLoc(token); return self.tree.tokenLoc(token);
} }
pub fn addError(
self: *AstGen,
loc: Token.Loc,
comptime format: []const u8,
args: anytype,
note: ?ErrorMsg.Note,
) !void {
const err_msg = try ErrorMsg.create(self.allocator, loc, format, args, note);
try self.errors.append(self.allocator, err_msg);
}

131
libs/dusk/src/ErrorList.zig Normal file
View file

@ -0,0 +1,131 @@
const std = @import("std");
const Token = @import("Token.zig");
pub const ErrorList = @This();
/// A single diagnostic: a source location plus a formatted message,
/// optionally accompanied by one explanatory note.
/// Message text is allocated from the owning ErrorList's arena
/// (see `add` / `createNote`) and freed all at once in `deinit`.
pub const ErrorMsg = struct {
    /// Location in the source this message points at.
    loc: Token.Loc,
    /// Formatted message text; arena-owned, do not free individually.
    msg: []const u8,
    /// Optional secondary note shown after the error.
    note: ?Note = null,

    pub const Note = struct {
        /// Where the note points; null means the note has no code location
        /// (print() then emits only the note text, without a code excerpt).
        loc: ?Token.Loc = null,
        /// Formatted note text; arena-owned, do not free individually.
        msg: []const u8,
    };
};
arena: std.heap.ArenaAllocator,
list: std.ArrayListUnmanaged(ErrorMsg) = .{},
/// Creates an empty error list. All messages and notes added later are
/// allocated from an arena backed by `allocator`; call `deinit` to free
/// everything at once.
pub fn init(allocator: std.mem.Allocator) !ErrorList {
    const arena = std.heap.ArenaAllocator.init(allocator);
    return ErrorList{ .arena = arena };
}
/// Frees every message and note in one shot by tearing down the arena,
/// then poisons the struct so use-after-deinit is caught in safe builds.
pub fn deinit(self: *ErrorList) void {
    self.arena.deinit();
    self.* = undefined;
}
/// Formats an error message and appends it to the list.
/// `note`, if any, should come from `createNote` so its text shares the
/// arena. Both the message text and the list storage are arena-owned,
/// so nothing appended here is ever freed individually.
pub fn add(
    self: *ErrorList,
    loc: Token.Loc,
    comptime format: []const u8,
    args: anytype,
    note: ?ErrorMsg.Note,
) error{OutOfMemory}!void {
    // Hoist the arena allocator: both the text and the list entry live there.
    const arena = self.arena.allocator();
    const err_msg = ErrorMsg{
        .loc = loc,
        .msg = try std.fmt.allocPrint(arena, format, args),
        .note = note,
    };
    try self.list.append(arena, err_msg);
}
/// Formats a note intended to be passed to a subsequent `add` call.
/// The note text is arena-allocated and freed with the list in `deinit`.
/// `loc` may be null for notes that carry no code location.
pub fn createNote(
    self: *ErrorList,
    loc: ?Token.Loc,
    comptime format: []const u8,
    args: anytype,
) error{OutOfMemory}!ErrorMsg.Note {
    return .{
        .loc = loc,
        .msg = try std.fmt.allocPrint(self.arena.allocator(), format, args),
    };
}
/// Pretty-prints every collected error (and its optional note) to stderr
/// in 'file:line:column error: MSG' form, followed by the offending source
/// line rendered by `printCode`. Uses ANSI colors only when stderr
/// supports escape codes. Output is buffered and flushed once at the end.
pub fn print(self: ErrorList, source: []const u8, file_path: ?[]const u8) !void {
    const stderr = std.io.getStdErr();
    var bw = std.io.bufferedWriter(stderr.writer());
    const b = bw.writer();
    // Pick color vs. no-color config once, up front.
    const term = if (stderr.supportsAnsiEscapeCodes())
        std.debug.TTY.Config{ .escape_codes = {} }
    else
        std.debug.TTY.Config{ .no_color = {} };
    for (self.list.items) |*err| {
        const loc_extra = err.loc.extraInfo(source);

        // 'file:line:column error: MSG'
        try term.setColor(b, .Bold);
        try b.print("{?s}:{d}:{d} ", .{ file_path, loc_extra.line, loc_extra.col });
        try term.setColor(b, .Red);
        try b.writeAll("error: ");
        try term.setColor(b, .Reset);
        try term.setColor(b, .Bold);
        try b.writeAll(err.msg);
        try b.writeByte('\n');
        try printCode(b, term, source, err.loc);

        // note: same layout as the error, but the location prefix and code
        // excerpt are emitted only when the note carries a location.
        if (err.note) |note| {
            if (note.loc) |note_loc| {
                const note_loc_extra = note_loc.extraInfo(source);
                try term.setColor(b, .Reset);
                try term.setColor(b, .Bold);
                try b.print("{?s}:{d}:{d} ", .{ file_path, note_loc_extra.line, note_loc_extra.col });
            }
            try term.setColor(b, .Cyan);
            try b.writeAll("note: ");
            try term.setColor(b, .Reset);
            try term.setColor(b, .Bold);
            try b.writeAll(note.msg);
            try b.writeByte('\n');
            if (note.loc) |note_loc| {
                try printCode(b, term, source, note_loc);
            }
        }
        // Leave the terminal in its default state after each entry.
        try term.setColor(b, .Reset);
    }
    try bw.flush();
}
/// Prints the source line containing `loc` with a dimmed line-number
/// gutter and the offending span highlighted, then a pointer row
/// ('^~~~') underlining the span.
fn printCode(writer: anytype, term: std.debug.TTY.Config, source: []const u8, loc: Token.Loc) !void {
    const loc_extra = loc.extraInfo(source);

    // '<line> │ ' gutter, then the line split into before/span/after so the
    // span alone is colored green.
    try term.setColor(writer, .Dim);
    try writer.print("{d} │ ", .{loc_extra.line});
    try term.setColor(writer, .Reset);
    try writer.writeAll(source[loc_extra.line_start..loc.start]);
    try term.setColor(writer, .Green);
    try writer.writeAll(source[loc.start..loc.end]);
    try term.setColor(writer, .Reset);
    try writer.writeAll(source[loc.end..loc_extra.line_end]);
    try writer.writeByte('\n');

    // Location pointer row: pad past the gutter (log10(line)+1 digits plus
    // 3 display columns for ' │ ') and the column offset, then underline.
    const line_number_len = (std.math.log10(loc_extra.line) + 1) + 3;
    try writer.writeByteNTimes(
        ' ',
        line_number_len + (loc_extra.col - 1),
    );
    try term.setColor(writer, .Bold);
    try term.setColor(writer, .Green);
    try writer.writeByte('^');
    // Saturating subtraction: a zero-length loc would otherwise underflow
    // here and panic in safe builds / invoke UB in ReleaseFast.
    try writer.writeByteNTimes('~', (loc.end -| loc.start) -| 1);
    try writer.writeByte('\n');
}

View file

@ -1,7 +1,7 @@
const std = @import("std"); const std = @import("std");
const AstGen = @import("AstGen.zig"); const AstGen = @import("AstGen.zig");
const Ast = @import("Ast.zig"); const Ast = @import("Ast.zig");
const ErrorMsg = @import("main.zig").ErrorMsg; const ErrorList = @import("ErrorList.zig");
const IR = @This(); const IR = @This();
allocator: std.mem.Allocator, allocator: std.mem.Allocator,
@ -9,38 +9,43 @@ globals_index: u32,
instructions: []const Inst, instructions: []const Inst,
refs: []const Inst.Ref, refs: []const Inst.Ref,
strings: []const u8, strings: []const u8,
errors: ErrorList,
pub fn deinit(self: IR) void { pub fn deinit(self: *IR) void {
self.allocator.free(self.instructions); self.allocator.free(self.instructions);
self.allocator.free(self.refs); self.allocator.free(self.refs);
self.allocator.free(self.strings); self.allocator.free(self.strings);
self.errors.deinit();
self.* = undefined;
} }
pub const AstGenResult = union(enum) { pub fn generate(allocator: std.mem.Allocator, tree: *const Ast) error{OutOfMemory}!IR {
ir: IR,
errors: []ErrorMsg,
};
pub fn generate(allocator: std.mem.Allocator, tree: *const Ast) !AstGenResult {
var astgen = AstGen{ var astgen = AstGen{
.allocator = allocator, .allocator = allocator,
.tree = tree, .tree = tree,
.errors = try ErrorList.init(allocator),
.scope_pool = std.heap.MemoryPool(AstGen.Scope).init(allocator), .scope_pool = std.heap.MemoryPool(AstGen.Scope).init(allocator),
}; };
defer astgen.deinit(); defer {
astgen.scope_pool.deinit();
astgen.scratch.deinit(allocator);
}
errdefer {
astgen.instructions.deinit(allocator);
astgen.refs.deinit(allocator);
astgen.strings.deinit(allocator);
}
const globals_index = astgen.genTranslationUnit() catch |err| switch (err) { const globals_index = try astgen.genTranslationUnit();
error.AnalysisFail => return .{ .errors = try astgen.errors.toOwnedSlice(allocator) },
error.OutOfMemory => return error.OutOfMemory,
};
return .{ .ir = .{ return .{
.allocator = allocator, .allocator = allocator,
.globals_index = globals_index, .globals_index = globals_index,
.instructions = try astgen.instructions.toOwnedSlice(allocator), .instructions = try astgen.instructions.toOwnedSlice(allocator),
.refs = try astgen.refs.toOwnedSlice(allocator), .refs = try astgen.refs.toOwnedSlice(allocator),
.strings = try astgen.strings.toOwnedSlice(allocator), .strings = try astgen.strings.toOwnedSlice(allocator),
} }; .errors = astgen.errors,
};
} }
pub fn getStr(self: IR, index: u32) []const u8 { pub fn getStr(self: IR, index: u32) []const u8 {

View file

@ -3,7 +3,7 @@ const std = @import("std");
const Ast = @import("Ast.zig"); const Ast = @import("Ast.zig");
const Token = @import("Token.zig"); const Token = @import("Token.zig");
const Extension = @import("main.zig").Extension; const Extension = @import("main.zig").Extension;
const ErrorMsg = @import("main.zig").ErrorMsg; const ErrorList = @import("ErrorList.zig");
const fieldNames = std.meta.fieldNames; const fieldNames = std.meta.fieldNames;
const Parser = @This(); const Parser = @This();
@ -14,19 +14,10 @@ tokens: std.MultiArrayList(Token),
nodes: std.MultiArrayList(Ast.Node), nodes: std.MultiArrayList(Ast.Node),
extra: std.ArrayListUnmanaged(Ast.Index), extra: std.ArrayListUnmanaged(Ast.Index),
scratch: std.ArrayListUnmanaged(Ast.Index), scratch: std.ArrayListUnmanaged(Ast.Index),
errors: std.ArrayListUnmanaged(ErrorMsg), errors: ErrorList,
extensions: Extension.Array, extensions: Extension.Array,
pub fn deinit(p: *Parser) void { pub fn translationUnit(p: *Parser) !void {
p.tokens.deinit(p.allocator);
p.nodes.deinit(p.allocator);
p.extra.deinit(p.allocator);
p.scratch.deinit(p.allocator);
for (p.errors.items) |*err_msg| err_msg.deinit(p.allocator);
p.errors.deinit(p.allocator);
}
pub fn translationUnit(p: *Parser) !?Ast.Index {
const root = try p.addNode(.{ .tag = .span, .main_token = undefined }); const root = try p.addNode(.{ .tag = .span, .main_token = undefined });
while (try p.globalDirectiveRecoverable()) |ext| { while (try p.globalDirectiveRecoverable()) |ext| {
@ -38,15 +29,9 @@ pub fn translationUnit(p: *Parser) !?Ast.Index {
try p.scratch.append(p.allocator, decl); try p.scratch.append(p.allocator, decl);
} }
if (p.errors.items.len > 0) {
return null;
}
try p.extra.appendSlice(p.allocator, p.scratch.items); try p.extra.appendSlice(p.allocator, p.scratch.items);
p.nodes.items(.lhs)[root] = @intCast(Ast.Index, p.extra.items.len - p.scratch.items.len); p.nodes.items(.lhs)[root] = @intCast(Ast.Index, p.extra.items.len - p.scratch.items.len);
p.nodes.items(.rhs)[root] = @intCast(Ast.Index, p.extra.items.len); p.nodes.items(.rhs)[root] = @intCast(Ast.Index, p.extra.items.len);
return root;
} }
pub fn globalDirectiveRecoverable(p: *Parser) !?Extension { pub fn globalDirectiveRecoverable(p: *Parser) !?Extension {
@ -55,7 +40,7 @@ pub fn globalDirectiveRecoverable(p: *Parser) !?Extension {
p.findNextGlobalDirective(); p.findNextGlobalDirective();
return null; return null;
}, },
else => return err, error.OutOfMemory => error.OutOfMemory,
}; };
} }
@ -63,7 +48,7 @@ pub fn globalDirective(p: *Parser) !?Extension {
_ = p.eatToken(.k_enable) orelse return null; _ = p.eatToken(.k_enable) orelse return null;
const ext_token = try p.expectToken(.ident); const ext_token = try p.expectToken(.ident);
const ext = std.meta.stringToEnum(Extension, p.getToken(.loc, ext_token).slice(p.source)) orelse { const ext = std.meta.stringToEnum(Extension, p.getToken(.loc, ext_token).slice(p.source)) orelse {
try p.addError(p.getToken(.loc, ext_token), "invalid extension", .{}, null); try p.errors.add(p.getToken(.loc, ext_token), "invalid extension", .{}, null);
return error.Parsing; return error.Parsing;
}; };
return ext; return ext;
@ -75,7 +60,7 @@ pub fn expectGlobalDeclRecoverable(p: *Parser) !?Ast.Index {
p.findNextGlobalDecl(); p.findNextGlobalDecl();
return null; return null;
}, },
else => return err, error.OutOfMemory => error.OutOfMemory,
}; };
} }
@ -100,7 +85,7 @@ pub fn expectGlobalDecl(p: *Parser) !Ast.Index {
return node; return node;
} }
try p.addError( try p.errors.add(
p.peekToken(.loc, 0), p.peekToken(.loc, 0),
"expected global declaration, found '{s}'", "expected global declaration, found '{s}'",
.{p.peekToken(.tag, 0).symbol()}, .{p.peekToken(.tag, 0).symbol()},
@ -126,12 +111,11 @@ pub fn attribute(p: *Parser) !?Ast.Index {
const ident_tok = try p.expectToken(.ident); const ident_tok = try p.expectToken(.ident);
const str = p.getToken(.loc, ident_tok).slice(p.source); const str = p.getToken(.loc, ident_tok).slice(p.source);
const tag = std.meta.stringToEnum(Ast.Attribute, str) orelse { const tag = std.meta.stringToEnum(Ast.Attribute, str) orelse {
try p.addError( try p.errors.add(
p.getToken(.loc, ident_tok), p.getToken(.loc, ident_tok),
"unknown attribute '{s}'", "unknown attribute '{s}'",
.{p.getToken(.loc, ident_tok).slice(p.source)}, .{p.getToken(.loc, ident_tok).slice(p.source)},
try ErrorMsg.Note.create( try p.errors.createNote(
p.allocator,
null, null,
"valid options are [{s}]", "valid options are [{s}]",
.{fieldNames(Ast.Attribute)}, .{fieldNames(Ast.Attribute)},
@ -165,7 +149,7 @@ pub fn attribute(p: *Parser) !?Ast.Index {
} else { } else {
node.tag = .attr_one_arg; node.tag = .attr_one_arg;
node.lhs = try p.expression() orelse { node.lhs = try p.expression() orelse {
try p.addError( try p.errors.add(
p.peekToken(.loc, 0), p.peekToken(.loc, 0),
"expected expression, but found '{s}'", "expected expression, but found '{s}'",
.{p.peekToken(.tag, 0).symbol()}, .{p.peekToken(.tag, 0).symbol()},
@ -183,20 +167,20 @@ pub fn attribute(p: *Parser) !?Ast.Index {
node.tag = .attr_workgroup_size; node.tag = .attr_workgroup_size;
var workgroup_size = Ast.Node.WorkgroupSize{ var workgroup_size = Ast.Node.WorkgroupSize{
.x = try p.expression() orelse { .x = try p.expression() orelse {
try p.addError(p.peekToken(.loc, 0), "expected workgroup_size x parameter", .{}, null); try p.errors.add(p.peekToken(.loc, 0), "expected workgroup_size x parameter", .{}, null);
return error.Parsing; return error.Parsing;
}, },
}; };
if (p.eatToken(.comma) != null and p.peekToken(.tag, 0) != .paren_right) { if (p.eatToken(.comma) != null and p.peekToken(.tag, 0) != .paren_right) {
workgroup_size.y = try p.expression() orelse { workgroup_size.y = try p.expression() orelse {
try p.addError(p.peekToken(.loc, 0), "expected workgroup_size y parameter", .{}, null); try p.errors.add(p.peekToken(.loc, 0), "expected workgroup_size y parameter", .{}, null);
return error.Parsing; return error.Parsing;
}; };
if (p.eatToken(.comma) != null and p.peekToken(.tag, 0) != .paren_right) { if (p.eatToken(.comma) != null and p.peekToken(.tag, 0) != .paren_right) {
workgroup_size.z = try p.expression() orelse { workgroup_size.z = try p.expression() orelse {
try p.addError(p.peekToken(.loc, 0), "expected workgroup_size z parameter", .{}, null); try p.errors.add(p.peekToken(.loc, 0), "expected workgroup_size z parameter", .{}, null);
return error.Parsing; return error.Parsing;
}; };
@ -233,12 +217,11 @@ pub fn expectBuiltinValue(p: *Parser) !Ast.Index {
if (std.meta.stringToEnum(Ast.BuiltinValue, str)) |_| return token; if (std.meta.stringToEnum(Ast.BuiltinValue, str)) |_| return token;
} }
try p.addError( try p.errors.add(
p.getToken(.loc, token), p.getToken(.loc, token),
"unknown builtin value name '{s}'", "unknown builtin value name '{s}'",
.{p.getToken(.loc, token).slice(p.source)}, .{p.getToken(.loc, token).slice(p.source)},
try ErrorMsg.Note.create( try p.errors.createNote(
p.allocator,
null, null,
"valid options are [{s}]", "valid options are [{s}]",
.{fieldNames(Ast.BuiltinValue)}, .{fieldNames(Ast.BuiltinValue)},
@ -254,12 +237,11 @@ pub fn expectInterpolationType(p: *Parser) !Ast.Index {
if (std.meta.stringToEnum(Ast.InterpolationType, str)) |_| return token; if (std.meta.stringToEnum(Ast.InterpolationType, str)) |_| return token;
} }
try p.addError( try p.errors.add(
p.getToken(.loc, token), p.getToken(.loc, token),
"unknown interpolation type name '{s}'", "unknown interpolation type name '{s}'",
.{p.getToken(.loc, token).slice(p.source)}, .{p.getToken(.loc, token).slice(p.source)},
try ErrorMsg.Note.create( try p.errors.createNote(
p.allocator,
null, null,
"valid options are [{s}]", "valid options are [{s}]",
.{fieldNames(Ast.InterpolationType)}, .{fieldNames(Ast.InterpolationType)},
@ -275,12 +257,11 @@ pub fn expectInterpolationSample(p: *Parser) !Ast.Index {
if (std.meta.stringToEnum(Ast.InterpolationSample, str)) |_| return token; if (std.meta.stringToEnum(Ast.InterpolationSample, str)) |_| return token;
} }
try p.addError( try p.errors.add(
p.getToken(.loc, token), p.getToken(.loc, token),
"unknown interpolation sample name '{s}'", "unknown interpolation sample name '{s}'",
.{p.getToken(.loc, token).slice(p.source)}, .{p.getToken(.loc, token).slice(p.source)},
try ErrorMsg.Note.create( try p.errors.createNote(
p.allocator,
null, null,
"valid options are [{s}]", "valid options are [{s}]",
.{fieldNames(Ast.InterpolationSample)}, .{fieldNames(Ast.InterpolationSample)},
@ -314,7 +295,7 @@ pub fn globalVarDecl(p: *Parser, attrs: ?Ast.Index) !?Ast.Index {
var initializer = Ast.null_index; var initializer = Ast.null_index;
if (p.eatToken(.equal)) |_| { if (p.eatToken(.equal)) |_| {
initializer = try p.expression() orelse { initializer = try p.expression() orelse {
try p.addError( try p.errors.add(
p.peekToken(.loc, 0), p.peekToken(.loc, 0),
"expected initializer expression, found '{s}'", "expected initializer expression, found '{s}'",
.{p.peekToken(.tag, 0).symbol()}, .{p.peekToken(.tag, 0).symbol()},
@ -350,7 +331,7 @@ pub fn globalConstDecl(p: *Parser) !?Ast.Index {
_ = try p.expectToken(.equal); _ = try p.expectToken(.equal);
const initializer = try p.expression() orelse { const initializer = try p.expression() orelse {
try p.addError( try p.errors.add(
p.peekToken(.loc, 0), p.peekToken(.loc, 0),
"expected initializer expression, found '{s}'", "expected initializer expression, found '{s}'",
.{p.peekToken(.tag, 0).symbol()}, .{p.peekToken(.tag, 0).symbol()},
@ -380,7 +361,7 @@ pub fn globalOverrideDecl(p: *Parser, attrs: ?Ast.Index) !?Ast.Index {
var initializer = Ast.null_index; var initializer = Ast.null_index;
if (p.eatToken(.equal)) |_| { if (p.eatToken(.equal)) |_| {
initializer = try p.expression() orelse { initializer = try p.expression() orelse {
try p.addError( try p.errors.add(
p.peekToken(.loc, 0), p.peekToken(.loc, 0),
"expected initializer expression, found '{s}'", "expected initializer expression, found '{s}'",
.{p.peekToken(.tag, 0).symbol()}, .{p.peekToken(.tag, 0).symbol()},
@ -425,7 +406,7 @@ pub fn structDecl(p: *Parser) !?Ast.Index {
const attrs = try p.attributeList(); const attrs = try p.attributeList();
const member = try p.structMember(attrs) orelse { const member = try p.structMember(attrs) orelse {
if (attrs != null) { if (attrs != null) {
try p.addError( try p.errors.add(
p.peekToken(.loc, 0), p.peekToken(.loc, 0),
"expected struct member, found '{s}'", "expected struct member, found '{s}'",
.{p.peekToken(.tag, 0).symbol()}, .{p.peekToken(.tag, 0).symbol()},
@ -466,7 +447,7 @@ pub fn structMember(p: *Parser, attrs: ?Ast.Index) !?Ast.Index {
pub fn constAssert(p: *Parser) !?Ast.Index { pub fn constAssert(p: *Parser) !?Ast.Index {
const main_token = p.eatToken(.k_const_assert) orelse return null; const main_token = p.eatToken(.k_const_assert) orelse return null;
const expr = try p.expression() orelse { const expr = try p.expression() orelse {
try p.addError( try p.errors.add(
p.peekToken(.loc, 0), p.peekToken(.loc, 0),
"expected expression, found '{s}'", "expected expression, found '{s}'",
.{p.peekToken(.tag, 0).symbol()}, .{p.peekToken(.tag, 0).symbol()},
@ -497,7 +478,7 @@ pub fn functionDecl(p: *Parser, attrs: ?Ast.Index) !?Ast.Index {
} }
const body = try p.block() orelse { const body = try p.block() orelse {
try p.addError( try p.errors.add(
p.peekToken(.loc, 0), p.peekToken(.loc, 0),
"expected function body, found '{s}'", "expected function body, found '{s}'",
.{p.peekToken(.tag, 0).symbol()}, .{p.peekToken(.tag, 0).symbol()},
@ -527,7 +508,7 @@ pub fn parameterList(p: *Parser) !?Ast.Index {
const attrs = try p.attributeList(); const attrs = try p.attributeList();
const param = try p.parameter(attrs) orelse { const param = try p.parameter(attrs) orelse {
if (attrs != null) { if (attrs != null) {
try p.addError( try p.errors.add(
p.peekToken(.loc, 0), p.peekToken(.loc, 0),
"expected function parameter, found '{s}'", "expected function parameter, found '{s}'",
.{p.peekToken(.tag, 0).symbol()}, .{p.peekToken(.tag, 0).symbol()},
@ -568,7 +549,7 @@ pub fn statementRecoverable(p: *Parser) !?Ast.Index {
else => continue, else => continue,
} }
}, },
else => return err, error.OutOfMemory => error.OutOfMemory,
}; };
} }
} }
@ -608,7 +589,7 @@ pub fn statement(p: *Parser) !?Ast.Index {
pub fn expectBlock(p: *Parser) error{ OutOfMemory, Parsing }!Ast.Index { pub fn expectBlock(p: *Parser) error{ OutOfMemory, Parsing }!Ast.Index {
return try p.block() orelse { return try p.block() orelse {
try p.addError( try p.errors.add(
p.peekToken(.loc, 0), p.peekToken(.loc, 0),
"expected block statement, found '{s}'", "expected block statement, found '{s}'",
.{p.peekToken(.tag, 0).symbol()}, .{p.peekToken(.tag, 0).symbol()},
@ -629,7 +610,7 @@ pub fn block(p: *Parser) error{ OutOfMemory, Parsing }!?Ast.Index {
const stmt = try p.statementRecoverable() orelse { const stmt = try p.statementRecoverable() orelse {
if (p.peekToken(.tag, 0) == .brace_right) break; if (p.peekToken(.tag, 0) == .brace_right) break;
failed = true; failed = true;
try p.addError( try p.errors.add(
p.peekToken(.loc, 0), p.peekToken(.loc, 0),
"expected statement, found '{s}'", "expected statement, found '{s}'",
.{p.peekToken(.tag, 0).symbol()}, .{p.peekToken(.tag, 0).symbol()},
@ -659,7 +640,7 @@ pub fn breakIfStatement(p: *Parser) !?Ast.Index {
const main_token = p.advanceToken(); const main_token = p.advanceToken();
_ = p.advanceToken(); _ = p.advanceToken();
const cond = try p.expression() orelse { const cond = try p.expression() orelse {
try p.addError( try p.errors.add(
p.peekToken(.loc, 0), p.peekToken(.loc, 0),
"expected condition expression, found '{s}'", "expected condition expression, found '{s}'",
.{p.peekToken(.tag, 0).symbol()}, .{p.peekToken(.tag, 0).symbol()},
@ -735,7 +716,7 @@ pub fn ifStatement(p: *Parser) !?Ast.Index {
const main_token = p.eatToken(.k_if) orelse return null; const main_token = p.eatToken(.k_if) orelse return null;
const cond = try p.expression() orelse { const cond = try p.expression() orelse {
try p.addError( try p.errors.add(
p.peekToken(.loc, 0), p.peekToken(.loc, 0),
"expected condition expression, found '{s}'", "expected condition expression, found '{s}'",
.{p.peekToken(.tag, 0).symbol()}, .{p.peekToken(.tag, 0).symbol()},
@ -744,7 +725,7 @@ pub fn ifStatement(p: *Parser) !?Ast.Index {
return error.Parsing; return error.Parsing;
}; };
const body = try p.block() orelse { const body = try p.block() orelse {
try p.addError( try p.errors.add(
p.peekToken(.loc, 0), p.peekToken(.loc, 0),
"expected if body block, found '{s}'", "expected if body block, found '{s}'",
.{p.peekToken(.tag, 0).symbol()}, .{p.peekToken(.tag, 0).symbol()},
@ -770,7 +751,7 @@ pub fn ifStatement(p: *Parser) !?Ast.Index {
} }
const else_body = try p.block() orelse { const else_body = try p.block() orelse {
try p.addError( try p.errors.add(
p.peekToken(.loc, 0), p.peekToken(.loc, 0),
"expected else body block, found '{s}'", "expected else body block, found '{s}'",
.{p.peekToken(.tag, 0).symbol()}, .{p.peekToken(.tag, 0).symbol()},
@ -819,7 +800,7 @@ pub fn switchStatement(p: *Parser) !?Ast.Index {
const main_token = p.eatToken(.k_switch) orelse return null; const main_token = p.eatToken(.k_switch) orelse return null;
const expr = try p.expression() orelse { const expr = try p.expression() orelse {
try p.addError( try p.errors.add(
p.peekToken(.loc, 0), p.peekToken(.loc, 0),
"expected condition expression, found '{s}'", "expected condition expression, found '{s}'",
.{p.peekToken(.tag, 0).symbol()}, .{p.peekToken(.tag, 0).symbol()},
@ -903,7 +884,7 @@ pub fn varStatement(p: *Parser) !?Ast.Index {
var initializer = Ast.null_index; var initializer = Ast.null_index;
if (p.eatToken(.equal)) |_| { if (p.eatToken(.equal)) |_| {
initializer = try p.expression() orelse { initializer = try p.expression() orelse {
try p.addError( try p.errors.add(
p.peekToken(.loc, 0), p.peekToken(.loc, 0),
"expected initializer expression, found '{s}'", "expected initializer expression, found '{s}'",
.{p.peekToken(.tag, 0).symbol()}, .{p.peekToken(.tag, 0).symbol()},
@ -936,7 +917,7 @@ pub fn varStatement(p: *Parser) !?Ast.Index {
_ = try p.expectToken(.equal); _ = try p.expectToken(.equal);
const initializer = try p.expression() orelse { const initializer = try p.expression() orelse {
try p.addError( try p.errors.add(
p.peekToken(.loc, 0), p.peekToken(.loc, 0),
"expected initializer expression, found '{s}'", "expected initializer expression, found '{s}'",
.{p.peekToken(.tag, 0).symbol()}, .{p.peekToken(.tag, 0).symbol()},
@ -960,7 +941,7 @@ pub fn varUpdateStatement(p: *Parser) !?Ast.Index {
if (p.eatToken(.underscore)) |_| { if (p.eatToken(.underscore)) |_| {
const equal_token = try p.expectToken(.equal); const equal_token = try p.expectToken(.equal);
const expr = try p.expression() orelse { const expr = try p.expression() orelse {
try p.addError( try p.errors.add(
p.peekToken(.loc, 0), p.peekToken(.loc, 0),
"expected expression, found '{s}'", "expected expression, found '{s}'",
.{p.peekToken(.tag, 0).symbol()}, .{p.peekToken(.tag, 0).symbol()},
@ -996,7 +977,7 @@ pub fn varUpdateStatement(p: *Parser) !?Ast.Index {
.shift_left_equal, .shift_left_equal,
=> { => {
const expr = try p.expression() orelse { const expr = try p.expression() orelse {
try p.addError( try p.errors.add(
p.peekToken(.loc, 0), p.peekToken(.loc, 0),
"expected expression, found '{s}'", "expected expression, found '{s}'",
.{p.peekToken(.tag, 0).symbol()}, .{p.peekToken(.tag, 0).symbol()},
@ -1012,7 +993,7 @@ pub fn varUpdateStatement(p: *Parser) !?Ast.Index {
}); });
}, },
else => { else => {
try p.addError( try p.errors.add(
p.getToken(.loc, op_token), p.getToken(.loc, op_token),
"invalid assignment operator '{s}'", "invalid assignment operator '{s}'",
.{p.getToken(.tag, op_token).symbol()}, .{p.getToken(.tag, op_token).symbol()},
@ -1029,7 +1010,7 @@ pub fn varUpdateStatement(p: *Parser) !?Ast.Index {
pub fn whileStatement(p: *Parser) !?Ast.Index { pub fn whileStatement(p: *Parser) !?Ast.Index {
const main_token = p.eatToken(.k_while) orelse return null; const main_token = p.eatToken(.k_while) orelse return null;
const cond = try p.expression() orelse { const cond = try p.expression() orelse {
try p.addError( try p.errors.add(
p.peekToken(.loc, 0), p.peekToken(.loc, 0),
"expected condition expression, found '{s}'", "expected condition expression, found '{s}'",
.{p.peekToken(.tag, 0).symbol()}, .{p.peekToken(.tag, 0).symbol()},
@ -1048,7 +1029,7 @@ pub fn whileStatement(p: *Parser) !?Ast.Index {
pub fn expectTypeSpecifier(p: *Parser) error{ OutOfMemory, Parsing }!Ast.Index { pub fn expectTypeSpecifier(p: *Parser) error{ OutOfMemory, Parsing }!Ast.Index {
return try p.typeSpecifier() orelse { return try p.typeSpecifier() orelse {
try p.addError( try p.errors.add(
p.peekToken(.loc, 0), p.peekToken(.loc, 0),
"expected type sepecifier, found '{s}'", "expected type sepecifier, found '{s}'",
.{p.peekToken(.tag, 0).symbol()}, .{p.peekToken(.tag, 0).symbol()},
@ -1131,7 +1112,7 @@ pub fn typeSpecifierWithoutIdent(p: *Parser) !?Ast.Index {
var size = Ast.null_index; var size = Ast.null_index;
if (p.eatToken(.comma)) |_| { if (p.eatToken(.comma)) |_| {
size = try p.elementCountExpr() orelse { size = try p.elementCountExpr() orelse {
try p.addError( try p.errors.add(
p.peekToken(.loc, 0), p.peekToken(.loc, 0),
"expected array size expression, found '{s}'", "expected array size expression, found '{s}'",
.{p.peekToken(.tag, 0).symbol()}, .{p.peekToken(.tag, 0).symbol()},
@ -1267,12 +1248,11 @@ pub fn expectAddressSpace(p: *Parser) !Ast.Index {
if (std.meta.stringToEnum(Ast.AddressSpace, str)) |_| return token; if (std.meta.stringToEnum(Ast.AddressSpace, str)) |_| return token;
} }
try p.addError( try p.errors.add(
p.getToken(.loc, token), p.getToken(.loc, token),
"unknown address space '{s}'", "unknown address space '{s}'",
.{p.getToken(.loc, token).slice(p.source)}, .{p.getToken(.loc, token).slice(p.source)},
try ErrorMsg.Note.create( try p.errors.createNote(
p.allocator,
null, null,
"valid options are [{s}]", "valid options are [{s}]",
.{fieldNames(Ast.AddressSpace)}, .{fieldNames(Ast.AddressSpace)},
@ -1288,12 +1268,11 @@ pub fn expectAccessMode(p: *Parser) !Ast.Index {
if (std.meta.stringToEnum(Ast.AccessMode, str)) |_| return token; if (std.meta.stringToEnum(Ast.AccessMode, str)) |_| return token;
} }
try p.addError( try p.errors.add(
p.getToken(.loc, token), p.getToken(.loc, token),
"unknown access mode '{s}'", "unknown access mode '{s}'",
.{p.getToken(.loc, token).slice(p.source)}, .{p.getToken(.loc, token).slice(p.source)},
try ErrorMsg.Note.create( try p.errors.createNote(
p.allocator,
null, null,
"valid options are [{s}]", "valid options are [{s}]",
.{fieldNames(Ast.AccessMode)}, .{fieldNames(Ast.AccessMode)},
@ -1309,12 +1288,11 @@ pub fn expectTexelFormat(p: *Parser) !Ast.Index {
if (std.meta.stringToEnum(Ast.TexelFormat, str)) |_| return token; if (std.meta.stringToEnum(Ast.TexelFormat, str)) |_| return token;
} }
try p.addError( try p.errors.add(
p.getToken(.loc, token), p.getToken(.loc, token),
"unknown address space '{s}'", "unknown address space '{s}'",
.{p.getToken(.loc, token).slice(p.source)}, .{p.getToken(.loc, token).slice(p.source)},
try ErrorMsg.Note.create( try p.errors.createNote(
p.allocator,
null, null,
"valid options are [{s}]", "valid options are [{s}]",
.{fieldNames(Ast.TexelFormat)}, .{fieldNames(Ast.TexelFormat)},
@ -1326,7 +1304,7 @@ pub fn expectTexelFormat(p: *Parser) !Ast.Index {
pub fn expectParenExpr(p: *Parser) !Ast.Index { pub fn expectParenExpr(p: *Parser) !Ast.Index {
_ = try p.expectToken(.paren_left); _ = try p.expectToken(.paren_left);
const expr = try p.expression() orelse { const expr = try p.expression() orelse {
try p.addError( try p.errors.add(
p.peekToken(.loc, 0), p.peekToken(.loc, 0),
"unable to parse expression '{s}'", "unable to parse expression '{s}'",
.{p.peekToken(.tag, 0).symbol()}, .{p.peekToken(.tag, 0).symbol()},
@ -1365,7 +1343,7 @@ pub fn callExpr(p: *Parser) !?Ast.Index {
.array_type, .array_type,
=> lhs = type_node, => lhs = type_node,
else => { else => {
try p.addError( try p.errors.add(
p.getToken(.loc, main_token), p.getToken(.loc, main_token),
"type '{s}' can not be constructed", "type '{s}' can not be constructed",
.{p.getToken(.tag, main_token).symbol()}, .{p.getToken(.tag, main_token).symbol()},
@ -1418,7 +1396,7 @@ pub fn lhsExpression(p: *Parser) !?Ast.Index {
if (p.eatToken(.paren_left)) |_| { if (p.eatToken(.paren_left)) |_| {
const expr = try p.lhsExpression() orelse { const expr = try p.lhsExpression() orelse {
try p.addError( try p.errors.add(
p.peekToken(.loc, 0), p.peekToken(.loc, 0),
"expected lhs expression, found '{s}'", "expected lhs expression, found '{s}'",
.{p.peekToken(.tag, 0).symbol()}, .{p.peekToken(.tag, 0).symbol()},
@ -1435,7 +1413,7 @@ pub fn lhsExpression(p: *Parser) !?Ast.Index {
.tag = .deref, .tag = .deref,
.main_token = star_token, .main_token = star_token,
.lhs = try p.lhsExpression() orelse { .lhs = try p.lhsExpression() orelse {
try p.addError( try p.errors.add(
p.peekToken(.loc, 0), p.peekToken(.loc, 0),
"expected lhs expression, found '{s}'", "expected lhs expression, found '{s}'",
.{p.peekToken(.tag, 0).symbol()}, .{p.peekToken(.tag, 0).symbol()},
@ -1451,7 +1429,7 @@ pub fn lhsExpression(p: *Parser) !?Ast.Index {
.tag = .addr_of, .tag = .addr_of,
.main_token = addr_of_token, .main_token = addr_of_token,
.lhs = try p.lhsExpression() orelse { .lhs = try p.lhsExpression() orelse {
try p.addError( try p.errors.add(
p.peekToken(.loc, 0), p.peekToken(.loc, 0),
"expected lhs expression, found '{s}'", "expected lhs expression, found '{s}'",
.{p.peekToken(.tag, 0).symbol()}, .{p.peekToken(.tag, 0).symbol()},
@ -1524,7 +1502,7 @@ pub fn unaryExpr(p: *Parser) error{ OutOfMemory, Parsing }!?Ast.Index {
_ = p.advanceToken(); _ = p.advanceToken();
const expr = try p.unaryExpr() orelse { const expr = try p.unaryExpr() orelse {
try p.addError( try p.errors.add(
p.peekToken(.loc, 0), p.peekToken(.loc, 0),
"unable to parse right side of '{s}' expression", "unable to parse right side of '{s}' expression",
.{p.getToken(.tag, op_token).symbol()}, .{p.getToken(.tag, op_token).symbol()},
@ -1555,7 +1533,7 @@ pub fn expectRelationalExpr(p: *Parser, lhs_unary: Ast.Index) !Ast.Index {
_ = p.advanceToken(); _ = p.advanceToken();
const rhs_unary = try p.unaryExpr() orelse { const rhs_unary = try p.unaryExpr() orelse {
try p.addError( try p.errors.add(
p.peekToken(.loc, 0), p.peekToken(.loc, 0),
"unable to parse right side of '{s}' expression", "unable to parse right side of '{s}' expression",
.{p.getToken(.tag, op_token).symbol()}, .{p.getToken(.tag, op_token).symbol()},
@ -1586,7 +1564,7 @@ pub fn expectShortCircuitExpr(p: *Parser, lhs_relational: Ast.Index) !Ast.Index
_ = p.advanceToken(); _ = p.advanceToken();
const rhs_unary = try p.unaryExpr() orelse { const rhs_unary = try p.unaryExpr() orelse {
try p.addError( try p.errors.add(
p.peekToken(.loc, 0), p.peekToken(.loc, 0),
"unable to parse right side of '{s}' expression", "unable to parse right side of '{s}' expression",
.{p.getToken(.tag, op_token).symbol()}, .{p.getToken(.tag, op_token).symbol()},
@ -1620,7 +1598,7 @@ pub fn bitwiseExpr(p: *Parser, lhs: Ast.Index) !?Ast.Index {
var lhs_result = lhs; var lhs_result = lhs;
while (true) { while (true) {
const rhs = try p.unaryExpr() orelse { const rhs = try p.unaryExpr() orelse {
try p.addError( try p.errors.add(
p.peekToken(.loc, 0), p.peekToken(.loc, 0),
"unable to parse right side of '{s}' expression", "unable to parse right side of '{s}' expression",
.{p.getToken(.tag, op_token).symbol()}, .{p.getToken(.tag, op_token).symbol()},
@ -1650,7 +1628,7 @@ pub fn expectShiftExpr(p: *Parser, lhs: Ast.Index) !Ast.Index {
_ = p.advanceToken(); _ = p.advanceToken();
const rhs = try p.unaryExpr() orelse { const rhs = try p.unaryExpr() orelse {
try p.addError( try p.errors.add(
p.peekToken(.loc, 0), p.peekToken(.loc, 0),
"unable to parse right side of '{s}' expression", "unable to parse right side of '{s}' expression",
.{p.getToken(.tag, op_token).symbol()}, .{p.getToken(.tag, op_token).symbol()},
@ -1683,7 +1661,7 @@ pub fn expectAdditiveExpr(p: *Parser, lhs_mul: Ast.Index) !Ast.Index {
}; };
_ = p.advanceToken(); _ = p.advanceToken();
const unary = try p.unaryExpr() orelse { const unary = try p.unaryExpr() orelse {
try p.addError( try p.errors.add(
p.peekToken(.loc, 0), p.peekToken(.loc, 0),
"unable to parse right side of '{s}' expression", "unable to parse right side of '{s}' expression",
.{p.getToken(.tag, op_token).symbol()}, .{p.getToken(.tag, op_token).symbol()},
@ -1713,7 +1691,7 @@ pub fn expectMultiplicativeExpr(p: *Parser, lhs_unary: Ast.Index) !Ast.Index {
}; };
_ = p.advanceToken(); _ = p.advanceToken();
const rhs = try p.unaryExpr() orelse { const rhs = try p.unaryExpr() orelse {
try p.addError( try p.errors.add(
p.peekToken(.loc, 0), p.peekToken(.loc, 0),
"unable to parse right side of '{s}' expression", "unable to parse right side of '{s}' expression",
.{p.peekToken(.tag, 0).symbol()}, .{p.peekToken(.tag, 0).symbol()},
@ -1742,7 +1720,7 @@ pub fn componentOrSwizzleSpecifier(p: *Parser, prefix: Ast.Index) !Ast.Index {
}); });
} else if (p.eatToken(.bracket_left)) |bracket_left_token| { } else if (p.eatToken(.bracket_left)) |bracket_left_token| {
const index_expr = try p.expression() orelse { const index_expr = try p.expression() orelse {
try p.addError( try p.errors.add(
p.peekToken(.loc, 0), p.peekToken(.loc, 0),
"expected expression, but found '{s}'", "expected expression, but found '{s}'",
.{p.peekToken(.tag, 0).symbol()}, .{p.peekToken(.tag, 0).symbol()},
@ -1843,17 +1821,6 @@ fn findNextStmt(p: *Parser) void {
} }
} }
pub fn addError(
p: *Parser,
loc: Token.Loc,
comptime format: []const u8,
args: anytype,
note: ?ErrorMsg.Note,
) !void {
const err_msg = try ErrorMsg.create(p.allocator, loc, format, args, note);
try p.errors.append(p.allocator, err_msg);
}
fn listToSpan(p: *Parser, list: []const Ast.Index) !Ast.Index { fn listToSpan(p: *Parser, list: []const Ast.Index) !Ast.Index {
try p.extra.appendSlice(p.allocator, list); try p.extra.appendSlice(p.allocator, list);
return p.addNode(.{ return p.addNode(.{
@ -1911,7 +1878,7 @@ pub fn expectToken(p: *Parser, tag: Token.Tag) !Ast.Index {
const token = p.advanceToken(); const token = p.advanceToken();
if (p.getToken(.tag, token) == tag) return token; if (p.getToken(.tag, token) == tag) return token;
try p.addError( try p.errors.add(
p.getToken(.loc, token), p.getToken(.loc, token),
"expected '{s}', but found '{s}'", "expected '{s}', but found '{s}'",
.{ tag.symbol(), p.getToken(.tag, token).symbol() }, .{ tag.symbol(), p.getToken(.tag, token).symbol() },

View file

@ -5,57 +5,10 @@ pub const IR = @import("IR.zig");
pub const Parser = @import("Parser.zig"); pub const Parser = @import("Parser.zig");
pub const Token = @import("Token.zig"); pub const Token = @import("Token.zig");
pub const Tokenizer = @import("Tokenizer.zig"); pub const Tokenizer = @import("Tokenizer.zig");
pub const ErrorList = @import("ErrorList.zig");
pub const Extension = enum { pub const Extension = enum {
f16, f16,
pub const Array = std.enums.EnumArray(Extension, bool); pub const Array = std.enums.EnumArray(Extension, bool);
}; };
pub const ErrorMsg = struct {
loc: Token.Loc,
msg: []const u8,
note: ?Note = null,
pub const Note = struct {
loc: ?Token.Loc = null,
msg: []const u8,
pub fn create(
allocator: std.mem.Allocator,
loc: ?Token.Loc,
comptime format: []const u8,
args: anytype,
) !Note {
return .{
.loc = loc,
.msg = try std.fmt.allocPrint(allocator, comptime format, args),
};
}
pub fn deinit(note: *Note, allocator: std.mem.Allocator) void {
allocator.free(note.msg);
note.* = undefined;
}
};
pub fn create(
allocator: std.mem.Allocator,
loc: Token.Loc,
comptime format: []const u8,
args: anytype,
note: ?Note,
) !ErrorMsg {
return .{
.loc = loc,
.msg = try std.fmt.allocPrint(allocator, comptime format, args),
.note = note,
};
}
pub fn deinit(err_msg: *ErrorMsg, allocator: std.mem.Allocator) void {
if (err_msg.note) |*note| note.*.deinit(allocator);
allocator.free(err_msg.msg);
err_msg.* = undefined;
}
};

View file

@ -11,164 +11,82 @@ fn sdkPath(comptime suffix: []const u8) []const u8 {
}; };
} }
// TODO: move this to cli/main.zig
pub fn printErrors(errors: []dusk.ErrorMsg, source: []const u8, file_path: ?[]const u8) !void {
var bw = std.io.bufferedWriter(std.io.getStdErr().writer());
const b = bw.writer();
const term = std.debug.TTY.Config{ .escape_codes = {} };
for (errors) |*err| {
defer err.deinit(allocator);
const loc_extra = err.loc.extraInfo(source);
// 'file:line:column error: <MSG>'
try term.setColor(b, .Bold);
try b.print("{?s}:{d}:{d} ", .{ file_path, loc_extra.line, loc_extra.col });
try term.setColor(b, .Red);
try b.writeAll("error: ");
try term.setColor(b, .Reset);
try term.setColor(b, .Bold);
try b.writeAll(err.msg);
try b.writeByte('\n');
try printCode(b, term, source, err.loc);
// note
if (err.note) |note| {
if (note.loc) |note_loc| {
const note_loc_extra = note_loc.extraInfo(source);
try term.setColor(b, .Reset);
try term.setColor(b, .Bold);
try b.print("{?s}:{d}:{d} ", .{ file_path, note_loc_extra.line, note_loc_extra.col });
}
try term.setColor(b, .Cyan);
try b.writeAll("note: ");
try term.setColor(b, .Reset);
try term.setColor(b, .Bold);
try b.writeAll(note.msg);
try b.writeByte('\n');
if (note.loc) |note_loc| {
try printCode(b, term, source, note_loc);
}
}
try term.setColor(b, .Reset);
}
try bw.flush();
}
fn printCode(writer: anytype, term: std.debug.TTY.Config, source: []const u8, loc: dusk.Token.Loc) !void {
const loc_extra = loc.extraInfo(source);
try term.setColor(writer, .Dim);
try writer.print("{d} │ ", .{loc_extra.line});
try term.setColor(writer, .Reset);
try writer.writeAll(source[loc_extra.line_start..loc.start]);
try term.setColor(writer, .Green);
try writer.writeAll(source[loc.start..loc.end]);
try term.setColor(writer, .Reset);
try writer.writeAll(source[loc.end..loc_extra.line_end]);
try writer.writeByte('\n');
// location pointer
const line_number_len = (std.math.log10(loc_extra.line) + 1) + 3;
try writer.writeByteNTimes(
' ',
line_number_len + (loc_extra.col - 1),
);
try term.setColor(writer, .Bold);
try term.setColor(writer, .Green);
try writer.writeByte('^');
try writer.writeByteNTimes('~', loc.end - loc.start - 1);
try writer.writeByte('\n');
}
fn expectIR(source: [:0]const u8) !dusk.IR { fn expectIR(source: [:0]const u8) !dusk.IR {
var res = try dusk.Ast.parse(allocator, source); var tree = try dusk.Ast.parse(allocator, source);
switch (res) { defer tree.deinit(allocator);
.tree => |*tree| {
defer tree.deinit(allocator); if (tree.errors.list.items.len > 0) {
switch (try dusk.IR.generate(allocator, tree)) { try tree.errors.print(source, null);
.ir => |ir| return ir, return error.Parsing;
.errors => |err_msgs| {
try printErrors(err_msgs, source, null);
allocator.free(err_msgs);
return error.ExpectedIR;
},
}
},
.errors => |err_msgs| {
try printErrors(err_msgs, source, null);
allocator.free(err_msgs);
return error.Parsing;
},
} }
var ir = try dusk.IR.generate(allocator, &tree);
errdefer ir.deinit();
if (ir.errors.list.items.len > 0) {
try ir.errors.print(source, null);
return error.ExpectedIR;
}
return ir;
} }
fn expectError(source: [:0]const u8, err: dusk.ErrorMsg) !void { fn expectError(source: [:0]const u8, err: dusk.ErrorList.ErrorMsg) !void {
var gpa = std.heap.GeneralPurposeAllocator(.{ .stack_trace_frames = 12 }){}; var tree = try dusk.Ast.parse(allocator, source);
const all = gpa.allocator(); defer tree.deinit(allocator);
defer _ = gpa.deinit(); var err_list = tree.errors;
var res = try dusk.Ast.parse(all, source);
const err_list = switch (res) { var ir: ?dusk.IR = null;
.tree => |*tree| blk: { defer if (ir != null) ir.?.deinit();
defer tree.deinit(all);
switch (try dusk.IR.generate(all, tree)) { if (err_list.list.items.len == 0) {
.ir => |*ir| { ir = try dusk.IR.generate(allocator, &tree);
ir.deinit();
return error.ExpectedError; err_list = ir.?.errors;
}, if (err_list.list.items.len == 0) {
.errors => |err_msgs| break :blk err_msgs,
}
return error.ExpectedError; return error.ExpectedError;
}, }
.errors => |err_msgs| err_msgs,
};
defer {
for (err_list) |*err_msg| err_msg.deinit(all);
all.free(err_list);
} }
const first_error = err_list.list.items[0];
{ {
errdefer { errdefer {
std.debug.print( std.debug.print(
"\n\x1b[31mexpected error({d}..{d}):\n{s}\n\x1b[32mactual error({d}..{d}):\n{s}\n\x1b[0m", "\n\x1b[31mexpected error({d}..{d}):\n{s}\n\x1b[32mactual error({d}..{d}):\n{s}\n\x1b[0m",
.{ .{
err.loc.start, err.loc.end, err.msg, err.loc.start, err.loc.end, err.msg,
err_list[0].loc.start, err_list[0].loc.end, err_list[0].msg, first_error.loc.start, first_error.loc.end, first_error.msg,
}, },
); );
} }
try expect(std.mem.eql(u8, err.msg, err_list[0].msg)); try expect(std.mem.eql(u8, err.msg, first_error.msg));
try expect(err_list[0].loc.start == err.loc.start); try expect(first_error.loc.start == err.loc.start);
try expect(err_list[0].loc.end == err.loc.end); try expect(first_error.loc.end == err.loc.end);
} }
if (err_list[0].note) |_| { if (first_error.note) |_| {
errdefer { errdefer {
std.debug.print( std.debug.print(
"\n\x1b[31mexpected note msg:\n{s}\n\x1b[32mactual note msg:\n{s}\n\x1b[0m", "\n\x1b[31mexpected note msg:\n{s}\n\x1b[32mactual note msg:\n{s}\n\x1b[0m",
.{ err.note.?.msg, err_list[0].note.?.msg }, .{ err.note.?.msg, first_error.note.?.msg },
); );
} }
if (err.note == null) { if (err.note == null) {
std.debug.print("\x1b[31mnote missed: {s}\x1b[0m\n", .{err_list[0].note.?.msg}); std.debug.print("\x1b[31mnote missed: {s}\x1b[0m\n", .{first_error.note.?.msg});
return error.NoteMissed; return error.NoteMissed;
} }
try expect(std.mem.eql(u8, err.note.?.msg, err_list[0].note.?.msg)); try expect(std.mem.eql(u8, err.note.?.msg, first_error.note.?.msg));
if (err_list[0].note.?.loc) |_| { if (first_error.note.?.loc) |_| {
errdefer { errdefer {
std.debug.print( std.debug.print(
"\n\x1b[31mexpected note loc: {d}..{d}\n\x1b[32mactual note loc: {d}..{d}\n\x1b[0m", "\n\x1b[31mexpected note loc: {d}..{d}\n\x1b[32mactual note loc: {d}..{d}\n\x1b[0m",
.{ .{
err.note.?.loc.?.start, err.note.?.loc.?.end, err.note.?.loc.?.start, err.note.?.loc.?.end,
err_list[0].note.?.loc.?.start, err_list[0].note.?.loc.?.end, first_error.note.?.loc.?.start, first_error.note.?.loc.?.end,
}, },
); );
} }
try expect(err_list[0].note.?.loc.?.start == err.note.?.loc.?.start); try expect(first_error.note.?.loc.?.start == err.note.?.loc.?.start);
try expect(err_list[0].note.?.loc.?.end == err.note.?.loc.?.end); try expect(first_error.note.?.loc.?.end == err.note.?.loc.?.end);
} }
} }
} }