Co-authored-by: Stephen Gutekanst <stephen@hexops.com>
Ali Chraghi 2023-03-05 22:44:54 +03:30 committed by GitHub
parent 2b6f3fb1d9
commit 94fbc5d27f
20 changed files with 4505 additions and 0 deletions

143
libs/dusk/src/Analyse.zig Normal file

@@ -0,0 +1,143 @@
const std = @import("std");
const Ast = @import("Ast.zig");
const Token = @import("Token.zig");
const ErrorMsg = @import("main.zig").ErrorMsg;
const Analyse = @This();
allocator: std.mem.Allocator,
tree: *const Ast,
errors: std.ArrayListUnmanaged(ErrorMsg),
pub fn deinit(self: *Analyse) void {
for (self.errors.items) |*err_msg| err_msg.deinit(self.allocator);
self.errors.deinit(self.allocator);
}
pub fn analyseRoot(self: *Analyse) !void {
const global_items = self.tree.spanToList(0);
for (global_items, 0..) |node_i, i| {
try self.checkRedeclaration(global_items[i + 1 ..], node_i);
try self.globalDecl(global_items, node_i);
}
if (self.errors.items.len > 0) {
return error.Analysing;
}
}
pub fn globalDecl(self: *Analyse, parent_scope: []const Ast.Index, node_i: Ast.Index) !void {
switch (self.tree.nodeTag(node_i)) {
.global_variable => {}, // TODO
.struct_decl => try self.structDecl(parent_scope, node_i),
else => std.debug.print("Global Decl TODO: {}\n", .{self.tree.nodeTag(node_i)}),
}
}
pub fn structDecl(self: *Analyse, parent_scope: []const Ast.Index, node: Ast.Index) !void {
const member_list = self.tree.spanToList(self.tree.nodeLHS(node));
for (member_list, 0..) |member_node, i| {
try self.checkRedeclaration(member_list[i + 1 ..], member_node);
const member_loc = self.tree.tokenLoc(self.tree.nodeToken(member_node));
const member_name = member_loc.slice(self.tree.source);
const member_type_node = self.tree.nodeRHS(member_node);
switch (self.tree.nodeTag(member_type_node)) {
.scalar_type,
.vector_type,
.matrix_type,
.atomic_type,
=> {},
.array_type => {
if (self.tree.nodeRHS(member_type_node) == Ast.null_index and
i != member_list.len - 1)
{
try self.addError(
member_loc,
"struct member with runtime-sized array type, must be the last member of the structure",
.{},
null,
);
}
},
.user_type => {
_ = self.findDeclNode(parent_scope, member_name) orelse {
try self.addError(
member_loc, // TODO
"use of undeclared identifier '{s}'",
.{member_name},
null,
);
continue;
};
},
else => {
try self.addError(
member_loc,
"invalid struct member type '{s}'",
.{member_name},
null,
);
},
}
}
}
pub fn findDeclNode(self: *Analyse, scope_items: []const Ast.Index, name: []const u8) ?Ast.Index {
for (scope_items) |node| {
const node_token = self.declNameToken(node) orelse continue;
if (std.mem.eql(u8, name, self.tree.tokenLoc(node_token).slice(self.tree.source))) {
return node;
}
}
return null;
}
pub fn checkRedeclaration(self: *Analyse, scope_items: []const Ast.Index, decl_node: Ast.Index) !void {
const decl_token_loc = self.tree.tokenLoc(self.declNameToken(decl_node).?);
const decl_name = decl_token_loc.slice(self.tree.source);
for (scope_items) |redecl_node| {
std.debug.assert(decl_node != redecl_node);
const redecl_token_loc = self.tree.tokenLoc(self.declNameToken(redecl_node).?);
const redecl_name = redecl_token_loc.slice(self.tree.source);
if (std.mem.eql(u8, decl_name, redecl_name)) {
try self.addError(
redecl_token_loc,
"redeclaration of '{s}'",
.{decl_name},
try ErrorMsg.Note.create(
self.allocator,
decl_token_loc,
"other declaration here",
.{},
),
);
}
}
}
pub fn declNameToken(self: *Analyse, node: Ast.Index) ?Ast.Index {
return switch (self.tree.nodeTag(node)) {
.global_variable => self.tree.extraData(Ast.Node.GlobalVarDecl, self.tree.nodeLHS(node)).name,
.struct_decl,
.fn_decl,
.global_constant,
.override,
.type_alias,
=> self.tree.nodeToken(node) + 1,
.struct_member => self.tree.nodeToken(node),
else => null,
};
}
pub fn addError(
self: *Analyse,
loc: Token.Loc,
comptime format: []const u8,
args: anytype,
note: ?ErrorMsg.Note,
) !void {
const err_msg = try ErrorMsg.create(self.allocator, loc, format, args, note);
try self.errors.append(self.allocator, err_msg);
}
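
For reference, a minimal usage sketch (not part of this commit) that drives the redeclaration check above through the public entry points; it assumes the test allocator, that the file sits alongside these sources, and that Parser.zig accepts this comma-separated struct form:

const std = @import("std");
const Ast = @import("Ast.zig");

test "struct member redeclaration is reported" {
    const allocator = std.testing.allocator;
    // Two members named 'a'; checkRedeclaration should flag the second one.
    const source: [:0]const u8 = "struct S { a: u32, a: u32 }";
    var result = try Ast.parse(allocator, source);
    const tree = &result.tree; // assumes the source parses cleanly
    defer tree.deinit(allocator);
    const errors = (try tree.analyse(allocator)).?;
    defer {
        for (errors) |*err| err.deinit(allocator);
        allocator.free(errors);
    }
    // At least the "redeclaration of 'a'" diagnostic.
    try std.testing.expect(errors.len > 0);
}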

715
libs/dusk/src/Ast.zig Normal file

@@ -0,0 +1,715 @@
const std = @import("std");
const Analyse = @import("Analyse.zig");
const Parser = @import("Parser.zig");
const Token = @import("Token.zig");
const Tokenizer = @import("Tokenizer.zig");
const ErrorMsg = @import("main.zig").ErrorMsg;
const Extension = @import("main.zig").Extension;
const Ast = @This();
pub const NodeList = std.MultiArrayList(Node);
pub const TokenList = std.MultiArrayList(Token);
source: [:0]const u8,
tokens: TokenList.Slice,
nodes: NodeList.Slice,
extra: []const Index,
pub fn deinit(tree: *Ast, allocator: std.mem.Allocator) void {
tree.tokens.deinit(allocator);
tree.nodes.deinit(allocator);
allocator.free(tree.extra);
tree.* = undefined;
}
pub const ParseResult = union(enum) {
errors: []ErrorMsg,
tree: Ast,
};
/// parses a TranslationUnit (WGSL Program)
pub fn parse(allocator: std.mem.Allocator, source: [:0]const u8) !ParseResult {
const estimated_tokens = source.len / 8;
var tokens = std.MultiArrayList(Token){};
try tokens.ensureTotalCapacity(allocator, estimated_tokens);
var tokenizer = Tokenizer.init(source);
while (true) {
const tok = tokenizer.next();
try tokens.append(allocator, tok);
if (tok.tag == .eof) break;
}
var p = Parser{
.allocator = allocator,
.source = source,
.tok_i = 0,
.tokens = tokens.toOwnedSlice(),
.nodes = .{},
.extra = .{},
.scratch = .{},
.errors = .{},
.extensions = Extension.Array.initFill(false),
};
defer p.deinit();
errdefer p.tokens.deinit(allocator);
// TODO: make sure tokens:nodes ratio is right
const estimated_node_count = (tokens.len + 2) / 2;
try p.nodes.ensureTotalCapacity(allocator, estimated_node_count);
p.parseRoot() catch |err| {
if (err == error.Parsing) {
p.tokens.deinit(allocator);
return .{ .errors = try p.errors.toOwnedSlice(allocator) };
}
return err;
};
return .{
.tree = .{
.source = source,
.tokens = p.tokens,
.nodes = p.nodes.toOwnedSlice(),
.extra = try p.extra.toOwnedSlice(allocator),
},
};
}
pub fn analyse(tree: Ast, allocator: std.mem.Allocator) !?[]ErrorMsg {
var analyser = Analyse{
.allocator = allocator,
.tree = &tree,
.errors = .{},
};
defer analyser.deinit();
analyser.analyseRoot() catch |err| {
if (err == error.Analysing) {
return try analyser.errors.toOwnedSlice(allocator);
}
return err;
};
return null;
}
pub fn spanToList(tree: Ast, span: Ast.Index) []const Ast.Index {
std.debug.assert(tree.nodeTag(span) == .span);
return tree.extra[tree.nodeLHS(span)..tree.nodeRHS(span)];
}
pub fn extraData(tree: Ast, comptime T: type, index: Ast.Index) T {
const fields = std.meta.fields(T);
var result: T = undefined;
inline for (fields, 0..) |field, i| {
comptime std.debug.assert(field.type == Ast.Index);
@field(result, field.name) = tree.extra[index + i];
}
return result;
}
pub fn tokenTag(tree: Ast, i: Index) Token.Tag {
return tree.tokens.items(.tag)[i];
}
pub fn tokenLoc(tree: Ast, i: Index) Token.Loc {
return tree.tokens.items(.loc)[i];
}
pub fn nodeTag(tree: Ast, i: Index) Node.Tag {
return tree.nodes.items(.tag)[i];
}
pub fn nodeToken(tree: Ast, i: Index) Index {
return tree.nodes.items(.main_token)[i];
}
pub fn nodeLHS(tree: Ast, i: Index) Index {
return tree.nodes.items(.lhs)[i];
}
pub fn nodeRHS(tree: Ast, i: Index) Index {
return tree.nodes.items(.rhs)[i];
}
pub const Index = u32;
pub const null_index: Index = 0;
pub const Node = struct {
tag: Tag,
main_token: Index,
lhs: Index = null_index,
rhs: Index = null_index,
pub const Tag = enum {
/// a slice into the extra field [LHS..RHS]
/// TOK : undefined
/// LHS : Index
/// RHS : Index
span,
// ####### GlobalDecl #######
/// TOK : k_var
/// LHS : GlobalVarDecl
/// RHS : Expr?
global_variable,
/// TOK : k_const
/// LHS : Type
/// RHS : Expr
global_constant,
/// TOK : k_override
/// LHS : OverrideDecl
/// RHS : Expr
override,
/// TOK : k_type
/// LHS : Type
/// RHS : --
type_alias,
/// TOK : k_const_assert
/// LHS : Expr
/// RHS : --
const_assert,
/// TOK : k_struct
/// LHS : span(struct_member)
/// RHS : --
struct_decl,
/// TOK : ident
/// LHS : span(Attribute)
/// RHS : Type
struct_member,
/// TOK : k_fn
/// LHS : FnProto
/// RHS : block
fn_decl,
/// TOK : ident
/// LHS : ? Attributes
/// RHS : type
fn_param,
// ####### Statement #######
// block = span(Statement)
/// TOK : k_return
/// LHS : Expr?
/// RHS : --
@"return",
/// TOK : k_discard
/// LHS : --
/// RHS : --
discard,
/// TOK : k_loop
/// LHS : block
/// RHS : --
loop,
/// TOK : k_continuing
/// LHS : block
/// RHS : --
continuing,
/// TOK : k_break
/// LHS : Expr
/// RHS : --
break_if,
/// TOK : k_break
/// LHS : --
/// RHS : --
@"break",
/// TOK : k_continue
/// LHS : --
/// RHS : --
@"continue",
/// TOK : k_if
/// LHS : Expr
/// RHS : block
@"if",
/// RHS is else body
/// TOK : k_if
/// LHS : if
/// RHS : block
if_else,
/// TOK : k_if
/// LHS : if
/// RHS : if, if_else, if_else_if
if_else_if,
/// TOK : k_switch
/// LHS : Expr
/// RHS : span(switch_case, switch_default, switch_case_default)
@"switch",
/// TOK : k_case
/// LHS : span(Expr)
/// RHS : block
switch_case,
/// TOK : k_default
/// LHS : block
/// RHS : --
switch_default,
/// switch_case with default (`case 1, 2, default {}`)
/// TOK : k_case
/// LHS : span(Expr)
/// RHS : block
switch_case_default,
/// TOK : k_var
/// LHS : VarDecl
/// RHS : Expr?
var_decl,
/// TOK : k_const
/// LHS : Type?
/// RHS : Expr
const_decl,
/// TOK : k_let
/// LHS : Type?
/// RHS : Expr
let_decl,
/// TOK : k_while
/// LHS : Expr
/// RHS : block
@"while",
/// TOK : k_for
/// LHS : ForHeader
/// RHS : block
@"for",
/// TOK : plus_plus, minus_minus
/// LHS : Expr
increase_decrement,
/// TOK : plus_equal, minus_equal,
/// times_equal, division_equal,
/// modulo_equal, and_equal,
/// or_equal, xor_equal,
/// shift_right_equal, shift_left_equal
/// LHS : Expr
/// RHS : Expr
compound_assign,
/// TOK : equal
/// LHS : Expr
/// RHS : --
phony_assign,
// ####### Type #######
/// TOK : k_i32, k_u32, k_f32, k_f16, k_bool
/// LHS : --
/// RHS : --
scalar_type,
/// TOK : k_sampler, k_comparison_sampler
/// LHS : --
/// RHS : --
sampler_type,
/// TOK : k_vec2, k_vec3, k_vec4
/// LHS : Type
/// RHS : --
vector_type,
/// TOK : k_mat2x2, k_mat2x3, k_mat2x4,
/// k_mat3x2, k_mat3x3, k_mat3x4,
/// k_mat4x2, k_mat4x3, k_mat4x4
/// LHS : Type
/// RHS : --
matrix_type,
/// TOK : k_atomic
/// LHS : Type
/// RHS : --
atomic_type,
/// TOK : k_array
/// LHS : Type
/// RHS : Expr?
array_type,
/// TOK : k_ptr
/// LHS : Type
/// RHS : PtrType
ptr_type,
/// TOK : k_texture_1d, k_texture_2d, k_texture_2d_array,
/// k_texture_3d, k_texture_cube, k_texture_cube_array
/// LHS : Type
/// RHS : --
sampled_texture_type,
/// TOK : k_texture_multisampled_2d
/// LHS : Type
/// RHS : --
multisampled_texture_type,
/// TOK : k_texture_external
/// LHS : Type
/// RHS : --
external_texture_type,
/// TOK : k_texture_storage_1d, k_texture_storage_2d,
/// k_texture_storage_2d_array, k_texture_storage_3d
/// LHS : Index(Token(TexelFormat))
/// RHS : Index(Token(AccessMode))
storage_texture_type,
/// TOK : k_texture_depth_2d, k_texture_depth_2d_array
/// k_texture_depth_cube, k_texture_depth_cube_array
/// k_texture_depth_multisampled_2d
/// LHS : --
/// RHS : --
depth_texture_type,
/// TOK : ident
/// LHS : --
/// RHS : --
user_type,
// ####### Attr #######
/// TOK : attr
attr,
/// TOK : attr
/// LHS : Expr
/// RHS : --
attr_one_arg,
/// TOK : attr
/// LHS : Index(Token(BuiltinValue))
/// RHS : --
attr_builtin,
/// TOK : attr
/// LHS : WorkgroupSize
/// RHS : --
attr_workgroup_size,
/// TOK : attr
/// LHS : Index(Token(InterpolationType))
/// RHS : Index(Token(InterpolationSample))
attr_interpolate,
// ####### Expr #######
// see both Parser.zig and https://gpuweb.github.io/gpuweb/wgsl/#expression-grammar
/// TOK : *
/// LHS : Expr
/// RHS : Expr
mul,
/// TOK : /
/// LHS : Expr
/// RHS : Expr
div,
/// TOK : %
/// LHS : Expr
/// RHS : Expr
mod,
/// TOK : +
/// LHS : Expr
/// RHS : Expr
add,
/// TOK : -
/// LHS : Expr
/// RHS : Expr
sub,
/// TOK : <<
/// LHS : Expr
/// RHS : Expr
shift_left,
/// TOK : >>
/// LHS : Expr
/// RHS : Expr
shift_right,
/// TOK : &
/// LHS : Expr
/// RHS : Expr
binary_and,
/// TOK : |
/// LHS : Expr
/// RHS : Expr
binary_or,
/// TOK : ^
/// LHS : Expr
/// RHS : Expr
binary_xor,
/// TOK : &&
/// LHS : Expr
/// RHS : Expr
circuit_and,
/// TOK : ||
/// LHS : Expr
/// RHS : Expr
circuit_or,
/// TOK : !
/// LHS : Expr
/// RHS : --
not,
/// TOK : -
/// LHS : Expr
/// RHS : --
negate,
/// TOK : *
/// LHS : Expr
/// RHS : --
deref,
/// TOK : &
/// LHS : Expr
/// RHS : --
addr_of,
/// TOK : ==
/// LHS : Expr
/// RHS : Expr
equal,
/// TOK : !=
/// LHS : Expr
/// RHS : Expr
not_equal,
/// TOK : <
/// LHS : Expr
/// RHS : Expr
less,
/// TOK : <=
/// LHS : Expr
/// RHS : Expr
less_equal,
/// TOK : >
/// LHS : Expr
/// RHS : Expr
greater,
/// TOK : >=
/// LHS : Expr
/// RHS : Expr
greater_equal,
/// for an identifier, an array without an element type,
/// a vector prefix (e.g. vec2) or a matrix prefix (e.g. mat2x2), LHS is null
/// see callExpr in Parser.zig for details
///
/// TOK : ident, k_array, 'scalar keywords', 'vector keywords', 'matrix keywords'
/// LHS : (scalar_type, vector_type, matrix_type, array_type)?
/// RHS : arguments (Expr span)
call,
/// TOK : k_bitcast
/// LHS : Type
/// RHS : Expr
bitcast,
/// TOK : ident
/// LHS : --
/// RHS : --
ident_expr,
/// LHS is prefix expression
/// TOK : ident
/// LHS : Expr
component_access,
/// LHS is prefix expression
/// TOK : bracket_left
/// LHS : Expr
/// RHS : Expr
index_access,
// ####### Literals #######
/// TOK : k_true, k_false
/// LHS : --
/// RHS : --
bool_literal,
/// TOK : number
/// LHS : --
/// RHS : --
number_literal,
};
pub const GlobalVarDecl = struct {
/// span(Attr)?
attrs: Index = null_index,
/// Token(ident)
name: Index,
/// Token(AddrSpace)?
addr_space: Index = null_index,
/// Token(AccessMode)?
access_mode: Index = null_index,
/// Type?
type: Index = null_index,
};
pub const VarDecl = struct {
/// Token(ident)
name: Index,
/// Token(AddrSpace)?
addr_space: Index = null_index,
/// Token(AccessMode)?
access_mode: Index = null_index,
/// Type?
type: Index = null_index,
};
pub const OverrideDecl = struct {
/// span(Attr)?
attrs: Index = null_index,
/// Type?
type: Index = null_index,
};
pub const PtrType = struct {
/// Token(AddrSpace)
addr_space: Index,
/// Token(AccessMode)?
access_mode: Index = null_index,
};
pub const WorkgroupSize = struct {
/// Expr
x: Index,
/// Expr?
y: Index = null_index,
/// Expr?
z: Index = null_index,
};
pub const FnProto = struct {
/// span(Attr)?
attrs: Index = null_index,
/// span(fn_param)?
params: Index = null_index,
/// span(Attr)?
result_attrs: Index = null_index,
/// Type?
result_type: Index = null_index,
};
pub const IfStatement = struct {
/// Expr
cond: Index,
/// block
body: Index,
};
pub const ForHeader = struct {
/// var_decl, const_decl, let_decl, phony_assign, compound_assign
init: Index = null_index,
/// Expr
cond: Index = null_index,
/// call, phony_assign, compound_assign
update: Index = null_index,
};
};
pub const BuiltinValue = enum {
vertex_index,
instance_index,
position,
front_facing,
frag_depth,
local_invocation_id,
local_invocation_index,
global_invocation_id,
workgroup_id,
num_workgroups,
sample_index,
sample_mask,
};
pub const InterpolationType = enum {
perspective,
linear,
flat,
};
pub const InterpolationSample = enum {
center,
centroid,
sample,
};
pub const AddressSpace = enum {
function,
private,
workgroup,
uniform,
storage,
};
pub const AccessMode = enum {
read,
write,
read_write,
};
pub const Attribute = enum {
invariant,
@"const",
vertex,
fragment,
compute,
@"align",
binding,
group,
id,
location,
size,
builtin,
workgroup_size,
interpolate,
};
pub const TexelFormat = enum {
rgba8unorm,
rgba8snorm,
rgba8uint,
rgba8sint,
rgba16uint,
rgba16sint,
rgba16float,
r32uint,
r32sint,
r32float,
rg32uint,
rg32sint,
rg32float,
rgba32uint,
rgba32sint,
rgba32float,
bgra8unorm,
};
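
As a hedged sketch (not part of this commit), walking the root span with the accessors above is essentially what Analyse.analyseRoot does; node 0 is assumed to be the root span, as in that function:

const std = @import("std");
const Ast = @import("Ast.zig");

/// Hypothetical debug helper: print the tag of every top-level declaration,
/// plus the declared name for global variables via their extra data.
fn dumpRoot(tree: Ast) void {
    for (tree.spanToList(0)) |node| {
        const tag = tree.nodeTag(node);
        std.debug.print("{s}", .{@tagName(tag)});
        if (tag == .global_variable) {
            // GlobalVarDecl.name is a token index stored in the extra array.
            const decl = tree.extraData(Ast.Node.GlobalVarDecl, tree.nodeLHS(node));
            std.debug.print(" '{s}'", .{tree.tokenLoc(decl.name).slice(tree.source)});
        }
        std.debug.print("\n", .{});
    }
}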

1877
libs/dusk/src/Parser.zig Normal file

File diff suppressed because it is too large

467
libs/dusk/src/Token.zig Normal file

@@ -0,0 +1,467 @@
const std = @import("std");
tag: Tag,
loc: Loc,
pub const Loc = struct {
start: u32,
end: u32,
pub const Extra = struct {
line: u32,
col: u32,
line_start: u32,
line_end: u32,
};
pub fn slice(self: Loc, source: []const u8) []const u8 {
return source[self.start..self.end];
}
pub fn extraInfo(self: Loc, source: []const u8) Extra {
var result = Extra{
.line = 1,
.col = 1,
.line_start = 0,
.line_end = @intCast(u32, source.len),
};
for (source[0..self.start], 0..) |c, i| {
if (c == '\n') {
result.line += 1;
result.line_start = @intCast(u32, i) + 1;
}
}
for (source[self.end..], 0..) |c, i| {
if (c == '\n') {
result.line_end = self.end + @intCast(u32, i);
break;
}
}
result.col += self.start - result.line_start;
return result;
}
};
pub const Tag = enum {
eof,
invalid,
ident,
/// any number literal
number,
/// '&'
@"and",
/// '&&'
and_and,
/// '->'
arrow,
/// '@'
attr,
/// '/'
division,
/// '!'
bang,
/// '{'
brace_left,
/// '}'
brace_right,
/// '['
bracket_left,
/// ']'
bracket_right,
/// ':'
colon,
/// ','
comma,
/// '='
equal,
/// '=='
equal_equal,
/// '>'
greater_than,
/// '>='
greater_than_equal,
/// '>>'
shift_right,
/// '<'
less_than,
/// '<='
less_than_equal,
/// '<<'
shift_left,
/// '%'
mod,
/// '-'
minus,
/// '--'
minus_minus,
/// '!='
not_equal,
/// '.'
period,
/// '+'
plus,
/// '++'
plus_plus,
/// '|'
@"or",
/// '||'
or_or,
/// '('
paren_left,
/// ')'
paren_right,
/// ';'
semicolon,
/// '*'
star,
/// '~'
tilde,
/// '_'
underscore,
/// '^'
xor,
/// '+='
plus_equal,
/// '-='
minus_equal,
/// '*='
times_equal,
/// '/='
division_equal,
/// '%='
modulo_equal,
/// '&='
and_equal,
/// '|='
or_equal,
/// '^='
xor_equal,
/// '>>='
shift_right_equal,
/// '<<='
shift_left_equal,
/// 'array'
k_array,
/// 'atomic'
k_atomic,
/// 'bitcast'
k_bitcast,
/// 'bool'
k_bool,
/// 'break'
k_break,
/// 'case'
k_case,
/// 'const'
k_const,
/// 'continue'
k_continue,
/// 'continuing'
k_continuing,
/// 'discard'
k_discard,
/// 'default'
k_default,
/// 'else'
k_else,
/// 'enable'
k_enable,
/// 'f16'
k_f16,
/// 'f32'
k_f32,
/// 'fallthrough'
k_fallthrough,
/// 'false'
k_false,
/// 'fn'
k_fn,
/// 'for'
k_for,
/// 'i32'
k_i32,
/// 'if'
k_if,
/// 'let'
k_let,
/// 'loop'
k_loop,
/// 'mat2x2'
k_mat2x2,
/// 'mat2x3'
k_mat2x3,
/// 'mat2x4'
k_mat2x4,
/// 'mat3x2'
k_mat3x2,
/// 'mat3x3'
k_mat3x3,
/// 'mat3x4'
k_mat3x4,
/// 'mat4x2'
k_mat4x2,
/// 'mat4x3'
k_mat4x3,
/// 'mat4x4'
k_mat4x4,
/// 'override'
k_override,
/// 'ptr'
k_ptr,
/// 'return'
k_return,
/// 'sampler'
k_sampler,
/// 'sampler_comparison'
k_comparison_sampler,
/// 'const_assert'
k_const_assert,
/// 'struct'
k_struct,
/// 'switch'
k_switch,
/// 'texture_depth_2d'
k_texture_depth_2d,
/// 'texture_depth_2d_array'
k_texture_depth_2d_array,
/// 'texture_depth_cube'
k_texture_depth_cube,
/// 'texture_depth_cube_array'
k_texture_depth_cube_array,
/// 'texture_depth_multisampled_2d'
k_texture_depth_multisampled_2d,
/// 'texture_external'
k_texture_external,
/// 'texture_multisampled_2d'
k_texture_multisampled_2d,
/// 'texture_1d'
k_texture_sampled_1d,
/// 'texture_2d'
k_texture_sampled_2d,
/// 'texture_2d_array'
k_texture_sampled_2d_array,
/// 'texture_3d'
k_texture_sampled_3d,
/// 'texture_cube'
k_texture_sampled_cube,
/// 'texture_cube_array'
k_texture_sampled_cube_array,
/// 'texture_storage_1d'
k_texture_storage_1d,
/// 'texture_storage_2d'
k_texture_storage_2d,
/// 'texture_storage_2d_array'
k_texture_storage_2d_array,
/// 'texture_storage_3d'
k_texture_storage_3d,
/// 'true'
k_true,
/// 'type'
k_type,
/// 'u32'
k_u32,
/// 'var'
k_var,
/// 'vec2'
k_vec2,
/// 'vec3'
k_vec3,
/// 'vec4'
k_vec4,
/// 'while'
k_while,
pub fn symbol(self: Tag) []const u8 {
return switch (self) {
.eof => "EOF",
.invalid => "invalid bytes",
.ident => "an identifier",
.number => "a number literal",
.@"and" => "&",
.and_and => "&&",
.arrow => "->",
.attr => "@",
.division => "/",
.bang => "!",
.brace_left => "{",
.brace_right => "}",
.bracket_left => "[",
.bracket_right => "]",
.colon => ":",
.comma => ",",
.equal => "=",
.equal_equal => "==",
.greater_than => ">",
.greater_than_equal => ">=",
.shift_right => ">>",
.less_than => "<",
.less_than_equal => "<=",
.shift_left => "<<",
.mod => "%",
.minus => "-",
.minus_minus => "--",
.not_equal => "!=",
.period => ".",
.plus => "+",
.plus_plus => "++",
.@"or" => "|",
.or_or => "||",
.paren_left => "(",
.paren_right => ")",
.semicolon => ";",
.star => "*",
.tilde => "~",
.underscore => "_",
.xor => "^",
.plus_equal => "+=",
.minus_equal => "-=",
.times_equal => "*=",
.division_equal => "/=",
.modulo_equal => "%=",
.and_equal => "&=",
.or_equal => "|=",
.xor_equal => "^=",
.shift_right_equal => ">>=",
.shift_left_equal => "<<=",
.k_array => "array",
.k_atomic => "atomic",
.k_bitcast => "bitcast",
.k_bool => "bool",
.k_break => "break",
.k_case => "case",
.k_const => "const",
.k_continue => "continue",
.k_continuing => "continuing",
.k_discard => "discard",
.k_default => "default",
.k_else => "else",
.k_enable => "enable",
.k_f16 => "f16",
.k_f32 => "f32",
.k_fallthrough => "fallthrough",
.k_false => "false",
.k_fn => "fn",
.k_for => "for",
.k_i32 => "i32",
.k_if => "if",
.k_let => "let",
.k_loop => "loop",
.k_mat2x2 => "mat2x2",
.k_mat2x3 => "mat2x3",
.k_mat2x4 => "mat2x4",
.k_mat3x2 => "mat3x2",
.k_mat3x3 => "mat3x3",
.k_mat3x4 => "mat3x4",
.k_mat4x2 => "mat4x2",
.k_mat4x3 => "mat4x3",
.k_mat4x4 => "mat4x4",
.k_override => "override",
.k_ptr => "ptr",
.k_return => "return",
.k_sampler => "sampler",
.k_comparison_sampler => "sampler_comparison",
.k_const_assert => "const_assert",
.k_struct => "struct",
.k_switch => "switch",
.k_texture_depth_2d => "texture_depth_2d",
.k_texture_depth_2d_array => "texture_depth_2d_array",
.k_texture_depth_cube => "texture_depth_cube",
.k_texture_depth_cube_array => "texture_depth_cube_array",
.k_texture_depth_multisampled_2d => "texture_depth_multisampled_2d",
.k_texture_external => "texture_external",
.k_texture_multisampled_2d => "texture_multisampled_2d",
.k_texture_sampled_1d => "texture_1d",
.k_texture_sampled_2d => "texture_2d",
.k_texture_sampled_2d_array => "texture_2d_array",
.k_texture_sampled_3d => "texture_3d",
.k_texture_sampled_cube => "texture_cube",
.k_texture_sampled_cube_array => "texture_cube_array",
.k_texture_storage_1d => "texture_storage_1d",
.k_texture_storage_2d => "texture_storage_2d",
.k_texture_storage_2d_array => "texture_storage_2d_array",
.k_texture_storage_3d => "texture_storage_3d",
.k_true => "true",
.k_type => "type",
.k_u32 => "u32",
.k_var => "var",
.k_vec2 => "vec2",
.k_vec3 => "vec3",
.k_vec4 => "vec4",
.k_while => "while",
};
}
};
pub const keywords = std.ComptimeStringMap(Tag, .{
.{ "array", .k_array },
.{ "atomic", .k_atomic },
.{ "bitcast", .k_bitcast },
.{ "bool", .k_bool },
.{ "break", .k_break },
.{ "case", .k_case },
.{ "const", .k_const },
.{ "continue", .k_continue },
.{ "continuing", .k_continuing },
.{ "discard", .k_discard },
.{ "default", .k_default },
.{ "else", .k_else },
.{ "enable", .k_enable },
.{ "f16", .k_f16 },
.{ "f32", .k_f32 },
.{ "fallthrough", .k_fallthrough },
.{ "false", .k_false },
.{ "fn", .k_fn },
.{ "for", .k_for },
.{ "i32", .k_i32 },
.{ "if", .k_if },
.{ "let", .k_let },
.{ "loop", .k_loop },
.{ "mat2x2", .k_mat2x2 },
.{ "mat2x3", .k_mat2x3 },
.{ "mat2x4", .k_mat2x4 },
.{ "mat3x2", .k_mat3x2 },
.{ "mat3x3", .k_mat3x3 },
.{ "mat3x4", .k_mat3x4 },
.{ "mat4x2", .k_mat4x2 },
.{ "mat4x3", .k_mat4x3 },
.{ "mat4x4", .k_mat4x4 },
.{ "override", .k_override },
.{ "ptr", .k_ptr },
.{ "return", .k_return },
.{ "sampler", .k_sampler },
.{ "sampler_comparison", .k_comparison_sampler },
.{ "const_assert", .k_const_assert },
.{ "struct", .k_struct },
.{ "switch", .k_switch },
.{ "texture_depth_2d", .k_texture_depth_2d },
.{ "texture_depth_2d_array", .k_texture_depth_2d_array },
.{ "texture_depth_cube", .k_texture_depth_cube },
.{ "texture_depth_cube_array", .k_texture_depth_cube_array },
.{ "texture_depth_multisampled_2d", .k_texture_depth_multisampled_2d },
.{ "texture_external", .k_texture_external },
.{ "texture_multisampled_2d", .k_texture_multisampled_2d },
.{ "texture_1d", .k_texture_sampled_1d },
.{ "texture_2d", .k_texture_sampled_2d },
.{ "texture_2d_array", .k_texture_sampled_2d_array },
.{ "texture_3d", .k_texture_sampled_3d },
.{ "texture_cube", .k_texture_sampled_cube },
.{ "texture_cube_array", .k_texture_sampled_cube_array },
.{ "texture_storage_1d", .k_texture_storage_1d },
.{ "texture_storage_2d", .k_texture_storage_2d },
.{ "texture_storage_2d_array", .k_texture_storage_2d_array },
.{ "texture_storage_3d", .k_texture_storage_3d },
.{ "true", .k_true },
.{ "type", .k_type },
.{ "u32", .k_u32 },
.{ "var", .k_var },
.{ "vec2", .k_vec2 },
.{ "vec3", .k_vec3 },
.{ "vec4", .k_vec4 },
.{ "while", .k_while },
});
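
A small sketch (not part of this commit, assumed to sit alongside these sources) of how Loc.extraInfo can drive a line:column prefix for diagnostics; the offsets below are chosen by hand to point at the second `let`:

const std = @import("std");
const Token = @import("Token.zig");

/// Hypothetical helper: write "line:col: 'lexeme'" for a location within `source`.
fn formatLoc(writer: anytype, loc: Token.Loc, source: []const u8) !void {
    const extra = loc.extraInfo(source);
    try writer.print("{d}:{d}: '{s}'", .{ extra.line, extra.col, loc.slice(source) });
}

test "loc to line and column" {
    const source = "let x = 1;\nlet y = 2;";
    const loc = Token.Loc{ .start = 11, .end = 14 };
    var buf: [32]u8 = undefined;
    var fbs = std.io.fixedBufferStream(&buf);
    try formatLoc(fbs.writer(), loc, source);
    try std.testing.expectEqualStrings("2:1: 'let'", fbs.getWritten());
}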

417
libs/dusk/src/Tokenizer.zig Normal file

@@ -0,0 +1,417 @@
const std = @import("std");
const Token = @import("Token.zig");
const Tokenizer = @This();
source: [:0]const u8,
index: u32,
const State = enum {
start,
invalid,
ident,
underscore,
number,
block_comment,
ampersand,
bang,
equal,
greater,
shift_right,
less,
shift_left,
minus,
mod,
pipe,
plus,
slash,
star,
xor,
};
pub fn dump(self: *Tokenizer, token: Token) void {
std.debug.print("\x1b[0;33m{s} \x1b[0;90m\"{s}\"\x1b[0m\n", .{ @tagName(token.tag), token.loc.slice(self.source) });
}
pub fn init(source: [:0]const u8) Tokenizer {
// Skip the UTF-8 BOM if present
const src_start: u32 = if (std.mem.startsWith(u8, source, "\xEF\xBB\xBF")) 3 else 0;
return Tokenizer{
.source = source[src_start..],
.index = 0,
};
}
pub fn peek(self: *Tokenizer) Token {
var index = self.index;
var state = State.start;
var result = Token{
.tag = .eof,
.loc = .{
.start = index,
.end = undefined,
},
};
while (true) : (index += 1) {
const c = self.source[index];
switch (state) {
.start => switch (c) {
0 => {
if (index != self.source.len) {
result.tag = .invalid;
result.loc.start = index;
index += 1;
result.loc.end = index;
return result;
}
break;
},
' ', '\n', '\t', '\r' => result.loc.start = index + 1,
'a'...'z', 'A'...'Z' => state = .ident,
'0'...'9' => state = .number,
'&' => state = .ampersand,
'!' => state = .bang,
'=' => state = .equal,
'>' => state = .greater,
'<' => state = .less,
'-' => state = .minus,
'%' => state = .mod,
'|' => state = .pipe,
'+' => state = .plus,
'/' => state = .slash,
'*' => state = .star,
'_' => state = .underscore,
'^' => state = .xor,
'@' => {
result.tag = .attr;
index += 1;
break;
},
'[' => {
result.tag = .bracket_left;
index += 1;
break;
},
']' => {
result.tag = .bracket_right;
index += 1;
break;
},
'{' => {
result.tag = .brace_left;
index += 1;
break;
},
'}' => {
result.tag = .brace_right;
index += 1;
break;
},
':' => {
result.tag = .colon;
index += 1;
break;
},
',' => {
result.tag = .comma;
index += 1;
break;
},
'(' => {
result.tag = .paren_left;
index += 1;
break;
},
')' => {
result.tag = .paren_right;
index += 1;
break;
},
'.' => {
result.tag = .period;
index += 1;
break;
},
';' => {
result.tag = .semicolon;
index += 1;
break;
},
'~' => {
result.tag = .tilde;
index += 1;
break;
},
else => {
state = .invalid;
result.tag = .invalid;
},
},
.invalid => break,
.ident => switch (c) {
'a'...'z', 'A'...'Z', '0'...'9', '_' => {},
else => {
result.tag = .ident;
if (Token.keywords.get(self.source[result.loc.start..index])) |tag| {
result.tag = tag;
}
break;
},
},
.underscore => switch (c) { // TODO: handle leading double underscore `__`, see https://www.w3.org/TR/WGSL/#identifiers
'a'...'z', 'A'...'Z', '_', '0'...'9' => state = .ident,
else => {
result.tag = .underscore;
break;
},
},
.number => switch (c) {
'0'...'9', '.', 'i', 'u', 'f', 'h', 'e', '-', '+' => {},
else => {
result.tag = .number;
break;
},
},
.block_comment => switch (c) {
0 => break,
'\n' => {
state = .start;
result.loc.start = index + 1;
},
else => {},
},
.ampersand => switch (c) {
'&' => {
result.tag = .and_and;
index += 1;
break;
},
'=' => {
result.tag = .and_equal;
index += 1;
break;
},
else => {
result.tag = .@"and";
break;
},
},
.bang => switch (c) {
'=' => {
result.tag = .not_equal;
index += 1;
break;
},
else => {
result.tag = .bang;
break;
},
},
.equal => switch (c) {
'=' => {
result.tag = .equal_equal;
index += 1;
break;
},
else => {
result.tag = .equal;
break;
},
},
.greater => switch (c) {
'>' => state = .shift_right,
'=' => {
result.tag = .greater_than_equal;
index += 1;
break;
},
else => {
result.tag = .greater_than;
break;
},
},
.shift_right => switch (c) {
'=' => {
result.tag = .shift_right_equal;
index += 1;
break;
},
else => {
result.tag = .shift_right;
break;
},
},
.less => switch (c) {
'<' => state = .shift_left,
'=' => {
result.tag = .less_than_equal;
index += 1;
break;
},
else => {
result.tag = .less_than;
break;
},
},
.shift_left => switch (c) {
'=' => {
result.tag = .shift_left_equal;
index += 1;
break;
},
else => {
result.tag = .shift_left;
break;
},
},
.minus => switch (c) {
'-' => {
result.tag = .minus_minus;
index += 1;
break;
},
'=' => {
result.tag = .minus_equal;
index += 1;
break;
},
'>' => {
result.tag = .arrow;
index += 1;
break;
},
else => {
result.tag = .minus;
break;
},
},
.mod => switch (c) {
'=' => {
result.tag = .modulo_equal;
index += 1;
break;
},
else => {
result.tag = .mod;
break;
},
},
.pipe => switch (c) {
'|' => {
result.tag = .or_or;
index += 1;
break;
},
'=' => {
result.tag = .or_equal;
index += 1;
break;
},
else => {
result.tag = .@"or";
break;
},
},
.plus => switch (c) {
'+' => {
result.tag = .plus_plus;
index += 1;
break;
},
'=' => {
result.tag = .plus_equal;
index += 1;
break;
},
else => {
result.tag = .plus;
break;
},
},
.slash => switch (c) {
'/' => state = .block_comment,
'=' => {
result.tag = .division_equal;
index += 1;
break;
},
else => {
result.tag = .division;
break;
},
},
.star => switch (c) {
'=' => {
result.tag = .times_equal;
index += 1;
break;
},
else => {
result.tag = .star;
break;
},
},
.xor => switch (c) {
'=' => {
result.tag = .xor_equal;
index += 1;
break;
},
else => {
result.tag = .xor;
break;
},
},
}
}
result.loc.end = index;
return result;
}
pub fn next(self: *Tokenizer) Token {
const tok = self.peek();
self.index = tok.loc.end;
return tok;
}
test "tokenize identifier and numbers" {
comptime var str: [:0]const u8 =
\\_ __ _iden iden 100.8i // cc
\\// comment
\\
;
var tokenizer = Tokenizer.init(str);
try std.testing.expect(tokenizer.next().tag == .underscore);
try std.testing.expect(tokenizer.next().tag == .ident);
try std.testing.expect(tokenizer.next().tag == .ident);
try std.testing.expect(tokenizer.next().tag == .ident);
try std.testing.expect(tokenizer.next().tag == .number);
try std.testing.expect(tokenizer.next().tag == .eof);
}
test "tokenize other" {
comptime var str: [:0]const u8 = "";
inline for (std.meta.fields(Token.Tag), 0..) |field, i| comptime {
if (i > 3) {
str = str ++ " " ++ (Token.Tag.symbol(@intToEnum(Token.Tag, field.value)));
}
};
var tokenizer = Tokenizer.init(str);
comptime var i = 4; // skip eof, invalid, ident and number
inline while (i < std.meta.fields(Token.Tag).len) : (i += 1) {
const tag = @intToEnum(Token.Tag, i);
try std.testing.expect(tokenizer.next().tag == tag);
}
try std.testing.expect(tokenizer.next().tag == .eof);
}
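
A usage sketch (not part of this commit): peek computes a token without touching self.index, while next advances past the returned token, so repeated peeks see the same token:

const std = @import("std");
const Tokenizer = @import("Tokenizer.zig");

test "peek does not consume" {
    var tokenizer = Tokenizer.init("var x = 1;");
    try std.testing.expect(tokenizer.peek().tag == .k_var);
    try std.testing.expect(tokenizer.peek().tag == .k_var); // still the same token
    try std.testing.expect(tokenizer.next().tag == .k_var); // now consumed
    try std.testing.expect(tokenizer.next().tag == .ident); // 'x'
    // Drain the rest up to EOF.
    while (tokenizer.next().tag != .eof) {}
}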

61
libs/dusk/src/main.zig Normal file

@@ -0,0 +1,61 @@
const std = @import("std");
pub const Ast = @import("Ast.zig");
pub const Analyse = @import("Analyse.zig");
pub const Parser = @import("Parser.zig");
pub const Token = @import("Token.zig");
pub const Tokenizer = @import("Tokenizer.zig");
pub const Extension = enum {
f16,
pub const Array = std.enums.EnumArray(Extension, bool);
};
pub const ErrorMsg = struct {
loc: Token.Loc,
msg: []const u8,
note: ?Note,
pub const Note = struct {
loc: ?Token.Loc,
msg: []const u8,
pub fn create(
allocator: std.mem.Allocator,
loc: ?Token.Loc,
comptime format: []const u8,
args: anytype,
) !Note {
return .{
.loc = loc,
.msg = try std.fmt.allocPrint(allocator, comptime format, args),
};
}
pub fn deinit(note: *Note, allocator: std.mem.Allocator) void {
allocator.free(note.msg);
note.* = undefined;
}
};
pub fn create(
allocator: std.mem.Allocator,
loc: Token.Loc,
comptime format: []const u8,
args: anytype,
note: ?Note,
) !ErrorMsg {
return .{
.loc = loc,
.msg = try std.fmt.allocPrint(allocator, comptime format, args),
.note = note,
};
}
pub fn deinit(err_msg: *ErrorMsg, allocator: std.mem.Allocator) void {
if (err_msg.note) |*note| note.*.deinit(allocator);
allocator.free(err_msg.msg);
err_msg.* = undefined;
}
};
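
A minimal sketch (not part of this commit, assumed to sit next to main.zig) of building and tearing down an ErrorMsg with an attached note via the helpers above; the format string mirrors the one used by Analyse.checkRedeclaration:

const std = @import("std");
const Token = @import("Token.zig");
const ErrorMsg = @import("main.zig").ErrorMsg;

test "error message with note" {
    const allocator = std.testing.allocator;
    const loc = Token.Loc{ .start = 0, .end = 3 };
    const note = try ErrorMsg.Note.create(allocator, null, "other declaration here", .{});
    var err = try ErrorMsg.create(allocator, loc, "redeclaration of '{s}'", .{"foo"}, note);
    defer err.deinit(allocator); // also frees the note's message
    try std.testing.expectEqualStrings("redeclaration of 'foo'", err.msg);
}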