//! AstGen: lowers the parsed `Ast` into `Ir` instructions.
const std = @import("std");
|
|
const Ast = @import("Ast.zig");
|
|
const Ir = @import("Ir.zig");
|
|
const Story = @import("Story.zig");
|
|
const StringIndexAdapter = std.hash_map.StringIndexAdapter;
|
|
const StringIndexContext = std.hash_map.StringIndexContext;
|
|
const assert = std.debug.assert;
|
|
const AstGen = @This();
|
|
|
|
gpa: std.mem.Allocator,
tree: *const Ast,
/// String interning table: each key is a byte offset into `string_bytes`
/// where a null-terminated string starts; the context hashes those bytes.
string_table: std.HashMapUnmanaged(u32, void, StringIndexContext, std.hash_map.default_max_load_percentage) = .empty,
/// Backing storage for interned strings and error messages.
/// Offset 0 is reserved for Ir.NullTerminatedString.empty (see `generate`).
string_bytes: std.ArrayListUnmanaged(u8) = .empty,
// NOTE(review): only deinitialized in this chunk — confirm usage elsewhere.
globals: std.ArrayListUnmanaged(Ir.Inst.Index) = .empty,
/// All IR instructions produced so far, addressed by Ir.Inst.Index.
instructions: std.ArrayListUnmanaged(Ir.Inst) = .empty,
/// Variable-length trailing data for instructions (see addExtra/setExtra).
extra: std.ArrayListUnmanaged(u32) = .empty,
// NOTE(review): unused in this chunk — confirm usage elsewhere.
scratch: std.ArrayListUnmanaged(u32) = .empty,
/// Accumulated semantic errors; serialized into `extra` at the end of
/// `generate`.
compile_errors: std.ArrayListUnmanaged(Ir.Inst.CompileErrors.Item) = .empty,
|
|
|
|
/// Errors that lowering can produce.
/// NOTE(review): the producers of InvalidCharacter/Overflow are not visible
/// in this chunk — presumably std.fmt parsing in helpers defined elsewhere.
pub const InnerError = error{
    OutOfMemory,
    SemanticError,
    InvalidCharacter,
    Overflow,
};
|
|
|
|
/// Splat an IR data struct into the `extra` array, growing it as needed.
/// Returns the starting index of the record in `extra`.
fn addExtra(astgen: *AstGen, extra: anytype) !u32 {
    const field_count = std.meta.fields(@TypeOf(extra)).len;
    try astgen.extra.ensureUnusedCapacity(astgen.gpa, field_count);
    return addExtraAssumeCapacity(astgen, extra);
}
|
|
|
|
/// Splat an IR data struct into the `extra` array.
/// Capacity must have been reserved beforehand; returns the starting index
/// of the record in `extra`.
fn addExtraAssumeCapacity(astgen: *AstGen, extra: anytype) u32 {
    const fields = std.meta.fields(@TypeOf(extra));
    const extra_index: u32 = @intCast(astgen.extra.items.len);
    astgen.extra.items.len += fields.len;
    setExtra(astgen, extra_index, extra);
    return extra_index;
}
|
|
|
|
/// Write each field of `extra` into `astgen.extra` starting at `index`.
/// Enum-typed fields are stored as their integer representation; any other
/// field type is a compile error.
fn setExtra(astgen: *AstGen, index: usize, extra: anytype) void {
    const fields = std.meta.fields(@TypeOf(extra));
    var i = index;
    inline for (fields) |field| {
        astgen.extra.items[i] = switch (field.type) {
            u32 => @field(extra, field.name),
            Ir.Inst.Index => @intFromEnum(@field(extra, field.name)),
            Ir.Inst.Ref => @intFromEnum(@field(extra, field.name)),
            Ir.NullTerminatedString => @intFromEnum(@field(extra, field.name)),
            else => @compileError("bad field type"),
        };
        i += 1;
    }
}
|
|
|
|
/// Append a block body (instruction indices) to `astgen.extra`.
/// Capacity must already be reserved.
fn appendBlockBody(astgen: *AstGen, body: []const Ir.Inst.Index) void {
    appendBlockBodyArrayList(astgen, &astgen.extra, body);
}
|
|
|
|
/// Append each instruction index of `body` to `list` as a raw u32.
/// Capacity must already be reserved; the AstGen parameter exists only for
/// signature symmetry with appendBlockBody.
fn appendBlockBodyArrayList(
    _: *AstGen,
    list: *std.ArrayListUnmanaged(u32),
    body: []const Ir.Inst.Index,
) void {
    for (body) |index| list.appendAssumeCapacity(@intFromEnum(index));
}
|
|
/// Record a compile error positioned at the start of `node`'s source range.
fn appendErrorNode(
    astgen: *AstGen,
    node: *const Ast.Node,
    comptime format: []const u8,
    args: anytype,
) error{OutOfMemory}!void {
    const byte_offset: u32 = @intCast(node.loc.start);
    return appendErrorToken(astgen, byte_offset, format, args);
}
|
|
|
|
/// Format an error message into `string_bytes` (null-terminated) and record
/// it, together with its source byte offset, in `compile_errors`.
fn appendErrorToken(
    astgen: *AstGen,
    byte_offset: u32,
    comptime format: []const u8,
    args: anytype,
) error{OutOfMemory}!void {
    const gpa = astgen.gpa;
    const string_bytes = &astgen.string_bytes;
    // The message starts at the current end of the string pool.
    const msg: Ir.NullTerminatedString = @enumFromInt(string_bytes.items.len);
    try string_bytes.print(gpa, format ++ "\x00", args);

    try astgen.compile_errors.append(gpa, .{
        .msg = msg,
        .byte_offset = byte_offset,
    });
}
|
|
|
|
/// Record a compile error at `node` and return error.SemanticError.
fn fail(
    astgen: *AstGen,
    node: *const Ast.Node,
    comptime format: []const u8,
    args: anytype,
) error{ SemanticError, OutOfMemory } {
    try appendErrorNode(astgen, node, format, args);
    return error.SemanticError;
}
|
|
|
|
/// Convert the parser errors stored on the tree into `compile_errors`
/// entries, reusing one allocating writer for all messages.
fn lowerAstErrors(astgen: *AstGen) error{OutOfMemory}!void {
    const gpa = astgen.gpa;
    var msg: std.Io.Writer.Allocating = .init(gpa);
    defer msg.deinit();
    const w = &msg.writer;

    for (astgen.tree.errors) |err| {
        // An allocating writer can only fail on allocation, so collapse
        // renderError's error set to OutOfMemory.
        astgen.tree.renderError(w, err) catch return error.OutOfMemory;
        try appendErrorToken(
            astgen,
            @intCast(err.loc.start),
            "{s}",
            .{msg.written()},
        );
        // Reuse the buffer for the next message.
        msg.clearRetainingCapacity();
    }
}
|
|
|
|
/// Return the null-terminated string stored in `string_bytes` at `str`.
/// Asserts (via `.?`) that a 0 terminator exists past the offset.
fn nullTerminatedString(astgen: *AstGen, str: Ir.NullTerminatedString) [:0]const u8 {
    const slice = astgen.string_bytes.items[@intFromEnum(str)..];
    return slice[0..std.mem.indexOfScalar(u8, slice, 0).? :0];
}
|
|
|
|
/// An interned string: its pool offset plus its byte length
/// (length excludes the 0 terminator).
const IndexSlice = struct {
    index: Ir.NullTerminatedString,
    len: u32,
};
|
|
|
|
/// Intern `bytes` into `string_bytes`, deduplicating via `string_table`.
/// The bytes are appended speculatively, then rolled back if an identical
/// string was already interned.
fn strFromSlice(astgen: *AstGen, bytes: []const u8) error{OutOfMemory}!IndexSlice {
    const gpa = astgen.gpa;
    const string_bytes = &astgen.string_bytes;
    const str_index: u32 = @intCast(string_bytes.items.len);
    try string_bytes.appendSlice(gpa, bytes);

    // Hash the just-appended candidate against the existing pool offsets.
    const key: []const u8 = string_bytes.items[str_index..];
    const gop = try astgen.string_table.getOrPutContextAdapted(gpa, key, StringIndexAdapter{
        .bytes = string_bytes,
    }, StringIndexContext{
        .bytes = string_bytes,
    });
    if (gop.found_existing) {
        // Duplicate: discard the speculative append, reuse the old offset.
        string_bytes.shrinkRetainingCapacity(str_index);
        return .{
            .index = @enumFromInt(gop.key_ptr.*),
            .len = @intCast(key.len),
        };
    } else {
        // New string: record its offset and null-terminate it.
        gop.key_ptr.* = str_index;
        try string_bytes.append(gpa, 0);
        return .{
            .index = @enumFromInt(str_index),
            .len = @intCast(key.len),
        };
    }
}
|
|
|
|
/// Intern the source bytes covered by `node`.
fn strFromNode(astgen: *AstGen, node: *const Ast.Node) !IndexSlice {
    return strFromSlice(astgen, astgen.tree.nodeSlice(node));
}
|
|
|
|
/// Perform IR code generation via tree-walk.
/// On success the returned Ir owns `instructions`, `string_bytes`, and
/// `extra`; compile errors (if any) are serialized into `extra` at
/// Ir.ExtraIndex.compile_errors, and `instructions` is empty when the
/// errors were fatal.
pub fn generate(gpa: std.mem.Allocator, tree: *const Ast) !Ir {
    var astgen: AstGen = .{
        .gpa = gpa,
        .tree = tree,
    };
    defer astgen.deinit();

    // First entry is reserved for Ir.NullTerminatedString.empty.
    try astgen.string_bytes.append(gpa, 0);

    var instructions: std.ArrayListUnmanaged(Ir.Inst.Index) = .empty;
    defer instructions.deinit(gpa);
    // NOTE(review): file_scope.decls is never deinitialized here — confirm
    // `file()` takes ownership, otherwise this leaks on the success path.
    var file_scope: Scope = .{
        .parent = null,
        .decls = .empty,
        .astgen = &astgen,
    };
    var block: GenIr = .{
        .astgen = &astgen,
        .instructions = &instructions,
        .instructions_top = 0,
    };
    defer block.unstack();

    // Reserve one `extra` slot per Ir.ExtraIndex entry; patched below.
    const reserved_extra_count = @typeInfo(Ir.ExtraIndex).@"enum".fields.len;
    try astgen.extra.ensureTotalCapacity(gpa, reserved_extra_count);
    astgen.extra.items.len += reserved_extra_count;

    // `fatal` means the IR body is unusable: either the parser already
    // reported errors, or lowering hit a SemanticError.
    const fatal = if (tree.errors.len == 0) fatal: {
        // TODO: Make sure this is never null.
        file(&block, &file_scope, tree.root) catch |err| switch (err) {
            error.OutOfMemory => return error.OutOfMemory,
            error.SemanticError => break :fatal true,
            else => |e| return e,
        };
        break :fatal false;
    } else fatal: {
        try lowerAstErrors(&astgen);
        break :fatal true;
    };

    // Serialize accumulated errors into `extra`, or record 0 for "none".
    const err_index = @intFromEnum(Ir.ExtraIndex.compile_errors);
    if (astgen.compile_errors.items.len == 0) {
        astgen.extra.items[err_index] = 0;
    } else {
        const extra_len = 1 + astgen.compile_errors.items.len *
            @typeInfo(Ir.Inst.CompileErrors.Item).@"struct".fields.len;
        try astgen.extra.ensureUnusedCapacity(gpa, extra_len);

        astgen.extra.items[err_index] = astgen.addExtraAssumeCapacity(Ir.Inst.CompileErrors{
            .items_len = @intCast(astgen.compile_errors.items.len),
        });
        for (astgen.compile_errors.items) |item| {
            _ = astgen.addExtraAssumeCapacity(item);
        }
    }
    return .{
        // On fatal errors the instruction list stays owned by astgen and is
        // freed by the deferred deinit.
        .instructions = if (fatal) &.{} else try astgen.instructions.toOwnedSlice(gpa),
        .string_bytes = try astgen.string_bytes.toOwnedSlice(gpa),
        .extra = try astgen.extra.toOwnedSlice(gpa),
    };
}
|
|
|
|
/// Free all AstGen-owned buffers. Safe to run after `toOwnedSlice` handed
/// some of them to the caller (those lists are then empty).
fn deinit(astgen: *AstGen) void {
    const gpa = astgen.gpa;
    astgen.string_table.deinit(gpa);
    astgen.string_bytes.deinit(gpa);
    astgen.globals.deinit(gpa);
    astgen.instructions.deinit(gpa);
    astgen.extra.deinit(gpa);
    astgen.scratch.deinit(gpa);
    astgen.compile_errors.deinit(gpa);
}
|
|
|
|
/// A block of IR under construction. Instruction indices are appended to a
/// shared stack; `instructions_top` marks where this block's own
/// instructions begin, so nested blocks can be built on the same buffer
/// and "unstacked" once their bodies are copied into `extra`.
const GenIr = struct {
    astgen: *AstGen,
    /// Shared instruction-index stack; this block owns the items from
    /// `instructions_top` to the end.
    instructions: *std.ArrayListUnmanaged(Ir.Inst.Index),
    /// Start of this block's instructions, or `unstacked_top` once the
    /// block has been finalized.
    instructions_top: usize,

    /// Sentinel marking a block whose instructions were already consumed.
    const unstacked_top = std.math.maxInt(usize);

    /// Pop this block's instructions off the shared stack (idempotent).
    fn unstack(self: *GenIr) void {
        if (self.instructions_top != unstacked_top) {
            self.instructions.items.len = self.instructions_top;
            self.instructions_top = unstacked_top;
        }
    }

    /// True if the block is unstacked or has no instructions of its own.
    fn isEmpty(self: *const GenIr) bool {
        return (self.instructions_top == unstacked_top) or
            (self.instructions.items.len == self.instructions_top);
    }

    /// This block's instructions (empty when unstacked).
    fn instructionsSlice(self: *const GenIr) []Ir.Inst.Index {
        return if (self.instructions_top == unstacked_top)
            &[0]Ir.Inst.Index{}
        else
            self.instructions.items[self.instructions_top..];
    }

    /// This block's instructions up to (not including) those belonging to
    /// `stacked_block`, a block stacked on top of this one.
    fn instructionsSliceUpto(
        self: *const GenIr,
        stacked_block: *const GenIr,
    ) []Ir.Inst.Index {
        return if (self.instructions_top == unstacked_top)
            &[0]Ir.Inst.Index{}
        else if (self.instructions == stacked_block.instructions and
            stacked_block.instructions_top != unstacked_top)
            self.instructions.items[self.instructions_top..stacked_block.instructions_top]
        else
            self.instructions.items[self.instructions_top..];
    }

    /// True if the most recently appended instruction never returns.
    fn endsWithNoReturn(self: *GenIr) bool {
        if (self.isEmpty()) return false;
        const last_inst_index = self.instructions.items[self.instructions.items.len - 1];
        const last_inst = self.astgen.instructions.items[@intFromEnum(last_inst_index)];
        return last_inst.isNoReturn();
    }

    /// Start a nested block on the same instruction stack.
    fn makeSubBlock(self: *GenIr) GenIr {
        return .{
            .astgen = self.astgen,
            .instructions = self.instructions,
            .instructions_top = self.instructions.items.len,
        };
    }

    /// Append an instruction and return it as a Ref.
    fn add(gi: *GenIr, inst: Ir.Inst) !Ir.Inst.Ref {
        return (try gi.addAsIndex(inst)).toRef();
    }

    /// Append an instruction to both the global list and this block,
    /// returning its index. Capacity for both appends is reserved first so
    /// the two lists cannot fall out of sync on OOM.
    fn addAsIndex(gi: *GenIr, inst: Ir.Inst) !Ir.Inst.Index {
        const gpa = gi.astgen.gpa;
        try gi.instructions.ensureUnusedCapacity(gpa, 1);
        try gi.astgen.instructions.ensureUnusedCapacity(gpa, 1);

        const new_index: Ir.Inst.Index = @enumFromInt(gi.astgen.instructions.items.len);
        gi.astgen.instructions.appendAssumeCapacity(inst);
        gi.instructions.appendAssumeCapacity(new_index);
        return new_index;
    }

    /// Append an integer constant instruction.
    fn addInt(gi: *GenIr, value: i64) !Ir.Inst.Ref {
        return add(gi, .{ .tag = .int, .data = .{
            .int = value,
        } });
    }

    /// Append a float constant instruction.
    fn addFloat(gi: *GenIr, value: f64) !Ir.Inst.Ref {
        return add(gi, .{ .tag = .float, .data = .{
            .float = value,
        } });
    }

    /// Append a single-operand instruction.
    fn addUnaryNode(gi: *GenIr, tag: Ir.Inst.Tag, arg: Ir.Inst.Ref) !Ir.Inst.Ref {
        return add(gi, .{ .tag = tag, .data = .{
            .un = .{ .lhs = arg },
        } });
    }

    /// Append a two-operand instruction.
    fn addBinaryNode(
        gi: *GenIr,
        tag: Ir.Inst.Tag,
        lhs: Ir.Inst.Ref,
        rhs: Ir.Inst.Ref,
    ) !Ir.Inst.Ref {
        return add(gi, .{ .tag = tag, .data = .{
            .bin = .{ .lhs = lhs, .rhs = rhs },
        } });
    }

    /// Append a string constant referencing the interned pool.
    fn addStr(
        gi: *GenIr,
        str: Ir.NullTerminatedString,
        str_len: usize,
    ) !Ir.Inst.Ref {
        assert(str_len <= std.math.maxInt(u32));
        return add(gi, .{ .tag = .str, .data = .{
            .str = .{ .start = str, .len = @intCast(str_len) },
        } });
    }

    /// Append an instruction pairing an interned string with a source byte
    /// offset.
    fn addStrTok(
        block: *GenIr,
        tag: Ir.Inst.Tag,
        str_index: Ir.NullTerminatedString,
        byte_offset: usize,
    ) !Ir.Inst.Ref {
        assert(byte_offset <= std.math.maxInt(u32));
        return block.add(.{
            .tag = tag,
            .data = .{ .str_tok = .{
                .start = str_index,
                .src_offset = @intCast(byte_offset),
            } },
        });
    }

    /// Append an instruction whose operands live in `extra`.
    fn addPayloadNode(
        gen: *GenIr,
        tag: Ir.Inst.Tag,
        node: *const Ast.Node,
        extra: anytype,
    ) !Ir.Inst.Ref {
        const gpa = gen.astgen.gpa;
        try gen.instructions.ensureUnusedCapacity(gpa, 1);
        try gen.astgen.instructions.ensureUnusedCapacity(gpa, 1);

        const extra_index = try gen.astgen.addExtra(extra);
        const new_index: Ir.Inst.Index = @enumFromInt(gen.astgen.instructions.items.len);
        gen.astgen.instructions.appendAssumeCapacity(.{
            .tag = tag,
            .data = .{ .payload = .{
                .extra_index = extra_index,
                .src_offset = @intCast(node.loc.start),
            } },
        });
        gen.instructions.appendAssumeCapacity(new_index);
        return new_index.toRef();
    }

    /// Append a payload instruction whose extra record was already written.
    fn addPayloadNodeWithIndex(
        gen: *GenIr,
        tag: Ir.Inst.Tag,
        node: *const Ast.Node,
        extra_index: u32,
    ) !Ir.Inst.Ref {
        return gen.add(.{ .tag = tag, .data = .{
            .payload = .{
                .extra_index = extra_index,
                .src_offset = @intCast(node.loc.start),
            },
        } });
    }

    /// Append a conditional-branch placeholder; its payload is filled in
    /// later by setCondBrPayload.
    fn addCondBr(gen: *GenIr, tag: Ir.Inst.Tag) !Ir.Inst.Index {
        const gpa = gen.astgen.gpa;
        try gen.instructions.ensureUnusedCapacity(gpa, 1);
        try gen.astgen.instructions.ensureUnusedCapacity(gpa, 1);

        const new_index: Ir.Inst.Index = @enumFromInt(gen.astgen.instructions.items.len);
        gen.astgen.instructions.appendAssumeCapacity(.{
            .tag = tag,
            // Payload intentionally undefined until branch bodies exist.
            .data = .{ .payload = undefined },
        });
        gen.instructions.appendAssumeCapacity(new_index);
        return new_index;
    }

    /// Append a break instruction targeting `block_inst`.
    fn addBreak(
        gen: *GenIr,
        tag: Ir.Inst.Tag,
        node: *const Ast.Node,
        block_inst: Ir.Inst.Index,
    ) !Ir.Inst.Ref {
        const gpa = gen.astgen.gpa;
        const extra_len = @typeInfo(Ir.Inst.Break).@"struct".fields.len;
        try gen.astgen.extra.ensureUnusedCapacity(gpa, extra_len);

        const extra_index = gen.astgen.addExtraAssumeCapacity(
            Ir.Inst.Break{ .block_inst = block_inst },
        );
        return gen.addPayloadNodeWithIndex(tag, node, extra_index);
    }

    /// Reserve a payload instruction in the global list only; the caller
    /// appends it to a block and fills the payload in later.
    fn makePayloadNode(self: *GenIr, tag: Ir.Inst.Tag) !Ir.Inst.Index {
        const astgen = self.astgen;
        const inst_index: Ir.Inst.Index = @enumFromInt(astgen.instructions.items.len);
        try astgen.instructions.append(astgen.gpa, .{
            .tag = tag,
            .data = .{
                .payload = undefined,
            },
        });
        return inst_index;
    }

    /// Write this block's instructions into `extra` as the body of `inst`,
    /// then unstack this block.
    fn setBlockBody(self: *GenIr, inst: Ir.Inst.Index) !void {
        const gpa = self.astgen.gpa;
        const body = self.instructionsSlice();
        // Reserve header + body together so the writes cannot fail halfway.
        const extra_len = @typeInfo(Ir.Inst.Block).@"struct".fields.len + body.len;
        try self.astgen.extra.ensureUnusedCapacity(gpa, extra_len);

        const inst_data = &self.astgen.instructions.items[@intFromEnum(inst)].data;
        inst_data.payload.extra_index = self.astgen.addExtraAssumeCapacity(
            Ir.Inst.Block{ .body_len = @intCast(body.len) },
        );
        self.astgen.appendBlockBody(body);
        self.unstack();
    }
};
|
|
|
|
/// Lexical scope: a chain of name → declaration maps consulted during
/// lowering, innermost first.
const Scope = struct {
    parent: ?*Scope,
    astgen: *AstGen,
    /// Keyed by interned name.
    decls: std.AutoHashMapUnmanaged(Ir.NullTerminatedString, Decl),

    const Decl = struct {
        decl_node: *const Ast.Node,
        inst_index: Ir.Inst.Index,
    };

    fn deinit(self: *Scope) void {
        const gpa = self.astgen.gpa;
        self.decls.deinit(gpa);
    }

    /// Create an empty child scope chained to `parent_scope`.
    fn makeChild(parent_scope: *Scope) Scope {
        return .{
            .parent = parent_scope,
            .astgen = parent_scope.astgen,
            .decls = .empty,
        };
    }

    /// Record a declaration in this scope.
    /// NOTE(review): `put` silently overwrites an existing entry, so
    /// duplicate declarations are not diagnosed here — confirm callers
    /// check beforehand.
    fn insert(self: *Scope, ref: Ir.NullTerminatedString, decl: Decl) !void {
        const gpa = self.astgen.gpa;
        return self.decls.put(gpa, ref, decl);
    }

    /// Look `ref` up in this scope and then each ancestor; the innermost
    /// match wins.
    fn lookup(self: *Scope, ref: Ir.NullTerminatedString) ?Decl {
        var current_scope: ?*Scope = self;
        while (current_scope) |scope| : (current_scope = scope.parent) {
            const result = scope.decls.get(ref);
            if (result) |symbol| return symbol;
        }
        return null;
    }
};
|
|
|
|
/// Fill in the payload of an already-reserved declaration instruction.
/// Flag bit 0x01 marks a `const` declaration (from a .const_decl node).
fn setDeclaration(
    decl_index: Ir.Inst.Index,
    args: struct {
        name: Ir.NullTerminatedString,
        value: Ir.Inst.Index,
        gi: *GenIr,
        node: *const Ast.Node,
    },
) !void {
    const astgen = args.gi.astgen;
    const extra_len = @typeInfo(Ir.Inst.Declaration).@"struct".fields.len;
    try astgen.extra.ensureUnusedCapacity(astgen.gpa, extra_len);

    const inst_data = &astgen.instructions.items[@intFromEnum(decl_index)].data;
    inst_data.payload = .{
        .src_offset = @intCast(args.node.loc.start),
        .extra_index = astgen.addExtraAssumeCapacity(Ir.Inst.Declaration{
            .name = args.name,
            .value = args.value,
            .flags = if (args.node.tag == .const_decl) 0x01 else 0x00,
        }),
    };
}
|
|
|
|
/// Finish a `var` declaration: write its Var payload and initializer body
/// into `extra`, then unstack `body_block`.
fn setDeclVarPayload(
    decl_index: Ir.Inst.Index,
    body_block: *GenIr,
    node: *const Ast.Node,
) !void {
    defer body_block.unstack();

    const astgen = body_block.astgen;
    const body = body_block.instructionsSlice();
    // Reserve header + body together so the writes cannot fail halfway.
    const extra_len = @typeInfo(Ir.Inst.Var).@"struct".fields.len + body.len;
    try astgen.extra.ensureUnusedCapacity(astgen.gpa, extra_len);

    const inst_data = &astgen.instructions.items[@intFromEnum(decl_index)].data;
    inst_data.payload = .{
        .src_offset = @intCast(node.loc.start),
        .extra_index = astgen.addExtraAssumeCapacity(Ir.Inst.Var{
            .body_len = @intCast(body.len),
        }),
    };
    astgen.appendBlockBody(body);
}
|
|
|
|
/// Finish a stitch declaration: write its Stitch payload and body into
/// `extra`, then unstack `body_block`.
fn setDeclStitchPayload(decl_index: Ir.Inst.Index, body_block: *GenIr) !void {
    defer body_block.unstack();

    const astgen = body_block.astgen;
    const block_body = body_block.instructionsSlice();
    // Reserve header + body together so the writes below cannot fail
    // halfway. The header size must come from Ir.Inst.Stitch — the record
    // actually written below — not Ir.Inst.Knot (previous copy-paste slip
    // from setDeclKnotPayload).
    const extra_len = @typeInfo(Ir.Inst.Stitch).@"struct".fields.len + block_body.len;
    try astgen.extra.ensureUnusedCapacity(astgen.gpa, extra_len);

    const inst_data = &astgen.instructions.items[@intFromEnum(decl_index)].data;
    inst_data.payload.extra_index = astgen.addExtraAssumeCapacity(
        Ir.Inst.Stitch{
            .body_len = @intCast(block_body.len),
        },
    );

    astgen.appendBlockBody(block_body);
}
|
|
|
|
/// Finish a knot declaration: write its Knot payload, body, and trailing
/// stitch list into `extra`, then unstack both blocks.
/// `stitches_block` must be stacked on top of `body_block`.
fn setDeclKnotPayload(
    decl_index: Ir.Inst.Index,
    body_block: *GenIr,
    stitches_block: *GenIr,
) !void {
    defer body_block.unstack();
    defer stitches_block.unstack();

    const astgen = body_block.astgen;
    // The knot body is everything below the stitches on the shared stack.
    const block_body = body_block.instructionsSliceUpto(stitches_block);
    const stitches_body = stitches_block.instructionsSlice();
    const extra_len =
        @typeInfo(Ir.Inst.Knot).@"struct".fields.len + block_body.len + stitches_body.len;
    try astgen.extra.ensureUnusedCapacity(astgen.gpa, extra_len);

    const inst_data = &astgen.instructions.items[@intFromEnum(decl_index)].data;
    inst_data.payload.extra_index = astgen.addExtraAssumeCapacity(
        Ir.Inst.Knot{
            .body_len = @intCast(block_body.len),
            .stitches_len = @intCast(stitches_body.len),
        },
    );

    astgen.appendBlockBody(block_body);
    astgen.appendBlockBody(stitches_body);
}
|
|
|
|
/// Fill in a condbr placeholder: write the condition, then-body, and
/// else-body into `extra`, then unstack both branch blocks.
/// `else_block` must be stacked on top of `then_block`.
fn setCondBrPayload(
    condbr: Ir.Inst.Index,
    cond: Ir.Inst.Ref,
    then_block: *GenIr,
    else_block: *GenIr,
) !void {
    defer then_block.unstack();
    defer else_block.unstack();
    const astgen = then_block.astgen;
    const then_body = then_block.instructionsSliceUpto(else_block);
    const else_body = else_block.instructionsSlice();
    const then_body_len = then_body.len;
    const else_body_len = else_body.len;
    // Reserve header + both bodies so the writes cannot fail halfway.
    const extra_len =
        @typeInfo(Ir.Inst.CondBr).@"struct".fields.len + then_body_len + else_body_len;
    try astgen.extra.ensureUnusedCapacity(astgen.gpa, extra_len);

    const inst_data = &astgen.instructions.items[@intFromEnum(condbr)].data;
    inst_data.payload.extra_index = astgen.addExtraAssumeCapacity(
        Ir.Inst.CondBr{
            .condition = cond,
            .then_body_len = @intCast(then_body_len),
            .else_body_len = @intCast(else_body_len),
        },
    );

    astgen.appendBlockBody(then_body);
    astgen.appendBlockBody(else_body);
}
|
|
|
|
/// Lower a unary expression: evaluate the operand (stored in `bin.lhs`),
/// then emit a single-operand instruction with tag `op`.
fn unaryOp(
    gi: *GenIr,
    scope: *Scope,
    expr_node: *const Ast.Node,
    op: Ir.Inst.Tag,
) InnerError!Ir.Inst.Ref {
    const operand = try expr(gi, scope, expr_node.data.bin.lhs.?);
    return gi.addUnaryNode(op, operand);
}
|
|
|
|
/// Lower a binary expression: evaluate lhs then rhs (in that order), then
/// emit a two-operand instruction with tag `op`.
fn binaryOp(
    gi: *GenIr,
    scope: *Scope,
    expr_node: *const Ast.Node,
    op: Ir.Inst.Tag,
) InnerError!Ir.Inst.Ref {
    const operands = expr_node.data.bin;
    const left = try expr(gi, scope, operands.lhs.?);
    const right = try expr(gi, scope, operands.rhs.?);
    return gi.addBinaryNode(op, left, right);
}
|
|
|
|
/// Classify and parse `bytes` as a number literal: ASCII digits with at
/// most one '.' (a '.' makes it a float). Returns a tagged result instead
/// of an error so callers can report the failing byte offset.
fn parseNumberLiteral(bytes: []const u8) union(enum) {
    int: i64,
    float: f64,
    failure: union(enum) {
        duplicate_period: usize,
        invalid_character: usize,
        /// The digit string does not fit in an i64.
        overflow,
    },
} {
    var is_float = false;
    var saw_digit = false;

    for (bytes, 0..) |c, i| {
        switch (c) {
            '.' => {
                // A second '.' is a malformed float.
                if (is_float) return .{ .failure = .{ .duplicate_period = i } };
                is_float = true;
            },
            '0'...'9' => saw_digit = true,
            else => return .{ .failure = .{ .invalid_character = i } },
        }
    }
    // Reject "" and "." — previously these fell through to the parsers
    // below and hit `unreachable` (illegal behavior in release builds).
    if (!saw_digit) return .{ .failure = .{ .invalid_character = 0 } };

    if (is_float) {
        const value = std.fmt.parseFloat(f64, bytes) catch |err| switch (err) {
            // All bytes verified above: digits plus exactly one '.'.
            error.InvalidCharacter => unreachable,
        };
        return .{ .float = value };
    } else {
        const value = std.fmt.parseInt(i64, bytes, 10) catch |err| switch (err) {
            error.InvalidCharacter => unreachable, // digits only, checked above
            // A long digit string can legitimately exceed i64; report it as
            // a failure instead of invoking illegal behavior (the previous
            // `unreachable` here was reachable).
            error.Overflow => return .{ .failure = .overflow },
        };
        return .{ .int = value };
    }
}
|
|
|
|
/// Lower a number literal node to an int or float constant instruction;
/// malformed literals become a semantic error.
fn numberLiteral(block: *GenIr, node: *const Ast.Node) InnerError!Ir.Inst.Ref {
    const astgen = block.astgen;
    const lexeme = astgen.tree.nodeSlice(node);
    switch (parseNumberLiteral(lexeme)) {
        .int => |int| return block.addInt(int),
        .float => |float| return block.addFloat(float),
        // TODO: exact offset reporting
        .failure => return fail(block.astgen, node, "invalid number literal", .{}),
    }
}
|
|
|
|
/// Intern the node's source bytes and emit a string constant instruction.
fn stringLiteral(gi: *GenIr, node: *const Ast.Node) InnerError!Ir.Inst.Ref {
    const interned = try gi.astgen.strFromNode(node);
    return gi.addStr(interned.index, interned.len);
}
|
|
|
|
/// Lower a string expression node.
/// NOTE(review): only the first (lhs) child is lowered; if string_expr can
/// carry additional parts, they are silently dropped — confirm against the
/// parser.
fn stringExpr(gen: *GenIr, expr_node: *const Ast.Node) InnerError!Ir.Inst.Ref {
    const first_node = expr_node.data.bin.lhs.?;
    return stringLiteral(gen, first_node);
}
|
|
|
|
/// Lower an identifier: a name found in scope loads its declaration's
/// instruction; otherwise a by-name decl_ref is emitted for later
/// resolution.
fn identifier(
    block: *GenIr,
    scope: *Scope,
    node: *const Ast.Node,
) InnerError!Ir.Inst.Ref {
    const str = try block.astgen.strFromNode(node);
    if (scope.lookup(str.index)) |decl| {
        return block.addUnaryNode(.load, decl.inst_index.toRef());
    }
    return block.addStrTok(.decl_ref, str.index, node.loc.start);
}
|
|
|
|
/// Lower an expression node, dispatching on its tag.
/// Asserts the node is non-null and is an expression-position tag; every
/// statement/declaration tag is unreachable here because it is handled by
/// a dedicated lowering function.
fn expr(gi: *GenIr, scope: *Scope, optional_node: ?*const Ast.Node) InnerError!Ir.Inst.Ref {
    const node = optional_node.?;
    switch (node.tag) {
        .file => unreachable,
        // Literals.
        .true_literal => return .bool_true,
        .false_literal => return .bool_false,
        .number_literal => return numberLiteral(gi, node),
        .string_literal => return stringLiteral(gi, node),
        .string_expr => return stringExpr(gi, node),
        .empty_string => return stringLiteral(gi, node),
        .identifier => return identifier(gi, scope, node),
        // Arithmetic.
        .add_expr => return binaryOp(gi, scope, node, .add),
        .subtract_expr => return binaryOp(gi, scope, node, .sub),
        .multiply_expr => return binaryOp(gi, scope, node, .mul),
        .divide_expr => return binaryOp(gi, scope, node, .div),
        .mod_expr => return binaryOp(gi, scope, node, .mod),
        .negate_expr => return unaryOp(gi, scope, node, .neg),
        // Boolean logic and comparisons.
        .logical_not_expr => return unaryOp(gi, scope, node, .not),
        .logical_and_expr => return binaryOp(gi, scope, node, .bool_and),
        .logical_or_expr => return binaryOp(gi, scope, node, .bool_or),
        .logical_equality_expr => return binaryOp(gi, scope, node, .cmp_eq),
        .logical_inequality_expr => return binaryOp(gi, scope, node, .cmp_neq),
        .logical_greater_expr => return binaryOp(gi, scope, node, .cmp_gt),
        .logical_greater_or_equal_expr => return binaryOp(gi, scope, node, .cmp_gte),
        .logical_lesser_expr => return binaryOp(gi, scope, node, .cmp_lt),
        .logical_lesser_or_equal_expr => return binaryOp(gi, scope, node, .cmp_lte),
        // Calls and field access.
        .call_expr => return callExpr(gi, scope, node, .call),
        .choice_expr => unreachable,
        .choice_start_expr => unreachable,
        .choice_option_expr => unreachable,
        .choice_inner_expr => unreachable,
        .divert_expr => unreachable,
        .selector_expr => return fieldAccess(gi, scope, node),
        // Statements — lowered by dedicated functions, never reach here.
        .assign_stmt => unreachable,
        .block_stmt => unreachable,
        .content_stmt => unreachable,
        .divert_stmt => unreachable,
        .return_stmt => unreachable,
        .expr_stmt => unreachable,
        .choice_stmt => unreachable,
        .choice_star_stmt => unreachable,
        .choice_plus_stmt => unreachable,
        .gather_point_stmt => unreachable,
        .gathered_stmt => unreachable,
        // Declarations and prototypes — likewise handled elsewhere.
        .function_prototype => unreachable,
        .stitch_prototype => unreachable,
        .knot_prototype => unreachable,
        .function_decl => unreachable,
        .stitch_decl => unreachable,
        .knot_decl => unreachable,
        .const_decl => unreachable,
        .var_decl => unreachable,
        .list_decl => unreachable,
        .temp_decl => unreachable,
        .parameter_decl => unreachable,
        .ref_parameter_decl => unreachable,
        .argument_list => unreachable,
        .parameter_list => unreachable,
        .switch_stmt => unreachable, // Handled in switchStmt
        .switch_case => unreachable, // Handled in switchStmt
        .if_stmt => unreachable, // Handled in ifStmt
        .multi_if_stmt => unreachable, // Handled in multiIfStmt
        .if_branch => unreachable, // Handled in ifStmt and multiIfStmt
        .else_branch => unreachable, // Handled in switchStmt, multiIfStmt, and ifStmt
        .content => unreachable,
        .inline_logic_expr => unreachable,
        .invalid => unreachable,
    }
}
|
|
|
|
/// Lower an expression statement; a statement with no expression yields
/// `.none`.
fn exprStmt(gi: *GenIr, scope: *Scope, node: *const Ast.Node) InnerError!Ir.Inst.Ref {
    const payload = node.data.bin.lhs orelse return .none;
    return expr(gi, scope, payload);
}
|
|
|
|
/// Lower an inline-logic expression; an empty one yields `.none`.
fn inlineLogicExpr(gi: *GenIr, scope: *Scope, node: *const Ast.Node) InnerError!Ir.Inst.Ref {
    const inner = node.data.bin.lhs orelse return .none;
    return expr(gi, scope, inner);
}
|
|
|
|
/// Validate prong ordering for switch/if/multi-if statements: at most one
/// else prong, and only in the final position.
fn validateSwitchProngs(gen: *GenIr, stmt_node: *const Ast.Node) InnerError!void {
    const astgen = gen.astgen;
    var stmt_has_block: bool = false;
    var stmt_has_else: bool = false;
    const case_list = stmt_node.data.switch_stmt.cases;
    // NOTE(review): assumes at least one prong; an empty `cases` slice
    // would index out of bounds here — confirm the parser guarantees this.
    const last_prong = case_list[case_list.len - 1];
    for (case_list) |case_stmt| {
        switch (case_stmt.tag) {
            .block_stmt => stmt_has_block = true,
            .switch_case, .if_branch => {
                if (stmt_has_block) {
                    // Check currently disabled.
                    //return gen.fail(.expected_else, case_stmt);
                }
            },
            .else_branch => {
                // Pointer identity: else must be the final prong.
                if (case_stmt != last_prong) {
                    return fail(astgen, case_stmt, "invalid else stmt", .{});
                }
                if (stmt_has_else) {
                    return fail(astgen, case_stmt, "duplicate else stmt", .{});
                }
                stmt_has_else = true;
            },
            inline else => |tag| @panic("Unexpected node " ++ @tagName(tag)),
        }
    }
}
|
|
|
|
/// Lower an if statement to a block instruction containing the condition
/// and a conditional branch. The statement's prongs live in
/// `switch_stmt.cases`: the first is the then-branch; a trailing else
/// prong, if present, is the last.
fn ifStmt(
    parent_block: *GenIr,
    scope: *Scope,
    stmt_node: *const Ast.Node,
) InnerError!Ir.Inst.Ref {
    const astgen = parent_block.astgen;
    const cond_expr = stmt_node.data.switch_stmt.condition_expr.?;
    try validateSwitchProngs(parent_block, stmt_node);

    const case_list = stmt_node.data.switch_stmt.cases;
    const then_node = case_list[0];
    const last_prong = case_list[case_list.len - 1];

    // The condition and condbr get their own block instruction.
    var block_scope = parent_block.makeSubBlock();
    defer block_scope.unstack();

    const cond_inst = try expr(&block_scope, scope, cond_expr);
    const condbr = try block_scope.addCondBr(.condbr);
    const block = try parent_block.makePayloadNode(.block);
    try block_scope.setBlockBody(block); // unstacks block_scope
    try parent_block.instructions.append(astgen.gpa, block);

    var then_block = parent_block.makeSubBlock();
    defer then_block.unstack();

    try blockStmt(&then_block, scope, then_node);
    _ = try then_block.addBreak(.@"break", then_node, block);

    var else_block = parent_block.makeSubBlock();
    defer else_block.unstack();

    if (then_node == last_prong) {
        // No else prong: else body is just a break out of the block.
        _ = try else_block.addBreak(.@"break", then_node, block);
    } else {
        // NOTE(review): unlike the then body, the else body gets no
        // trailing break — confirm this is intended.
        const block_node = last_prong.data.bin.rhs.?;
        try blockStmt(&else_block, scope, block_node);
    }

    try setCondBrPayload(condbr, cond_inst, &then_block, &else_block);
    return condbr.toRef();
}
|
|
|
|
/// Recursively lower a chain of if/else-if branches.
/// branch_list[0] becomes condition + condbr; the remaining branches form
/// the else body. A branch with a null condition is an unconditional else.
/// The returned Ref is a placeholder (0) and is not meaningful.
fn ifChain(
    parent_block: *GenIr,
    scope: *Scope,
    branch_list: []const *Ast.Node,
) InnerError!Ir.Inst.Ref {
    const gpa = parent_block.astgen.gpa;
    if (branch_list.len == 0) return @enumFromInt(0);
    if (branch_list[0].data.bin.lhs == null) {
        // Unconditional else: emit its body inline and stop recursing.
        const body_node = branch_list[0].data.bin.rhs.?;
        try blockStmt(parent_block, scope, body_node);
        return @enumFromInt(0);
    }

    var block_scope = parent_block.makeSubBlock();
    defer block_scope.unstack();

    const branch = branch_list[0];
    const cond_expr = branch.data.bin.lhs.?;
    const body_node = branch.data.bin.rhs.?;
    const cond_inst = try expr(&block_scope, scope, cond_expr);
    const condbr = try block_scope.addCondBr(.condbr);
    const block_inst = try parent_block.makePayloadNode(.block);
    try block_scope.setBlockBody(block_inst); // unstacks block_scope
    try parent_block.instructions.append(gpa, block_inst);

    var then_block = parent_block.makeSubBlock();
    defer then_block.unstack();
    try blockStmt(&then_block, scope, body_node);
    _ = try then_block.addBreak(.@"break", body_node, block_inst);

    var else_block = parent_block.makeSubBlock();
    defer else_block.unstack();
    const next_branches = branch_list[1..];
    // The recursive call appends to the shared stack above else_block's
    // top, so its instructions become part of the else body.
    _ = try ifChain(parent_block, scope, next_branches);
    _ = try else_block.addBreak(.@"break", body_node, block_inst);
    try setCondBrPayload(condbr, cond_inst, &then_block, &else_block);
    return @enumFromInt(0);
}
|
|
|
|
/// Lower a multi-branch if statement via ifChain.
fn multiIfStmt(
    parent_block: *GenIr,
    scope: *Scope,
    stmt_node: *const Ast.Node,
) InnerError!Ir.Inst.Ref {
    try validateSwitchProngs(parent_block, stmt_node);

    const branch_list = stmt_node.data.switch_stmt.cases;
    // A leading condition-less branch is unconditional. ifChain performs
    // the same check, so this is effectively a fast path.
    if (branch_list[0].data.bin.lhs == null) {
        const branch = branch_list[0];
        const body_node = branch.data.bin.rhs.?;
        try blockStmt(parent_block, scope, body_node);
        return .none;
    }
    _ = try ifChain(parent_block, scope, branch_list);
    return .none;
}
|
|
|
|
/// Lower a switch statement to a switch_br instruction.
/// All prongs except the last are value cases; the last prong is treated
/// as the else branch (ordering enforced by validateSwitchProngs).
fn switchStmt(
    parent_block: *GenIr,
    scope: *Scope,
    stmt_node: *const Ast.Node,
) InnerError!Ir.Inst.Ref {
    const astgen = parent_block.astgen;
    const gpa = astgen.gpa;
    const switch_stmt = stmt_node.data.switch_stmt;

    try validateSwitchProngs(parent_block, stmt_node);

    const cond_inst = try expr(parent_block, scope, switch_stmt.condition_expr);
    const switch_br = try parent_block.makePayloadNode(.switch_br);
    // Collect each case's extra-record index; the indexes trail the
    // SwitchBr record in `extra`.
    var case_indexes: std.ArrayListUnmanaged(u32) = .empty;
    try case_indexes.ensureUnusedCapacity(gpa, switch_stmt.cases.len);
    defer case_indexes.deinit(gpa);

    // TODO: Length checks.
    const switch_cases = switch_stmt.cases[0 .. switch_stmt.cases.len - 1];
    for (switch_cases) |case_stmt| {
        // TODO: Maybe make this non-nullable
        const case_expr = case_stmt.data.bin.lhs.?;
        // Case operands are restricted to literal values.
        const operand: Ir.Inst.Ref = switch (case_expr.tag) {
            .number_literal => try numberLiteral(parent_block, case_expr),
            .true_literal => .bool_true,
            .false_literal => .bool_false,
            else => return fail(astgen, case_expr, "invalid switch case", .{}),
        };
        var case_block = parent_block.makeSubBlock();
        defer case_block.unstack();
        _ = try blockStmt(&case_block, scope, case_stmt.data.bin.rhs.?);
        _ = try case_block.addBreak(.@"break", case_stmt, switch_br);

        const body = case_block.instructionsSlice();
        const case_extra_len = @typeInfo(Ir.Inst.SwitchBr.Case).@"struct".fields.len + body.len;
        try astgen.extra.ensureUnusedCapacity(gpa, case_extra_len);

        const extra_index = astgen.addExtraAssumeCapacity(
            Ir.Inst.SwitchBr.Case{
                .operand = operand,
                .body_len = @intCast(body.len),
            },
        );
        astgen.appendBlockBody(body);
        case_indexes.appendAssumeCapacity(extra_index);
    }

    try parent_block.instructions.append(gpa, switch_br);

    // The final prong is the else body.
    const else_branch = switch_stmt.cases[switch_stmt.cases.len - 1];
    var case_block = parent_block.makeSubBlock();
    defer case_block.unstack();
    _ = try blockStmt(&case_block, scope, else_branch.data.bin.rhs.?);
    _ = try case_block.addBreak(.@"break", else_branch, switch_br);

    const else_body = case_block.instructionsSlice();
    const extra_len =
        @typeInfo(Ir.Inst.SwitchBr).@"struct".fields.len + case_indexes.items.len + else_body.len;
    try astgen.extra.ensureUnusedCapacity(gpa, extra_len);

    // Patch the reserved switch_br payload: SwitchBr record, then the case
    // extra-indexes, then the else body.
    astgen.instructions.items[@intFromEnum(switch_br)].data.payload = .{
        .extra_index = astgen.addExtraAssumeCapacity(
            Ir.Inst.SwitchBr{
                .operand = cond_inst,
                .cases_len = @intCast(switch_cases.len),
                .else_body_len = @intCast(else_body.len),
            },
        ),
        .src_offset = @intCast(stmt_node.loc.start),
    };
    astgen.extra.appendSliceAssumeCapacity(case_indexes.items[0..]);
    astgen.appendBlockBody(else_body);
    return switch_br.toRef();
}
|
|
|
|
/// Lowers the children of a content node. Literal and inline-logic children
/// are evaluated and pushed via `content_push`; nested control-flow children
/// are lowered in place.
/// FIXME: This is a placeholder until we figure out what this function should be returning.
fn contentExpr(block: *GenIr, scope: *Scope, node: *const Ast.Node) InnerError!Ir.Inst.Ref {
    // TODO: Make sure that this is not nullable.
    for (node.data.list.items) |child| {
        switch (child.tag) {
            .string_literal, .inline_logic_expr => {
                const value = if (child.tag == .string_literal)
                    try stringLiteral(block, child)
                else
                    try inlineLogicExpr(block, scope, child);
                _ = try block.addUnaryNode(.content_push, value);
            },
            .if_stmt => _ = try ifStmt(block, scope, child),
            .multi_if_stmt => _ = try multiIfStmt(block, scope, child),
            .switch_stmt => _ = try switchStmt(block, scope, child),
            else => unreachable,
        }
    }
    return .none;
}
|
|
|
|
/// Lowers a content statement: evaluate the content expression, then emit a
/// `content_flush` for whatever it pushed.
fn contentStmt(gen: *GenIr, scope: *Scope, node: *const Ast.Node) InnerError!Ir.Inst.Ref {
    const content_node = node.data.bin.lhs.?;
    const content_ref = try contentExpr(gen, scope, content_node);
    return gen.addUnaryNode(.content_flush, content_ref);
}
|
|
|
|
/// Lowers `name = expr` into a `store` through the local declared for
/// `name`. Reports a semantic error when the identifier is not in scope.
fn assignStmt(gi: *GenIr, scope: *Scope, node: *const Ast.Node) InnerError!void {
    const astgen = gi.astgen;
    const target_node = node.data.bin.lhs.?;
    const value_node = node.data.bin.rhs.?;
    const name = try astgen.strFromNode(target_node);

    // TODO: Support globals as well
    const decl = scope.lookup(name.index) orelse
        return fail(astgen, target_node, "unknown identifier", .{});

    const value = try expr(gi, scope, value_node);
    _ = try gi.addBinaryNode(.store, decl.inst_index.toRef(), value);
}
|
|
|
|
/// Lowers one text segment of a choice. Currently every segment is a plain
/// string literal; the scope parameter is unused.
fn choiceStarStmt(gi: *GenIr, _: *Scope, node: *const Ast.Node) InnerError!Ir.Inst.Ref {
    const text = try stringLiteral(gi, node);
    return text;
}
|
|
|
|
/// Lowers a choice statement into a `choice_br` instruction.
///
/// `extra` layout: each branch's `ChoiceBr.Case` payload (immediately
/// followed by its body) is emitted while the branches are generated;
/// afterwards the `ChoiceBr` header is written, followed by the list of
/// case extra-indexes. The `choice_br` instruction itself is reserved up
/// front and its payload is back-patched at the end.
fn choiceStmt(
    parent_block: *GenIr,
    scope: *Scope,
    stmt_node: *const Ast.Node,
) InnerError!void {
    const astgen = parent_block.astgen;
    const gpa = astgen.gpa;
    const data = stmt_node.data.list;
    const choice_br = try parent_block.makePayloadNode(.choice_br);
    var case_indexes: std.ArrayListUnmanaged(u32) = .empty;
    try case_indexes.ensureUnusedCapacity(gpa, data.items.len);
    defer case_indexes.deinit(gpa);

    for (data.items) |branch_stmt| {
        assert(branch_stmt.tag == .choice_star_stmt or branch_stmt.tag == .choice_plus_stmt);
        const branch_data = branch_stmt.data.bin;
        const branch_expr = branch_data.lhs.?.data.choice_expr;
        // The three optional segments of a choice (start/option/inner);
        // `.none` when absent.
        var op_1: Ir.Inst.Ref = .none;
        var op_2: Ir.Inst.Ref = .none;
        var op_3: Ir.Inst.Ref = .none;

        if (branch_expr.start_expr) |node| {
            op_1 = try choiceStarStmt(parent_block, scope, node);
        }
        if (branch_expr.option_expr) |node| {
            op_2 = try choiceStarStmt(parent_block, scope, node);
        }
        if (branch_expr.inner_expr) |node| {
            op_3 = try choiceStarStmt(parent_block, scope, node);
        }

        var sub_block = parent_block.makeSubBlock();
        defer sub_block.unstack();
        if (branch_data.rhs) |branch_body| {
            _ = try blockStmt(&sub_block, scope, branch_body);
        }

        const body = sub_block.instructionsSlice();
        // BUGFIX: size the reservation with ChoiceBr.Case — the struct that
        // is actually written below. The previous code used SwitchBr.Case
        // (operand + body_len) here, which has fewer fields than
        // ChoiceBr.Case (three operands + body_len), so the `extra` list was
        // under-reserved and addExtraAssumeCapacity could append past the
        // ensured capacity.
        const case_extra_len = @typeInfo(Ir.Inst.ChoiceBr.Case).@"struct".fields.len + body.len;
        try astgen.extra.ensureUnusedCapacity(gpa, case_extra_len);
        // Case payload, immediately followed by its body instructions.
        const extra_index = astgen.addExtraAssumeCapacity(
            Ir.Inst.ChoiceBr.Case{
                .operand_1 = op_1,
                .operand_2 = op_2,
                .operand_3 = op_3,
                .body_len = @intCast(body.len),
            },
        );
        astgen.appendBlockBody(body);
        case_indexes.appendAssumeCapacity(extra_index);
    }

    try parent_block.instructions.append(gpa, choice_br);
    const extra_len = @typeInfo(Ir.Inst.ChoiceBr).@"struct".fields.len + case_indexes.items.len;
    try astgen.extra.ensureUnusedCapacity(gpa, extra_len);

    // Back-patch the reserved choice_br payload now that all cases exist.
    astgen.instructions.items[@intFromEnum(choice_br)].data.payload = .{
        .extra_index = astgen.addExtraAssumeCapacity(
            Ir.Inst.ChoiceBr{
                .cases_len = @intCast(data.items.len),
            },
        ),
        .src_offset = @intCast(stmt_node.loc.start),
    };
    astgen.extra.appendSliceAssumeCapacity(case_indexes.items[0..]);
}
|
|
|
|
/// Result of resolving the callee part of a call or divert expression
/// (see `calleeExpr`).
const Callee = union(enum) {
    /// Method-call syntax `a.b()`: the object pointer plus the method name.
    field: struct {
        obj_ptr: Ir.Inst.Ref,
        /// Offset into `string_bytes`.
        field_name_start: Ir.NullTerminatedString,
    },
    /// Plain call `f()`: the callee value itself.
    direct: Ir.Inst.Ref,
};
|
|
|
|
/// Lowers `lhs.field` into a `field_ptr` instruction whose payload holds the
/// object and the field name's offset into `string_bytes`.
fn fieldAccess(gi: *GenIr, scope: *Scope, node: *const Ast.Node) InnerError!Ir.Inst.Ref {
    assert(node.tag == .selector_expr);
    const selector = node.data.bin;
    const field_node = selector.rhs.?;
    assert(field_node.tag == .identifier);
    const field_name = try gi.astgen.strFromNode(field_node);
    const object = try expr(gi, scope, selector.lhs.?);

    return gi.addPayloadNode(.field_ptr, field_node, Ir.Inst.Field{
        .lhs = object,
        .field_name_start = field_name.index,
    });
}
|
|
|
|
/// calleeExpr generates the function part of a call expression (f in f(x)),
/// but *not* the callee argument for the call. Its job is to tell standard
/// calls apart from method-call syntax `a.b()`: a selector lhs produces the
/// `field` union member, a plain identifier produces `direct`.
fn calleeExpr(gi: *GenIr, scope: *Scope, node: *const Ast.Node) InnerError!Callee {
    switch (node.tag) {
        .selector_expr => {
            const selector = node.data.bin;
            const method_node = selector.rhs.?;
            assert(method_node.tag == .identifier);

            const method_name = try gi.astgen.strFromNode(method_node);
            const receiver = try expr(gi, scope, selector.lhs.?);
            return .{ .field = .{
                .obj_ptr = receiver,
                .field_name_start = method_name.index,
            } };
        },
        .identifier => return .{ .direct = try expr(gi, scope, node) },
        else => unreachable,
    }
}
|
|
|
|
/// Lowers `f(x, ...)` as either a call or a divert, selected at comptime by
/// `call`. Each argument is generated into its own sub-block; the bodies are
/// staged in `scratch` after `args_len` reserved slots, and each slot is
/// back-filled with the cumulative length of the bodies staged so far (an
/// end-offset table). The header plus the staged region is then copied into
/// `extra`.
fn callExpr(
    gi: *GenIr,
    scope: *Scope,
    node: *const Ast.Node,
    comptime call: enum { divert, call },
) !Ir.Inst.Ref {
    const astgen = gi.astgen;
    const gpa = astgen.gpa;
    const data = node.data.bin;
    const callee_node = data.lhs.?;
    const callee = try calleeExpr(gi, scope, callee_node);

    // Everything staged above scratch_top is released on exit.
    const scratch_top = astgen.scratch.items.len;
    defer astgen.scratch.shrinkRetainingCapacity(scratch_top);

    // FIXME: List nodes should not have optional slices.
    // This hack is an abomination.
    const arguments: ?[]*Ast.Node = if (data.rhs) |args_node| args_node.data.list.items else null;
    const args_count = if (arguments) |args| args.len else 0;

    // Reserve one slot per argument; filled with cumulative lengths below.
    try astgen.scratch.resize(gpa, scratch_top + args_count);
    var scratch_index = scratch_top;

    if (arguments) |args| {
        for (args) |arg| {
            var arg_block = gi.makeSubBlock();
            defer arg_block.unstack();

            _ = try expr(&arg_block, scope, arg);

            const body = arg_block.instructionsSlice();
            try astgen.scratch.ensureUnusedCapacity(gpa, body.len);
            appendBlockBodyArrayList(astgen, &astgen.scratch, body);

            // Cumulative length of all staged data so far (slots + bodies),
            // relative to scratch_top.
            astgen.scratch.items[scratch_index] = @intCast(astgen.scratch.items.len - scratch_top);
            scratch_index += 1;
        }
    }
    switch (callee) {
        .direct => |callee_obj| {
            // `call` is comptime, so `tag` resolves at compile time.
            const tag = if (call == .divert) .divert else .call;
            const extra_index = try addExtra(astgen, Ir.Inst.Call{
                .callee = callee_obj,
                .args_len = @intCast(args_count),
            });
            if (args_count != 0) {
                try astgen.extra.appendSlice(gpa, astgen.scratch.items[scratch_top..]);
            }
            return gi.addPayloadNodeWithIndex(tag, callee_node, extra_index);
        },
        .field => |callee_field| {
            const tag = if (call == .divert) .field_divert else .field_call;
            const extra_index = try addExtra(astgen, Ir.Inst.FieldCall{
                .obj_ptr = callee_field.obj_ptr,
                .field_name_start = callee_field.field_name_start,
                .args_len = @intCast(args_count),
            });
            if (args_count != 0) {
                try astgen.extra.appendSlice(gpa, astgen.scratch.items[scratch_top..]);
            }
            return gi.addPayloadNodeWithIndex(tag, callee_node, extra_index);
        },
    }
}
|
|
|
|
/// Lowers a divert target. `-> DONE` and `-> END` identifiers map to the
/// `done`/`exit` instructions; any other identifier or selector becomes an
/// argument-less `divert`/`field_divert`; a call expression is lowered as a
/// divert-with-args via `callExpr`.
///
/// CLEANUP: the `.identifier` and `.selector_expr` arms previously carried
/// two verbatim copies of the `switch (callee)` lowering; they are merged
/// here with identical behavior.
fn divertExpr(gi: *GenIr, scope: *Scope, node: *const Ast.Node) !void {
    // FIXME: The AST should always have an args list for these nodes.
    // FIXME: Oh God, the AST is completely fucked for this.
    const lhs = node.data.bin.lhs.?;
    switch (lhs.tag) {
        .identifier, .selector_expr => {
            if (lhs.tag == .identifier) {
                // TODO: Revisit this
                // The special targets DONE and END terminate instead of
                // diverting.
                const str_slice = gi.astgen.tree.nodeSlice(lhs);
                if (std.mem.eql(u8, str_slice, "DONE")) {
                    _ = try gi.addUnaryNode(.done, .none);
                    return;
                } else if (std.mem.eql(u8, str_slice, "END")) {
                    _ = try gi.addUnaryNode(.exit, .none);
                    return;
                }
            }
            // A bare divert target is an argument-less call.
            const callee = try calleeExpr(gi, scope, lhs);
            switch (callee) {
                .direct => |callee_obj| {
                    _ = try gi.addPayloadNode(.divert, lhs, Ir.Inst.Call{
                        .callee = callee_obj,
                        .args_len = 0,
                    });
                },
                .field => |callee_field| {
                    _ = try gi.addPayloadNode(.field_divert, lhs, Ir.Inst.FieldCall{
                        .obj_ptr = callee_field.obj_ptr,
                        .field_name_start = callee_field.field_name_start,
                        .args_len = 0,
                    });
                },
            }
        },
        .call_expr => _ = try callExpr(gi, scope, lhs, .divert),
        else => unreachable,
    }
}
|
|
|
|
/// Lowers a divert statement by delegating to `divertExpr` on its target.
fn divertStmt(gi: *GenIr, scope: *Scope, node: *const Ast.Node) !void {
    // TODO: Revisit this.
    const target = node.data.bin.lhs.?;
    return divertExpr(gi, scope, target);
}
|
|
|
|
/// Lowers a return statement. A value-less return uses `.none` as the
/// operand of the `ret` instruction.
fn returnStmt(gi: *GenIr, scope: *Scope, node: *const Ast.Node) !void {
    // TODO: Revisit this.
    var operand: Ir.Inst.Ref = .none;
    if (node.data.bin.lhs) |value_node| {
        operand = try expr(gi, scope, value_node);
    }
    _ = try gi.addUnaryNode(.ret, operand);
}
|
|
|
|
/// Lowers a temp declaration: reject duplicates, allocate a slot, evaluate
/// the initializer, store it, and bind the name in the current scope.
fn tempDecl(gi: *GenIr, scope: *Scope, decl_node: *const Ast.Node) !void {
    const astgen = gi.astgen;
    const name_node = decl_node.data.bin.lhs.?;
    const init_node = decl_node.data.bin.rhs.?;
    const name = try astgen.strFromNode(name_node);

    // Redeclaring an existing name is a semantic error.
    if (scope.lookup(name.index) != null) {
        return fail(astgen, decl_node, "duplicate identifier", .{});
    }

    const slot = try gi.add(.{ .tag = .alloc, .data = undefined });
    const init_value = try expr(gi, scope, init_node);
    _ = try gi.addBinaryNode(.store, slot, init_value);

    return scope.insert(name.index, .{
        .decl_node = decl_node,
        .inst_index = slot.toIndex().?,
    });
}
|
|
|
|
/// Lowers a global variable/constant declaration: a `declaration` wrapper
/// whose body holds a `decl_var` instruction followed by the lowered
/// initializer. The declaration is also recorded in `globals`, which `file`
/// later appends to the file body.
fn varDecl(gi: *GenIr, scope: *Scope, decl_node: *const Ast.Node) !void {
    const astgen = gi.astgen;
    const gpa = astgen.gpa;
    const identifier_node = decl_node.data.bin.lhs.?;
    const expr_node = decl_node.data.bin.rhs.?;
    // Reserved now; its payload is filled in by setDeclaration below.
    const decl_inst = try gi.makePayloadNode(.declaration);

    var decl_block = gi.makeSubBlock();
    defer decl_block.unstack();

    const var_inst = try decl_block.makePayloadNode(.decl_var);
    _ = try expr(&decl_block, scope, expr_node);
    const name_str = try astgen.strFromNode(identifier_node);

    try setDeclVarPayload(var_inst, &decl_block, identifier_node);
    try setDeclaration(decl_inst, .{
        .name = name_str.index,
        .value = var_inst,
        .gi = gi,
        .node = decl_node,
    });
    try astgen.globals.append(gpa, decl_inst);
}
|
|
|
|
/// Lowers a statement list inside a fresh child scope, then appends an
/// `implicit_ret` when control can still fall off the end of the block.
fn blockInner(gi: *GenIr, parent_scope: *Scope, stmt_list: []*Ast.Node) !void {
    var child_scope = parent_scope.makeChild();
    defer child_scope.deinit();

    for (stmt_list) |node| {
        switch (node.tag) {
            .var_decl, .const_decl => try varDecl(gi, &child_scope, node),
            .temp_decl => try tempDecl(gi, &child_scope, node),
            .assign_stmt => try assignStmt(gi, &child_scope, node),
            .content_stmt => _ = try contentStmt(gi, &child_scope, node),
            .choice_stmt => try choiceStmt(gi, &child_scope, node),
            .expr_stmt => _ = try exprStmt(gi, &child_scope, node),
            .divert_stmt => try divertStmt(gi, &child_scope, node),
            .return_stmt => try returnStmt(gi, &child_scope, node),
            inline else => |e| @panic("Unexpected node: " ++ @tagName(e)),
        }
    }
    if (!gi.endsWithNoReturn()) {
        _ = try gi.addUnaryNode(.implicit_ret, .none);
    }
}
|
|
|
|
/// Lowers a block statement by delegating to `blockInner` on its statement
/// list.
fn blockStmt(block: *GenIr, scope: *Scope, node: *const Ast.Node) InnerError!void {
    // TODO: Make sure that this value is concrete to omit check.
    return blockInner(block, scope, node.data.list.items);
}
|
|
|
|
/// Wraps the file's leading top-level block in an implicit knot declaration
/// named `Story.default_knot_name`.
fn defaultBlock(
    gi: *GenIr,
    scope: *Scope,
    body_node: *const Ast.Node,
) InnerError!void {
    const astgen = gi.astgen;
    const data = body_node.data.list;
    // Reserved now; its payload is filled in by setDeclaration below.
    const decl_inst = try gi.addAsIndex(.{
        .tag = .declaration,
        .data = .{ .payload = undefined },
    });
    var decl_scope = gi.makeSubBlock();
    defer decl_scope.unstack();

    const knot_inst = try decl_scope.makePayloadNode(.decl_knot);
    try blockInner(&decl_scope, scope, data.items);

    // The default knot has no nested declarations, so an empty stub block is
    // handed to setDeclKnotPayload for that position (compare knotDecl's
    // nested_block).
    var stub_scope = decl_scope.makeSubBlock();
    defer stub_scope.unstack();
    try setDeclKnotPayload(knot_inst, &decl_scope, &stub_scope);

    const decl_str = try astgen.strFromSlice(Story.default_knot_name);
    try setDeclaration(decl_inst, .{
        .name = decl_str.index,
        .value = knot_inst,
        .gi = gi,
        .node = body_node,
    });
}
|
|
|
|
/// Lowers a stitch declaration: a `declaration` wrapper containing a
/// `decl_stitch` instruction, one `param` instruction per parameter, and the
/// lowered body. `scope` is expected to be the stitch's own child scope (see
/// stitchDecl); parameter names are inserted into it.
fn stitchDeclInner(
    gi: *GenIr,
    scope: *Scope,
    node: *const Ast.Node,
    prototype_node: *const Ast.Node,
    body_node: ?*const Ast.Node,
) InnerError!void {
    const astgen = gi.astgen;
    const prototype_data = prototype_node.data.bin;
    const identifier_node = prototype_data.lhs.?;
    // Reserved now; its payload is filled in by setDeclaration below.
    const decl_inst = try gi.addAsIndex(.{
        .tag = .declaration,
        .data = .{ .payload = undefined },
    });

    var decl_block = gi.makeSubBlock();
    defer decl_block.unstack();

    const stitch_inst = try decl_block.makePayloadNode(.decl_stitch);

    // Emit one `param` instruction per declared parameter and bind its name.
    if (prototype_data.rhs) |args_node| {
        const args_data = args_node.data.list;
        for (args_data.items) |arg| {
            assert(arg.tag == .parameter_decl);
            const arg_str = try astgen.strFromNode(arg);
            const arg_inst = try decl_block.addStrTok(.param, arg_str.index, arg.loc.start);

            // TODO: Maybe make decl accept a ref?
            try scope.insert(arg_str.index, .{
                .decl_node = arg,
                .inst_index = arg_inst.toIndex().?,
            });
        }
    }
    // A bodyless stitch still goes through blockInner (with an empty list),
    // which gives it the trailing implicit return.
    if (body_node) |body| {
        const body_data = body.data.list;
        try blockInner(&decl_block, scope, body_data.items);
    } else {
        try blockInner(&decl_block, scope, &.{});
    }

    const decl_str = try astgen.strFromNode(identifier_node);
    try setDeclStitchPayload(stitch_inst, &decl_block);
    try setDeclaration(decl_inst, .{
        .name = decl_str.index,
        .value = stitch_inst,
        .gi = gi,
        .node = node,
    });
}
|
|
|
|
/// Lowers a stitch declaration inside its own child scope, delegating the
/// real work to `stitchDeclInner`.
fn stitchDecl(gi: *GenIr, parent_scope: *Scope, decl_node: *const Ast.Node) InnerError!void {
    var decl_scope = parent_scope.makeChild();
    defer decl_scope.deinit();

    const data = decl_node.data.bin;
    return stitchDeclInner(gi, &decl_scope, decl_node, data.lhs.?, data.rhs);
}
|
|
|
|
/// Lowers a function declaration. Mirrors stitchDeclInner, except the inner
/// instruction is `decl_function` and a bodyless function gets a bare
/// `implicit_ret` instead of going through blockInner.
/// NOTE(review): the local is still named `stitch_inst` and the payload is
/// written via setDeclStitchPayload — presumably functions share the stitch
/// payload layout; confirm.
fn functionDeclInner(
    gi: *GenIr,
    scope: *Scope,
    node: *const Ast.Node,
    prototype_node: *const Ast.Node,
    body_node: ?*const Ast.Node,
) InnerError!void {
    const astgen = gi.astgen;
    const prototype_data = prototype_node.data.bin;
    const identifier_node = prototype_data.lhs.?;
    // Reserved now; its payload is filled in by setDeclaration below.
    const decl_inst = try gi.addAsIndex(.{
        .tag = .declaration,
        .data = .{ .payload = undefined },
    });

    var decl_block = gi.makeSubBlock();
    defer decl_block.unstack();

    const stitch_inst = try decl_block.makePayloadNode(.decl_function);

    // Emit one `param` instruction per declared parameter and bind its name.
    if (prototype_data.rhs) |args_node| {
        const args_data = args_node.data.list;
        for (args_data.items) |arg| {
            assert(arg.tag == .parameter_decl);
            const arg_str = try astgen.strFromNode(arg);
            const arg_inst = try decl_block.addStrTok(.param, arg_str.index, arg.loc.start);

            // TODO: Maybe make decl accept a ref?
            try scope.insert(arg_str.index, .{
                .decl_node = arg,
                .inst_index = arg_inst.toIndex().?,
            });
        }
    }
    if (body_node) |body| {
        try blockStmt(&decl_block, scope, body);
    } else {
        _ = try decl_block.addUnaryNode(.implicit_ret, .none);
    }

    const decl_str = try astgen.strFromNode(identifier_node);
    try setDeclStitchPayload(stitch_inst, &decl_block);
    try setDeclaration(decl_inst, .{
        .name = decl_str.index,
        .value = stitch_inst,
        .gi = gi,
        .node = node,
    });
}
|
|
|
|
/// Lowers a function declaration inside its own child scope, delegating the
/// real work to `functionDeclInner`.
fn functionDecl(gi: *GenIr, parent_scope: *Scope, decl_node: *const Ast.Node) InnerError!void {
    var decl_scope = parent_scope.makeChild();
    defer decl_scope.deinit();

    const data = decl_node.data.bin;
    return functionDeclInner(gi, &decl_scope, decl_node, data.lhs.?, data.rhs);
}
|
|
|
|
/// Lowers a knot declaration. Parameters and an optional leading body block
/// are generated into `child_block`; any trailing stitch/function
/// declarations go into `nested_block`. Both blocks are handed to
/// setDeclKnotPayload.
fn knotDecl(gi: *GenIr, parent_scope: *Scope, decl_node: *const Ast.Node) InnerError!void {
    const astgen = gi.astgen;
    const data = decl_node.data.knot_decl;
    const prototype_node = data.prototype;
    const identifier_node = prototype_node.data.bin.lhs.?;
    // Reserved now; its payload is filled in by setDeclaration below.
    const decl_inst = try gi.addAsIndex(.{
        .tag = .declaration,
        .data = .{ .payload = undefined },
    });

    var node_index: usize = 0;
    var child_block = gi.makeSubBlock();
    defer child_block.unstack();

    // NOTE(review): unlike varDecl/defaultBlock/stitchDeclInner, which call
    // makePayloadNode on their sub-block, this calls it on `gi` (the parent
    // block) while the params/body are generated into `child_block` —
    // confirm this is intentional and not meant to be
    // `child_block.makePayloadNode(.decl_knot)`.
    const knot_inst = try gi.makePayloadNode(.decl_knot);
    var child_scope = parent_scope.makeChild();
    defer child_scope.deinit();

    // Emit one `param` instruction per declared parameter and bind its name.
    if (prototype_node.data.bin.rhs) |args_node| {
        const args_data = args_node.data.list;
        for (args_data.items) |arg| {
            assert(arg.tag == .parameter_decl);
            const arg_str = try astgen.strFromNode(arg);
            const arg_inst = try child_block.addStrTok(.param, arg_str.index, arg.loc.start);

            // TODO: Maybe make decl accept a ref?
            try child_scope.insert(arg_str.index, .{
                .decl_node = arg,
                .inst_index = arg_inst.toIndex().?,
            });
        }
    }
    // A leading block_stmt child, when present, is the knot's own body; the
    // remaining children are nested declarations.
    if (data.children.len > 0) {
        const first_child = data.children[0];
        if (first_child.tag == .block_stmt) {
            try blockStmt(&child_block, &child_scope, first_child);
            node_index += 1;
        }
    }

    var nested_block = child_block.makeSubBlock();
    defer nested_block.unstack();

    for (data.children[node_index..]) |nested_decl_node| {
        switch (nested_decl_node.tag) {
            .stitch_decl => try stitchDecl(&nested_block, &child_scope, nested_decl_node),
            .function_decl => try functionDecl(&nested_block, &child_scope, nested_decl_node),
            else => unreachable,
        }
    }

    const name_str = try gi.astgen.strFromNode(identifier_node);
    try setDeclKnotPayload(knot_inst, &child_block, &nested_block);
    try setDeclaration(decl_inst, .{
        .name = name_str.index,
        .value = knot_inst,
        .gi = gi,
        .node = decl_node,
    });
}
|
|
|
|
/// Lowers the root file node: an optional leading top-level block becomes
/// the default knot, the remaining children are knot/stitch/function
/// declarations, and the global declarations collected in `astgen.globals`
/// are appended to the file's instruction body.
fn file(gi: *GenIr, scope: *Scope, node: *const Ast.Node) InnerError!void {
    const astgen = gi.astgen;
    const data = node.data.list;
    const file_inst = try gi.addAsIndex(.{
        .tag = .file,
        .data = .{ .payload = undefined },
    });

    var node_index: usize = 0;
    var file_scope = gi.makeSubBlock();
    defer file_scope.unstack();

    // TODO: Make sure this is non-nullable.
    if (data.items.len > 0) {
        const first_child = data.items[0];
        if (first_child.tag == .block_stmt) {
            try defaultBlock(&file_scope, scope, first_child);
            node_index += 1;
        }
    }
    for (data.items[node_index..]) |child_node| {
        switch (child_node.tag) {
            .knot_decl => try knotDecl(gi, scope, child_node),
            .stitch_decl => try stitchDecl(gi, scope, child_node),
            .function_decl => try functionDecl(gi, scope, child_node),
            else => unreachable,
        }
    }

    // BUGFIX: capacity was previously ensured on `astgen.instructions` (the
    // array of Ir.Inst) while the assume-capacity append below targets
    // `gi.instructions` (the block's list of Ir.Inst.Index), so the append
    // ran without any guaranteed capacity. Reserve on the list that is
    // actually appended to.
    const globals_len = astgen.globals.items.len;
    try gi.instructions.ensureUnusedCapacity(astgen.gpa, globals_len);
    for (astgen.globals.items) |global| {
        gi.instructions.appendAssumeCapacity(global);
    }
    return file_scope.setBlockBody(file_inst);
}
|