feat: code generation for diverts, wip

This commit is contained in:
Brett Broadhurst 2026-03-17 23:19:54 -06:00
parent ee26be6254
commit 20292bcc6a
Failed to generate hash of commit
5 changed files with 699 additions and 149 deletions

View file

@ -15,6 +15,7 @@ instructions: std.ArrayListUnmanaged(Ir.Inst) = .empty,
globals: std.ArrayListUnmanaged(Ir.Global) = .empty,
global_ref_table: std.AutoHashMapUnmanaged(Ir.NullTerminatedString, usize) = .empty,
extra: std.ArrayListUnmanaged(u32) = .empty,
scratch: std.ArrayListUnmanaged(u32) = .empty,
errors: std.ArrayListUnmanaged(Ast.Error) = .empty,
pub const InnerError = error{
@ -32,38 +33,46 @@ pub fn deinit(astgen: *AstGen) void {
astgen.global_ref_table.deinit(gpa);
astgen.instructions.deinit(gpa);
astgen.extra.deinit(gpa);
astgen.scratch.deinit(gpa);
astgen.errors.deinit(gpa);
}
const Scope = struct {
parent: ?*Scope,
astgen: *AstGen,
namespace_prefix: Ir.NullTerminatedString,
decls: std.AutoHashMapUnmanaged(Ir.NullTerminatedString, Decl),
pub const Decl = struct {
const Decl = struct {
decl_node: *const Ast.Node,
inst_index: Ir.Inst.Index,
};
pub fn deinit(self: *Scope) void {
fn deinit(self: *Scope) void {
const gpa = self.astgen.gpa;
self.decls.deinit(gpa);
}
pub fn makeChild(parent_scope: *Scope) Scope {
fn makeChild(parent_scope: *Scope) Scope {
return .{
.parent = parent_scope,
.astgen = parent_scope.astgen,
.namespace_prefix = parent_scope.namespace_prefix,
.decls = .empty,
};
}
pub fn insert(self: *Scope, ref: Ir.NullTerminatedString, decl: Decl) !void {
fn setNamespacePrefix(scope: *Scope, relative: Ir.NullTerminatedString) !void {
const astgen = scope.astgen;
scope.namespace_prefix = try astgen.qualifiedString(scope.namespace_prefix, relative);
}
fn insert(self: *Scope, ref: Ir.NullTerminatedString, decl: Decl) !void {
const gpa = self.astgen.gpa;
return self.decls.put(gpa, ref, decl);
}
pub fn lookup(self: *Scope, ref: Ir.NullTerminatedString) ?Decl {
fn lookup(self: *Scope, ref: Ir.NullTerminatedString) ?Decl {
var current_scope: ?*Scope = self;
while (current_scope) |scope| : (current_scope = scope.parent) {
const result = scope.decls.get(ref);
@ -149,6 +158,12 @@ const GenIr = struct {
} });
}
fn addStr(gi: *GenIr, tag: Ir.Inst.Tag, str: Ir.NullTerminatedString) !Ir.Inst.Ref {
return add(gi, .{ .tag = tag, .data = .{
.string = .{ .start = str },
} });
}
fn addUnaryNode(gi: *GenIr, tag: Ir.Inst.Tag, arg: Ir.Inst.Ref) !Ir.Inst.Ref {
return add(gi, .{ .tag = tag, .data = .{
.un = .{ .lhs = arg },
@ -174,20 +189,21 @@ const GenIr = struct {
} });
}
fn makePayloadNode(gi: *GenIr, tag: Ir.Inst.Tag) !Ir.Inst.Index {
fn addPayloadNode(gi: *GenIr, tag: Ir.Inst.Tag, extra: anytype) !Ir.Inst.Ref {
const gpa = gi.astgen.gpa;
const inst_index: Ir.Inst.Index = @enumFromInt(gi.astgen.instructions.items.len);
try gi.astgen.instructions.append(gpa, .{
.tag = tag,
.data = .{
.payload = .{ .payload_index = undefined },
},
});
return inst_index;
}
try gi.instructions.ensureUnusedCapacity(gpa, 1);
try gi.astgen.instructions.ensureUnusedCapacity(gpa, 1);
fn makeDeclaration(gi: *GenIr) !Ir.Inst.Index {
return makePayloadNode(gi, .declaration);
const payload_index = try gi.astgen.addExtra(extra);
const new_index: Ir.Inst.Index = @enumFromInt(gi.astgen.instructions.items.len);
gi.astgen.instructions.appendAssumeCapacity(.{
.tag = tag,
.data = .{ .payload = .{
.payload_index = payload_index,
} },
});
gi.instructions.appendAssumeCapacity(new_index);
return new_index.toRef();
}
fn makeBlockInst(gi: *GenIr, tag: Ir.Inst.Tag) !Ir.Inst.Index {
@ -208,14 +224,20 @@ const GenIr = struct {
const extra_len = @typeInfo(Ir.Inst.Knot).@"struct".fields.len + body.len;
try self.astgen.extra.ensureUnusedCapacity(gpa, extra_len);
const knot_node = try makePayloadNode(self, .decl_knot);
const inst_data = &self.astgen.instructions.items[@intFromEnum(knot_node)].data;
const new_index: Ir.Inst.Index = @enumFromInt(self.astgen.instructions.items.len);
try self.astgen.instructions.append(gpa, .{
.tag = .decl_knot,
.data = .{
.payload = .{ .payload_index = undefined },
},
});
const inst_data = &self.astgen.instructions.items[@intFromEnum(new_index)].data;
inst_data.payload.payload_index = self.astgen.addExtraAssumeCapacity(
Ir.Inst.Knot{ .body_len = @intCast(body.len) },
);
self.astgen.appendBlockBody(body);
return knot_node;
return new_index;
}
fn addVar(self: *GenIr) !Ir.Inst.Index {
@ -304,6 +326,51 @@ const GenIr = struct {
}
};
/// Splat an IR data struct into the `extra` array.
/// Reserves room for one `u32` per field of `extra`, then delegates the
/// actual encoding to `addExtraAssumeCapacity`.
/// Returns the index in `astgen.extra` where the struct's first word landed.
/// Fails only on allocation failure.
fn addExtra(astgen: *AstGen, extra: anytype) !u32 {
const fields = std.meta.fields(@TypeOf(extra));
try astgen.extra.ensureUnusedCapacity(astgen.gpa, fields.len);
return addExtraAssumeCapacity(astgen, extra);
}
/// Splat an IR data struct into the `extra` array.
/// Like `addExtra`, but assumes the caller has already reserved capacity for
/// one `u32` per field (asserts/illegal-behavior otherwise, via the slice
/// length bump). Returns the starting index of the encoded struct.
fn addExtraAssumeCapacity(astgen: *AstGen, extra: anytype) u32 {
const fields = std.meta.fields(@TypeOf(extra));
const extra_index: u32 = @intCast(astgen.extra.items.len);
// Grow the live slice in place (capacity was pre-reserved), then fill it.
astgen.extra.items.len += fields.len;
setExtra(astgen, extra_index, extra);
return extra_index;
}
/// Encode each field of `extra` into `astgen.extra` starting at `index`,
/// one `u32` per field. Supported field types are plain `u32` and the
/// enum-backed index/ref/string types, which are stored as their integer
/// representation. Any other field type is a compile error, which keeps the
/// `extra` encoding explicit and exhaustive.
fn setExtra(astgen: *AstGen, index: usize, extra: anytype) void {
const fields = std.meta.fields(@TypeOf(extra));
var i = index;
// `inline for` so the per-field switch is resolved at comptime.
inline for (fields) |field| {
astgen.extra.items[i] = switch (field.type) {
u32 => @field(extra, field.name),
Ir.Inst.Index => @intFromEnum(@field(extra, field.name)),
Ir.Inst.Ref => @intFromEnum(@field(extra, field.name)),
Ir.NullTerminatedString => @intFromEnum(@field(extra, field.name)),
else => @compileError("bad field type"),
};
i += 1;
}
}
/// Append a block body (a list of instruction indices) to `astgen.extra`.
/// Caller must have reserved capacity; this is the assume-capacity path.
fn appendBlockBody(astgen: *AstGen, body: []const Ir.Inst.Index) void {
return appendBlockBodyArrayList(astgen, &astgen.extra, body);
}
/// Append instruction indices (as raw `u32`s) onto an arbitrary `u32` list —
/// used for both `extra` and the `scratch` buffer.
/// Uses `appendAssumeCapacity`: the caller is responsible for reserving
/// `body.len` slots beforehand. The `*AstGen` parameter is currently unused
/// and kept only for call-site symmetry with `appendBlockBody`.
fn appendBlockBodyArrayList(
_: *AstGen,
list: *std.ArrayListUnmanaged(u32),
body: []const Ir.Inst.Index,
) void {
for (body) |inst_index| {
list.appendAssumeCapacity(@intFromEnum(inst_index));
}
}
fn setDeclaration(
decl_index: Ir.Inst.Index,
args: struct {
@ -311,11 +378,11 @@ fn setDeclaration(
tag: Ir.Global.Tag,
ref: Ir.Inst.Index,
decl_node: *const Ast.Node,
body_gi: *GenIr,
body_block: *GenIr,
is_constant: bool = true,
},
) !void {
const astgen = args.body_gi.astgen;
const astgen = args.body_block.astgen;
const gpa = astgen.gpa;
const extra_len = @typeInfo(Ir.Inst.Declaration).@"struct".fields.len;
const global_index = astgen.globals.items.len;
@ -324,22 +391,22 @@ fn setDeclaration(
try astgen.globals.ensureUnusedCapacity(gpa, 1);
try astgen.global_ref_table.ensureUnusedCapacity(gpa, 1);
if (astgen.global_ref_table.get(args.name)) |_| {
return astgen.fail(.redefined_identifier, args.decl_node);
}
const inst_data = &astgen.instructions.items[@intFromEnum(decl_index)].data;
inst_data.payload.payload_index = astgen.addExtraAssumeCapacity(
Ir.Inst.Declaration{ .name = args.name, .value = args.ref },
);
if (astgen.global_ref_table.get(args.name)) |_| {
return astgen.fail(.redefined_identifier, args.decl_node);
}
astgen.globals.appendAssumeCapacity(.{
.tag = args.tag,
.name = args.name,
.is_constant = args.is_constant,
});
astgen.global_ref_table.putAssumeCapacity(args.name, global_index);
args.body_gi.unstack();
args.body_block.unstack();
}
fn setCondBrPayload(
@ -372,29 +439,6 @@ fn setCondBrPayload(
astgen.appendBlockBody(else_body);
}
fn addExtraAssumeCapacity(astgen: *AstGen, extra: anytype) u32 {
const fields = std.meta.fields(@TypeOf(extra));
const extra_index: u32 = @intCast(astgen.extra.items.len);
astgen.extra.items.len += fields.len;
setExtra(astgen, extra_index, extra);
return extra_index;
}
fn setExtra(astgen: *AstGen, index: usize, extra: anytype) void {
const fields = std.meta.fields(@TypeOf(extra));
var i = index;
inline for (fields) |field| {
astgen.extra.items[i] = switch (field.type) {
u32 => @field(extra, field.name),
Ir.Inst.Index => @intFromEnum(@field(extra, field.name)),
Ir.Inst.Ref => @intFromEnum(@field(extra, field.name)),
Ir.NullTerminatedString => @intFromEnum(@field(extra, field.name)),
else => @compileError("bad field type"),
};
i += 1;
}
}
fn fail(
self: *AstGen,
tag: Ast.Error.Tag,
@ -413,12 +457,6 @@ fn fail(
return error.SemanticError;
}
fn appendBlockBody(self: *AstGen, body: []const Ir.Inst.Index) void {
for (body) |inst_index| {
self.extra.appendAssumeCapacity(@intFromEnum(inst_index));
}
}
fn sliceFromNode(astgen: *const AstGen, node: *const Ast.Node) []const u8 {
assert(node.loc.start <= node.loc.end);
const source_bytes = astgen.tree.source;
@ -427,8 +465,8 @@ fn sliceFromNode(astgen: *const AstGen, node: *const Ast.Node) []const u8 {
fn stringFromBytes(astgen: *AstGen, bytes: []const u8) error{OutOfMemory}!Ir.NullTerminatedString {
const gpa = astgen.gpa;
const str_index: u32 = @intCast(astgen.string_bytes.items.len);
const string_bytes = &astgen.string_bytes;
const str_index: u32 = @intCast(string_bytes.items.len);
try string_bytes.appendSlice(gpa, bytes);
const key: []const u8 = string_bytes.items[str_index..];
@ -453,6 +491,46 @@ fn stringFromNode(astgen: *AstGen, node: *const Ast.Node) !Ir.NullTerminatedStri
return astgen.stringFromBytes(name_bytes);
}
/// Return the sentinel-terminated string stored in `string_bytes` at offset
/// `str`. Asserts (via `.?`) that a 0 terminator exists after the offset.
/// NOTE: the returned slice aliases `string_bytes` and is invalidated if
/// `string_bytes` grows.
fn nullTerminatedString(astgen: *AstGen, str: Ir.NullTerminatedString) [:0]const u8 {
const slice = astgen.string_bytes.items[@intFromEnum(str)..];
return slice[0..std.mem.indexOfScalar(u8, slice, 0).? :0];
}
/// Intern the dotted concatenation `prefix ++ "." ++ relative` into the
/// string table, returning the existing entry when the combined name was
/// already interned. An `.empty` prefix short-circuits and returns
/// `relative` unchanged. On a duplicate, the speculatively appended bytes
/// are rolled back with `shrinkRetainingCapacity`.
fn qualifiedString(
astgen: *AstGen,
prefix: Ir.NullTerminatedString,
relative: Ir.NullTerminatedString,
) !Ir.NullTerminatedString {
const gpa = astgen.gpa;
const string_bytes = &astgen.string_bytes;
const string_table = &astgen.string_table;
const str_index: u32 = @intCast(string_bytes.items.len);
switch (prefix) {
.empty => return relative,
else => |prev| {
// NOTE(review): `nullTerminatedString` returns a slice into
// `string_bytes` itself, and `appendSlice` may reallocate the list
// while copying from that alias — confirm this self-append is safe
// (e.g. by pre-reserving capacity) or copy to a temp first.
try string_bytes.appendSlice(gpa, nullTerminatedString(astgen, prev));
try string_bytes.append(gpa, '.');
try string_bytes.appendSlice(gpa, nullTerminatedString(astgen, relative));
const key: []const u8 = string_bytes.items[str_index..];
// Adapted lookup: the table stores u32 offsets but hashes the bytes
// they point at, so we can probe with a raw byte slice.
const gop = try string_table.getOrPutContextAdapted(gpa, key, StringIndexAdapter{
.bytes = string_bytes,
}, StringIndexContext{
.bytes = string_bytes,
});
if (gop.found_existing) {
// Already interned: discard the bytes we just appended.
string_bytes.shrinkRetainingCapacity(str_index);
return @enumFromInt(gop.key_ptr.*);
} else {
gop.key_ptr.* = str_index;
// Terminate the new entry so `nullTerminatedString` can find it.
try string_bytes.append(gpa, 0);
return @enumFromInt(str_index);
}
},
}
}
fn unaryOp(
gi: *GenIr,
scope: *Scope,
@ -565,7 +643,7 @@ fn expr(gi: *GenIr, scope: *Scope, optional_expr: ?*const Ast.Node) InnerError!I
.choice_option_expr => unreachable,
.choice_inner_expr => unreachable,
.divert_expr => unreachable,
.selector_expr => unreachable,
.selector_expr => return fieldAccess(gi, scope, expr_node),
.assign_stmt => unreachable,
.block_stmt => unreachable,
.content_stmt => unreachable,
@ -825,7 +903,10 @@ fn contentExpr(block: *GenIr, scope: *Scope, expr_node: *const Ast.Node) InnerEr
const result = try stringLiteral(block, child_node);
_ = try block.addUnaryNode(.content_push, result);
},
.inline_logic_expr => _ = try inlineLogicExpr(block, scope, child_node),
.inline_logic_expr => {
const result = try inlineLogicExpr(block, scope, child_node);
_ = try block.addUnaryNode(.content_push, result);
},
.if_stmt => _ = try ifStmt(block, scope, child_node),
.multi_if_stmt => _ = try multiIfStmt(block, scope, child_node),
.switch_stmt => _ = try switchStmt(block, scope, child_node),
@ -929,6 +1010,169 @@ fn choiceStmt(
astgen.extra.appendSliceAssumeCapacity(case_indexes.items[0..]);
}
/// Result of resolving the callee part of a call/divert expression.
/// Distinguishes method-style calls (`a.b(...)`) from plain calls (`f(...)`)
/// so code generation can pick the field-call instruction variants.
const Callee = union(enum) {
/// Method-style callee: the object expression plus the member name.
field: struct {
obj_ptr: Ir.Inst.Ref,
/// Offset into `string_bytes`.
field_name_start: Ir.NullTerminatedString,
},
/// Plain callee: a single resolved expression.
direct: Ir.Inst.Ref,
};
/// Generate IR for a field access expression (`lhs.field`), producing a
/// `.field_ptr` instruction whose payload records the lhs ref and the
/// interned field name. Asserts the node is a `.selector_expr` whose rhs is
/// an identifier.
fn fieldAccess(gi: *GenIr, scope: *Scope, node: *const Ast.Node) InnerError!Ir.Inst.Ref {
assert(node.tag == .selector_expr);
const data = node.data.bin;
assert(data.rhs.?.tag == .identifier);
const field_str = try gi.astgen.stringFromNode(data.rhs.?);
const lhs = try expr(gi, scope, data.lhs.?);
return gi.addPayloadNode(.field_ptr, Ir.Inst.Field{
.lhs = lhs,
.field_name_start = field_str,
});
}
/// calleeExpr generates the function part of a call expression (f in f(x)), but
/// *not* the callee argument for the call. Its purpose is to distinguish
/// between standard calls and method call syntax `a.b()`. Thus, if the lhs
/// is a field access, we return using the `field` union field;
/// otherwise, we use the `direct` union field.
/// Only `.selector_expr` and `.identifier` nodes are valid callees; anything
/// else is `unreachable` (assumed rejected earlier by the parser/checker).
fn calleeExpr(gi: *GenIr, scope: *Scope, node: *const Ast.Node) InnerError!Callee {
switch (node.tag) {
.selector_expr => {
const data = node.data.bin;
const call_target = data.rhs.?;
// Method syntax: rhs names the member being called.
assert(call_target.tag == .identifier);
const field_str = try gi.astgen.stringFromNode(call_target);
const lhs = try expr(gi, scope, data.lhs.?);
return .{
.field = .{ .obj_ptr = lhs, .field_name_start = field_str },
};
},
.identifier => {
return .{ .direct = try expr(gi, scope, node) };
},
else => unreachable,
}
}
/// Generate IR for a call or divert expression. The comptime `call`
/// parameter selects the emitted instruction tag pair:
/// `.call`/`.field_call` vs `.divert`/`.field_divert`.
///
/// Argument bodies are built in `astgen.scratch` using a two-part layout:
/// the first `args_count` slots are per-argument headers, each later filled
/// with the cumulative length (relative to `scratch_top`) after that
/// argument's body is appended; the instruction indices of each argument's
/// sub-block follow. The whole scratch region is then copied after the
/// `Call`/`FieldCall` header in `extra`. Scratch is restored to
/// `scratch_top` on every exit path via `defer`.
fn callExpr(
gi: *GenIr,
scope: *Scope,
node: *const Ast.Node,
comptime call: enum { divert, call },
) !Ir.Inst.Ref {
const astgen = gi.astgen;
const gpa = astgen.gpa;
const data = node.data.bin;
const callee_node = data.lhs.?;
const args_node = data.rhs;
const callee = try calleeExpr(gi, scope, callee_node);
const scratch_top = astgen.scratch.items.len;
defer astgen.scratch.shrinkRetainingCapacity(scratch_top);
const args_count = if (args_node) |n| n.data.list.items.?.len else 0;
// Reserve one header slot per argument up front.
try astgen.scratch.resize(gpa, scratch_top + args_count);
var scratch_index = scratch_top;
if (args_node) |n| {
const args_list = n.data.list.items.?;
for (args_list) |arg| {
// Each argument gets its own sub-block so its instructions can be
// stored as a self-contained body.
var arg_block = gi.makeSubBlock();
defer arg_block.unstack();
_ = try expr(&arg_block, scope, arg);
const body = arg_block.instructionsSlice();
try astgen.scratch.ensureUnusedCapacity(gpa, body.len);
appendBlockBodyArrayList(astgen, &astgen.scratch, body);
// Header = cumulative scratch length so far (end offset of this arg).
astgen.scratch.items[scratch_index] = @intCast(astgen.scratch.items.len - scratch_top);
scratch_index += 1;
}
}
switch (callee) {
.direct => |callee_obj| {
const payload_index = try addExtra(astgen, Ir.Inst.Call{
.callee = callee_obj,
.args_len = @intCast(args_count),
});
if (args_count != 0) {
// Trailing data: arg headers + flattened arg bodies.
try astgen.extra.appendSlice(gpa, astgen.scratch.items[scratch_top..]);
}
return gi.add(.{
.tag = if (call == .divert) .divert else .call,
.data = .{ .payload = .{
.payload_index = payload_index,
} },
});
},
.field => |callee_field| {
const payload_index = try addExtra(astgen, Ir.Inst.FieldCall{
.obj_ptr = callee_field.obj_ptr,
.field_name_start = callee_field.field_name_start,
.args_len = @intCast(args_count),
});
if (args_count != 0) {
// Trailing data: arg headers + flattened arg bodies.
try astgen.extra.appendSlice(gpa, astgen.scratch.items[scratch_top..]);
}
return gi.add(.{
.tag = if (call == .divert) .field_divert else .field_call,
.data = .{ .payload = .{
.payload_index = payload_index,
} },
});
},
}
}
/// Generate IR for a divert target expression. A bare identifier target is
/// lowered directly as a zero-argument `.divert`; anything else (e.g. a call
/// with an explicit argument list) is routed through `callExpr` in divert
/// mode.
fn divertExpr(gi: *GenIr, scope: *Scope, node: *const Ast.Node) !void {
// FIXME: The AST should always have an args list for these nodes.
const lhs = node.data.bin.lhs.?;
switch (lhs.tag) {
.identifier => {
const callee = try calleeExpr(gi, scope, lhs);
switch (callee) {
.direct => |callee_obj| {
// Zero-arg divert: Call payload with args_len == 0 and no
// trailing argument data.
const payload_index = try addExtra(gi.astgen, Ir.Inst.Call{
.callee = callee_obj,
.args_len = @intCast(0),
});
_ = try gi.add(.{
.tag = .divert,
.data = .{ .payload = .{
.payload_index = payload_index,
} },
});
},
// NOTE(review): `calleeExpr` only returns `.field` for
// `.selector_expr` nodes, so for an `.identifier` lhs this arm
// looks unreachable — confirm and consider simplifying.
.field => |callee_field| {
const payload_index = try addExtra(gi.astgen, Ir.Inst.FieldCall{
.obj_ptr = callee_field.obj_ptr,
.field_name_start = callee_field.field_name_start,
.args_len = @intCast(0),
});
_ = try gi.add(.{
.tag = .field_divert,
.data = .{ .payload = .{
.payload_index = payload_index,
} },
});
},
}
},
else => {
_ = try callExpr(gi, scope, lhs, .divert);
},
}
}
/// Generate IR for a divert statement by lowering its target expression
/// (the statement node's lhs) via `divertExpr`.
fn divertStmt(gi: *GenIr, scope: *Scope, node: *const Ast.Node) !void {
const data = node.data.bin;
return divertExpr(gi, scope, data.lhs.?);
}
fn tempDecl(gi: *GenIr, scope: *Scope, decl_node: *const Ast.Node) !void {
const identifier_node = decl_node.data.bin.lhs.?;
const expr_node = decl_node.data.bin.rhs.?;
@ -950,23 +1194,26 @@ fn tempDecl(gi: *GenIr, scope: *Scope, decl_node: *const Ast.Node) !void {
fn varDecl(gi: *GenIr, scope: *Scope, decl_node: *const Ast.Node) !void {
const astgen = gi.astgen;
const gpa = astgen.gpa;
const identifier_node = decl_node.data.bin.lhs.?;
const expr_node = decl_node.data.bin.rhs.?;
const decl_inst = try gi.makeDeclaration();
try gi.instructions.append(gpa, decl_inst);
const decl_inst = try gi.add(.{
.tag = .declaration,
.data = .{ .payload = .{
.payload_index = undefined,
} },
});
var decl_block = gi.makeSubBlock();
defer decl_block.unstack();
_ = try expr(&decl_block, scope, expr_node);
const var_inst = try decl_block.addVar();
try setDeclaration(decl_inst, .{
try setDeclaration(decl_inst.toIndex().?, .{
.tag = .variable,
.name = try astgen.stringFromNode(identifier_node),
.ref = var_inst,
.decl_node = decl_node,
.body_gi = &decl_block,
.body_block = &decl_block,
.is_constant = decl_node.tag == .const_decl,
});
}
@ -984,9 +1231,11 @@ fn blockInner(gi: *GenIr, parent_scope: *Scope, stmt_list: []*Ast.Node) !void {
.content_stmt => try contentStmt(gi, &child_scope, inner_node),
.choice_stmt => try choiceStmt(gi, &child_scope, inner_node),
.expr_stmt => try exprStmt(gi, &child_scope, inner_node),
.divert_stmt => try divertStmt(gi, &child_scope, inner_node),
else => unreachable,
};
}
_ = try gi.addUnaryNode(.implicit_ret, .none);
}
fn blockStmt(block: *GenIr, scope: *Scope, stmt_node: *const Ast.Node) InnerError!void {
@ -995,74 +1244,143 @@ fn blockStmt(block: *GenIr, scope: *Scope, stmt_node: *const Ast.Node) InnerErro
try blockInner(block, scope, block_stmts);
}
const main_knot_name: [:0]const u8 = "$__main__$";
fn defaultBlock(
gi: *GenIr,
scope: *Scope,
body_node: *const Ast.Node,
) InnerError!void {
const astgen = gi.astgen;
const gpa = astgen.gpa;
const decl_inst = try gi.makeDeclaration();
try gi.instructions.append(gpa, decl_inst);
const decl_inst = try gi.addAsIndex(.{
.tag = .declaration,
.data = .{ .payload = .{
.payload_index = undefined,
} },
});
var decl_scope = gi.makeSubBlock();
defer decl_scope.unstack();
// TODO: Make sure that this value is concrete to omit check.
const block_stmts = body_node.data.list.items orelse unreachable;
const block_stmts = body_node.data.list.items.?;
try blockInner(&decl_scope, scope, block_stmts);
const knot_inst = try decl_scope.addKnot();
try setDeclaration(decl_inst, .{
.tag = .knot,
.decl_node = body_node,
.name = try astgen.stringFromBytes("$__main__$"),
.name = try astgen.stringFromBytes(Story.default_knot_name),
.ref = knot_inst,
.body_gi = &decl_scope,
.decl_node = body_node,
.body_block = &decl_scope,
});
}
fn stitchDecl(_: *GenIr, _: *Scope, _: *const Ast.Node) InnerError!void {}
/// Shared lowering for stitch-like declarations (used by both `stitchDecl`
/// and `knotDecl`'s implicit first block). Emits a `.declaration`
/// instruction whose payload is patched later by `setDeclaration`, builds
/// the body in a sub-block, and registers the result as a `.knot` global
/// under the scope's namespace-qualified name.
///
/// `body_node` may be null, in which case the body is just an implicit
/// return. Declared parameters are inserted into `scope` (the caller is
/// expected to pass a child scope it owns).
fn stitchDeclInner(
gi: *GenIr,
scope: *Scope,
decl_node: *const Ast.Node,
prototype_node: *const Ast.Node,
body_node: ?*const Ast.Node,
) InnerError!void {
const astgen = gi.astgen;
const prototype_data = prototype_node.data.bin;
const identifier_node = prototype_data.lhs.?;
// Payload index is patched in `setDeclaration` once the body is known.
const decl_inst = try gi.addAsIndex(.{
.tag = .declaration,
.data = .{ .payload = .{
.payload_index = undefined,
} },
});
var decl_block = gi.makeSubBlock();
defer decl_block.unstack();
if (prototype_data.rhs) |args_node| {
const args_list = args_node.data.list.items.?;
for (args_list) |arg| {
assert(arg.tag == .parameter_decl);
const arg_str = try astgen.stringFromNode(arg);
// Each parameter becomes a `.param` instruction bound in scope.
const arg_inst = try decl_block.addStr(.param, arg_str);
// TODO: Maybe make decl accept a ref?
try scope.insert(arg_str, .{
.decl_node = arg,
.inst_index = arg_inst.toIndex().?,
});
}
}
if (body_node) |body| {
try blockStmt(&decl_block, scope, body);
} else {
// Bodyless stitch: just return implicitly.
_ = try decl_block.addUnaryNode(.implicit_ret, .none);
}
const knot_inst = try decl_block.addKnot();
const name_str = try astgen.stringFromNode(identifier_node);
try setDeclaration(decl_inst, .{
.tag = .knot,
// Qualify with the enclosing namespace, e.g. `knot.stitch`.
.name = try astgen.qualifiedString(scope.namespace_prefix, name_str),
.ref = knot_inst,
.decl_node = decl_node,
.body_block = &decl_block,
});
}
/// Generate IR for a stitch declaration: create a child scope for the
/// stitch's own declarations/parameters and delegate to `stitchDeclInner`
/// with the prototype (lhs) and required body (rhs).
fn stitchDecl(gi: *GenIr, parent_scope: *Scope, decl_node: *const Ast.Node) InnerError!void {
const knot_data = decl_node.data.bin;
const prototype_node = knot_data.lhs.?;
const body_node = knot_data.rhs.?;
var decl_scope = parent_scope.makeChild();
defer decl_scope.deinit();
return stitchDeclInner(gi, &decl_scope, decl_node, prototype_node, body_node);
}
// TODO: Function declarations are not lowered yet; this stub intentionally
// ignores all parameters and generates nothing.
fn functionDecl(_: *GenIr, _: *Scope, _: *const Ast.Node) InnerError!void {}
fn knotDecl(gen: *GenIr, scope: *Scope, decl_node: *const Ast.Node) InnerError!void {
const prototype_node = decl_node.data.knot_decl.prototype;
const nested_decls_list = decl_node.data.knot_decl.children orelse return;
const identifier_node = prototype_node.data.bin.lhs orelse unreachable;
const ident_ref = try gen.astgen.stringFromNode(identifier_node);
const knot_symbol = scope.lookup(ident_ref) orelse unreachable;
const knot_scope = knot_symbol.knot.decl_scope;
fn knotDecl(gi: *GenIr, parent_scope: *Scope, decl_node: *const Ast.Node) InnerError!void {
const knot_data = decl_node.data.knot_decl;
const prototype_node = knot_data.prototype;
const identifier_node = prototype_node.data.bin.lhs.?;
const nested_decls_list = knot_data.children.?;
var block_gen = gen.makeSubBlock();
defer block_gen.deinit();
var decl_scope = parent_scope.makeChild();
defer decl_scope.deinit();
var start_index: usize = 0;
const first_child = nested_decls_list[0];
if (first_child.tag == .block_stmt) {
try blockStmt(&block_gen, knot_scope, first_child);
if (nested_decls_list.len > 1) start_index += 1 else return;
try stitchDeclInner(gi, &decl_scope, decl_node, prototype_node, first_child);
start_index += 1;
} else {
try stitchDeclInner(gi, &decl_scope, decl_node, prototype_node, null);
}
const name_str = try gi.astgen.stringFromNode(identifier_node);
try decl_scope.setNamespacePrefix(name_str);
for (nested_decls_list[start_index..]) |nested_decl_node| {
switch (decl_node.tag) {
.stitch_decl => try stitchDecl(gen, knot_scope, nested_decl_node),
.function_decl => try functionDecl(gen, knot_scope, nested_decl_node),
switch (nested_decl_node.tag) {
.stitch_decl => try stitchDecl(gi, &decl_scope, nested_decl_node),
.function_decl => try functionDecl(gi, &decl_scope, nested_decl_node),
else => unreachable,
}
}
}
fn file(root_gi: *GenIr, scope: *Scope, file_node: *const Ast.Node) InnerError!void {
const astgen = root_gi.astgen;
const gpa = astgen.gpa;
const file_inst = try root_gi.makePayloadNode(.file);
try root_gi.instructions.append(gpa, file_inst);
fn file(gi: *GenIr, scope: *Scope, file_node: *const Ast.Node) InnerError!void {
const file_inst = try gi.addAsIndex(.{
.tag = .file,
.data = .{
.payload = .{
.payload_index = undefined,
},
},
});
var start_index: usize = 0;
var file_scope = root_gi.makeSubBlock();
var file_scope = gi.makeSubBlock();
defer file_scope.unstack();
// TODO: Make sure this is non-nullable.
const nested_decls_list = file_node.data.list.items orelse return;
if (nested_decls_list.len == 0) return;
@ -1077,9 +1395,9 @@ fn file(root_gi: *GenIr, scope: *Scope, file_node: *const Ast.Node) InnerError!v
}
for (nested_decls_list[start_index..]) |child_node| {
switch (child_node.tag) {
//.knot_decl => try knotDecl(gi, scope, child_node),
//.stitch_decl => try stitchDecl(gi, scope, child_node),
//.function_decl => try functionDecl(gi, scope, child_node),
.knot_decl => try knotDecl(gi, scope, child_node),
.stitch_decl => try stitchDecl(gi, scope, child_node),
.function_decl => try functionDecl(gi, scope, child_node),
else => unreachable,
}
}
@ -1103,6 +1421,7 @@ pub fn generate(gpa: std.mem.Allocator, tree: *const Ast) !Ir {
var file_scope: Scope = .{
.parent = null,
.decls = .empty,
.namespace_prefix = .empty,
.astgen = &astgen,
};
var gen: GenIr = .{