//! AstGen: lowers a parsed `Ast` into the compact `Ir` representation via a
//! tree-walk. This file *is* the `AstGen` struct (see `@This()` below); its
//! fields hold all intermediate state for one `generate` run.
const std = @import("std");
const Ast = @import("Ast.zig");
const Ir = @import("Ir.zig");
const Story = @import("Story.zig");
const StringIndexAdapter = std.hash_map.StringIndexAdapter;
const StringIndexContext = std.hash_map.StringIndexContext;
const assert = std.debug.assert;

const AstGen = @This();

// Allocator used for every container below.
gpa: std.mem.Allocator,
// The parsed tree being lowered. Not owned by AstGen.
tree: *const Ast,
// Interns strings: maps an offset into `string_bytes` to nothing; dedup is
// done through the StringIndexContext/Adapter pair (see `strFromSlice`).
string_table: std.HashMapUnmanaged(u32, void, StringIndexContext, std.hash_map.default_max_load_percentage) = .empty,
// Backing storage for all interned, NUL-terminated strings. Index 0 is
// reserved (see `generate`).
string_bytes: std.ArrayListUnmanaged(u8) = .empty,
globals: std.ArrayListUnmanaged(Ir.Inst.Index) = .empty,
// All emitted IR instructions, indexed by `Ir.Inst.Index`.
instructions: std.ArrayListUnmanaged(Ir.Inst) = .empty,
// Side array of u32 payload words referenced by instructions.
extra: std.ArrayListUnmanaged(u32) = .empty,
scratch: std.ArrayListUnmanaged(u32) = .empty,
// Accumulated semantic/parse errors, flushed into `extra` at end of `generate`.
compile_errors: std.ArrayListUnmanaged(Ir.Inst.CompileErrors.Item) = .empty,

/// Error set shared by the expression/statement lowering functions.
pub const InnerError = error{
    OutOfMemory,
    SemanticError,
    InvalidCharacter,
    Overflow,
};

/// Splat an IR data struct into the `extra` array.
/// Reserves capacity first, then delegates; returns the starting index of the
/// struct's words within `extra`.
fn addExtra(astgen: *AstGen, extra: anytype) !u32 {
    const fields = std.meta.fields(@TypeOf(extra));
    try astgen.extra.ensureUnusedCapacity(astgen.gpa, fields.len);
    return addExtraAssumeCapacity(astgen, extra);
}

/// Splat an IR data struct into the `extra` array.
/// Caller must have reserved `fields.len` words in `extra`. Returns the start
/// index of the written struct within `extra`.
fn addExtraAssumeCapacity(astgen: *AstGen, extra: anytype) u32 {
    const fields = std.meta.fields(@TypeOf(extra));
    const extra_index: u32 = @intCast(astgen.extra.items.len);
    astgen.extra.items.len += fields.len;
    setExtra(astgen, extra_index, extra);
    return extra_index;
}

/// Write each field of `extra` as one u32 word starting at `index`.
/// Enum-typed fields are stored via their integer tag.
fn setExtra(astgen: *AstGen, index: usize, extra: anytype) void {
    const fields = std.meta.fields(@TypeOf(extra));
    var i = index;
    inline for (fields) |field| {
        astgen.extra.items[i] = switch (field.type) {
            u32 => @field(extra, field.name),
            Ir.Inst.Index => @intFromEnum(@field(extra, field.name)),
            Ir.Inst.Ref => @intFromEnum(@field(extra, field.name)),
            Ir.NullTerminatedString => @intFromEnum(@field(extra, field.name)),
            else => @compileError("bad field type"),
        };
        i += 1;
    }
}

/// Append a block body (list of instruction indexes) to `extra`.
/// Capacity must already be reserved by the caller.
fn appendBlockBody(astgen: *AstGen, body: []const Ir.Inst.Index) void {
    return appendBlockBodyArrayList(astgen, &astgen.extra, body);
}

fn appendBlockBodyArrayList(
    _: *AstGen,
    list: *std.ArrayListUnmanaged(u32),
    body: []const Ir.Inst.Index,
) void {
    for (body) |inst_index| {
        list.appendAssumeCapacity(@intFromEnum(inst_index));
    }
}

/// Record a compile error located at `node`'s starting byte offset.
fn appendErrorNode(
    astgen: *AstGen,
    node: *const Ast.Node,
    comptime format: []const u8,
    args: anytype,
) error{OutOfMemory}!void {
    return appendErrorToken(astgen, @intCast(node.loc.start), format, args);
}

/// Record a compile error message (formatted into `string_bytes`,
/// NUL-terminated) at `byte_offset` in the source.
fn appendErrorToken(
    astgen: *AstGen,
    byte_offset: u32,
    comptime format: []const u8,
    args: anytype,
) error{OutOfMemory}!void {
    const gpa = astgen.gpa;
    const string_bytes = &astgen.string_bytes;
    const msg: Ir.NullTerminatedString = @enumFromInt(string_bytes.items.len);
    try string_bytes.print(gpa, format ++ "\x00", args);
    try astgen.compile_errors.append(gpa, .{
        .msg = msg,
        .byte_offset = byte_offset,
    });
}

/// Record a compile error and return `error.SemanticError` so lowering of the
/// current construct aborts.
fn fail(
    astgen: *AstGen,
    node: *const Ast.Node,
    comptime format: []const u8,
    args: anytype,
) error{ SemanticError, OutOfMemory } {
    try appendErrorNode(astgen, node, format, args);
    return error.SemanticError;
}

/// Convert parser errors on `astgen.tree` into compile errors.
fn lowerAstErrors(astgen: *AstGen) error{OutOfMemory}!void {
    const gpa = astgen.gpa;
    var msg: std.Io.Writer.Allocating = .init(gpa);
    defer msg.deinit();
    const w = &msg.writer;
    for (astgen.tree.errors) |err| {
        astgen.tree.renderError(w, err) catch return error.OutOfMemory;
        try appendErrorToken(
            astgen,
            @intCast(err.loc.start),
            "{s}",
            .{msg.written()},
        );
        // Reuse the buffer for the next rendered message.
        msg.clearRetainingCapacity();
    }
}

/// Resolve an interned string back to its NUL-terminated slice.
fn nullTerminatedString(astgen: *AstGen, str: Ir.NullTerminatedString) [:0]const u8 {
    const slice = astgen.string_bytes.items[@intFromEnum(str)..];
    return slice[0..std.mem.indexOfScalar(u8, slice, 0).? :0];
}

/// An interned string plus its length (the length is not recoverable from the
/// index alone without scanning for the NUL).
const IndexSlice = struct {
    index: Ir.NullTerminatedString,
    len: u32,
};

/// Intern `bytes` into `string_bytes`, deduplicating via `string_table`.
/// The candidate is appended first so the adapted lookup can hash it in place;
/// on a hit the appended bytes are rolled back.
fn strFromSlice(astgen: *AstGen, bytes: []const u8) error{OutOfMemory}!IndexSlice {
    const gpa = astgen.gpa;
    const string_bytes = &astgen.string_bytes;
    const str_index: u32 = @intCast(string_bytes.items.len);
    try string_bytes.appendSlice(gpa, bytes);
    const key: []const u8 = string_bytes.items[str_index..];
    const gop = try astgen.string_table.getOrPutContextAdapted(gpa, key, StringIndexAdapter{
        .bytes = string_bytes,
    }, StringIndexContext{
        .bytes = string_bytes,
    });
    if (gop.found_existing) {
        // Duplicate: discard the speculative append and reuse the old entry.
        string_bytes.shrinkRetainingCapacity(str_index);
        return .{
            .index = @enumFromInt(gop.key_ptr.*),
            .len = @intCast(key.len),
        };
    } else {
        gop.key_ptr.* = str_index;
        // Terminate the newly interned string.
        try string_bytes.append(gpa, 0);
        return .{
            .index = @enumFromInt(str_index),
            .len = @intCast(key.len),
        };
    }
}

/// Intern the source text covered by `node`.
fn strFromNode(astgen: *AstGen, node: *const Ast.Node) !IndexSlice {
    const name_bytes = astgen.tree.nodeSlice(node);
    return astgen.strFromSlice(name_bytes);
}

/// Perform IR code generation via tree-walk.
/// On success the returned `Ir` owns `instructions`, `string_bytes` and
/// `extra`; on a fatal semantic error `instructions` is empty and the compile
/// errors are encoded into `extra`.
pub fn generate(gpa: std.mem.Allocator, tree: *const Ast) !Ir {
    var astgen: AstGen = .{
        .gpa = gpa,
        .tree = tree,
    };
    defer astgen.deinit();
    // First entry is reserved for Ir.NullTerminatedString.empty.
    try astgen.string_bytes.append(gpa, 0);
    var instructions: std.ArrayListUnmanaged(Ir.Inst.Index) = .empty;
    defer instructions.deinit(gpa);
    var file_scope: Scope = .{
        .parent = null,
        .decls = .empty,
        .astgen = &astgen,
    };
    // Fix: the file scope's decls map was never freed; `Scope.deinit` exists
    // for exactly this and is a no-op when the map stayed empty.
    defer file_scope.deinit();
    var block: GenIr = .{
        .astgen = &astgen,
        .instructions = &instructions,
        .instructions_top = 0,
    };
    defer block.unstack();
    // Reserve one `extra` word per `Ir.ExtraIndex` member; these are patched
    // below (e.g. the compile-errors slot).
    const reserved_extra_count = @typeInfo(Ir.ExtraIndex).@"enum".fields.len;
    try astgen.extra.ensureTotalCapacity(gpa, reserved_extra_count);
    astgen.extra.items.len += reserved_extra_count;
    const fatal = if (tree.errors.len == 0) fatal: {
        file(&block, &file_scope, tree.root) catch |err| switch (err) {
            error.OutOfMemory => return error.OutOfMemory,
            error.SemanticError => break :fatal true,
            else => |e| return e,
        };
        break :fatal false;
    } else fatal: {
        // Parse errors: don't lower, just surface them as compile errors.
        try lowerAstErrors(&astgen);
        break :fatal true;
    };
    const err_index = @intFromEnum(Ir.ExtraIndex.compile_errors);
    if (astgen.compile_errors.items.len == 0) {
        astgen.extra.items[err_index] = 0;
    } else {
        const extra_len = 1 + astgen.compile_errors.items.len *
            @typeInfo(Ir.Inst.CompileErrors.Item).@"struct".fields.len;
        try astgen.extra.ensureUnusedCapacity(gpa, extra_len);
        astgen.extra.items[err_index] = astgen.addExtraAssumeCapacity(Ir.Inst.CompileErrors{
            .items_len = @intCast(astgen.compile_errors.items.len),
        });
        for (astgen.compile_errors.items) |item| {
            _ = astgen.addExtraAssumeCapacity(item);
        }
    }
    return .{
        .instructions = if (fatal) &.{} else try astgen.instructions.toOwnedSlice(gpa),
        .string_bytes = try astgen.string_bytes.toOwnedSlice(gpa),
        .extra = try astgen.extra.toOwnedSlice(gpa),
    };
}

/// Free all AstGen-owned containers (lists that were handed off via
/// `toOwnedSlice` are empty by then, so this is safe on both paths).
fn deinit(astgen: *AstGen) void {
    const gpa = astgen.gpa;
    astgen.string_table.deinit(gpa);
    astgen.string_bytes.deinit(gpa);
    astgen.globals.deinit(gpa);
    astgen.instructions.deinit(gpa);
    astgen.extra.deinit(gpa);
    astgen.scratch.deinit(gpa);
    astgen.compile_errors.deinit(gpa);
}

/// A "stacked" instruction block under construction. Sub-blocks share the same
/// backing `instructions` list; `instructions_top` marks where this block's
/// own instructions begin.
const GenIr = struct {
    astgen: *AstGen,
    instructions: *std.ArrayListUnmanaged(Ir.Inst.Index),
    instructions_top: usize,

    const
unstacked_top = std.math.maxInt(usize);

    /// Pop this block's instructions off the shared list (idempotent).
    fn unstack(self: *GenIr) void {
        if (self.instructions_top != unstacked_top) {
            self.instructions.items.len = self.instructions_top;
            self.instructions_top = unstacked_top;
        }
    }

    fn isEmpty(self: *const GenIr) bool {
        return (self.instructions_top == unstacked_top) or
            (self.instructions.items.len == self.instructions_top);
    }

    /// The instructions belonging to this block (empty if unstacked).
    fn instructionsSlice(self: *const GenIr) []Ir.Inst.Index {
        return if (self.instructions_top == unstacked_top)
            &[0]Ir.Inst.Index{}
        else
            self.instructions.items[self.instructions_top..];
    }

    /// Like `instructionsSlice`, but stops where `stacked_block` (a sub-block
    /// stacked on top of this one, sharing the same list) begins.
    fn instructionsSliceUpto(
        self: *const GenIr,
        stacked_block: *const GenIr,
    ) []Ir.Inst.Index {
        return if (self.instructions_top == unstacked_top)
            &[0]Ir.Inst.Index{}
        else if (self.instructions == stacked_block.instructions and
            stacked_block.instructions_top != unstacked_top)
            self.instructions.items[self.instructions_top..stacked_block.instructions_top]
        else
            self.instructions.items[self.instructions_top..];
    }

    /// True if the last instruction in this block is noreturn per
    /// `Ir.Inst.isNoReturn`.
    fn endsWithNoReturn(self: *GenIr) bool {
        if (self.isEmpty()) return false;
        const last_inst_index = self.instructions.items[self.instructions.items.len - 1];
        const last_inst = self.astgen.instructions.items[@intFromEnum(last_inst_index)];
        return last_inst.isNoReturn();
    }

    fn endsWithGlue(self: *GenIr) bool {
        if (self.isEmpty()) return false;
        const last_inst_index = self.instructions.items[self.instructions.items.len - 1];
        const last_inst = self.astgen.instructions.items[@intFromEnum(last_inst_index)];
        return last_inst.tag == .content_glue;
    }

    /// Stack a new sub-block on top of this one; it shares the same backing
    /// list and starts at the current length.
    fn makeSubBlock(self: *GenIr) GenIr {
        return .{
            .astgen = self.astgen,
            .instructions = self.instructions,
            .instructions_top = self.instructions.items.len,
        };
    }

    fn add(gi: *GenIr, inst: Ir.Inst) !Ir.Inst.Ref {
        return (try gi.addAsIndex(inst)).toRef();
    }

    /// Append `inst` to the global instruction list AND to this block's body.
    fn addAsIndex(gi: *GenIr, inst: Ir.Inst) !Ir.Inst.Index {
        const gpa = gi.astgen.gpa;
        try gi.instructions.ensureUnusedCapacity(gpa, 1);
        try gi.astgen.instructions.ensureUnusedCapacity(gpa, 1);
        const new_index: Ir.Inst.Index = @enumFromInt(gi.astgen.instructions.items.len);
        gi.astgen.instructions.appendAssumeCapacity(inst);
        gi.instructions.appendAssumeCapacity(new_index);
        return new_index;
    }

    fn addInt(gi: *GenIr, value: i64) !Ir.Inst.Ref {
        return add(gi, .{ .tag = .int, .data = .{
            .int = value,
        } });
    }

    fn addFloat(gi: *GenIr, value: f64) !Ir.Inst.Ref {
        return add(gi, .{ .tag = .float, .data = .{
            .float = value,
        } });
    }

    fn addUnaryNode(gi: *GenIr, tag: Ir.Inst.Tag, arg: Ir.Inst.Ref) !Ir.Inst.Ref {
        return add(gi, .{ .tag = tag, .data = .{
            .un = .{ .lhs = arg },
        } });
    }

    fn addBinaryNode(
        gi: *GenIr,
        tag: Ir.Inst.Tag,
        lhs: Ir.Inst.Ref,
        rhs: Ir.Inst.Ref,
    ) !Ir.Inst.Ref {
        return add(gi, .{ .tag = tag, .data = .{
            .bin = .{ .lhs = lhs, .rhs = rhs },
        } });
    }

    /// Emit a `str` instruction referencing an interned string.
    fn addStr(
        gi: *GenIr,
        str: Ir.NullTerminatedString,
        str_len: usize,
    ) !Ir.Inst.Ref {
        assert(str_len <= std.math.maxInt(u32));
        return add(gi, .{ .tag = .str, .data = .{
            .str = .{ .start = str, .len = @intCast(str_len) },
        } });
    }

    /// Emit an instruction carrying an interned string plus a source offset.
    fn addStrTok(
        block: *GenIr,
        tag: Ir.Inst.Tag,
        str_index: Ir.NullTerminatedString,
        byte_offset: usize,
    ) !Ir.Inst.Ref {
        assert(byte_offset <= std.math.maxInt(u32));
        return block.add(.{
            .tag = tag,
            .data = .{ .str_tok = .{
                .start = str_index,
                .src_offset = @intCast(byte_offset),
            } },
        });
    }

    /// Emit an instruction whose payload struct is splatted into `extra`.
    fn addPayloadNode(
        gen: *GenIr,
        tag: Ir.Inst.Tag,
        node: *const Ast.Node,
        extra: anytype,
    ) !Ir.Inst.Ref {
        const gpa = gen.astgen.gpa;
        try gen.instructions.ensureUnusedCapacity(gpa, 1);
        try gen.astgen.instructions.ensureUnusedCapacity(gpa, 1);
        const extra_index = try gen.astgen.addExtra(extra);
        const new_index: Ir.Inst.Index = @enumFromInt(gen.astgen.instructions.items.len);
        gen.astgen.instructions.appendAssumeCapacity(.{
            .tag = tag,
            .data = .{ .payload = .{
                .extra_index = extra_index,
                .src_offset = @intCast(node.loc.start),
            } },
        });
        gen.instructions.appendAssumeCapacity(new_index);
        return new_index.toRef();
    }

    /// Like `addPayloadNode` but the payload was already written to `extra`.
    /// Callers may pass `undefined` and patch `extra_index` later.
    fn addPayloadNodeWithIndex(
        gen: *GenIr,
        tag: Ir.Inst.Tag,
        node: *const Ast.Node,
        extra_index: u32,
    ) !Ir.Inst.Ref {
        return gen.add(.{ .tag = tag, .data = .{
            .payload = .{
                .extra_index = extra_index,
                .src_offset = @intCast(node.loc.start),
            },
        } });
    }

    /// Emit a conditional-branch instruction with an undefined payload; the
    /// payload is filled in later by `setCondBrPayload`.
    fn addCondBr(gen: *GenIr, tag: Ir.Inst.Tag) !Ir.Inst.Index {
        const gpa = gen.astgen.gpa;
        try gen.instructions.ensureUnusedCapacity(gpa, 1);
        try gen.astgen.instructions.ensureUnusedCapacity(gpa, 1);
        const new_index: Ir.Inst.Index = @enumFromInt(gen.astgen.instructions.items.len);
        gen.astgen.instructions.appendAssumeCapacity(.{
            .tag = tag,
            .data = .{ .payload = undefined },
        });
        gen.instructions.appendAssumeCapacity(new_index);
        return new_index;
    }

    /// Emit a break targeting `block_inst`, carrying `operand` as the result.
    fn addBreak(
        gi: *GenIr,
        tag: Ir.Inst.Tag,
        node: *const Ast.Node,
        block_inst: Ir.Inst.Index,
        operand: Ir.Inst.Ref,
    ) !Ir.Inst.Ref {
        const extra_len = @typeInfo(Ir.Inst.Break).@"struct".fields.len;
        try gi.astgen.extra.ensureUnusedCapacity(gi.astgen.gpa, extra_len);
        const extra_index = gi.astgen.addExtraAssumeCapacity(Ir.Inst.Break{
            .operand = operand,
            .block_inst = block_inst,
        });
        return gi.addPayloadNodeWithIndex(tag, node, extra_index);
    }

    /// Append an instruction with an undefined payload to the global list
    /// WITHOUT adding it to any block body; callers append the returned index
    /// to the enclosing block themselves (see e.g. `ifStmt`).
    fn makePayloadNode(self: *GenIr, tag: Ir.Inst.Tag) !Ir.Inst.Index {
        const astgen = self.astgen;
        const inst_index: Ir.Inst.Index = @enumFromInt(astgen.instructions.items.len);
        try astgen.instructions.append(astgen.gpa, .{
            .tag = tag,
            .data = .{
                .payload = undefined,
            },
        });
        return inst_index;
    }

    /// Assumes nothing is stacked on `gi`. Unstacks `gi`.
    /// Writes this block's body as the RHS body of the short-circuit branch
    /// `bool_br` and patches that instruction's payload.
    fn setBoolBrBody(gi: *GenIr, bool_br: Ir.Inst.Index, bool_br_lhs: Ir.Inst.Ref) !void {
        const astgen = gi.astgen;
        const body = gi.instructionsSlice();
        try astgen.extra.ensureUnusedCapacity(
            astgen.gpa,
            @typeInfo(Ir.Inst.BoolBr).@"struct".fields.len + body.len,
        );
        const data = &astgen.instructions.items[@intFromEnum(bool_br)].data;
        data.payload.extra_index = astgen.addExtraAssumeCapacity(Ir.Inst.BoolBr{
            .lhs = bool_br_lhs,
            .body_len = @intCast(body.len),
        });
        astgen.appendBlockBody(body);
        gi.unstack();
    }

    /// Write this block's body into `extra` as the body of block `inst` and
    /// patch that instruction's payload. Unstacks `self`.
    fn setBlockBody(self: *GenIr, inst: Ir.Inst.Index) !void {
        const gpa = self.astgen.gpa;
        const body = self.instructionsSlice();
        const extra_len = @typeInfo(Ir.Inst.Block).@"struct".fields.len + body.len;
        try self.astgen.extra.ensureUnusedCapacity(gpa, extra_len);
        const inst_data = &self.astgen.instructions.items[@intFromEnum(inst)].data;
        inst_data.payload.extra_index = self.astgen.addExtraAssumeCapacity(
            Ir.Inst.Block{ .body_len = @intCast(body.len) },
        );
        self.astgen.appendBlockBody(body);
        self.unstack();
    }
};

/// Lexical scope: a chain of name -> declaration maps.
const Scope = struct {
    parent: ?*Scope,
    astgen: *AstGen,
    decls: std.AutoHashMapUnmanaged(Ir.NullTerminatedString, Decl),

    const Decl = struct {
        decl_node: *const Ast.Node,
        inst_index: Ir.Inst.Index,
    };

    fn deinit(self: *Scope) void {
        const gpa = self.astgen.gpa;
        self.decls.deinit(gpa);
    }

    fn makeChild(parent_scope: *Scope) Scope {
        return .{
            .parent = parent_scope,
            .astgen = parent_scope.astgen,
            .decls = .empty,
        };
    }

    fn insert(self: *Scope, ref: Ir.NullTerminatedString, decl: Decl) !void {
        const gpa = self.astgen.gpa;
        return self.decls.put(gpa, ref, decl);
    }

    /// Walk this scope and its ancestors for `ref`; innermost match wins.
    fn lookup(self: *Scope, ref: Ir.NullTerminatedString) ?Decl {
        var current_scope: ?*Scope = self;
        while (current_scope) |scope| : (current_scope = scope.parent) {
            const result = scope.decls.get(ref);
            if (result) |symbol| return symbol;
        }
        return null;
    }
};

/// Patch the (previously `undefined`) payload of declaration instruction
/// `decl_index` with name/value/flags. Bit 0x01 of flags marks a const decl.
fn setDeclaration(
    decl_index: Ir.Inst.Index,
    args: struct {
        name: Ir.NullTerminatedString,
        value: Ir.Inst.Index,
        gi: *GenIr,
        node: *const Ast.Node,
    },
) !void {
    const astgen = args.gi.astgen;
    const extra_len = @typeInfo(Ir.Inst.Declaration).@"struct".fields.len;
    try astgen.extra.ensureUnusedCapacity(astgen.gpa, extra_len);
    const inst_data = &astgen.instructions.items[@intFromEnum(decl_index)].data;
    inst_data.payload = .{
        .src_offset = @intCast(args.node.loc.start),
        .extra_index = astgen.addExtraAssumeCapacity(Ir.Inst.Declaration{
            .name = args.name,
            .value = args.value,
            .flags = if (args.node.tag == .const_decl) 0x01 else 0x00,
        }),
    };
}

/// Patch a var declaration's payload with its initializer body. Unstacks
/// `body_block`.
fn setDeclVarPayload(
    decl_index: Ir.Inst.Index,
    body_block: *GenIr,
    node: *const Ast.Node,
) !void {
    defer body_block.unstack();
    const astgen = body_block.astgen;
    const body = body_block.instructionsSlice();
    const extra_len = @typeInfo(Ir.Inst.Var).@"struct".fields.len + body.len;
    try astgen.extra.ensureUnusedCapacity(astgen.gpa, extra_len);
    const inst_data = &astgen.instructions.items[@intFromEnum(decl_index)].data;
    inst_data.payload = .{
        .src_offset = @intCast(node.loc.start),
        .extra_index = astgen.addExtraAssumeCapacity(Ir.Inst.Var{
            .body_len = @intCast(body.len),
        }),
    };
    astgen.appendBlockBody(body);
}

/// Patch a stitch declaration's payload with its body. Unstacks `body_block`.
fn setDeclStitchPayload(decl_index: Ir.Inst.Index, body_block: *GenIr) !void {
    defer body_block.unstack();
    const astgen = body_block.astgen;
    const block_body = body_block.instructionsSlice();
    // Fix: reserve capacity based on `Ir.Inst.Stitch` — the struct actually
    // written below — not `Ir.Inst.Knot` (copy-paste from setDeclKnotPayload).
    // With the wrong type, addExtraAssumeCapacity could overrun the reserved
    // capacity whenever Stitch has more fields than Knot.
    const extra_len = @typeInfo(Ir.Inst.Stitch).@"struct".fields.len + block_body.len;
    try astgen.extra.ensureUnusedCapacity(astgen.gpa, extra_len);
    const inst_data = &astgen.instructions.items[@intFromEnum(decl_index)].data;
    inst_data.payload.extra_index = astgen.addExtraAssumeCapacity(
        Ir.Inst.Stitch{
            .body_len = @intCast(block_body.len),
        },
    );
    astgen.appendBlockBody(block_body);
}

/// Patch a knot declaration's payload with its body and stitch list.
/// Unstacks both blocks.
fn setDeclKnotPayload(
    decl_index: Ir.Inst.Index,
    body_block: *GenIr,
    stitches_block: *GenIr,
) !void {
    defer body_block.unstack();
    defer stitches_block.unstack();
    const astgen = body_block.astgen;
    const block_body = body_block.instructionsSliceUpto(stitches_block);
    const stitches_body = stitches_block.instructionsSlice();
const extra_len = @typeInfo(Ir.Inst.Knot).@"struct".fields.len + block_body.len + stitches_body.len; try astgen.extra.ensureUnusedCapacity(astgen.gpa, extra_len); const inst_data = &astgen.instructions.items[@intFromEnum(decl_index)].data; inst_data.payload.extra_index = astgen.addExtraAssumeCapacity( Ir.Inst.Knot{ .body_len = @intCast(block_body.len), .stitches_len = @intCast(stitches_body.len), }, ); astgen.appendBlockBody(block_body); astgen.appendBlockBody(stitches_body); } fn setCondBrPayload( condbr: Ir.Inst.Index, cond: Ir.Inst.Ref, then_block: *GenIr, else_block: *GenIr, ) !void { defer then_block.unstack(); defer else_block.unstack(); const astgen = then_block.astgen; const then_body = then_block.instructionsSliceUpto(else_block); const else_body = else_block.instructionsSlice(); const extra_len = @typeInfo(Ir.Inst.CondBr).@"struct".fields.len + then_body.len + else_body.len; try astgen.extra.ensureUnusedCapacity(astgen.gpa, extra_len); const inst_data = &astgen.instructions.items[@intFromEnum(condbr)].data; inst_data.payload.extra_index = astgen.addExtraAssumeCapacity( Ir.Inst.CondBr{ .condition = cond, .then_body_len = @intCast(then_body.len), .else_body_len = @intCast(else_body.len), }, ); astgen.appendBlockBody(then_body); astgen.appendBlockBody(else_body); } fn unaryOp( gi: *GenIr, scope: *Scope, expr_node: *const Ast.Node, op: Ir.Inst.Tag, ) InnerError!Ir.Inst.Ref { const data = expr_node.data.bin; const lhs = try expr(gi, scope, data.lhs.?); return gi.addUnaryNode(op, lhs); } fn binaryOp( gi: *GenIr, scope: *Scope, expr_node: *const Ast.Node, op: Ir.Inst.Tag, ) InnerError!Ir.Inst.Ref { const data = expr_node.data.bin; const lhs = try expr(gi, scope, data.lhs.?); const rhs = try expr(gi, scope, data.rhs.?); return gi.addBinaryNode(op, lhs, rhs); } fn boolBinaryOp( gi: *GenIr, scope: *Scope, node: *const Ast.Node, ir_tag: Ir.Inst.Tag, ) InnerError!Ir.Inst.Ref { const data = node.data.bin; const lhs_node = data.lhs.?; const rhs_node = data.rhs.?; const 
lhs = try expr(gi, scope, lhs_node); const bool_br = (try gi.addPayloadNodeWithIndex(ir_tag, node, undefined)).toIndex().?; var rhs_block = gi.makeSubBlock(); defer rhs_block.unstack(); const rhs = try expr(&rhs_block, scope, rhs_node); if (!rhs_block.endsWithNoReturn()) { _ = try rhs_block.addBreak(.break_inline, rhs_node, bool_br, rhs); } try rhs_block.setBoolBrBody(bool_br, lhs); const block_ref = bool_br.toRef(); return block_ref; } fn parseNumberLiteral(bytes: []const u8) union(enum) { int: i64, float: f64, failure: union(enum) { duplicate_period: usize, invalid_character: usize, }, } { var is_float = false; var period = false; for (bytes, 0..) |c, i| { switch (c) { '.' => { if (period) return .{ .failure = .{ .duplicate_period = i } }; period = true; is_float = true; }, '0'...'9' => {}, else => return .{ .failure = .{ .invalid_character = i } }, } } if (is_float) { const value = std.fmt.parseFloat(f64, bytes) catch |err| switch (err) { error.InvalidCharacter => unreachable, }; return .{ .float = value }; } else { const value = std.fmt.parseInt(i64, bytes, 10) catch |err| switch (err) { error.InvalidCharacter => unreachable, error.Overflow => unreachable, }; return .{ .int = value }; } } fn numberLiteral(block: *GenIr, node: *const Ast.Node) InnerError!Ir.Inst.Ref { const astgen = block.astgen; const lexeme = astgen.tree.nodeSlice(node); switch (parseNumberLiteral(lexeme)) { .int => |int| return block.addInt(int), .float => |float| return block.addFloat(float), // TODO: exact offset reporting .failure => return fail(block.astgen, node, "invalid number literal", .{}), } } fn stringLiteral(gi: *GenIr, node: *const Ast.Node) InnerError!Ir.Inst.Ref { const str = try gi.astgen.strFromNode(node); return gi.addStr(str.index, str.len); } fn stringExpr(gen: *GenIr, expr_node: *const Ast.Node) InnerError!Ir.Inst.Ref { const first_node = expr_node.data.bin.lhs.?; return stringLiteral(gen, first_node); } fn identifier( block: *GenIr, scope: *Scope, node: *const Ast.Node, 
) InnerError!Ir.Inst.Ref {
    const str = try block.astgen.strFromNode(node);
    if (scope.lookup(str.index)) |decl| {
        // Known local/global declaration: load its value.
        return block.addUnaryNode(.load, decl.inst_index.toRef());
    }
    // Unknown here: emit a by-name reference to be resolved later.
    return block.addStrTok(.decl_ref, str.index, node.loc.start);
}

/// Lower an expression node. Statement-level and declaration-level tags are
/// handled by their dedicated functions and must never reach here.
fn expr(gi: *GenIr, scope: *Scope, optional_node: ?*const Ast.Node) InnerError!Ir.Inst.Ref {
    const node = optional_node.?;
    switch (node.tag) {
        .file => unreachable,
        .true_literal => return .bool_true,
        .false_literal => return .bool_false,
        .number_literal => return numberLiteral(gi, node),
        .string_literal => return stringLiteral(gi, node),
        .string_expr => return stringExpr(gi, node),
        .empty_string => return stringLiteral(gi, node),
        .identifier => return identifier(gi, scope, node),
        // Arithmetic.
        .add_expr => return binaryOp(gi, scope, node, .add),
        .subtract_expr => return binaryOp(gi, scope, node, .sub),
        .multiply_expr => return binaryOp(gi, scope, node, .mul),
        .divide_expr => return binaryOp(gi, scope, node, .div),
        .mod_expr => return binaryOp(gi, scope, node, .mod),
        .negate_expr => return unaryOp(gi, scope, node, .neg),
        // Boolean; and/or short-circuit via boolBinaryOp.
        .logical_not_expr => return unaryOp(gi, scope, node, .not),
        .logical_and_expr => return boolBinaryOp(gi, scope, node, .bool_br_and),
        .logical_or_expr => return boolBinaryOp(gi, scope, node, .bool_br_or),
        // Comparisons.
        .logical_equality_expr => return binaryOp(gi, scope, node, .cmp_eq),
        .logical_inequality_expr => return binaryOp(gi, scope, node, .cmp_neq),
        .logical_greater_expr => return binaryOp(gi, scope, node, .cmp_gt),
        .logical_greater_or_equal_expr => return binaryOp(gi, scope, node, .cmp_gte),
        .logical_lesser_expr => return binaryOp(gi, scope, node, .cmp_lt),
        .logical_lesser_or_equal_expr => return binaryOp(gi, scope, node, .cmp_lte),
        .call_expr => return callExpr(gi, scope, node, .call),
        .choice_expr => unreachable,
        .divert_expr => unreachable,
        .selector_expr => return fieldAccess(gi, scope, node),
        // Statements — lowered by their own functions, never via expr().
        .assign_stmt => unreachable,
        .block_stmt => unreachable,
        .content_stmt => unreachable,
        .divert_stmt => unreachable,
        .return_stmt => unreachable,
        .expr_stmt => unreachable,
        .choice_stmt => unreachable,
        .choice_star_stmt => unreachable,
        .choice_plus_stmt => unreachable,
        .gather_point_stmt => unreachable,
        .gathered_stmt => unreachable,
        // Declarations — handled at declaration-lowering level.
        .function_prototype => unreachable,
        .stitch_prototype => unreachable,
        .knot_prototype => unreachable,
        .function_decl => unreachable,
        .stitch_decl => unreachable,
        .knot_decl => unreachable,
        .const_decl => unreachable,
        .var_decl => unreachable,
        .list_decl => unreachable,
        .temp_decl => unreachable,
        .parameter_decl => unreachable,
        .ref_parameter_decl => unreachable,
        .argument_list => unreachable,
        .parameter_list => unreachable,
        .switch_stmt => unreachable, // Handled in switchStmt
        .switch_case => unreachable, // Handled in switchStmt
        .if_stmt => unreachable, // Handled in ifStmt
        .multi_if_stmt => unreachable, // Handled in multiIfStmt
        .if_branch => unreachable, // Handled in ifStmt and multiIfStmt
        .else_branch => unreachable, // Handled in switchStmt, multiIfStmt, and ifStmt
        .content => unreachable,
        .inline_logic_expr => unreachable,
        .inline_if_stmt => unreachable,
        .invalid => unreachable,
    }
}

/// Lower an expression statement; a bare statement with no expression yields
/// `.none`.
fn exprStmt(gi: *GenIr, scope: *Scope, node: *const Ast.Node) InnerError!Ir.Inst.Ref {
    const data = node.data.bin;
    if (data.lhs) |lhs| return expr(gi, scope, lhs);
    return .none;
}

/// Lower an inline `{ ... }` logic expression; empty yields `.none`.
fn inlineLogicExpr(gi: *GenIr, scope: *Scope, node: *const Ast.Node) InnerError!Ir.Inst.Ref {
    const data = node.data.bin;
    if (data.lhs) |lhs| return expr(gi, scope, lhs);
    return .none;
}

/// Lower an inline conditional: condition evaluated in a block, then a condbr
/// whose then-branch renders the content and whose else-branch is an
/// immediate break.
fn inlineIfStmt(gi: *GenIr, scope: *Scope, node: *const Ast.Node) InnerError!Ir.Inst.Ref {
    const gpa = gi.astgen.gpa;
    const data = node.data.bin;
    const cond_expr = data.lhs.?;
    var block_scope = gi.makeSubBlock();
    defer block_scope.unstack();
    const cond_inst = try expr(&block_scope, scope, cond_expr);
    const condbr = try block_scope.addCondBr(.condbr);
    const block = try gi.makePayloadNode(.block);
    try block_scope.setBlockBody(block);
    try gi.instructions.append(gpa, block);
    var then_block = gi.makeSubBlock();
    defer
then_block.unstack();
    if (data.rhs) |rhs| {
        // TODO: Revisit this. This isn't quite correct.
        switch (rhs.tag) {
            .content => _ = try content(&then_block, scope, rhs, false, false),
            inline else => |tag| @panic("Unexpected node type: " ++ @tagName(tag)),
        }
    }
    if (!then_block.endsWithNoReturn()) {
        _ = try then_block.addBreak(.@"break", node, block, .void);
    }
    var else_block = gi.makeSubBlock();
    defer else_block.unstack();
    // Else branch renders nothing: immediately break out of the block.
    _ = try else_block.addBreak(.@"break", node, block, .void);
    try setCondBrPayload(condbr, cond_inst, &then_block, &else_block);
    return condbr.toRef();
}

/// Validate the prong list of an if/multi-if: an optional leading block, then
/// if-branches, with `else` (if any) strictly last.
fn validateIfCases(astgen: *AstGen, cases: []const *Ast.Node) InnerError!void {
    assert(cases.len != 0);
    var seen_else = false;
    for (cases, 0..) |case_node, i| {
        switch (case_node.tag) {
            .block_stmt => {
                // A bare block is only permitted as the first prong.
                if (i != 0) return fail(astgen, case_node, "unexpected block in conditional prong list", .{});
            },
            .if_branch => {
                if (seen_else) return fail(astgen, case_node, "branch after else is unreachable", .{});
            },
            .else_branch => {
                if (i != cases.len - 1) return fail(astgen, case_node, "'else' case should always be the final case in conditional", .{});
                if (seen_else) return fail(astgen, case_node, "duplicate else branch", .{});
                seen_else = true;
            },
            else => return fail(astgen, case_node, "unexpected node in conditional prong list", .{}),
        }
    }
}

/// Lower a single `if` (optionally with one `else`): condition in its own
/// block, then a condbr whose branches break to a shared result block.
fn ifStmt(gi: *GenIr, scope: *Scope, node: *const Ast.Node) InnerError!Ir.Inst.Ref {
    const astgen = gi.astgen;
    const gpa = astgen.gpa;
    const data = node.data.switch_stmt;
    const cases = data.cases;
    const cond_expr = data.condition_expr.?;
    try validateIfCases(astgen, cases);
    var block_scope = gi.makeSubBlock();
    defer block_scope.unstack();
    const cond_inst = try expr(&block_scope, scope, cond_expr);
    const condbr = try block_scope.addCondBr(.condbr);
    const block = try gi.makePayloadNode(.block);
    try block_scope.setBlockBody(block);
    try gi.instructions.append(gpa, block);
    var then_block = gi.makeSubBlock();
    defer then_block.unstack();
    // The then body is either a bare block prong or the body of an if-branch.
    const then_body = switch (cases[0].tag) {
        .block_stmt => cases[0],
        .if_branch => cases[0].data.bin.rhs.?,
        else => unreachable,
    };
    try blockStmt(&then_block, scope, then_body);
    if (!then_block.endsWithNoReturn()) {
        _ = try then_block.addBreak(.@"break", then_body, block, .void);
    }
    var else_block = gi.makeSubBlock();
    defer else_block.unstack();
    const else_case = if (cases.len > 1) cases[cases.len - 1] else null;
    if (else_case) |else_stmt| {
        // NOTE(review): when the last case exists but is not an else_branch,
        // the else block is left empty with no break — confirm that an empty
        // else body is valid for condbr here.
        if (else_stmt.tag == .else_branch) {
            const else_body = else_stmt.data.bin.rhs.?;
            try blockStmt(&else_block, scope, else_body);
            if (!else_block.endsWithNoReturn()) {
                _ = try else_block.addBreak(.@"break", else_body, block, .void);
            }
        }
    } else {
        _ = try else_block.addBreak(.@"break", then_body, block, .void);
    }
    try setCondBrPayload(condbr, cond_inst, &then_block, &else_block);
    return condbr.toRef();
}

/// Recursively lower a list of if/else branches into a chain of nested
/// condbrs; a leading else_branch terminates the chain.
fn ifChain(gi: *GenIr, scope: *Scope, branch_list: []const *Ast.Node) InnerError!void {
    const gpa = gi.astgen.gpa;
    if (branch_list.len == 0) return;
    const branch = branch_list[0];
    if (branch.tag == .else_branch) {
        const data = branch.data.bin;
        try blockStmt(gi, scope, data.rhs.?);
        return;
    }
    const data = branch.data.bin;
    const body = data.rhs.?;
    assert(branch.tag == .if_branch);
    var block_scope = gi.makeSubBlock();
    defer block_scope.unstack();
    const cond_inst = try expr(&block_scope, scope, data.lhs.?);
    const condbr = try block_scope.addCondBr(.condbr);
    const block_inst = try gi.makePayloadNode(.block);
    try block_scope.setBlockBody(block_inst);
    try gi.instructions.append(gpa, block_inst);
    var then_block = gi.makeSubBlock();
    defer then_block.unstack();
    try blockStmt(&then_block, scope, body);
    if (!then_block.endsWithNoReturn()) {
        _ = try then_block.addBreak(.@"break", body, block_inst, .void);
    }
    var else_block = gi.makeSubBlock();
    defer else_block.unstack();
    // Remaining branches become the else body (recursion).
    try ifChain(&else_block, scope, branch_list[1..]);
    try setCondBrPayload(condbr, cond_inst, &then_block, &else_block);
}

/// Lower a multi-branch conditional (`if` / `- else if` / `- else`) as a
/// chain of condbrs via `ifChain`.
fn multiIfStmt(gi: *GenIr, scope: *Scope, node: *const Ast.Node) InnerError!Ir.Inst.Ref {
    const data =
node.data.switch_stmt;
    try validateIfCases(gi.astgen, data.cases);
    try ifChain(gi, scope, data.cases);
    return .none;
}

/// Lower a switch: validate prong ordering, evaluate the condition, then emit
/// one extra-encoded case per prong and a final else body (an empty break if
/// the source has no else).
fn switchStmt(gi: *GenIr, scope: *Scope, node: *const Ast.Node) InnerError!Ir.Inst.Ref {
    const astgen = gi.astgen;
    const gpa = astgen.gpa;
    const data = node.data.switch_stmt;
    const cases = data.cases;
    var seen_else = false;
    assert(cases.len > 0);
    // Structural validation pass: else (if present) must be the last prong.
    for (cases, 0..) |case_node, i| {
        const is_last = i == cases.len - 1;
        switch (case_node.tag) {
            .switch_case => {
                if (seen_else) return fail(astgen, case_node, "case after else is unreachable", .{});
            },
            .else_branch => {
                if (!is_last) return fail(astgen, case_node, "'else' case should always be the final case in conditional", .{});
                if (seen_else) return fail(astgen, case_node, "duplicate else branch", .{});
                seen_else = true;
            },
            else => return fail(astgen, case_node, "unexpected node in switch prong list", .{}),
        }
    }
    const cond_inst = try expr(gi, scope, data.condition_expr);
    const switch_br = try gi.makePayloadNode(.switch_br);
    // Per-case extra indexes, appended to the payload after all cases.
    var case_indexes: std.ArrayListUnmanaged(u32) = .empty;
    try case_indexes.ensureUnusedCapacity(gpa, cases.len);
    defer case_indexes.deinit(gpa);
    const switch_cases = if (seen_else) cases[0 .. cases.len - 1] else cases;
    for (switch_cases) |case_stmt| {
        assert(case_stmt.tag == .switch_case);
        const case_data = case_stmt.data.bin;
        const case_expr = case_data.lhs.?;
        // Case operands are restricted to literal constants.
        const operand: Ir.Inst.Ref = switch (case_expr.tag) {
            .number_literal => try numberLiteral(gi, case_expr),
            .true_literal => .bool_true,
            .false_literal => .bool_false,
            else => return fail(astgen, case_expr, "invalid switch case operand", .{}),
        };
        var case_block = gi.makeSubBlock();
        defer case_block.unstack();
        _ = try blockStmt(&case_block, scope, case_data.rhs.?);
        if (!case_block.endsWithNoReturn()) {
            _ = try case_block.addBreak(.@"break", case_stmt, switch_br, .void);
        }
        const body = case_block.instructionsSlice();
        const case_extra_len = @typeInfo(Ir.Inst.SwitchBr.Case).@"struct".fields.len + body.len;
        try astgen.extra.ensureUnusedCapacity(gpa, case_extra_len);
        const extra_index = astgen.addExtraAssumeCapacity(Ir.Inst.SwitchBr.Case{
            .operand = operand,
            .body_len = @intCast(body.len),
        });
        astgen.appendBlockBody(body);
        case_indexes.appendAssumeCapacity(extra_index);
    }
    try gi.instructions.append(gpa, switch_br);
    var else_block = gi.makeSubBlock();
    defer else_block.unstack();
    if (seen_else) {
        const else_branch = cases[cases.len - 1];
        assert(else_branch.tag == .else_branch);
        const else_data = else_branch.data.bin;
        _ = try blockStmt(&else_block, scope, else_data.rhs.?);
        if (!else_block.endsWithNoReturn()) {
            _ = try else_block.addBreak(.@"break", else_branch, switch_br, .void);
        }
    } else {
        // No source else: synthesize an empty fall-through body.
        _ = try else_block.addBreak(.@"break", node, switch_br, .void);
    }
    const else_body = else_block.instructionsSlice();
    const extra_len = @typeInfo(Ir.Inst.SwitchBr).@"struct".fields.len + case_indexes.items.len + else_body.len;
    try astgen.extra.ensureUnusedCapacity(gpa, extra_len);
    astgen.instructions.items[@intFromEnum(switch_br)].data.payload = .{
        .extra_index = astgen.addExtraAssumeCapacity(Ir.Inst.SwitchBr{
            .operand = cond_inst,
            .cases_len = @intCast(switch_cases.len),
            .else_body_len = @intCast(else_body.len),
        }),
        .src_offset = @intCast(node.loc.start),
    };
    astgen.extra.appendSliceAssumeCapacity(case_indexes.items);
    astgen.appendBlockBody(else_body);
    return switch_br.toRef();
}

/// Lower one content node: optional leading glue, each child item pushed or
/// branched, and (when `is_last`) the line terminator plus trailing
/// glue/divert handling. `ignore_divert` suppresses the trailing divert
/// (used by choice lowering, which emits it itself).
fn content(
    block: *GenIr,
    scope: *Scope,
    node: *const Ast.Node,
    is_last: bool,
    ignore_divert: bool,
) InnerError!void {
    const data = node.data.content;
    if (data.leading_glue) {
        _ = try block.addUnaryNode(.content_glue, .none);
    }
    for (data.items) |child_node| {
        switch (child_node.tag) {
            .string_literal => {
                const result = try stringLiteral(block, child_node);
                _ = try block.addUnaryNode(.content_push, result);
            },
            .inline_logic_expr => {
                const result = try inlineLogicExpr(block, scope, child_node);
                _ = try block.addUnaryNode(.content_push, result);
            },
            .if_stmt => _ = try ifStmt(block, scope, child_node),
            .multi_if_stmt => _ = try multiIfStmt(block, scope, child_node),
            .switch_stmt => _ = try switchStmt(block, scope, child_node),
            .inline_if_stmt => _ = try inlineIfStmt(block, scope, child_node),
            .divert_expr => _ = try divertExpr(block, scope, child_node),
            else => unreachable,
        }
    }
    if (is_last) {
        _ = try block.addUnaryNode(.content_line, .none);
        if (data.trailing_glue or data.trailing_divert != null) {
            _ = try block.addUnaryNode(.content_glue, .none);
        }
        if (!ignore_divert) {
            if (data.trailing_divert) |trailing| {
                _ = try divertExpr(block, scope, trailing);
                _ = try block.addUnaryNode(.content_glue, .none);
            }
        }
    }
}

/// Lower a content statement: each item lowered via `content`, with only the
/// final item flagged `is_last`.
fn contentStmt(gi: *GenIr, scope: *Scope, node: *const Ast.Node) !void {
    const items = node.data.list.items;
    for (items, 0..)
|n, i| { const is_last = i == items.len - 1; try content(gi, scope, n, is_last, false); } } fn assignStmt(gi: *GenIr, scope: *Scope, node: *const Ast.Node) InnerError!void { const astgen = gi.astgen; const identifier_node = node.data.bin.lhs.?; const expr_node = node.data.bin.rhs.?; const name_str = try astgen.strFromNode(identifier_node); if (scope.lookup(name_str.index)) |decl| { const expr_result = try expr(gi, scope, expr_node); _ = try gi.addBinaryNode(.store, decl.inst_index.toRef(), expr_result); return; } return fail(astgen, identifier_node, "unknown identifier", .{}); } fn choiceStmt(gi: *GenIr, scope: *Scope, node: *const Ast.Node) InnerError!void { const astgen = gi.astgen; const gpa = astgen.gpa; const data = node.data.list; const choice_br = try gi.makePayloadNode(.choice_br); var trailing_divert: ?*Ast.Node = null; var case_indexes: std.ArrayListUnmanaged(u32) = .empty; try case_indexes.ensureUnusedCapacity(gpa, data.items.len); defer case_indexes.deinit(gpa); for (data.items) |branch_stmt| { assert(branch_stmt.tag == .choice_star_stmt or branch_stmt.tag == .choice_plus_stmt); const branch_data = branch_stmt.data.bin; const branch_expr = branch_data.lhs.?.data.choice_expr; var block_1 = gi.makeSubBlock(); defer block_1.unstack(); if (branch_expr.start_expr) |lhs| { for (lhs) |n| { if (n.data.content.trailing_divert) |trailing| { trailing_divert = trailing; } _ = try content(&block_1, scope, n, false, true); } } var block_2 = block_1.makeSubBlock(); defer block_2.unstack(); if (branch_expr.option_expr) |mhs| { for (mhs) |n| { assert(n.data.content.trailing_divert == null); _ = try content(&block_2, scope, n, false, true); } } var block_3 = block_2.makeSubBlock(); defer block_3.unstack(); if (branch_expr.inner_expr) |rhs| { for (rhs) |n| { if (n.data.content.trailing_divert) |trailing| { trailing_divert = trailing; } _ = try content(&block_3, scope, n, false, true); } } var body_block = block_3.makeSubBlock(); defer body_block.unstack(); if 
(trailing_divert) |trailing| {
    // A trailing divert captured from the branch content takes priority
    // over an explicit branch body.
    _ = try divertExpr(&body_block, scope, trailing);
} else if (branch_data.rhs) |branch_body| {
    _ = try blockStmt(&body_block, scope, branch_body);
}
if (!body_block.endsWithNoReturn()) {
    _ = try body_block.addUnaryNode(.implicit_ret, .none);
}
// The nested sub-blocks share one instruction stack; slice out each
// segment relative to its child block.
const lhs_body = block_1.instructionsSliceUpto(&block_2);
const mhs_body = block_2.instructionsSliceUpto(&block_3);
const rhs_body = block_3.instructionsSliceUpto(&body_block);
const body = body_block.instructionsSlice();
const case_extra_len = @typeInfo(Ir.Inst.ChoiceBr.Case).@"struct".fields.len +
    lhs_body.len + mhs_body.len + rhs_body.len + body.len;
try astgen.extra.ensureUnusedCapacity(gpa, case_extra_len);
const extra_index = astgen.addExtraAssumeCapacity(
    Ir.Inst.ChoiceBr.Case{
        .lhs_len = @intCast(lhs_body.len),
        .mhs_len = @intCast(mhs_body.len),
        .rhs_len = @intCast(rhs_body.len),
        .body_len = @intCast(body.len),
    },
);
astgen.appendBlockBody(lhs_body);
astgen.appendBlockBody(mhs_body);
astgen.appendBlockBody(rhs_body);
astgen.appendBlockBody(body);
case_indexes.appendAssumeCapacity(extra_index);
}
try gi.instructions.append(gpa, choice_br);
const extra_len = @typeInfo(Ir.Inst.ChoiceBr).@"struct".fields.len + case_indexes.items.len;
try astgen.extra.ensureUnusedCapacity(gpa, extra_len);
// Back-patch the reserved choice_br payload now that all cases are lowered.
astgen.instructions.items[@intFromEnum(choice_br)].data.payload = .{
    .extra_index = astgen.addExtraAssumeCapacity(
        Ir.Inst.ChoiceBr{
            .cases_len = @intCast(data.items.len),
        },
    ),
    .src_offset = @intCast(node.loc.start),
};
astgen.extra.appendSliceAssumeCapacity(case_indexes.items[0..]);
}

/// Result of resolving a call/divert target: either a direct callee reference
/// or a field access (`a.b`) split into object pointer and field name.
const Callee = union(enum) {
    field: struct {
        obj_ptr: Ir.Inst.Ref,
        /// Offset into `string_bytes`.
field_name_start: Ir.NullTerminatedString,
    },
    direct: Ir.Inst.Ref,
};

/// Lowers `lhs.field` into a `field_ptr` instruction.
fn fieldAccess(gi: *GenIr, scope: *Scope, node: *const Ast.Node) InnerError!Ir.Inst.Ref {
    assert(node.tag == .selector_expr);
    const data = node.data.bin;
    const rhs = data.rhs.?;
    assert(rhs.tag == .identifier);
    const field_str = try gi.astgen.strFromNode(rhs);
    const lhs = try expr(gi, scope, data.lhs.?);
    return gi.addPayloadNode(.field_ptr, rhs, Ir.Inst.Field{
        .lhs = lhs,
        .field_name_start = field_str.index,
    });
}

/// calleeExpr generates the function part of a call expression (f in f(x)), but
/// *not* the callee argument for the call. Its purpose is to distinguish
/// between standard calls and method call syntax `a.b()`. Thus, if the lhs
/// is a field access, we return using the `field` union field;
/// otherwise, we use the `direct` union field.
fn calleeExpr(gi: *GenIr, scope: *Scope, node: *const Ast.Node) InnerError!Callee {
    switch (node.tag) {
        .selector_expr => {
            const data = node.data.bin;
            const call_target = data.rhs.?;
            assert(call_target.tag == .identifier);
            const field_str = try gi.astgen.strFromNode(call_target);
            const lhs = try expr(gi, scope, data.lhs.?);
            return .{
                .field = .{ .obj_ptr = lhs, .field_name_start = field_str.index },
            };
        },
        .identifier => {
            return .{ .direct = try expr(gi, scope, node) };
        },
        else => unreachable,
    }
}

/// Lowers a call (or divert-to-call) expression. The instruction slot is
/// reserved before the arguments are lowered so argument bodies can break to
/// it; the slot is filled in at the end once the callee kind is known.
fn callExpr(
    gi: *GenIr,
    scope: *Scope,
    node: *const Ast.Node,
    comptime call: enum { divert, call },
) !Ir.Inst.Ref {
    const astgen = gi.astgen;
    const gpa = astgen.gpa;
    const data = node.data.bin;
    const callee_node = data.lhs.?;
    const callee = try calleeExpr(gi, scope, callee_node);
    // FIXME: List nodes should not have optional slices.
    // This hack is an abomination.
    const arguments: ?[]*Ast.Node = if (data.rhs) |args_node| args_node.data.list.items else null;
    const args_count = if (arguments) |args| args.len else 0;
    // Reserve the call instruction slot; it is written at the bottom.
    const call_index: Ir.Inst.Index = @enumFromInt(astgen.instructions.items.len);
    const call_inst = call_index.toRef();
    try gi.astgen.instructions.append(gpa, undefined);
    try gi.instructions.append(gpa, call_index);
    // Scratch layout: one length slot per argument first, then the
    // concatenated argument bodies. Each length slot records the running
    // end offset (relative to scratch_top) after that argument's body.
    const scratch_top = astgen.scratch.items.len;
    defer astgen.scratch.shrinkRetainingCapacity(scratch_top);
    try astgen.scratch.resize(gpa, scratch_top + args_count);
    var scratch_index = scratch_top;
    if (arguments) |args| {
        for (args) |arg| {
            var arg_block = gi.makeSubBlock();
            defer arg_block.unstack();
            const arg_ref = try expr(&arg_block, scope, arg);
            if (!arg_block.endsWithNoReturn()) {
                _ = try arg_block.addBreak(.break_inline, arg, call_index, arg_ref);
            }
            const body = arg_block.instructionsSlice();
            try astgen.scratch.ensureUnusedCapacity(gpa, body.len);
            appendBlockBodyArrayList(astgen, &astgen.scratch, body);
            astgen.scratch.items[scratch_index] = @intCast(astgen.scratch.items.len - scratch_top);
            scratch_index += 1;
        }
    }
    switch (callee) {
        .direct => |callee_obj| {
            const tag = if (call == .divert) .divert else .call;
            const extra_index = try addExtra(astgen, Ir.Inst.Call{
                .callee = callee_obj,
                .args_len = @intCast(args_count),
            });
            if (args_count != 0) {
                try astgen.extra.appendSlice(gpa, astgen.scratch.items[scratch_top..]);
            }
            // Fill in the reserved instruction slot.
            astgen.instructions.items[@intFromEnum(call_index)] = .{ .tag = tag, .data = .{ .payload = .{
                .src_offset = @intCast(node.loc.start),
                .extra_index = extra_index,
            } }, };
        },
        .field => |callee_field| {
            const tag = if (call == .divert) .field_divert else .field_call;
            const extra_index = try addExtra(astgen, Ir.Inst.FieldCall{
                .obj_ptr = callee_field.obj_ptr,
                .field_name_start = callee_field.field_name_start,
                .args_len = @intCast(args_count),
            });
            if (args_count != 0) {
                try astgen.extra.appendSlice(gpa, astgen.scratch.items[scratch_top..]);
            }
            // Fill in the reserved instruction slot.
            astgen.instructions.items[@intFromEnum(call_index)]
= .{ .tag = tag, .data = .{ .payload = .{
    .src_offset = @intCast(node.loc.start),
    .extra_index = extra_index,
} }, };
        },
    }
    return call_inst;
}

/// Lowers a divert target expression.
///
/// `DONE` and `END` are recognized on a bare identifier and lower to the
/// dedicated `done`/`exit` instructions. Any other identifier or field access
/// lowers to an argument-less `divert`/`field_divert`, and a call target is
/// lowered through `callExpr` in divert mode.
fn divertExpr(gi: *GenIr, scope: *Scope, node: *const Ast.Node) !void {
    // FIXME: The AST should always have an args list for these nodes.
    const lhs = node.data.bin.lhs.?;
    switch (lhs.tag) {
        .identifier, .selector_expr => {
            if (lhs.tag == .identifier) {
                // TODO: Revisit this — keyword detection by raw source slice.
                const str_slice = gi.astgen.tree.nodeSlice(lhs);
                if (std.mem.eql(u8, str_slice, "DONE")) {
                    _ = try gi.addUnaryNode(.done, .none);
                    return;
                } else if (std.mem.eql(u8, str_slice, "END")) {
                    _ = try gi.addUnaryNode(.exit, .none);
                    return;
                }
            }
            // Shared path: the .identifier and .selector_expr arms were
            // previously verbatim duplicates; an argument-less divert is
            // emitted for whichever callee kind resolution yields.
            const callee = try calleeExpr(gi, scope, lhs);
            switch (callee) {
                .direct => |callee_obj| {
                    _ = try gi.addPayloadNode(.divert, lhs, Ir.Inst.Call{
                        .callee = callee_obj,
                        .args_len = 0,
                    });
                },
                .field => |callee_field| {
                    _ = try gi.addPayloadNode(.field_divert, lhs, Ir.Inst.FieldCall{
                        .obj_ptr = callee_field.obj_ptr,
                        .field_name_start = callee_field.field_name_start,
                        .args_len = 0,
                    });
                },
            }
        },
        .call_expr => _ = try callExpr(gi, scope, lhs, .divert),
        else => unreachable,
    }
}

/// Lowers a divert statement by delegating to `divertExpr` on its target.
fn divertStmt(gi: *GenIr, scope: *Scope, node: *const Ast.Node) !void {
    // TODO: Revisit this.
    const data = node.data.bin;
    return divertExpr(gi, scope, data.lhs.?);
}

fn returnStmt(gi: *GenIr, scope: *Scope, node: *const Ast.Node) !void {
    // TODO: Revisit this.
const ret_arg = if (node.data.bin.lhs) |lhs| blk: {
    const arg_inst = try expr(gi, scope, lhs);
    break :blk arg_inst;
} else .void;
_ = try gi.addUnaryNode(.ret, ret_arg);
}

/// Lowers a temporary declaration: allocates a slot, stores the initializer,
/// and binds the name in the current scope. Fails on duplicate identifiers.
fn tempDecl(gi: *GenIr, scope: *Scope, decl_node: *const Ast.Node) !void {
    const astgen = gi.astgen;
    const identifier_node = decl_node.data.bin.lhs.?;
    const expr_node = decl_node.data.bin.rhs.?;
    const name_str = try astgen.strFromNode(identifier_node);
    if (scope.lookup(name_str.index)) |_| {
        return fail(astgen, decl_node, "duplicate identifier", .{});
    }
    const alloc_inst = try gi.add(.{ .tag = .alloc, .data = undefined });
    const expr_result = try expr(gi, scope, expr_node);
    _ = try gi.addBinaryNode(.store, alloc_inst, expr_result);
    return scope.insert(name_str.index, .{
        .decl_node = decl_node,
        .inst_index = alloc_inst.toIndex().?,
    });
}

/// Lowers a var/const declaration into a `declaration` instruction wrapping a
/// `decl_var` body, and records the declaration in `astgen.globals`.
/// NOTE(review): unlike `tempDecl`, no duplicate-identifier check is done
/// here — presumably handled elsewhere; confirm.
fn varDecl(gi: *GenIr, scope: *Scope, decl_node: *const Ast.Node) !void {
    const astgen = gi.astgen;
    const gpa = astgen.gpa;
    const identifier_node = decl_node.data.bin.lhs.?;
    const expr_node = decl_node.data.bin.rhs.?;
    const decl_inst = try gi.makePayloadNode(.declaration);
    var decl_block = gi.makeSubBlock();
    defer decl_block.unstack();
    const name_str = try astgen.strFromNode(identifier_node);
    const var_inst = try decl_block.makePayloadNode(.decl_var);
    const rvalue_inst = try expr(&decl_block, scope, expr_node);
    _ = try decl_block.addBreak(.break_inline, decl_node, var_inst, rvalue_inst);
    try setDeclVarPayload(var_inst, &decl_block, identifier_node);
    try setDeclaration(decl_inst, .{
        .name = name_str.index,
        .value = var_inst,
        .gi = gi,
        .node = decl_node,
    });
    try astgen.globals.append(gpa, decl_inst);
}

/// Lowers a statement list inside a fresh child scope; dispatches each
/// statement to its dedicated lowering routine.
fn blockInner(gi: *GenIr, parent_scope: *Scope, stmt_list: []*Ast.Node) !void {
    var child_scope = parent_scope.makeChild();
    defer child_scope.deinit();
    for (stmt_list) |node| {
        _ = switch (node.tag) {
            .var_decl => try varDecl(gi, &child_scope, node),
            .const_decl => try varDecl(gi, &child_scope, node),
            .temp_decl => try tempDecl(gi, &child_scope, node),
            .assign_stmt => try assignStmt(gi, &child_scope, node),
            .content_stmt => try contentStmt(gi, &child_scope, node),
            .choice_stmt => try choiceStmt(gi, &child_scope, node),
            .expr_stmt => try exprStmt(gi, &child_scope, node),
            .divert_stmt => try divertStmt(gi, &child_scope, node),
            .return_stmt => try returnStmt(gi, &child_scope, node),
            inline else => |e| @panic("Unexpected node: " ++ @tagName(e)),
        };
    }
}

/// Lowers a block statement's child list via `blockInner`.
fn blockStmt(block: *GenIr, scope: *Scope, node: *const Ast.Node) InnerError!void {
    const data = node.data.list;
    try blockInner(block, scope, data.items);
}

/// Lowers a file's leading statement block into an implicitly named knot
/// (using `Story.default_knot_name`).
fn defaultBlock(
    gi: *GenIr,
    scope: *Scope,
    body_node: *const Ast.Node,
) InnerError!void {
    const astgen = gi.astgen;
    const data = body_node.data.list;
    const decl_inst = try gi.addAsIndex(.{
        .tag = .declaration,
        .data = .{ .payload = undefined },
    });
    var decl_scope = gi.makeSubBlock();
    defer decl_scope.unstack();
    const knot_inst = try decl_scope.makePayloadNode(.decl_knot);
    try blockInner(&decl_scope, scope, data.items);
    if (!decl_scope.endsWithNoReturn()) {
        _ = try decl_scope.addUnaryNode(.implicit_ret, .none);
    }
    // Empty stub block: this implicit knot has no nested declarations.
    var stub_scope = decl_scope.makeSubBlock();
    defer stub_scope.unstack();
    try setDeclKnotPayload(knot_inst, &decl_scope, &stub_scope);
    const decl_str = try astgen.strFromSlice(Story.default_knot_name);
    try setDeclaration(decl_inst, .{
        .name = decl_str.index,
        .value = knot_inst,
        .gi = gi,
        .node = body_node,
    });
}

/// Information produced when lowering a prototype + body pair.
const KnotInfo = struct {
    decl_name: IndexSlice,
};

/// Lowers a declaration prototype (name + parameters) and its optional body,
/// inserting each parameter into `scope` and appending an implicit return if
/// the body does not already end with a noreturn instruction.
fn prototypeAndBody(
    gi: *GenIr,
    scope: *Scope,
    prototype_node: *const Ast.Node,
    body_node: ?*const Ast.Node,
) InnerError!KnotInfo {
    const decl_name = try gi.astgen.strFromNode(prototype_node.data.bin.lhs.?);
    if (prototype_node.data.bin.rhs) |args_node| {
        const args_data = args_node.data.list;
        for (args_data.items) |arg| {
            assert(arg.tag == .parameter_decl);
            const arg_str = try gi.astgen.strFromNode(arg);
            const arg_inst = try gi.addStrTok(.param, arg_str.index, arg.loc.start);
            // TODO: Maybe make decl accept a ref?
try scope.insert(arg_str.index, .{
    .decl_node = arg,
    .inst_index = arg_inst.toIndex().?,
});
        }
    }
    if (body_node) |body| {
        try blockStmt(gi, scope, body);
    }
    if (!gi.endsWithNoReturn()) {
        _ = try gi.addUnaryNode(.implicit_ret, .none);
    }
    return .{ .decl_name = decl_name };
}

/// Shared lowering for stitch and function declarations, which differ only in
/// the tag of the inner payload instruction (`decl_stitch` vs `decl_function`).
/// Previously the two were verbatim duplicates; the comptime `kind` parameter
/// mirrors the `comptime call: enum { ... }` pattern already used by `callExpr`.
fn callableDecl(
    gi: *GenIr,
    scope: *Scope,
    node: *const Ast.Node,
    comptime kind: enum { stitch, function },
) InnerError!void {
    const data = node.data.bin;
    const prototype_node = data.lhs.?;
    const body_node = data.rhs;
    // Reserve the outer declaration instruction; its payload is set last.
    const decl_inst = try gi.addAsIndex(.{
        .tag = .declaration,
        .data = .{ .payload = undefined },
    });
    var child_block = gi.makeSubBlock();
    defer child_block.unstack();
    var child_scope = scope.makeChild();
    defer child_scope.deinit();
    const knot_info = try prototypeAndBody(&child_block, &child_scope, prototype_node, body_node);
    const tag = if (kind == .stitch) .decl_stitch else .decl_function;
    const inner_inst = try child_block.makePayloadNode(tag);
    try setDeclStitchPayload(inner_inst, &child_block);
    try setDeclaration(decl_inst, .{
        .name = knot_info.decl_name.index,
        .value = inner_inst,
        .gi = gi,
        .node = node,
    });
}

/// Lowers a stitch declaration. See `callableDecl`.
fn stitchDecl(gi: *GenIr, scope: *Scope, node: *const Ast.Node) InnerError!void {
    return callableDecl(gi, scope, node, .stitch);
}

/// Lowers a function declaration. See `callableDecl`.
fn functionDecl(gi: *GenIr, scope: *Scope, node: *const Ast.Node) InnerError!void {
    return callableDecl(gi, scope, node, .function);
}

/// Lowers a knot declaration: prototype plus an optional leading body block,
/// followed by nested stitch/function declarations.
fn knotDecl(gi: *GenIr, scope: *Scope, node: *const Ast.Node) InnerError!void {
    const data = node.data.knot_decl;
    const prototype_node = data.prototype;
    const nested_nodes = data.children;
    const decl_inst = try gi.addAsIndex(.{ .tag =
.declaration, .data = .{ .payload = undefined }, });
    var node_index: usize = 0;
    var child_block = gi.makeSubBlock();
    defer child_block.unstack();
    var child_scope = scope.makeChild();
    defer child_scope.deinit();
    const knot_inst = try gi.makePayloadNode(.decl_knot);
    // An optional leading block_stmt child becomes the knot's own body.
    const body_node: ?*const Ast.Node = blk: {
        if (nested_nodes.len > 0) {
            const first_child = nested_nodes[0];
            if (first_child.tag == .block_stmt) {
                node_index += 1;
                break :blk first_child;
            }
        }
        break :blk null;
    };
    const knot_info = try prototypeAndBody(&child_block, &child_scope, prototype_node, body_node);
    var nested_block = child_block.makeSubBlock();
    defer nested_block.unstack();
    for (nested_nodes[node_index..]) |nested_decl_node| {
        switch (nested_decl_node.tag) {
            .stitch_decl => try stitchDecl(&nested_block, &child_scope, nested_decl_node),
            .function_decl => try functionDecl(&nested_block, &child_scope, nested_decl_node),
            else => unreachable,
        }
    }
    try setDeclKnotPayload(knot_inst, &child_block, &nested_block);
    try setDeclaration(decl_inst, .{
        .name = knot_info.decl_name.index,
        .value = knot_inst,
        .gi = gi,
        .node = node,
    });
}

/// Lowers the root file node: an optional leading block becomes the default
/// knot, followed by top-level knot/stitch/function declarations; finally all
/// accumulated global declaration indexes are appended to the file body.
fn file(gi: *GenIr, scope: *Scope, node: *const Ast.Node) InnerError!void {
    const astgen = gi.astgen;
    const data = node.data.list;
    const file_inst = try gi.addAsIndex(.{
        .tag = .file,
        .data = .{ .payload = undefined },
    });
    var node_index: usize = 0;
    var file_scope = gi.makeSubBlock();
    defer file_scope.unstack();
    if (data.items.len > 0) {
        const first_child = data.items[0];
        if (first_child.tag == .block_stmt) {
            try defaultBlock(&file_scope, scope, first_child);
            node_index += 1;
        }
    }
    for (data.items[node_index..]) |child_node| {
        switch (child_node.tag) {
            .knot_decl => try knotDecl(gi, scope, child_node),
            .stitch_decl => try stitchDecl(gi, scope, child_node),
            .function_decl => try functionDecl(gi, scope, child_node),
            else => unreachable,
        }
    }
    const globals_len = astgen.globals.items.len;
    // Fix: reserve capacity on `gi.instructions` — the list actually appended
    // to below. The previous code reserved on `astgen.instructions` (a
    // different list, holding `Ir.Inst` rather than `Ir.Inst.Index`), so
    // `appendAssumeCapacity` ran without the capacity it assumes.
    try gi.instructions.ensureUnusedCapacity(astgen.gpa, globals_len);
    for (astgen.globals.items) |global| {
        gi.instructions.appendAssumeCapacity(global);
    }
    return file_scope.setBlockBody(file_inst);
}