//! AstGen: lowers a parsed `Ast` into the `Ir` representation
//! (instruction list + `extra` payload array + interned string bytes)
//! via a single tree walk. Entry point is `generate`.
const std = @import("std");
const Ast = @import("Ast.zig");
const Ir = @import("Ir.zig");
const Story = @import("Story.zig");
const StringIndexAdapter = std.hash_map.StringIndexAdapter;
const StringIndexContext = std.hash_map.StringIndexContext;
const assert = std.debug.assert;

const AstGen = @This();

gpa: std.mem.Allocator,
tree: *const Ast,
/// String interning table: keys are byte offsets into `string_bytes`;
/// equality/hashing go through StringIndexContext so equal strings share
/// a single offset.
string_table: std.HashMapUnmanaged(u32, void, StringIndexContext, std.hash_map.default_max_load_percentage) = .empty,
/// Backing storage for interned NUL-terminated strings. Offset 0 is
/// reserved (see `generate`) so it can serve as the "empty" sentinel.
string_bytes: std.ArrayListUnmanaged(u8) = .empty,
/// Flat instruction pool; `Ir.Inst.Index` values index into this list.
instructions: std.ArrayListUnmanaged(Ir.Inst) = .empty,
globals: std.ArrayListUnmanaged(Ir.Global) = .empty,
/// Maps a global's interned name to its index in `globals`; used to
/// detect redefinitions in `setDeclaration`.
global_ref_table: std.AutoHashMapUnmanaged(Ir.NullTerminatedString, usize) = .empty,
/// Trailing u32 payload data referenced by `payload_index` fields.
extra: std.ArrayListUnmanaged(u32) = .empty,
errors: std.ArrayListUnmanaged(Ast.Error) = .empty,

pub const InnerError = error{
    OutOfMemory,
    SemanticError,
    // InvalidCharacter / Overflow come from std.fmt.parseUnsigned in
    // `numberLiteral`.
    InvalidCharacter,
    Overflow,
};

/// Frees all intermediate state. Safe to call after `generate` has moved
/// the lists out via `toOwnedSlice` (deinit of an empty list is a no-op).
pub fn deinit(astgen: *AstGen) void {
    const gpa = astgen.gpa;
    astgen.string_table.deinit(gpa);
    astgen.string_bytes.deinit(gpa);
    astgen.globals.deinit(gpa);
    astgen.global_ref_table.deinit(gpa);
    astgen.instructions.deinit(gpa);
    astgen.extra.deinit(gpa);
    astgen.errors.deinit(gpa);
}

/// Lexical scope: a chain of hash maps from interned name to declaration.
const Scope = struct {
    parent: ?*Scope,
    astgen: *AstGen,
    decls: std.AutoHashMapUnmanaged(Ir.NullTerminatedString, Decl),

    pub const Decl = struct {
        decl_node: *const Ast.Node,
        inst_index: Ir.Inst.Index,
    };

    pub fn deinit(self: *Scope) void {
        const gpa = self.astgen.gpa;
        self.decls.deinit(gpa);
    }

    pub fn makeChild(parent_scope: *Scope) Scope {
        return .{
            .parent = parent_scope,
            .astgen = parent_scope.astgen,
            .decls = .empty,
        };
    }

    pub fn insert(self: *Scope, ref: Ir.NullTerminatedString, decl: Decl) !void {
        const gpa = self.astgen.gpa;
        return self.decls.put(gpa, ref, decl);
    }

    /// Walks from this scope outward to the root, returning the first
    /// declaration bound to `ref`, or null if the name is unbound.
    pub fn lookup(self: *Scope, ref: Ir.NullTerminatedString) ?Decl {
        var current_scope: ?*Scope = self;
        while (current_scope) |scope| : (current_scope = scope.parent) {
            const result = scope.decls.get(ref);
            if (result) |symbol| return symbol;
        }
        return null;
    }
};

/// Instruction builder for one (sub-)block. Sub-blocks share a single
/// scratch list of instruction indices ("stacked" bodies): each sub-block
/// owns the tail of the list starting at `instructions_top`, and `unstack`
/// pops that tail once the body has been copied into `extra`.
const GenIr = struct {
    astgen: *AstGen,
    /// Shared scratch list of instruction indices for the current body.
    instructions: *std.ArrayListUnmanaged(Ir.Inst.Index),
    /// Start of this block's slice of `instructions`, or `unstacked_top`.
    instructions_top: usize,

    const unstacked_top = std.math.maxInt(usize);

    /// Releases this block's tail of the shared scratch list. Idempotent.
    fn unstack(self: *GenIr) void {
        if (self.instructions_top != unstacked_top) {
            self.instructions.items.len = self.instructions_top;
            self.instructions_top = unstacked_top;
        }
    }

    fn isEmpty(self: *const GenIr) bool {
        return (self.instructions_top == unstacked_top) or
            (self.instructions.items.len == self.instructions_top);
    }

    /// This block's body: the tail of the scratch list it owns.
    /// NOTE: the returned slice aliases `instructions.items` and is
    /// invalidated if that list grows.
    fn instructionsSlice(self: *const GenIr) []Ir.Inst.Index {
        return if (self.instructions_top == unstacked_top)
            &[0]Ir.Inst.Index{}
        else
            self.instructions.items[self.instructions_top..];
    }

    /// Like `instructionsSlice`, but stops where `stacked_block` (a
    /// sub-block stacked on top of this one) begins.
    fn instructionsSliceUpto(
        self: *const GenIr,
        stacked_block: *const GenIr,
    ) []Ir.Inst.Index {
        return if (self.instructions_top == unstacked_top)
            &[0]Ir.Inst.Index{}
        else if (self.instructions == stacked_block.instructions and
            stacked_block.instructions_top != unstacked_top)
            self.instructions.items[self.instructions_top..stacked_block.instructions_top]
        else
            self.instructions.items[self.instructions_top..];
    }

    fn fail(
        self: *GenIr,
        tag: Ast.Error.Tag,
        node: *const Ast.Node,
    ) error{ SemanticError, OutOfMemory } {
        return self.astgen.fail(tag, node);
    }

    /// Creates a sub-block stacked on top of this one; the caller must
    /// `unstack` it (typically via `defer`).
    fn makeSubBlock(self: *GenIr) GenIr {
        return .{
            .astgen = self.astgen,
            .instructions = self.instructions,
            .instructions_top = self.instructions.items.len,
        };
    }

    fn add(gi: *GenIr, inst: Ir.Inst) !Ir.Inst.Ref {
        return (try gi.addAsIndex(inst)).toRef();
    }

    /// Appends `inst` to the global pool AND records its index in this
    /// block's body.
    fn addAsIndex(gi: *GenIr, inst: Ir.Inst) !Ir.Inst.Index {
        const gpa = gi.astgen.gpa;
        // Reserve both lists up front so the two appends cannot fail
        // halfway and leave them out of sync.
        try gi.instructions.ensureUnusedCapacity(gpa, 1);
        try gi.astgen.instructions.ensureUnusedCapacity(gpa, 1);
        const new_index: Ir.Inst.Index = @enumFromInt(gi.astgen.instructions.items.len);
        gi.astgen.instructions.appendAssumeCapacity(inst);
        gi.instructions.appendAssumeCapacity(new_index);
        return new_index;
    }

    fn addInt(gi: *GenIr, value: u64) !Ir.Inst.Ref {
        return add(gi, .{ .tag = .integer, .data = .{ .integer = .{ .value = value } } });
    }

    fn addUnaryNode(gi: *GenIr, tag: Ir.Inst.Tag, arg: Ir.Inst.Ref) !Ir.Inst.Ref {
        return add(gi, .{ .tag = tag, .data = .{ .un = .{ .lhs = arg } } });
    }

    fn addBinaryNode(
        gi: *GenIr,
        tag: Ir.Inst.Tag,
        lhs: Ir.Inst.Ref,
        rhs: Ir.Inst.Ref,
    ) !Ir.Inst.Ref {
        return add(gi, .{ .tag = tag, .data = .{ .bin = .{ .lhs = lhs, .rhs = rhs } } });
    }

    fn addDeclRef(gi: *GenIr, decl_ref: Ir.NullTerminatedString) !Ir.Inst.Ref {
        return add(gi, .{ .tag = .decl_ref, .data = .{ .string = .{ .start = decl_ref } } });
    }

    /// Appends a `tag` instruction to the pool with an undefined
    /// `payload_index` (to be patched later) WITHOUT recording it in this
    /// block's body.
    fn makePayloadNode(gi: *GenIr, tag: Ir.Inst.Tag) !Ir.Inst.Index {
        const gpa = gi.astgen.gpa;
        const inst_index: Ir.Inst.Index = @enumFromInt(gi.astgen.instructions.items.len);
        try gi.astgen.instructions.append(gpa, .{
            .tag = tag,
            .data = .{ .payload = .{ .payload_index = undefined } },
        });
        return inst_index;
    }

    fn makeDeclaration(gi: *GenIr) !Ir.Inst.Index {
        return makePayloadNode(gi, .declaration);
    }

    // Equivalent to makePayloadNode; kept as a separate name to mirror
    // the block-instruction call sites.
    fn makeBlockInst(gi: *GenIr, tag: Ir.Inst.Tag) !Ir.Inst.Index {
        const inst_index: Ir.Inst.Index = @enumFromInt(gi.astgen.instructions.items.len);
        const gpa = gi.astgen.gpa;
        try gi.astgen.instructions.append(gpa, .{
            .tag = tag,
            .data = .{ .payload = .{ .payload_index = undefined } },
        });
        return inst_index;
    }

    /// Emits a `decl_knot` instruction whose payload captures this
    /// block's currently stacked body. Does not append the knot to any
    /// body list; the caller references it directly.
    fn addKnot(self: *GenIr) !Ir.Inst.Index {
        const gpa = self.astgen.gpa;
        const body = self.instructionsSlice();
        const extra_len = @typeInfo(Ir.Inst.Knot).@"struct".fields.len + body.len;
        // Reserve `extra` before taking payload space so the
        // AssumeCapacity writes below cannot fail.
        try self.astgen.extra.ensureUnusedCapacity(gpa, extra_len);
        const knot_node = try makePayloadNode(self, .decl_knot);
        const inst_data = &self.astgen.instructions.items[@intFromEnum(knot_node)].data;
        inst_data.payload.payload_index = self.astgen.addExtraAssumeCapacity(
            Ir.Inst.Knot{ .body_len = @intCast(body.len) },
        );
        self.astgen.appendBlockBody(body);
        return knot_node;
    }

    /// Emits a `decl_var` instruction whose payload captures this
    /// block's currently stacked body, and appends it to the body list.
    fn addVar(self: *GenIr) !Ir.Inst.Index {
        const gpa = self.astgen.gpa;
        // BUGFIX: reserve the scratch-list slot *before* taking the body
        // slice. `body` aliases `self.instructions.items`, so growing
        // that list afterwards would leave `body` dangling when
        // `appendBlockBody(body)` reads it.
        try self.instructions.ensureUnusedCapacity(gpa, 1);
        const new_index: Ir.Inst.Index = @enumFromInt(self.astgen.instructions.items.len);
        const body = self.instructionsSlice();
        const extra_len = @typeInfo(Ir.Inst.Var).@"struct".fields.len + body.len;
        try self.astgen.extra.ensureUnusedCapacity(gpa, extra_len);
        try self.astgen.instructions.ensureUnusedCapacity(gpa, 1);
        self.astgen.instructions.appendAssumeCapacity(.{
            .tag = .decl_var,
            .data = .{ .payload = .{ .payload_index = self.astgen.addExtraAssumeCapacity(
                Ir.Inst.Var{ .body_len = @intCast(body.len) },
            ) } },
        });
        self.astgen.appendBlockBody(body);
        self.instructions.appendAssumeCapacity(new_index);
        return new_index;
    }

    /// Emits a conditional branch with an undefined payload; patched
    /// later by `setCondBrPayload`.
    fn addCondBr(self: *GenIr, tag: Ir.Inst.Tag) !Ir.Inst.Index {
        const gpa = self.astgen.gpa;
        try self.instructions.ensureUnusedCapacity(gpa, 1);
        try self.astgen.instructions.ensureUnusedCapacity(gpa, 1);
        const new_index: Ir.Inst.Index = @enumFromInt(self.astgen.instructions.items.len);
        self.astgen.instructions.appendAssumeCapacity(.{
            .tag = tag,
            .data = .{ .payload = .{ .payload_index = undefined } },
        });
        self.instructions.appendAssumeCapacity(new_index);
        return new_index;
    }

    /// `makeBreak` + append the break to this block's body.
    fn addBreak(
        self: *GenIr,
        tag: Ir.Inst.Tag,
        block_inst: Ir.Inst.Index,
    ) !Ir.Inst.Index {
        const gpa = self.astgen.gpa;
        try self.instructions.ensureUnusedCapacity(gpa, 1);
        const new_index = try self.makeBreak(tag, block_inst);
        self.instructions.appendAssumeCapacity(new_index);
        return new_index;
    }

    /// Emits a break targeting `block_inst` without recording it in any
    /// body list.
    fn makeBreak(
        self: *GenIr,
        tag: Ir.Inst.Tag,
        block_inst: Ir.Inst.Index,
    ) !Ir.Inst.Index {
        const gpa = self.astgen.gpa;
        const extra_len = @typeInfo(Ir.Inst.Break).@"struct".fields.len;
        try self.astgen.instructions.ensureUnusedCapacity(gpa, 1);
        try self.astgen.extra.ensureUnusedCapacity(gpa, extra_len);
        const new_index: Ir.Inst.Index = @enumFromInt(self.astgen.instructions.items.len);
        self.astgen.instructions.appendAssumeCapacity(.{
            .tag = tag,
            .data = .{ .payload = .{ .payload_index = self.astgen.addExtraAssumeCapacity(
                Ir.Inst.Break{ .block_inst = block_inst },
            ) } },
        });
        return new_index;
    }

    /// Copies this block's stacked body into `inst`'s Block payload and
    /// unstacks the block.
    fn setBlockBody(self: *GenIr, inst: Ir.Inst.Index) !void {
        const gpa = self.astgen.gpa;
        const body = self.instructionsSlice();
        const extra_len = @typeInfo(Ir.Inst.Block).@"struct".fields.len + body.len;
        try self.astgen.extra.ensureUnusedCapacity(gpa, extra_len);
        const inst_data = &self.astgen.instructions.items[@intFromEnum(inst)].data;
        inst_data.payload.payload_index = self.astgen.addExtraAssumeCapacity(
            Ir.Inst.Block{ .body_len = @intCast(body.len) },
        );
        self.astgen.appendBlockBody(body);
        self.unstack();
    }
};

/// Patches `decl_index`'s Declaration payload and registers the named
/// global. Fails with `redefined_identifier` on duplicate names.
/// Unstacks `args.body_gi` on success.
fn setDeclaration(
    decl_index: Ir.Inst.Index,
    args: struct {
        name: Ir.NullTerminatedString,
        tag: Ir.Global.Tag,
        ref: Ir.Inst.Index,
        decl_node: *const Ast.Node,
        body_gi: *GenIr,
        is_constant: bool = true,
    },
) !void {
    const astgen = args.body_gi.astgen;
    const gpa = astgen.gpa;
    // BUGFIX: reject duplicates *before* writing the payload so the
    // error path leaves no half-written Declaration data in `extra`.
    if (astgen.global_ref_table.get(args.name)) |_| {
        return astgen.fail(.redefined_identifier, args.decl_node);
    }
    const extra_len = @typeInfo(Ir.Inst.Declaration).@"struct".fields.len;
    const global_index = astgen.globals.items.len;
    try astgen.extra.ensureUnusedCapacity(gpa, extra_len);
    try astgen.globals.ensureUnusedCapacity(gpa, 1);
    try astgen.global_ref_table.ensureUnusedCapacity(gpa, 1);
    const inst_data = &astgen.instructions.items[@intFromEnum(decl_index)].data;
    inst_data.payload.payload_index = astgen.addExtraAssumeCapacity(
        Ir.Inst.Declaration{ .name = args.name, .value = args.ref },
    );
    astgen.globals.appendAssumeCapacity(.{
        .tag = args.tag,
        .name = args.name,
        .is_constant = args.is_constant,
    });
    astgen.global_ref_table.putAssumeCapacity(args.name, global_index);
    args.body_gi.unstack();
}

/// Patches `condbr`'s CondBr payload with the condition plus both
/// branch bodies, then unstacks both branch blocks (also on error).
fn setCondBrPayload(
    condbr: Ir.Inst.Index,
    cond: Ir.Inst.Ref,
    then_block: *GenIr,
    else_block: *GenIr,
) !void {
    defer then_block.unstack();
    defer else_block.unstack();
    const astgen = then_block.astgen;
    // The else block is stacked on top of the then block, so the then
    // body must stop where the else body begins.
    const then_body = then_block.instructionsSliceUpto(else_block);
    const else_body = else_block.instructionsSlice();
    const then_body_len = then_body.len;
    const else_body_len = else_body.len;
    const extra_len = @typeInfo(Ir.Inst.CondBr).@"struct".fields.len +
        then_body_len + else_body_len;
    try astgen.extra.ensureUnusedCapacity(astgen.gpa, extra_len);
    const inst_data = &astgen.instructions.items[@intFromEnum(condbr)].data;
    inst_data.payload.payload_index = astgen.addExtraAssumeCapacity(Ir.Inst.CondBr{
        .condition = cond,
        .then_body_len = @intCast(then_body_len),
        .else_body_len = @intCast(else_body_len),
    });
    astgen.appendBlockBody(then_body);
    astgen.appendBlockBody(else_body);
}

/// Serializes `extra`'s fields (u32 or u32-backed enums) into the
/// `extra` array; capacity must already be reserved.
fn addExtraAssumeCapacity(astgen: *AstGen, extra: anytype) u32 {
    const fields = std.meta.fields(@TypeOf(extra));
    const extra_index: u32 = @intCast(astgen.extra.items.len);
    astgen.extra.items.len += fields.len;
    setExtra(astgen, extra_index, extra);
    return extra_index;
}

fn setExtra(astgen: *AstGen, index: usize, extra: anytype) void {
    const fields = std.meta.fields(@TypeOf(extra));
    var i = index;
    inline for (fields) |field| {
        astgen.extra.items[i] = switch (field.type) {
            u32 => @field(extra, field.name),
            Ir.Inst.Index => @intFromEnum(@field(extra, field.name)),
            Ir.Inst.Ref => @intFromEnum(@field(extra, field.name)),
            Ir.NullTerminatedString => @intFromEnum(@field(extra, field.name)),
            else => @compileError("bad field type"),
        };
        i += 1;
    }
}

/// Records a semantic error at `source_node`'s location and returns
/// `error.SemanticError`; `generate` collects these and keeps going.
fn fail(
    self: *AstGen,
    tag: Ast.Error.Tag,
    source_node: *const Ast.Node,
) error{ SemanticError, OutOfMemory } {
    const gpa = self.gpa;
    const err: Ast.Error = .{
        .tag = tag,
        .loc = .{
            .start = source_node.loc.start,
            .end = source_node.loc.end,
        },
    };
    try self.errors.append(gpa, err);
    return error.SemanticError;
}

/// Copies a body (list of instruction indices) into `extra`; capacity
/// must already be reserved.
fn appendBlockBody(self: *AstGen, body: []const Ir.Inst.Index) void {
    for (body) |inst_index| {
        self.extra.appendAssumeCapacity(@intFromEnum(inst_index));
    }
}

/// Source bytes covered by `node`'s location.
fn sliceFromNode(astgen: *const AstGen, node: *const Ast.Node) []const u8 {
    assert(node.loc.start <= node.loc.end);
    const source_bytes = astgen.tree.source;
    return source_bytes[node.loc.start..node.loc.end];
}

/// Interns `bytes`, returning the offset of an existing identical string
/// when possible (speculative append, rolled back on a table hit).
fn stringFromBytes(astgen: *AstGen, bytes: []const u8) error{OutOfMemory}!Ir.NullTerminatedString {
    const gpa = astgen.gpa;
    const str_index: u32 = @intCast(astgen.string_bytes.items.len);
    const string_bytes = &astgen.string_bytes;
    try string_bytes.appendSlice(gpa, bytes);
    const key: []const u8 = string_bytes.items[str_index..];
    const gop = try astgen.string_table.getOrPutContextAdapted(gpa, key, StringIndexAdapter{
        .bytes = string_bytes,
    }, StringIndexContext{
        .bytes = string_bytes,
    });
    if (gop.found_existing) {
        // Already interned: discard the speculative copy.
        string_bytes.shrinkRetainingCapacity(str_index);
        return @enumFromInt(gop.key_ptr.*);
    } else {
        gop.key_ptr.* = str_index;
        try string_bytes.append(gpa, 0);
        return @enumFromInt(str_index);
    }
}

fn stringFromNode(astgen: *AstGen, node: *const Ast.Node) !Ir.NullTerminatedString {
    const name_bytes = sliceFromNode(astgen, node);
    assert(name_bytes.len > 0);
    return astgen.stringFromBytes(name_bytes);
}

fn unaryOp(
    gi: *GenIr,
    scope: *Scope,
    expr_node: *const Ast.Node,
    op: Ir.Inst.Tag,
) InnerError!Ir.Inst.Ref {
    const data = expr_node.data.bin;
    const lhs = try expr(gi, scope, data.lhs.?);
    return gi.addUnaryNode(op, lhs);
}

fn binaryOp(
    gi: *GenIr,
    scope: *Scope,
    expr_node: *const Ast.Node,
    op: Ir.Inst.Tag,
) InnerError!Ir.Inst.Ref {
    const data = expr_node.data.bin;
    assert(data.lhs != null and data.rhs != null);
    const lhs = try expr(gi, scope, data.lhs.?);
    const rhs = try expr(gi, scope, data.rhs.?);
    return gi.addBinaryNode(op, lhs, rhs);
}

// FIXME(review): dead code from an earlier bytecode emitter. `GenIr` has
// no makeLabel/emitJumpInst/makeFixup/emitSimpleInst/setLabel, and the
// discarded `try expr(...)` results would not compile if this were ever
// referenced (expr's .logical_and_expr/.logical_or_expr arms are
// `unreachable`). Kept verbatim; it only survives because Zig analyzes
// functions lazily. Port or delete when short-circuit ops are lowered.
fn logicalOp(
    gen: *GenIr,
    scope: *Scope,
    node: *const Ast.Node,
    op: Story.Opcode,
) InnerError!void {
    const data = node.data.bin;
    assert(data.lhs != null and data.rhs != null);
    try expr(gen, scope, data.lhs);
    const else_label = try gen.makeLabel();
    const fixup_offset = try gen.emitJumpInst(op);
    _ = try gen.makeFixup(.{
        .mode = .relative,
        .label_index = else_label,
        .code_offset = fixup_offset,
    });
    try gen.emitSimpleInst(.pop);
    const rhs_label = try gen.makeLabel();
    gen.setLabel(rhs_label);
    try expr(gen, scope, data.rhs);
    gen.setLabel(else_label);
}

/// Lowers an unsigned decimal literal; InvalidCharacter/Overflow
/// propagate through InnerError.
fn numberLiteral(gen: *GenIr, node: *const Ast.Node) InnerError!Ir.Inst.Ref {
    const lexeme = sliceFromNode(gen.astgen, node);
    const int_value = try std.fmt.parseUnsigned(u64, lexeme, 10);
    return gen.addInt(int_value);
}

fn stringLiteral(gi: *GenIr, node: *const Ast.Node) InnerError!Ir.Inst.Ref {
    const str = try gi.astgen.stringFromNode(node);
    return gi.add(.{ .tag = .string, .data = .{ .string = .{ .start = str } } });
}

fn stringExpr(gen: *GenIr, expr_node: *const Ast.Node) InnerError!Ir.Inst.Ref {
    // Only the first segment is lowered here.
    const first_node = expr_node.data.bin.lhs.?;
    return stringLiteral(gen, first_node);
}

/// Local names lower to a load of their alloc; unknown names fall back
/// to a by-name global reference resolved later.
fn identifier(gi: *GenIr, scope: *Scope, node: *const Ast.Node) InnerError!Ir.Inst.Ref {
    const astgen = gi.astgen;
    const str = try astgen.stringFromNode(node);
    if (scope.lookup(str)) |decl| {
        return gi.addUnaryNode(.load, decl.inst_index.toRef());
    }
    return gi.addDeclRef(str);
}

/// Lowers an expression node. Statement-level and not-yet-implemented
/// tags are `unreachable` here; callers dispatch those separately.
fn expr(gi: *GenIr, scope: *Scope, optional_expr: ?*const Ast.Node) InnerError!Ir.Inst.Ref {
    const expr_node = optional_expr.?;
    switch (expr_node.tag) {
        .file => unreachable,
        .true_literal => return .bool_true,
        .false_literal => return .bool_false,
        .number_literal => return numberLiteral(gi, expr_node),
        .string_literal => return stringLiteral(gi, expr_node),
        .string_expr => return stringExpr(gi, expr_node),
        .empty_string => return stringLiteral(gi, expr_node),
        .identifier => return identifier(gi, scope, expr_node),
        .add_expr => return binaryOp(gi, scope, expr_node, .add),
        .subtract_expr => return binaryOp(gi, scope, expr_node, .sub),
        .multiply_expr => return binaryOp(gi, scope, expr_node, .mul),
        .divide_expr => return binaryOp(gi, scope, expr_node, .div),
        .mod_expr => return binaryOp(gi, scope, expr_node, .mod),
        .negate_expr => return unaryOp(gi, scope, expr_node, .neg),
        .logical_and_expr => unreachable,
        .logical_or_expr => unreachable,
        .logical_not_expr => return unaryOp(gi, scope, expr_node, .not),
        .logical_equality_expr => return binaryOp(gi, scope, expr_node, .cmp_eq),
        .logical_inequality_expr => return binaryOp(gi, scope, expr_node, .cmp_neq),
        .logical_greater_expr => return binaryOp(gi, scope, expr_node, .cmp_gt),
        .logical_greater_or_equal_expr => return binaryOp(gi, scope, expr_node, .cmp_gte),
        .logical_lesser_expr => return binaryOp(gi, scope, expr_node, .cmp_lt),
        .logical_lesser_or_equal_expr => return binaryOp(gi, scope, expr_node, .cmp_lte),
        .call_expr => unreachable,
        .choice_expr => unreachable,
        .choice_start_expr => unreachable,
        .choice_option_expr => unreachable,
        .choice_inner_expr => unreachable,
        .divert_expr => unreachable,
        .selector_expr => unreachable,
        .assign_stmt => unreachable,
        .block_stmt => unreachable,
        .content_stmt => unreachable,
        .divert_stmt => unreachable,
        .return_stmt => unreachable,
        .expr_stmt => unreachable,
        .choice_stmt => unreachable,
        .choice_star_stmt => unreachable,
        .choice_plus_stmt => unreachable,
        .gather_point_stmt => unreachable,
        .gathered_stmt => unreachable,
        .function_prototype => unreachable,
        .stitch_prototype => unreachable,
        .knot_prototype => unreachable,
        .function_decl => unreachable,
        .stitch_decl => unreachable,
        .knot_decl => unreachable,
        .const_decl => unreachable,
        .var_decl => unreachable,
        .list_decl => unreachable,
        .temp_decl => unreachable,
        .parameter_decl => unreachable,
        .ref_parameter_decl => unreachable,
        .argument_list => unreachable,
        .parameter_list => unreachable,
        .switch_stmt => unreachable, // Handled in switchStmt
        .switch_case => unreachable, // Handled in switchStmt
        .if_stmt => unreachable, // Handled in ifStmt
        .multi_if_stmt => unreachable, // Handled in multiIfStmt
        .if_branch => unreachable, // Handled in ifStmt and multiIfStmt
        .else_branch => unreachable, // Handled in switchStmt, multiIfStmt, and ifStmt
        .content => unreachable,
        .inline_logic_expr => unreachable,
        .invalid => unreachable,
    }
}

fn exprStmt(gi: *GenIr, scope: *Scope, node: *const Ast.Node) InnerError!Ir.Inst.Ref {
    // TODO: Maybe we should introduce a unary node type to avoid optional checks?
    const expr_node = node.data.bin.lhs.?;
    return expr(gi, scope, expr_node);
}

fn inlineLogicExpr(gi: *GenIr, scope: *Scope, node: *const Ast.Node) InnerError!Ir.Inst.Ref {
    // TODO: Maybe we should introduce a unary node type to avoid optional checks?
    const main_node = node.data.bin.lhs.?;
    return expr(gi, scope, main_node);
}

/// Validates prong ordering for switch/if-style statements: at most one
/// `else`, and only in the final position.
fn validateSwitchProngs(gen: *GenIr, stmt_node: *const Ast.Node) InnerError!void {
    var stmt_has_block: bool = false;
    var stmt_has_else: bool = false;
    const case_list = stmt_node.data.switch_stmt.cases;
    const last_prong = case_list[case_list.len - 1];
    for (case_list) |case_stmt| {
        switch (case_stmt.tag) {
            .block_stmt => stmt_has_block = true,
            .switch_case, .if_branch => {
                if (stmt_has_block) {
                    //return gen.fail(.expected_else, case_stmt);
                }
            },
            .else_branch => {
                // Pointer comparison: is this prong the final one?
                if (case_stmt != last_prong) {
                    return gen.fail(.invalid_else_stmt, case_stmt);
                }
                if (stmt_has_else) {
                    return gen.fail(.unexpected_else_stmt, case_stmt);
                }
                stmt_has_else = true;
            },
            else => unreachable,
        }
    }
}

/// Lowers a two-way if: block wrapping a condbr whose branches each
/// break back to the block.
fn ifStmt(
    parent_block: *GenIr,
    scope: *Scope,
    stmt_node: *const Ast.Node,
) InnerError!Ir.Inst.Ref {
    const astgen = parent_block.astgen;
    const cond_expr = stmt_node.data.switch_stmt.condition_expr.?;
    try validateSwitchProngs(parent_block, stmt_node);
    const case_list = stmt_node.data.switch_stmt.cases;
    const then_node = case_list[0];
    const last_prong = case_list[case_list.len - 1];
    var block_scope = parent_block.makeSubBlock();
    defer block_scope.unstack();
    const cond_inst = try expr(&block_scope, scope, cond_expr);
    const condbr = try block_scope.addCondBr(.condbr);
    const block = try parent_block.makeBlockInst(.block);
    try block_scope.setBlockBody(block); // unstacks block
    try parent_block.instructions.append(astgen.gpa, block);
    var then_block = parent_block.makeSubBlock();
    defer then_block.unstack();
    try blockStmt(&then_block, scope, then_node);
    _ = try then_block.addBreak(.@"break", block);
    var else_block = parent_block.makeSubBlock();
    defer else_block.unstack();
    if (then_node == last_prong) {
        // No else prong: the else branch just exits the block.
        _ = try else_block.addBreak(.@"break", block);
    } else {
        const block_node = last_prong.data.bin.rhs.?;
        try blockStmt(&else_block, scope, block_node);
    }
    try setCondBrPayload(condbr, cond_inst, &then_block, &else_block);
    return condbr.toRef();
}

/// Recursively lowers a list of if-branches into nested condbrs; a
/// leading condition-less branch acts as the final else body.
fn ifChain(
    parent_block: *GenIr,
    scope: *Scope,
    branch_list: []const *Ast.Node,
) InnerError!Ir.Inst.Ref {
    const gpa = parent_block.astgen.gpa;
    if (branch_list.len == 0) return @enumFromInt(0);
    if (branch_list[0].data.bin.lhs == null) {
        const body_node = branch_list[0].data.bin.rhs.?;
        try blockStmt(parent_block, scope, body_node);
        return @enumFromInt(0);
    }
    var block_scope = parent_block.makeSubBlock();
    defer block_scope.unstack();
    const branch = branch_list[0];
    const cond_expr = branch.data.bin.lhs.?;
    const body_node = branch.data.bin.rhs.?;
    const cond_inst = try expr(&block_scope, scope, cond_expr);
    const condbr = try block_scope.addCondBr(.condbr);
    const block_inst = try parent_block.makeBlockInst(.block);
    try block_scope.setBlockBody(block_inst);
    try parent_block.instructions.append(gpa, block_inst);
    var then_block = parent_block.makeSubBlock();
    defer then_block.unstack();
    try blockStmt(&then_block, scope, body_node);
    _ = try then_block.addBreak(.@"break", block_inst);
    var else_block = parent_block.makeSubBlock();
    defer else_block.unstack();
    // The recursive call stacks the remaining chain inside the else
    // region (else_block's slice of the shared scratch list).
    const next_branches = branch_list[1..];
    _ = try ifChain(parent_block, scope, next_branches);
    _ = try else_block.addBreak(.@"break", block_inst);
    try setCondBrPayload(condbr, cond_inst, &then_block, &else_block);
    return @enumFromInt(0);
}

fn multiIfStmt(
    parent_block: *GenIr,
    scope: *Scope,
    stmt_node: *const Ast.Node,
) InnerError!Ir.Inst.Ref {
    try validateSwitchProngs(parent_block, stmt_node);
    const branch_list = stmt_node.data.switch_stmt.cases;
    if (branch_list[0].data.bin.lhs == null) {
        // Degenerate form: a single unconditional branch.
        const branch = branch_list[0];
        const body_node = branch.data.bin.rhs.?;
        try blockStmt(parent_block, scope, body_node);
        return @enumFromInt(0);
    }
    _ = try ifChain(parent_block, scope, branch_list);
    return @enumFromInt(0);
}

fn contentExpr(block: *GenIr, scope: *Scope, expr_node: *const Ast.Node) InnerError!Ir.Inst.Ref {
    // FIXME: This is a placeholder until we figure out what this function should be returning.
    var last_inst: Ir.Inst.Ref = undefined;
    // TODO: Make sure that this is not nullable.
    const node_list = expr_node.data.list.items.?;
    for (node_list) |child_node| {
        last_inst = switch (child_node.tag) {
            .string_literal => try stringLiteral(block, child_node),
            .inline_logic_expr => try inlineLogicExpr(block, scope, child_node),
            .if_stmt => try ifStmt(block, scope, child_node),
            .multi_if_stmt => try multiIfStmt(block, scope, child_node),
            //.switch_stmt => try switchStmt(block, scope, child_node),
            else => unreachable,
        };
        last_inst = try block.addUnaryNode(.content_push, last_inst);
    }
    return last_inst;
}

fn contentStmt(gen: *GenIr, scope: *Scope, node: *const Ast.Node) InnerError!Ir.Inst.Ref {
    const expr_node = node.data.bin.lhs.?;
    const expr_ref = try contentExpr(gen, scope, expr_node);
    return gen.addUnaryNode(.content_flush, expr_ref);
}

/// Lowers `name = expr` as a store to the local's alloc. Assigning to an
/// unknown name is an error (globals not yet supported, per TODO).
fn assignStmt(gi: *GenIr, scope: *Scope, node: *const Ast.Node) InnerError!void {
    const astgen = gi.astgen;
    const identifier_node = node.data.bin.lhs.?;
    const expr_node = node.data.bin.rhs.?;
    const name_ref = try astgen.stringFromNode(identifier_node);
    // TODO: Support globals as well
    if (scope.lookup(name_ref)) |decl| {
        const expr_result = try expr(gi, scope, expr_node);
        _ = try gi.addBinaryNode(.store, decl.inst_index.toRef(), expr_result);
        return;
    }
    return gi.fail(.unknown_identifier, identifier_node);
}

// FIXME(review): dead code targeting an earlier bytecode emitter — the
// label/fixup/emitSimpleInst API it calls does not exist on GenIr, and
// the discarded `try stringLiteral(...)` results would not compile if
// this were referenced (its call site in blockInner is commented out).
// Kept verbatim pending a port to the Ir builder.
fn choiceStmt(gen: *GenIr, scope: *Scope, stmt_node: *const Ast.Node) InnerError!void {
    const Choice = struct {
        label_index: usize,
        start_expression: ?*const Ast.Node,
        option_expression: ?*const Ast.Node,
        inner_expression: ?*const Ast.Node,
        block_stmt: ?*const Ast.Node,
    };
    const branch_list = stmt_node.data.list.items orelse unreachable;
    assert(branch_list.len != 0);
    const gpa = gen.astgen.gpa;
    var choice_list: std.ArrayListUnmanaged(Choice) = .empty;
    defer choice_list.deinit(gpa);
    try choice_list.ensureUnusedCapacity(gpa, branch_list.len);
    for (branch_list) |branch_stmt| {
        assert(branch_stmt.tag == .choice_star_stmt or branch_stmt.tag == .choice_plus_stmt);
        const branch_data = branch_stmt.data.bin;
        const branch_expr = branch_data.lhs orelse unreachable;
        const branch_expr_data = branch_expr.data.choice_expr;
        const label_index = try gen.makeLabel();
        if (branch_expr_data.start_expr) |node| {
            try stringLiteral(gen, node);
            try gen.emitSimpleInst(.stream_push);
        }
        if (branch_expr_data.option_expr) |node| {
            try stringLiteral(gen, node);
            try gen.emitSimpleInst(.stream_push);
        }
        const fixup_offset = try gen.emitJumpInst(.br_push);
        _ = try gen.makeFixup(.{
            .mode = .absolute,
            .label_index = label_index,
            .code_offset = fixup_offset,
        });
        choice_list.appendAssumeCapacity(.{
            .label_index = label_index,
            .start_expression = branch_expr_data.start_expr,
            .inner_expression = branch_expr_data.inner_expr,
            .option_expression = branch_expr_data.option_expr,
            .block_stmt = branch_data.rhs,
        });
    }
    try gen.emitSimpleInst(.br_table);
    try gen.emitSimpleInst(.br_select_index);
    try gen.emitSimpleInst(.br_dispatch);
    for (choice_list.items) |choice| {
        gen.setLabel(choice.label_index);
        if (choice.start_expression) |expr_node| {
            try stringLiteral(gen, expr_node);
            try gen.emitSimpleInst(.stream_push);
        }
        if (choice.inner_expression) |expr_node| {
            try stringLiteral(gen, expr_node);
            try gen.emitSimpleInst(.stream_push);
        }
        try gen.emitSimpleInst(.stream_flush);
        if (choice.block_stmt) |block| {
            try blockStmt(gen, scope, block);
        } else {
            try gen.emitSimpleInst(.exit);
        }
    }
}

/// Lowers `~ temp name = expr`: alloc + store, then binds the name in
/// the current scope.
fn tempDecl(gi: *GenIr, scope: *Scope, decl_node: *const Ast.Node) !void {
    const identifier_node = decl_node.data.bin.lhs.?;
    const expr_node = decl_node.data.bin.rhs.?;
    const name_ref = try gi.astgen.stringFromNode(identifier_node);
    if (scope.lookup(name_ref)) |_| {
        return gi.fail(.redefined_identifier, decl_node);
    }
    // `.alloc` carries no operand data, hence the undefined payload.
    const alloc_inst = try gi.add(.{ .tag = .alloc, .data = undefined });
    const expr_result = try expr(gi, scope, expr_node);
    _ = try gi.addBinaryNode(.store, alloc_inst, expr_result);
    return scope.insert(name_ref, .{
        .decl_node = decl_node,
        .inst_index = alloc_inst.toIndex().?,
    });
}

/// Lowers VAR/CONST declarations into a declaration instruction whose
/// body computes the initializer.
fn varDecl(gi: *GenIr, scope: *Scope, decl_node: *const Ast.Node) !void {
    const astgen = gi.astgen;
    const gpa = astgen.gpa;
    const identifier_node = decl_node.data.bin.lhs.?;
    const expr_node = decl_node.data.bin.rhs.?;
    const decl_inst = try gi.makeDeclaration();
    try gi.instructions.append(gpa, decl_inst);
    var decl_block = gi.makeSubBlock();
    defer decl_block.unstack();
    _ = try expr(&decl_block, scope, expr_node);
    const var_inst = try decl_block.addVar();
    try setDeclaration(decl_inst, .{
        .tag = .variable,
        .name = try astgen.stringFromNode(identifier_node),
        .ref = var_inst,
        .decl_node = decl_node,
        .body_gi = &decl_block,
        .is_constant = decl_node.tag == .const_decl,
    });
}

/// Lowers the statements of a block in a fresh child scope.
fn blockInner(gi: *GenIr, parent_scope: *Scope, stmt_list: []*Ast.Node) !void {
    var child_scope = parent_scope.makeChild();
    defer child_scope.deinit();
    for (stmt_list) |inner_node| {
        _ = switch (inner_node.tag) {
            .var_decl => try varDecl(gi, &child_scope, inner_node),
            .const_decl => try varDecl(gi, &child_scope, inner_node),
            .temp_decl => try tempDecl(gi, &child_scope, inner_node),
            .assign_stmt => try assignStmt(gi, &child_scope, inner_node),
            .content_stmt => try contentStmt(gi, &child_scope, inner_node),
            //.choice_stmt => try choiceStmt(gen, scope, inner_node),
            .expr_stmt => try exprStmt(gi, &child_scope, inner_node),
            else => unreachable,
        };
    }
}

fn blockStmt(block: *GenIr, scope: *Scope, stmt_node: *const Ast.Node) InnerError!void {
    // TODO: Make sure that this value is concrete to omit check.
    const block_stmts = stmt_node.data.list.items.?;
    try blockInner(block, scope, block_stmts);
}

/// Name of the implicit top-level knot that holds file-scope statements.
const main_knot_name: [:0]const u8 = "$__main__$";

/// Wraps the file's leading statement block in an implicit knot
/// declaration named `main_knot_name`.
fn defaultBlock(
    gi: *GenIr,
    scope: *Scope,
    body_node: *const Ast.Node,
) InnerError!void {
    const astgen = gi.astgen;
    const gpa = astgen.gpa;
    const decl_inst = try gi.makeDeclaration();
    try gi.instructions.append(gpa, decl_inst);
    var decl_scope = gi.makeSubBlock();
    defer decl_scope.unstack();
    // TODO: Make sure that this value is concrete to omit check.
    const block_stmts = body_node.data.list.items orelse unreachable;
    try blockInner(&decl_scope, scope, block_stmts);
    const knot_inst = try decl_scope.addKnot();
    try setDeclaration(decl_inst, .{
        .tag = .knot,
        .decl_node = body_node,
        // Use the shared constant instead of repeating the literal.
        .name = try astgen.stringFromBytes(main_knot_name),
        .ref = knot_inst,
        .body_gi = &decl_scope,
    });
}

fn stitchDecl(_: *GenIr, _: *Scope, _: *const Ast.Node) InnerError!void {}

fn functionDecl(_: *GenIr, _: *Scope, _: *const Ast.Node) InnerError!void {}

// FIXME(review): currently unreferenced (its call site in `file` is
// commented out) and inconsistent with the declarations in this file:
// `Scope.Decl` has no `.knot` field and `GenIr` has no `deinit`
// (presumably `unstack` was meant). Kept verbatim pending knot support.
fn knotDecl(gen: *GenIr, scope: *Scope, decl_node: *const Ast.Node) InnerError!void {
    const prototype_node = decl_node.data.knot_decl.prototype;
    const nested_decls_list = decl_node.data.knot_decl.children orelse return;
    const identifier_node = prototype_node.data.bin.lhs orelse unreachable;
    const ident_ref = try gen.astgen.stringFromNode(identifier_node);
    const knot_symbol = scope.lookup(ident_ref) orelse unreachable;
    const knot_scope = knot_symbol.knot.decl_scope;
    var block_gen = gen.makeSubBlock();
    defer block_gen.deinit();
    var start_index: usize = 0;
    const first_child = nested_decls_list[0];
    if (first_child.tag == .block_stmt) {
        try blockStmt(&block_gen, knot_scope, first_child);
        if (nested_decls_list.len > 1) start_index += 1 else return;
    }
    for (nested_decls_list[start_index..]) |nested_decl_node| {
        switch (decl_node.tag) {
            .stitch_decl => try stitchDecl(gen, knot_scope, nested_decl_node),
            .function_decl => try functionDecl(gen, knot_scope, nested_decl_node),
            else => unreachable,
        }
    }
}

/// Lowers the root file node: an implicit main knot for leading
/// statements, followed by top-level declarations (knot/stitch/function
/// lowering is not wired up yet — any such decl currently hits
/// `unreachable`).
fn file(root_gi: *GenIr, scope: *Scope, file_node: *const Ast.Node) InnerError!void {
    const astgen = root_gi.astgen;
    const gpa = astgen.gpa;
    const file_inst = try root_gi.makePayloadNode(.file);
    try root_gi.instructions.append(gpa, file_inst);
    var start_index: usize = 0;
    var file_scope = root_gi.makeSubBlock();
    defer file_scope.unstack();
    // TODO: Make sure this is non-nullable.
    const nested_decls_list = file_node.data.list.items orelse return;
    if (nested_decls_list.len == 0) return;
    const first_child = nested_decls_list[0];
    if (first_child.tag == .block_stmt) {
        try defaultBlock(&file_scope, scope, first_child);
        if (nested_decls_list.len > 1) start_index += 1 else return file_scope.setBlockBody(file_inst);
    }
    for (nested_decls_list[start_index..]) |child_node| {
        switch (child_node.tag) {
            //.knot_decl => try knotDecl(gi, scope, child_node),
            //.stitch_decl => try stitchDecl(gi, scope, child_node),
            //.function_decl => try functionDecl(gi, scope, child_node),
            else => unreachable,
        }
    }
    return file_scope.setBlockBody(file_inst);
}

/// Perform code generation via tree-walk.
///
/// Semantic errors do not abort generation: they are collected into the
/// returned `Ir.errors`, which callers must check before trusting the
/// rest of the Ir. Caller owns all returned slices.
pub fn generate(gpa: std.mem.Allocator, tree: *const Ast) !Ir {
    var astgen: AstGen = .{
        .gpa = gpa,
        .tree = tree,
    };
    defer astgen.deinit();
    // First entry is reserved for Ir.NullTerminatedString.empty.
    try astgen.string_bytes.append(gpa, 0);
    var instructions: std.ArrayListUnmanaged(Ir.Inst.Index) = .empty;
    defer instructions.deinit(gpa);
    var file_scope: Scope = .{
        .parent = null,
        .decls = .empty,
        .astgen = &astgen,
    };
    var gen: GenIr = .{
        .astgen = &astgen,
        .instructions = &instructions,
        .instructions_top = 0,
    };
    defer gen.unstack();
    // TODO: Make sure this is never null.
    const root_node = tree.root.?;
    file(&gen, &file_scope, root_node) catch |err| switch (err) {
        error.SemanticError => {}, // Collected in astgen.errors; keep going.
        else => |e| return e,
    };
    // BUGFIX: take ownership one slice at a time with errdefers — the
    // previous chained form leaked earlier slices if a later
    // toOwnedSlice failed with OOM (deinit only frees the now-empty
    // lists, not the moved-out allocations).
    const string_bytes = try astgen.string_bytes.toOwnedSlice(gpa);
    errdefer gpa.free(string_bytes);
    const insts = try astgen.instructions.toOwnedSlice(gpa);
    errdefer gpa.free(insts);
    const globals = try astgen.globals.toOwnedSlice(gpa);
    errdefer gpa.free(globals);
    const extra = try astgen.extra.toOwnedSlice(gpa);
    errdefer gpa.free(extra);
    const errors = try astgen.errors.toOwnedSlice(gpa);
    return .{
        .string_bytes = string_bytes,
        .instructions = insts,
        .globals = globals,
        .extra = extra,
        .errors = errors,
    };
}