feat: semantics for if statements and multi-prong if statements

This commit is contained in:
Brett Broadhurst 2026-03-10 16:52:13 -06:00
parent 9658c8a308
commit ce5385ebac
Failed to generate hash of commit
6 changed files with 553 additions and 317 deletions

View file

@ -22,10 +22,11 @@ pub const Node = struct {
pub const Tag = enum {
file,
false_literal,
true_literal,
false_literal,
number_literal,
string_literal,
string_expr,
empty_string,
identifier,
add_expr,
@ -73,7 +74,6 @@ pub const Node = struct {
temp_decl,
parameter_decl,
ref_parameter_decl,
string_expr,
argument_list,
parameter_list,
switch_stmt,

View file

@ -40,6 +40,11 @@ const Scope = struct {
astgen: *AstGen,
decls: std.AutoHashMapUnmanaged(Ir.NullTerminatedString, Decl),
pub const Decl = struct {
decl_node: *const Ast.Node,
inst_index: Ir.Inst.Index,
};
pub fn deinit(self: *Scope) void {
const gpa = self.astgen.gpa;
self.decls.deinit(gpa);
@ -75,13 +80,18 @@ const GenIr = struct {
const unstacked_top = std.math.maxInt(usize);
pub fn unstack(self: *GenIr) void {
fn unstack(self: *GenIr) void {
if (self.instructions_top != unstacked_top) {
self.instructions.items.len = self.instructions_top;
self.instructions_top = unstacked_top;
}
}
fn isEmpty(self: *const GenIr) bool {
return (self.instructions_top == unstacked_top) or
(self.instructions.items.len == self.instructions_top);
}
fn instructionsSlice(self: *const GenIr) []Ir.Inst.Index {
return if (self.instructions_top == unstacked_top)
&[0]Ir.Inst.Index{}
@ -89,6 +99,18 @@ const GenIr = struct {
self.instructions.items[self.instructions_top..];
}
fn instructionsSliceUpto(
self: *const GenIr,
stacked_block: *const GenIr,
) []Ir.Inst.Index {
return if (self.instructions_top == unstacked_top)
&[0]Ir.Inst.Index{}
else if (self.instructions == stacked_block.instructions and stacked_block.instructions_top != unstacked_top)
self.instructions.items[self.instructions_top..stacked_block.instructions_top]
else
self.instructions.items[self.instructions_top..];
}
fn fail(
self: *GenIr,
tag: Ast.Error.Tag,
@ -97,7 +119,7 @@ const GenIr = struct {
return self.astgen.fail(tag, node);
}
pub fn makeSubBlock(self: *GenIr) GenIr {
fn makeSubBlock(self: *GenIr) GenIr {
return .{
.astgen = self.astgen,
.instructions = self.instructions,
@ -105,7 +127,7 @@ const GenIr = struct {
};
}
pub fn add(gi: *GenIr, inst: Ir.Inst) !Ir.Inst.Index {
fn add(gi: *GenIr, inst: Ir.Inst) !Ir.Inst.Index {
const gpa = gi.astgen.gpa;
try gi.instructions.ensureUnusedCapacity(gpa, 1);
try gi.astgen.instructions.ensureUnusedCapacity(gpa, 1);
@ -116,53 +138,38 @@ const GenIr = struct {
return inst_index;
}
pub fn addInt(gi: *GenIr, value: u64) !Ir.Inst.Index {
return add(gi, .{
.tag = .integer,
.data = .{ .integer = .{
.value = value,
} },
});
fn addInt(gi: *GenIr, value: u64) !Ir.Inst.Index {
return add(gi, .{ .tag = .integer, .data = .{
.integer = .{ .value = value },
} });
}
pub fn addUnaryNode(
gi: *GenIr,
tag: Ir.Inst.Tag,
arg: Ir.Inst.Index,
) !Ir.Inst.Index {
return add(gi, .{
.tag = tag,
.data = .{ .un = .{
.lhs = arg,
} },
});
fn addUnaryNode(gi: *GenIr, tag: Ir.Inst.Tag, arg: Ir.Inst.Index) !Ir.Inst.Index {
return add(gi, .{ .tag = tag, .data = .{
.un = .{ .lhs = arg },
} });
}
pub fn addBinaryNode(
fn addBinaryNode(
gi: *GenIr,
tag: Ir.Inst.Tag,
lhs: Ir.Inst.Index,
rhs: Ir.Inst.Index,
) !Ir.Inst.Index {
return add(gi, .{
.tag = tag,
.data = .{ .bin = .{
.lhs = lhs,
.rhs = rhs,
} },
});
return add(gi, .{ .tag = tag, .data = .{
.bin = .{ .lhs = lhs, .rhs = rhs },
} });
}
pub fn addDeclRef(gi: *GenIr, decl_ref: Ir.NullTerminatedString) !Ir.Inst.Index {
return add(gi, .{
.tag = .decl_ref,
.data = .{ .string = .{
fn addDeclRef(gi: *GenIr, decl_ref: Ir.NullTerminatedString) !Ir.Inst.Index {
return add(gi, .{ .tag = .decl_ref, .data = .{
.string = .{
.start = decl_ref,
} },
});
},
} });
}
pub fn makePayloadNode(gi: *GenIr, tag: Ir.Inst.Tag) !Ir.Inst.Index {
fn makePayloadNode(gi: *GenIr, tag: Ir.Inst.Tag) !Ir.Inst.Index {
const gpa = gi.astgen.gpa;
const inst_index: Ir.Inst.Index = @enumFromInt(gi.astgen.instructions.items.len);
try gi.astgen.instructions.append(gpa, .{
@ -174,80 +181,113 @@ const GenIr = struct {
return inst_index;
}
pub fn makeDeclaration(gi: *GenIr) !Ir.Inst.Index {
fn makeDeclaration(gi: *GenIr) !Ir.Inst.Index {
return makePayloadNode(gi, .declaration);
}
pub fn makeBlockInst(gi: *GenIr) !Ir.Inst.Index {
fn makeBlockInst(gi: *GenIr) !Ir.Inst.Index {
return makePayloadNode(gi, .block);
}
pub fn addKnot(gi: *GenIr) !Ir.Inst.Index {
const astgen = gi.astgen;
const gpa = astgen.gpa;
const body = gi.instructionsSlice();
fn addKnot(self: *GenIr) !Ir.Inst.Index {
const gpa = self.astgen.gpa;
const body = self.instructionsSlice();
const extra_len = @typeInfo(Ir.Inst.Knot).@"struct".fields.len + body.len;
try astgen.extra.ensureUnusedCapacity(gpa, extra_len);
try self.astgen.extra.ensureUnusedCapacity(gpa, extra_len);
const knot_node = try makePayloadNode(gi, .decl_knot);
const inst_data = &astgen.instructions.items[@intFromEnum(knot_node)].data;
inst_data.payload.payload_index = astgen.addExtraAssumeCapacity(
const knot_node = try makePayloadNode(self, .decl_knot);
const inst_data = &self.astgen.instructions.items[@intFromEnum(knot_node)].data;
inst_data.payload.payload_index = self.astgen.addExtraAssumeCapacity(
Ir.Inst.Knot{ .body_len = @intCast(body.len) },
);
for (body) |inst_index| {
astgen.extra.appendAssumeCapacity(@intFromEnum(inst_index));
}
self.astgen.appendBlockBody(body);
return knot_node;
}
pub fn addVar(gi: *GenIr) !Ir.Inst.Index {
const astgen = gi.astgen;
const gpa = astgen.gpa;
const new_index: Ir.Inst.Index = @enumFromInt(astgen.instructions.items.len);
try astgen.instructions.ensureUnusedCapacity(gpa, 1);
try gi.instructions.ensureUnusedCapacity(gpa, 1);
gi.astgen.instructions.appendAssumeCapacity(.{
.tag = .decl_var,
.data = .{
.payload = .{ .payload_index = undefined },
},
});
const body = gi.instructionsSlice();
fn addVar(self: *GenIr) !Ir.Inst.Index {
const gpa = self.astgen.gpa;
const new_index: Ir.Inst.Index = @enumFromInt(self.astgen.instructions.items.len);
const body = self.instructionsSlice();
const extra_len = @typeInfo(Ir.Inst.Var).@"struct".fields.len + body.len;
try astgen.extra.ensureUnusedCapacity(gpa, extra_len);
try self.astgen.extra.ensureUnusedCapacity(gpa, extra_len);
try self.astgen.instructions.ensureUnusedCapacity(gpa, 1);
try self.instructions.ensureUnusedCapacity(gpa, 1);
const inst_data = &astgen.instructions.items[@intFromEnum(new_index)].data;
inst_data.payload.payload_index = astgen.addExtraAssumeCapacity(
Ir.Inst.Var{ .body_len = @intCast(body.len) },
);
self.astgen.instructions.appendAssumeCapacity(.{ .tag = .decl_var, .data = .{
.payload = .{ .payload_index = self.astgen.addExtraAssumeCapacity(
Ir.Inst.Var{ .body_len = @intCast(body.len) },
) },
} });
for (body) |inst_index| {
astgen.extra.appendAssumeCapacity(@intFromEnum(inst_index));
}
gi.instructions.appendAssumeCapacity(new_index);
self.astgen.appendBlockBody(body);
self.instructions.appendAssumeCapacity(new_index);
return new_index;
}
pub fn setBlockBody(gi: *GenIr, inst: Ir.Inst.Index) !void {
const astgen = gi.astgen;
const gpa = astgen.gpa;
const body = gi.instructionsSlice();
const extra_len = @typeInfo(Ir.Inst.Block).@"struct".fields.len + body.len;
try astgen.extra.ensureUnusedCapacity(gpa, extra_len);
fn addCondBr(self: *GenIr, tag: Ir.Inst.Tag) !Ir.Inst.Index {
const gpa = self.astgen.gpa;
try self.instructions.ensureUnusedCapacity(gpa, 1);
try self.astgen.instructions.ensureUnusedCapacity(gpa, 1);
const inst_data = &astgen.instructions.items[@intFromEnum(inst)].data;
inst_data.payload.payload_index = astgen.addExtraAssumeCapacity(
const new_index: Ir.Inst.Index = @enumFromInt(self.astgen.instructions.items.len);
self.astgen.instructions.appendAssumeCapacity(.{
.tag = tag,
.data = .{ .payload = .{
.payload_index = undefined,
} },
});
self.instructions.appendAssumeCapacity(new_index);
return new_index;
}
fn addBreak(
self: *GenIr,
tag: Ir.Inst.Tag,
block_inst: Ir.Inst.Index,
) !Ir.Inst.Index {
const gpa = self.astgen.gpa;
try self.instructions.ensureUnusedCapacity(gpa, 1);
const new_index = try self.makeBreak(tag, block_inst);
self.instructions.appendAssumeCapacity(new_index);
return new_index;
}
fn makeBreak(
self: *GenIr,
tag: Ir.Inst.Tag,
block_inst: Ir.Inst.Index,
) !Ir.Inst.Index {
const gpa = self.astgen.gpa;
const extra_len = @typeInfo(Ir.Inst.Break).@"struct".fields.len;
try self.astgen.instructions.ensureUnusedCapacity(gpa, 1);
try self.astgen.extra.ensureUnusedCapacity(gpa, extra_len);
const new_index: Ir.Inst.Index = @enumFromInt(self.astgen.instructions.items.len);
self.astgen.instructions.appendAssumeCapacity(.{
.tag = tag,
.data = .{ .payload = .{
.payload_index = self.astgen.addExtraAssumeCapacity(
Ir.Inst.Break{ .block_inst = block_inst },
),
} },
});
return new_index;
}
fn setBlockBody(self: *GenIr, inst: Ir.Inst.Index) !void {
const gpa = self.astgen.gpa;
const body = self.instructionsSlice();
const extra_len = @typeInfo(Ir.Inst.Block).@"struct".fields.len + body.len;
try self.astgen.extra.ensureUnusedCapacity(gpa, extra_len);
const inst_data = &self.astgen.instructions.items[@intFromEnum(inst)].data;
inst_data.payload.payload_index = self.astgen.addExtraAssumeCapacity(
Ir.Inst.Block{ .body_len = @intCast(body.len) },
);
for (body) |inst_index| {
astgen.extra.appendAssumeCapacity(@intFromEnum(inst_index));
}
gi.unstack();
self.astgen.appendBlockBody(body);
self.unstack();
}
};
@ -289,6 +329,36 @@ fn setDeclaration(
args.body_gi.unstack();
}
fn setCondBrPayload(
condbr: Ir.Inst.Index,
cond: Ir.Inst.Index,
then_block: *GenIr,
else_block: *GenIr,
) !void {
defer then_block.unstack();
defer else_block.unstack();
const astgen = then_block.astgen;
const then_body = then_block.instructionsSliceUpto(else_block);
const else_body = else_block.instructionsSlice();
const then_body_len = then_body.len;
const else_body_len = else_body.len;
const extra_len =
@typeInfo(Ir.Inst.CondBr).@"struct".fields.len + then_body_len + else_body_len;
try astgen.extra.ensureUnusedCapacity(astgen.gpa, extra_len);
const inst_data = &astgen.instructions.items[@intFromEnum(condbr)].data;
inst_data.payload.payload_index = astgen.addExtraAssumeCapacity(
Ir.Inst.CondBr{
.condition = cond,
.then_body_len = @intCast(then_body_len),
.else_body_len = @intCast(else_body_len),
},
);
astgen.appendBlockBody(then_body);
astgen.appendBlockBody(else_body);
}
fn addExtraAssumeCapacity(astgen: *AstGen, extra: anytype) u32 {
const fields = std.meta.fields(@TypeOf(extra));
const extra_index: u32 = @intCast(astgen.extra.items.len);
@ -329,6 +399,12 @@ fn fail(
return error.SemanticError;
}
fn appendBlockBody(self: *AstGen, body: []const Ir.Inst.Index) void {
for (body) |inst_index| {
self.extra.appendAssumeCapacity(@intFromEnum(inst_index));
}
}
fn sliceFromNode(astgen: *const AstGen, node: *const Ast.Node) []const u8 {
assert(node.loc.start <= node.loc.end);
const source_bytes = astgen.tree.source;
@ -454,39 +530,79 @@ fn identifier(gi: *GenIr, scope: *Scope, node: *const Ast.Node) InnerError!Ir.In
return gi.addDeclRef(str);
}
fn expr(gen: *GenIr, scope: *Scope, optional_expr: ?*const Ast.Node) InnerError!Ir.Inst.Index {
fn expr(block: *GenIr, scope: *Scope, optional_expr: ?*const Ast.Node) InnerError!Ir.Inst.Index {
const expr_node = optional_expr orelse unreachable;
switch (expr_node.tag) {
.true_literal => return trueLiteral(gen),
.false_literal => return falseLiteral(gen),
.number_literal => return numberLiteral(gen, expr_node),
.string_literal => return stringLiteral(gen, expr_node),
.string_expr => return stringExpr(gen, expr_node),
.identifier => return identifier(gen, scope, expr_node),
.add_expr => return binaryOp(gen, scope, expr_node, .add),
.subtract_expr => return binaryOp(gen, scope, expr_node, .sub),
.multiply_expr => return binaryOp(gen, scope, expr_node, .mul),
.divide_expr => return binaryOp(gen, scope, expr_node, .div),
.mod_expr => return binaryOp(gen, scope, expr_node, .mod),
.negate_expr => return unaryOp(gen, scope, expr_node, .neg),
//.logical_and_expr => return logicalOp(gen, scope, expr_node, .jmp_f),
//.logical_or_expr => return logicalOp(gen, scope, expr_node, .jmp_t),
//.logical_not_expr => return unaryOp(gen, scope, expr_node, .not),
//.logical_equality_expr => return binaryOp(gen, scope, expr_node, .cmp_eq),
//.logical_inequality_expr => {
// return binaryOp(gen, scope, expr_node, .cmp_eq);
//},
//.logical_greater_expr => return binaryOp(gen, scope, expr_node, .cmp_gt),
//.logical_greater_or_equal_expr => return binaryOp(gen, scope, expr_node, .cmp_gte),
//.logical_lesser_expr => return binaryOp(gen, scope, expr_node, .cmp_lt),
//.logical_lesser_or_equal_expr => return binaryOp(gen, scope, expr_node, .cmp_lte),
else => unreachable,
.file => unreachable,
.true_literal => return trueLiteral(block),
.false_literal => return falseLiteral(block),
.number_literal => return numberLiteral(block, expr_node),
.string_literal => return stringLiteral(block, expr_node),
.string_expr => return stringExpr(block, expr_node),
.empty_string => return stringLiteral(block, expr_node),
.identifier => return identifier(block, scope, expr_node),
.add_expr => return binaryOp(block, scope, expr_node, .add),
.subtract_expr => return binaryOp(block, scope, expr_node, .sub),
.multiply_expr => return binaryOp(block, scope, expr_node, .mul),
.divide_expr => return binaryOp(block, scope, expr_node, .div),
.mod_expr => return binaryOp(block, scope, expr_node, .mod),
.negate_expr => return unaryOp(block, scope, expr_node, .neg),
.logical_and_expr => unreachable,
.logical_or_expr => unreachable,
.logical_not_expr => return unaryOp(block, scope, expr_node, .not),
.logical_equality_expr => return binaryOp(block, scope, expr_node, .cmp_eq),
.logical_inequality_expr => return binaryOp(block, scope, expr_node, .cmp_neq),
.logical_greater_expr => return binaryOp(block, scope, expr_node, .cmp_gt),
.logical_greater_or_equal_expr => return binaryOp(block, scope, expr_node, .cmp_gte),
.logical_lesser_expr => return binaryOp(block, scope, expr_node, .cmp_lt),
.logical_lesser_or_equal_expr => return binaryOp(block, scope, expr_node, .cmp_lte),
.call_expr => unreachable,
.choice_expr => unreachable,
.choice_start_expr => unreachable,
.choice_option_expr => unreachable,
.choice_inner_expr => unreachable,
.divert_expr => unreachable,
.selector_expr => unreachable,
.assign_stmt => unreachable,
.block_stmt => unreachable,
.content_stmt => unreachable,
.divert_stmt => unreachable,
.return_stmt => unreachable,
.expr_stmt => unreachable,
.choice_stmt => unreachable,
.choice_star_stmt => unreachable,
.choice_plus_stmt => unreachable,
.gather_point_stmt => unreachable,
.gathered_stmt => unreachable,
.function_prototype => unreachable,
.stitch_prototype => unreachable,
.knot_prototype => unreachable,
.function_decl => unreachable,
.stitch_decl => unreachable,
.knot_decl => unreachable,
.const_decl => unreachable,
.var_decl => unreachable,
.list_decl => unreachable,
.temp_decl => unreachable,
.parameter_decl => unreachable,
.ref_parameter_decl => unreachable,
.argument_list => unreachable,
.parameter_list => unreachable,
.switch_stmt => unreachable,
.switch_case => unreachable,
.if_stmt => unreachable,
.multi_if_stmt => unreachable,
.if_branch => unreachable,
.else_branch => unreachable,
.content => unreachable,
.inline_logic_expr => unreachable,
.invalid => unreachable,
}
}
fn exprStmt(gen: *GenIr, scope: *Scope, stmt_node: *const Ast.Node) InnerError!Ir.Inst.Index {
// TODO: Maybe we should introduce a unary node type to avoid optional checks?
const expr_node = stmt_node.data.bin.lhs orelse unreachable;
const expr_node = stmt_node.data.bin.lhs.?;
return expr(gen, scope, expr_node);
}
@ -517,125 +633,6 @@ fn validateSwitchProngs(gen: *GenIr, stmt_node: *const Ast.Node) InnerError!void
}
}
fn ifStmt(gen: *GenIr, parent_scope: *Scope, stmt_node: *const Ast.Node) InnerError!void {
const case_list = stmt_node.data.switch_stmt.cases;
const eval_expr = stmt_node.data.switch_stmt.condition_expr;
if (eval_expr) |expr_node| {
try validateSwitchProngs(gen, stmt_node);
const first_prong = case_list[0];
const last_prong = case_list[case_list.len - 1];
const then_stmt: *const Ast.Node = first_prong;
const else_stmt: ?*const Ast.Node = if (first_prong == last_prong)
null
else
last_prong;
var child_scope = try gen.astgen.createScope(parent_scope);
defer child_scope.deinit();
try expr(gen, child_scope, expr_node);
const else_label = try gen.makeLabel();
const end_label = try gen.makeLabel();
const then_br = try gen.emitJumpInst(.jmp_f);
_ = try gen.makeFixup(.{
.mode = .relative,
.label_index = else_label,
.code_offset = then_br,
});
try gen.emitSimpleInst(.pop);
try blockStmt(gen, child_scope, then_stmt);
const else_br = try gen.emitJumpInst(.jmp);
_ = try gen.makeFixup(.{
.mode = .relative,
.label_index = end_label,
.code_offset = else_br,
});
gen.setLabel(else_label);
try gen.emitSimpleInst(.pop);
if (else_stmt) |else_node| {
const block_stmt = else_node.data.bin.rhs;
try blockStmt(gen, child_scope, block_stmt);
}
gen.setLabel(end_label);
} else {
return gen.fail(.expected_expression, stmt_node);
}
}
fn multiIfStmt(
gen: *GenIr,
parent_scope: *Scope,
stmt_node: *const Ast.Node,
) InnerError!void {
const gpa = gen.astgen.gpa;
try validateSwitchProngs(gen, stmt_node);
const case_list = stmt_node.data.switch_stmt.cases;
// NOTE: We're going to create an array of label indexes here, since we
// may create additional labels while traversing nested expressions.
var label_list: std.ArrayList(usize) = .empty;
defer label_list.deinit(gpa);
try label_list.ensureUnusedCapacity(gpa, case_list.len);
var child_scope = try gen.astgen.createScope(parent_scope);
defer child_scope.deinit();
const exit_label = try gen.makeLabel();
gen.setExit(exit_label);
for (case_list) |case_stmt| {
const label_index = try gen.makeLabel();
switch (case_stmt.tag) {
.if_branch => {
const lhs = case_stmt.data.bin.lhs orelse unreachable;
try expr(gen, child_scope, lhs);
const fixup_offset = try gen.emitJumpInst(.jmp_t);
_ = try gen.makeFixup(.{
.mode = .relative,
.label_index = label_index,
.code_offset = fixup_offset,
});
try gen.emitSimpleInst(.pop);
},
.else_branch => {
const fixup_offset = try gen.emitJumpInst(.jmp);
_ = try gen.makeFixup(.{
.mode = .relative,
.label_index = label_index,
.code_offset = fixup_offset,
});
},
else => unreachable,
}
}
for (case_list, label_list.items) |case_stmt, label_index| {
const body_stmt = case_stmt.data.bin.rhs;
switch (case_stmt.tag) {
.if_branch => {
gen.setLabel(label_index);
try gen.emitSimpleInst(.pop);
},
.else_branch => {
gen.setLabel(label_index);
},
else => unreachable,
}
try blockStmt(gen, child_scope, body_stmt);
const fixup_inst = try gen.emitJumpInst(.jmp);
_ = try gen.makeFixup(.{
.mode = .relative,
.label_index = gen.exit_label,
.code_offset = fixup_inst,
});
}
gen.setLabel(gen.exit_label);
}
fn switchStmt(
gen: *GenIr,
parent_scope: *Scope,
@ -721,6 +718,105 @@ fn switchStmt(
gen.setLabel(gen.exit_label);
}
fn ifStmt(
parent_block: *GenIr,
scope: *Scope,
stmt_node: *const Ast.Node,
) InnerError!Ir.Inst.Index {
const astgen = parent_block.astgen;
const cond_expr = stmt_node.data.switch_stmt.condition_expr.?;
try validateSwitchProngs(parent_block, stmt_node);
const case_list = stmt_node.data.switch_stmt.cases;
const then_node = case_list[0];
const last_prong = case_list[case_list.len - 1];
var block_scope = parent_block.makeSubBlock();
defer block_scope.unstack();
const cond_inst = try expr(&block_scope, scope, cond_expr);
const condbr = try block_scope.addCondBr(.condbr);
const block = try parent_block.makeBlockInst();
try block_scope.setBlockBody(block); // unstacks block
try parent_block.instructions.append(astgen.gpa, block);
var then_block = parent_block.makeSubBlock();
defer then_block.unstack();
try blockStmt(&then_block, scope, then_node);
_ = try then_block.addBreak(.@"break", block);
var else_block = parent_block.makeSubBlock();
defer else_block.unstack();
if (then_node == last_prong) {
_ = try else_block.addBreak(.@"break", block);
} else {
const block_node = last_prong.data.bin.rhs.?;
try blockStmt(&else_block, scope, block_node);
}
try setCondBrPayload(condbr, cond_inst, &then_block, &else_block);
return @enumFromInt(0);
}
fn ifChain(
parent_block: *GenIr,
scope: *Scope,
branch_list: []const *Ast.Node,
) InnerError!Ir.Inst.Index {
const gpa = parent_block.astgen.gpa;
if (branch_list.len == 0) return @enumFromInt(0);
if (branch_list[0].data.bin.lhs == null) {
const body_node = branch_list[0].data.bin.rhs.?;
try blockStmt(parent_block, scope, body_node);
return @enumFromInt(0);
}
var block_scope = parent_block.makeSubBlock();
defer block_scope.unstack();
const branch = branch_list[0];
const cond_expr = branch.data.bin.lhs.?;
const body_node = branch.data.bin.rhs.?;
const cond_inst = try expr(&block_scope, scope, cond_expr);
const condbr = try block_scope.addCondBr(.condbr);
const block_inst = try parent_block.makeBlockInst();
try block_scope.setBlockBody(block_inst);
try parent_block.instructions.append(gpa, block_inst);
var then_block = parent_block.makeSubBlock();
defer then_block.unstack();
try blockStmt(&then_block, scope, body_node);
_ = try then_block.addBreak(.@"break", block_inst);
var else_block = parent_block.makeSubBlock();
defer else_block.unstack();
const next_branches = branch_list[1..];
_ = try ifChain(parent_block, scope, next_branches);
_ = try else_block.addBreak(.@"break", block_inst);
try setCondBrPayload(condbr, cond_inst, &then_block, &else_block);
return @enumFromInt(0);
}
fn multiIfStmt(
parent_block: *GenIr,
scope: *Scope,
stmt_node: *const Ast.Node,
) InnerError!Ir.Inst.Index {
try validateSwitchProngs(parent_block, stmt_node);
const branch_list = stmt_node.data.switch_stmt.cases;
if (branch_list[0].data.bin.lhs == null) {
const branch = branch_list[0];
const body_node = branch.data.bin.rhs.?;
try blockStmt(parent_block, scope, body_node);
return @enumFromInt(0);
}
_ = try ifChain(parent_block, scope, branch_list);
return @enumFromInt(0);
}
fn inlineLogicExpr(
gen: *GenIr,
scope: *Scope,
@ -731,34 +827,29 @@ fn inlineLogicExpr(
return expr(gen, scope, main_node);
}
fn contentExpr(gi: *GenIr, scope: *Scope, expr_node: *const Ast.Node) InnerError!Ir.Inst.Index {
const astgen = gi.astgen;
const gpa = astgen.gpa;
const block_inst = try gi.makeBlockInst();
try gi.instructions.append(gpa, block_inst);
var sub_block = gi.makeSubBlock();
defer sub_block.unstack();
fn contentExpr(block: *GenIr, scope: *Scope, expr_node: *const Ast.Node) InnerError!Ir.Inst.Index {
// FIXME: This is a placeholder until we figure out what this function should be returning.
var last_inst: Ir.Inst.Index = undefined;
// TODO: Make sure that this is not nullable.
const node_list = expr_node.data.list.items orelse unreachable;
const node_list = expr_node.data.list.items.?;
for (node_list) |child_node| {
_ = switch (child_node.tag) {
.string_literal => try stringLiteral(&sub_block, child_node),
.inline_logic_expr => try inlineLogicExpr(&sub_block, scope, child_node),
//.if_stmt => try ifStmt(gen, scope, child_node),
//.multi_if_stmt => try multiIfStmt(gen, scope, child_node),
last_inst = switch (child_node.tag) {
.string_literal => try stringLiteral(block, child_node),
.inline_logic_expr => try inlineLogicExpr(block, scope, child_node),
.if_stmt => try ifStmt(block, scope, child_node),
.multi_if_stmt => try multiIfStmt(block, scope, child_node),
//.switch_stmt => try switchStmt(gen, scope, child_node),
else => unreachable,
};
last_inst = try block.add(.{ .tag = .content_push, .data = undefined });
}
try sub_block.setBlockBody(block_inst);
return block_inst;
return last_inst;
}
fn contentStmt(gen: *GenIr, scope: *Scope, stmt_node: *const Ast.Node) InnerError!Ir.Inst.Index {
const expr_node = stmt_node.data.bin.lhs orelse unreachable;
const expr_ref = try contentExpr(gen, scope, expr_node);
return gen.addUnaryNode(.content, expr_ref);
return gen.addUnaryNode(.content_flush, expr_ref);
}
fn assignStmt(gi: *GenIr, scope: *Scope, stmt_node: *const Ast.Node) InnerError!void {
@ -909,22 +1000,10 @@ fn blockInner(gi: *GenIr, parent_scope: *Scope, stmt_list: []*Ast.Node) !void {
}
}
fn blockStmt(
parent_gi: *GenIr,
scope: *Scope,
stmt_node: *const Ast.Node,
) InnerError!void {
const gpa = parent_gi.astgen.gpa;
const block_inst = try parent_gi.makeBlockInst();
try parent_gi.instructions.append(gpa, block_inst);
var gi = parent_gi.makeSubBlock();
defer gi.unstack();
fn blockStmt(block: *GenIr, scope: *Scope, stmt_node: *const Ast.Node) InnerError!void {
// TODO: Make sure that this value is concrete to omit check.
const block_stmts = stmt_node.data.list.items orelse unreachable;
try blockInner(&gi, scope, block_stmts);
try gi.setBlockBody(block_inst);
const block_stmts = stmt_node.data.list.items.?;
try blockInner(block, scope, block_stmts);
}
const main_knot_name: [:0]const u8 = "$__main__$";
@ -1018,11 +1097,6 @@ fn file(root_gi: *GenIr, scope: *Scope, file_node: *const Ast.Node) InnerError!v
return file_scope.setBlockBody(file_inst);
}
pub const Decl = struct {
decl_node: *const Ast.Node,
inst_index: Ir.Inst.Index,
};
/// Perform code generation via tree-walk.
pub fn generate(gpa: std.mem.Allocator, tree: *const Ast) !Ir {
var astgen: AstGen = .{

View file

@ -24,6 +24,8 @@ pub const Inst = struct {
decl_var,
decl_ref,
block,
condbr,
@"break",
alloc_local,
load_local,
store_local,
@ -33,11 +35,19 @@ pub const Inst = struct {
div,
mod,
neg,
not,
cmp_eq,
cmp_neq,
cmp_gt,
cmp_gte,
cmp_lt,
cmp_lte,
true_literal,
false_literal,
integer,
string,
content,
content_push,
content_flush,
};
pub const Data = union {
@ -45,11 +55,11 @@ pub const Inst = struct {
payload_index: u32,
},
un: struct {
lhs: Inst.Index,
lhs: Index,
},
bin: struct {
lhs: Inst.Index,
rhs: Inst.Index,
lhs: Index,
rhs: Index,
},
integer: struct {
value: u64,
@ -66,7 +76,7 @@ pub const Inst = struct {
pub const Declaration = struct {
name: NullTerminatedString,
value: Inst.Index,
value: Index,
};
pub const Knot = struct {
@ -80,6 +90,16 @@ pub const Inst = struct {
pub const Block = struct {
body_len: u32,
};
pub const Break = struct {
block_inst: Index,
};
pub const CondBr = struct {
condition: Index,
then_body_len: u32,
else_body_len: u32,
};
};
pub const Global = struct {
@ -112,29 +132,6 @@ pub fn deinit(ir: *Ir, gpa: std.mem.Allocator) void {
ir.* = undefined;
}
const Prefix = struct {
buf: std.ArrayListUnmanaged(u8) = .empty,
pub fn deinit(self: *Prefix, gpa: std.mem.Allocator) void {
self.buf.deinit(gpa);
}
pub fn writeIndent(self: *const Prefix, writer: *std.Io.Writer) !void {
try writer.writeAll(self.buf.items);
}
pub fn pushChildPrefix(self: *Prefix, gpa: std.mem.Allocator) !usize {
const old_len = self.buf.items.len;
const seg: []const u8 = " ";
try self.buf.appendSlice(gpa, seg);
return old_len;
}
pub fn restore(self: *Prefix, new_len: usize) void {
self.buf.shrinkRetainingCapacity(new_len);
}
};
const Render = struct {
gpa: std.mem.Allocator,
prefix: Prefix,
@ -145,6 +142,29 @@ const Render = struct {
WriteFailed,
};
const Prefix = struct {
buf: std.ArrayListUnmanaged(u8) = .empty,
pub fn deinit(self: *Prefix, gpa: std.mem.Allocator) void {
self.buf.deinit(gpa);
}
pub fn writeIndent(self: *const Prefix, writer: *std.Io.Writer) !void {
try writer.writeAll(self.buf.items);
}
pub fn pushChildPrefix(self: *Prefix, gpa: std.mem.Allocator) !usize {
const old_len = self.buf.items.len;
const seg: []const u8 = " ";
try self.buf.appendSlice(gpa, seg);
return old_len;
}
pub fn restore(self: *Prefix, new_len: usize) void {
self.buf.shrinkRetainingCapacity(new_len);
}
};
fn renderSimple(r: *Render, inst: Inst) Error!void {
const io_w = r.writer;
return io_w.print("{s}(?)", .{@tagName(inst.tag)});
@ -183,6 +203,26 @@ const Render = struct {
try io_w.writeAll(")");
}
fn renderBreak(r: *Render, ir: Ir, inst: Inst) Error!void {
const io_w = r.writer;
const extra = ir.extraData(Inst.Break, inst.data.payload.payload_index);
try io_w.print("{s}(%{d})", .{ @tagName(inst.tag), extra.data.block_inst });
}
fn renderCondbr(r: *Render, ir: Ir, inst: Inst) Error!void {
const io_w = r.writer;
const extra = ir.extraData(Inst.CondBr, inst.data.payload.payload_index);
const then_body = ir.bodySlice(extra.end, extra.data.then_body_len);
const else_body = ir.bodySlice(extra.end + then_body.len, extra.data.else_body_len);
try io_w.print("{s}(%{d}, ", .{ @tagName(inst.tag), extra.data.condition });
try renderBodyInner(r, ir, then_body);
try io_w.writeAll(", ");
try renderBodyInner(r, ir, else_body);
try io_w.writeAll(")");
}
fn renderKnotDecl(r: *Render, ir: Ir, inst: Inst) Error!void {
const io_w = r.writer;
const extra = ir.extraData(Inst.Knot, inst.data.payload.payload_index);
@ -244,6 +284,8 @@ const Render = struct {
const str_bytes = inst.data.string.get(ir);
try io_w.print("{s}(\"{s}\")", .{ @tagName(inst.tag), str_bytes });
},
.condbr => try r.renderCondbr(ir, inst),
.@"break" => try r.renderBreak(ir, inst),
.alloc_local => try r.renderSimple(inst),
.load_local => try r.renderUnary(inst),
.store_local => try r.renderBinary(inst),
@ -254,6 +296,13 @@ const Render = struct {
.div => try r.renderBinary(inst),
.mod => try r.renderBinary(inst),
.neg => try r.renderUnary(inst),
.not => try r.renderUnary(inst),
.cmp_eq => try r.renderBinary(inst),
.cmp_neq => try r.renderBinary(inst),
.cmp_gt => try r.renderBinary(inst),
.cmp_gte => try r.renderBinary(inst),
.cmp_lt => try r.renderBinary(inst),
.cmp_lte => try r.renderBinary(inst),
.true_literal => {
try io_w.print("{s}", .{@tagName(inst.tag)});
},
@ -268,7 +317,8 @@ const Render = struct {
const str_bytes = inst.data.string.get(ir);
try io_w.print("{s}(\"{s}\")", .{ @tagName(inst.tag), str_bytes });
},
.content => try r.renderUnary(inst),
.content_push => try r.renderSimple(inst),
.content_flush => try r.renderUnary(inst),
}
try io_w.writeAll("\n");
}

View file

@ -14,6 +14,7 @@ knots: std.ArrayListUnmanaged(CompiledStory.Knot) = .empty,
const InnerError = error{
OutOfMemory,
TooManyConstants,
InvalidJump,
};
fn deinit(sema: *Sema) void {
@ -66,10 +67,74 @@ fn makeConstant(sema: *Sema, data: CompiledStory.Constant) !usize {
}
const Chunk = struct {
sema: *Sema,
name: Ir.NullTerminatedString,
arity: u32,
stack_size: u32,
stack_map: std.AutoHashMapUnmanaged(Ir.Inst.Index, u32),
labels: std.ArrayListUnmanaged(Label),
fixups: std.ArrayListUnmanaged(Fixup),
const dummy_address = 0xffffffff;
const Label = struct {
code_offset: usize,
};
const Fixup = struct {
mode: enum {
relative,
absolute,
},
label_index: usize,
code_offset: usize,
};
fn addFixup(chunk: *Chunk, op: Story.Opcode, label: usize) !void {
return chunk.fixups.append(chunk.sema.gpa, .{
.mode = .relative,
.label_index = label,
.code_offset = try chunk.sema.emitJumpOp(op),
});
}
fn addLabel(chunk: *Chunk) error{OutOfMemory}!usize {
const label_index = chunk.labels.items.len;
try chunk.labels.append(chunk.sema.gpa, .{
.code_offset = dummy_address,
});
return label_index;
}
fn setLabel(chunk: *Chunk, label_index: usize) void {
const code_offset = chunk.sema.bytecode.items.len;
assert(label_index <= chunk.labels.items.len);
const label_data = &chunk.labels.items[label_index];
label_data.code_offset = code_offset;
}
fn resolveLabels(chunk: *Chunk) !void {
const start_index = 0;
const end_index = chunk.fixups.items.len;
const bytecode = &chunk.sema.bytecode;
for (chunk.fixups.items[start_index..end_index]) |fixup| {
const label = chunk.labels.items[fixup.label_index];
assert(label.code_offset != dummy_address);
const target_offset: usize = switch (fixup.mode) {
.relative => label.code_offset - fixup.code_offset - 2,
.absolute => label.code_offset,
};
if (target_offset >= std.math.maxInt(u16)) {
std.debug.print("Too much code to jump over!\n", .{});
return error.InvalidJump;
}
assert(bytecode.capacity >= label.code_offset + 2);
bytecode.items[fixup.code_offset] = @intCast((target_offset >> 8) & 0xff);
bytecode.items[fixup.code_offset + 1] = @intCast(target_offset & 0xff);
}
}
};
fn integerInst(sema: *Sema, inst: Ir.Inst) InnerError!void {
@ -94,7 +159,11 @@ fn binaryInst(sema: *Sema, _: Ir.Inst, op: Story.Opcode) InnerError!void {
return emitByteOp(sema, op);
}
fn contentInst(sema: *Sema, _: Ir.Inst) InnerError!void {
/// Lowers a `content_push` IR instruction to the `stream_push` opcode.
fn irContentPush(sema: *Sema, _: Ir.Inst) InnerError!void {
    try emitByteOp(sema, .stream_push);
}
/// Lowers a `content_flush` IR instruction to the `stream_flush` opcode.
fn irContentFlush(sema: *Sema, _: Ir.Inst) InnerError!void {
    try emitByteOp(sema, .stream_flush);
}
@ -119,6 +188,38 @@ fn loadLocal(sema: *Sema, chunk: *Chunk, inst: Ir.Inst) InnerError!void {
try emitConstOp(sema, .load, @intCast(stack_offset));
}
/// Lowers a conditional branch (`condbr`): compiles the then/else bodies
/// with forward-jump fixups that resolveLabels() patches once the final
/// offsets are known. The emitted byte order here is load-bearing.
fn irCondBr(sema: *Sema, chunk: *Chunk, inst: Ir.Inst) InnerError!void {
    const payload_node = inst.data.payload;
    const extra = sema.ir.extraData(Ir.Inst.CondBr, payload_node.payload_index);
    // The then-body and else-body instruction lists are stored back to back
    // in the IR extra array, so the else slice starts where the then slice ends.
    const then_body = sema.ir.bodySlice(extra.end, extra.data.then_body_len);
    const else_body = sema.ir.bodySlice(extra.end + then_body.len, extra.data.else_body_len);
    const else_label = try chunk.addLabel();
    const end_label = try chunk.addLabel();
    // jmp_f: skip over the then-body when the condition on the stack is false.
    try chunk.addFixup(.jmp_f, else_label);
    // Discard the condition value before executing the taken branch.
    try emitByteOp(sema, .pop);
    for (then_body) |body_index| try compileInst(sema, chunk, body_index);
    // After the then-body, unconditionally jump past the else-body.
    try chunk.addFixup(.jmp, end_label);
    chunk.setLabel(else_label);
    // jmp_f does not consume the condition, so the else path pops it too.
    try emitByteOp(sema, .pop);
    for (else_body) |body_index| try compileInst(sema, chunk, body_index);
    chunk.setLabel(end_label);
}
/// Lowers a `break` IR instruction. Currently a stub: it emits no bytecode
/// and only discards its arguments.
// TODO: implement break semantics (presumably a jump fixup to the enclosing
// block's end label — confirm against the IR's block/break design).
fn irBreak(sema: *Sema, inst: Ir.Inst) InnerError!void {
    _ = sema;
    _ = inst;
}
/// Lowers a `block` IR instruction by compiling each instruction of its
/// body in order.
fn irBlock(sema: *Sema, chunk: *Chunk, block_inst: Ir.Inst) InnerError!void {
    const payload_index = block_inst.data.payload.payload_index;
    const block_extra = sema.ir.extraData(Ir.Inst.Block, payload_index);
    const body_slice = sema.ir.bodySlice(block_extra.end, block_extra.data.body_len);
    for (body_slice) |inst_index| {
        try compileInst(sema, chunk, inst_index);
    }
}
fn declRef(sema: *Sema, inst: Ir.Inst) InnerError!void {
const ir = sema.ir;
const str = inst.data.string.start;
@ -160,17 +261,23 @@ fn declKnot(sema: *Sema, name_ref: Ir.NullTerminatedString, inst: Ir.Inst) Inner
const const_start = sema.constants.items.len;
var chunk: Chunk = .{
.sema = sema,
.name = name_ref,
.arity = 0,
.stack_size = 0,
.stack_map = .empty,
.fixups = .empty,
.labels = .empty,
};
defer chunk.stack_map.deinit(gpa);
defer chunk.fixups.deinit(gpa);
defer chunk.labels.deinit(gpa);
const body_slice = ir.bodySlice(extra.end, extra.data.body_len);
for (body_slice) |body_inst| try compileInst(sema, &chunk, body_inst);
try emitByteOp(sema, .exit);
try chunk.resolveLabels();
try sema.knots.append(gpa, .{
.name_ref = name_ref,
@ -187,13 +294,6 @@ fn declKnot(sema: *Sema, name_ref: Ir.NullTerminatedString, inst: Ir.Inst) Inner
});
}
/// Compiles each instruction of a block's body in order.
// NOTE(review): this is the pre-rename version of irBlock (deleted side of
// the diff); both lower `Ir.Inst.Block` identically via compileInst.
fn blockInst(sema: *Sema, chunk: *Chunk, block_inst: Ir.Inst) InnerError!void {
    const ir = sema.ir;
    const extra = ir.extraData(Ir.Inst.Block, block_inst.data.payload.payload_index);
    const body = ir.bodySlice(extra.end, extra.data.body_len);
    for (body) |body_index| try compileInst(sema, chunk, body_index);
}
fn declaration(sema: *Sema, parent_chunk: ?*Chunk, inst: Ir.Inst) !void {
const ir = sema.ir;
const extra = ir.extraData(Ir.Inst.Declaration, inst.data.payload.payload_index);
@ -215,7 +315,9 @@ fn compileInst(sema: *Sema, chunk: *Chunk, index: Ir.Inst.Index) InnerError!void
.decl_var => unreachable, // handled in declaration()
.decl_knot => unreachable, // handled in declaration()
.decl_ref => try declRef(sema, inst),
.block => try blockInst(sema, chunk, inst),
.condbr => try irCondBr(sema, chunk, inst),
.@"break" => try irBreak(sema, inst),
.block => try irBlock(sema, chunk, inst),
.alloc_local => try allocLocal(sema, chunk, index),
.store_local => try storeLocal(sema, chunk, inst),
.load_local => try loadLocal(sema, chunk, inst),
@ -227,7 +329,18 @@ fn compileInst(sema: *Sema, chunk: *Chunk, index: Ir.Inst.Index) InnerError!void
.div => try binaryInst(sema, inst, .div),
.mod => try binaryInst(sema, inst, .mod),
.neg => try unaryInst(sema, inst, .neg),
.content => try contentInst(sema, inst),
.not => try unaryInst(sema, inst, .not),
.cmp_eq => try unaryInst(sema, inst, .cmp_eq),
.cmp_neq => {
try unaryInst(sema, inst, .cmp_eq);
try emitByteOp(sema, .not);
},
.cmp_lt => try unaryInst(sema, inst, .cmp_lt),
.cmp_lte => try unaryInst(sema, inst, .cmp_lte),
.cmp_gt => try unaryInst(sema, inst, .cmp_gt),
.cmp_gte => try unaryInst(sema, inst, .cmp_gte),
.content_push => try irContentPush(sema, inst),
.content_flush => try irContentFlush(sema, inst),
.string => try stringInst(sema, inst),
.integer => try integerInst(sema, inst),
}

View file

@ -524,7 +524,7 @@ pub fn loadFromString(
.dump_writer = options.dump_writer,
};
try compiled.buildRuntime(gpa, sem_ir, &story);
// try story.divert("$__main__$");
// story.can_advance = true;
try story.divert("$__main__$");
story.can_advance = true;
return story;
}

View file

@ -227,15 +227,14 @@ pub const Object = struct {
}
pub fn fromObject(story: *Story, object: *Object) !*Object.String {
// NOTE: 20 bytes should be enough.
const print_buffer_len = 20;
var print_buffer: [print_buffer_len]u8 = undefined;
switch (object.tag) {
.number => {
// NOTE: 20 bytes should be enough.
const print_buffer_len = 20;
var print_buffer: [print_buffer_len]u8 = undefined;
const number_object: *Object.Number = @ptrCast(object);
const number_bytes = try std.fmt.bufPrint(&print_buffer, "{}", .{
number_object.data.floating,
number_object.data.integer,
});
return Object.String.create(story, number_bytes);
},