feat: infrastructure to support qualified lookups

This commit is contained in:
Brett Broadhurst 2026-03-25 03:18:24 -06:00
parent e8ad1cd5b5
commit 440ec68481
Failed to generate hash of commit
6 changed files with 1118 additions and 575 deletions

View file

@ -3,19 +3,16 @@ const Ast = @import("Ast.zig");
const Ir = @import("Ir.zig");
const Story = @import("Story.zig");
const compile = @import("compile.zig");
const Compilation = compile.Compilation;
const InternPool = compile.InternPool;
const Module = compile.Module;
const assert = std.debug.assert;
const Sema = @This();
gpa: std.mem.Allocator,
arena: std.mem.Allocator,
tree: Ast,
module: *compile.Module,
ir: Ir,
constants: std.ArrayListUnmanaged(Compilation.Constant) = .empty,
constants_map: std.AutoHashMapUnmanaged(Compilation.Constant, u32) = .empty,
globals_map: std.AutoHashMapUnmanaged(u32, u32) = .empty,
knots: std.ArrayListUnmanaged(Compilation.Knot) = .empty,
errors: *std.ArrayListUnmanaged(Compilation.Error),
errors: *std.ArrayListUnmanaged(Module.Error),
const InnerError = error{
OutOfMemory,
@ -24,14 +21,19 @@ const InnerError = error{
InvalidJump,
};
const Ref = union(enum) {
pub const Ref = union(enum) {
none,
bool_true,
bool_false,
index: u32,
constant: u32,
global: u32,
local: u32,
constant: InternPool.Constant.Index,
variable: InternPool.Constant.Index,
temporary: u32,
// FIXME: This is horrible.
knot: struct {
const_index: InternPool.Constant.Index,
namespace: *Module.Namespace,
},
};
pub const SrcLoc = struct {
@ -45,8 +47,9 @@ fn fail(
args: anytype,
) error{ OutOfMemory, AnalysisFail } {
// TODO: Revisit this
const source_bytes = sema.module.tree.source;
const message = try std.fmt.allocPrint(sema.arena, format, args);
const loc = compile.findLineColumn(sema.tree.source, src.src_offset);
const loc = compile.findLineColumn(source_bytes, src.src_offset);
try sema.errors.append(sema.gpa, .{
.line = loc.line,
.column = loc.column,
@ -56,47 +59,57 @@ fn fail(
return error.AnalysisFail;
}
/// Intern a constant.
fn getOrPutConstant(sema: *Sema, data: Compilation.Constant) error{OutOfMemory}!Ref {
if (sema.constants_map.get(data)) |index| {
return .{ .constant = index };
} else {
const gpa = sema.gpa;
const index = sema.constants.items.len;
try sema.constants.append(gpa, data);
try sema.constants_map.put(gpa, data, @intCast(index));
return .{ .constant = @intCast(index) };
}
/// Retrieve an index into the global constant pool for an integer value.
fn getOrPutInt(sema: *Sema, value: u64) !Ref {
const const_index = try sema.module.intern_pool.getOrPutInt(sema.gpa, value);
return .{ .constant = const_index };
}
/// Intern an integer as a story constant.
fn getOrPutInt(sema: *Sema, value: u64) error{OutOfMemory}!Ref {
return sema.getOrPutConstant(.{ .integer = value });
/// Retrieve an index into the global constant pool for a string value.
fn getOrPutStr(sema: *Sema, value: Ir.NullTerminatedString) !Ref {
const const_index = try sema.module.intern_pool.getOrPutStr(sema.gpa, value);
return .{ .constant = const_index };
}
/// Intern a string as a story constant.
fn getOrPutStr(sema: *Sema, value: Ir.NullTerminatedString) error{OutOfMemory}!Ref {
return sema.getOrPutConstant(.{ .string = value });
pub fn lookupIdentifier(
sema: *Sema,
chunk: *Chunk,
ident: InternPool.Constant.Index,
) !Ref {
return sema.lookupInNamespace(chunk.namespace, ident);
}
pub fn lookupInNamespace(
sema: *Sema,
namespace: *Module.Namespace,
ident: InternPool.Constant.Index,
) !Ref {
var scope: ?*Module.Namespace = namespace;
while (scope) |s| : (scope = s.parent) {
if (s.decls.get(ident)) |decl| switch (decl.tag) {
.knot => return .{ .knot = .{
.namespace = decl.namespace.?,
.const_index = ident,
} },
.variable => return .{ .variable = ident },
};
}
// FIXME: This is temporary
return sema.fail(.{ .src_offset = 0 }, "unknown identifier", .{});
}
pub fn deinit(sema: *Sema) void {
const gpa = sema.gpa;
sema.constants.deinit(gpa);
sema.constants_map.deinit(gpa);
sema.globals_map.deinit(gpa);
sema.knots.deinit(gpa);
sema.* = undefined;
}
const Chunk = struct {
pub const Chunk = struct {
sema: *Sema,
knot: *Compilation.Knot,
namespace: *Module.Namespace,
code: *Module.CodeChunk,
inst_map: std.AutoHashMapUnmanaged(Ir.Inst.Index, Ref) = .empty,
constants_map: std.AutoHashMapUnmanaged(InternPool.Constant.Index, u8) = .empty,
labels: std.ArrayListUnmanaged(Label) = .empty,
fixups: std.ArrayListUnmanaged(Fixup) = .empty,
inst_map: std.AutoHashMapUnmanaged(Ir.Inst.Index, Ref) = .empty,
constant_map: std.AutoHashMapUnmanaged(u32, u32) = .empty,
const dummy_address = 0xffffffff;
const Label = struct {
code_offset: usize,
@ -111,16 +124,31 @@ const Chunk = struct {
code_offset: u32,
};
fn deinit(chunk: *Chunk, gpa: std.mem.Allocator) void {
chunk.fixups.deinit(gpa);
chunk.labels.deinit(gpa);
const dummy_address = 0xffffffff;
pub fn deinit(chunk: *Chunk, gpa: std.mem.Allocator) void {
chunk.inst_map.deinit(gpa);
chunk.constant_map.deinit(gpa);
chunk.constants_map.deinit(gpa);
chunk.labels.deinit(gpa);
chunk.fixups.deinit(gpa);
}
/// Reserve a stack slot for temporary variables.
fn addStackSlot(chunk: *Chunk) u8 {
const new_slot = chunk.code.stack_size;
chunk.code.stack_size += 1;
return @intCast(new_slot);
}
/// Reserve a stack slot for a parameter.
fn addParameter(chunk: *Chunk) u8 {
chunk.code.args_count += 1;
return chunk.addStackSlot();
}
fn addByteOp(chunk: *Chunk, op: Story.Opcode) error{OutOfMemory}!Ref {
const gpa = chunk.sema.gpa;
const bytecode = &chunk.knot.bytecode;
const bytecode = &chunk.code.bytecode;
const byte_index = bytecode.items.len;
try bytecode.append(gpa, @intFromEnum(op));
return .{ .index = @intCast(byte_index) };
@ -128,7 +156,7 @@ const Chunk = struct {
fn addConstOp(chunk: *Chunk, op: Story.Opcode, arg: u8) error{OutOfMemory}!Ref {
const gpa = chunk.sema.gpa;
const bytecode = &chunk.knot.bytecode;
const bytecode = &chunk.code.bytecode;
const byte_index = bytecode.items.len;
try bytecode.ensureUnusedCapacity(gpa, 2);
bytecode.appendAssumeCapacity(@intFromEnum(op));
@ -138,7 +166,7 @@ const Chunk = struct {
fn addJumpOp(chunk: *Chunk, op: Story.Opcode) error{OutOfMemory}!Ref {
const gpa = chunk.sema.gpa;
const bytecode = &chunk.knot.bytecode;
const bytecode = &chunk.code.bytecode;
try bytecode.ensureUnusedCapacity(gpa, 3);
bytecode.appendAssumeCapacity(@intFromEnum(op));
bytecode.appendAssumeCapacity(0xff);
@ -173,13 +201,26 @@ const Chunk = struct {
}
fn setLabel(chunk: *Chunk, label_index: usize) void {
const code_offset = chunk.knot.bytecode.items.len;
const bytecode = &chunk.code.bytecode;
const code_offset = bytecode.items.len;
assert(label_index <= chunk.labels.items.len);
const label_data = &chunk.labels.items[label_index];
label_data.code_offset = code_offset;
}
/// Intern a reference to a global constant within this chunk.
fn getOrPutConstantIndex(chunk: *Chunk, index: InternPool.Constant.Index) !u8 {
const gpa = chunk.sema.gpa;
const constants = &chunk.code.constants;
if (chunk.constants_map.get(index)) |local_index| return local_index;
const local_index: u8 = @intCast(constants.items.len);
try constants.append(gpa, @intCast(@intFromEnum(index)));
try chunk.constants_map.put(gpa, index, local_index);
return local_index;
}
fn resolveInst(chunk: *Chunk, ref: Ir.Inst.Ref) Ref {
if (ref.toIndex()) |index| {
return chunk.inst_map.get(index).?;
@ -187,16 +228,14 @@ const Chunk = struct {
switch (ref) {
.bool_true => return .bool_true,
.bool_false => return .bool_false,
else => return .{ .constant = @intFromEnum(ref) },
else => unreachable,
}
}
fn resolveLabels(chunk: *Chunk) !void {
const start_index = 0;
const end_index = chunk.fixups.items.len;
const bytecode = &chunk.knot.bytecode;
pub fn finalize(chunk: *Chunk) !void {
const bytecode = &chunk.code.bytecode;
for (chunk.fixups.items[start_index..end_index]) |fixup| {
for (chunk.fixups.items) |fixup| {
const label = chunk.labels.items[fixup.label_index];
assert(label.code_offset != dummy_address);
const target_offset: usize = switch (fixup.mode) {
@ -213,82 +252,29 @@ const Chunk = struct {
}
}
/// Intern a reference to a global constant within this chunk.
fn getOrPutConstantIndex(chunk: *Chunk, global_index: u32) !u32 {
const gpa = chunk.sema.gpa;
if (chunk.constant_map.get(global_index)) |local_index| return local_index;
const local_index: u32 = @intCast(chunk.knot.constants.items.len);
try chunk.knot.constants.append(gpa, global_index);
try chunk.constant_map.put(gpa, global_index, local_index);
return local_index;
}
fn doLoad(chunk: *Chunk, ref: Ref) InnerError!Ref {
switch (ref) {
.none => return ref,
.bool_true => return chunk.addByteOp(.true),
.bool_false => return chunk.addByteOp(.false),
.none => return ref,
.constant => |global_index| {
const local_index = try chunk.getOrPutConstantIndex(global_index);
.constant => |index| {
const local_index = try chunk.getOrPutConstantIndex(index);
return chunk.addConstOp(.load_const, @intCast(local_index));
},
.global => |global_index| {
const local_index = try chunk.getOrPutConstantIndex(global_index);
.variable => |index| {
const local_index = try chunk.getOrPutConstantIndex(index);
return chunk.addConstOp(.load_global, @intCast(local_index));
},
.local => |id| return chunk.addConstOp(.load, @intCast(id)),
.temporary => |id| return chunk.addConstOp(.load, @intCast(id)),
.index => return ref,
.knot => unreachable,
}
}
};
fn analyzeArithmeticArg(
sema: *Sema,
chunk: *Chunk,
arg: Ref,
arg_src: SrcLoc,
) !void {
switch (arg) {
.global => |index| {
const g = sema.ir.globals[index];
switch (g.tag) {
.variable => {
const name = try sema.getOrPutStr(g.name);
const local_index = try chunk.getOrPutConstantIndex(name.constant);
_ = try chunk.addConstOp(.load_global, @intCast(local_index));
},
.knot => return fail(sema, arg_src, "invalid operand to arithmetic expression", .{}),
}
},
.constant => |index| {
const local_index = try chunk.getOrPutConstantIndex(index);
_ = try chunk.addConstOp(.load_const, @intCast(local_index));
},
else => {},
}
}
fn analyzeDivertTarget(sema: *Sema, chunk: *Chunk, src: SrcLoc, callee: Ref) !Ref {
switch (callee) {
.global => |global_index| {
const g = sema.ir.globals[global_index];
switch (g.tag) {
.knot => {
const name = try sema.getOrPutStr(g.name);
const local_index = try chunk.getOrPutConstantIndex(name.constant);
return chunk.addConstOp(.load_global, @intCast(local_index));
},
.variable => return fail(sema, src, "invalid divert target", .{}),
}
},
else => unreachable,
}
}
fn irInteger(sema: *Sema, inst: Ir.Inst.Index) InnerError!Ref {
const value = sema.ir.instructions[@intFromEnum(inst)].data.int;
return sema.getOrPutConstant(.{ .integer = value });
return sema.getOrPutInt(value);
}
fn irString(sema: *Sema, inst: Ir.Inst.Index) InnerError!Ref {
@ -324,11 +310,9 @@ fn irBinaryOp(
}
fn irAlloc(_: *Sema, chunk: *Chunk, _: Ir.Inst.Index) InnerError!Ref {
const local_index = chunk.knot.stack_size;
// TODO: Add constraints on how many temporaries we can have.
// max(u8) or max(u16) are most likey appropriate.
chunk.knot.stack_size += 1;
return .{ .local = local_index };
return .{ .temporary = chunk.addStackSlot() };
}
fn irStore(sema: *Sema, chunk: *Chunk, inst: Ir.Inst.Index) InnerError!void {
@ -340,14 +324,18 @@ fn irStore(sema: *Sema, chunk: *Chunk, inst: Ir.Inst.Index) InnerError!void {
.bool_true, .bool_false => unreachable, // TODO: "Cannot assign to boolean"
.none => unreachable,
.constant => |_| unreachable, // TODO: "Cannot assign to constant"
.global => |id| _ = try chunk.addConstOp(.store_global, @intCast(id)),
.local => |id| _ = try chunk.addConstOp(.store, @intCast(id)),
.knot => |_| unreachable, // TODO: "Cannot assign to knot"
.variable => |index| {
_ = try chunk.addConstOp(.store_global, @intCast(@intFromEnum(index)));
},
.temporary => |index| {
_ = try chunk.addConstOp(.store, @intCast(index));
},
.index => unreachable,
}
_ = try chunk.addByteOp(.pop);
}
// TODO: Check what the target is!
fn irLoad(sema: *Sema, chunk: *Chunk, inst: Ir.Inst.Index) InnerError!Ref {
const data = sema.ir.instructions[@intFromEnum(inst)].data.un;
const lhs = chunk.resolveInst(data.lhs);
@ -366,13 +354,13 @@ fn irCondBr(sema: *Sema, chunk: *Chunk, inst: Ir.Inst.Index) InnerError!Ref {
_ = try chunk.doLoad(condition);
try chunk.addFixup(.jmp_f, else_label);
_ = try chunk.addByteOp(.pop);
try blockBodyInner(sema, chunk, then_body);
try analyzeBodyInner(sema, chunk, then_body);
try chunk.addFixup(.jmp, end_label);
chunk.setLabel(else_label);
_ = try chunk.addByteOp(.pop);
try blockBodyInner(sema, chunk, else_body);
try analyzeBodyInner(sema, chunk, else_body);
chunk.setLabel(end_label);
return .none;
}
@ -386,7 +374,7 @@ fn irBlock(sema: *Sema, chunk: *Chunk, inst: Ir.Inst.Index) InnerError!void {
const data = sema.ir.instructions[@intFromEnum(inst)].data.payload;
const extra = sema.ir.extraData(Ir.Inst.Block, data.extra_index);
const body = sema.ir.bodySlice(extra.end, extra.data.body_len);
return blockBodyInner(sema, chunk, body);
return analyzeBodyInner(sema, chunk, body);
}
fn irSwitchBr(sema: *Sema, chunk: *Chunk, inst: Ir.Inst.Index) InnerError!void {
@ -401,10 +389,8 @@ fn irSwitchBr(sema: *Sema, chunk: *Chunk, inst: Ir.Inst.Index) InnerError!void {
// TODO: Do something with this value?
//const condition = chunk.resolveInst(extra.data.operand);
const exit_label = try chunk.addLabel();
const cmp_var = chunk.knot.stack_size;
chunk.knot.stack_size += 1;
_ = try chunk.addConstOp(.store, @intCast(cmp_var));
const cmp_var = chunk.addStackSlot();
_ = try chunk.addConstOp(.store, cmp_var);
for (cases_slice) |case_index| {
const case_extra = sema.ir.extraData(Ir.Inst.SwitchBr.Case, @intFromEnum(case_index));
@ -428,7 +414,7 @@ fn irSwitchBr(sema: *Sema, chunk: *Chunk, inst: Ir.Inst.Index) InnerError!void {
chunk.setLabel(label_index);
_ = try chunk.addByteOp(.pop);
try blockBodyInner(sema, chunk, case_body);
try analyzeBodyInner(sema, chunk, case_body);
try chunk.addFixup(.jmp, exit_label);
}
@ -438,7 +424,7 @@ fn irSwitchBr(sema: *Sema, chunk: *Chunk, inst: Ir.Inst.Index) InnerError!void {
);
chunk.setLabel(else_label);
try blockBodyInner(sema, chunk, else_body);
try analyzeBodyInner(sema, chunk, else_body);
chunk.setLabel(exit_label);
}
@ -515,7 +501,7 @@ fn irChoiceBr(sema: *Sema, chunk: *Chunk, inst: Ir.Inst.Index) InnerError!void {
}
_ = try chunk.addByteOp(.stream_flush);
try blockBodyInner(sema, chunk, body_slice);
try analyzeBodyInner(sema, chunk, body_slice);
}
}
@ -523,72 +509,10 @@ fn irImplicitRet(_: *Sema, chunk: *Chunk, _: Ir.Inst.Index) InnerError!Ref {
return chunk.addByteOp(.exit);
}
fn irDeclRef(sema: *Sema, _: *Chunk, inst: Ir.Inst.Index) InnerError!Ref {
fn irDeclRef(sema: *Sema, chunk: *Chunk, inst: Ir.Inst.Index) InnerError!Ref {
const data = sema.ir.instructions[@intFromEnum(inst)].data.str_tok;
const str = try sema.getOrPutStr(data.start);
if (sema.globals_map.get(str.constant)) |global_index| {
return .{ .global = global_index };
}
return fail(sema, .{ .src_offset = data.src_offset }, "unknown global variable", .{});
}
fn irDeclVar(
sema: *Sema,
chunk: *Chunk,
name: Ir.NullTerminatedString,
inst: Ir.Inst.Index,
) InnerError!void {
const data = sema.ir.instructions[@intFromEnum(inst)].data.payload;
const extra = sema.ir.extraData(Ir.Inst.Block, data.extra_index);
const body = sema.ir.bodySlice(extra.end, extra.data.body_len);
try blockBodyInner(sema, chunk, body);
// FIXME: hack
{
const last_inst = body[body.len - 1].toRef();
const val = chunk.resolveInst(last_inst);
_ = try chunk.doLoad(val);
}
const interned_str = try sema.getOrPutStr(name);
_ = try chunk.addConstOp(.store_global, @intCast(interned_str.constant));
_ = try chunk.addByteOp(.pop);
}
fn irDeclKnot(
sema: *Sema,
name_ref: Ir.NullTerminatedString,
inst: Ir.Inst.Index,
) InnerError!void {
const gpa = sema.gpa;
const data = sema.ir.instructions[@intFromEnum(inst)].data.payload;
const extra = sema.ir.extraData(Ir.Inst.Knot, data.extra_index);
var knot: Compilation.Knot = .{
.name = name_ref,
.arity = 0,
.stack_size = 0,
};
var chunk: Chunk = .{
.sema = sema,
.knot = &knot,
};
defer chunk.deinit(gpa);
const body = sema.ir.bodySlice(extra.end, extra.data.body_len);
try blockBodyInner(sema, &chunk, body);
_ = try chunk.addByteOp(.exit);
try chunk.resolveLabels();
try sema.knots.append(gpa, knot);
}
fn irDeclaration(sema: *Sema, parent_chunk: ?*Chunk, inst: Ir.Inst.Index) !void {
const data = sema.ir.instructions[@intFromEnum(inst)].data.payload;
const extra = sema.ir.extraData(Ir.Inst.Declaration, data.extra_index).data;
const value_data = sema.ir.instructions[@intFromEnum(extra.value)];
switch (value_data.tag) {
.decl_var => try irDeclVar(sema, parent_chunk.?, extra.name, extra.value),
.decl_knot => try irDeclKnot(sema, extra.name, extra.value),
else => unreachable,
}
const decl_name = try sema.getOrPutStr(data.start);
return sema.lookupIdentifier(chunk, decl_name.constant);
}
fn irCall(_: *Sema, _: *Chunk, _: Ir.Inst.Index) !Ref {
@ -608,12 +532,28 @@ fn irDivert(
const data = sema.ir.instructions[@intFromEnum(inst)].data.payload;
const extra = sema.ir.extraData(ExtraType, data.extra_index);
const body = sema.ir.extra[extra.end..];
const callee = switch (kind) {
.direct => chunk.resolveInst(extra.data.callee),
.field => chunk.resolveInst(extra.data.obj_ptr),
};
switch (kind) {
.direct => {
const callee = chunk.resolveInst(extra.data.callee);
const callee_src: SrcLoc = .{ .src_offset = data.src_offset };
_ = try analyzeDivertTarget(sema, chunk, callee_src, callee);
},
.field => {
const callee = chunk.resolveInst(extra.data.obj_ptr);
const callee_src: SrcLoc = .{ .src_offset = data.src_offset };
const field_name = try sema.getOrPutStr(extra.data.field_name_start);
_ = try analyzeDivertTarget(sema, chunk, callee_src, callee);
const e = try sema.lookupInNamespace(callee.knot.namespace, field_name.constant);
switch (e) {
.knot => |knot| {
const local_index = try chunk.getOrPutConstantIndex(knot.const_index);
_ = try chunk.addConstOp(.load_attr, @intCast(local_index));
},
else => return sema.fail(callee_src, "invalid divert target", .{}),
}
},
}
const args_len = extra.data.args_len;
var arg_start: u32 = args_len;
@ -622,7 +562,7 @@ fn irDivert(
const arg_end = sema.ir.extra[extra.end + i];
defer arg_start = arg_end;
const arg_body = body[arg_start..arg_end];
try blockBodyInner(sema, chunk, @ptrCast(arg_body));
try analyzeBodyInner(sema, chunk, @ptrCast(arg_body));
// FIXME: hack
{
const last_inst: Ir.Inst.Index = @enumFromInt(arg_body[arg_body.len - 1]);
@ -639,28 +579,50 @@ fn irFieldPtr(_: *Sema, _: *Chunk, _: Ir.Inst.Index) !Ref {
// TODO: Check for duplicate parameters.
fn irParam(_: *Sema, chunk: *Chunk, _: Ir.Inst.Index) !Ref {
//const data = sema.ir.instructions[@intFromEnum(inst)].data.string;
const local_index = chunk.knot.stack_size;
// TODO: Add constraints on how many temporaries we can have.
// max(u8) or max(u16) are most likey appropriate.
chunk.knot.arity += 1;
chunk.knot.stack_size += 1;
return .{ .local = local_index };
return .{ .temporary = chunk.addParameter() };
}
fn blockBodyInner(sema: *Sema, chunk: *Chunk, body: []const Ir.Inst.Index) InnerError!void {
const gpa = sema.gpa;
fn analyzeArithmeticArg(
sema: *Sema,
chunk: *Chunk,
arg: Ref,
arg_src: SrcLoc,
) !void {
switch (arg) {
.variable => |index| {
const local_index = try chunk.getOrPutConstantIndex(index);
_ = try chunk.addConstOp(.load_global, @intCast(local_index));
},
.constant => |index| {
const local_index = try chunk.getOrPutConstantIndex(index);
_ = try chunk.addConstOp(.load_const, @intCast(local_index));
},
.knot => return fail(sema, arg_src, "invalid operand to arithmetic expression", .{}),
else => unreachable,
}
}
fn analyzeDivertTarget(sema: *Sema, chunk: *Chunk, src: SrcLoc, callee: Ref) !Ref {
switch (callee) {
.knot => |knot| {
const local_index = try chunk.getOrPutConstantIndex(knot.const_index);
return chunk.addConstOp(.load_global, @intCast(local_index));
},
else => return sema.fail(src, "invalid divert target", .{}),
}
}
fn analyzeBodyInner(sema: *Sema, chunk: *Chunk, body: []const Ir.Inst.Index) InnerError!void {
for (body) |inst| {
const data = sema.ir.instructions[@intFromEnum(inst)];
const ref: Ref = switch (data.tag) {
.file => unreachable,
.declaration => {
try irDeclaration(sema, chunk, inst);
continue;
},
.decl_var => unreachable, // handled in declaration()
.decl_knot => unreachable, // handled in declaration()
.file => unreachable, // never present inside block bodies
.declaration => unreachable, // never present inside block bodies
.decl_var => unreachable, // never present inside block bodies
.decl_knot => unreachable, // never present inside block bodies
.decl_stitch => unreachable, // never present inside block bodies
.switch_br => {
try irSwitchBr(sema, chunk, inst);
continue;
@ -720,29 +682,118 @@ fn blockBodyInner(sema: *Sema, chunk: *Chunk, body: []const Ir.Inst.Index) Inner
.field_ptr => try irFieldPtr(sema, chunk, inst),
.param => try irParam(sema, chunk, inst),
};
try chunk.inst_map.put(gpa, inst, ref);
try chunk.inst_map.put(sema.gpa, inst, ref);
}
}
pub fn analyzeFile(sema: *Sema, inst: Ir.Inst.Index) InnerError!void {
pub fn analyzeStitch(
sema: *Sema,
chunk: *Chunk,
inst: Ir.Inst.Index,
) !void {
const data = sema.ir.instructions[@intFromEnum(inst)].data.payload;
const extra = sema.ir.extraData(Ir.Inst.Block, data.extra_index);
const extra = sema.ir.extraData(Ir.Inst.Stitch, data.extra_index);
const body = sema.ir.bodySlice(extra.end, extra.data.body_len);
// FIXME: We are going to get burned by this if we don't formalize it.
// Adding common constants to the constant pool.
const static_constants = &[_]Compilation.Constant{
.{ .integer = 0 },
.{ .integer = 1 },
};
for (static_constants) |sc| {
_ = try sema.getOrPutConstant(sc);
try analyzeBodyInner(sema, chunk, body);
}
try sema.globals_map.ensureUnusedCapacity(sema.gpa, @intCast(sema.ir.globals.len));
for (sema.ir.globals, 0..) |global, global_index| {
const interned = try sema.getOrPutStr(global.name);
sema.globals_map.putAssumeCapacity(interned.constant, @intCast(global_index));
pub fn analyzeKnot(
sema: *Sema,
chunk: *Chunk,
inst: Ir.Inst.Index,
) !void {
const data = sema.ir.instructions[@intFromEnum(inst)].data.payload;
const extra = sema.ir.extraData(Ir.Inst.Knot, data.extra_index);
const body = sema.ir.bodySlice(extra.end, extra.data.body_len);
try analyzeBodyInner(sema, chunk, body);
}
fn analyzeNestedDecl(
sema: *Sema,
namespace: *Module.Namespace,
inst: Ir.Inst.Index,
) !void {
const data = sema.ir.instructions[@intFromEnum(inst)].data.payload;
const extra = sema.ir.extraData(Ir.Inst.Declaration, data.extra_index).data;
const decl = sema.ir.instructions[@intFromEnum(extra.value)];
const decl_name = try sema.module.intern_pool.getOrPutStr(sema.gpa, extra.name);
switch (decl.tag) {
.decl_stitch => {
const child_namespace = try sema.module.createNamespace(namespace);
try namespace.decls.put(sema.arena, decl_name, .{
.tag = .knot,
.decl_inst = extra.value,
.args_count = 0,
.namespace = child_namespace,
});
try sema.module.queueWorkItem(.{
.tag = .stitch,
.decl_name = decl_name,
.inst_index = extra.value,
.namespace = child_namespace,
});
},
else => unreachable,
}
}
pub fn analyzeTopLevelDecl(
sema: *Sema,
namespace: *Module.Namespace,
inst: Ir.Inst.Index,
) !void {
const data = sema.ir.instructions[@intFromEnum(inst)].data.payload;
const extra = sema.ir.extraData(Ir.Inst.Declaration, data.extra_index).data;
const decl_inst = sema.ir.instructions[@intFromEnum(extra.value)];
const decl_name = try sema.module.intern_pool.getOrPutStr(sema.gpa, extra.name);
switch (decl_inst.tag) {
.decl_var => {
const decl_extra = sema.ir.extraData(Ir.Inst.Var, decl_inst.data.payload.extra_index);
const body = sema.ir.bodySlice(decl_extra.end, decl_extra.data.body_len);
try namespace.decls.put(sema.gpa, decl_name, .{
.tag = .variable,
.namespace = null,
.decl_inst = extra.value,
.args_count = 0,
});
// FIXME: Broken
var chunk: *Chunk = undefined;
try analyzeBodyInner(sema, chunk, body);
// FIXME: hack
{
const last_inst = body[body.len - 1].toRef();
const val = chunk.resolveInst(last_inst);
_ = try chunk.doLoad(val);
}
_ = try chunk.addConstOp(.store_global, @intCast(@intFromEnum(decl_name)));
_ = try chunk.addByteOp(.pop);
},
.decl_knot => {
const _data = sema.ir.instructions[@intFromEnum(extra.value)].data.payload;
const _extra = sema.ir.extraData(Ir.Inst.Knot, _data.extra_index);
const _body = sema.ir.bodySlice(_extra.end, _extra.data.body_len);
const _stitches = sema.ir.bodySlice(_extra.end + _body.len, _extra.data.stitches_len);
const child_namespace = try sema.module.createNamespace(namespace);
try namespace.decls.put(sema.arena, decl_name, .{
.tag = .knot,
.decl_inst = extra.value,
.args_count = 0,
.namespace = child_namespace,
});
try sema.module.queueWorkItem(.{
.tag = .knot,
.decl_name = decl_name,
.inst_index = extra.value,
.namespace = child_namespace,
});
for (_stitches) |st| {
try analyzeNestedDecl(sema, child_namespace, st);
}
},
else => unreachable,
}
for (body) |body_index| try irDeclaration(sema, null, body_index);
}

View file

@ -1,10 +1,9 @@
//! Virtual machine state for story execution.
const std = @import("std");
const Compilation = @import("compile.zig").Compilation;
const tokenizer = @import("tokenizer.zig");
const Ast = @import("Ast.zig");
const AstGen = @import("AstGen.zig");
const Sema = @import("Sema.zig");
const Module = @import("compile.zig").Module;
pub const Object = @import("Story/Object.zig");
const Dumper = @import("Story/Dumper.zig");
const assert = std.debug.assert;
@ -16,6 +15,7 @@ is_exited: bool = false,
can_advance: bool = false,
choice_index: usize = 0,
current_choices: std.ArrayListUnmanaged(Choice) = .empty,
code_chunks: std.ArrayListUnmanaged(*Object) = .empty,
constants_pool: std.ArrayListUnmanaged(*Object) = .empty,
globals: std.StringHashMapUnmanaged(?*Object) = .empty,
stack: std.ArrayListUnmanaged(?*Object) = .empty,
@ -30,7 +30,7 @@ pub const default_knot_name: [:0]const u8 = "$__main__$";
pub const CallFrame = struct {
ip: usize,
sp: usize,
callee: *Object.ContentPath,
callee: *Object.Knot,
};
pub const Choice = struct {
@ -83,6 +83,8 @@ pub const Opcode = enum(u8) {
store,
load_global,
store_global,
load_attr,
store_attr,
/// Pop a value off the stack and write it to the content stream.
stream_push,
stream_line,
@ -114,11 +116,11 @@ pub fn deinit(story: *Story) void {
}
pub fn dump(story: *Story, writer: *std.Io.Writer) !void {
const story_dumper: Dumper = .{ .story = story, .writer = writer };
var story_dumper: Dumper = .{ .story = story };
try writer.writeAll("=== Constants ===\n");
for (story.constants_pool.items) |global_constant| {
try story_dumper.dumpObject(global_constant);
try story_dumper.dumpObject(writer, global_constant);
try writer.writeAll("\n");
}
@ -138,7 +140,12 @@ pub fn dump(story: *Story, writer: *std.Io.Writer) !void {
while (knots_iter.next()) |entry| {
if (entry.value_ptr.*) |global| {
switch (global.tag) {
.content_path => try story_dumper.dump(@ptrCast(global)),
.knot => {
try writer.writeAll("*");
story_dumper.indent_level += 2;
try story_dumper.dumpKnot(writer, @ptrCast(global));
story_dumper.indent_level -= 2;
},
else => {},
}
}
@ -147,8 +154,7 @@ pub fn dump(story: *Story, writer: *std.Io.Writer) !void {
pub fn trace(story: *Story, writer: *std.Io.Writer, frame: *CallFrame) !void {
try writer.print("\tStack => stack_pointer={d}, objects=[", .{frame.sp});
const story_dumper: Dumper = .{ .story = story, .writer = writer };
var story_dumper: Dumper = .{ .story = story };
const stack = &story.stack;
const stack_top = story.stack.items.len;
if (stack_top > 0) {
@ -156,7 +162,7 @@ pub fn trace(story: *Story, writer: *std.Io.Writer, frame: *CallFrame) !void {
if (stack_top > 1) {
for (stack.items[frame.sp .. stack.items.len - 1]) |slot| {
if (slot) |object| {
try story_dumper.dumpObject(object);
try story_dumper.dumpObject(writer, object);
} else {
try writer.writeAll("null");
}
@ -164,14 +170,14 @@ pub fn trace(story: *Story, writer: *std.Io.Writer, frame: *CallFrame) !void {
}
}
if (stack.items[stack.items.len - 1]) |object| {
try story_dumper.dumpObject(object);
try story_dumper.dumpObject(writer, object);
} else {
try writer.writeAll("null");
}
}
try writer.writeAll("]\n");
_ = try story_dumper.dumpInst(frame.callee, frame.ip, true);
_ = try story_dumper.dumpInst(writer, frame.callee, frame.ip, true);
return writer.flush();
}
@ -204,9 +210,9 @@ fn popStack(vm: *Story) ?*Object {
}
fn getConstant(story: *Story, frame: *CallFrame, offset: u8) !*Object {
if (offset >= frame.callee.const_pool.len) return error.InvalidArgument;
if (offset >= frame.callee.code.constants.len) return error.InvalidArgument;
const constant_index = frame.callee.const_pool[offset];
const constant_index = frame.callee.code.constants[offset];
return story.constants_pool.items[constant_index];
}
@ -251,7 +257,7 @@ fn execute(vm: *Story) !std.ArrayListUnmanaged(u8) {
while (true) {
const frame = vm.currentFrame();
const code = std.mem.bytesAsSlice(Opcode, frame.callee.bytes);
const code = std.mem.bytesAsSlice(Opcode, frame.callee.code.bytecode);
if (vm.dump_writer) |w| {
vm.trace(w, frame) catch {};
}
@ -262,6 +268,7 @@ fn execute(vm: *Story) !std.ArrayListUnmanaged(u8) {
return .empty;
},
.true => {
// TODO: Intern this value.
const true_object = try Object.Number.create(vm, .{
.boolean = true,
});
@ -269,6 +276,7 @@ fn execute(vm: *Story) !std.ArrayListUnmanaged(u8) {
frame.ip += 1;
},
.false => {
// TODO: Intern this value.
const false_object = try Object.Number.create(vm, .{
.boolean = false,
});
@ -472,6 +480,28 @@ fn execute(vm: *Story) !std.ArrayListUnmanaged(u8) {
return error.InvalidArgument;
}
},
.load_attr => {
const arg_offset: u8 = @intFromEnum(code[frame.ip + 1]);
frame.ip += 2;
if (peekStack(vm, 0)) |obj| {
assert(obj.tag == .knot);
const knot_obj: *Object.Knot = @ptrCast(obj);
const arg_obj = try vm.getConstant(frame, arg_offset);
assert(arg_obj.tag == .string);
const knot_attr: *Object.String = @ptrCast(arg_obj);
_ = popStack(vm);
if (knot_obj.members.get(knot_attr.toSlice())) |attr_obj| {
try vm.pushStack(attr_obj);
} else {
return error.InvalidArgument;
}
} else {
return error.InvalidArgument;
}
},
else => return error.InvalidInstruction,
}
}
@ -487,8 +517,8 @@ pub fn advance(story: *Story, gpa: std.mem.Allocator) ![]const u8 {
return content.toOwnedSlice(gpa);
}
pub fn getKnot(vm: *Story, name: []const u8) ?*Object.ContentPath {
const knot: ?*Object.ContentPath = blk: {
pub fn getKnot(vm: *Story, name: []const u8) ?*Object.Knot {
const knot: ?*Object.Knot = blk: {
if (vm.globals.get(name)) |object| {
break :blk @ptrCast(object);
}
@ -498,10 +528,10 @@ pub fn getKnot(vm: *Story, name: []const u8) ?*Object.ContentPath {
}
// TODO(Brett): Add arguments?
fn divertToKnot(vm: *Story, knot: *Object.ContentPath) !void {
fn divertToKnot(vm: *Story, knot: *Object.Knot) !void {
const gpa = vm.allocator;
const stack_ptr = vm.stack.items.len - knot.arity;
const stack_needed = knot.locals_count;
const stack_ptr = vm.stack.items.len - knot.code.args_count;
const stack_needed = knot.code.stack_size;
try vm.stack.ensureUnusedCapacity(gpa, stack_needed);
try vm.call_stack.ensureUnusedCapacity(gpa, 1);
@ -527,6 +557,7 @@ pub const LoadOptions = struct {
use_color: bool = true,
dump_ast: bool = false,
dump_ir: bool = false,
dump_trace: bool = false,
};
pub fn selectChoiceIndex(story: *Story, index: usize) !void {
@ -540,7 +571,11 @@ pub fn loadFromString(
source_bytes: [:0]const u8,
options: LoadOptions,
) !Story {
var comp = try Compilation.compile(gpa, .{
var arena_allocator = std.heap.ArenaAllocator.init(gpa);
defer arena_allocator.deinit();
const arena = arena_allocator.allocator();
var comp = try Module.compile(gpa, arena, .{
.source_bytes = source_bytes,
.filename = "<STDIN>",
.dump_writer = options.dump_writer,
@ -550,17 +585,17 @@ pub fn loadFromString(
});
defer comp.deinit();
if (comp.errors.len > 0) {
for (comp.errors) |err| {
if (comp.errors.items.len > 0) {
for (comp.errors.items) |err| {
try comp.renderError(options.error_writer, err);
}
return error.Fail;
return error.LoadFailed;
}
var story: Story = .{
.allocator = gpa,
.can_advance = false,
.dump_writer = options.dump_writer,
.dump_writer = if (options.dump_trace) options.dump_writer else null,
};
errdefer story.deinit();
try comp.setupStoryRuntime(gpa, &story);

View file

@ -6,41 +6,61 @@ const assert = std.debug.assert;
const Dumper = @This();
story: *const Story,
writer: *std.Io.Writer,
indent_level: usize = 0,
fn dumpSimpleInst(d: Dumper, offset: usize, op: Opcode) !usize {
try d.writer.print("{s}\n", .{@tagName(op)});
fn dumpSimpleInst(_: *Dumper, w: *std.Io.Writer, offset: usize, op: Opcode) !usize {
try w.print("{s}\n", .{@tagName(op)});
return offset + 1;
}
fn dumpByteInst(d: Dumper, context: *const Object.ContentPath, offset: usize, op: Opcode) !usize {
const arg = context.bytes[offset + 1];
fn dumpByteInst(
self: *Dumper,
w: *std.Io.Writer,
knot: *const Object.Knot,
offset: usize,
op: Opcode,
) !usize {
const code = knot.code;
assert(code.bytecode.len > offset + 1);
const arg = code.bytecode[offset + 1];
if (op == .load_const) {
const constant_index = context.const_pool[arg];
const global_constant = d.story.constants_pool.items[constant_index];
try d.writer.print("{s} {d} (", .{ @tagName(op), arg });
try d.dumpObject(global_constant);
try d.writer.print(")\n", .{});
try w.print("{s} {d}", .{ @tagName(op), arg });
try w.writeAll(" (");
if (code.constants.len > arg) {
const constant_index = code.constants[arg];
if (self.story.constants_pool.items.len > constant_index) {
const global_constant = self.story.constants_pool.items[constant_index];
try self.dumpObject(w, global_constant);
} else {
try d.writer.print("{s} {x}\n", .{ @tagName(op), arg });
try w.writeAll("invalid!");
}
} else {
try w.writeAll("invalid!");
}
try w.writeAll(")\n");
} else {
try w.print("{s} {x}\n", .{ @tagName(op), arg });
}
return offset + 2;
}
fn dumpGlobalInst(
d: Dumper,
context: *const Object.ContentPath,
self: *Dumper,
w: *std.Io.Writer,
knot: *const Object.Knot,
offset: usize,
op: Opcode,
) !usize {
const arg = context.bytes[offset + 1];
const constant_index = context.const_pool[arg];
const global_constant = d.story.constants_pool.items[constant_index];
const code = knot.code;
assert(code.bytecode.len > offset + 1);
const arg = code.bytecode[offset + 1];
assert(code.constants.len > arg);
const constant_index = code.constants[arg];
const global_constant = self.story.constants_pool.items[constant_index];
assert(global_constant.tag == .string);
const global_name: *Object.String = @ptrCast(global_constant);
const name_bytes = global_name.bytes[0..global_name.length];
try d.writer.print("{s} {x} '{s}'\n", .{ @tagName(op), arg, name_bytes });
try w.print("{s} {x} '{s}'\n", .{ @tagName(op), arg, name_bytes });
return offset + 2;
}
@ -50,23 +70,26 @@ const Jump = enum {
};
fn dumpJumpInst(
d: Dumper,
context: *const Object.ContentPath,
_: *Dumper,
w: *std.Io.Writer,
knot: *const Object.Knot,
offset: usize,
op: Opcode,
mode: Jump,
) !usize {
var jump: u16 = @as(u16, context.bytes[offset + 1]) << 8;
jump |= context.bytes[offset + 2];
const code = knot.code;
assert(code.bytecode.len > offset + 2);
var jump: u16 = @as(u16, code.bytecode[offset + 1]) << 8;
jump |= code.bytecode[offset + 2];
switch (mode) {
.relative => try d.writer.print("{s} 0x{x:0>4} (0x{x:0>4} -> 0x{x:0>4})\n", .{
.relative => try w.print("{s} 0x{x:0>4} (0x{x:0>4} -> 0x{x:0>4})\n", .{
@tagName(op),
jump,
offset,
offset + 3 + jump,
}),
.absolute => try d.writer.print("{s} 0x{x:0>4}\n", .{
.absolute => try w.print("{s} 0x{x:0>4}\n", .{
@tagName(op),
jump,
}),
@ -75,130 +98,204 @@ fn dumpJumpInst(
}
pub fn dumpInst(
d: Dumper,
path: *const Object.ContentPath,
self: *Dumper,
w: *std.Io.Writer,
knot: *const Object.Knot,
offset: usize,
should_prefix: bool,
) !usize {
const name_object = path.name;
const name_object = knot.name;
const name_bytes = name_object.bytes[0..name_object.length];
const op: Opcode = @enumFromInt(path.bytes[offset]);
const op: Opcode = @enumFromInt(knot.code.bytecode[offset]);
try w.splatByteAll(' ', self.indent_level);
if (should_prefix) {
try d.writer.print("<{s}>:0x{x:0>4} | ", .{ name_bytes, offset });
try w.print("<{s}>:0x{x:0>4} | ", .{ name_bytes, offset });
} else {
try d.writer.print("0x{x:0>4} | ", .{offset});
try w.print("0x{x:0>4} | ", .{offset});
}
switch (op) {
.exit => return d.dumpSimpleInst(offset, op),
.ret => return d.dumpSimpleInst(offset, op),
.pop => return d.dumpSimpleInst(offset, op),
.true => return d.dumpSimpleInst(offset, op),
.false => return d.dumpSimpleInst(offset, op),
.add => return d.dumpSimpleInst(offset, op),
.sub => return d.dumpSimpleInst(offset, op),
.mul => return d.dumpSimpleInst(offset, op),
.div => return d.dumpSimpleInst(offset, op),
.mod => return d.dumpSimpleInst(offset, op),
.neg => return d.dumpSimpleInst(offset, op),
.not => return d.dumpSimpleInst(offset, op),
.cmp_eq => return d.dumpSimpleInst(offset, op),
.cmp_lt => return d.dumpSimpleInst(offset, op),
.cmp_lte => return d.dumpSimpleInst(offset, op),
.cmp_gt => return d.dumpSimpleInst(offset, op),
.cmp_gte => return d.dumpSimpleInst(offset, op),
.load_const => return d.dumpByteInst(path, offset, op),
.load => return d.dumpByteInst(path, offset, op),
.store => return d.dumpByteInst(path, offset, op),
.load_global => return d.dumpGlobalInst(path, offset, op),
.store_global => return d.dumpGlobalInst(path, offset, op),
.call => return d.dumpByteInst(path, offset, op),
.divert => return d.dumpByteInst(path, offset, op),
.jmp => return d.dumpJumpInst(path, offset, op, .relative),
.jmp_t => return d.dumpJumpInst(path, offset, op, .relative),
.jmp_f => return d.dumpJumpInst(path, offset, op, .relative),
.stream_push => return d.dumpSimpleInst(offset, op),
.stream_flush => return d.dumpSimpleInst(offset, op),
.stream_line => return d.dumpSimpleInst(offset, op),
.stream_glue => return d.dumpSimpleInst(offset, op),
.br_push => return d.dumpJumpInst(path, offset, op, .absolute),
.br_table => return d.dumpSimpleInst(offset, op),
.br_dispatch => return d.dumpSimpleInst(offset, op),
.br_select_index => return d.dumpSimpleInst(offset, op),
.exit => return self.dumpSimpleInst(w, offset, op),
.ret => return self.dumpSimpleInst(w, offset, op),
.pop => return self.dumpSimpleInst(w, offset, op),
.true => return self.dumpSimpleInst(w, offset, op),
.false => return self.dumpSimpleInst(w, offset, op),
.add => return self.dumpSimpleInst(w, offset, op),
.sub => return self.dumpSimpleInst(w, offset, op),
.mul => return self.dumpSimpleInst(w, offset, op),
.div => return self.dumpSimpleInst(w, offset, op),
.mod => return self.dumpSimpleInst(w, offset, op),
.neg => return self.dumpSimpleInst(w, offset, op),
.not => return self.dumpSimpleInst(w, offset, op),
.cmp_eq => return self.dumpSimpleInst(w, offset, op),
.cmp_lt => return self.dumpSimpleInst(w, offset, op),
.cmp_lte => return self.dumpSimpleInst(w, offset, op),
.cmp_gt => return self.dumpSimpleInst(w, offset, op),
.cmp_gte => return self.dumpSimpleInst(w, offset, op),
.load_const => return self.dumpByteInst(w, knot, offset, op),
.load => return self.dumpByteInst(w, knot, offset, op),
.store => return self.dumpByteInst(w, knot, offset, op),
.load_global => return self.dumpGlobalInst(w, knot, offset, op),
.store_global => return self.dumpGlobalInst(w, knot, offset, op),
.call => return self.dumpByteInst(w, knot, offset, op),
.divert => return self.dumpByteInst(w, knot, offset, op),
.jmp => return self.dumpJumpInst(w, knot, offset, op, .relative),
.jmp_t => return self.dumpJumpInst(w, knot, offset, op, .relative),
.jmp_f => return self.dumpJumpInst(w, knot, offset, op, .relative),
.load_attr => return self.dumpGlobalInst(w, knot, offset, op),
.store_attr => return self.dumpGlobalInst(w, knot, offset, op),
.stream_push => return self.dumpSimpleInst(w, offset, op),
.stream_flush => return self.dumpSimpleInst(w, offset, op),
.stream_line => return self.dumpSimpleInst(w, offset, op),
.stream_glue => return self.dumpSimpleInst(w, offset, op),
.br_push => return self.dumpJumpInst(w, knot, offset, op, .absolute),
.br_table => return self.dumpSimpleInst(w, offset, op),
.br_dispatch => return self.dumpSimpleInst(w, offset, op),
.br_select_index => return self.dumpSimpleInst(w, offset, op),
else => |code| {
try d.writer.print("Unknown opcode 0x{x:0>4}\n", .{@intFromEnum(code)});
try w.print("Unknown opcode 0x{x:0>4}\n", .{@intFromEnum(code)});
return offset + 1;
},
}
}
pub fn dump(d: Dumper, path: *const Object.ContentPath) !void {
const name_object = path.name;
fn dumpKnotConstants(w: *std.Io.Writer, knot: *const Object.Knot) !void {
try w.writeAll("[");
for (knot.code.constants) |index| {
try w.print("{d},", .{index});
}
try w.writeAll("]");
try w.flush();
}
fn dumpKnotChildren(w: *std.Io.Writer, knot: *const Object.Knot) !void {
try w.writeAll("[");
var stitch_iter = knot.members.iterator();
while (stitch_iter.next()) |entry| {
try w.print("Stitch: \"{s}\",", .{entry.key_ptr.*});
}
try w.writeAll("]");
try w.flush();
}
fn dumpKnotBytecode(self: *Dumper, w: *std.Io.Writer, knot: *const Object.Knot) !void {
var index: usize = 0;
while (index < knot.code.bytecode.len) {
index = try self.dumpInst(w, knot, index, false);
try w.flush();
}
return w.flush();
}
pub fn dumpKnot(self: *Dumper, w: *std.Io.Writer, knot: *const Object.Knot) !void {
const name_object = knot.name;
const name_bytes = name_object.bytes[0..name_object.length];
try d.writer.print("=== {s}(args: {d}, constants: {d}, locals: {d}) ===\n", .{
name_bytes,
path.arity,
path.const_pool.len,
path.locals_count,
});
try w.splatByteAll(' ', self.indent_level);
try w.print("Name: \"{s}\"\n", .{name_bytes});
var index: usize = 0;
while (index < path.bytes.len) {
index = try d.dumpInst(path, index, false);
try d.writer.flush();
self.indent_level += 1;
try w.splatByteAll(' ', self.indent_level);
try w.print("Arguments: {d}\n", .{knot.code.args_count});
try w.splatByteAll(' ', self.indent_level);
try w.print("Locals: {d}\n", .{knot.code.locals_count});
try w.splatByteAll(' ', self.indent_level);
try w.print("Stack Size: {d}\n", .{knot.code.stack_size});
try w.splatByteAll(' ', self.indent_level);
try w.writeAll("Constants: ");
try dumpKnotConstants(w, knot);
try w.writeAll("\n");
try w.splatByteAll(' ', self.indent_level);
try w.writeAll("Children: ");
try dumpKnotChildren(w, knot);
try w.writeAll("\n");
try w.splatByteAll(' ', self.indent_level);
try w.writeAll("Bytecode: \n");
self.indent_level += 2;
try self.dumpKnotBytecode(w, knot);
try w.writeAll("\n");
self.indent_level -= 2;
var stitch_iter = knot.members.iterator();
var count: usize = 0;
while (stitch_iter.next()) |entry| : (count += 1) {
try w.splatByteAll(' ', self.indent_level);
try w.print("Stitch #{d}: \"{s}\"\n", .{ count, entry.key_ptr.* });
self.indent_level += 2;
try w.splatByteAll(' ', self.indent_level);
try w.print("Arguments: {d}\n", .{knot.code.args_count});
try w.splatByteAll(' ', self.indent_level);
try w.print("Locals: {d}\n", .{knot.code.locals_count});
try w.splatByteAll(' ', self.indent_level);
try w.print("Stack Size: {d}\n", .{knot.code.stack_size});
try w.splatByteAll(' ', self.indent_level);
try w.writeAll("Constants: ");
try dumpKnotConstants(w, knot);
try w.writeAll("\n");
try w.splatByteAll(' ', self.indent_level);
try w.writeAll("Children: ");
try dumpKnotChildren(w, knot);
try w.writeAll("\n");
try w.splatByteAll(' ', self.indent_level);
try w.writeAll("Bytecode: \n");
self.indent_level += 2;
try self.dumpKnotBytecode(w, @ptrCast(entry.value_ptr.*));
try w.writeAll("\n");
self.indent_level -= 4;
try w.flush();
}
return d.writer.flush();
self.indent_level = 2;
return w.flush();
}
fn getObjectType(object: *const Object) []const u8 {
switch (object.tag) {
.number => return "Number",
.string => return "String",
.content_path => return "ContentPath",
}
}
pub fn dumpObject(d: Dumper, object: *const Object) !void {
const type_string = getObjectType(object);
switch (object.tag) {
pub fn dumpObject(_: Dumper, w: *std.Io.Writer, obj: *const Object) !void {
const type_string = obj.tag.toStr();
switch (obj.tag) {
.number => {
const typed_object: *const Object.Number = @ptrCast(object);
const typed_object: *const Object.Number = @ptrCast(obj);
switch (typed_object.data) {
.boolean => |value| {
try d.writer.print("<type={s} value={s}, address={*}>", .{
.boolean => |value| try w.print("<type={s} value={s}, address={*}>", .{
type_string,
if (value) "true" else "false",
object,
});
},
.floating => |value| {
try d.writer.print("<type={s} value={d}, address={*}>", .{
obj,
}),
.floating => |value| try w.print("<type={s} value={d}, address={*}>", .{
type_string,
value,
object,
});
},
.integer => |value| {
try d.writer.print("<type={s} value={d}, address={*}>", .{
obj,
}),
.integer => |value| try w.print("<type={s} value={d}, address={*}>", .{
type_string,
value,
object,
});
},
obj,
}),
}
},
.string => {
const typed_object: *const Object.String = @ptrCast(object);
const typed_object: *const Object.String = @ptrCast(obj);
const string_bytes = typed_object.bytes[0..typed_object.length];
try d.writer.print("<type={s} value=\"{s}\", address={*}>", .{
try w.print("<type={s} value=\"{s}\", address={*}>", .{
type_string,
string_bytes,
object,
obj,
});
},
.content_path => {
try d.writer.print("<type={s} address={*}>", .{ type_string, object });
.code, .knot => {
try w.print("<type={s} address={*}>", .{ type_string, obj });
},
}
}

View file

@ -14,7 +14,17 @@ node: std.SinglyLinkedList.Node,
pub const Tag = enum {
number,
string,
content_path,
code,
knot,
pub fn toStr(tag: Tag) []const u8 {
return switch (tag) {
.number => "Number",
.string => "String",
.code => "Code",
.knot => "Knot",
};
}
};
pub fn destroy(obj: *Object, story: *Story) void {
@ -27,8 +37,12 @@ pub fn destroy(obj: *Object, story: *Story) void {
const typed_obj: *Object.String = @alignCast(@fieldParentPtr("base", obj));
typed_obj.destroy(story);
},
.content_path => {
const typed_obj: *Object.ContentPath = @alignCast(@fieldParentPtr("base", obj));
.code => {
const typed_obj: *Object.Code = @alignCast(@fieldParentPtr("base", obj));
typed_obj.destroy(story);
},
.knot => {
const typed_obj: *Object.Knot = @alignCast(@fieldParentPtr("base", obj));
typed_obj.destroy(story);
},
}
@ -190,12 +204,13 @@ pub const String = struct {
const Type = Object.String;
pub fn create(
story: *Story,
pub const Options = struct {
bytes: []const u8,
) error{OutOfMemory}!*Object.String {
};
pub fn create(story: *Story, options: Options) error{OutOfMemory}!*Object.String {
const gpa = story.allocator;
const alloc_len = @sizeOf(Type) + bytes.len + 1;
const alloc_len = @sizeOf(Type) + options.bytes.len + 1;
const raw = try gpa.alignedAlloc(u8, .of(Type), alloc_len);
const object: *Type = @ptrCast(raw);
@ -206,15 +221,15 @@ pub const String = struct {
.node = .{},
},
.hash = 0,
.length = bytes.len,
.length = options.bytes.len,
.bytes = undefined,
};
// Point bytes slice to the memory *after* the struct
const buf = raw[@sizeOf(Type)..][0 .. bytes.len + 1];
const buf = raw[@sizeOf(Type)..][0 .. options.bytes.len + 1];
object.bytes = buf.ptr;
@memcpy(buf[0..bytes.len], bytes);
buf[bytes.len] = 0;
@memcpy(buf[0..options.bytes.len], options.bytes);
buf[options.bytes.len] = 0;
story.gc_objects.prepend(&object.base.node);
return object;
@ -227,6 +242,10 @@ pub const String = struct {
gpa.free(base[0..alloc_len]);
}
pub fn toSlice(obj: *Object.String) []const u8 {
return obj.bytes[0..obj.length];
}
pub fn fromObject(story: *Story, obj: *Object) !*Object.String {
switch (obj.tag) {
.number => {
@ -237,7 +256,9 @@ pub const String = struct {
const number_bytes = try std.fmt.bufPrint(&print_buffer, "{}", .{
number_object.data.integer,
});
return .create(story, number_bytes);
return .create(story, .{
.bytes = number_bytes,
});
},
.string => return @ptrCast(obj),
else => unreachable,
@ -254,59 +275,111 @@ pub const String = struct {
@memcpy(bytes[lhs.length..], rhs.bytes[0..rhs.length]);
//ink_gc_disown(story, INK_OBJ(lhs));
//ink_gc_disown(story, INK_OBJ(rhs));
return .create(story, bytes);
return .create(story, .{
.bytes = bytes,
});
}
};
pub const ContentPath = struct {
/// Immutable object type for code chunks.
pub const Code = struct {
base: Object,
name: *Object.String,
arity: usize,
// TODO: Rename this to stack size.
locals_count: usize,
// TODO: Rename this to constant_pool.
const_pool: []u32,
bytes: []const u8,
/// Number of arguments.
args_count: u32,
/// Number of local variables.
locals_count: u32,
/// Stack size required to load.
stack_size: u32,
/// Table of global constant indexes.
constants: []const u8,
/// Raw compiled bytecode.
bytecode: []const u8,
const Type = Object.ContentPath;
pub const CreateOptions = struct {
name: *Object.String,
arity: usize,
locals_count: usize,
const_pool: []u32,
bytes: []const u8,
pub const Options = struct {
args_count: u32,
locals_count: u32,
stack_size: u32,
constants: []const u8,
code_bytes: []const u8,
};
pub fn create(story: *Story, options: CreateOptions) error{OutOfMemory}!*Object.ContentPath {
const Type = Code;
pub fn create(story: *Story, options: Options) error{OutOfMemory}!*Object.Code {
const gpa = story.allocator;
const alloc_len = @sizeOf(Type);
const raw = try gpa.alignedAlloc(u8, .of(Type), alloc_len);
const raw = try gpa.alignedAlloc(u8, .of(Type), @sizeOf(Type));
const obj: *Type = @ptrCast(raw);
obj.* = .{
.base = .{
.tag = .content_path,
.tag = .code,
.is_marked = false,
.node = .{},
},
.name = options.name,
.arity = options.arity,
.args_count = options.args_count,
.locals_count = options.locals_count,
.const_pool = options.const_pool,
.bytes = options.bytes,
.stack_size = options.stack_size,
.constants = options.constants,
.bytecode = options.code_bytes,
};
story.gc_objects.prepend(&obj.base.node);
return obj;
}
pub fn destroy(obj: *ContentPath, story: *Story) void {
pub fn destroy(obj: *Code, story: *Story) void {
const gpa = story.allocator;
gpa.free(obj.const_pool);
gpa.free(obj.bytes);
gpa.free(obj.constants);
gpa.free(obj.bytecode);
const base: [*]align(@alignOf(Type)) u8 = @ptrCast(obj);
gpa.free(base[0..@sizeOf(Type)]);
}
};
pub const Knot = struct {
base: Object,
/// Pointer to the name of the knot.
name: *Object.String,
/// Pointer to the code object for the knot.
code: *Object.Code,
members: std.StringHashMapUnmanaged(*Object) = .empty,
pub const Options = struct {
name: []const u8,
code: *Object.Code,
};
const Type = Knot;
pub fn create(story: *Story, options: Options) error{OutOfMemory}!*Object.Knot {
const gpa = story.allocator;
const raw = try gpa.alignedAlloc(u8, .of(Type), @sizeOf(Type));
const obj: *Type = @ptrCast(raw);
obj.* = .{
.base = .{
.tag = .knot,
.is_marked = false,
.node = .{},
},
.name = try .create(story, .{
.bytes = options.name,
}),
.code = options.code,
.members = .empty,
};
story.gc_objects.prepend(&obj.base.node);
return obj;
}
pub fn destroy(obj: *Knot, story: *Story) void {
const gpa = story.allocator;
obj.members.deinit(gpa);
const alloc_len = @sizeOf(Type);
const base: [*]align(@alignOf(Type)) u8 = @ptrCast(obj);
gpa.free(base[0..alloc_len]);
}
};

View file

@ -7,14 +7,220 @@ const Story = @import("Story.zig");
const Object = Story.Object;
const assert = std.debug.assert;
pub const Compilation = struct {
pub fn IntrusiveQueue(comptime T: type) type {
return struct {
const Self = @This();
head: ?*T = null,
tail: ?*T = null,
pub fn push(self: *Self, v: *T) void {
assert(v.next == null);
if (self.tail) |tail| {
tail.next = v;
self.tail = v;
} else {
self.head = v;
self.tail = v;
}
}
pub fn pop(self: *Self) ?*T {
const next = self.head orelse return null;
if (self.head == self.tail) self.tail = null;
self.head = next.next;
next.next = null;
return next;
}
pub fn isEmpty(self: *const Self) bool {
return self.head == null;
}
};
}
test IntrusiveQueue {
const testing = std.testing;
const Elem = struct {
const Self = @This();
next: ?*Self = null,
};
const Queue = IntrusiveQueue(Elem);
var q: Queue = .{};
try testing.expect(q.isEmpty());
var elems: [10]Elem = .{Elem{}} ** 10;
// One
try testing.expect(q.pop() == null);
q.push(&elems[0]);
try testing.expect(!q.isEmpty());
try testing.expect(q.pop().? == &elems[0]);
try testing.expect(q.pop() == null);
try testing.expect(q.isEmpty());
// Two
try testing.expect(q.pop() == null);
q.push(&elems[0]);
q.push(&elems[1]);
try testing.expect(q.pop().? == &elems[0]);
try testing.expect(q.pop().? == &elems[1]);
try testing.expect(q.pop() == null);
// Interleaved
try testing.expect(q.pop() == null);
q.push(&elems[0]);
try testing.expect(q.pop().? == &elems[0]);
q.push(&elems[1]);
try testing.expect(q.pop().? == &elems[1]);
try testing.expect(q.pop() == null);
}
pub const InternPool = struct {
constants: std.ArrayListUnmanaged(Constant.Key) = .empty,
constants_map: std.AutoHashMapUnmanaged(Constant.Key, Constant.Index) = .empty,
code_chunks: std.ArrayListUnmanaged(*Module.CodeChunk) = .empty,
pub const Constant = struct {
pub const Key = union(enum) {
int: u64,
str: Ir.NullTerminatedString,
};
pub const Index = enum(u32) {
_,
};
};
pub fn getOrPutConstant(
ip: *InternPool,
gpa: std.mem.Allocator,
arena: std.heap.ArenaAllocator,
key: Constant.Key,
) error{OutOfMemory}!Constant.Index {
if (ip.constants_map.get(key)) |index| {
return index;
} else {
const new_index: Constant.Index = @enumFromInt(ip.constants.items.len);
try ip.constants.append(gpa, key);
try ip.constants_map.put(gpa, key, new_index);
return new_index;
}
}
pub fn getOrPutInt(
ip: *InternPool,
gpa: std.mem.Allocator,
value: u64,
) error{OutOfMemory}!Constant.Index {
return ip.getOrPutConstant(gpa, .{
.int = value,
});
}
pub fn getOrPutStr(
ip: *InternPool,
gpa: std.mem.Allocator,
start: Ir.NullTerminatedString,
) error{OutOfMemory}!Constant.Index {
return ip.getOrPutConstant(gpa, .{
.str = start,
});
}
pub fn getStrBytes(ip: *InternPool, ir: Ir, index: Constant.Index) []const u8 {
assert(ip.constants.items.len > @intFromEnum(index));
const c = ip.constants.items[@intFromEnum(index)];
return ir.nullTerminatedString(c.str);
}
pub fn getCodeChunk(ip: *InternPool, index: Module.CodeChunk.Index) *Module.CodeChunk {
assert(ip.code_chunks.items.len > @intFromEnum(index));
return ip.code_chunks.items[@intFromEnum(index)];
}
pub fn deinit(ip: *InternPool, gpa: std.mem.Allocator) void {
ip.constants.deinit(gpa);
ip.constants_map.deinit(gpa);
ip.code_chunks.deinit(gpa);
}
};
pub const WorkItem = struct {
tag: Tag,
next: ?*WorkItem = null,
decl_name: InternPool.Constant.Index,
inst_index: Ir.Inst.Index,
namespace: *Module.Namespace,
pub const Tag = enum {
knot,
stitch,
};
};
pub const WorkQueue = IntrusiveQueue(WorkItem);
pub const Module = struct {
gpa: std.mem.Allocator,
arena: std.mem.Allocator,
tree: Ast,
ir: Ir,
errors: []Error,
knots: []Knot,
constants: []Constant,
intern_pool: InternPool = .{},
knots: std.ArrayListUnmanaged(Knot) = .empty,
stitches: std.ArrayListUnmanaged(Stitch) = .empty,
errors: std.ArrayListUnmanaged(Error) = .empty,
work_queue: WorkQueue = .{},
pub const Knot = struct {
name_index: InternPool.Constant.Index,
code_index: CodeChunk.Index,
pub const Index = enum(u32) {
_,
};
};
pub const Stitch = struct {
knot_index: Knot.Index,
code_index: CodeChunk.Index,
name_index: InternPool.Constant.Index,
pub const Index = enum(u32) {
_,
};
};
pub const Namespace = struct {
parent: ?*Namespace,
decls: std.AutoHashMapUnmanaged(InternPool.Constant.Index, Decl),
pub const Decl = struct {
tag: Tag,
namespace: ?*Namespace,
decl_inst: Ir.Inst.Index,
args_count: u32,
pub const Tag = enum {
knot,
variable,
};
};
};
pub const CodeChunk = struct {
args_count: u32 = 0,
locals_count: u32 = 0,
stack_size: u32 = 0,
constants: std.ArrayListUnmanaged(u8) = .empty,
bytecode: std.ArrayListUnmanaged(u8) = .empty,
pub const Index = enum(u32) {
_,
};
};
pub const Error = struct {
line: usize,
@ -23,21 +229,226 @@ pub const Compilation = struct {
message: []const u8,
};
pub const Knot = struct {
name: Ir.NullTerminatedString,
arity: u32,
stack_size: u32,
constants: std.ArrayListUnmanaged(u32) = .empty,
bytecode: std.ArrayListUnmanaged(u8) = .empty,
fn generateFile(mod: *Module) !void {
const root_node: Ir.Inst.Index = .file_inst;
const gpa = mod.gpa;
const data = mod.ir.instructions[@intFromEnum(root_node)].data.payload;
const extra = mod.ir.extraData(Ir.Inst.Block, data.extra_index);
const top_level_decls = mod.ir.bodySlice(extra.end, extra.data.body_len);
var sema: Sema = .{
.module = mod,
.gpa = gpa,
.arena = mod.arena,
.ir = mod.ir,
.errors = &mod.errors,
};
defer sema.deinit();
const file_scope = try mod.createNamespace(null);
for (top_level_decls) |decl_index| {
try sema.analyzeTopLevelDecl(file_scope, decl_index);
}
while (mod.work_queue.pop()) |work_unit| {
const chunk_index = mod.intern_pool.code_chunks.items.len;
var chunk: Sema.Chunk = .{
.sema = &sema,
.code = try mod.createCodeChunk(),
.namespace = work_unit.namespace,
};
defer chunk.deinit(gpa);
const debug_name_str = mod.intern_pool.getStrBytes(mod.ir, work_unit.decl_name);
std.debug.print("Analyzing {s}\n", .{debug_name_str});
switch (work_unit.tag) {
.knot => {
try sema.analyzeKnot(&chunk, work_unit.inst_index);
try chunk.finalize();
try mod.intern_pool.code_chunks.append(gpa, chunk.code);
try mod.knots.append(gpa, .{
.name_index = work_unit.decl_name,
.code_index = @enumFromInt(chunk_index),
});
},
.stitch => {
const knot_index = mod.knots.items.len - 1;
try sema.analyzeStitch(&chunk, work_unit.inst_index);
try chunk.finalize();
try mod.intern_pool.code_chunks.append(gpa, chunk.code);
try mod.stitches.append(gpa, .{
.knot_index = @enumFromInt(knot_index),
.name_index = work_unit.decl_name,
.code_index = @enumFromInt(chunk_index),
});
},
}
}
}
pub const Options = struct {
source_bytes: [:0]const u8,
filename: [:0]const u8,
dump_writer: ?*std.Io.Writer = null,
dump_ast: bool = false,
dump_ir: bool = false,
dump_use_color: bool = false,
};
pub const Constant = union(enum) {
integer: u64,
string: Ir.NullTerminatedString,
pub fn compile(
gpa: std.mem.Allocator,
arena: std.mem.Allocator,
options: Options,
) !Module {
const tree = try Ast.parse(gpa, arena, options.source_bytes, options.filename, 0);
var module: Module = .{
.gpa = gpa,
.arena = arena,
.tree = tree,
.ir = try AstGen.generate(gpa, &tree),
};
errdefer module.deinit();
pub fn renderError(cu: *const Compilation, w: *std.Io.Writer, compile_error: Error) !void {
const filename = cu.tree.filename;
if (options.dump_writer) |w| {
if (options.dump_ast) {
try w.writeAll("=== AST ===\n");
try module.tree.render(gpa, w, .{
.use_color = options.dump_use_color,
});
}
if (options.dump_ir) {
try w.writeAll("=== Semantic IR ===\n");
try module.ir.dumpInfo(w);
try module.ir.render(w);
}
}
if (module.ir.hasCompileErrors()) {
const payload_index = module.ir.extra[@intFromEnum(Ir.ExtraIndex.compile_errors)];
assert(payload_index != 0);
const header = module.ir.extraData(Ir.Inst.CompileErrors, payload_index);
const items_len = header.data.items_len;
var extra_index = header.end;
// TODO: Make an iterator for this?
for (0..items_len) |_| {
const item = module.ir.extraData(Ir.Inst.CompileErrors.Item, extra_index);
extra_index = item.end;
const loc = findLineColumn(tree.source, item.data.byte_offset);
try module.errors.append(gpa, .{
.line = loc.line,
.column = loc.column,
.snippet = loc.source_line,
.message = module.ir.nullTerminatedString(item.data.msg),
});
}
} else {
module.generateFile() catch |err| switch (err) {
error.OutOfMemory => return error.OutOfMemory,
else => |e| @panic(@errorName(e)),
};
}
return module;
}
pub fn setupStoryRuntime(mod: *Module, gpa: std.mem.Allocator, story: *Story) !void {
assert(mod.errors.items.len == 0);
const constants_len = mod.intern_pool.constants.items.len;
try story.constants_pool.ensureUnusedCapacity(gpa, constants_len);
for (mod.intern_pool.constants.items) |constant| {
switch (constant) {
.int => |value| {
const obj = try Object.Number.create(story, .{
.integer = @intCast(value),
});
story.constants_pool.appendAssumeCapacity(&obj.base);
},
.str => |str| {
const bytes = mod.ir.nullTerminatedString(str);
const obj = try Object.String.create(story, .{
.bytes = bytes,
});
story.constants_pool.appendAssumeCapacity(&obj.base);
},
}
}
for (mod.knots.items) |knot| {
const name_bytes = mod.intern_pool.getStrBytes(mod.ir, knot.name_index);
const code_chunk = mod.intern_pool.getCodeChunk(knot.code_index);
const knot_object = try Object.Knot.create(story, .{
.name = name_bytes,
.code = try Object.Code.create(story, .{
.args_count = @intCast(code_chunk.args_count),
.locals_count = @intCast(code_chunk.locals_count),
.stack_size = @intCast(code_chunk.stack_size),
.constants = try code_chunk.constants.toOwnedSlice(gpa),
.code_bytes = try code_chunk.bytecode.toOwnedSlice(gpa),
}),
});
try story.globals.put(gpa, name_bytes, @ptrCast(knot_object));
}
for (mod.stitches.items) |stitch| {
const name_bytes = mod.intern_pool.getStrBytes(mod.ir, stitch.name_index);
const code_chunk = mod.intern_pool.getCodeChunk(stitch.code_index);
const stitch_obj = try Object.Knot.create(story, .{
.name = name_bytes,
.code = try Object.Code.create(story, .{
.args_count = @intCast(code_chunk.args_count),
.locals_count = @intCast(code_chunk.locals_count),
.stack_size = @intCast(code_chunk.stack_size),
.constants = try code_chunk.constants.toOwnedSlice(gpa),
.code_bytes = try code_chunk.bytecode.toOwnedSlice(gpa),
}),
});
const parent_knot = mod.knots.items[@intFromEnum(stitch.knot_index)];
const parent_knot_name = mod.intern_pool.getStrBytes(mod.ir, parent_knot.name_index);
const parent_knot_obj: *Object.Knot = @ptrCast(story.globals.get(parent_knot_name).?);
try parent_knot_obj.members.put(story.allocator, name_bytes, &stitch_obj.base);
}
story.string_bytes = mod.ir.string_bytes;
mod.ir.string_bytes = &.{};
}
pub fn createNamespace(mod: *Module, parent: ?*Namespace) error{OutOfMemory}!*Namespace {
const ns = try mod.arena.create(Namespace);
ns.* = .{
.parent = parent,
.decls = .empty,
};
return ns;
}
pub fn createCodeChunk(mod: *Module) error{OutOfMemory}!*CodeChunk {
const chunk = try mod.arena.create(CodeChunk);
chunk.* = .{};
return chunk;
}
pub fn queueWorkItem(
mod: *Module,
options: struct {
tag: WorkItem.Tag,
decl_name: InternPool.Constant.Index,
inst_index: Ir.Inst.Index,
namespace: *Namespace,
},
) !void {
const work_item = try mod.arena.create(WorkItem);
work_item.* = .{
.tag = options.tag,
.decl_name = options.decl_name,
.inst_index = options.inst_index,
.namespace = options.namespace,
};
mod.work_queue.push(work_item);
}
pub fn renderError(mod: *const Module, w: *std.Io.Writer, compile_error: Error) !void {
const filename = mod.tree.filename;
const line = compile_error.line + 1;
const column = compile_error.column + 1;
@ -55,142 +466,14 @@ pub const Compilation = struct {
return w.flush();
}
pub const CompileOptions = struct {
source_bytes: [:0]const u8,
filename: [:0]const u8,
dump_writer: ?*std.Io.Writer = null,
dump_ast: bool = false,
dump_ir: bool = false,
dump_use_color: bool = false,
};
/// Run the front-end pipeline over `options.source_bytes`:
/// parse to an `Ast`, lower to IR via `AstGen`, then (once re-enabled)
/// semantic analysis. Errors are collected into the returned
/// `Compilation` rather than reported eagerly.
/// Caller owns the result and must call its `deinit`.
pub fn compile(gpa: std.mem.Allocator, options: CompileOptions) !Compilation {
    var arena_allocator = std.heap.ArenaAllocator.init(gpa);
    // On success the arena moves into the returned Compilation; this
    // errdefer only fires if we fail before the final return.
    errdefer arena_allocator.deinit();
    var errors: std.ArrayListUnmanaged(Error) = .empty;
    defer errors.deinit(gpa);
    const arena = arena_allocator.allocator();
    const ast = try Ast.parse(gpa, arena, options.source_bytes, options.filename, 0);
    var ir = try AstGen.generate(gpa, &ast);
    errdefer ir.deinit(gpa);
    var sema: Sema = .{
        .gpa = gpa,
        .arena = arena,
        .tree = ast,
        .ir = ir,
        .errors = &errors,
    };
    // Runs after the return expression, i.e. after `toOwnedSlice` has
    // already emptied sema's output lists on the success path.
    defer sema.deinit();
    // Optional debug dumps of the intermediate representations.
    if (options.dump_writer) |w| {
        if (options.dump_ast) {
            try w.writeAll("=== AST ===\n");
            try ast.render(gpa, w, .{
                .use_color = options.dump_use_color,
            });
        }
        if (options.dump_ir) {
            try w.writeAll("=== Semantic IR ===\n");
            try ir.dumpInfo(w);
            try ir.render(w);
        }
    }
    // `fatal` is true when AstGen recorded compile errors; they are
    // translated into `errors` entries and analysis is skipped.
    const fatal = if (ir.hasCompileErrors()) fatal: {
        // Compile errors are serialized in the IR extra array as a
        // CompileErrors header followed by `items_len` Item records.
        const payload_index = ir.extra[@intFromEnum(Ir.ExtraIndex.compile_errors)];
        assert(payload_index != 0);
        const header = ir.extraData(Ir.Inst.CompileErrors, payload_index);
        const items_len = header.data.items_len;
        var extra_index = header.end;
        // TODO: Make an iterator for this?
        for (0..items_len) |_| {
            const item = ir.extraData(Ir.Inst.CompileErrors.Item, extra_index);
            // Items are variable-length; each record reports where the
            // next one starts.
            extra_index = item.end;
            const loc = findLineColumn(ast.source, item.data.byte_offset);
            try errors.append(gpa, .{
                .line = loc.line,
                .column = loc.column,
                .snippet = loc.source_line,
                .message = ir.nullTerminatedString(item.data.msg),
            });
        }
        break :fatal true;
    } else fatal: {
        // NOTE(review): semantic analysis is currently disabled; restore
        // this call once Sema is wired up for the new layout.
        //sema.analyzeFile(.file_inst) catch |err| switch (err) {
        //    error.OutOfMemory => return error.OutOfMemory,
        //    error.AnalysisFail => break :fatal true,
        //    // TODO: These errors should be handled...
        //    else => |e| return e,
        //};
        break :fatal false;
    };
    return .{
        .gpa = gpa,
        .arena = arena_allocator,
        .tree = ast,
        .ir = ir,
        .errors = try errors.toOwnedSlice(gpa),
        // On a fatal front-end error the Sema outputs are meaningless;
        // hand back empty slices instead.
        .constants = if (fatal) &.{} else try sema.constants.toOwnedSlice(gpa),
        .knots = if (fatal) &.{} else try sema.knots.toOwnedSlice(gpa),
    };
}
/// Materialize a finished, error-free compilation into runtime objects
/// owned by `story`: the constant pool, one ContentPath per knot, and
/// the string table. Transfers ownership of each knot's constant and
/// bytecode buffers and of `cu.ir.string_bytes` to the story.
pub fn setupStoryRuntime(cu: *Compilation, gpa: std.mem.Allocator, story: *Story) !void {
    assert(cu.errors.len == 0);
    const constants_pool = &story.constants_pool;
    // Reserve up front so the appends inside the loop cannot fail.
    try constants_pool.ensureUnusedCapacity(gpa, cu.constants.len);
    for (cu.constants) |constant| {
        switch (constant) {
            .integer => |value| {
                const object: *Object.Number = try .create(story, .{
                    .integer = @intCast(value),
                });
                constants_pool.appendAssumeCapacity(&object.base);
            },
            .string => |ref| {
                // `ref` indexes the IR's NUL-terminated string table.
                const bytes = cu.ir.nullTerminatedString(ref);
                const object: *Object.String = try .create(story, bytes);
                constants_pool.appendAssumeCapacity(&object.base);
            },
        }
    }
    for (cu.knots) |*knot| {
        const knot_name = cu.ir.nullTerminatedString(knot.name);
        const runtime_chunk: *Object.ContentPath = try .create(story, .{
            .name = try .create(story, knot_name),
            .arity = @intCast(knot.arity),
            // Locals beyond the arguments; assumes arguments occupy the
            // first `arity` stack slots — TODO(review): confirm layout.
            .locals_count = @intCast(knot.stack_size - knot.arity),
            // Ownership of both buffers moves to the runtime chunk.
            .const_pool = try knot.constants.toOwnedSlice(gpa),
            .bytes = try knot.bytecode.toOwnedSlice(gpa),
        });
        try story.globals.put(gpa, knot_name, &runtime_chunk.base);
    }
    // Hand the string table to the story; leave the compilation holding
    // an empty slice so its own deinit does not free transferred bytes.
    story.string_bytes = cu.ir.string_bytes;
    cu.ir.string_bytes = &.{};
}
pub fn deinit(cu: *Compilation) void {
const gpa = cu.gpa;
for (cu.knots) |*knot| {
knot.constants.deinit(gpa);
knot.bytecode.deinit(gpa);
}
gpa.free(cu.knots);
gpa.free(cu.errors);
gpa.free(cu.constants);
cu.ir.deinit(gpa);
cu.arena.deinit();
cu.* = undefined;
/// Release every resource owned by the module. `mod` must not be used
/// afterwards.
pub fn deinit(mod: *Module) void {
    const gpa = mod.gpa;
    mod.ir.deinit(gpa);
    mod.intern_pool.deinit(gpa);
    mod.knots.deinit(gpa);
    mod.stitches.deinit(gpa);
    mod.errors.deinit(gpa);
    // Poison the value so use-after-deinit is caught in safe builds.
    mod.* = undefined;
}
};

View file

@ -36,6 +36,7 @@ fn mainArgs(
var dump_ast: bool = false;
var dump_ir: bool = false;
var dump_story: bool = false;
var dump_trace: bool = false;
var use_stdin: bool = false;
var use_color: bool = false;
@ -52,6 +53,8 @@ fn mainArgs(
dump_ir = true;
} else if (std.mem.eql(u8, arg, "--dump-story")) {
dump_story = true;
} else if (std.mem.eql(u8, arg, "--dump-trace")) {
dump_trace = true;
} else if (std.mem.eql(u8, arg, "--use-color")) {
use_color = true;
} else {
@ -91,8 +94,9 @@ fn mainArgs(
.use_color = use_color,
.dump_ast = dump_ast,
.dump_ir = dump_ir,
.dump_trace = dump_trace,
}) catch |err| switch (err) {
error.Fail => std.process.exit(1),
error.LoadFailed => std.process.exit(1),
else => |e| return e,
};
defer story.deinit();