refactor: make room for loading and outputting pre-compiled files

This commit is contained in:
Brett Broadhurst 2026-04-13 18:40:04 -06:00
parent 3afbbb6ec2
commit 96866ba9ae
Failed to generate hash of commit
11 changed files with 537 additions and 433 deletions

View file

@ -230,7 +230,6 @@ int main(int argc, char *argv[])
int flags = 0;
const char *filename = NULL;
struct ink_source source;
struct ink_story *story = NULL;
option_setopts(cli_options, argv);
@ -285,11 +284,6 @@ int main(int argc, char *argv[])
return rc;
}
story = ink_open();
if (!story) {
goto out;
}
const struct ink_story_options opts = {
.source_bytes = source.bytes,
.source_length = source.length,
@ -298,8 +292,8 @@ int main(int argc, char *argv[])
.flags = flags,
};
rc = ink_load_story_options(story, &opts);
if (rc < 0) {
struct ink_story *const story = ink_load_story_options(&opts);
if (story == NULL) {
goto out;
}
if (!compile_only) {

View file

@ -71,24 +71,17 @@ enum ink_flags {
};
/**
* @brief Open a story context.
* Load an Ink story with extended options.
*
* @returns a new story context
* @returns an opaque pointer on success or null on failure.
*/
INK_API struct ink_story *ink_open(void);
INK_API struct ink_story *ink_load_story_options(const struct ink_load_options *options);
/**
* Close a story context.
*/
INK_API void ink_close(struct ink_story *story);
/**
* Load an Ink story with extended options.
*
* @returns a non-zero value on error.
*/
INK_API int ink_load_story_options(struct ink_story *story,
const struct ink_load_options *options);
/**
* Determine if the story can continue.

View file

@ -4,16 +4,16 @@ const Ir = @import("Ir.zig");
const Story = @import("Story.zig");
const InternPool = @import("InternPool.zig");
const compile = @import("compile.zig");
const Module = compile.Module;
const Compilation = compile.Compilation;
const assert = std.debug.assert;
const Sema = @This();
gpa: std.mem.Allocator,
arena: std.mem.Allocator,
module: *compile.Module,
module: *Compilation,
ir: Ir,
inst_map: std.AutoHashMapUnmanaged(Ir.Inst.Index, ValueInfo) = .empty,
errors: *std.ArrayListUnmanaged(Module.Error),
errors: *std.ArrayListUnmanaged(Compilation.Error),
comptime_break_inst: Ir.Inst.Index = undefined,
const InnerError = error{
@ -136,17 +136,17 @@ pub fn lookupIdentifier(
builder: *Builder,
ident: InternPool.Index,
src: SrcLoc,
) !Module.Namespace.Decl {
) !Compilation.Namespace.Decl {
return sema.lookupInNamespace(builder.namespace, ident, src);
}
pub fn lookupInNamespace(
sema: *Sema,
namespace: *Module.Namespace,
namespace: *Compilation.Namespace,
ident: InternPool.Index,
src: SrcLoc,
) !Module.Namespace.Decl {
var scope: ?*Module.Namespace = namespace;
) !Compilation.Namespace.Decl {
var scope: ?*Compilation.Namespace = namespace;
while (scope) |s| : (scope = s.parent) {
if (s.decls.get(ident)) |decl| return decl;
}
@ -166,7 +166,7 @@ pub const Block = struct {
pub const Builder = struct {
sema: *Sema,
namespace: *Module.Namespace,
namespace: *Compilation.Namespace,
code: *InternPool.CodeChunk,
constants_map: std.AutoHashMapUnmanaged(InternPool.Index, u8) = .empty,
labels: std.ArrayListUnmanaged(Label) = .empty,
@ -1148,7 +1148,7 @@ fn analyzeCallTarget(
builder: *Builder,
src: SrcLoc,
callee: ValueInfo,
) !Module.Namespace.Decl {
) !Compilation.Namespace.Decl {
switch (callee) {
.function => |ip_index| {
try builder.materialize(callee);
@ -1163,7 +1163,7 @@ fn analyzeDivertTarget(
builder: *Builder,
src: SrcLoc,
callee: ValueInfo,
) !Module.Namespace.Decl {
) !Compilation.Namespace.Decl {
switch (callee) {
.knot => |ip_index| {
try builder.materialize(callee);
@ -1333,29 +1333,31 @@ pub fn analyzeKnot(
fn analyzeNestedDecl(
sema: *Sema,
namespace: *Module.Namespace,
namespace: *Compilation.Namespace,
inst: Ir.Inst.Index,
) !void {
const gpa = sema.gpa;
const arena = sema.arena;
const data = sema.ir.instructions[@intFromEnum(inst)].data.payload;
const extra = sema.ir.extraData(Ir.Inst.Declaration, data.extra_index).data;
const decl = sema.ir.instructions[@intFromEnum(extra.value)];
const decl_name = try sema.module.intern_pool.getOrPutString(
sema.gpa,
gpa,
sema.ir.nullTerminatedString(extra.name),
);
const ip_index = try sema.module.intern_pool.getOrPutValue(sema.gpa, .{ .str = decl_name });
const ip_index = try sema.module.intern_pool.getOrPutValue(gpa, .{ .str = decl_name });
switch (decl.tag) {
.decl_stitch => {
const child_namespace = try sema.module.createNamespace(namespace);
try namespace.decls.put(sema.arena, ip_index, .{
const child_namespace = try sema.module.createNamespace(arena, namespace);
try namespace.decls.put(arena, ip_index, .{
.tag = .knot,
.decl_inst = extra.value,
.args_count = 0,
.namespace = child_namespace,
});
try sema.module.queueWorkItem(.{
try sema.module.queueWorkItem(arena, .{
.tag = .stitch,
.decl_name = ip_index,
.inst_index = extra.value,
@ -1366,7 +1368,8 @@ fn analyzeNestedDecl(
}
}
fn scanTopLevelDecl(sema: *Sema, namespace: *Module.Namespace, inst: Ir.Inst.Index) !void {
fn scanTopLevelDecl(sema: *Sema, namespace: *Compilation.Namespace, inst: Ir.Inst.Index) !void {
const arena = sema.arena;
const data = sema.ir.instructions[@intFromEnum(inst)].data.payload;
const extra = sema.ir.extraData(Ir.Inst.Declaration, data.extra_index).data;
const decl_inst = sema.ir.instructions[@intFromEnum(extra.value)];
@ -1398,8 +1401,8 @@ fn scanTopLevelDecl(sema: *Sema, namespace: *Module.Namespace, inst: Ir.Inst.Ind
const _body = sema.ir.bodySlice(_extra.end, _extra.data.body_len);
const _stitches = sema.ir.bodySlice(_extra.end + _body.len, _extra.data.stitches_len);
const child_namespace = try sema.module.createNamespace(namespace);
const gop = try namespace.decls.getOrPut(sema.arena, decl_name);
const child_namespace = try sema.module.createNamespace(arena, namespace);
const gop = try namespace.decls.getOrPut(arena, decl_name);
if (gop.found_existing) {
return sema.fail(src_loc, "duplicate identifier", .{});
} else {
@ -1412,7 +1415,7 @@ fn scanTopLevelDecl(sema: *Sema, namespace: *Module.Namespace, inst: Ir.Inst.Ind
};
}
try sema.module.queueWorkItem(.{
try sema.module.queueWorkItem(arena, .{
.tag = .knot,
.decl_name = decl_name,
.inst_index = extra.value,
@ -1424,8 +1427,8 @@ fn scanTopLevelDecl(sema: *Sema, namespace: *Module.Namespace, inst: Ir.Inst.Ind
}
},
.decl_stitch => {
const child_namespace = try sema.module.createNamespace(namespace);
const gop = try namespace.decls.getOrPut(sema.arena, decl_name);
const child_namespace = try sema.module.createNamespace(arena, namespace);
const gop = try namespace.decls.getOrPut(arena, decl_name);
if (gop.found_existing) {
return sema.fail(src_loc, "duplicate identifier", .{});
} else {
@ -1437,7 +1440,7 @@ fn scanTopLevelDecl(sema: *Sema, namespace: *Module.Namespace, inst: Ir.Inst.Ind
.namespace = child_namespace,
};
}
try sema.module.queueWorkItem(.{
try sema.module.queueWorkItem(arena, .{
.tag = .stitch,
.decl_name = decl_name,
.inst_index = extra.value,
@ -1445,8 +1448,8 @@ fn scanTopLevelDecl(sema: *Sema, namespace: *Module.Namespace, inst: Ir.Inst.Ind
});
},
.decl_function => {
const child_namespace = try sema.module.createNamespace(namespace);
const gop = try namespace.decls.getOrPut(sema.arena, decl_name);
const child_namespace = try sema.module.createNamespace(arena, namespace);
const gop = try namespace.decls.getOrPut(arena, decl_name);
if (gop.found_existing) {
return sema.fail(src_loc, "duplicate identifier", .{});
} else {
@ -1458,7 +1461,7 @@ fn scanTopLevelDecl(sema: *Sema, namespace: *Module.Namespace, inst: Ir.Inst.Ind
.namespace = child_namespace,
};
}
try sema.module.queueWorkItem(.{
try sema.module.queueWorkItem(arena, .{
.tag = .function,
.decl_name = decl_name,
.inst_index = extra.value,
@ -1500,7 +1503,7 @@ fn resolveGlobalDecl(
pub fn scanTopLevelDecls(
sema: *Sema,
namespace: *Module.Namespace,
namespace: *Compilation.Namespace,
decls: []const Ir.Inst.Index,
) !void {
const gpa = sema.gpa;

View file

@ -1,13 +1,14 @@
//! Virtual machine state for story execution.
const std = @import("std");
const tokenizer = @import("tokenizer.zig");
const assert = std.debug.assert;
const Ast = @import("Ast.zig");
const AstGen = @import("AstGen.zig");
const Module = @import("compile.zig").Module;
const Compilation = @import("compile.zig").Compilation;
pub const Loader = @import("Story/Loader.zig");
pub const Object = @import("Story/Object.zig");
const Dumper = @import("Story/Dumper.zig");
pub const Dumper = @import("Story/Dumper.zig");
const ink = @import("root.zig");
const assert = std.debug.assert;
const Story = @This();
gpa: std.mem.Allocator,
@ -42,17 +43,6 @@ internal_counter: usize = 0,
pub const default_knot_name: [:0]const u8 = "$__main__$";
pub const VariableObserver = struct {
callback: Callback,
context: Context,
pub const Callback = *const fn (Value, Context) anyerror!void;
pub const Context = struct {
ptr: *anyopaque,
};
};
pub const Opcode = enum(u8) {
/// Exit the VM normally.
exit,
@ -118,6 +108,17 @@ pub const Opcode = enum(u8) {
_,
};
pub const VariableObserver = struct {
callback: Callback,
context: Context,
pub const Callback = *const fn (Value, Context) anyerror!void;
pub const Context = struct {
ptr: *anyopaque,
};
};
pub const CallFrame = struct {
/// Pointer to the knot that initiated the call.
callee: *Object.Knot,
@ -936,70 +937,6 @@ pub fn dump(story: *Story, writer: *std.Io.Writer) !void {
return Dumper.dump(story, writer);
}
pub const LoadOptions = struct {
filename: []const u8,
error_writer: *std.Io.Writer,
dump_writer: ?*std.Io.Writer = null,
dump_use_color: bool = true,
dump_ast: bool = false,
dump_ir: bool = false,
};
pub fn fromSourceBytes(
gpa: std.mem.Allocator,
source_bytes: [:0]const u8,
options: LoadOptions,
) !Story {
var arena_allocator = std.heap.ArenaAllocator.init(gpa);
defer arena_allocator.deinit();
const arena = arena_allocator.allocator();
var comp = try Module.compile(gpa, arena, .{
.source_bytes = source_bytes,
.filename = options.filename,
.dump_writer = options.dump_writer,
.dump_use_color = options.dump_use_color,
.dump_ast = options.dump_ast,
.dump_ir = options.dump_ir,
});
defer comp.deinit();
if (comp.errors.items.len > 0) {
for (comp.errors.items) |err| {
try comp.renderError(options.error_writer, err);
}
return error.LoadFailed;
}
// TODO: Make this configurable.
const stack_size = 128;
const eval_stack_ptr = try gpa.alloc(Value, stack_size);
errdefer gpa.free(eval_stack_ptr);
const call_stack_ptr = try gpa.alloc(CallFrame, stack_size);
errdefer gpa.free(call_stack_ptr);
var story: Story = .{
.gpa = gpa,
.arena = .init(gpa),
.can_advance = false,
.dump_writer = null,
.stack = eval_stack_ptr,
.call_stack = call_stack_ptr,
};
errdefer story.deinit();
try comp.setupStoryRuntime(gpa, &story);
if (story.getKnot(Story.default_knot_name)) |knot| {
try story.pushStack(.{ .object = &knot.base });
try story.divert(knot, 0);
}
return story;
}
var read_buffer: [4096]u8 align(std.heap.page_size_min) = undefined;
pub const LoadFileOptions = struct {
error_writer: *std.Io.Writer,
};
@ -1009,6 +946,7 @@ pub fn readSourceFile(
filename: []const u8,
options: LoadFileOptions,
) !Story {
var read_buffer: [4096]u8 align(std.heap.page_size_min) = undefined;
// FIXME: Temporary until 0.16.x
var arena_allocator = std.heap.ArenaAllocator.init(gpa);
defer arena_allocator.deinit();
@ -1030,3 +968,53 @@ pub fn readSourceFile(
.dump_ir = false,
});
}
pub const LoadOptions = struct {
filename: [:0]const u8,
errors: *std.ArrayListUnmanaged(Compilation.Error),
stack_size: usize = 128,
};
pub fn fromSourceBytes(
gpa: std.mem.Allocator,
source_bytes: [:0]const u8,
options: LoadOptions,
) !Story {
var arena_allocator = std.heap.ArenaAllocator.init(gpa);
defer arena_allocator.deinit();
const arena = arena_allocator.allocator();
var tree = try Ast.parse(gpa, arena, source_bytes, options.filename, 0);
defer tree.deinit(gpa);
var ir = try AstGen.generate(gpa, &tree);
defer ir.deinit(gpa);
var cu = Compilation.build(gpa, tree, ir, options.errors) catch |err| switch (err) {
else => |e| return e,
};
defer cu.deinit();
if (cu.hasCompileErrors()) {
return error.CompilationError;
}
return .fromCompilation(gpa, &cu, .{
.stack_size = options.stack_size,
});
}
pub fn fromCompilation(
gpa: std.mem.Allocator,
cu: *Compilation,
options: Loader.Options,
) !Story {
return Loader.fromCompilation(gpa, cu, options);
}
pub fn fromCachedCompilation(
gpa: std.mem.Allocator,
bytes: []const u8,
options: Loader.Options,
) !Story {
return Loader.fromCachedCompilation(gpa, bytes, options);
}

136
src/Story/Loader.zig Normal file
View file

@ -0,0 +1,136 @@
const std = @import("std");
const InternPool = @import("../InternPool.zig");
const Compilation = @import("../compile.zig").Compilation;
const Story = @import("../Story.zig");
const Value = Story.Value;
const Object = Story.Object;
/// Convert an intern-pool key into a runtime `Story.Value`.
///
/// Scalar keys map directly; interned strings are materialized as
/// `Object.String` instances owned by `story`.
fn makeValueFromInterned(story: *Story, value: InternPool.Key) !Story.Value {
    switch (value) {
        .bool => |b| return .{ .bool = b },
        .int => |i| return .{ .int = @intCast(i) },
        .float => |f| return .{ .float = @bitCast(f) },
        .str => |str_index| {
            const string_object = try Object.String.create(story, .{
                .bytes = stringBytes(story, str_index),
            });
            return .{ .object = &string_object.base };
        },
    }
}
/// Build a runtime knot object from a compiled code chunk.
///
/// The chunk's constants and bytecode buffers are moved (via
/// `toOwnedSlice`) into the new code object, leaving the chunk's
/// lists empty afterwards.
fn makeKnotObject(
    story: *Story,
    name: []const u8,
    code: *InternPool.CodeChunk,
) !*Object.Knot {
    const gpa = story.gpa;
    const code_object = try Object.Code.create(story, .{
        .args_count = @intCast(code.args_count),
        .locals_count = @intCast(code.locals_count),
        .stack_size = @intCast(code.stack_size),
        .constants = try code.constants.toOwnedSlice(gpa),
        .code_bytes = try code.bytecode.toOwnedSlice(gpa),
    });
    return Object.Knot.create(story, .{
        .name = name,
        .code = code_object,
    });
}
/// Materialize every interned value into a runtime constants pool.
///
/// Returns a slice allocated with the compilation's gpa; the caller
/// (the story) takes ownership. Fix: the original declared a local
/// `gpa` alias but still spelled out `mod.gpa` in three call sites;
/// the allocator is now referenced consistently through the local.
fn makeConstantsPool(mod: *Compilation, story: *Story) ![]const Story.Value {
    const ip = &mod.intern_pool;
    const gpa = mod.gpa;
    var constants_pool: std.ArrayListUnmanaged(Value) = .empty;
    // No-op on the success path: `toOwnedSlice` leaves the list empty.
    defer constants_pool.deinit(gpa);
    try constants_pool.ensureUnusedCapacity(gpa, ip.values.items.len);
    for (ip.values.items) |value| {
        const obj = try makeValueFromInterned(story, value);
        constants_pool.appendAssumeCapacity(obj);
    }
    return constants_pool.toOwnedSlice(gpa);
}
/// Resolve an interned string index to its null-terminated bytes
/// inside `story.string_bytes`. Asserts the terminator exists.
fn stringBytes(story: *Story, index: InternPool.NullTerminatedString) [:0]const u8 {
    const tail = story.string_bytes[@intFromEnum(index)..];
    const end = std.mem.indexOfScalar(u8, tail, 0).?;
    return tail[0..end :0];
}
/// Runtime configuration for constructing a story from a compilation.
pub const Options = struct {
    // Number of slots allocated for each of the value stack and the
    // call stack (see fromCompilationCompat).
    stack_size: usize,
};
/// Populate an already-constructed `story` with the runtime state of a
/// finished compilation: stacks, string bytes, constants pool, globals,
/// knots, and stitches. Moves buffers out of `mod`'s intern pool, so the
/// compilation should not be reused for loading afterwards.
///
/// On error, allocations already attached to `story` are NOT released
/// here — the caller is expected to deinit the story.
pub fn fromCompilationCompat(
    gpa: std.mem.Allocator,
    mod: *Compilation,
    story: *Story,
    options: Options,
) !void {
    const ip = &mod.intern_pool;
    story.stack = try gpa.alloc(Story.Value, options.stack_size);
    story.call_stack = try gpa.alloc(Story.CallFrame, options.stack_size);
    // Takes ownership of the intern pool's string bytes; stringBytes()
    // below resolves indices against story.string_bytes from here on.
    story.string_bytes = try ip.string_bytes.toOwnedSlice(gpa);
    story.constants_pool = try makeConstantsPool(mod, story);
    // Global variables: key bytes point into story.string_bytes.
    for (mod.globals.items) |global| {
        const ip_key = ip.internedValue(global.key);
        const ip_value = ip.internedValue(global.value);
        const key_bytes = stringBytes(story, ip_key.str);
        const obj = try makeValueFromInterned(story, ip_value);
        try story.globals.put(gpa, key_bytes, obj);
    }
    // Knots must be registered before stitches so that a stitch with a
    // parent knot can find it in story.globals below.
    for (mod.knots.items) |knot| {
        const ip_key = ip.internedValue(knot.name_index);
        const key_bytes = stringBytes(story, ip_key.str);
        const knot_object = try makeKnotObject(story, key_bytes, knot.code_chunk);
        const value: Story.Value = .{ .object = &knot_object.base };
        try story.globals.put(gpa, key_bytes, value);
    }
    for (mod.stitches.items) |stitch| {
        const ip_key = ip.internedValue(stitch.name_index);
        const key_bytes = stringBytes(story, ip_key.str);
        const stitch_object = try makeKnotObject(story, key_bytes, stitch.code_chunk);
        if (stitch.knot_index) |index| {
            // Nested stitch: attach as a member of its parent knot.
            const parent_knot = mod.knots.items[@intFromEnum(index)];
            const s_key_value = ip.internedValue(parent_knot.name_index);
            const parent_knot_name = stringBytes(story, s_key_value.str);
            const parent_knot_value = story.globals.get(parent_knot_name).?;
            // NOTE(review): assumes the global registered under the knot's
            // name is always a knot object — confirm nothing can shadow it.
            const parent_knot_obj: *Object.Knot = @ptrCast(parent_knot_value.object);
            try parent_knot_obj.members.put(gpa, key_bytes, &stitch_object.base);
        } else {
            // Top-level stitch (or function): register as a global.
            const value: Story.Value = .{ .object = &stitch_object.base };
            try story.globals.put(gpa, key_bytes, value);
        }
    }
    // If the implicit entry knot exists, prime the VM to start there.
    if (story.getKnot(Story.default_knot_name)) |knot| {
        try story.pushStack(.{ .object = &knot.base });
        try story.divert(knot, 0);
    }
}
/// Construct a fresh `Story` from a finished compilation.
///
/// Fix: the original leaked the stacks, string bytes, and GC objects
/// that `fromCompilationCompat` had already attached to the story if a
/// later step inside it failed; `errdefer story.deinit()` now releases
/// that partially-initialized state on the error path. On success the
/// caller owns the returned story and must call `deinit`.
pub fn fromCompilation(
    gpa: std.mem.Allocator,
    mod: *Compilation,
    options: Options,
) !Story {
    var story: Story = .{
        .gpa = gpa,
        .arena = .init(gpa),
    };
    errdefer story.deinit();
    try fromCompilationCompat(gpa, mod, &story, options);
    return story;
}
/// Load a story from pre-compiled (cached) bytes.
///
/// Not yet implemented: always returns `error.NotImplemented`. The
/// parameters are discarded until the serialized compilation format
/// lands; the signature is kept so callers can wire it up now.
pub fn fromCachedCompilation(
    gpa: std.mem.Allocator,
    bytes: []const u8,
    options: Options,
) !Story {
    _ = gpa;
    _ = bytes;
    _ = options;
    return error.NotImplemented;
}

View file

@ -2,6 +2,7 @@ const std = @import("std");
const fatal = std.process.fatal;
const ink = @import("../root.zig");
const Story = ink.Story;
const Compilation = ink.Compilation;
test "fixture - variable arithmetic" {
try testRuntimeFixture("variable-arithmetic");
@ -262,12 +263,15 @@ test "variable observer" {
var io_w = std.Io.Writer.Allocating.init(gpa);
defer io_w.deinit();
var errors: std.ArrayListUnmanaged(Compilation.Error) = .empty;
defer errors.deinit(gpa);
var story = try ink.Story.fromSourceBytes(gpa,
\\VAR foo = 1
\\~foo = 10
, .{
.filename = "<STDIN>",
.error_writer = &io_w.writer,
.errors = &errors,
});
defer story.deinit();
@ -297,9 +301,13 @@ const Options = struct {
fn testRunner(gpa: std.mem.Allocator, source_bytes: [:0]const u8, options: Options) !void {
const io_r = options.input_reader;
const io_w = options.transcript_writer;
var errors: std.ArrayListUnmanaged(Compilation.Error) = .empty;
defer errors.deinit(gpa);
var story = try Story.fromSourceBytes(gpa, source_bytes, .{
.filename = "<STDIN>",
.error_writer = options.error_writer,
.errors = &errors,
});
defer story.deinit();

View file

@ -1,48 +1,14 @@
const std = @import("std");
const ink = @import("ink");
const Story = ink.Story;
const Module = ink.Module;
const Compilation = ink.Compilation;
const Ast = ink.Ast;
const AstGen = ink.AstGen;
const Ir = ink.Ir;
var global_allocator: std.heap.DebugAllocator(.{}) = .init;
var stdout_buffer: [4096]u8 align(std.heap.page_size_min) = undefined;
pub export fn ink_open() callconv(.c) ?*Story {
const gpa = global_allocator.allocator();
const story = gpa.create(Story) catch |err| switch (err) {
error.OutOfMemory => return null,
};
story.* = .{
.gpa = gpa,
.arena = .init(gpa),
.is_exited = false,
.can_advance = false,
.stack_top = 0,
.call_stack_top = 0,
.output_marker = 0,
.choice_selected = null,
.output_buffer = .empty,
.output_scratch = .empty,
.current_choices = .empty,
.variable_observers = .empty,
.globals = .empty,
.stack = &.{},
.call_stack = &.{},
.code_chunks = .empty,
.gc_objects = .{},
.constants_pool = &.{},
.string_bytes = &.{},
.dump_writer = null,
};
return story;
}
pub export fn ink_close(story: *Story) callconv(.c) void {
defer _ = global_allocator.deinit();
const gpa = story.gpa;
story.deinit();
gpa.destroy(story);
}
pub const InkLoadOpts = extern struct {
filename: [*]const u8,
filename_length: usize,
@ -51,61 +17,90 @@ pub const InkLoadOpts = extern struct {
flags: i32,
};
fn loadStory(story: *Story, options: *const InkLoadOpts) !void {
const gpa = story.gpa;
const LoadStoryOptions = struct {
filename: []const u8,
source_bytes: [:0]const u8,
flags: i32,
stack_size: usize,
error_writer: *std.Io.Writer,
};
fn loadStory(gpa: std.mem.Allocator, options: LoadStoryOptions) !*Story {
var arena_allocator = std.heap.ArenaAllocator.init(gpa);
defer arena_allocator.deinit();
const arena = arena_allocator.allocator();
const stderr = std.fs.File.stderr();
var stderr_writer = stderr.writer(&stdout_buffer);
var comp = try Module.compile(gpa, arena, .{
.source_bytes = options.source_bytes[0..options.source_length :0],
.filename = options.filename[0..options.filename_length],
.dump_writer = null,
.dump_use_color = false,
.dump_ast = false,
.dump_ir = false,
});
defer comp.deinit();
var tree = try Ast.parse(
gpa,
arena,
options.source_bytes,
options.filename,
@intCast(options.flags),
);
defer tree.deinit(gpa);
if (comp.errors.items.len > 0) {
for (comp.errors.items) |err| {
try comp.renderError(&stderr_writer.interface, err);
var ir = try AstGen.generate(gpa, &tree);
defer ir.deinit(gpa);
var errors: std.ArrayListUnmanaged(Compilation.Error) = .empty;
defer errors.deinit(gpa);
var cu = Compilation.build(gpa, tree, ir, &errors) catch |err| switch (err) {
else => |e| return e,
};
defer cu.deinit();
if (cu.hasCompileErrors()) {
for (cu.errors.items) |err| {
try cu.renderError(options.error_writer, err);
}
return error.LoadFailed;
return error.Failed;
}
// TODO: Make this configurable.
const stack_size = 128;
const eval_stack_ptr = try gpa.alloc(Story.Value, stack_size);
errdefer gpa.free(eval_stack_ptr);
const call_stack_ptr = try gpa.alloc(Story.CallFrame, stack_size);
errdefer gpa.free(call_stack_ptr);
story.can_advance = false;
story.stack = eval_stack_ptr;
story.call_stack = call_stack_ptr;
const story = try gpa.create(Story);
errdefer gpa.destroy(story);
story.* = .{
.gpa = gpa,
.arena = .init(gpa),
};
errdefer story.deinit();
try comp.setupStoryRuntime(gpa, story);
if (story.getKnot(Story.default_knot_name)) |knot| {
try story.pushStack(.{ .object = &knot.base });
try story.divert(knot, 0);
}
try Story.Loader.fromCompilationCompat(gpa, &cu, story, .{
.stack_size = options.stack_size,
});
return story;
}
pub export fn ink_load_story_options(
story: *Story,
options: *const InkLoadOpts,
) callconv(.c) c_int {
loadStory(story, options) catch |err| {
) callconv(.c) ?*Story {
const gpa = global_allocator.allocator();
const source_bytes = options.source_bytes[0..options.source_length :0];
const filename = options.filename[0..options.filename_length :0];
const stack_size = 128;
const stderr = std.fs.File.stderr();
var stderr_writer = stderr.writer(&stdout_buffer);
return loadStory(gpa, .{
.filename = filename,
.source_bytes = source_bytes,
.flags = options.flags,
.error_writer = &stderr_writer.interface,
.stack_size = stack_size,
}) catch |err| {
std.debug.print("{any}\n", .{@errorName(err)});
return -1;
return null;
};
return 0;
}
pub export fn ink_close(optional_story: ?*Story) callconv(.c) void {
defer _ = global_allocator.deinit();
if (optional_story) |story| {
const gpa = story.gpa;
story.deinit();
gpa.destroy(story);
}
}
pub export fn ink_story_can_continue(story: *Story) callconv(.c) bool {

View file

@ -1,12 +1,8 @@
const std = @import("std");
const Ast = @import("Ast.zig");
const AstGen = @import("AstGen.zig");
const Sema = @import("Sema.zig");
const Ir = @import("Ir.zig");
const Story = @import("Story.zig");
const Sema = @import("Sema.zig");
const InternPool = @import("InternPool.zig");
const Value = Story.Value;
const Object = Story.Object;
const assert = std.debug.assert;
pub fn IntrusiveQueue(comptime T: type) type {
@ -80,34 +76,33 @@ test IntrusiveQueue {
try testing.expect(q.pop() == null);
}
// TODO: Revisit this. We might not need this at all.
pub const WorkItem = struct {
pub const Compilation = struct {
gpa: std.mem.Allocator,
arena: std.heap.ArenaAllocator,
tree: Ast,
ir: Ir,
globals: std.ArrayListUnmanaged(InternPool.Global) = .empty,
knots: std.ArrayListUnmanaged(InternPool.Knot) = .empty,
stitches: std.ArrayListUnmanaged(InternPool.Stitch) = .empty,
errors: *std.ArrayListUnmanaged(Error),
intern_pool: InternPool = .{},
work_queue: WorkQueue = .{},
pub const WorkItem = struct {
tag: Tag,
next: ?*WorkItem = null,
decl_name: InternPool.Index,
inst_index: Ir.Inst.Index,
namespace: *Module.Namespace,
namespace: *Compilation.Namespace,
pub const Tag = enum {
knot,
stitch,
function,
};
};
};
pub const WorkQueue = IntrusiveQueue(WorkItem);
pub const Module = struct {
gpa: std.mem.Allocator,
arena: std.mem.Allocator,
tree: Ast,
ir: Ir,
globals: std.ArrayListUnmanaged(InternPool.Global) = .empty,
knots: std.ArrayListUnmanaged(InternPool.Knot) = .empty,
stitches: std.ArrayListUnmanaged(InternPool.Stitch) = .empty,
errors: std.ArrayListUnmanaged(Error) = .empty,
intern_pool: InternPool = .{},
work_queue: WorkQueue = .{},
pub const WorkQueue = IntrusiveQueue(WorkItem);
pub const Namespace = struct {
parent: ?*Namespace,
@ -143,28 +138,29 @@ pub const Module = struct {
message: []const u8,
};
fn generateFile(mod: *Module) !void {
fn analyzeAndGenerate(cu: *Compilation) !void {
const gpa = cu.gpa;
const arena = cu.arena.allocator();
const root_node: Ir.Inst.Index = .file_inst;
const gpa = mod.gpa;
const data = mod.ir.instructions[@intFromEnum(root_node)].data.payload;
const extra = mod.ir.extraData(Ir.Inst.Block, data.extra_index);
const top_level_decls = mod.ir.bodySlice(extra.end, extra.data.body_len);
const data = cu.ir.instructions[@intFromEnum(root_node)].data.payload;
const extra = cu.ir.extraData(Ir.Inst.Block, data.extra_index);
const top_level_decls = cu.ir.bodySlice(extra.end, extra.data.body_len);
var knot_index: ?InternPool.Knot.Index = null;
var sema: Sema = .{
.module = mod,
.gpa = gpa,
.arena = mod.arena,
.ir = mod.ir,
.errors = &mod.errors,
.arena = arena,
.module = cu,
.ir = cu.ir,
.errors = cu.errors,
};
defer sema.deinit();
const file_scope = try mod.createNamespace(null);
const file_scope = try cu.createNamespace(arena, null);
try sema.scanTopLevelDecls(file_scope, top_level_decls);
while (mod.work_queue.pop()) |work_unit| {
const code_chunk = try mod.createCodeChunk();
while (cu.work_queue.pop()) |work_unit| {
const code_chunk = try cu.createCodeChunk(arena);
var builder: Sema.Builder = .{
.sema = &sema,
@ -180,8 +176,8 @@ pub const Module = struct {
try sema.analyzeKnot(&builder, work_unit.inst_index);
try builder.finalize();
knot_index = @enumFromInt(mod.knots.items.len);
try mod.knots.append(gpa, .{
knot_index = @enumFromInt(cu.knots.items.len);
try cu.knots.append(gpa, .{
.name_index = work_unit.decl_name,
.code_chunk = code_chunk,
});
@ -191,7 +187,7 @@ pub const Module = struct {
try sema.analyzeStitch(&builder, work_unit.inst_index);
try builder.finalize();
try mod.stitches.append(gpa, .{
try cu.stitches.append(gpa, .{
.knot_index = knot_index,
.name_index = work_unit.decl_name,
.code_chunk = code_chunk,
@ -202,7 +198,7 @@ pub const Module = struct {
try sema.analyzeFunction(&builder, work_unit.inst_index);
try builder.finalize();
try mod.stitches.append(gpa, .{
try cu.stitches.append(gpa, .{
.knot_index = null,
.name_index = work_unit.decl_name,
.code_chunk = code_chunk,
@ -212,170 +208,78 @@ pub const Module = struct {
}
}
pub const Options = struct {
source_bytes: [:0]const u8,
filename: []const u8,
dump_writer: ?*std.Io.Writer = null,
dump_ast: bool = false,
dump_ir: bool = false,
dump_use_color: bool = false,
};
pub fn compile(
fn generate(
gpa: std.mem.Allocator,
arena: std.mem.Allocator,
options: Options,
) !Module {
const tree = try Ast.parse(gpa, arena, options.source_bytes, options.filename, 0);
if (options.dump_writer) |w| {
if (options.dump_ast) {
try w.writeAll("=== AST ===\n");
try tree.render(gpa, w, .{
.use_color = options.dump_use_color,
});
try w.flush();
}
}
var module: Module = .{
tree: Ast,
ir: Ir,
errors: *std.ArrayListUnmanaged(Error),
) !Compilation {
var cu: Compilation = .{
.gpa = gpa,
.arena = arena,
.arena = .init(gpa),
.tree = tree,
.ir = try AstGen.generate(gpa, &tree),
.ir = ir,
.errors = errors,
};
errdefer module.deinit();
errdefer cu.deinit();
if (options.dump_writer) |w| {
if (options.dump_ir) {
try w.writeAll("=== Semantic IR ===\n");
try module.ir.dumpInfo(w);
try module.ir.render(w);
if (errors.items.len != 0) return cu;
try cu.intern_pool.string_bytes.append(gpa, 0);
try cu.intern_pool.values.append(gpa, .{ .bool = true });
try cu.intern_pool.values.append(gpa, .{ .bool = false });
try cu.intern_pool.values.append(gpa, .{ .str = .empty });
cu.analyzeAndGenerate() catch |err| switch (err) {
error.OutOfMemory => return error.OutOfMemory,
error.AnalysisFail => return cu,
else => |e| @panic(@errorName(e)),
};
return cu;
}
}
if (module.ir.hasCompileErrors()) {
const payload_index = module.ir.extra[@intFromEnum(Ir.ExtraIndex.compile_errors)];
pub fn build(
gpa: std.mem.Allocator,
tree: Ast,
ir: Ir,
errors: *std.ArrayListUnmanaged(Error),
) !Compilation {
if (ir.hasCompileErrors()) {
const payload_index = ir.extra[@intFromEnum(Ir.ExtraIndex.compile_errors)];
assert(payload_index != 0);
const header = module.ir.extraData(Ir.Inst.CompileErrors, payload_index);
const header = ir.extraData(Ir.Inst.CompileErrors, payload_index);
const items_len = header.data.items_len;
var extra_index = header.end;
// TODO: Make an iterator for this?
for (0..items_len) |_| {
const item = module.ir.extraData(Ir.Inst.CompileErrors.Item, extra_index);
const item = ir.extraData(Ir.Inst.CompileErrors.Item, extra_index);
extra_index = item.end;
const loc = findLineColumn(tree.source, item.data.byte_offset);
try module.errors.append(gpa, .{
try errors.append(gpa, .{
.line = loc.line,
.column = loc.column,
.snippet = loc.source_line,
.message = module.ir.nullTerminatedString(item.data.msg),
.message = ir.nullTerminatedString(item.data.msg),
});
}
} else {
try module.intern_pool.string_bytes.append(gpa, 0);
try module.intern_pool.values.append(gpa, .{ .bool = true });
try module.intern_pool.values.append(gpa, .{ .bool = false });
try module.intern_pool.values.append(gpa, .{ .str = .empty });
// TODO: Revisit this.
module.generateFile() catch |err| switch (err) {
error.OutOfMemory => return error.OutOfMemory,
error.AnalysisFail => return module,
else => |e| @panic(@errorName(e)),
};
}
return module;
return .generate(gpa, tree, ir, errors);
}
fn storyStr(story: *Story, index: InternPool.NullTerminatedString) [:0]const u8 {
const slice = story.string_bytes[@intFromEnum(index)..];
return slice[0..std.mem.indexOfScalar(u8, slice, 0).? :0];
pub fn hasCompileErrors(cu: *const Compilation) bool {
return cu.errors.items.len > 0;
}
fn makeValueFromInterned(story: *Story, value: InternPool.Key) !Story.Value {
return switch (value) {
.bool => |boolean| .{ .bool = boolean },
.int => |int| .{ .int = @intCast(int) },
.float => |float| .{ .float = @bitCast(float) },
.str => |index| blk: {
const str_object = try Object.String.create(story, .{
.bytes = storyStr(story, index),
});
break :blk .{ .object = &str_object.base };
},
};
}
fn makeKnotObject(story: *Story, name_bytes: []const u8, code: *InternPool.CodeChunk) !*Object.Knot {
return Object.Knot.create(story, .{
.name = name_bytes,
.code = try Object.Code.create(story, .{
.args_count = @intCast(code.args_count),
.locals_count = @intCast(code.locals_count),
.stack_size = @intCast(code.stack_size),
.constants = try code.constants.toOwnedSlice(story.gpa),
.code_bytes = try code.bytecode.toOwnedSlice(story.gpa),
}),
});
}
fn makeConstantsPool(mod: *Module, story: *Story) ![]const Story.Value {
const ip = &mod.intern_pool;
const gpa = mod.gpa;
var constants_pool: std.ArrayListUnmanaged(Value) = .empty;
defer constants_pool.deinit(mod.gpa);
try constants_pool.ensureUnusedCapacity(mod.gpa, ip.values.items.len);
for (ip.values.items) |value| {
const obj = try makeValueFromInterned(story, value);
constants_pool.appendAssumeCapacity(obj);
}
return constants_pool.toOwnedSlice(gpa);
}
/// Transfer the compiled module's data into the runtime `story`:
/// string bytes, the constant pool, global variables, knots, and stitches.
/// Order matters: knots are registered into `story.globals` before stitches,
/// because nested stitches look their parent knot up by name below.
/// NOTE(review): both `gpa` and `mod.gpa` are used for allocations here —
/// presumably the same allocator; confirm at the call site.
pub fn setupStoryRuntime(mod: *Module, gpa: std.mem.Allocator, story: *Story) !void {
const ip = &mod.intern_pool;
// Hand the intern pool's string storage over to the story wholesale;
// storyStr() below resolves name indices against this buffer.
story.string_bytes = try ip.string_bytes.toOwnedSlice(mod.gpa);
story.constants_pool = try makeConstantsPool(mod, story);
// Register global variables: key/value pairs are interned indices that
// must be resolved into story-owned bytes and runtime values.
for (mod.globals.items) |global| {
const ip_key = ip.internedValue(global.key);
const ip_value = ip.internedValue(global.value);
const key_bytes = storyStr(story, ip_key.str);
const obj = try makeValueFromInterned(story, ip_value);
try story.globals.put(gpa, key_bytes, obj);
}
// Register every knot as a global, keyed by its name.
for (mod.knots.items) |knot| {
const ip_key = ip.internedValue(knot.name_index);
const key_bytes = storyStr(story, ip_key.str);
const knot_object = try makeKnotObject(story, key_bytes, knot.code_chunk);
const value: Story.Value = .{ .object = &knot_object.base };
try story.globals.put(gpa, key_bytes, value);
}
// Register stitches: either nested under their parent knot's members,
// or as plain globals when they have no enclosing knot.
for (mod.stitches.items) |stitch| {
const ip_key = ip.internedValue(stitch.name_index);
const key_bytes = storyStr(story, ip_key.str);
const stitch_object = try makeKnotObject(story, key_bytes, stitch.code_chunk);
if (stitch.knot_index) |index| {
const parent_knot = mod.knots.items[@intFromEnum(index)];
const s_key_value = ip.internedValue(parent_knot.name_index);
const parent_knot_name = storyStr(story, s_key_value.str);
// The knot loop above guarantees the parent is already registered,
// so the lookup cannot fail.
const parent_knot_value = story.globals.get(parent_knot_name).?;
// NOTE(review): downcast assumes `base` is the Knot's first field so
// the object pointer and the Knot pointer coincide — confirm layout.
const parent_knot_obj: *Object.Knot = @ptrCast(parent_knot_value.object);
try parent_knot_obj.members.put(gpa, key_bytes, &stitch_object.base);
} else {
const value: Story.Value = .{ .object = &stitch_object.base };
try story.globals.put(gpa, key_bytes, value);
}
}
}
pub fn createNamespace(mod: *Module, parent: ?*Namespace) error{OutOfMemory}!*Namespace {
const ns = try mod.arena.create(Namespace);
pub fn createNamespace(
cu: *Compilation,
arena: std.mem.Allocator,
parent: ?*Namespace,
) error{OutOfMemory}!*Namespace {
_ = cu;
const ns = try arena.create(Namespace);
ns.* = .{
.parent = parent,
.decls = .empty,
@ -383,14 +287,19 @@ pub const Module = struct {
return ns;
}
pub fn createCodeChunk(mod: *Module) error{OutOfMemory}!*InternPool.CodeChunk {
const chunk = try mod.arena.create(InternPool.CodeChunk);
pub fn createCodeChunk(
cu: *Compilation,
arena: std.mem.Allocator,
) error{OutOfMemory}!*InternPool.CodeChunk {
_ = cu;
const chunk = try arena.create(InternPool.CodeChunk);
chunk.* = .{};
return chunk;
}
pub fn queueWorkItem(
mod: *Module,
mod: *Compilation,
arena: std.mem.Allocator,
options: struct {
tag: WorkItem.Tag,
decl_name: InternPool.Index,
@ -398,7 +307,7 @@ pub const Module = struct {
namespace: *Namespace,
},
) !void {
const work_item = try mod.arena.create(WorkItem);
const work_item = try arena.create(WorkItem);
work_item.* = .{
.tag = options.tag,
.decl_name = options.decl_name,
@ -408,7 +317,7 @@ pub const Module = struct {
mod.work_queue.push(work_item);
}
pub fn renderError(mod: *const Module, w: *std.Io.Writer, compile_error: Error) !void {
pub fn renderError(mod: *const Compilation, w: *std.Io.Writer, compile_error: Error) !void {
const filename = mod.tree.filename;
const line = compile_error.line + 1;
const column = compile_error.column + 1;
@ -427,15 +336,13 @@ pub const Module = struct {
return w.flush();
}
pub fn deinit(mod: *Module) void {
pub fn deinit(mod: *Compilation) void {
const gpa = mod.gpa;
mod.tree.deinit(gpa);
mod.ir.deinit(gpa);
mod.arena.deinit();
mod.intern_pool.deinit(gpa);
mod.globals.deinit(gpa);
mod.knots.deinit(gpa);
mod.stitches.deinit(gpa);
mod.errors.deinit(gpa);
mod.* = undefined;
}
};
@ -472,8 +379,3 @@ pub fn findLineColumn(source: []const u8, byte_offset: usize) Loc {
.source_line = source[line_start..i],
};
}
/// Resolve a null-terminated string stored in `bytes`. `index` is the byte
/// offset of the string's first character; the result spans up to (and
/// excluding) the terminating zero. Asserts a zero byte exists past `index`.
fn hack(bytes: []const u8, index: InternPool.NullTerminatedString) [:0]const u8 {
    const start = @intFromEnum(index);
    const tail = bytes[start..];
    const len = std.mem.indexOfScalar(u8, tail, 0).?;
    return tail[0..len :0];
}

View file

@ -1,6 +1,7 @@
const std = @import("std");
const compile = @import("compile.zig");
const Module = compile.Module;
const Ast = @import("Ast.zig");
const AstGen = @import("AstGen.zig");
const Compilation = @import("compile.zig").Compilation;
test "compiler: VAR expected expression" {
try testEqual(
@ -173,17 +174,21 @@ fn testEqual(source_bytes: [:0]const u8, expected_error: []const u8) !void {
const io_w = &allocating.writer;
const arena = arena_allocator.allocator();
var c = try Module.compile(gpa, arena, .{
.source_bytes = source_bytes,
.filename = "<STDIN>",
.dump_writer = null,
.dump_use_color = false,
.dump_ast = false,
.dump_ir = false,
});
defer c.deinit();
var errors: std.ArrayListUnmanaged(Compilation.Error) = .empty;
defer errors.deinit(gpa);
try std.testing.expect(c.errors.items.len > 0);
for (c.errors.items) |err| try c.renderError(io_w, err);
var tree = try Ast.parse(gpa, arena, source_bytes, "<STDIN>", 0);
defer tree.deinit(gpa);
var ir = try AstGen.generate(gpa, &tree);
defer ir.deinit(gpa);
var cu = Compilation.build(gpa, tree, ir, &errors) catch |err| switch (err) {
else => |e| return e,
};
defer cu.deinit();
try std.testing.expect(errors.items.len > 0);
for (errors.items) |err| try cu.renderError(io_w, err);
return std.testing.expectEqualSlices(u8, expected_error, allocating.written());
}

View file

@ -1,6 +1,10 @@
const std = @import("std");
const ink = @import("ink");
const Ast = ink.Ast;
const AstGen = ink.AstGen;
const Ir = ink.Ir;
const Story = ink.Story;
const Compilation = ink.Compilation;
const fatal = std.process.fatal;
var stdin_buffer: [4096]u8 align(std.heap.page_size_min) = undefined;
@ -8,6 +12,32 @@ var stdout_buffer: [4096]u8 align(std.heap.page_size_min) = undefined;
var stderr_buffer: [4096]u8 align(std.heap.page_size_min) = undefined;
var debug_allocator: std.heap.DebugAllocator(.{}) = .init;
/// Forward-only cursor over the CLI argument slice.
const ArgsIterator = struct {
    args: []const []const u8,
    index: usize = 0,

    /// Return the next argument, or null once all arguments are consumed.
    pub fn next(iter: *ArgsIterator) ?[]const u8 {
        if (iter.index < iter.args.len) {
            const arg = iter.args[iter.index];
            iter.index += 1;
            return arg;
        }
        return null;
    }

    /// Return the next argument, or exit with a fatal diagnostic naming the
    /// preceding (flag) argument when none remains.
    pub fn nextOrFatal(iter: *ArgsIterator) []const u8 {
        return iter.next() orelse
            fatal("expected parameter after {s}", .{iter.args[iter.index - 1]});
    }
};
/// Input kinds the CLI accepts, selected from the input path's extension.
const FileExtension = enum {
    /// Ink source file (`.ink`), compiled before running.
    ink,
    /// Pre-compiled source file (`.inkc`), loaded directly.
    inkc,
};
pub fn main() !void {
const gpa = debug_allocator.allocator();
defer _ = debug_allocator.deinit();
@ -31,7 +61,7 @@ fn mainArgs(
args_list: []const [:0]const u8,
) !void {
var source_path: ?[]const u8 = null;
var arg_index: usize = 1;
var output_path: ?[]const u8 = null;
var compile_only: bool = false;
var dump_ast: bool = false;
var dump_ir: bool = false;
@ -40,8 +70,11 @@ fn mainArgs(
var use_stdin: bool = false;
var use_color: bool = false;
while (arg_index < args_list.len) : (arg_index += 1) {
const arg = args_list[arg_index];
var args_iter: ArgsIterator = .{
.args = args_list[1..],
};
while (args_iter.next()) |arg| {
if (std.mem.startsWith(u8, arg, "-")) {
if (std.mem.eql(u8, arg, "--stdin")) {
use_stdin = true;
@ -57,6 +90,9 @@ fn mainArgs(
dump_trace = true;
} else if (std.mem.eql(u8, arg, "--use-color")) {
use_color = true;
} else if (std.mem.eql(u8, arg, "--output")) {
const next_arg = args_iter.nextOrFatal();
output_path = next_arg;
} else {
fatal("invalid parameter: '{s}'", .{arg});
}
@ -82,32 +118,74 @@ fn mainArgs(
};
};
const stdout = std.fs.File.stdout();
var stdout_writer = stdout.writer(&stdout_buffer);
const stderr = std.fs.File.stderr();
var stderr_writer = stderr.writer(&stderr_buffer);
const io_w = &stderr_writer.interface;
const stack_size = 128;
var story = Story.fromSourceBytes(gpa, source_bytes, .{
.filename = filename,
.error_writer = &stderr_writer.interface,
.dump_writer = &stdout_writer.interface,
.dump_use_color = use_color,
.dump_ast = dump_ast,
.dump_ir = dump_ir,
}) catch |err| switch (err) {
//error.LoadFailed => std.process.exit(1),
const file_ext: FileExtension = f: {
const bytes = std.fs.path.extension(filename);
if (std.mem.eql(u8, bytes, ".ink")) break :f .ink;
if (std.mem.eql(u8, bytes, ".inkc")) break :f .inkc;
return error.InvalidFileExtension;
};
switch (file_ext) {
.ink => {
var tree = try Ast.parse(gpa, arena, source_bytes, filename, 0);
defer tree.deinit(gpa);
var ir = try AstGen.generate(gpa, &tree);
defer ir.deinit(gpa);
var errors: std.ArrayListUnmanaged(Compilation.Error) = .empty;
defer errors.deinit(gpa);
if (dump_ast) {
try io_w.writeAll("=== AST ===\n");
try tree.render(gpa, io_w, .{
.use_color = use_color,
});
try io_w.flush();
}
if (dump_ir) {
try io_w.writeAll("=== IR ===\n");
try ir.render(io_w);
try io_w.flush();
}
var cu = Compilation.build(gpa, tree, ir, &errors) catch |err| switch (err) {
else => |e| return e,
};
defer story.deinit();
defer cu.deinit();
if (dump_trace) {
story.dump_writer = &stderr_writer.interface;
if (cu.hasCompileErrors()) {
for (cu.errors.items) |err| {
try cu.renderError(io_w, err);
}
} else if (output_path) |_| {
return error.NotImplemented;
} else {
var story: Story = try .fromCompilation(gpa, &cu, .{
.stack_size = stack_size,
});
defer story.deinit();
if (dump_story) {
try story.dump(&stdout_writer.interface);
try story.dump(io_w);
}
if (dump_trace) {
story.dump_writer = io_w;
}
return if (!compile_only) run(gpa, &story);
}
},
.inkc => {
var story: Story = try .fromCachedCompilation(gpa, source_bytes, .{
.stack_size = stack_size,
});
defer story.deinit();
return if (!compile_only) run(gpa, &story);
},
}
}
fn run(_: std.mem.Allocator, story: *Story) !void {

View file

@ -2,7 +2,9 @@ const std = @import("std");
const tokenizer = @import("tokenizer.zig");
pub const Story = @import("Story.zig");
pub const Ast = @import("Ast.zig");
pub const Module = @import("compile.zig").Module;
pub const AstGen = @import("AstGen.zig");
pub const Ir = @import("Ir.zig");
pub const Compilation = @import("compile.zig").Compilation;
pub const max_src_size = std.math.maxInt(u32);