ink/src/compile.zig
2026-03-23 22:45:55 -06:00

228 lines
7.2 KiB
Zig

const std = @import("std");
const Ast = @import("Ast.zig");
const AstGen = @import("AstGen.zig");
const Sema = @import("Sema.zig");
const Ir = @import("Ir.zig");
const Story = @import("Story.zig");
const Object = Story.Object;
const assert = std.debug.assert;
/// The result of compiling one ink source file: the parsed tree, the semantic
/// IR, any collected compile errors, and (on success) the lowered knots and
/// constant pool ready to be installed into a runtime `Story`.
pub const Compilation = struct {
    /// General-purpose allocator that owns `errors`, `knots`, `constants`,
    /// and the IR's backing buffers. Used again in `deinit`.
    gpa: std.mem.Allocator,
    /// Arena holding parse-lifetime allocations (notably the AST); freed
    /// wholesale in `deinit`.
    arena: std.heap.ArenaAllocator,
    tree: Ast,
    ir: Ir,
    /// Compile errors gathered during AstGen; empty when compilation succeeded.
    errors: []Error,
    /// Lowered functions ("knots"); empty when there were fatal errors.
    knots: []Knot,
    /// Shared constant pool referenced by knot bytecode; empty on fatal errors.
    constants: []Constant,

    /// One rendered-ready diagnostic. `line`/`column` are zero-based;
    /// `renderError` converts to one-based for display.
    pub const Error = struct {
        line: usize,
        column: usize,
        /// Full text of the offending source line (no trailing newline).
        snippet: []const u8,
        message: []const u8,
    };

    /// A compiled knot (function): metadata plus its bytecode and per-knot
    /// constant indices. The lists are drained into runtime objects by
    /// `setupStoryRuntime`.
    pub const Knot = struct {
        name: Ir.NullTerminatedString,
        arity: u32,
        /// Total slots needed; locals count is `stack_size - arity`.
        stack_size: u32,
        constants: std.ArrayListUnmanaged(u32) = .empty,
        bytecode: std.ArrayListUnmanaged(u8) = .empty,
    };

    /// A pool constant. Strings are stored as offsets into the IR's
    /// string-bytes table, not as owned slices.
    pub const Constant = union(enum) {
        integer: u64,
        string: Ir.NullTerminatedString,
    };

    /// Writes one diagnostic to `w` in `file:line:col: error: msg` form,
    /// followed by the source line and a caret pointing at the column.
    /// Flushes the writer before returning.
    pub fn renderError(cu: *const Compilation, w: *std.Io.Writer, compile_error: Error) !void {
        const filename = cu.tree.filename;
        // Stored locations are zero-based; editors expect one-based.
        const line = compile_error.line + 1;
        const column = compile_error.column + 1;
        try w.print(
            "{s}:{d}:{d}: error: {s}\n",
            .{ filename, line, column, compile_error.message },
        );
        try w.print("{d:<4} | {s}\n", .{ line, compile_error.snippet });
        // Gutter must match the `{d:<4} | ` prefix above so the caret aligns.
        try w.writeAll("     | ");
        if (column > 1) {
            try w.splatByteAll(' ', column - 1);
        }
        try w.writeAll("^\n");
        return w.flush();
    }

    pub const CompileOptions = struct {
        source_bytes: [:0]const u8,
        filename: [:0]const u8,
        /// When set, AST/IR dumps (per the flags below) are written here.
        dump_writer: ?*std.Io.Writer = null,
        dump_ast: bool = false,
        dump_ir: bool = false,
        dump_use_color: bool = false,
    };

    /// Parses, generates IR, and (eventually) runs semantic analysis on one
    /// source file. Always returns a `Compilation`, even when there are
    /// compile errors — check `errors.len` before using `knots`/`constants`.
    /// Caller owns the result and must call `deinit`.
    pub fn compile(gpa: std.mem.Allocator, options: CompileOptions) !Compilation {
        var arena_allocator = std.heap.ArenaAllocator.init(gpa);
        errdefer arena_allocator.deinit();
        var errors: std.ArrayListUnmanaged(Error) = .empty;
        // `toOwnedSlice` below empties the list, so this defer only frees
        // leftovers on the error path.
        defer errors.deinit(gpa);
        const arena = arena_allocator.allocator();
        const ast = try Ast.parse(gpa, arena, options.source_bytes, options.filename, 0);
        var ir = try AstGen.generate(gpa, &ast);
        // On success `ir` is moved into the returned Compilation; only free on error.
        errdefer ir.deinit(gpa);
        var sema: Sema = .{
            .gpa = gpa,
            .arena = arena,
            .tree = ast,
            .ir = ir,
            .errors = &errors,
        };
        // Safe after `toOwnedSlice` drains sema's lists on the success path.
        defer sema.deinit();
        if (options.dump_writer) |w| {
            if (options.dump_ast) {
                try w.writeAll("=== AST ===\n");
                try ast.render(gpa, w, .{
                    .use_color = options.dump_use_color,
                });
            }
            if (options.dump_ir) {
                try w.writeAll("=== Semantic IR ===\n");
                try ir.dumpInfo(w);
                try ir.render(w);
            }
        }
        // `fatal` means AstGen recorded compile errors; skip Sema and return
        // an empty knot/constant set alongside the collected diagnostics.
        const fatal = if (ir.hasCompileErrors()) fatal: {
            const payload_index = ir.extra[@intFromEnum(Ir.ExtraIndex.compile_errors)];
            assert(payload_index != 0);
            const header = ir.extraData(Ir.Inst.CompileErrors, payload_index);
            const items_len = header.data.items_len;
            var extra_index = header.end;
            // TODO: Make an iterator for this?
            for (0..items_len) |_| {
                const item = ir.extraData(Ir.Inst.CompileErrors.Item, extra_index);
                extra_index = item.end;
                const loc = findLineColumn(ast.source, item.data.byte_offset);
                try errors.append(gpa, .{
                    .line = loc.line,
                    .column = loc.column,
                    .snippet = loc.source_line,
                    .message = ir.nullTerminatedString(item.data.msg),
                });
            }
            break :fatal true;
        } else fatal: {
            //sema.analyzeFile(.file_inst) catch |err| switch (err) {
            //    error.OutOfMemory => return error.OutOfMemory,
            //    error.AnalysisFail => break :fatal true,
            //    // TODO: These errors should be handled...
            //    else => |e| return e,
            //};
            break :fatal false;
        };
        return .{
            .gpa = gpa,
            .arena = arena_allocator,
            .tree = ast,
            .ir = ir,
            .errors = try errors.toOwnedSlice(gpa),
            .constants = if (fatal) &.{} else try sema.constants.toOwnedSlice(gpa),
            .knots = if (fatal) &.{} else try sema.knots.toOwnedSlice(gpa),
        };
    }

    /// Installs the compiled constants and knots into `story`, creating the
    /// runtime objects and registering each knot under its name in
    /// `story.globals`. Also transfers ownership of the IR string-bytes table
    /// to the story. Must only be called on an error-free compilation; the
    /// knots' lists are drained, so this is a one-shot operation.
    pub fn setupStoryRuntime(cu: *Compilation, gpa: std.mem.Allocator, story: *Story) !void {
        assert(cu.errors.len == 0);
        const constants_pool = &story.constants_pool;
        try constants_pool.ensureUnusedCapacity(gpa, cu.constants.len);
        for (cu.constants) |constant| {
            switch (constant) {
                .integer => |value| {
                    const object: *Object.Number = try .create(story, .{
                        .integer = @intCast(value),
                    });
                    constants_pool.appendAssumeCapacity(&object.base);
                },
                .string => |ref| {
                    // Resolve the IR string offset to bytes before the story
                    // copies them into its own object.
                    const bytes = cu.ir.nullTerminatedString(ref);
                    const object: *Object.String = try .create(story, bytes);
                    constants_pool.appendAssumeCapacity(&object.base);
                },
            }
        }
        for (cu.knots) |*knot| {
            const knot_name = cu.ir.nullTerminatedString(knot.name);
            const runtime_chunk: *Object.ContentPath = try .create(story, .{
                .name = try .create(story, knot_name),
                .arity = @intCast(knot.arity),
                .locals_count = @intCast(knot.stack_size - knot.arity),
                // Ownership of the per-knot buffers moves to the runtime chunk.
                .const_pool = try knot.constants.toOwnedSlice(gpa),
                .bytes = try knot.bytecode.toOwnedSlice(gpa),
            });
            try story.globals.put(gpa, knot_name, &runtime_chunk.base);
        }
        // Hand the string table to the story and leave an empty slice behind
        // so `cu.ir.deinit` does not double-free it.
        story.string_bytes = cu.ir.string_bytes;
        cu.ir.string_bytes = &.{};
    }

    /// Frees everything owned by the compilation. Safe to call whether or not
    /// `setupStoryRuntime` ran (drained lists deinit to a no-op).
    pub fn deinit(cu: *Compilation) void {
        const gpa = cu.gpa;
        for (cu.knots) |*knot| {
            knot.constants.deinit(gpa);
            knot.bytecode.deinit(gpa);
        }
        gpa.free(cu.knots);
        gpa.free(cu.errors);
        gpa.free(cu.constants);
        cu.ir.deinit(gpa);
        cu.arena.deinit();
        cu.* = undefined;
    }
};
/// A zero-based source position plus the full text of the line it falls on,
/// as produced by `findLineColumn`.
pub const Loc = struct {
    line: usize,
    column: usize,
    /// Slice into the original source covering the whole line, without the
    /// trailing newline.
    source_line: []const u8,
};
/// Translates `byte_offset` into a zero-based line/column plus the text of the
/// containing line. Columns count bytes, not display width or codepoints.
/// An offset past the end of `source` is clamped to `source.len` (an
/// EOF-pointing diagnostic) instead of reading out of bounds.
pub fn findLineColumn(source: []const u8, byte_offset: usize) Loc {
    // Robustness: never index past the buffer, even for bogus offsets.
    const offset = @min(byte_offset, source.len);
    var line: usize = 0;
    var column: usize = 0;
    var line_start: usize = 0;
    for (source[0..offset], 0..) |byte, i| {
        if (byte == '\n') {
            line += 1;
            column = 0;
            line_start = i + 1;
        } else {
            column += 1;
        }
    }
    // Extend to the end of the current line so the caller gets a full snippet.
    const line_end = std.mem.indexOfScalarPos(u8, source, offset, '\n') orelse source.len;
    return .{
        .line = line,
        .column = column,
        .source_line = source[line_start..line_end],
    };
}