refactor: working towards better astgen patterns

This commit is contained in:
Brett Broadhurst 2026-03-08 04:22:57 -06:00
parent d6ff3a40bd
commit 95d89b7bf9
Failed to generate hash of commit
7 changed files with 922 additions and 596 deletions

View file

@ -218,6 +218,7 @@ pub const Error = struct {
panic,
unexpected_token,
unknown_identifier,
redefined_identifier,
assignment_to_const,
expected_newline,
expected_expression,

View file

@ -162,6 +162,7 @@ fn renderError(r: *Render, writer: *std.Io.Writer, err: Ast.Error) !void {
switch (err.tag) {
.panic => try renderErrorf(r, writer, err, "parser panicked"),
.unknown_identifier => try renderErrorf(r, writer, err, "unknown identifier"),
.redefined_identifier => try renderErrorf(r, writer, err, "redefined identifier"),
.assignment_to_const => try renderErrorf(r, writer, err, "assignment to constant value"),
.unexpected_token => try renderErrorf(r, writer, err, "unexpected token"),
.expected_newline => try renderErrorf(r, writer, err, "expected newline"),

File diff suppressed because it is too large Load diff

View file

@ -497,11 +497,48 @@ pub fn loadFromString(
return error.Invalid;
}
var story = try AstGen.generate(gpa, &ast);
const comp_unit = try AstGen.generate(gpa, &ast);
defer comp_unit.deinit(gpa);
comp_unit.dumpStringsWithHex();
var story: Story = .{
.allocator = gpa,
.can_advance = false,
.dump_writer = options.dump_writer,
};
errdefer story.deinit();
try story.divert("@main@");
story.dump_writer = options.dump_writer;
for (comp_unit.knots) |compiled_chunk| {
const chunk_name = comp_unit.resolveString(compiled_chunk.name_ref);
var constant_pool: std.ArrayList(*Object) = .empty;
try constant_pool.ensureUnusedCapacity(gpa, compiled_chunk.constants.len);
defer constant_pool.deinit(gpa);
for (comp_unit.resolveConstants(compiled_chunk.constants)) |constant| {
switch (constant) {
.number => |value| {
const object: *Object.Number = try .create(&story, .{ .integer = value });
constant_pool.appendAssumeCapacity(&object.base);
},
.string => |ref| {
const bytes = comp_unit.resolveString(ref);
const object: *Object.String = try .create(&story, bytes);
constant_pool.appendAssumeCapacity(&object.base);
},
}
}
const runtime_chunk: *Object.ContentPath = try .create(&story, .{
.name = try .create(&story, chunk_name),
.arity = @intCast(compiled_chunk.arity),
.locals_count = @intCast(compiled_chunk.stack_size - compiled_chunk.arity),
.const_pool = try constant_pool.toOwnedSlice(gpa),
.bytes = try gpa.dupe(u8, comp_unit.resolveInstructions(compiled_chunk.instructions)),
});
try story.paths.append(gpa, &runtime_chunk.base);
}
try story.divert("$__main__$");
story.can_advance = true;
return story;
}

View file

@ -262,17 +262,23 @@ pub const Object = struct {
base: Object,
name: *Object.String,
arity: usize,
// TODO: Rename this to stack size.
locals_count: usize,
// TODO: Rename this to constant_pool.
const_pool: []*Object,
bytes: []const u8,
pub fn create(
story: *Story,
pub const CreateOptions = struct {
name: *Object.String,
arity: usize,
locals_count: usize,
const_pool: []*Object,
bytes: []const u8,
};
pub fn create(
story: *Story,
options: CreateOptions,
) error{OutOfMemory}!*ContentPath {
const gpa = story.allocator;
const alloc_len = @sizeOf(ContentPath);
@ -285,11 +291,11 @@ pub const Object = struct {
.is_marked = false,
.node = .{},
},
.name = name,
.arity = arity,
.locals_count = locals_count,
.const_pool = const_pool,
.bytes = bytes,
.name = options.name,
.arity = options.arity,
.locals_count = options.locals_count,
.const_pool = options.const_pool,
.bytes = options.bytes,
};
story.gc_objects.prepend(&object.base.node);