feat: parsing a decent chunk of the ink grammar
This commit is contained in:
parent
5268a53148
commit
4ebdd3c66e
3 changed files with 988 additions and 7 deletions
|
|
@ -251,16 +251,14 @@ pub fn createKnotDeclNode(
|
|||
|
||||
pub fn parse(
|
||||
gpa: std.mem.Allocator,
|
||||
arena: std.mem.Allocator,
|
||||
source: [:0]const u8,
|
||||
filename: []const u8,
|
||||
_: u32,
|
||||
) !Ast {
|
||||
var arena_allocator = std.heap.ArenaAllocator.init(gpa);
|
||||
defer arena_allocator.deinit();
|
||||
|
||||
var parser: Parse = .{
|
||||
.gpa = gpa,
|
||||
.arena = arena_allocator.allocator(),
|
||||
.arena = arena,
|
||||
.tokenizer = Tokenizer.init(source),
|
||||
.token = .{
|
||||
.tag = .invalid,
|
||||
|
|
|
|||
982
src/Parse.zig
982
src/Parse.zig
|
|
@ -53,6 +53,16 @@ pub const ScanContext = struct {
|
|||
source_offset: usize,
|
||||
};
|
||||
|
||||
/// Operator binding power for Pratt-style infix parsing.
/// Declaration order matters: later tags bind tighter, and
/// `parseInfixExpr` compares levels numerically via `@intFromEnum`.
const Precedence = enum {
    none,
    assign,
    logical_or,
    logical_and,
    comparison,
    term,
    factor,
};
|
||||
|
||||
pub fn deinit(p: *Parse) void {
|
||||
p.scratch.deinit(p.gpa);
|
||||
p.grammar_stack.deinit(p.gpa);
|
||||
|
|
@ -62,6 +72,974 @@ pub fn deinit(p: *Parse) void {
|
|||
p.* = undefined;
|
||||
}
|
||||
|
||||
pub fn parseFile(_: *Parse) Error!*Ast.Node {
|
||||
return error.OutOfMemory;
|
||||
/// Build a statement context that snapshots the current depths of the
/// block, choice, and scratch stacks, so later collection passes know
/// where "this statement's" entries begin.
fn makeStmtContext(p: *Parse, tag: StmtContext.Tag, node: ?*Ast.Node) StmtContext {
    return .{
        .tag = tag,
        .expression_node = node,
        .blocks_top = p.block_stack.items.len,
        .choices_top = p.choice_stack.items.len,
        .scratch_top = p.scratch.items.len,
    };
}
|
||||
|
||||
/// Map a prefix-operator token to its unary AST node tag,
/// or `.invalid` when the token is not a prefix operator.
fn getTokenPrefixType(tag: Token.Tag) Ast.Node.Tag {
    return switch (tag) {
        .exclaimation_mark, .keyword_not => .logical_not_expr,
        .minus => .negate_expr,
        else => .invalid,
    };
}

/// Map a binary-operator token to its AST node tag,
/// or `.invalid` when the token is not an infix operator.
fn getTokenInfixType(tag: Token.Tag) Ast.Node.Tag {
    return switch (tag) {
        // Arithmetic.
        .plus => .add_expr,
        .minus => .subtract_expr,
        .star => .multiply_expr,
        .slash => .divide_expr,
        .percentage, .keyword_mod => .mod_expr,
        // .question_mark => .contains_expr,
        // Assignment.
        .equal => .assign_stmt,
        // Logical connectives.
        .ampersand_ampersand, .keyword_and => .logical_and_expr,
        .pipe_pipe, .keyword_or => .logical_or_expr,
        // Comparisons.
        .equal_equal => .logical_equality_expr,
        .not_equal => .logical_inequality_expr,
        .less_than => .logical_lesser_expr,
        .greater_than => .logical_greater_expr,
        .less_than_equal => .logical_lesser_or_equal_expr,
        .greater_than_equal => .logical_greater_or_equal_expr,
        else => .invalid,
    };
}

/// Infix binding power of a token; `.none` for non-operators.
fn getBindingPower(tag: Token.Tag) Precedence {
    return switch (tag) {
        .equal => .assign,
        .pipe_pipe, .keyword_or => .logical_or,
        .ampersand_ampersand, .keyword_and => .logical_and,
        .equal_equal,
        .not_equal,
        .less_than,
        .less_than_equal,
        .greater_than,
        .greater_than_equal,
        .question_mark,
        => .comparison,
        .plus, .minus => .term,
        .star, .slash, .percentage, .keyword_mod => .factor,
        else => .none,
    };
}

/// Map a choice-marker token (`*` or `+`) to its branch statement tag.
fn getBranchTag(tag: Token.Tag) Ast.Node.Tag {
    return switch (tag) {
        .star => .choice_star_stmt,
        .plus => .choice_plus_stmt,
        else => .invalid,
    };
}
|
||||
|
||||
/// Record a parse error for `token` and enter panic mode.
/// While already panicking, further failures are suppressed so only the
/// first error of a cascade is reported. Always returns `error.ParseError`.
fn fail(p: *Parse, tag: Ast.Error.Tag, token: Token) error{ ParseError, OutOfMemory } {
    if (p.panic_mode) return error.ParseError;
    p.panic_mode = true;

    try p.errors.append(p.gpa, .{
        .tag = tag,
        .loc = .{ .start = token.loc.start, .end = token.loc.end },
    });
    return error.ParseError;
}
|
||||
|
||||
/// True when the lookahead token has tag `tag`.
fn checkToken(p: *const Parse, tag: Token.Tag) bool {
    return p.token.tag == tag;
}

/// True when the lookahead token's tag appears in `tag_set`.
/// End-of-file is treated as a member of every set so scanning loops
/// always terminate at the end of input.
fn checkTokenInSet(p: *const Parse, tag_set: []const Token.Tag) bool {
    if (p.checkToken(.eof)) return true;
    return std.mem.indexOfScalar(Token.Tag, tag_set, p.token.tag) != null;
}

/// Advance the tokenizer one token under the innermost grammar mode and
/// return the token that was current before the advance.
fn nextToken(p: *Parse) Token {
    assert(p.grammar_stack.items.len > 0);

    const current = p.token;
    p.token = p.tokenizer.next(p.grammar_stack.getLast().grammar);
    return current;
}

/// Consume the current token only when it matches `tag`; otherwise no-op.
fn eatToken(p: *Parse, tag: Token.Tag) void {
    if (p.checkToken(tag)) _ = p.nextToken();
}

/// Consume a run of consecutive `tag` tokens, optionally skipping
/// whitespace between them, and return how many were consumed.
fn eatTokenLooped(p: *Parse, tag: Token.Tag, ignore_whitespace: bool) usize {
    var count: usize = 0;
    while (p.checkToken(tag)) : (count += 1) {
        _ = p.nextToken();
        if (ignore_whitespace) p.eatToken(.whitespace);
    }
    return count;
}
|
||||
|
||||
/// Require the current token to have tag `tag` (optionally skipping a
/// leading whitespace token first); consume and return it on success,
/// report `.unexpected_token` and raise `error.ParseError` otherwise.
fn expectToken(p: *Parse, tag: Token.Tag, skip_whitespace: bool) Error!Token {
    if (skip_whitespace) p.eatToken(.whitespace);
    if (!p.checkToken(tag)) {
        // NOTE: a leftover `std.debug.print` that dumped the expected/actual
        // tags to stderr was removed here; the mismatch is already recorded
        // as a proper diagnostic via `fail`.
        return p.fail(.unexpected_token, p.token);
    }
    return p.nextToken();
}

/// Require the current token to terminate a line (`.newline` or `.eof`).
/// The token is returned but NOT consumed; on failure an
/// `.expected_newline` error is recorded.
fn expectNewline(p: *Parse) Error!Token {
    if (!p.checkToken(.eof) and !p.checkToken(.newline)) {
        return p.fail(.expected_newline, p.token);
    }
    return p.token;
}
|
||||
|
||||
/// Error recovery: leave panic mode and skip tokens until a statement
/// boundary (`newline`, `}`, `)`, or end of input) so parsing can resume.
fn synchronize(p: *Parse) void {
    p.panic_mode = false;

    while (true) switch (p.token.tag) {
        .eof, .newline, .right_brace, .right_paren => return,
        else => _ = p.nextToken(),
    };
}
|
||||
|
||||
/// Enter a tokenizer grammar mode, remembering the source position at
/// which it became active (used by `rewindGrammar`).
fn pushGrammar(p: *Parse, grammar: Tokenizer.Grammar) error{OutOfMemory}!void {
    try p.grammar_stack.append(p.gpa, .{
        .grammar = grammar,
        .source_offset = p.tokenizer.index,
    });
}

/// Leave the innermost grammar mode. Popping an empty stack is a
/// programming error.
fn popGrammar(p: *Parse) void {
    _ = p.grammar_stack.pop() orelse
        @panic("BUG: Grammar mode stack popped when empty!");
}

/// Move the tokenizer back to where the innermost grammar mode started,
/// so its tokens can be re-scanned.
fn rewindGrammar(p: *Parse) void {
    const offset = p.grammar_stack.getLast().source_offset;
    assert(offset <= p.tokenizer.index);
    p.tokenizer.index = offset;
}
|
||||
|
||||
/// True when no nodes have been pushed to scratch since `context` was made.
fn isScratchEmpty(p: *Parse, context: *const StmtContext) bool {
    return p.scratch.items.len == context.scratch_top;
}

/// Return (without removing) the most recently pushed scratch node
/// belonging to `context`.
fn peekScratch(p: *Parse, context: *const StmtContext) *Ast.Node {
    assert(context.scratch_top < p.scratch.items.len);
    return p.scratch.getLast();
}

/// Remove and return the most recently pushed scratch node belonging to
/// `context`.
fn popScratch(p: *Parse, context: *const StmtContext) *Ast.Node {
    assert(context.scratch_top < p.scratch.items.len);
    return p.scratch.pop() orelse
        @panic("BUG: Scratch buffer popped when empty!");
}
|
||||
|
||||
/// Move the scratch entries in `[start_offset, end_offset)` into a fresh
/// arena-allocated slice, shrinking scratch back to `start_offset`.
/// Returns null for an empty range. The returned slice lives in the
/// parse arena; callers must not free it individually.
fn nodeListFromScratch(p: *Parse, start_offset: usize, end_offset: usize) Error!?[]*Ast.Node {
    if (start_offset >= end_offset) return null;

    const span = end_offset - start_offset;
    assert(span > 0);
    const list = try p.arena.alloc(*Ast.Node, span);
    // Shrink even if a later step fails; the entries are now owned by `list`.
    defer p.scratch.shrinkRetainingCapacity(start_offset);

    // Bulk copy replaces the original hand-rolled index loop.
    @memcpy(list, p.scratch.items[start_offset..end_offset]);

    return list;
}
|
||||
|
||||
/// Create a list node with tag `tag` spanning `[bytes_start, bytes_end)`,
/// whose children are the scratch entries pushed at or after
/// `scratch_offset` (or no children when scratch is empty for `context`).
fn makeNodeSequence(
    p: *Parse,
    context: *const StmtContext,
    tag: Ast.Node.Tag,
    bytes_start: usize,
    bytes_end: usize,
    scratch_offset: usize,
) Error!*Ast.Node {
    const list: ?[]*Ast.Node = if (p.isScratchEmpty(context))
        null
    else
        try p.nodeListFromScratch(scratch_offset, p.scratch.items.len);

    return Ast.createListNode(p.arena, tag, bytes_start, bytes_end, list);
}
|
||||
|
||||
/// True when `context` has pushed no block states.
fn isBlockStackEmpty(p: *Parse, context: *const StmtContext) bool {
    return p.block_stack.items.len == context.blocks_top;
}

/// Return (without removing) the innermost block state of `context`.
fn peekBlockStack(p: *Parse, context: *const StmtContext) State {
    assert(context.blocks_top < p.block_stack.items.len);
    return p.block_stack.getLast();
}

/// Remove and return the innermost block state of `context`.
fn popBlockStack(p: *Parse, context: *const StmtContext) State {
    assert(context.blocks_top < p.block_stack.items.len);
    return p.block_stack.pop() orelse
        @panic("Bug: Block stack popped when empty!");
}

/// True when `context` has pushed no choice states.
fn isChoiceStackEmpty(p: *Parse, context: *const StmtContext) bool {
    return p.choice_stack.items.len == context.choices_top;
}

/// Return (without removing) the innermost choice state of `context`.
fn peekChoiceStack(p: *Parse, context: *const StmtContext) State {
    assert(context.choices_top < p.choice_stack.items.len);
    return p.choice_stack.getLast();
}

/// Remove and return the innermost choice state of `context`.
fn popChoiceStack(p: *Parse, context: *const StmtContext) State {
    assert(context.choices_top < p.choice_stack.items.len);
    return p.choice_stack.pop() orelse
        @panic("Bug: Choice stack popped when empty!");
}
|
||||
|
||||
/// Attach a freshly collected block to its owner, if any.
///
/// When the newest scratch node is a branch-like statement (choice
/// branch, switch case, if/else branch), the block becomes that
/// statement's body (`data.bin.rhs`) and the combined statement is
/// popped and returned. Otherwise the block stands on its own and
/// `context.is_block_created` is flagged.
fn fixupBlock(p: *Parse, context: *StmtContext, node: *Ast.Node) !*Ast.Node {
    if (!p.isScratchEmpty(context)) {
        const stmt = p.peekScratch(context);
        switch (stmt.tag) {
            .choice_star_stmt,
            .choice_plus_stmt,
            .switch_case,
            .if_branch,
            .else_branch,
            => {
                // The block is the body of the pending branch statement.
                stmt.data.bin.rhs = node;
                return p.popScratch(context);
            },
            else => {},
        }
    }

    context.is_block_created = true;
    return node;
}
|
||||
|
||||
/// Pop the innermost block state at or above `level` and fold its
/// scratch nodes into a single `.block_stmt` node, wiring it to a
/// pending branch via `fixupBlock`. Returns null when there is no block
/// for `context` or the innermost block is shallower than `level`.
fn collectBlock(p: *Parse, context: *StmtContext, level: usize) Error!?*Ast.Node {
    if (p.isBlockStackEmpty(context)) return null;

    const block = p.peekBlockStack(context);
    if (block.level < level) return null;

    const bytes_start = block.source_offset;
    var bytes_end: usize = 0;

    // The block ends where its last collected statement ends; an empty
    // block collapses to a zero-length span.
    if (!p.isScratchEmpty(context)) {
        const last = p.peekScratch(context);
        bytes_end = last.source_end;
    } else {
        bytes_end = bytes_start;
    }

    var node = try p.makeNodeSequence(
        context,
        .block_stmt,
        bytes_start,
        bytes_end,
        block.scratch_offset,
    );
    node = try p.fixupBlock(context, node);
    _ = p.popBlockStack(context);
    return node;
}
|
||||
|
||||
/// Unwind choice states deeper than `level`, folding each into a
/// `.choice_stmt` node, then (unless gathering) collect the enclosing
/// block. With `should_gather` set, the newest folded statement is
/// popped and returned instead so a gather point can absorb it.
fn collectContext(
    p: *Parse,
    context: *StmtContext,
    level: usize,
    should_gather: bool,
) Error!?*Ast.Node {
    // The level of the current choice should always be greater than the
    // level for the current block. Choice statements must have non-zero
    // levels, while blocks can have levels greater than or equal to zero.
    //
    // Choice statement levels need not follow a sequentially increasing order.
    // When collecting choice branches, statements with levels less than the
    // previous statement will be included in the same enclosing choice if no
    // previous levels exist.
    while (!p.isChoiceStackEmpty(context)) {
        assert(!p.isBlockStackEmpty(context));
        const choice_state = p.peekChoiceStack(context);
        // Stop once the remaining choices are at or above the target level.
        if (choice_state.level <= level) break;

        _ = p.popChoiceStack(context);
        if (!p.isBlockStackEmpty(context)) {
            const block_state = p.peekBlockStack(context);
            if (choice_state.level <= block_state.level) {
                // The popped choice closed over a block; fold it first.
                const node = try p.collectBlock(context, block_state.level);
                if (node) |n| try p.scratch.append(p.gpa, n);
            }
        }
        if (!should_gather) {
            // Re-anchor the target level onto the popped choice's offsets so
            // out-of-order (non-sequential) levels join the same choice.
            if (!p.isChoiceStackEmpty(context)) {
                const prev_choice = p.peekChoiceStack(context);
                if (level > prev_choice.level) {
                    try p.choice_stack.append(p.gpa, .{
                        .level = level,
                        .scratch_offset = choice_state.scratch_offset,
                        .source_offset = choice_state.source_offset,
                    });
                    break;
                }
            } else if (level > 0) {
                try p.choice_stack.append(p.gpa, .{
                    .level = level,
                    .scratch_offset = choice_state.scratch_offset,
                    .source_offset = choice_state.source_offset,
                });
                break;
            }
        }

        // Fold everything this choice accumulated into one choice node.
        const node = try p.makeNodeSequence(
            context,
            .choice_stmt,
            choice_state.source_offset,
            p.token.loc.start,
            choice_state.scratch_offset,
        );
        try p.scratch.append(p.gpa, node);
    }
    if (!should_gather) return p.collectBlock(context, level);
    if (!p.isScratchEmpty(context)) return p.popScratch(context);
    return null;
}
|
||||
|
||||
/// Collapse the current context down to level zero and, if the newest
/// scratch node is a stitch/function prototype, wrap the collected body
/// into the corresponding declaration node. Otherwise the collected
/// body (possibly null) is returned unchanged.
fn collectStitch(p: *Parse, context: *StmtContext) Error!?*Ast.Node {
    const node = try p.collectContext(context, 0, false);
    if (p.isScratchEmpty(context)) return node;

    const proto = p.peekScratch(context);
    const tag: Ast.Node.Tag = switch (proto.tag) {
        .stitch_prototype => .stitch_decl,
        .function_prototype => .function_decl,
        // Not a prototype: nothing to wrap.
        else => return node,
    };
    const span_start = proto.source_start;
    // A body-less declaration spans only its prototype.
    const span_end = if (node) |n| n.source_end else proto.source_end;
    _ = p.popScratch(context);
    return Ast.createBinaryNode(p.arena, tag, span_start, span_end, proto, node);
}
|
||||
|
||||
/// Finish the knot that began at `p.knot_offset`: collect any trailing
/// stitch, then fold every scratch node after the knot prototype into a
/// `.knot_decl`. Returns null when there is nothing to collect or the
/// node at `knot_offset` is not a knot prototype.
fn collectKnot(p: *Parse, context: *StmtContext) Error!?*Ast.Node {
    if (p.isScratchEmpty(context)) return null;

    const child = try p.collectStitch(context);
    if (child) |n| try p.scratch.append(p.gpa, n);

    const proto = p.scratch.items[p.knot_offset];
    if (proto.tag != .knot_prototype) return null;

    // Children are everything pushed after the prototype itself.
    const list = try p.nodeListFromScratch(p.knot_offset + 1, p.scratch.items.len);
    // Pop the prototype once the declaration node owns it.
    defer _ = p.popScratch(context);

    const bytes_start = proto.source_start;
    const bytes_end = if (child) |n| n.source_end else proto.source_end;

    return Ast.createKnotDeclNode(p.arena, .knot_decl, bytes_start, bytes_end, proto, list);
}
|
||||
|
||||
/// Update the block/choice stacks after parsing a choice branch (`*`/`+`)
/// at `context.level`.
///
/// Deeper branches push new states; a same-level branch folds the
/// previous branch's block; a shallower branch unwinds the deeper
/// choices via `collectContext`.
fn handleChoiceBranch(p: *Parse, context: *StmtContext, node: *Ast.Node) !void {
    const level = context.level;
    if (p.isBlockStackEmpty(context)) {
        // Always start with a block level of zero.
        try p.block_stack.append(p.gpa, .{
            .level = 0,
            .scratch_offset = p.scratch.items.len,
            .source_offset = node.source_start,
        });
    }
    if (p.isChoiceStackEmpty(context)) {
        // First branch: open the choice at this branch's level.
        try p.choice_stack.append(p.gpa, .{
            .level = level,
            .scratch_offset = p.scratch.items.len,
            .source_offset = node.source_start,
        });
    } else {
        const choice_state = p.peekChoiceStack(context);
        const block_state = p.peekBlockStack(context);

        if (level > choice_state.level) {
            // Going deeper: the enclosing choice needs its own block
            // before the nested choice opens.
            if (block_state.level < choice_state.level) {
                try p.block_stack.append(p.gpa, .{
                    .level = choice_state.level,
                    .scratch_offset = p.scratch.items.len,
                    .source_offset = p.token.loc.start,
                });
            }

            try p.choice_stack.append(p.gpa, .{
                .level = level,
                .scratch_offset = p.scratch.items.len,
                .source_offset = node.source_start,
            });
        } else if (level == choice_state.level) {
            // Sibling branch: close the previous branch's block.
            const t_node = try p.collectBlock(context, level);
            if (t_node) |n| try p.scratch.append(p.gpa, n);
        } else {
            // Shallower branch: unwind deeper choices first.
            const t_node = try p.collectContext(context, level, false);
            if (t_node) |n| try p.scratch.append(p.gpa, n);
        }
    }
}
|
||||
|
||||
/// Update the stacks after parsing a gather point (`-`) at
/// `context.level`, possibly rewriting `node` in place.
///
/// A gather deeper than the open choice just ensures that choice has a
/// block; otherwise the choices above `level` are collected and, when
/// the result is a choice statement, it is fused with the gather into a
/// `.gathered_stmt` (choice on the lhs, gather point on the rhs).
fn handleGatherPoint(p: *Parse, context: *StmtContext, node: **Ast.Node) !void {
    const token = p.token;
    // NOTE(review): assumes `level >= 1` here (the `-` marker was consumed at
    // least once), so `level - 1` below cannot underflow — confirm.
    const level = context.level;

    if (p.isBlockStackEmpty(context)) {
        assert(p.isChoiceStackEmpty(context));

        try p.block_stack.append(p.gpa, .{
            .level = 0,
            .scratch_offset = p.scratch.items.len,
            .source_offset = node.*.source_start,
        });
    }
    // Gather points terminate compound statements at the appropriate level.
    if (!p.isChoiceStackEmpty(context)) {
        const choice_state = p.peekChoiceStack(context);
        const block_state = p.peekBlockStack(context);

        if (level > choice_state.level) {
            if (block_state.level != choice_state.level) {
                try p.block_stack.append(p.gpa, .{
                    .level = choice_state.level,
                    .scratch_offset = p.scratch.items.len,
                    .source_offset = node.*.source_start,
                });
            }
        } else if (!p.isScratchEmpty(context)) {
            // Was `@panic("FUCK!")`; replaced with a descriptive message.
            const tmp = (try p.collectContext(context, level - 1, true)) orelse
                @panic("BUG: gather point found no statement to collect!");
            if (tmp.tag == .choice_stmt) {
                // Fuse the collected choice with this gather point.
                node.* = try Ast.createBinaryNode(
                    p.arena,
                    .gathered_stmt,
                    tmp.source_start,
                    token.loc.start,
                    tmp,
                    node.*,
                );
            }
            if (!p.isBlockStackEmpty(context)) {
                const b = p.peekBlockStack(context);
                if (b.level == level) {
                    const tmp_2 = try p.collectBlock(context, level);
                    if (tmp_2) |n| try p.scratch.append(p.gpa, n);
                }
            }
        }
    }
}
|
||||
|
||||
/// Update the stacks after an ordinary (non-branch) statement.
/// Ensures a root-level block exists, and when a choice is open without
/// a body block yet, opens one at the choice's level so this statement
/// lands inside the branch body.
fn handleContentStmt(p: *Parse, context: *StmtContext, node: *Ast.Node) !void {
    if (p.isBlockStackEmpty(context)) {
        try p.block_stack.append(p.gpa, .{
            .level = 0,
            .scratch_offset = p.scratch.items.len,
            .source_offset = node.source_start,
        });
    }
    if (!p.isChoiceStackEmpty(context)) {
        const block_state = p.peekBlockStack(context);
        const choice_state = p.peekChoiceStack(context);

        if (block_state.level != choice_state.level) {
            // Open the branch-body block lazily on its first statement.
            try p.block_stack.append(p.gpa, .{
                .level = choice_state.level,
                .scratch_offset = p.scratch.items.len,
                .source_offset = node.source_start,
            });
        }
    }
}
|
||||
|
||||
/// Close out any open context before a conditional branch begins.
fn handleConditionalBranch(p: *Parse, context: *StmtContext) Error!void {
    if (try p.collectContext(context, 0, false)) |n| {
        try p.scratch.append(p.gpa, n);
    }
}

/// Finish the previous knot (if any) and mark where the next one starts.
fn handleKnotDecl(p: *Parse, context: *StmtContext) !void {
    if (try p.collectKnot(context)) |n| {
        try p.scratch.append(p.gpa, n);
    }

    p.knot_offset = p.scratch.items.len;
}

/// Finish the previous stitch (if any).
fn handleStitchDecl(p: *Parse, context: *StmtContext) !void {
    if (try p.collectStitch(context)) |n| {
        try p.scratch.append(p.gpa, n);
    }
}

/// Function declarations are collected exactly like stitches.
fn handleFunctionDecl(p: *Parse, context: *StmtContext) !void {
    return p.handleStitchDecl(context);
}
|
||||
|
||||
/// Parse an expression, reporting `.expected_expression` at the current
/// token when none is present.
fn expectExpr(p: *Parse) Error!*Ast.Node {
    const start_token = p.token;
    return (try parseInfixExpr(p, null, .none)) orelse
        p.fail(.expected_expression, start_token);
}
|
||||
|
||||
/// Consume the current token and wrap it in a leaf node with tag `tag`.
fn parseAtom(p: *Parse, tag: Ast.Node.Tag) Error!*Ast.Node {
    const atom_token = p.nextToken();
    return Ast.createLeafNode(p.arena, tag, atom_token.loc.start, atom_token.loc.end);
}

/// Consume an identifier token into an `.identifier` leaf node.
fn parseIdentifier(p: *Parse) Error!*Ast.Node {
    return parseAtom(p, .identifier);
}

/// Like `parseIdentifier`, but reports `.expected_identifier` when the
/// current token is not an identifier.
fn expectIdentifier(p: *Parse) Error!*Ast.Node {
    if (!p.checkToken(.identifier)) return p.fail(.expected_identifier, p.token);
    return parseIdentifier(p);
}
|
||||
|
||||
/// Parse a primary expression: literal, identifier/call/selector chain,
/// quoted string, or parenthesized sub-expression. Returns null when
/// the current token cannot start a primary expression.
fn parsePrimaryExpr(p: *Parse) Error!?*Ast.Node {
    return switch (p.token.tag) {
        .number_literal => try parseAtom(p, .number_literal),
        .keyword_true => try parseAtom(p, .true_literal),
        .keyword_false => try parseAtom(p, .false_literal),
        .identifier => try parseIdentifierExpr(p),
        .double_quote => try parseStringExpr(p),
        .left_paren => blk: {
            _ = p.nextToken();

            const inner = (try parseInfixExpr(p, null, .none)) orelse
                break :blk null;

            _ = try p.expectToken(.right_paren, true);
            break :blk inner;
        },
        else => null,
    };
}
|
||||
|
||||
/// Parse a (possibly nested) prefix expression (`not`/`!`/unary `-`),
/// falling through to `parsePrimaryExpr` otherwise. Returns null when
/// the operand is missing.
fn parsePrefixExpr(p: *Parse) Error!?*Ast.Node {
    switch (p.token.tag) {
        .keyword_not, .minus, .exclaimation_mark => {
            const op_token = p.nextToken();
            // Prefix operators are right-associative: recurse for the operand.
            // `orelse return null` also removes the old dead `else` arm that
            // computed bytes_end from p.token after lhs was proven non-null.
            const operand = (try parsePrefixExpr(p)) orelse return null;

            const tag = getTokenPrefixType(op_token.tag);
            // Unary nodes store their operand in the lhs slot; rhs is unused.
            return Ast.createBinaryNode(
                p.arena,
                tag,
                op_token.loc.start,
                operand.source_end,
                operand,
                null,
            );
        },
        else => return parsePrimaryExpr(p),
    }
}
|
||||
|
||||
/// Pratt-style precedence-climbing parser for binary expressions.
/// `prev_node`, when given, is used as the already-parsed left operand;
/// only operators binding tighter than `precedence` are consumed.
/// Returns null when no expression (or a right operand) is present.
fn parseInfixExpr(
    p: *Parse,
    prev_node: ?*Ast.Node,
    precedence: Precedence,
) Error!?*Ast.Node {
    var lhs = prev_node;
    if (lhs == null) {
        lhs = try parsePrefixExpr(p);
        if (lhs == null) return lhs;
    }
    while (true) {
        var next_node: ?*Ast.Node = null;
        const token = p.token;
        const token_precedence = getBindingPower(token.tag);

        // Consume only operators that bind tighter than the caller's level.
        if (@intFromEnum(token_precedence) > @intFromEnum(precedence)) {
            _ = p.nextToken();
            next_node = try parseInfixExpr(p, null, token_precedence);
            if (next_node) |rhs| {
                const tag = getTokenInfixType(token.tag);
                const bytes_start = if (lhs) |n| n.source_start else token.loc.start;
                const bytes_end = rhs.source_end;

                // Fold into the new operator node and continue at this level.
                lhs = try Ast.createBinaryNode(p.arena, tag, bytes_start, bytes_end, lhs, rhs);
            } else return null;
        } else break;
    }
    return lhs;
}
|
||||
|
||||
/// Parse a full expression starting at the lowest binding power.
fn parseExpression(p: *Parse) Error!?*Ast.Node {
    return p.parseInfixExpr(null, .none);
}
|
||||
|
||||
/// Parse a double-quoted string expression. The inner `.string_literal`
/// leaf spans the bytes between the quotes; the wrapping `.string_expr`
/// spans from the opening quote up to the token after the closing one.
fn parseStringExpr(p: *Parse) Error!*Ast.Node {
    assert(p.token.tag == .double_quote);
    const open_token = p.nextToken();

    // Skip the raw contents; an unterminated string stops at end of line.
    while (p.token.tag != .double_quote and
        p.token.tag != .newline and
        p.token.tag != .eof)
    {
        _ = p.nextToken();
    }

    const close_token = try p.expectToken(.double_quote, true);
    const literal = try Ast.createLeafNode(
        p.arena,
        .string_literal,
        open_token.loc.end,
        close_token.loc.start,
    );
    return Ast.createBinaryNode(
        p.arena,
        .string_expr,
        open_token.loc.start,
        p.token.loc.start,
        literal,
        null,
    );
}
|
||||
|
||||
/// Consume raw content bytes until a token from `token_set` (or eof) is
/// reached, producing a `.string_literal` leaf — or `.empty_string`
/// when no bytes were consumed.
fn parseContentString(p: *Parse, token_set: []const Token.Tag) Error!?*Ast.Node {
    const first_token = p.token;
    while (!p.checkTokenInSet(token_set)) _ = p.nextToken();

    const span_start = first_token.loc.start;
    const span_end = p.token.loc.start;
    return Ast.createLeafNode(
        p.arena,
        if (span_start == span_end) .empty_string else .string_literal,
        span_start,
        span_end,
    );
}
|
||||
|
||||
/// Parse an expression statement terminated by a newline. `lhs`, when
/// given, is an expression already parsed by the caller and becomes the
/// left operand of the infix parse.
fn parseExprStmt(p: *Parse, lhs: ?*Ast.Node) Error!*Ast.Node {
    const start_token = p.token;
    const expr = try parseInfixExpr(p, lhs, .none);
    _ = try p.expectNewline();

    const span_start = if (lhs) |n| n.source_start else start_token.loc.start;
    return Ast.createBinaryNode(p.arena, .expr_stmt, span_start, p.token.loc.start, expr, null);
}
|
||||
|
||||
/// Parse `target = value\n`. When no `=` follows the identifier
/// expression, the statement degrades to a plain expression statement
/// with the identifier as its left operand.
fn parseAssignStmt(p: *Parse) Error!*Ast.Node {
    const start_token = p.token;
    const target = try parseIdentifierExpr(p);

    if (!p.checkToken(.equal)) return parseExprStmt(p, target);
    _ = p.nextToken();

    const value = try p.expectExpr();
    _ = try p.expectNewline();

    return Ast.createBinaryNode(
        p.arena,
        .assign_stmt,
        start_token.loc.start,
        p.token.loc.start,
        target,
        value,
    );
}
|
||||
|
||||
/// Parse `temp name = value\n` into a `.temp_decl` node.
fn parseTempDecl(p: *Parse) Error!*Ast.Node {
    const keyword = p.nextToken();
    const name = try p.expectIdentifier();
    _ = try p.expectToken(.equal, true);
    const value = try p.expectExpr();
    _ = try p.expectNewline();

    return Ast.createBinaryNode(
        p.arena,
        .temp_decl,
        keyword.loc.start,
        p.token.loc.start,
        name,
        value,
    );
}
|
||||
|
||||
/// Parse a `~` logic line. The tokenizer is switched into expression
/// grammar for the duration; what follows the tilde decides the
/// statement form.
fn parseTildeStmt(p: *Parse) Error!*Ast.Node {
    try p.pushGrammar(.expression);
    defer p.popGrammar();

    _ = p.nextToken(); // consume '~'
    return switch (p.token.tag) {
        .keyword_temp => parseTempDecl(p),
        .keyword_return => parseReturnStmt(p),
        .identifier => parseAssignStmt(p),
        else => parseExprStmt(p, null),
    };
}
|
||||
|
||||
/// Parse `return [expr]\n` into a `.return_stmt` node; the value slot
/// is null for a bare return.
fn parseReturnStmt(p: *Parse) Error!*Ast.Node {
    const keyword = p.nextToken();

    const value: ?*Ast.Node = if (p.checkToken(.newline) or p.checkToken(.eof))
        null
    else
        try parseInfixExpr(p, null, .none);
    _ = try p.expectNewline();

    return Ast.createBinaryNode(
        p.arena,
        .return_stmt,
        keyword.loc.start,
        p.token.loc.start,
        value,
        null,
    );
}
|
||||
|
||||
/// Parse an identifier followed by any number of `.field` selector
/// segments, optionally terminated by a `(...)` call. Selector chains
/// fold left into nested `.selector_expr` nodes; a call returns a
/// `.call_expr` immediately (no postfix after the argument list).
fn parseIdentifierExpr(p: *Parse) Error!*Ast.Node {
    var lhs = try p.expectIdentifier();

    while (true) {
        switch (p.token.tag) {
            .dot => {
                _ = p.nextToken();
                const rhs = try p.expectIdentifier();
                const bytes_start = lhs.source_start;
                const bytes_end = p.token.loc.start;

                // Fold `a.b.c` as `((a.b).c)`.
                lhs = try Ast.createBinaryNode(
                    p.arena,
                    .selector_expr,
                    bytes_start,
                    bytes_end,
                    lhs,
                    rhs,
                );
            },
            .left_paren => {
                const rhs = try parseArgumentList(p);
                const bytes_start = lhs.source_start;
                const bytes_end = p.token.loc.start;

                return Ast.createBinaryNode(
                    p.arena,
                    .call_expr,
                    bytes_start,
                    bytes_end,
                    lhs,
                    rhs,
                );
            },
            else => return lhs,
        }
    }
}
|
||||
/// Parse a run of content pieces (plain text, and eventually inline
/// braces/diverts/glue) until a terminator from `token_set`, folding
/// the pieces into a single `.content` list node.
fn parseContentExpr(p: *Parse, token_set: []const Token.Tag) Error!?*Ast.Node {
    const main_token = p.token;
    const context = makeStmtContext(p, .block, null);

    while (true) {
        var node: ?*Ast.Node = null;
        if (!p.checkTokenInSet(token_set)) {
            // Not at a terminator: consume raw text.
            node = try parseContentString(p, token_set);
        } else switch (p.token.tag) {
            .eof, .newline, .right_brace => break,
            // .left_brace => node = try parseLbraceExpr(p),
            // .right_arrow => node = try parseDivertStmt(p),
            //.INK_TT_GLUE => node = ink_parse_glue(p),
            else => {
                // A terminator we do not handle yet.
                return p.fail(.unexpected_token, p.token);
            },
        }
        if (node) |n| try p.scratch.append(p.gpa, n);
    }

    const span_start = main_token.loc.start;
    const span_end = p.token.loc.start;
    return p.makeNodeSequence(&context, .content, span_start, span_end, context.scratch_top);
}
|
||||
|
||||
/// Parse a parenthesized, comma-separated argument list into an
/// `.argument_list` node. Fails with `.too_many_arguments` once
/// `p.max_argument_count` arguments have been seen.
fn parseArgumentList(p: *Parse) Error!?*Ast.Node {
    const context = p.makeStmtContext(.block, null);
    const token = try p.expectToken(.left_paren, true);

    if (!p.checkToken(.right_paren)) {
        var cnt: usize = 0;
        while (true) : (cnt += 1) {
            if (cnt == p.max_argument_count) {
                return p.fail(.too_many_arguments, p.token);
            }

            const node = try parseInfixExpr(p, null, .none);
            if (node) |n| try p.scratch.append(p.gpa, n);
            // A trailing comma continues the list; anything else ends it.
            if (p.checkToken(.comma)) {
                _ = p.nextToken();
            } else break;
        }
    }

    _ = try p.expectToken(.right_paren, false);
    const bytes_start = token.loc.start;
    const bytes_end = p.token.loc.start;
    return p.makeNodeSequence(&context, .argument_list, bytes_start, bytes_end, context.scratch_top);
}
|
||||
|
||||
/// Parse one full content line into a `.content_stmt` node, requiring
/// the line to end at a newline or end of input.
fn parseContentStmt(p: *Parse) Error!*Ast.Node {
    const stop_set = [_]Token.Tag{
        .left_brace, .right_brace, .right_arrow,
        .glue,       .newline,     .eof,
    };
    const first_token = p.token;
    const content = try parseContentExpr(p, &stop_set);
    const end_token = try p.expectNewline();

    return Ast.createBinaryNode(
        p.arena,
        .content_stmt,
        first_token.loc.start,
        end_token.loc.start,
        content,
        null,
    );
}
|
||||
|
||||
/// Parse the text of a choice line into a `.choice_expr` node with up
/// to three parts: the shared start text, the bracketed option-only
/// text (`[...]`), and the inner text that only appears once chosen.
/// Empty parts keep the `.empty_string` tag so renderers can skip them.
fn parseChoiceExpr(p: *Parse) Error!?*Ast.Node {
    const token_set = [_]Token.Tag{
        .left_brace, .left_bracket, .right_brace,
        .right_bracket, .right_arrow, .newline,
        .eof,
    };
    const token = p.token;
    var lhs: ?*Ast.Node = null;
    var mhs: ?*Ast.Node = null;
    var rhs: ?*Ast.Node = null;

    // Shared leading text (shown both in the menu and the output).
    lhs = try parseContentString(p, &token_set);
    if (lhs) |n| {
        if (n.tag != .empty_string) {
            n.tag = .choice_start_expr;
        }
    }
    if (p.checkToken(.left_bracket)) {
        _ = p.nextToken();
        p.eatToken(.whitespace);

        // Text inside `[...]`: shown only in the choice menu.
        if (!p.checkToken(.right_bracket)) {
            mhs = try parseContentString(p, &token_set);
            if (mhs) |n| {
                if (n.tag != .empty_string) {
                    n.tag = .choice_option_expr;
                }
            }
        }

        _ = try p.expectToken(.right_bracket, false);
        // Text after `]`: shown only once the choice is taken.
        if (!p.checkTokenInSet(&token_set)) {
            rhs = try parseContentString(p, &token_set);
            if (rhs) |n| {
                if (n.tag != .empty_string) {
                    n.tag = .choice_inner_expr;
                }
            }
        }
    }

    const bytes_start = token.loc.start;
    const bytes_end = p.token.loc.start;
    return Ast.createChoiceExprNode(p.arena, .choice_expr, bytes_start, bytes_end, lhs, mhs, rhs);
}
|
||||
|
||||
/// Parse a choice branch line. The marker token (`*` or `+`) selects
/// the branch tag; the number of repeated markers becomes the branch's
/// nesting level, recorded into `context.level`.
fn parseChoiceStmt(p: *Parse, context: *StmtContext) Error!*Ast.Node {
    const marker = p.token;
    const level = p.eatTokenLooped(marker.tag, true);
    const expr = try parseChoiceExpr(p);
    const span_start = marker.loc.start;
    const span_end = if (expr) |n| n.source_end else p.token.loc.start;

    context.level = level;

    if (p.checkToken(.newline)) _ = p.nextToken();
    return Ast.createBinaryNode(
        p.arena,
        getBranchTag(marker.tag),
        span_start,
        span_end,
        expr,
        null,
    );
}
|
||||
|
||||
/// Parse a gather point (`-` markers). The marker count becomes the
/// gather's level, recorded into `context.level`; the node itself
/// carries no operands.
fn parseGatherPointStmt(p: *Parse, context: *StmtContext) Error!*Ast.Node {
    const marker = p.token;
    const level = p.eatTokenLooped(marker.tag, true);
    const span_start = marker.loc.start;
    const span_end = p.token.loc.start;

    context.level = level;

    return Ast.createBinaryNode(
        p.arena,
        .gather_point_stmt,
        span_start,
        span_end,
        null,
        null,
    );
}
|
||||
|
||||
/// Shared body for `VAR`/`CONST` declarations: parse
/// `<keyword> name = value\n` under expression grammar into a node
/// with tag `tag`.
fn parseVar(p: *Parse, tag: Ast.Node.Tag) Error!*Ast.Node {
    try p.pushGrammar(.expression);
    defer p.popGrammar();

    const keyword = p.nextToken();
    const name = try p.expectIdentifier();
    _ = try p.expectToken(.equal, true);

    const value = try p.expectExpr();
    const end_token = try p.expectNewline();
    return Ast.createBinaryNode(
        p.arena,
        tag,
        keyword.loc.start,
        end_token.loc.start,
        name,
        value,
    );
}

/// Parse a `VAR` declaration line.
fn parseVarDecl(p: *Parse) Error!*Ast.Node {
    assert(p.token.tag == .keyword_var);
    return parseVar(p, .var_decl);
}

/// Parse a `CONST` declaration line.
fn parseConstDecl(p: *Parse) Error!*Ast.Node {
    assert(p.token.tag == .keyword_const);
    return parseVar(p, .const_decl);
}
|
||||
|
||||
/// Parse one statement, dispatching on its leading token, then run the
/// matching post-parse handler that maintains the block/choice stacks.
fn parseStmt(p: *Parse, context: *StmtContext) Error!*Ast.Node {
    p.eatToken(.whitespace);

    var node = switch (p.token.tag) {
        .star, .plus => try parseChoiceStmt(p, context),
        .minus => switch (context.tag) {
            .block => try parseGatherPointStmt(p, context),
            else => unreachable,
            //.conditional => try parseConditionalStmt(p, context),
        },
        .equal, .equal_equal => switch (context.tag) {
            //.block => try parseKnotDecl(p),
            else => try parseContentStmt(p),
        },
        .tilde => try parseTildeStmt(p),
        //.right_arrow => try parseDivertStmt(p),
        .right_brace => {
            // A stray '}' at statement position is always an error.
            const token = p.nextToken();
            return p.fail(.unexpected_token, token);
        },
        .keyword_const => try parseConstDecl(p),
        .keyword_var => try parseVarDecl(p),
        else => try parseContentStmt(p),
    };

    p.eatToken(.newline);
    p.eatToken(.whitespace);

    // Post-processing: keep the block/choice stacks consistent with the
    // statement just parsed. `handleGatherPoint` may replace `node`.
    switch (node.tag) {
        .if_branch, .else_branch, .switch_case => try p.handleConditionalBranch(context),
        .choice_star_stmt => try p.handleChoiceBranch(context, node),
        .choice_plus_stmt => try p.handleChoiceBranch(context, node),
        .gather_point_stmt => try p.handleGatherPoint(context, &node),
        .knot_prototype => try p.handleKnotDecl(context),
        .stitch_prototype => try p.handleStitchDecl(context),
        .function_prototype => try p.handleFunctionDecl(context),
        else => try p.handleContentStmt(context, node),
    }
    return node;
}
|
||||
|
||||
/// Parse the whole source into a `.file` list node.
///
/// Statement-level parse errors trigger recovery (`synchronize`) and
/// parsing continues with the next line; allocation failure propagates
/// to the caller instead of panicking (the previous version crashed the
/// process with `@panic("Out of memory!")`).
pub fn parseFile(p: *Parse) Error!*Ast.Node {
    var context = p.makeStmtContext(.block, null);
    try p.pushGrammar(.content);
    defer p.popGrammar();

    // Prime the lookahead; `token` remembers where the file starts.
    const token = p.nextToken();
    while (!p.checkToken(.eof)) {
        const node = parseStmt(p, &context) catch |err| switch (err) {
            error.ParseError => {
                if (p.panic_mode) p.synchronize();

                p.eatToken(.newline);
                continue;
            },
            // Resource allocation may fail; let the caller decide.
            error.OutOfMemory => return error.OutOfMemory,
        };
        try p.scratch.append(p.gpa, node);
    }

    // Close out any knot still open at end of input.
    const node = try p.collectKnot(&context);
    if (node) |n| try p.scratch.append(p.gpa, n);

    const span_start = token.loc.start;
    const span_end = p.token.loc.end;
    return p.makeNodeSequence(&context, .file, span_start, span_end, context.scratch_top);
}
|
||||
|
|
|
|||
|
|
@ -13,7 +13,11 @@ pub const Story = struct {
|
|||
source_bytes: [:0]const u8,
|
||||
options: LoadOptions,
|
||||
) !Story {
|
||||
var ast = try Ast.parse(gpa, source_bytes, "<STDIN>", 0);
|
||||
var arena_allocator = std.heap.ArenaAllocator.init(gpa);
|
||||
defer arena_allocator.deinit();
|
||||
|
||||
const arena = arena_allocator.allocator();
|
||||
var ast = try Ast.parse(gpa, arena, source_bytes, "<STDIN>", 0);
|
||||
defer ast.deinit(gpa);
|
||||
|
||||
try ast.render(gpa, options.stream_writer, .{
|
||||
|
|
@ -27,6 +31,7 @@ pub const Story = struct {
|
|||
}
|
||||
return .{};
|
||||
}
|
||||
|
||||
pub fn deinit(_: *Story) void {}
|
||||
};
|
||||
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue