//! Parser: consumes tokens from tokenizer.zig and builds an Ast (see Ast.zig).
const std = @import("std");
|
|
const tok = @import("tokenizer.zig");
|
|
const Ast = @import("Ast.zig");
|
|
const assert = std.debug.assert;
|
|
const Token = tok.Token;
|
|
const Tokenizer = tok.Tokenizer;
|
|
const Parse = @This();
|
|
|
|
// General-purpose allocator: owns the transient stacks and error list below.
gpa: std.mem.Allocator,
// Arena allocator: owns all Ast nodes produced by the parse.
arena: std.mem.Allocator,
// Source of tokens; `tokenizer.index` is also read/written for rewinding.
tokenizer: Tokenizer,
// One-token lookahead; advanced by `nextToken`.
token: Token,
// Working buffer of parsed nodes, consumed in LIFO/slice fashion by the
// collect* helpers. Offsets into it are captured in State/StmtContext.
scratch: std.ArrayListUnmanaged(*Ast.Node),
// Stack of lexing grammars (expression vs. content); top decides how the
// next token is scanned.
grammar_stack: std.ArrayListUnmanaged(ScanContext),
// Open block statements awaiting collection (see collectBlock).
block_stack: std.ArrayListUnmanaged(State),
// Open choice statements awaiting collection (see collectContext).
choice_stack: std.ArrayListUnmanaged(State),
// Accumulated diagnostics; populated by `fail`.
errors: std.ArrayListUnmanaged(Ast.Error),
// True after the first error until `synchronize` clears it; suppresses
// cascading diagnostics.
panic_mode: bool,
// NOTE(review): flag bits — meaning not visible in this chunk; confirm at init site.
flags: u32,
// Limits — enforcement sites are outside this chunk; confirm before relying on them.
max_parse_depth: usize,
max_argument_count: usize,
// Scratch index where the current knot's children begin (see collectKnot).
knot_offset: usize = 0,
|
|
|
|
/// Error set shared by all parsing routines: a syntax error (already
/// recorded in `errors` by `fail`) or allocation failure.
pub const Error = error{
    ParseError,
    OutOfMemory,
};
|
|
|
|
/// Snapshot pushed on `block_stack`/`choice_stack` marking where an open
/// compound statement began.
pub const State = struct {
    /// Nesting level (count of leading `*`/`+`/gather markers); blocks may be 0.
    level: usize,
    /// Index into `scratch` where this statement's children start.
    scratch_offset: usize,
    /// Byte offset in the source where this statement began.
    source_offset: usize,
};
|
|
|
|
/// Per-statement-list parsing context. Captures the stack tops at creation so
/// the collect* helpers only ever consume entries pushed within this context.
pub const StmtContext = struct {
    tag: Tag,
    /// Set by `fixupBlock` when a block node could not be attached to a
    /// preceding branch statement (used to pick switch/if/multi-if tags).
    is_block_created: bool = false,
    /// Condition/subject expression for `.conditional` contexts, if any.
    expression_node: ?*Ast.Node = null,
    /// Level of the most recently parsed choice/gather statement.
    level: usize = 0,
    /// `block_stack` length at context creation (floor for pops).
    blocks_top: usize,
    /// `choice_stack` length at context creation (floor for pops).
    choices_top: usize,
    /// `scratch` length at context creation (floor for pops).
    scratch_top: usize,

    pub const Tag = enum {
        block,
        conditional,
    };
};
|
|
|
|
/// Entry on `grammar_stack`: which lexing grammar is active and where in the
/// source it was pushed (so `rewindGrammar` can back up the tokenizer).
pub const ScanContext = struct {
    grammar: Tokenizer.Grammar,
    source_offset: usize,
};
|
|
|
|
/// Binding power for the Pratt expression parser, weakest first. Declaration
/// order matters: `parseInfixExpr` compares levels via `@intFromEnum`.
const Precedence = enum {
    none,
    assign,
    logical_or,
    logical_and,
    comparison,
    term,
    factor,
};
|
|
|
|
/// Releases every gpa-owned container. Ast nodes live in the arena and are
/// not freed here. The parser is unusable afterwards.
pub fn deinit(p: *Parse) void {
    p.errors.deinit(p.gpa);
    p.choice_stack.deinit(p.gpa);
    p.block_stack.deinit(p.gpa);
    p.grammar_stack.deinit(p.gpa);
    p.scratch.deinit(p.gpa);
    p.* = undefined;
}
|
|
|
|
/// Builds a StmtContext that snapshots the current stack tops, so later
/// collect* calls never consume entries pushed before this point.
fn makeStmtContext(p: *Parse, tag: StmtContext.Tag, node: ?*Ast.Node) StmtContext {
    return .{
        .tag = tag,
        .expression_node = node,
        .blocks_top = p.block_stack.items.len,
        .choices_top = p.choice_stack.items.len,
        .scratch_top = p.scratch.items.len,
    };
}
|
|
|
|
/// Maps a prefix-operator token to its unary AST node tag; `.invalid` for
/// anything that is not a prefix operator.
fn getTokenPrefixType(tag: Token.Tag) Ast.Node.Tag {
    return switch (tag) {
        .keyword_not, .exclaimation_mark => .logical_not_expr,
        .minus => .negate_expr,
        else => .invalid,
    };
}
|
|
|
|
/// Maps an infix-operator token to its binary AST node tag; `.invalid` for
/// non-operators. `?` and `=` are handled elsewhere (see commented arms).
fn getTokenInfixType(tag: Token.Tag) Ast.Node.Tag {
    return switch (tag) {
        .percentage, .keyword_mod => .mod_expr,
        .plus => .add_expr,
        .minus => .subtract_expr,
        .star => .multiply_expr,
        .slash => .divide_expr,
        // .question_mark => .contains_expr,
        // .equal => .assign_stmt,
        .ampersand_ampersand, .keyword_and => .logical_and_expr,
        .pipe_pipe, .keyword_or => .logical_or_expr,
        .equal_equal => .logical_equality_expr,
        .not_equal => .logical_inequality_expr,
        .less_than => .logical_lesser_expr,
        .greater_than => .logical_greater_expr,
        .less_than_equal => .logical_lesser_or_equal_expr,
        .greater_than_equal => .logical_greater_or_equal_expr,
        else => .invalid,
    };
}
|
|
|
|
/// Maps an assignment-operator token to its statement tag, or null if the
/// token is not an assignment operator (caller falls back to expression).
fn getTokenAssignType(tag: Token.Tag) ?Ast.Node.Tag {
    return switch (tag) {
        .equal => .assign_stmt,
        .plus_equal => .assign_add_stmt,
        .minus_equal => .assign_sub_stmt,
        else => null,
    };
}
|
|
|
|
/// Binding power of a token for the Pratt loop in `parseInfixExpr`.
/// `.none` means the token cannot continue an expression.
fn getBindingPower(tag: Token.Tag) Precedence {
    return switch (tag) {
        .ampersand_ampersand, .keyword_and => .logical_and,
        .pipe_pipe, .keyword_or => .logical_or,
        .equal_equal, .not_equal => .comparison,
        .less_than, .less_than_equal => .comparison,
        .greater_than, .greater_than_equal => .comparison,
        .question_mark => .comparison,
        .plus, .minus => .term,
        .star, .slash, .percentage, .keyword_mod => .factor,
        .equal => .assign,
        else => .none,
    };
}
|
|
|
|
/// Maps a choice-marker token (`*` or `+`) to its branch statement tag.
fn getBranchTag(tag: Token.Tag) Ast.Node.Tag {
    return switch (tag) {
        .star => .choice_star_stmt,
        .plus => .choice_plus_stmt,
        else => .invalid,
    };
}
|
|
|
|
/// Records a diagnostic spanning `token` and returns `error.ParseError`.
/// While already panicking, no further diagnostics are recorded so one
/// syntax error does not cascade into many.
fn fail(p: *Parse, tag: Ast.Error.Tag, token: Token) error{ ParseError, OutOfMemory } {
    if (p.panic_mode) return error.ParseError;
    p.panic_mode = true;

    try p.errors.append(p.gpa, .{
        .tag = tag,
        .loc = .{
            .start = token.loc.start,
            .end = token.loc.end,
        },
    });
    return error.ParseError;
}
|
|
|
|
/// True when the lookahead token has the given tag.
fn checkToken(p: *const Parse, tag: Token.Tag) bool {
    return tag == p.token.tag;
}
|
|
|
|
/// True when the lookahead token matches any tag in `tag_set`.
/// NOTE(review): `.eof` always reports true even if it is not in the set —
/// presumably so end-of-input terminates any scan; confirm callers expect this.
fn checkTokenInSet(p: *const Parse, tag_set: []const Token.Tag) bool {
    if (p.checkToken(.eof)) return true;
    for (tag_set) |tag| {
        if (p.checkToken(tag)) return true;
    }
    return false;
}
|
|
|
|
/// Consumes and returns the current lookahead token, scanning its successor
/// using the grammar on top of `grammar_stack` (which must be non-empty).
fn nextToken(p: *Parse) Token {
    assert(p.grammar_stack.items.len > 0);
    const consumed = p.token;
    p.token = p.tokenizer.next(p.grammar_stack.getLast().grammar);
    return consumed;
}
|
|
|
|
/// Consumes the lookahead token only if it matches `tag`; otherwise no-op.
fn eatToken(p: *Parse, tag: Token.Tag) void {
    if (!p.checkToken(tag)) return;
    _ = p.nextToken();
}
|
|
|
|
/// Consumes a run of tokens matching `tag` (optionally skipping whitespace
/// between them) and returns how many were consumed. Used to count
/// choice/gather nesting markers.
fn eatTokenLooped(p: *Parse, tag: Token.Tag, ignore_whitespace: bool) usize {
    var consumed: usize = 0;
    while (p.checkToken(tag)) {
        _ = p.nextToken();
        if (ignore_whitespace) p.eatToken(.whitespace);
        consumed += 1;
    }
    return consumed;
}
|
|
|
|
/// Consumes and returns a token of the given tag, optionally skipping
/// leading whitespace; records `.unexpected_token` and fails otherwise.
fn expectToken(p: *Parse, tag: Token.Tag, skip_whitespace: bool) Error!Token {
    if (skip_whitespace) p.eatToken(.whitespace);
    if (p.checkToken(tag)) return p.nextToken();
    return p.fail(.unexpected_token, p.token);
}
|
|
|
|
/// Requires the lookahead to be a newline or eof and returns it WITHOUT
/// consuming it; records `.expected_newline` and fails otherwise.
fn expectNewline(p: *Parse) Error!Token {
    if (!p.checkToken(.eof) and !p.checkToken(.newline)) {
        return p.fail(.expected_newline, p.token);
    }
    return p.token;
}
|
|
|
|
/// Error recovery: clears panic mode and skips tokens up to (not including)
/// the next statement boundary (newline, `}`, `)`, or eof).
fn synchronize(p: *Parse) void {
    p.panic_mode = false;

    while (switch (p.token.tag) {
        .eof, .newline, .right_brace, .right_paren => false,
        else => true,
    }) {
        _ = p.nextToken();
    }
}
|
|
|
|
/// Pushes a lexing grammar, remembering the tokenizer position at the push
/// so `rewindGrammar` can back up to it.
fn pushGrammar(p: *Parse, grammar: Tokenizer.Grammar) error{OutOfMemory}!void {
    try p.grammar_stack.append(p.gpa, .{
        .grammar = grammar,
        .source_offset = p.tokenizer.index,
    });
}
|
|
|
|
/// Pops the active lexing grammar; panics if the stack is empty (that would
/// indicate an unbalanced push/pop bug in the parser itself).
fn popGrammar(p: *Parse) void {
    if (p.grammar_stack.pop() == null)
        @panic("BUG: Grammar mode stack popped when empty!");
}
|
|
|
|
/// Moves the tokenizer back to where the top grammar was pushed, so the
/// same input can be re-scanned under a different grammar.
fn rewindGrammar(p: *Parse) void {
    const target = p.grammar_stack.getLast().source_offset;
    assert(target <= p.tokenizer.index);
    p.tokenizer.index = target;
}
|
|
|
|
/// True when no scratch entries belong to this context.
fn isScratchEmpty(p: *Parse, context: *const StmtContext) bool {
    return p.scratch.items.len == context.scratch_top;
}
|
|
|
|
/// Returns (without removing) the most recent scratch node; asserts this
/// context owns at least one entry.
fn peekScratch(p: *Parse, context: *const StmtContext) *Ast.Node {
    assert(p.scratch.items.len > context.scratch_top);
    return p.scratch.items[p.scratch.items.len - 1];
}
|
|
|
|
/// Removes and returns the most recent scratch node; asserts this context
/// owns at least one entry.
fn popScratch(p: *Parse, context: *const StmtContext) *Ast.Node {
    assert(p.scratch.items.len > context.scratch_top);
    return p.scratch.pop() orelse
        @panic("BUG: Scratch buffer popped when empty!");
}
|
|
|
|
/// Copies scratch entries from `start` onward into an arena-owned slice and
/// truncates scratch back to `start`. The defer runs on the error path too,
/// so scratch is truncated even if the dupe fails.
fn makeNodeSliceFromScratch(p: *Parse, start: usize) Error![]*Ast.Node {
    defer p.scratch.shrinkRetainingCapacity(start);
    return p.arena.dupe(*Ast.Node, p.scratch.items[start..]);
}
|
|
|
|
/// Wraps the scratch entries from `scratch_offset` onward into a list node
/// of the given tag/span. `scratch_offset` must not reach below entries
/// owned by this context.
fn makeNodeSliceFrom(
    p: *Parse,
    context: *const StmtContext,
    tag: Ast.Node.Tag,
    loc: Ast.Node.Span,
    scratch_offset: usize,
) Error!*Ast.Node {
    assert(scratch_offset >= context.scratch_top);
    const list = try p.makeNodeSliceFromScratch(scratch_offset);
    return .createList(p.arena, tag, loc, list);
}
|
|
|
|
/// Wraps ALL scratch entries owned by this context into a list node.
fn makeNodeSlice(
    p: *Parse,
    context: *const StmtContext,
    tag: Ast.Node.Tag,
    loc: Ast.Node.Span,
) Error!*Ast.Node {
    return makeNodeSliceFrom(p, context, tag, loc, context.scratch_top);
}
|
|
|
|
/// True when no open blocks belong to this context.
fn isBlockStackEmpty(p: *Parse, context: *const StmtContext) bool {
    return p.block_stack.items.len == context.blocks_top;
}
|
|
|
|
/// Returns (without removing) the innermost open block owned by this
/// context; asserts one exists.
fn peekBlockStack(p: *Parse, context: *const StmtContext) State {
    assert(p.block_stack.items.len > context.blocks_top);
    return p.block_stack.items[p.block_stack.items.len - 1];
}
|
|
|
|
/// Removes and returns the innermost open block owned by this context;
/// asserts one exists. Panics on an unbalanced pop (parser bug).
fn popBlockStack(p: *Parse, context: *const StmtContext) State {
    assert(context.blocks_top < p.block_stack.items.len);
    return p.block_stack.pop() orelse
        // "BUG:" prefix matches the convention used by the other stack panics.
        @panic("BUG: Block stack popped when empty!");
}
|
|
|
|
/// True when no open choices belong to this context.
fn isChoiceStackEmpty(p: *Parse, context: *const StmtContext) bool {
    return p.choice_stack.items.len == context.choices_top;
}
|
|
|
|
/// Returns (without removing) the innermost open choice owned by this
/// context; asserts one exists.
fn peekChoiceStack(p: *Parse, context: *const StmtContext) State {
    assert(p.choice_stack.items.len > context.choices_top);
    return p.choice_stack.items[p.choice_stack.items.len - 1];
}
|
|
|
|
/// Removes and returns the innermost open choice owned by this context;
/// asserts one exists. Panics on an unbalanced pop (parser bug).
fn popChoiceStack(p: *Parse, context: *const StmtContext) State {
    assert(context.choices_top < p.choice_stack.items.len);
    return p.choice_stack.pop() orelse
        // "BUG:" prefix matches the convention used by the other stack panics.
        @panic("BUG: Choice stack popped when empty!");
}
|
|
|
|
/// Attaches a freshly collected block to the branch statement that owns it.
/// If the newest scratch node is a branch-like statement (choice, switch
/// case, if/else branch), the block becomes its rhs body and the combined
/// statement is returned; otherwise the block stands alone and
/// `is_block_created` is recorded on the context.
fn fixupBlock(p: *Parse, context: *StmtContext, node: *Ast.Node) !*Ast.Node {
    if (!p.isScratchEmpty(context)) {
        const stmt = p.peekScratch(context);
        switch (stmt.tag) {
            .choice_star_stmt,
            .choice_plus_stmt,
            .switch_case,
            .if_branch,
            .else_branch,
            => {
                // Adopt the block as the branch body and hand back the branch.
                stmt.data.bin.rhs = node;
                return p.popScratch(context);
            },
            else => {},
        }
    }

    context.is_block_created = true;
    return node;
}
|
|
|
|
/// Closes the innermost open block if its level is >= `level`, wrapping its
/// scratch children into a block_stmt and fixing it up onto a pending branch.
/// Returns null when there is no open block, or the open block is at a
/// shallower level than requested.
fn collectBlock(p: *Parse, context: *StmtContext, level: usize) Error!?*Ast.Node {
    if (p.isBlockStackEmpty(context)) return null;

    const block = p.peekBlockStack(context);
    if (block.level < level) return null;

    const span_start = block.source_offset;
    var span_end: usize = 0;

    // Span ends at the last child if any; an empty block is zero-width.
    if (!p.isScratchEmpty(context)) {
        const last = p.peekScratch(context);
        span_end = last.loc.end;
    } else {
        span_end = span_start;
    }

    var node = try p.makeNodeSliceFrom(context, .block_stmt, .{
        .start = span_start,
        .end = span_end,
    }, block.scratch_offset);
    node = try p.fixupBlock(context, node);
    _ = p.popBlockStack(context);
    return node;
}
|
|
|
|
/// Unwinds the choice stack down to `level`, wrapping each deeper choice's
/// scratch children into choice_stmt nodes (and closing blocks that belong
/// to them). With `should_gather` false, finishes by collecting the block at
/// `level`; otherwise returns the newest scratch node for the caller (the
/// gather-point logic) to consume.
fn collectContext(
    p: *Parse,
    context: *StmtContext,
    level: usize,
    should_gather: bool,
) Error!?*Ast.Node {
    // The level of the current choice should always be greater than the
    // level of the current block. Choice statements must have non-zero
    // levels, while blocks can have levels greater than or equal to zero.
    //
    // Choice statement levels need not follow a sequentially increasing order.
    // When collecting choice branches, statements with levels less than the
    // previous statement will be included in the same enclosing choice if no
    // previous levels exist.
    while (!p.isChoiceStackEmpty(context)) {
        assert(!p.isBlockStackEmpty(context));
        const choice_state = p.peekChoiceStack(context);
        if (choice_state.level <= level) break;

        _ = p.popChoiceStack(context);
        // Close any block opened at or above this choice's level first so it
        // ends up inside the choice node.
        if (!p.isBlockStackEmpty(context)) {
            const block_state = p.peekBlockStack(context);
            if (choice_state.level <= block_state.level) {
                const node = try p.collectBlock(context, block_state.level);
                if (node) |n| try p.scratch.append(p.gpa, n);
            }
        }
        if (!should_gather) {
            // A branch at an intermediate level (no matching sibling) is
            // re-opened at the requested level rather than closed outright.
            if (!p.isChoiceStackEmpty(context)) {
                const prev_choice = p.peekChoiceStack(context);
                if (level > prev_choice.level) {
                    try p.choice_stack.append(p.gpa, .{
                        .level = level,
                        .scratch_offset = choice_state.scratch_offset,
                        .source_offset = choice_state.source_offset,
                    });
                    break;
                }
            } else if (level > 0) {
                try p.choice_stack.append(p.gpa, .{
                    .level = level,
                    .scratch_offset = choice_state.scratch_offset,
                    .source_offset = choice_state.source_offset,
                });
                break;
            }
        }

        // Wrap everything this choice accumulated into a single choice_stmt.
        const node = try p.makeNodeSliceFrom(context, .choice_stmt, .{
            .start = choice_state.source_offset,
            .end = p.token.loc.start,
        }, choice_state.scratch_offset);
        try p.scratch.append(p.gpa, node);
    }
    if (!should_gather) return p.collectBlock(context, level);
    if (!p.isScratchEmpty(context)) return p.popScratch(context);
    return null;
}
|
|
|
|
/// Closes the current stitch/function body: collects everything down to
/// level 0, then, if the newest scratch node is a stitch/function prototype,
/// joins prototype and body into a declaration node. Returns the bare body
/// when no prototype is pending.
fn collectStitch(p: *Parse, context: *StmtContext) Error!?*Ast.Node {
    const node = try p.collectContext(context, 0, false);
    if (p.isScratchEmpty(context)) return node;

    const proto = p.peekScratch(context);
    const tag: Ast.Node.Tag = switch (proto.tag) {
        .stitch_prototype => .stitch_decl,
        .function_prototype => .function_decl,
        else => return node,
    };
    _ = p.popScratch(context);
    return .createBinary(p.arena, tag, .{
        .start = proto.loc.start,
        // An empty body ends where the prototype ends.
        .end = if (node) |n| n.loc.end else proto.loc.end,
    }, proto, node);
}
|
|
|
|
/// Closes the current knot: finishes any open stitch, then wraps every
/// scratch node after `knot_offset` (whose entry must be a knot prototype)
/// into a knot_decl. Returns null when nothing was parsed or no knot
/// prototype is pending at `knot_offset`.
fn collectKnot(p: *Parse, context: *StmtContext) Error!?*Ast.Node {
    if (p.isScratchEmpty(context)) return null;

    const child = try p.collectStitch(context);
    if (child) |n| try p.scratch.append(p.gpa, n);

    const proto = p.scratch.items[p.knot_offset];
    if (proto.tag != .knot_prototype) return null;

    // Children are everything after the prototype; the prototype itself is
    // popped after the slice is built (defer).
    const list = try p.makeNodeSliceFromScratch(p.knot_offset + 1);
    defer _ = p.popScratch(context);

    return .createKnot(p.arena, .knot_decl, .{
        .start = proto.loc.start,
        .end = if (child) |n| n.loc.end else proto.loc.end,
    }, proto, list);
}
|
|
|
|
/// Updates the block/choice stacks for a newly parsed choice branch `node`
/// at `context.level`. Deeper levels open nested choice (and, when needed,
/// block) states; an equal level closes the sibling's block; a shallower
/// level unwinds the deeper choices via collectContext.
fn handleChoiceBranch(p: *Parse, context: *StmtContext, node: *Ast.Node) !void {
    const level = context.level;
    if (p.isBlockStackEmpty(context)) {
        // Always start with a block level of zero.
        try p.block_stack.append(p.gpa, .{
            .level = 0,
            .scratch_offset = p.scratch.items.len,
            .source_offset = node.loc.start,
        });
    }
    if (p.isChoiceStackEmpty(context)) {
        // First branch in this context opens the outermost choice.
        try p.choice_stack.append(p.gpa, .{
            .level = level,
            .scratch_offset = p.scratch.items.len,
            .source_offset = node.loc.start,
        });
    } else {
        const choice_state = p.peekChoiceStack(context);
        const block_state = p.peekBlockStack(context);

        if (level > choice_state.level) {
            // Nesting deeper: give the enclosing choice a body block if it
            // doesn't already have one, then open the nested choice.
            if (block_state.level < choice_state.level) {
                try p.block_stack.append(p.gpa, .{
                    .level = choice_state.level,
                    .scratch_offset = p.scratch.items.len,
                    .source_offset = p.token.loc.start,
                });
            }
            try p.choice_stack.append(p.gpa, .{
                .level = level,
                .scratch_offset = p.scratch.items.len,
                .source_offset = node.loc.start,
            });
        } else if (level == choice_state.level) {
            // Sibling branch: close the previous branch's body block.
            const t_node = try p.collectBlock(context, level);
            if (t_node) |n| try p.scratch.append(p.gpa, n);
        } else {
            // Shallower branch: unwind nested choices down to this level.
            const t_node = try p.collectContext(context, level, false);
            if (t_node) |n| try p.scratch.append(p.gpa, n);
        }
    }
}
|
|
|
|
/// Updates the block/choice stacks for a gather point at `context.level` and
/// may rewrite `node.*` into a gathered_stmt that pairs the collected choice
/// with the gather marker. Gather points terminate compound statements at
/// the appropriate level.
fn handleGatherPoint(p: *Parse, context: *StmtContext, node: **Ast.Node) !void {
    const main_token = p.token;
    const level = context.level;

    if (p.isBlockStackEmpty(context)) {
        assert(p.isChoiceStackEmpty(context));
        try p.block_stack.append(p.gpa, .{
            .level = 0,
            .scratch_offset = p.scratch.items.len,
            .source_offset = node.*.loc.start,
        });
    }
    // Gather points terminate compound statements at the appropriate level.
    if (!p.isChoiceStackEmpty(context)) {
        const choice_state = p.peekChoiceStack(context);
        const block_state = p.peekBlockStack(context);

        if (level > choice_state.level) {
            // Deeper than the open choice: the gather belongs to the choice's
            // body, which needs its own block if not yet opened.
            if (block_state.level != choice_state.level) {
                try p.block_stack.append(p.gpa, .{
                    .level = choice_state.level,
                    .scratch_offset = p.scratch.items.len,
                    .source_offset = node.*.loc.start,
                });
            }
        } else if (!p.isScratchEmpty(context)) {
            // At or above the open choice: gather everything deeper than
            // level - 1 and attach it to this gather point.
            // NOTE(review): assumes `level >= 1` here (a gather marker was
            // consumed, so eatTokenLooped returned at least 1) — otherwise
            // `level - 1` would underflow; confirm against parseGatherPointStmt.
            const gathered = (try p.collectContext(context, level - 1, true)) orelse
                @panic("BUG: Gather point found nothing to collect!");
            if (gathered.tag == .choice_stmt) {
                node.* = try Ast.Node.createBinary(p.arena, .gathered_stmt, .{
                    .start = gathered.loc.start,
                    .end = main_token.loc.start,
                }, gathered, node.*);
            }
            if (!p.isBlockStackEmpty(context)) {
                const b = p.peekBlockStack(context);
                if (b.level == level) {
                    const closed = try p.collectBlock(context, level);
                    if (closed) |n| try p.scratch.append(p.gpa, n);
                }
            }
        }
    }
}
|
|
|
|
/// Updates the block stack for a plain content statement: opens the root
/// block if none exists, and opens a body block for the enclosing choice if
/// the content is the first statement inside it.
fn handleContentStmt(p: *Parse, context: *StmtContext, node: *Ast.Node) !void {
    if (p.isBlockStackEmpty(context)) {
        try p.block_stack.append(p.gpa, .{
            .level = 0,
            .scratch_offset = p.scratch.items.len,
            .source_offset = node.loc.start,
        });
    }
    if (!p.isChoiceStackEmpty(context)) {
        const block_state = p.peekBlockStack(context);
        const choice_state = p.peekChoiceStack(context);

        if (block_state.level != choice_state.level) {
            try p.block_stack.append(p.gpa, .{
                .level = choice_state.level,
                .scratch_offset = p.scratch.items.len,
                .source_offset = node.loc.start,
            });
        }
    }
}
|
|
|
|
/// Closes everything open in this context (down to level 0) and pushes the
/// result back onto scratch, ready for the next conditional branch.
fn handleConditionalBranch(p: *Parse, context: *StmtContext) Error!void {
    if (try p.collectContext(context, 0, false)) |collected| {
        try p.scratch.append(p.gpa, collected);
    }
}
|
|
|
|
/// Finishes the previous knot (if any), pushes it onto scratch, and marks
/// the current scratch top as the start of the next knot.
fn handleKnotDecl(p: *Parse, context: *StmtContext) !void {
    const node = try p.collectKnot(context);
    if (node) |n| try p.scratch.append(p.gpa, n);

    p.knot_offset = p.scratch.items.len;
}
|
|
|
|
/// Finishes the previous stitch (if any) and pushes it onto scratch.
fn handleStitchDecl(p: *Parse, context: *StmtContext) !void {
    if (try p.collectStitch(context)) |decl| {
        try p.scratch.append(p.gpa, decl);
    }
}
|
|
|
|
/// Function declarations are collected exactly like stitches (collectStitch
/// distinguishes them by prototype tag).
fn handleFunctionDecl(p: *Parse, context: *StmtContext) !void {
    return p.handleStitchDecl(context);
}
|
|
|
|
/// Parses an expression, failing with `.expected_expression` (reported at
/// the token where the expression should have started) when none is found.
fn expectExpr(p: *Parse) Error!*Ast.Node {
    const start_token = p.token;
    const node = (try parseInfixExpr(p, null, .none)) orelse
        return p.fail(.expected_expression, start_token);
    return node;
}
|
|
|
|
/// Consumes one token and wraps it in a leaf node of the given tag.
fn parseAtom(p: *Parse, tag: Ast.Node.Tag) Error!*Ast.Node {
    const token = p.nextToken();
    return .createLeaf(p.arena, tag, .{
        .start = token.loc.start,
        .end = token.loc.end,
    });
}
|
|
|
|
/// Consumes the current token as an identifier leaf node.
fn parseIdentifier(p: *Parse) Error!*Ast.Node {
    return p.parseAtom(.identifier);
}
|
|
|
|
/// Parses an identifier, failing with `.expected_identifier` otherwise.
fn expectIdentifier(p: *Parse) Error!*Ast.Node {
    if (p.checkToken(.identifier)) return parseIdentifier(p);
    return p.fail(.expected_identifier, p.token);
}
|
|
|
|
/// Parses a primary expression: literal, identifier (with selectors/calls),
/// string, or parenthesized expression. Returns null when the lookahead
/// cannot start a primary expression.
fn parsePrimaryExpr(p: *Parse) Error!?*Ast.Node {
    return switch (p.token.tag) {
        .number_literal => try parseAtom(p, .number_literal),
        .keyword_true => try parseAtom(p, .true_literal),
        .keyword_false => try parseAtom(p, .false_literal),
        .identifier => try parseIdentifierExpr(p),
        .double_quote => try parseStringExpr(p),
        .left_paren => blk: {
            _ = p.nextToken();

            // Note: a missing inner expression returns null without
            // requiring the closing paren, same as before.
            const inner = (try parseInfixExpr(p, null, .none)) orelse
                break :blk null;

            _ = try p.expectToken(.right_paren, true);
            break :blk inner;
        },
        else => null,
    };
}
|
|
|
|
/// Parses a prefix (unary) expression: `not`/`!`/`-` applied recursively,
/// otherwise a primary expression. Returns null when no operand follows an
/// operator.
fn parsePrefixExpr(p: *Parse) Error!?*Ast.Node {
    switch (p.token.tag) {
        .keyword_not, .minus, .exclaimation_mark => {
            const main_token = p.nextToken();
            // Unwrap once: the original re-tested `lhs` for the span end even
            // though the null case had already returned, leaving a dead branch.
            const operand = (try parsePrefixExpr(p)) orelse return null;

            const tag = getTokenPrefixType(main_token.tag);
            return .createBinary(p.arena, tag, .{
                .start = main_token.loc.start,
                .end = operand.loc.end,
            }, operand, null);
        },
        else => return parsePrimaryExpr(p),
    }
}
|
|
|
|
/// Pratt-style infix expression parser. Starts from `prev_node` (or parses a
/// prefix expression when null) and folds in operators whose binding power
/// exceeds `precedence`. Returns null when an operand is missing.
/// NOTE(review): when an operator has no right-hand side this returns null,
/// discarding the partially built lhs — presumably the caller reports the
/// error; confirm this is intended rather than returning lhs.
fn parseInfixExpr(
    p: *Parse,
    prev_node: ?*Ast.Node,
    precedence: Precedence,
) Error!?*Ast.Node {
    var lhs = prev_node;
    if (lhs == null) {
        lhs = try parsePrefixExpr(p);
        if (lhs == null) return lhs;
    }
    while (true) {
        var next_node: ?*Ast.Node = null;
        const token = p.token;
        const token_precedence = getBindingPower(token.tag);

        // Only fold operators that bind tighter than the current level;
        // equal-or-looser operators are left for an outer call.
        if (@intFromEnum(token_precedence) > @intFromEnum(precedence)) {
            _ = p.nextToken();
            next_node = try parseInfixExpr(p, null, token_precedence);
            if (next_node) |rhs| {
                const tag = getTokenInfixType(token.tag);
                lhs = try Ast.Node.createBinary(p.arena, tag, .{
                    .start = if (lhs) |n| n.loc.start else token.loc.start,
                    .end = rhs.loc.end,
                }, lhs, rhs);
            } else return null;
        } else break;
    }
    return lhs;
}
|
|
|
|
/// Parses a full expression at the weakest precedence level.
fn parseExpression(p: *Parse) Error!?*Ast.Node {
    return p.parseInfixExpr(null, .none);
}
|
|
|
|
/// Parses a double-quoted string expression. The raw text between the
/// quotes becomes a string_literal leaf; an unterminated string (newline or
/// eof before the closing quote) fails in expectToken.
fn parseStringExpr(p: *Parse) Error!*Ast.Node {
    assert(p.token.tag == .double_quote);
    const main_token = p.nextToken();

    // Skip over the string body; only the source span is recorded.
    while (true) switch (p.token.tag) {
        .double_quote, .newline, .eof => break,
        else => _ = p.nextToken(),
    };

    const last_token = try p.expectToken(.double_quote, true);
    // Literal span excludes the quotes themselves.
    const expr = try Ast.Node.createLeaf(p.arena, .string_literal, .{
        .start = main_token.loc.end,
        .end = last_token.loc.start,
    });
    return .createBinary(p.arena, .string_expr, .{
        .start = main_token.loc.start,
        .end = p.token.loc.start,
    }, expr, null);
}
|
|
|
|
/// Parses an expression statement, optionally continuing from an
/// already-parsed `lhs`, and requires a trailing newline (not consumed).
fn parseExprStmt(p: *Parse, lhs: ?*Ast.Node) Error!*Ast.Node {
    const main_token = p.token;
    const node = try parseInfixExpr(p, lhs, .none);
    _ = try p.expectNewline();

    return .createBinary(p.arena, .expr_stmt, .{
        .start = if (lhs) |n| n.loc.start else main_token.loc.start,
        .end = p.token.loc.start,
    }, node, null);
}
|
|
|
|
/// Parses an identifier-led statement: an assignment (`=`, `+=`, `-=`) when
/// an assignment operator follows the identifier expression, otherwise a
/// plain expression statement continuing from it.
fn parseAssignStmt(p: *Parse) Error!*Ast.Node {
    const main_token = p.token;
    const lhs = try parseIdentifierExpr(p);

    if (getTokenAssignType(p.token.tag)) |op| {
        _ = p.nextToken();

        const rhs = try p.expectExpr();
        _ = try p.expectNewline();

        return .createBinary(p.arena, op, .{
            .start = main_token.loc.start,
            .end = p.token.loc.start,
        }, lhs, rhs);
    }
    return parseExprStmt(p, lhs);
}
|
|
|
|
/// Parses `temp <identifier> = <expr>` (the `temp` keyword is the current
/// token) followed by a newline.
fn parseTempDecl(p: *Parse) Error!*Ast.Node {
    const main_token = p.nextToken();
    const lhs = try p.expectIdentifier();
    _ = try p.expectToken(.equal, true);

    const rhs = try p.expectExpr();
    _ = try p.expectNewline();

    return .createBinary(p.arena, .temp_decl, .{
        .start = main_token.loc.start,
        .end = p.token.loc.start,
    }, lhs, rhs);
}
|
|
|
|
/// Parses a `~`-prefixed logic line under expression grammar: a temp
/// declaration, return, assignment, or bare expression statement.
fn parseTildeStmt(p: *Parse) Error!*Ast.Node {
    try p.pushGrammar(.expression);
    defer p.popGrammar();

    // Consume the `~` marker itself.
    _ = p.nextToken();
    return switch (p.token.tag) {
        .keyword_temp => parseTempDecl(p),
        .keyword_return => parseReturnStmt(p),
        .identifier => parseAssignStmt(p),
        else => parseExprStmt(p, null),
    };
}
|
|
|
|
/// Parses `return [expr]` (the `return` keyword is the current token)
/// followed by a newline; the value is optional.
fn parseReturnStmt(p: *Parse) Error!*Ast.Node {
    var node: ?*Ast.Node = null;
    const main_token = p.nextToken();

    if (!p.checkToken(.newline) and !p.checkToken(.eof)) {
        node = try parseInfixExpr(p, null, .none);
    }

    _ = try p.expectNewline();
    return .createBinary(p.arena, .return_stmt, .{
        .start = main_token.loc.start,
        .end = p.token.loc.start,
    }, node, null);
}
|
|
|
|
/// Parses an identifier followed by any number of `.field` selectors and an
/// optional trailing call `(...)`. A call terminates the chain (no further
/// selectors after the argument list).
fn parseIdentifierExpr(p: *Parse) Error!*Ast.Node {
    var lhs = try p.expectIdentifier();

    while (true) {
        switch (p.token.tag) {
            .dot => {
                _ = p.nextToken();
                const rhs = try p.expectIdentifier();
                lhs = try Ast.Node.createBinary(p.arena, .selector_expr, .{
                    .start = lhs.loc.start,
                    .end = p.token.loc.start,
                }, lhs, rhs);
            },
            .left_paren => {
                const rhs = try parseArgumentList(p);
                return .createBinary(p.arena, .call_expr, .{
                    .start = lhs.loc.start,
                    .end = p.token.loc.start,
                }, lhs, rhs);
            },
            else => return lhs,
        }
    }
}
|
|
|
|
/// Parses a divert `-> target[.sub]` under expression grammar; the arrow is
/// the current token.
fn parseDivertExpr(p: *Parse) Error!*Ast.Node {
    try p.pushGrammar(.expression);
    defer p.popGrammar();

    const main_token = p.nextToken();
    const node = try parseIdentifierExpr(p);
    return .createBinary(p.arena, .divert_expr, .{
        .start = main_token.loc.start,
        .end = p.token.loc.start,
    }, node, null);
}
|
|
|
|
/// Parses a stand-alone divert line: a divert expression followed by a
/// newline (not consumed).
fn parseDivertStmt(p: *Parse) Error!*Ast.Node {
    const main_token = p.token;
    const node = try parseDivertExpr(p);
    _ = try p.expectNewline();

    return .createBinary(p.arena, .divert_stmt, .{
        .start = main_token.loc.start,
        .end = p.token.loc.start,
    }, node, null);
}
|
|
|
|
/// Parses the text of a choice branch: shared content, then an optional
/// `[choice-only]` bracket segment followed by output-only content. A
/// trailing divert in the shared part ends the branch immediately; a divert
/// inside the brackets is an error.
fn parseChoiceExpr(p: *Parse) Error!?*Ast.Node {
    const main_token = p.token;

    const lhs = try parseContentList(p, .{ .ignore_brackets = false });
    if (lhs) |nodes| {
        const last = nodes[nodes.len - 1];
        if (last.data.content.trailing_divert != null) {
            // `* text -> target` form: no bracket segment can follow.
            const end_token = try p.expectNewline();
            return .createChoice(p.arena, .choice_expr, .{
                .start = main_token.loc.start,
                .end = end_token.loc.start,
            }, lhs, null, null);
        }
    }
    if (p.checkToken(.left_bracket)) {
        _ = p.nextToken();
        p.eatToken(.whitespace);

        const mhs = try parseContentList(p, .{ .ignore_brackets = false });
        if (mhs) |nodes| {
            const last = nodes[nodes.len - 1];
            // Diverts are not allowed inside the bracketed segment.
            if (last.data.content.trailing_divert) |_| {
                return p.fail(.unexpected_token, p.token);
            }
        }

        _ = try p.expectToken(.right_bracket, false);
        const rhs = try parseContentList(p, .{ .ignore_brackets = false });

        return .createChoice(p.arena, .choice_expr, .{
            .start = main_token.loc.start,
            .end = p.token.loc.start,
        }, lhs, mhs, rhs);
    }
    return .createChoice(p.arena, .choice_expr, .{
        .start = main_token.loc.start,
        .end = p.token.loc.start,
    }, lhs, null, null);
}
|
|
|
|
/// Parses one choice branch: counts the run of `*`/`+` markers (the nesting
/// level, stored on `context`), then the branch content. The branch body is
/// attached later by fixupBlock (rhs starts as null).
fn parseChoiceStmt(p: *Parse, context: *StmtContext) Error!*Ast.Node {
    const main_token = p.token;
    const level = p.eatTokenLooped(main_token.tag, true);
    const node = try parseChoiceExpr(p);
    const span_end = if (node) |n| n.loc.end else p.token.loc.start;
    context.level = level;

    if (p.checkToken(.newline)) _ = p.nextToken();

    return .createBinary(p.arena, getBranchTag(main_token.tag), .{
        .start = main_token.loc.start,
        .end = span_end,
    }, node, null);
}
|
|
|
|
/// Parses a gather point: counts the run of gather markers (the level,
/// stored on `context`) and produces a childless gather_point_stmt.
fn parseGatherPointStmt(p: *Parse, context: *StmtContext) Error!*Ast.Node {
    const main_token = p.token;
    const level = p.eatTokenLooped(main_token.tag, true);
    const span_end = p.token.loc.start;
    context.level = level;

    p.eatToken(.whitespace);
    p.eatToken(.newline);
    return .createBinary(p.arena, .gather_point_stmt, .{
        .start = main_token.loc.start,
        .end = span_end,
    }, null, null);
}
|
|
|
|
/// Parses the body of a `{ ... }` conditional block up to the closing brace,
/// recovering from statement-level parse errors via synchronize. The result
/// tag depends on whether a subject expression was supplied and whether a
/// free-standing block was created: switch_stmt (expr, branch-only body),
/// multi_if_stmt (no expr, branch-only body), or if_stmt otherwise.
fn parseConditional(p: *Parse, main_token: Token, expr: ?*Ast.Node) Error!?*Ast.Node {
    var context = p.makeStmtContext(.conditional, expr);
    p.eatToken(.whitespace);

    while (!p.checkToken(.eof) and !p.checkToken(.right_brace)) {
        const node = parseStmt(p, &context) catch |err| switch (err) {
            error.ParseError => {
                // Recover at the next boundary and keep parsing the block.
                if (p.panic_mode) p.synchronize();
                p.eatToken(.newline);
                continue;
            },
            else => |e| return e,
        };
        try p.scratch.append(p.gpa, node);
    }

    const end_token = try p.expectToken(.right_brace, true);
    const node = try p.collectContext(&context, 0, false);
    if (node) |n| try p.scratch.append(p.gpa, n);

    const list = try p.makeNodeSliceFromScratch(context.scratch_top);
    return .createSwitch(p.arena, if (expr != null and !context.is_block_created)
        .switch_stmt
    else if (expr == null and !context.is_block_created)
        .multi_if_stmt
    else
        .if_stmt, .{
        .start = main_token.loc.start,
        .end = end_token.loc.start,
    }, expr, list);
}
|
|
|
|
/// Parses the inline form `{cond: content}` after the colon: optional
/// content up to the closing brace. `lhs` is the condition expression.
fn parseInlineIf(p: *Parse, main_token: Token, lhs: ?*Ast.Node) Error!*Ast.Node {
    p.eatToken(.whitespace);

    const content_node = switch (try parseContent(p, .{}, false)) {
        .node, .split => |n| n,
        .none => null,
    };
    const end_token = try p.expectToken(.right_brace, true);

    return .createBinary(p.arena, .inline_if_stmt, .{
        .start = main_token.loc.start,
        .end = end_token.loc.end,
    }, lhs, content_node);
}
|
|
|
|
/// Disambiguates a `{` in content. Tries to parse an expression first:
///   {expr}            -> inline logic
///   {expr: ...}       -> inline if, or (after a newline) a switch/if block
///   {\n ...}          -> multi-branch conditional with no subject
fn parseLbraceExpr(p: *Parse) Error!?*Ast.Node {
    const lbrace_token = p.token;
    const lhs = n: {
        // The brace interior is lexed under expression grammar.
        try p.pushGrammar(.expression);
        defer p.popGrammar();

        _ = p.nextToken();
        const node = try parseExpression(p);
        break :n node;
    };
    if (lhs == null) {
        // No leading expression: must be a `{` newline conditional block.
        try p.pushGrammar(.content);
        defer p.popGrammar();

        _ = try p.expectToken(.newline, true);
        return parseConditional(p, lbrace_token, null);
    }
    if (p.checkToken(.right_brace)) {
        // `{expr}` — evaluated and printed inline.
        const rbrace_token = p.nextToken();
        return .createBinary(p.arena, .inline_logic_expr, .{
            .start = lbrace_token.loc.start,
            .end = rbrace_token.loc.end,
        }, lhs, null);
    }

    _ = try p.expectToken(.colon, false);
    if (p.checkToken(.newline)) {
        _ = p.nextToken();
        return parseConditional(p, lbrace_token, lhs);
    } else {
        return parseInlineIf(p, lbrace_token, lhs);
    }
}
|
|
|
|
/// Options threaded through the content parsers.
const ContentOptions = struct {
    /// When false, `[`/`]` terminate content (used inside choice text).
    ignore_brackets: bool = true,
    skip_leading_whitespace: bool = false,
    skip_trailing_whitespace: bool = false,
};
|
|
|
|
/// Result of `parseContent`: a finished content node, a node that was
/// interrupted mid-line by glue (`split` — the caller continues with a new
/// glued segment), or nothing.
const ContentResult = union(enum) {
    node: *Ast.Node,
    split: *Ast.Node,
    none,
};
|
|
|
|
// TODO: This function is getting to be a mess. Refactor if possible.
|
|
fn parseContentString(p: *Parse, options: ContentOptions) Error!?*Ast.Node {
|
|
const main_token = p.token;
|
|
var end_pos = main_token.loc.start;
|
|
|
|
if (options.skip_leading_whitespace) {
|
|
while (p.token.tag == .whitespace) _ = p.nextToken();
|
|
switch (p.token.tag) {
|
|
.eof, .newline, .glue => return null,
|
|
else => {},
|
|
}
|
|
}
|
|
|
|
const start_pos = p.token.loc.start;
|
|
end_pos = start_pos;
|
|
|
|
while (true) {
|
|
switch (p.token.tag) {
|
|
.eof,
|
|
.newline,
|
|
.left_arrow,
|
|
.right_arrow,
|
|
.left_brace,
|
|
.right_brace,
|
|
.glue,
|
|
=> break,
|
|
.left_bracket, .right_bracket => {
|
|
if (!options.ignore_brackets) break;
|
|
end_pos = p.token.loc.end;
|
|
_ = p.nextToken();
|
|
},
|
|
.whitespace => {
|
|
// TODO: Test for this.
|
|
if (options.skip_trailing_whitespace) {
|
|
const ws_end = p.token.loc.end;
|
|
_ = p.nextToken();
|
|
|
|
if (p.token.tag == .glue) break;
|
|
end_pos = ws_end;
|
|
} else {
|
|
end_pos = p.token.loc.end;
|
|
_ = p.nextToken();
|
|
}
|
|
},
|
|
else => {
|
|
end_pos = p.token.loc.end;
|
|
_ = p.nextToken();
|
|
},
|
|
}
|
|
}
|
|
if (start_pos == end_pos)
|
|
return null;
|
|
|
|
return .createLeaf(p.arena, .string_literal, .{
|
|
.start = start_pos,
|
|
.end = end_pos,
|
|
});
|
|
}
|
|
|
|
// TODO: This function is getting to be a mess. Refactor if possible.
|
|
fn parseContent(
|
|
p: *Parse,
|
|
options: ContentOptions,
|
|
leading_glue: bool,
|
|
) Error!ContentResult {
|
|
const main_token = p.token;
|
|
const scratch_top = p.scratch.items.len;
|
|
var trailing_divert: ?*Ast.Node = null;
|
|
var trailing_glue = false;
|
|
var has_internal_leading_glue = false;
|
|
var is_split = false;
|
|
|
|
if (p.token.tag == .glue) {
|
|
has_internal_leading_glue = true;
|
|
_ = p.nextToken();
|
|
}
|
|
if (options.skip_leading_whitespace and !has_internal_leading_glue) {
|
|
while (p.token.tag == .whitespace) _ = p.nextToken();
|
|
}
|
|
|
|
const effective_leading_glue = leading_glue or has_internal_leading_glue;
|
|
|
|
loop: while (true) {
|
|
const node: ?*Ast.Node = switch (p.token.tag) {
|
|
.eof,
|
|
.newline,
|
|
.left_arrow,
|
|
.right_brace,
|
|
=> break,
|
|
.left_brace => try parseLbraceExpr(p),
|
|
.right_arrow => {
|
|
trailing_divert = try parseDivertExpr(p);
|
|
break :loop;
|
|
},
|
|
.glue => {
|
|
_ = p.nextToken();
|
|
switch (p.token.tag) {
|
|
.eof, .newline, .right_brace => {
|
|
trailing_glue = true;
|
|
break :loop;
|
|
},
|
|
else => {
|
|
is_split = true;
|
|
break :loop;
|
|
},
|
|
}
|
|
},
|
|
else => |tag| blk: {
|
|
if (tag == .left_bracket or tag == .right_bracket) {
|
|
if (!options.ignore_brackets) break;
|
|
}
|
|
break :blk try parseContentString(p, .{
|
|
.ignore_brackets = options.ignore_brackets,
|
|
.skip_leading_whitespace = false,
|
|
.skip_trailing_whitespace = true,
|
|
});
|
|
},
|
|
};
|
|
if (node) |n| try p.scratch.append(p.gpa, n);
|
|
}
|
|
if (main_token.loc.start == p.token.loc.start) return .none;
|
|
|
|
const items = try p.makeNodeSliceFromScratch(scratch_top);
|
|
const node = try Ast.Node.createContent(p.arena, .content, .{
|
|
.start = main_token.loc.start,
|
|
.end = p.token.loc.start,
|
|
}, .{
|
|
.items = items,
|
|
.leading_glue = effective_leading_glue,
|
|
.trailing_glue = trailing_glue,
|
|
.trailing_divert = trailing_divert,
|
|
});
|
|
return if (is_split) .{ .split = node } else .{ .node = node };
|
|
}
|
|
|
|
/// Parses consecutive glue-joined content segments into a node slice.
/// Returns null when no segment produced any content.
fn parseContentList(
    p: *Parse,
    options: ContentOptions,
) Error!?[]*Ast.Node {
    const scratch_top = p.scratch.items.len;
    var glue_pending = false;
    var first_segment = true;

    parsing: while (true) : (first_segment = false) {
        const segment_options: ContentOptions = .{
            .ignore_brackets = options.ignore_brackets,
            .skip_leading_whitespace = first_segment and options.skip_leading_whitespace,
            .skip_trailing_whitespace = true,
        };
        switch (try parseContent(p, segment_options, glue_pending)) {
            .none => break :parsing,
            .node => |segment| {
                try p.scratch.append(p.gpa, segment);
                break :parsing;
            },
            .split => |segment| {
                // Interior glue: keep collecting, carrying the glue forward.
                try p.scratch.append(p.gpa, segment);
                glue_pending = true;
            },
        }
    }

    const items = try p.makeNodeSliceFromScratch(scratch_top);
    if (items.len == 0) return null;
    return items;
}
|
|
|
|
/// Parses one full line of content as a statement, consuming the
/// terminating newline.
fn parseContentStmt(p: *Parse) Error!*Ast.Node {
    const main_token = p.token;
    const context = p.makeStmtContext(.block, null);
    var glue_pending = false;

    collect: while (true) {
        switch (try parseContent(p, .{}, glue_pending)) {
            .none => break :collect,
            .node => |segment| {
                try p.scratch.append(p.gpa, segment);
                break :collect;
            },
            .split => |segment| {
                // Interior glue: keep collecting, carrying the glue forward.
                try p.scratch.append(p.gpa, segment);
                glue_pending = true;
            },
        }
    }

    const end_token = try p.expectNewline();
    return p.makeNodeSlice(&context, .content_stmt, .{
        .start = main_token.loc.start,
        .end = end_token.loc.start,
    });
}
|
|
|
|
/// Parses a single parameter declaration, optionally prefixed with `ref`
/// (pass-by-reference).
fn parseParameterDecl(p: *Parse) Error!*Ast.Node {
    var tag: Ast.Node.Tag = .parameter_decl;
    if (p.checkToken(.keyword_ref)) {
        _ = p.nextToken();
        tag = .ref_parameter_decl;
    }

    const node = try p.expectIdentifier();
    node.tag = tag;
    return node;
}
|
|
|
|
/// Parses a parenthesized, comma-separated list of parameter declarations.
/// Fails with `.too_many_parameters` once `max_argument_count` is reached.
fn parseParameterList(p: *Parse) Error!?*Ast.Node {
    const context = p.makeStmtContext(.block, null);
    const main_token = try p.expectToken(.left_paren, true);

    if (!p.checkToken(.right_paren)) {
        var count: usize = 0;
        while (true) {
            if (count == p.max_argument_count) {
                return p.fail(.too_many_parameters, p.token);
            }

            try p.scratch.append(p.gpa, try parseParameterDecl(p));
            count += 1;

            if (!p.checkToken(.comma)) break;
            _ = p.nextToken();
        }
    }

    _ = try p.expectToken(.right_paren, true);
    return p.makeNodeSlice(&context, .parameter_list, .{
        .start = main_token.loc.start,
        .end = p.token.loc.start,
    });
}
|
|
|
|
/// Parses a parenthesized, comma-separated list of argument expressions.
/// Fails with `.too_many_arguments` once `max_argument_count` is reached.
fn parseArgumentList(p: *Parse) Error!?*Ast.Node {
    const context = p.makeStmtContext(.block, null);
    const main_token = try p.expectToken(.left_paren, true);

    if (!p.checkToken(.right_paren)) {
        var count: usize = 0;
        while (true) {
            if (count == p.max_argument_count) {
                return p.fail(.too_many_arguments, p.token);
            }

            if (try parseInfixExpr(p, null, .none)) |argument| {
                try p.scratch.append(p.gpa, argument);
            }
            count += 1;

            if (!p.checkToken(.comma)) break;
            _ = p.nextToken();
        }
    }

    _ = try p.expectToken(.right_paren, false);
    return p.makeNodeSlice(&context, .argument_list, .{
        .start = main_token.loc.start,
        .end = p.token.loc.start,
    });
}
|
|
|
|
/// Attempts to parse a conditional branch header: either `else:` or
/// `<expr>:` with the given branch tag. Returns null when the line is not
/// a branch header; tokens may already have been consumed in that case,
/// and the caller is expected to rewind.
fn parseConditionalBranch(p: *Parse, tag: Ast.Node.Tag) Error!?*Ast.Node {
    const main_token = p.token;
    var node_tag = tag;
    var condition: ?*Ast.Node = null;

    if (p.checkToken(.keyword_else)) {
        _ = p.nextToken();
        node_tag = .else_branch;
    } else {
        condition = (try parseExpression(p)) orelse return null;
    }
    if (!p.checkToken(.colon)) return null;
    _ = p.nextToken();
    if (p.checkToken(.newline)) _ = p.nextToken();

    return .createBinary(p.arena, node_tag, .{
        .start = main_token.loc.start,
        .end = p.token.loc.start,
    }, condition, null);
}
|
|
|
|
/// Parses one `-`-prefixed line inside a conditional block: first tries to
/// read it as a branch header (a switch case when the conditional has a
/// scrutinee expression, an if-branch otherwise); if that fails, rewinds
/// the tokenizer and re-parses the line as a gather point under content
/// grammar.
fn parseConditionalStmt(p: *Parse, context: *StmtContext) Error!*Ast.Node {
    try p.pushGrammar(.expression);
    defer p.popGrammar();
    _ = p.nextToken();

    // A scrutinee expression on the conditional means its branches are
    // switch cases; without one they are if-branches.
    const node = try parseConditionalBranch(p, if (context.expression_node) |_|
        .switch_case
    else
        .if_branch);
    if (node) |n| return n;

    // Not a branch header: undo the speculative expression scan, switch
    // back to content grammar, and retry as a gather point.
    p.rewindGrammar();
    try p.pushGrammar(.content);
    defer p.popGrammar();

    _ = p.nextToken();
    return parseGatherPointStmt(p, context);
}
|
|
|
|
/// Parses `<keyword> name = expr` through the terminating newline and
/// builds a binary node of the given tag: identifier on the left,
/// initializer expression on the right.
fn parseVar(p: *Parse, tag: Ast.Node.Tag) Error!*Ast.Node {
    try p.pushGrammar(.expression);
    defer p.popGrammar();

    const keyword_token = p.nextToken();
    const name = try p.expectIdentifier();
    _ = try p.expectToken(.equal, true);

    const initializer = try p.expectExpr();
    const newline_token = try p.expectNewline();
    return .createBinary(p.arena, tag, .{
        .start = keyword_token.loc.start,
        .end = newline_token.loc.start,
    }, name, initializer);
}
|
|
|
|
/// Parses a `VAR name = expr` declaration line.
fn parseVarDecl(p: *Parse) Error!*Ast.Node {
    assert(p.token.tag == .keyword_var);
    return p.parseVar(.var_decl);
}
|
|
|
|
/// Parses a `CONST name = expr` declaration line.
fn parseConstDecl(p: *Parse) Error!*Ast.Node {
    assert(p.token.tag == .keyword_const);
    return p.parseVar(.const_decl);
}
|
|
|
|
/// Parses a knot/stitch/function header line, e.g. `== name(params) ==`.
/// A single leading `=` yields a stitch prototype, additional `=` signs a
/// knot prototype, and the `function` keyword a function prototype.
/// Trailing runs of `=`/`==` are consumed and ignored.
fn parseKnotDecl(p: *Parse) Error!*Ast.Node {
    const main_token = p.nextToken();
    var tag: Ast.Node.Tag = .stitch_prototype;

    try p.pushGrammar(.expression);
    defer p.popGrammar();

    p.eatToken(.whitespace);
    if (p.checkToken(.equal)) {
        // Two or more leading `=` signs promote the stitch to a knot.
        tag = .knot_prototype;
        while (p.checkToken(.equal)) _ = p.nextToken();
    }
    if (p.checkToken(.keyword_function)) {
        _ = p.nextToken();
        tag = .function_prototype;
    }

    const name = try p.expectIdentifier();
    const params = if (p.checkToken(.left_paren))
        try parseParameterList(p)
    else
        null;

    // The closing `=` run is purely decorative.
    while (p.checkToken(.equal) or p.checkToken(.equal_equal)) {
        _ = p.nextToken();
    }

    _ = try p.expectNewline();
    return .createBinary(p.arena, tag, .{
        .start = main_token.loc.start,
        .end = p.token.loc.start,
    }, name, params);
}
|
|
|
|
/// Parses one statement, dispatching on the leading token (and the current
/// statement context for `-` and `=`), then routes the resulting node to
/// the matching structural handler.
fn parseStmt(p: *Parse, context: *StmtContext) Error!*Ast.Node {
    p.eatToken(.whitespace);

    var node = switch (p.token.tag) {
        .star, .plus => try parseChoiceStmt(p, context),
        // `-` is a gather point in block context, a branch in a conditional.
        .minus => switch (context.tag) {
            .block => try parseGatherPointStmt(p, context),
            .conditional => try parseConditionalStmt(p, context),
        },
        // `=` starts a knot/stitch header only at block level.
        .equal, .equal_equal => switch (context.tag) {
            .block => try parseKnotDecl(p),
            .conditional => try parseContentStmt(p),
        },
        .tilde => try parseTildeStmt(p),
        .right_arrow => try parseDivertStmt(p),
        // A stray closing brace is always an error here.
        .right_brace => return p.fail(.unexpected_token, p.nextToken()),
        .keyword_const => try parseConstDecl(p),
        .keyword_var => try parseVarDecl(p),
        else => try parseContentStmt(p),
    };

    p.eatToken(.newline);
    p.eatToken(.whitespace);

    // Hand the parsed node to the structural bookkeeping that stitches
    // branches, choices, gathers, and knot scopes together.
    switch (node.tag) {
        .if_branch, .else_branch, .switch_case => try p.handleConditionalBranch(context),
        .choice_star_stmt, .choice_plus_stmt => try p.handleChoiceBranch(context, node),
        .gather_point_stmt => try p.handleGatherPoint(context, &node),
        .knot_prototype => try p.handleKnotDecl(context),
        .stitch_prototype => try p.handleStitchDecl(context),
        .function_prototype => try p.handleFunctionDecl(context),
        else => try p.handleContentStmt(context, node),
    }
    return node;
}
|
|
|
|
/// Parses an entire file into a `.file` node. Parse errors are recovered
/// at statement granularity (synchronize, skip the newline, continue) so
/// that multiple diagnostics can be collected in a single pass.
///
/// Returns `error.OutOfMemory` if any allocation fails.
pub fn parseFile(p: *Parse) Error!*Ast.Node {
    var context = p.makeStmtContext(.block, null);
    try p.pushGrammar(.content);
    defer p.popGrammar();

    const main_token = p.nextToken();
    while (!p.checkToken(.eof)) {
        const node = parseStmt(p, &context) catch |err| switch (err) {
            error.ParseError => {
                // Resynchronize at the next statement boundary and keep
                // parsing so later errors are still reported.
                if (p.panic_mode) p.synchronize();
                p.eatToken(.newline);
                continue;
            },
            // Propagate allocation failure instead of aborting the process
            // (was `@panic`); `Error` already includes OutOfMemory and the
            // caller is better placed to decide how to react.
            error.OutOfMemory => return error.OutOfMemory,
        };
        try p.scratch.append(p.gpa, node);
    }

    // Flush any knot still being accumulated at end of file.
    const node = try p.collectKnot(&context);
    if (node) |n| try p.scratch.append(p.gpa, n);

    return p.makeNodeSlice(&context, .file, .{
        .start = main_token.loc.start,
        .end = p.token.loc.end,
    });
}
|