feat: improved parsing and regression test suite
parent 4ebdd3c66e
commit 619eb3b338
39 changed files with 1116 additions and 339 deletions
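Most of the churn in src/Parse.zig below is a mechanical migration: node constructors stop taking separate start/end byte offsets (bytes_start/bytes_end, source_start/source_end) and instead take a single span value, with free functions such as Ast.createBinaryNode replaced by decl-literal calls like `.createBinary(p.arena, tag, .{ .start = ..., .end = ... }, lhs, rhs)` that are later read back through `node.loc`. A minimal sketch of what the span-based Ast.Node API is assumed to look like — Ast.zig is among the 39 changed files but is not shown in this section, so the struct, field, and parameter names here are inferred from the call sites and are assumptions, not the actual implementation:

// Sketch only, assuming the shape implied by the new call sites in this diff.
const std = @import("std");

pub const Node = struct {
    pub const Tag = enum { expr_stmt, assign_stmt, divert_stmt, content_stmt };
    pub const Span = struct { start: usize, end: usize };

    tag: Tag,
    loc: Span, // replaces the old separate source_start/source_end fields
    lhs: ?*Node = null,
    rhs: ?*Node = null,

    // Old call sites: Ast.createBinaryNode(arena, tag, bytes_start, bytes_end, lhs, rhs).
    // New call sites pass one Span literal; with the result type known, the parser can
    // write `return .createBinary(p.arena, ...)` as seen throughout the hunks below.
    pub fn createBinary(
        arena: std.mem.Allocator,
        tag: Tag,
        loc: Span,
        lhs: ?*Node,
        rhs: ?*Node,
    ) std.mem.Allocator.Error!*Node {
        const node = try arena.create(Node);
        node.* = .{ .tag = tag, .loc = loc, .lhs = lhs, .rhs = rhs };
        return node;
    }
};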
src/Parse.zig (622 changed lines)
@@ -1,10 +1,9 @@
const std = @import("std");
const assert = std.debug.assert;
const tok = @import("tokenizer.zig");
const Ast = @import("Ast.zig");
const assert = std.debug.assert;
const Token = tok.Token;
const Tokenizer = tok.Tokenizer;
const Ast = @import("Ast.zig");

const Parse = @This();

gpa: std.mem.Allocator,
@@ -99,7 +98,7 @@ fn getTokenInfixType(tag: Token.Tag) Ast.Node.Tag {
.star => .multiply_expr,
.slash => .divide_expr,
// .question_mark => .contains_expr,
.equal => .assign_stmt,
// .equal => .assign_stmt,
.ampersand_ampersand, .keyword_and => .logical_and_expr,
.pipe_pipe, .keyword_or => .logical_or_expr,
.equal_equal => .logical_equality_expr,
@@ -165,7 +164,6 @@ fn checkTokenInSet(p: *const Parse, tag_set: []const Token.Tag) bool {

fn nextToken(p: *Parse) Token {
assert(p.grammar_stack.items.len > 0);

const token = p.token;
const context = p.grammar_stack.getLast();
p.token = p.tokenizer.next(context.grammar);
@@ -252,6 +250,7 @@ fn nodeListFromScratch(p: *Parse, start_offset: usize, end_offset: usize) Error!

const span = end_offset - start_offset;
assert(span > 0);

const list = try p.arena.alloc(*Ast.Node, span);
defer p.scratch.shrinkRetainingCapacity(start_offset);

@@ -261,7 +260,6 @@ fn nodeListFromScratch(p: *Parse, start_offset: usize, end_offset: usize) Error!
list[li] = p.scratch.items[i];
li += 1;
}

return list;
}

@@ -269,15 +267,14 @@ fn makeNodeSequence(
p: *Parse,
context: *const StmtContext,
tag: Ast.Node.Tag,
bytes_start: usize,
bytes_end: usize,
loc: Ast.Node.Span,
scratch_offset: usize,
) Error!*Ast.Node {
var list: ?[]*Ast.Node = null;
if (!p.isScratchEmpty(context)) {
list = try p.nodeListFromScratch(scratch_offset, p.scratch.items.len);
}
return Ast.createListNode(p.arena, tag, bytes_start, bytes_end, list);
return .createList(p.arena, tag, loc, list);
}

fn isBlockStackEmpty(p: *Parse, context: *const StmtContext) bool {
@@ -337,23 +334,20 @@ fn collectBlock(p: *Parse, context: *StmtContext, level: usize) Error!?*Ast.Node
const block = p.peekBlockStack(context);
if (block.level < level) return null;

const bytes_start = block.source_offset;
var bytes_end: usize = 0;
const span_start = block.source_offset;
var span_end: usize = 0;

if (!p.isScratchEmpty(context)) {
const last = p.peekScratch(context);
bytes_end = last.source_end;
span_end = last.loc.end;
} else {
bytes_end = bytes_start;
span_end = span_start;
}

var node = try p.makeNodeSequence(
context,
.block_stmt,
bytes_start,
bytes_end,
block.scratch_offset,
);
var node = try p.makeNodeSequence(context, .block_stmt, .{
.start = span_start,
.end = span_end,
}, block.scratch_offset);
node = try p.fixupBlock(context, node);
_ = p.popBlockStack(context);
return node;
@@ -407,13 +401,10 @@ fn collectContext(
}
}

const node = try p.makeNodeSequence(
context,
.choice_stmt,
choice_state.source_offset,
p.token.loc.start,
choice_state.scratch_offset,
);
const node = try p.makeNodeSequence(context, .choice_stmt, .{
.start = choice_state.source_offset,
.end = p.token.loc.start,
}, choice_state.scratch_offset);
try p.scratch.append(p.gpa, node);
}
if (!should_gather) return p.collectBlock(context, level);
@@ -431,10 +422,11 @@ fn collectStitch(p: *Parse, context: *StmtContext) Error!?*Ast.Node {
.function_prototype => .function_decl,
else => return node,
};
const span_start = proto.source_start;
const span_end = if (node) |n| n.source_end else proto.source_end;
_ = p.popScratch(context);
return Ast.createBinaryNode(p.arena, tag, span_start, span_end, proto, node);
return .createBinary(p.arena, tag, .{
.start = proto.loc.start,
.end = if (node) |n| n.loc.end else proto.loc.end,
}, proto, node);
}

fn collectKnot(p: *Parse, context: *StmtContext) Error!?*Ast.Node {
@@ -449,10 +441,10 @@ fn collectKnot(p: *Parse, context: *StmtContext) Error!?*Ast.Node {
const list = try p.nodeListFromScratch(p.knot_offset + 1, p.scratch.items.len);
defer _ = p.popScratch(context);

const bytes_start = proto.source_start;
const bytes_end = if (child) |n| n.source_end else proto.source_end;

return Ast.createKnotDeclNode(p.arena, .knot_decl, bytes_start, bytes_end, proto, list);
return .createKnot(p.arena, .knot_decl, .{
.start = proto.loc.start,
.end = if (child) |n| n.loc.end else proto.loc.end,
}, proto, list);
}

fn handleChoiceBranch(p: *Parse, context: *StmtContext, node: *Ast.Node) !void {
@@ -462,14 +454,14 @@ fn handleChoiceBranch(p: *Parse, context: *StmtContext, node: *Ast.Node) !void {
try p.block_stack.append(p.gpa, .{
.level = 0,
.scratch_offset = p.scratch.items.len,
.source_offset = node.source_start,
.source_offset = node.loc.start,
});
}
if (p.isChoiceStackEmpty(context)) {
try p.choice_stack.append(p.gpa, .{
.level = level,
.scratch_offset = p.scratch.items.len,
.source_offset = node.source_start,
.source_offset = node.loc.start,
});
} else {
const choice_state = p.peekChoiceStack(context);
@@ -483,11 +475,10 @@ fn handleChoiceBranch(p: *Parse, context: *StmtContext, node: *Ast.Node) !void {
.source_offset = p.token.loc.start,
});
}

try p.choice_stack.append(p.gpa, .{
.level = level,
.scratch_offset = p.scratch.items.len,
.source_offset = node.source_start,
.source_offset = node.loc.start,
});
} else if (level == choice_state.level) {
const t_node = try p.collectBlock(context, level);
@@ -500,16 +491,15 @@ fn handleChoiceBranch(p: *Parse, context: *StmtContext, node: *Ast.Node) !void {
}

fn handleGatherPoint(p: *Parse, context: *StmtContext, node: **Ast.Node) !void {
const token = p.token;
const main_token = p.token;
const level = context.level;

if (p.isBlockStackEmpty(context)) {
assert(p.isChoiceStackEmpty(context));

try p.block_stack.append(p.gpa, .{
.level = 0,
.scratch_offset = p.scratch.items.len,
.source_offset = node.*.source_start,
.source_offset = node.*.loc.start,
});
}
// Gather points terminate compound statements at the appropriate level.
@@ -522,20 +512,16 @@ fn handleGatherPoint(p: *Parse, context: *StmtContext, node: **Ast.Node) !void {
try p.block_stack.append(p.gpa, .{
.level = choice_state.level,
.scratch_offset = p.scratch.items.len,
.source_offset = node.*.source_start,
.source_offset = node.*.loc.start,
});
}
} else if (!p.isScratchEmpty(context)) {
const tmp = (try p.collectContext(context, level - 1, true)) orelse @panic("FUCK!");
if (tmp.tag == .choice_stmt) {
node.* = try Ast.createBinaryNode(
p.arena,
.gathered_stmt,
tmp.source_start,
token.loc.start,
tmp,
node.*,
);
node.* = try Ast.Node.createBinary(p.arena, .gathered_stmt, .{
.start = tmp.loc.start,
.end = main_token.loc.start,
}, tmp, node.*);
}
if (!p.isBlockStackEmpty(context)) {
const b = p.peekBlockStack(context);
@@ -553,7 +539,7 @@ fn handleContentStmt(p: *Parse, context: *StmtContext, node: *Ast.Node) !void {
try p.block_stack.append(p.gpa, .{
.level = 0,
.scratch_offset = p.scratch.items.len,
.source_offset = node.source_start,
.source_offset = node.loc.start,
});
}
if (!p.isChoiceStackEmpty(context)) {
@@ -564,7 +550,7 @@ fn handleContentStmt(p: *Parse, context: *StmtContext, node: *Ast.Node) !void {
try p.block_stack.append(p.gpa, .{
.level = choice_state.level,
.scratch_offset = p.scratch.items.len,
.source_offset = node.source_start,
.source_offset = node.loc.start,
});
}
}
@@ -600,10 +586,11 @@ fn expectExpr(p: *Parse) Error!*Ast.Node {
}

fn parseAtom(p: *Parse, tag: Ast.Node.Tag) Error!*Ast.Node {
const token = p.nextToken();
const span_start = token.loc.start;
const span_end = token.loc.end;
return Ast.createLeafNode(p.arena, tag, span_start, span_end);
const main_token = p.nextToken();
return .createLeaf(p.arena, tag, .{
.start = main_token.loc.start,
.end = main_token.loc.end,
});
}

fn parseIdentifier(p: *Parse) Error!*Ast.Node {
@@ -640,14 +627,15 @@ fn parsePrimaryExpr(p: *Parse) Error!?*Ast.Node {
fn parsePrefixExpr(p: *Parse) Error!?*Ast.Node {
switch (p.token.tag) {
.keyword_not, .minus, .exclaimation_mark => {
const token = p.nextToken();
const main_token = p.nextToken();
const lhs = try parsePrefixExpr(p);
if (lhs == null) return null;

const tag = getTokenPrefixType(token.tag);
const bytes_start = token.loc.start;
const bytes_end = if (lhs) |n| n.source_end else p.token.loc.end;
return Ast.createBinaryNode(p.arena, tag, bytes_start, bytes_end, lhs, null);
const tag = getTokenPrefixType(main_token.tag);
return .createBinary(p.arena, tag, .{
.start = main_token.loc.start,
.end = if (lhs) |n| n.loc.end else p.token.loc.end,
}, lhs, null);
},
else => return parsePrimaryExpr(p),
}
@@ -673,10 +661,10 @@ fn parseInfixExpr(
next_node = try parseInfixExpr(p, null, token_precedence);
if (next_node) |rhs| {
const tag = getTokenInfixType(token.tag);
const bytes_start = if (lhs) |n| n.source_start else token.loc.start;
const bytes_end = rhs.source_end;

lhs = try Ast.createBinaryNode(p.arena, tag, bytes_start, bytes_end, lhs, rhs);
lhs = try Ast.Node.createBinary(p.arena, tag, .{
.start = if (lhs) |n| n.loc.start else token.loc.start,
.end = rhs.loc.end,
}, lhs, rhs);
} else return null;
} else break;
}
@@ -689,7 +677,7 @@ fn parseExpression(p: *Parse) Error!?*Ast.Node {

fn parseStringExpr(p: *Parse) Error!*Ast.Node {
assert(p.token.tag == .double_quote);
const leading_token = p.nextToken();
const main_token = p.nextToken();

while (true) switch (p.token.tag) {
.double_quote, .newline, .eof => break,
@@ -697,34 +685,42 @@ fn parseStringExpr(p: *Parse) Error!*Ast.Node {
};

const last_token = try p.expectToken(.double_quote, true);
const span_start = leading_token.loc.start;
const span_end = p.token.loc.start;
const expr = try Ast.createLeafNode(p.arena, .string_literal, leading_token.loc.end, last_token.loc.start);
return Ast.createBinaryNode(p.arena, .string_expr, span_start, span_end, expr, null);
const expr = try Ast.Node.createLeaf(p.arena, .string_literal, .{
.start = main_token.loc.end,
.end = last_token.loc.start,
});
return .createBinary(p.arena, .string_expr, .{
.start = main_token.loc.start,
.end = p.token.loc.start,
}, expr, null);
}

fn parseContentString(p: *Parse, token_set: []const Token.Tag) Error!?*Ast.Node {
const main_token = p.token;
while (!p.checkTokenInSet(token_set)) _ = p.nextToken();

const span_start = main_token.loc.start;
const span_end = p.token.loc.start;
const tag: Ast.Node.Tag = if (span_start == span_end) .empty_string else .string_literal;
return Ast.createLeafNode(p.arena, tag, span_start, span_end);
return .createLeaf(p.arena, if (main_token.loc.start == p.token.loc.start)
.empty_string
else
.string_literal, .{
.start = main_token.loc.start,
.end = p.token.loc.start,
});
}

fn parseExprStmt(p: *Parse, lhs: ?*Ast.Node) Error!*Ast.Node {
const token = p.token;
const main_token = p.token;
const node = try parseInfixExpr(p, lhs, .none);
_ = try p.expectNewline();

const bytes_start = if (lhs) |n| n.source_start else token.loc.start;
const bytes_end = p.token.loc.start;
return Ast.createBinaryNode(p.arena, .expr_stmt, bytes_start, bytes_end, node, null);
return .createBinary(p.arena, .expr_stmt, .{
.start = if (lhs) |n| n.loc.start else main_token.loc.start,
.end = p.token.loc.start,
}, node, null);
}

fn parseAssignStmt(p: *Parse) Error!*Ast.Node {
const token = p.token;
const main_token = p.token;
const lhs = try parseIdentifierExpr(p);

if (!p.checkToken(.equal)) return parseExprStmt(p, lhs);
@@ -733,21 +729,24 @@ fn parseAssignStmt(p: *Parse) Error!*Ast.Node {
const rhs = try p.expectExpr();
_ = try p.expectNewline();

const bytes_start = token.loc.start;
const bytes_end = p.token.loc.start;
return Ast.createBinaryNode(p.arena, .assign_stmt, bytes_start, bytes_end, lhs, rhs);
return .createBinary(p.arena, .assign_stmt, .{
.start = main_token.loc.start,
.end = p.token.loc.start,
}, lhs, rhs);
}

fn parseTempDecl(p: *Parse) Error!*Ast.Node {
const token = p.nextToken();
const main_token = p.nextToken();
const lhs = try p.expectIdentifier();
_ = try p.expectToken(.equal, true);

const rhs = try p.expectExpr();
_ = try p.expectNewline();

const bytes_start = token.loc.start;
const bytes_end = p.token.loc.start;
return Ast.createBinaryNode(p.arena, .temp_decl, bytes_start, bytes_end, lhs, rhs);
return .createBinary(p.arena, .temp_decl, .{
.start = main_token.loc.start,
.end = p.token.loc.start,
}, lhs, rhs);
}

fn parseTildeStmt(p: *Parse) Error!*Ast.Node {
@@ -755,7 +754,7 @@ fn parseTildeStmt(p: *Parse) Error!*Ast.Node {
defer p.popGrammar();

_ = p.nextToken();
const node: *Ast.Node = switch (p.token.tag) {
const node = switch (p.token.tag) {
.keyword_temp => try parseTempDecl(p),
.keyword_return => try parseReturnStmt(p),
.identifier => try parseAssignStmt(p),
@@ -766,17 +765,17 @@ fn parseTildeStmt(p: *Parse) Error!*Ast.Node {

fn parseReturnStmt(p: *Parse) Error!*Ast.Node {
var node: ?*Ast.Node = null;
const token = p.nextToken();
const main_token = p.nextToken();

if (!p.checkToken(.newline) and !p.checkToken(.eof)) {
node = try parseInfixExpr(p, null, .none);
}

_ = try p.expectNewline();

const bytes_start = token.loc.start;
const bytes_end = p.token.loc.start;

return Ast.createBinaryNode(p.arena, .return_stmt, bytes_start, bytes_end, node, null);
return .createBinary(p.arena, .return_stmt, .{
.start = main_token.loc.start,
.end = p.token.loc.start,
}, node, null);
}

fn parseIdentifierExpr(p: *Parse) Error!*Ast.Node {
@@ -787,97 +786,44 @@ fn parseIdentifierExpr(p: *Parse) Error!*Ast.Node {
.dot => {
_ = p.nextToken();
const rhs = try p.expectIdentifier();
const bytes_start = lhs.source_start;
const bytes_end = p.token.loc.start;

lhs = try Ast.createBinaryNode(
p.arena,
.selector_expr,
bytes_start,
bytes_end,
lhs,
rhs,
);
lhs = try Ast.Node.createBinary(p.arena, .selector_expr, .{
.start = lhs.loc.start,
.end = p.token.loc.start,
}, lhs, rhs);
},
.left_paren => {
const rhs = try parseArgumentList(p);
const bytes_start = lhs.source_start;
const bytes_end = p.token.loc.start;

return Ast.createBinaryNode(
p.arena,
.call_expr,
bytes_start,
bytes_end,
lhs,
rhs,
);
return .createBinary(p.arena, .call_expr, .{
.start = lhs.loc.start,
.end = p.token.loc.start,
}, lhs, rhs);
},
else => return lhs,
}
}
}
fn parseContentExpr(p: *Parse, token_set: []const Token.Tag) Error!?*Ast.Node {
const main_token = p.token;
const context = makeStmtContext(p, .block, null);

while (true) {
var node: ?*Ast.Node = null;
if (!p.checkTokenInSet(token_set)) {
node = try parseContentString(p, token_set);
} else switch (p.token.tag) {
.eof, .newline, .right_brace => break,
// .left_brace => node = try parseLbraceExpr(p),
// .right_arrow => node = try parseDivertStmt(p),
//.INK_TT_GLUE => node = ink_parse_glue(p),
else => {
return p.fail(.unexpected_token, p.token);
},
}
if (node) |n| try p.scratch.append(p.gpa, n);
}

const span_start = main_token.loc.start;
const span_end = p.token.loc.start;
return p.makeNodeSequence(&context, .content, span_start, span_end, context.scratch_top);
fn parseDivertExpr(p: *Parse) Error!*Ast.Node {
const main_token = p.nextToken();
const node = try parseIdentifierExpr(p);
return .createBinary(p.arena, .divert_expr, .{
.start = main_token.loc.start,
.end = p.token.loc.start,
}, node, null);
}

fn parseArgumentList(p: *Parse) Error!?*Ast.Node {
const context = p.makeStmtContext(.block, null);
const token = try p.expectToken(.left_paren, true);
fn parseDivertStmt(p: *Parse) Error!*Ast.Node {
try p.pushGrammar(.expression);
defer p.popGrammar();

if (!p.checkToken(.right_paren)) {
var cnt: usize = 0;
while (true) : (cnt += 1) {
if (cnt == p.max_argument_count) {
return p.fail(.too_many_arguments, p.token);
}

const node = try parseInfixExpr(p, null, .none);
if (node) |n| try p.scratch.append(p.gpa, n);
if (p.checkToken(.comma)) {
_ = p.nextToken();
} else break;
}
}

_ = try p.expectToken(.right_paren, false);
const bytes_start = token.loc.start;
const bytes_end = p.token.loc.start;
return p.makeNodeSequence(&context, .argument_list, bytes_start, bytes_end, context.scratch_top);
}

fn parseContentStmt(p: *Parse) Error!*Ast.Node {
const token_set = [_]Token.Tag{
.left_brace, .right_brace, .right_arrow,
.glue, .newline, .eof,
};
const main_token = p.token;
const node = try parseContentExpr(p, &token_set);
const end_token = try p.expectNewline();
const span_start = main_token.loc.start;
const span_end = end_token.loc.start;
return Ast.createBinaryNode(p.arena, .content_stmt, span_start, span_end, node, null);
const node = try parseDivertExpr(p);
_ = try p.expectNewline();

return .createBinary(p.arena, .divert_stmt, .{
.start = main_token.loc.start,
.end = p.token.loc.start,
}, node, null);
}

fn parseChoiceExpr(p: *Parse) Error!?*Ast.Node {
@@ -886,7 +832,7 @@ fn parseChoiceExpr(p: *Parse) Error!?*Ast.Node {
.right_bracket, .right_arrow, .newline,
.eof,
};
const token = p.token;
const main_token = p.token;
var lhs: ?*Ast.Node = null;
var mhs: ?*Ast.Node = null;
var rhs: ?*Ast.Node = null;
@@ -920,50 +866,286 @@ fn parseChoiceExpr(p: *Parse) Error!?*Ast.Node {
}
}
}

const bytes_start = token.loc.start;
const bytes_end = p.token.loc.start;
return Ast.createChoiceExprNode(p.arena, .choice_expr, bytes_start, bytes_end, lhs, mhs, rhs);
return .createChoice(p.arena, .choice_expr, .{
.start = main_token.loc.start,
.end = p.token.loc.start,
}, lhs, mhs, rhs);
}

fn parseChoiceStmt(p: *Parse, context: *StmtContext) Error!*Ast.Node {
const token = p.token;
const level = p.eatTokenLooped(token.tag, true);
const main_token = p.token;
const level = p.eatTokenLooped(main_token.tag, true);
const node = try parseChoiceExpr(p);
const bytes_start = token.loc.start;
const bytes_end = if (node) |n| n.source_end else p.token.loc.start;

const span_end = if (node) |n| n.loc.end else p.token.loc.start;
context.level = level;

if (p.checkToken(.newline)) _ = p.nextToken();
const tag = getBranchTag(token.tag);
return Ast.createBinaryNode(p.arena, tag, bytes_start, bytes_end, node, null);

return .createBinary(p.arena, getBranchTag(main_token.tag), .{
.start = main_token.loc.start,
.end = span_end,
}, node, null);
}

fn parseGatherPointStmt(p: *Parse, context: *StmtContext) Error!*Ast.Node {
const token = p.token;
const level = p.eatTokenLooped(token.tag, true);
const bytes_start = token.loc.start;
const bytes_end = p.token.loc.start;

const main_token = p.token;
const level = p.eatTokenLooped(main_token.tag, true);
const span_end = p.token.loc.start;
context.level = level;

return Ast.createBinaryNode(p.arena, .gather_point_stmt, bytes_start, bytes_end, null, null);
p.eatToken(.whitespace);
p.eatToken(.newline);
return .createBinary(p.arena, .gather_point_stmt, .{
.start = main_token.loc.start,
.end = span_end,
}, null, null);
}

fn parseConditional(p: *Parse, main_token: Token, expr: ?*Ast.Node) Error!?*Ast.Node {
var context = p.makeStmtContext(.conditional, expr);
p.eatToken(.whitespace);

while (!p.checkToken(.eof) and !p.checkToken(.right_brace)) {
const node = parseStmt(p, &context) catch |err| switch (err) {
error.ParseError => {
if (p.panic_mode) p.synchronize();
p.eatToken(.newline);
continue;
},
else => |e| return e,
};
try p.scratch.append(p.gpa, node);
}

const end_token = try p.expectToken(.right_brace, true);
const node = try p.collectContext(&context, 0, false);
if (node) |n| try p.scratch.append(p.gpa, n);

const list = try p.nodeListFromScratch(context.scratch_top, p.scratch.items.len);
return .createSwitch(p.arena, if (expr != null and !context.is_block_created)
.switch_stmt
else if (expr == null and !context.is_block_created)
.multi_if_stmt
else
.if_stmt, .{
.start = main_token.loc.start,
.end = end_token.loc.start,
}, expr, list);
}

fn parseInlineIf(p: *Parse, main_token: Token, lhs: ?*Ast.Node) Error!?*Ast.Node {
const token_set = [_]Token.Tag{
.left_brace, .right_brace, .right_arrow,
.glue, .newline, .eof,
};
const scratch_top = p.scratch.items.len;

p.eatToken(.whitespace);

const content_node = try parseContentExpr(p, &token_set);
const end_token = try p.expectToken(.right_brace, true);
if (content_node) |n| try p.scratch.append(p.gpa, n);

const list = try p.nodeListFromScratch(scratch_top, p.scratch.items.len);
return try .createSwitch(p.arena, .if_stmt, .{
.start = main_token.loc.start,
.end = end_token.loc.end,
}, lhs, list);
}

fn parseLbraceExpr(p: *Parse) Error!?*Ast.Node {
const lbrace_token = p.token;
const lhs = n: {
try p.pushGrammar(.expression);
defer p.popGrammar();

_ = p.nextToken();
const node = try parseExpression(p);
break :n node;
};
if (lhs == null) {
try p.pushGrammar(.content);
defer p.popGrammar();

_ = try p.expectToken(.newline, true);
return parseConditional(p, lbrace_token, null);
}
if (p.checkToken(.right_brace)) {
const rbrace_token = p.nextToken();
return .createBinary(p.arena, .inline_logic_expr, .{
.start = lbrace_token.loc.start,
.end = rbrace_token.loc.end,
}, lhs, null);
}
_ = try p.expectToken(.colon, true);

if (p.checkToken(.newline)) {
_ = p.nextToken();
return parseConditional(p, lbrace_token, lhs);
} else {
_ = p.nextToken();
return parseInlineIf(p, lbrace_token, lhs);
}
}

fn parseContentExpr(p: *Parse, token_set: []const Token.Tag) Error!?*Ast.Node {
const main_token = p.token;
const context = makeStmtContext(p, .block, null);

while (true) {
var node: ?*Ast.Node = null;
if (!p.checkTokenInSet(token_set)) {
node = try parseContentString(p, token_set);
} else switch (p.token.tag) {
.eof, .newline, .right_brace => break,
.left_brace => node = try parseLbraceExpr(p),
.right_arrow => node = try parseDivertStmt(p),
//.INK_TT_GLUE => node = ink_parse_glue(p),
else => {
return p.fail(.unexpected_token, p.token);
},
}
if (node) |n| try p.scratch.append(p.gpa, n);
}
return p.makeNodeSequence(&context, .content, .{
.start = main_token.loc.start,
.end = p.token.loc.start,
}, context.scratch_top);
}

fn parseContentStmt(p: *Parse) Error!*Ast.Node {
const token_set = [_]Token.Tag{
.left_brace, .right_brace, .right_arrow,
.glue, .newline, .eof,
};
const main_token = p.token;
const node = try parseContentExpr(p, &token_set);
const end_token = try p.expectNewline();
return .createBinary(p.arena, .content_stmt, .{
.start = main_token.loc.start,
.end = end_token.loc.start,
}, node, null);
}

fn parseParameterDecl(p: *Parse) Error!*Ast.Node {
const tag: Ast.Node.Tag = if (p.checkToken(.keyword_ref)) blk: {
_ = p.nextToken();
break :blk .ref_parameter_decl;
} else .parameter_decl;

const node = try p.expectIdentifier();
node.tag = tag;
return node;
}

fn parseParameterList(p: *Parse) Error!?*Ast.Node {
const context = p.makeStmtContext(.block, null);
const main_token = try p.expectToken(.left_paren, true);

if (!p.checkToken(.right_paren)) {
var cnt: usize = 0;
while (true) : (cnt += 1) {
if (cnt == p.max_argument_count) {
return p.fail(.too_many_parameters, p.token);
}

const node = try parseParameterDecl(p);
try p.scratch.append(p.gpa, node);

if (p.checkToken(.comma)) {
_ = p.nextToken();
} else break;
}
}

_ = try p.expectToken(.right_paren, true);
return p.makeNodeSequence(&context, .parameter_list, .{
.start = main_token.loc.start,
.end = p.token.loc.start,
}, context.scratch_top);
}

fn parseArgumentList(p: *Parse) Error!?*Ast.Node {
const context = p.makeStmtContext(.block, null);
const main_token = try p.expectToken(.left_paren, true);

if (!p.checkToken(.right_paren)) {
var cnt: usize = 0;
while (true) : (cnt += 1) {
if (cnt == p.max_argument_count) {
return p.fail(.too_many_arguments, p.token);
}

const node = try parseInfixExpr(p, null, .none);
if (node) |n| try p.scratch.append(p.gpa, n);
if (p.checkToken(.comma)) {
_ = p.nextToken();
} else break;
}
}

_ = try p.expectToken(.right_paren, false);
return p.makeNodeSequence(&context, .argument_list, .{
.start = main_token.loc.start,
.end = p.token.loc.start,
}, context.scratch_top);
}

fn parseConditionalBranch(p: *Parse, tag: Ast.Node.Tag) Error!?*Ast.Node {
const main_token = p.token;
var node_tag = tag;
var node: ?*Ast.Node = null;

if (p.checkToken(.keyword_else)) {
_ = p.nextToken();
node_tag = .else_branch;
} else {
node = try parseExpression(p);
if (node == null) return null;
}
if (!p.checkToken(.colon)) return null;
_ = p.nextToken();
if (p.checkToken(.newline)) _ = p.nextToken();

return .createBinary(p.arena, node_tag, .{
.start = main_token.loc.start,
.end = p.token.loc.start,
}, node, null);
}

fn parseConditionalStmt(p: *Parse, context: *StmtContext) Error!*Ast.Node {
try p.pushGrammar(.expression);
defer p.popGrammar();
_ = p.nextToken();

const node = try parseConditionalBranch(p, if (context.expression_node) |_|
.switch_case
else
.if_branch);
if (node) |n| return n;

p.rewindGrammar();
try p.pushGrammar(.content);
defer p.popGrammar();

_ = p.nextToken();
return parseGatherPointStmt(p, context);
}

fn parseVar(p: *Parse, tag: Ast.Node.Tag) Error!*Ast.Node {
try p.pushGrammar(.expression);
defer p.popGrammar();

const token = p.nextToken();
const main_token = p.nextToken();
const lhs = try p.expectIdentifier();
_ = try p.expectToken(.equal, true);

const rhs = try p.expectExpr();
const last_token = try p.expectNewline();
const bytes_start = token.loc.start;
const bytes_end = last_token.loc.start;
return Ast.createBinaryNode(p.arena, tag, bytes_start, bytes_end, lhs, rhs);
return .createBinary(p.arena, tag, .{
.start = main_token.loc.start,
.end = last_token.loc.start,
}, lhs, rhs);
}

fn parseVarDecl(p: *Parse) Error!*Ast.Node {
@@ -976,6 +1158,43 @@ fn parseConstDecl(p: *Parse) Error!*Ast.Node {
return parseVar(p, .const_decl);
}

fn parseKnotDecl(p: *Parse) Error!*Ast.Node {
var tag: Ast.Node.Tag = .stitch_prototype;
var lhs: ?*Ast.Node = null;
var rhs: ?*Ast.Node = null;
const main_token = p.nextToken();

try p.pushGrammar(.expression);
defer p.popGrammar();

p.eatToken(.whitespace);
if (p.checkToken(.equal)) {
tag = .knot_prototype;

while (p.checkToken(.equal)) {
_ = p.nextToken();
}
}
if (p.checkToken(.keyword_function)) {
_ = p.nextToken();
tag = .function_prototype;
}

lhs = try p.expectIdentifier();
if (p.checkToken(.left_paren)) {
rhs = try parseParameterList(p);
}
while (p.checkToken(.equal) or p.checkToken(.equal_equal)) {
_ = p.nextToken();
}

_ = try p.expectNewline();
return .createBinary(p.arena, tag, .{
.start = main_token.loc.start,
.end = p.token.loc.start,
}, lhs, rhs);
}

fn parseStmt(p: *Parse, context: *StmtContext) Error!*Ast.Node {
p.eatToken(.whitespace);

@@ -983,15 +1202,14 @@ fn parseStmt(p: *Parse, context: *StmtContext) Error!*Ast.Node {
.star, .plus => try parseChoiceStmt(p, context),
.minus => switch (context.tag) {
.block => try parseGatherPointStmt(p, context),
else => unreachable,
//.conditional => try parseConditionalStmt(p, context),
.conditional => try parseConditionalStmt(p, context),
},
.equal, .equal_equal => switch (context.tag) {
//.block => try parseKnotDecl(p),
.block => try parseKnotDecl(p),
else => try parseContentStmt(p),
},
.tilde => try parseTildeStmt(p),
//.right_arrow => try parseDivertStmt(p),
.right_arrow => try parseDivertStmt(p),
.right_brace => {
const token = p.nextToken();
return p.fail(.unexpected_token, token);
@@ -1022,12 +1240,11 @@ pub fn parseFile(p: *Parse) Error!*Ast.Node {
try p.pushGrammar(.content);
defer p.popGrammar();

const token = p.nextToken();
const main_token = p.nextToken();
while (!p.checkToken(.eof)) {
const node = parseStmt(p, &context) catch |err| switch (err) {
error.ParseError => {
if (p.panic_mode) p.synchronize();

p.eatToken(.newline);
continue;
},
@@ -1039,7 +1256,8 @@ pub fn parseFile(p: *Parse) Error!*Ast.Node {
const node = try p.collectKnot(&context);
if (node) |n| try p.scratch.append(p.gpa, n);

const span_start = token.loc.start;
const span_end = p.token.loc.end;
return p.makeNodeSequence(&context, .file, span_start, span_end, context.scratch_top);
return p.makeNodeSequence(&context, .file, .{
.start = main_token.loc.start,
.end = p.token.loc.end,
}, context.scratch_top);
}