use singly linked lists for std.zig.parse

std.ast uses a singly linked list for lists of things. This is a
breaking change to the self-hosted parser API.

std.ast.Tree has been separated into a private "Parser" type which
represents in-progress parsing, and std.ast.Tree which has only
"output" data. This means a cleaner, but breaking, API for parse results.
Specifically, `tokens` and `errors` are no longer SegmentedList but a
slice.

The way to iterate over AST nodes has necessarily changed since lists of
nodes are now singly linked lists rather than SegmentedList.

From these changes, I observe the following on the
self-hosted-parser benchmark from ziglang/gotta-go-fast:

throughput: 45.6 MiB/s => 55.6 MiB/s
maxrss: 359 KB => 342 KB

This commit breaks the build; more updates are necessary to fix API
usage of the self-hosted parser.
This commit is contained in:
Andrew Kelley
2020-05-19 21:11:30 -04:00
parent ed137d25ef
commit 93384f7428
5 changed files with 3651 additions and 3373 deletions
+2 -53
View File
@@ -21,6 +21,8 @@ pub fn SinglyLinkedList(comptime T: type) type {
next: ?*Node = null,
data: T,
pub const Data = T;
pub fn init(data: T) Node {
return Node{
.data = data,
@@ -51,25 +53,6 @@ pub fn SinglyLinkedList(comptime T: type) type {
first: ?*Node = null,
/// Initialize a linked list.
///
/// Returns:
/// An empty linked list.
pub fn init() Self {
return Self{
.first = null,
};
}
/// Insert a new node after an existing one.
///
/// Arguments:
/// node: Pointer to a node in the list.
/// new_node: Pointer to the new node to insert.
pub fn insertAfter(list: *Self, node: *Node, new_node: *Node) void {
node.insertAfter(new_node);
}
/// Insert a new node at the head.
///
/// Arguments:
@@ -104,40 +87,6 @@ pub fn SinglyLinkedList(comptime T: type) type {
list.first = first.next;
return first;
}
/// Allocate a new node.
///
/// Arguments:
/// allocator: Dynamic memory allocator.
///
/// Returns:
/// A pointer to the new node.
pub fn allocateNode(list: *Self, allocator: *Allocator) !*Node {
return allocator.create(Node);
}
/// Deallocate a node.
///
/// Arguments:
/// node: Pointer to the node to deallocate.
/// allocator: Dynamic memory allocator.
pub fn destroyNode(list: *Self, node: *Node, allocator: *Allocator) void {
allocator.destroy(node);
}
/// Allocate and initialize a node and its data.
///
/// Arguments:
/// data: The data to put inside the node.
/// allocator: Dynamic memory allocator.
///
/// Returns:
/// A pointer to the new node.
pub fn createNode(list: *Self, data: T, allocator: *Allocator) !*Node {
var node = try list.allocateNode(allocator);
node.* = Node.init(data);
return node;
}
};
}
+467 -178
View File
@@ -1,42 +1,39 @@
const std = @import("../std.zig");
const assert = std.debug.assert;
const testing = std.testing;
const SegmentedList = std.SegmentedList;
const LinkedList = std.SinglyLinkedList;
const mem = std.mem;
const Token = std.zig.Token;
pub const TokenIndex = usize;
pub const Tree = struct {
/// Reference to externally-owned data.
source: []const u8,
tokens: TokenList,
/// undefined on parse error (errors not empty)
tokens: []const Token,
errors: []const Error,
/// undefined on parse error (when errors field is not empty)
root_node: *Node.Root,
arena_allocator: std.heap.ArenaAllocator,
errors: ErrorList,
arena: std.heap.ArenaAllocator.State,
gpa: *mem.Allocator,
/// translate-c uses this to avoid having to emit correct newlines
/// TODO get rid of this hack
generated: bool = false,
pub const TokenList = SegmentedList(Token, 64);
pub const ErrorList = SegmentedList(Error, 0);
pub fn deinit(self: *Tree) void {
// Here we copy the arena allocator into stack memory, because
// otherwise it would destroy itself while it was still working.
var arena_allocator = self.arena_allocator;
arena_allocator.deinit();
// self is destroyed
self.gpa.free(self.tokens);
self.gpa.free(self.errors);
self.arena.promote(self.gpa).deinit();
}
pub fn renderError(self: *Tree, parse_error: *Error, stream: var) !void {
return parse_error.render(&self.tokens, stream);
pub fn renderError(self: *Tree, parse_error: *const Error, stream: var) !void {
return parse_error.render(self.tokens, stream);
}
pub fn tokenSlice(self: *Tree, token_index: TokenIndex) []const u8 {
return self.tokenSlicePtr(self.tokens.at(token_index));
return self.tokenSlicePtr(self.tokens[token_index]);
}
pub fn tokenSlicePtr(self: *Tree, token: *const Token) []const u8 {
@@ -44,8 +41,8 @@ pub const Tree = struct {
}
pub fn getNodeSource(self: *const Tree, node: *const Node) []const u8 {
const first_token = self.tokens.at(node.firstToken());
const last_token = self.tokens.at(node.lastToken());
const first_token = self.tokens[node.firstToken()];
const last_token = self.tokens[node.lastToken()];
return self.source[first_token.start..last_token.end];
}
@@ -57,7 +54,7 @@ pub const Tree = struct {
};
/// Return the Location of the token relative to the offset specified by `start_index`.
pub fn tokenLocationPtr(self: *Tree, start_index: usize, token: *const Token) Location {
pub fn tokenLocationPtr(self: *Tree, start_index: usize, token: Token) Location {
var loc = Location{
.line = 0,
.column = 0,
@@ -85,11 +82,11 @@ pub const Tree = struct {
}
pub fn tokenLocation(self: *Tree, start_index: usize, token_index: TokenIndex) Location {
return self.tokenLocationPtr(start_index, self.tokens.at(token_index));
return self.tokenLocationPtr(start_index, self.tokens[token_index]);
}
pub fn tokensOnSameLine(self: *Tree, token1_index: TokenIndex, token2_index: TokenIndex) bool {
return self.tokensOnSameLinePtr(self.tokens.at(token1_index), self.tokens.at(token2_index));
return self.tokensOnSameLinePtr(self.tokens[token1_index], self.tokens[token2_index]);
}
pub fn tokensOnSameLinePtr(self: *Tree, token1: *const Token, token2: *const Token) bool {
@@ -103,7 +100,7 @@ pub const Tree = struct {
/// Skips over comments
pub fn prevToken(self: *Tree, token_index: TokenIndex) TokenIndex {
var index = token_index - 1;
while (self.tokens.at(index).id == Token.Id.LineComment) {
while (self.tokens[index].id == Token.Id.LineComment) {
index -= 1;
}
return index;
@@ -112,7 +109,7 @@ pub const Tree = struct {
/// Skips over comments
pub fn nextToken(self: *Tree, token_index: TokenIndex) TokenIndex {
var index = token_index + 1;
while (self.tokens.at(index).id == Token.Id.LineComment) {
while (self.tokens[index].id == Token.Id.LineComment) {
index += 1;
}
return index;
@@ -169,7 +166,7 @@ pub const Error = union(enum) {
DeclBetweenFields: DeclBetweenFields,
InvalidAnd: InvalidAnd,
pub fn render(self: *const Error, tokens: *Tree.TokenList, stream: var) !void {
pub fn render(self: *const Error, tokens: []const Token, stream: var) !void {
switch (self.*) {
.InvalidToken => |*x| return x.render(tokens, stream),
.ExpectedContainerMembers => |*x| return x.render(tokens, stream),
@@ -324,7 +321,7 @@ pub const Error = union(enum) {
pub const ExpectedCall = struct {
node: *Node,
pub fn render(self: *const ExpectedCall, tokens: *Tree.TokenList, stream: var) !void {
pub fn render(self: *const ExpectedCall, tokens: []const Token, stream: var) !void {
return stream.print("expected " ++ @tagName(@TagType(Node.SuffixOp.Op).Call) ++ ", found {}", .{
@tagName(self.node.id),
});
@@ -334,7 +331,7 @@ pub const Error = union(enum) {
pub const ExpectedCallOrFnProto = struct {
node: *Node,
pub fn render(self: *const ExpectedCallOrFnProto, tokens: *Tree.TokenList, stream: var) !void {
pub fn render(self: *const ExpectedCallOrFnProto, tokens: []const Token, stream: var) !void {
return stream.print("expected " ++ @tagName(@TagType(Node.SuffixOp.Op).Call) ++ " or " ++
@tagName(Node.Id.FnProto) ++ ", found {}", .{@tagName(self.node.id)});
}
@@ -344,8 +341,8 @@ pub const Error = union(enum) {
token: TokenIndex,
expected_id: Token.Id,
pub fn render(self: *const ExpectedToken, tokens: *Tree.TokenList, stream: var) !void {
const found_token = tokens.at(self.token);
pub fn render(self: *const ExpectedToken, tokens: []const Token, stream: var) !void {
const found_token = tokens[self.token];
switch (found_token.id) {
.Invalid => {
return stream.print("expected '{}', found invalid bytes", .{self.expected_id.symbol()});
@@ -362,8 +359,8 @@ pub const Error = union(enum) {
token: TokenIndex,
end_id: Token.Id,
pub fn render(self: *const ExpectedCommaOrEnd, tokens: *Tree.TokenList, stream: var) !void {
const actual_token = tokens.at(self.token);
pub fn render(self: *const ExpectedCommaOrEnd, tokens: []const Token, stream: var) !void {
const actual_token = tokens[self.token];
return stream.print("expected ',' or '{}', found '{}'", .{
self.end_id.symbol(),
actual_token.id.symbol(),
@@ -377,8 +374,8 @@ pub const Error = union(enum) {
token: TokenIndex,
pub fn render(self: *const ThisError, tokens: *Tree.TokenList, stream: var) !void {
const actual_token = tokens.at(self.token);
pub fn render(self: *const ThisError, tokens: []const Token, stream: var) !void {
const actual_token = tokens[self.token];
return stream.print(msg, .{actual_token.id.symbol()});
}
};
@@ -390,7 +387,7 @@ pub const Error = union(enum) {
token: TokenIndex,
pub fn render(self: *const ThisError, tokens: *Tree.TokenList, stream: var) !void {
pub fn render(self: *const ThisError, tokens: []const Token, stream: var) !void {
return stream.writeAll(msg);
}
};
@@ -400,6 +397,23 @@ pub const Error = union(enum) {
pub const Node = struct {
id: Id,
/// All the child Node types use this same Iterator state for their iteration.
pub const Iterator = struct {
parent_node: *const Node,
node: ?*LinkedList(*Node).Node,
index: usize,
pub fn next(it: *Iterator) ?*Node {
inline for (@typeInfo(Id).Enum.fields) |f| {
if (it.parent_node.id == @field(Id, f.name)) {
const T = @field(Node, f.name);
return @fieldParentPtr(T, "base", it.parent_node).iterateNext(it);
}
}
unreachable;
}
};
pub const Id = enum {
// Top level
Root,
@@ -473,11 +487,11 @@ pub const Node = struct {
return null;
}
pub fn iterate(base: *Node, index: usize) ?*Node {
pub fn iterate(base: *Node) Iterator {
inline for (@typeInfo(Id).Enum.fields) |f| {
if (base.id == @field(Id, f.name)) {
const T = @field(Node, f.name);
return @fieldParentPtr(T, "base", base).iterate(index);
return @fieldParentPtr(T, "base", base).iterate();
}
}
unreachable;
@@ -607,21 +621,35 @@ pub const Node = struct {
decls: DeclList,
eof_token: TokenIndex,
pub const DeclList = SegmentedList(*Node, 4);
pub const DeclList = LinkedList(*Node);
pub fn iterate(self: *Root, index: usize) ?*Node {
if (index < self.decls.len) {
return self.decls.at(index).*;
}
return null;
pub fn iterate(self: *const Root) Node.Iterator {
return .{ .parent_node = &self.base, .index = 0, .node = self.decls.first };
}
pub fn iterateNext(self: *const Root, it: *Node.Iterator) ?*Node {
const decl = it.node orelse return null;
it.node = decl.next;
return decl.data;
}
pub fn firstToken(self: *const Root) TokenIndex {
return if (self.decls.len == 0) self.eof_token else (self.decls.at(0).*).firstToken();
if (self.decls.first) |first| {
return first.data.firstToken();
} else {
return self.eof_token;
}
}
pub fn lastToken(self: *const Root) TokenIndex {
return if (self.decls.len == 0) self.eof_token else (self.decls.at(self.decls.len - 1).*).lastToken();
if (self.decls.first) |first| {
var node = first;
while (true) {
node = node.next orelse return node.data.lastToken();
}
} else {
return self.eof_token;
}
}
};
@@ -642,8 +670,13 @@ pub const Node = struct {
init_node: ?*Node,
semicolon_token: TokenIndex,
pub fn iterate(self: *VarDecl, index: usize) ?*Node {
var i = index;
pub fn iterate(self: *const VarDecl) Node.Iterator {
return .{ .parent_node = &self.base, .index = 0, .node = null };
}
pub fn iterateNext(self: *const VarDecl, it: *Node.Iterator) ?*Node {
var i = it.index;
it.index += 1;
if (self.type_node) |type_node| {
if (i < 1) return type_node;
@@ -668,6 +701,7 @@ pub const Node = struct {
return null;
}
pub fn firstToken(self: *const VarDecl) TokenIndex {
if (self.visib_token) |visib_token| return visib_token;
if (self.thread_local_token) |thread_local_token| return thread_local_token;
@@ -690,8 +724,13 @@ pub const Node = struct {
expr: *Node,
semicolon_token: TokenIndex,
pub fn iterate(self: *Use, index: usize) ?*Node {
var i = index;
pub fn iterate(self: *const Use) Node.Iterator {
return .{ .parent_node = &self.base, .index = 0, .node = null };
}
pub fn iterateNext(self: *const Use, it: *Node.Iterator) ?*Node {
var i = it.index;
it.index += 1;
if (i < 1) return self.expr;
i -= 1;
@@ -715,15 +754,16 @@ pub const Node = struct {
decls: DeclList,
rbrace_token: TokenIndex,
pub const DeclList = SegmentedList(*Node, 2);
pub const DeclList = LinkedList(*Node);
pub fn iterate(self: *ErrorSetDecl, index: usize) ?*Node {
var i = index;
pub fn iterate(self: *const ErrorSetDecl) Node.Iterator {
return .{ .parent_node = &self.base, .index = 0, .node = self.decls.first };
}
if (i < self.decls.len) return self.decls.at(i).*;
i -= self.decls.len;
return null;
pub fn iterateNext(self: *const ErrorSetDecl, it: *Node.Iterator) ?*Node {
const decl = it.node orelse return null;
it.node = decl.next;
return decl.data;
}
pub fn firstToken(self: *const ErrorSetDecl) TokenIndex {
@@ -752,8 +792,13 @@ pub const Node = struct {
Type: *Node,
};
pub fn iterate(self: *ContainerDecl, index: usize) ?*Node {
var i = index;
pub fn iterate(self: *const ContainerDecl) Node.Iterator {
return .{ .parent_node = &self.base, .index = 0, .node = self.fields_and_decls.first };
}
pub fn iterateNext(self: *const ContainerDecl, it: *Node.Iterator) ?*Node {
var i = it.index;
it.index += 1;
switch (self.init_arg_expr) {
.Type => |t| {
@@ -763,8 +808,10 @@ pub const Node = struct {
.None, .Enum => {},
}
if (i < self.fields_and_decls.len) return self.fields_and_decls.at(i).*;
i -= self.fields_and_decls.len;
if (it.node) |child| {
it.node = child.next;
return child.data;
}
return null;
}
@@ -790,8 +837,13 @@ pub const Node = struct {
value_expr: ?*Node,
align_expr: ?*Node,
pub fn iterate(self: *ContainerField, index: usize) ?*Node {
var i = index;
pub fn iterate(self: *const ContainerField) Node.Iterator {
return .{ .parent_node = &self.base, .index = 0, .node = null };
}
pub fn iterateNext(self: *const ContainerField, it: *Node.Iterator) ?*Node {
var i = it.index;
it.index += 1;
if (self.type_expr) |type_expr| {
if (i < 1) return type_expr;
@@ -837,8 +889,13 @@ pub const Node = struct {
doc_comments: ?*DocComment,
name_token: TokenIndex,
pub fn iterate(self: *ErrorTag, index: usize) ?*Node {
var i = index;
pub fn iterate(self: *const ErrorTag) Node.Iterator {
return .{ .parent_node = &self.base, .index = 0, .node = null };
}
pub fn iterateNext(self: *const ErrorTag, it: *Node.Iterator) ?*Node {
var i = it.index;
it.index += 1;
if (self.doc_comments) |comments| {
if (i < 1) return &comments.base;
@@ -861,7 +918,11 @@ pub const Node = struct {
base: Node = Node{ .id = .Identifier },
token: TokenIndex,
pub fn iterate(self: *Identifier, index: usize) ?*Node {
pub fn iterate(self: *const Identifier) Node.Iterator {
return .{ .parent_node = &self.base, .index = 0, .node = null };
}
pub fn iterateNext(self: *const Identifier, it: *Node.Iterator) ?*Node {
return null;
}
@@ -892,7 +953,7 @@ pub const Node = struct {
is_extern_prototype: bool = false, // TODO: Remove once extern fn rewriting is
is_async: bool = false, // TODO: remove once async fn rewriting is
pub const ParamList = SegmentedList(*Node, 2);
pub const ParamList = LinkedList(*Node);
pub const ReturnType = union(enum) {
Explicit: *Node,
@@ -900,16 +961,24 @@ pub const Node = struct {
Invalid: TokenIndex,
};
pub fn iterate(self: *FnProto, index: usize) ?*Node {
var i = index;
pub fn iterate(self: *const FnProto) Node.Iterator {
return .{ .parent_node = &self.base, .index = 0, .node = self.params.first };
}
pub fn iterateNext(self: *const FnProto, it: *Node.Iterator) ?*Node {
var i = it.index;
it.index += 1;
if (self.lib_name) |lib_name| {
if (i < 1) return lib_name;
i -= 1;
}
if (i < self.params.len) return self.params.at(self.params.len - i - 1).*;
i -= self.params.len;
if (it.node) |param| {
it.index -= 1;
it.node = param.next;
return param.data;
}
if (self.align_expr) |align_expr| {
if (i < 1) return align_expr;
@@ -963,8 +1032,13 @@ pub const Node = struct {
return_type: *Node,
};
pub fn iterate(self: *AnyFrameType, index: usize) ?*Node {
var i = index;
pub fn iterate(self: *const AnyFrameType) Node.Iterator {
return .{ .parent_node = &self.base, .index = 0, .node = null };
}
pub fn iterateNext(self: *const AnyFrameType, it: *Node.Iterator) ?*Node {
var i = it.index;
it.index += 1;
if (self.result) |result| {
if (i < 1) return result.return_type;
@@ -998,8 +1072,13 @@ pub const Node = struct {
type_expr: *Node,
};
pub fn iterate(self: *ParamDecl, index: usize) ?*Node {
var i = index;
pub fn iterate(self: *const ParamDecl) Node.Iterator {
return .{ .parent_node = &self.base, .index = 0, .node = null };
}
pub fn iterateNext(self: *const ParamDecl, it: *Node.Iterator) ?*Node {
var i = it.index;
it.index += 1;
if (i < 1) {
switch (self.param_type) {
@@ -1039,13 +1118,14 @@ pub const Node = struct {
pub const StatementList = Root.DeclList;
pub fn iterate(self: *Block, index: usize) ?*Node {
var i = index;
pub fn iterate(self: *const Block) Node.Iterator {
return .{ .parent_node = &self.base, .index = 0, .node = self.statements.first };
}
if (i < self.statements.len) return self.statements.at(i).*;
i -= self.statements.len;
return null;
pub fn iterateNext(self: *const Block, it: *Node.Iterator) ?*Node {
const child = it.node orelse return null;
it.node = child.next;
return child.data;
}
pub fn firstToken(self: *const Block) TokenIndex {
@@ -1067,8 +1147,13 @@ pub const Node = struct {
payload: ?*Node,
expr: *Node,
pub fn iterate(self: *Defer, index: usize) ?*Node {
var i = index;
pub fn iterate(self: *const Defer) Node.Iterator {
return .{ .parent_node = &self.base, .index = 0, .node = null };
}
pub fn iterateNext(self: *const Defer, it: *Node.Iterator) ?*Node {
var i = it.index;
it.index += 1;
if (i < 1) return self.expr;
i -= 1;
@@ -1091,8 +1176,13 @@ pub const Node = struct {
comptime_token: TokenIndex,
expr: *Node,
pub fn iterate(self: *Comptime, index: usize) ?*Node {
var i = index;
pub fn iterate(self: *const Comptime) Node.Iterator {
return .{ .parent_node = &self.base, .index = 0, .node = null };
}
pub fn iterateNext(self: *const Comptime, it: *Node.Iterator) ?*Node {
var i = it.index;
it.index += 1;
if (i < 1) return self.expr;
i -= 1;
@@ -1114,8 +1204,13 @@ pub const Node = struct {
nosuspend_token: TokenIndex,
expr: *Node,
pub fn iterate(self: *Nosuspend, index: usize) ?*Node {
var i = index;
pub fn iterate(self: *const Nosuspend) Node.Iterator {
return .{ .parent_node = &self.base, .index = 0, .node = null };
}
pub fn iterateNext(self: *const Nosuspend, it: *Node.Iterator) ?*Node {
var i = it.index;
it.index += 1;
if (i < 1) return self.expr;
i -= 1;
@@ -1138,8 +1233,13 @@ pub const Node = struct {
error_symbol: *Node,
rpipe: TokenIndex,
pub fn iterate(self: *Payload, index: usize) ?*Node {
var i = index;
pub fn iterate(self: *const Payload) Node.Iterator {
return .{ .parent_node = &self.base, .index = 0, .node = null };
}
pub fn iterateNext(self: *const Payload, it: *Node.Iterator) ?*Node {
var i = it.index;
it.index += 1;
if (i < 1) return self.error_symbol;
i -= 1;
@@ -1163,8 +1263,13 @@ pub const Node = struct {
value_symbol: *Node,
rpipe: TokenIndex,
pub fn iterate(self: *PointerPayload, index: usize) ?*Node {
var i = index;
pub fn iterate(self: *const PointerPayload) Node.Iterator {
return .{ .parent_node = &self.base, .index = 0, .node = null };
}
pub fn iterateNext(self: *const PointerPayload, it: *Node.Iterator) ?*Node {
var i = it.index;
it.index += 1;
if (i < 1) return self.value_symbol;
i -= 1;
@@ -1189,8 +1294,13 @@ pub const Node = struct {
index_symbol: ?*Node,
rpipe: TokenIndex,
pub fn iterate(self: *PointerIndexPayload, index: usize) ?*Node {
var i = index;
pub fn iterate(self: *const PointerIndexPayload) Node.Iterator {
return .{ .parent_node = &self.base, .index = 0, .node = null };
}
pub fn iterateNext(self: *const PointerIndexPayload, it: *Node.Iterator) ?*Node {
var i = it.index;
it.index += 1;
if (i < 1) return self.value_symbol;
i -= 1;
@@ -1218,8 +1328,13 @@ pub const Node = struct {
payload: ?*Node,
body: *Node,
pub fn iterate(self: *Else, index: usize) ?*Node {
var i = index;
pub fn iterate(self: *const Else) Node.Iterator {
return .{ .parent_node = &self.base, .index = 0, .node = null };
}
pub fn iterateNext(self: *const Else, it: *Node.Iterator) ?*Node {
var i = it.index;
it.index += 1;
if (self.payload) |payload| {
if (i < 1) return payload;
@@ -1250,16 +1365,24 @@ pub const Node = struct {
cases: CaseList,
rbrace: TokenIndex,
pub const CaseList = SegmentedList(*Node, 2);
pub const CaseList = LinkedList(*Node);
pub fn iterate(self: *Switch, index: usize) ?*Node {
var i = index;
pub fn iterate(self: *const Switch) Node.Iterator {
return .{ .parent_node = &self.base, .index = 0, .node = self.cases.first };
}
pub fn iterateNext(self: *const Switch, it: *Node.Iterator) ?*Node {
var i = it.index;
it.index += 1;
if (i < 1) return self.expr;
i -= 1;
if (i < self.cases.len) return self.cases.at(i).*;
i -= self.cases.len;
if (it.node) |child| {
it.index -= 1;
it.node = child.next;
return child.data;
}
return null;
}
@@ -1280,13 +1403,21 @@ pub const Node = struct {
payload: ?*Node,
expr: *Node,
pub const ItemList = SegmentedList(*Node, 1);
pub const ItemList = LinkedList(*Node);
pub fn iterate(self: *SwitchCase, index: usize) ?*Node {
var i = index;
pub fn iterate(self: *const SwitchCase) Node.Iterator {
return .{ .parent_node = &self.base, .index = 0, .node = self.items.first };
}
if (i < self.items.len) return self.items.at(i).*;
i -= self.items.len;
pub fn iterateNext(self: *const SwitchCase, it: *Node.Iterator) ?*Node {
var i = it.index;
it.index += 1;
if (it.node) |child| {
it.index -= 1;
it.node = child.next;
return child.data;
}
if (self.payload) |payload| {
if (i < 1) return payload;
@@ -1300,7 +1431,7 @@ pub const Node = struct {
}
pub fn firstToken(self: *const SwitchCase) TokenIndex {
return (self.items.at(0).*).firstToken();
return self.items.first.?.data.firstToken();
}
pub fn lastToken(self: *const SwitchCase) TokenIndex {
@@ -1312,7 +1443,11 @@ pub const Node = struct {
base: Node = Node{ .id = .SwitchElse },
token: TokenIndex,
pub fn iterate(self: *SwitchElse, index: usize) ?*Node {
pub fn iterate(self: *const SwitchElse) Node.Iterator {
return .{ .parent_node = &self.base, .index = 0, .node = null };
}
pub fn iterateNext(self: *const SwitchElse, it: *Node.Iterator) ?*Node {
return null;
}
@@ -1336,8 +1471,13 @@ pub const Node = struct {
body: *Node,
@"else": ?*Else,
pub fn iterate(self: *While, index: usize) ?*Node {
var i = index;
pub fn iterate(self: *const While) Node.Iterator {
return .{ .parent_node = &self.base, .index = 0, .node = null };
}
pub fn iterateNext(self: *const While, it: *Node.Iterator) ?*Node {
var i = it.index;
it.index += 1;
if (i < 1) return self.condition;
i -= 1;
@@ -1394,8 +1534,13 @@ pub const Node = struct {
body: *Node,
@"else": ?*Else,
pub fn iterate(self: *For, index: usize) ?*Node {
var i = index;
pub fn iterate(self: *const For) Node.Iterator {
return .{ .parent_node = &self.base, .index = 0, .node = null };
}
pub fn iterateNext(self: *const For, it: *Node.Iterator) ?*Node {
var i = it.index;
it.index += 1;
if (i < 1) return self.array_expr;
i -= 1;
@@ -1443,8 +1588,13 @@ pub const Node = struct {
body: *Node,
@"else": ?*Else,
pub fn iterate(self: *If, index: usize) ?*Node {
var i = index;
pub fn iterate(self: *const If) Node.Iterator {
return .{ .parent_node = &self.base, .index = 0, .node = null };
}
pub fn iterateNext(self: *const If, it: *Node.Iterator) ?*Node {
var i = it.index;
it.index += 1;
if (i < 1) return self.condition;
i -= 1;
@@ -1531,8 +1681,13 @@ pub const Node = struct {
UnwrapOptional,
};
pub fn iterate(self: *InfixOp, index: usize) ?*Node {
var i = index;
pub fn iterate(self: *const InfixOp) Node.Iterator {
return .{ .parent_node = &self.base, .index = 0, .node = null };
}
pub fn iterateNext(self: *const InfixOp, it: *Node.Iterator) ?*Node {
var i = it.index;
it.index += 1;
if (i < 1) return self.lhs;
i -= 1;
@@ -1649,8 +1804,13 @@ pub const Node = struct {
};
};
pub fn iterate(self: *PrefixOp, index: usize) ?*Node {
var i = index;
pub fn iterate(self: *const PrefixOp) Node.Iterator {
return .{ .parent_node = &self.base, .index = 0, .node = null };
}
pub fn iterateNext(self: *const PrefixOp, it: *Node.Iterator) ?*Node {
var i = it.index;
it.index += 1;
switch (self.op) {
.PtrType, .SliceType => |addr_of_info| {
@@ -1707,8 +1867,13 @@ pub const Node = struct {
name_token: TokenIndex,
expr: *Node,
pub fn iterate(self: *FieldInitializer, index: usize) ?*Node {
var i = index;
pub fn iterate(self: *const FieldInitializer) Node.Iterator {
return .{ .parent_node = &self.base, .index = 0, .node = null };
}
pub fn iterateNext(self: *const FieldInitializer, it: *Node.Iterator) ?*Node {
var i = it.index;
it.index += 1;
if (i < 1) return self.expr;
i -= 1;
@@ -1745,13 +1910,13 @@ pub const Node = struct {
Deref,
UnwrapOptional,
pub const InitList = SegmentedList(*Node, 2);
pub const InitList = LinkedList(*Node);
pub const Call = struct {
params: ParamList,
async_token: ?TokenIndex,
pub const ParamList = SegmentedList(*Node, 2);
pub const ParamList = LinkedList(*Node);
};
pub const Slice = struct {
@@ -1761,8 +1926,20 @@ pub const Node = struct {
};
};
pub fn iterate(self: *SuffixOp, index: usize) ?*Node {
var i = index;
pub fn iterate(self: *const SuffixOp) Node.Iterator {
return .{ .parent_node = &self.base, .index = 0,
.node = switch(self.op) {
.Call => |call| call.params.first,
.ArrayInitializer => |ai| ai.first,
.StructInitializer => |si| si.first,
else => null,
},
};
}
pub fn iterateNext(self: *const SuffixOp, it: *Node.Iterator) ?*Node {
var i = it.index;
it.index += 1;
switch (self.lhs) {
.node => |node| {
@@ -1773,9 +1950,12 @@ pub const Node = struct {
}
switch (self.op) {
.Call => |*call_info| {
if (i < call_info.params.len) return call_info.params.at(i).*;
i -= call_info.params.len;
.Call => |call_info| {
if (it.node) |child| {
it.index -= 1;
it.node = child.next;
return child.data;
}
},
.ArrayAccess => |index_expr| {
if (i < 1) return index_expr;
@@ -1794,13 +1974,19 @@ pub const Node = struct {
i -= 1;
}
},
.ArrayInitializer => |*exprs| {
if (i < exprs.len) return exprs.at(i).*;
i -= exprs.len;
.ArrayInitializer => |exprs| {
if (it.node) |child| {
it.index -= 1;
it.node = child.next;
return child.data;
}
},
.StructInitializer => |*fields| {
if (i < fields.len) return fields.at(i).*;
i -= fields.len;
.StructInitializer => |fields| {
if (it.node) |child| {
it.index -= 1;
it.node = child.next;
return child.data;
}
},
.UnwrapOptional,
.Deref,
@@ -1832,8 +2018,13 @@ pub const Node = struct {
expr: *Node,
rparen: TokenIndex,
pub fn iterate(self: *GroupedExpression, index: usize) ?*Node {
var i = index;
pub fn iterate(self: *const GroupedExpression) Node.Iterator {
return .{ .parent_node = &self.base, .index = 0, .node = null };
}
pub fn iterateNext(self: *const GroupedExpression, it: *Node.Iterator) ?*Node {
var i = it.index;
it.index += 1;
if (i < 1) return self.expr;
i -= 1;
@@ -1862,8 +2053,13 @@ pub const Node = struct {
Return,
};
pub fn iterate(self: *ControlFlowExpression, index: usize) ?*Node {
var i = index;
pub fn iterate(self: *const ControlFlowExpression) Node.Iterator {
return .{ .parent_node = &self.base, .index = 0, .node = null };
}
pub fn iterateNext(self: *const ControlFlowExpression, it: *Node.Iterator) ?*Node {
var i = it.index;
it.index += 1;
switch (self.kind) {
.Break, .Continue => |maybe_label| {
@@ -1910,8 +2106,13 @@ pub const Node = struct {
suspend_token: TokenIndex,
body: ?*Node,
pub fn iterate(self: *Suspend, index: usize) ?*Node {
var i = index;
pub fn iterate(self: *const Suspend) Node.Iterator {
return .{ .parent_node = &self.base, .index = 0, .node = null };
}
pub fn iterateNext(self: *const Suspend, it: *Node.Iterator) ?*Node {
var i = it.index;
it.index += 1;
if (self.body) |body| {
if (i < 1) return body;
@@ -1938,7 +2139,11 @@ pub const Node = struct {
base: Node = Node{ .id = .IntegerLiteral },
token: TokenIndex,
pub fn iterate(self: *IntegerLiteral, index: usize) ?*Node {
pub fn iterate(self: *const IntegerLiteral) Node.Iterator {
return .{ .parent_node = &self.base, .index = 0, .node = null };
}
pub fn iterateNext(self: *const IntegerLiteral, it: *Node.Iterator) ?*Node {
return null;
}
@@ -1956,7 +2161,11 @@ pub const Node = struct {
dot: TokenIndex,
name: TokenIndex,
pub fn iterate(self: *EnumLiteral, index: usize) ?*Node {
pub fn iterate(self: *const EnumLiteral) Node.Iterator {
return .{ .parent_node = &self.base, .index = 0, .node = null };
}
pub fn iterateNext(self: *const EnumLiteral, it: *Node.Iterator) ?*Node {
return null;
}
@@ -1973,7 +2182,11 @@ pub const Node = struct {
base: Node = Node{ .id = .FloatLiteral },
token: TokenIndex,
pub fn iterate(self: *FloatLiteral, index: usize) ?*Node {
pub fn iterate(self: *const FloatLiteral) Node.Iterator {
return .{ .parent_node = &self.base, .index = 0, .node = null };
}
pub fn iterateNext(self: *const FloatLiteral, it: *Node.Iterator) ?*Node {
return null;
}
@@ -1992,15 +2205,16 @@ pub const Node = struct {
params: ParamList,
rparen_token: TokenIndex,
pub const ParamList = SegmentedList(*Node, 2);
pub const ParamList = LinkedList(*Node);
pub fn iterate(self: *BuiltinCall, index: usize) ?*Node {
var i = index;
pub fn iterate(self: *const BuiltinCall) Node.Iterator {
return .{ .parent_node = &self.base, .index = 0, .node = self.params.first };
}
if (i < self.params.len) return self.params.at(i).*;
i -= self.params.len;
return null;
pub fn iterateNext(self: *const BuiltinCall, it: *Node.Iterator) ?*Node {
const param = it.node orelse return null;
it.node = param.next;
return param.data;
}
pub fn firstToken(self: *const BuiltinCall) TokenIndex {
@@ -2016,7 +2230,11 @@ pub const Node = struct {
base: Node = Node{ .id = .StringLiteral },
token: TokenIndex,
pub fn iterate(self: *StringLiteral, index: usize) ?*Node {
pub fn iterate(self: *const StringLiteral) Node.Iterator {
return .{ .parent_node = &self.base, .index = 0, .node = null };
}
pub fn iterateNext(self: *const StringLiteral, it: *Node.Iterator) ?*Node {
return null;
}
@@ -2033,18 +2251,25 @@ pub const Node = struct {
base: Node = Node{ .id = .MultilineStringLiteral },
lines: LineList,
pub const LineList = SegmentedList(TokenIndex, 4);
pub const LineList = LinkedList(TokenIndex);
pub fn iterate(self: *MultilineStringLiteral, index: usize) ?*Node {
pub fn iterate(self: *const MultilineStringLiteral) Node.Iterator {
return .{ .parent_node = &self.base, .index = 0, .node = null };
}
pub fn iterateNext(self: *const MultilineStringLiteral, it: *Node.Iterator) ?*Node {
return null;
}
pub fn firstToken(self: *const MultilineStringLiteral) TokenIndex {
return self.lines.at(0).*;
return self.lines.first.?.data;
}
pub fn lastToken(self: *const MultilineStringLiteral) TokenIndex {
return self.lines.at(self.lines.len - 1).*;
var node = self.lines.first.?;
while (true) {
node = node.next orelse return node.data;
}
}
};
@@ -2052,7 +2277,11 @@ pub const Node = struct {
base: Node = Node{ .id = .CharLiteral },
token: TokenIndex,
pub fn iterate(self: *CharLiteral, index: usize) ?*Node {
pub fn iterate(self: *const CharLiteral) Node.Iterator {
return .{ .parent_node = &self.base, .index = 0, .node = null };
}
pub fn iterateNext(self: *const CharLiteral, it: *Node.Iterator) ?*Node {
return null;
}
@@ -2069,7 +2298,11 @@ pub const Node = struct {
base: Node = Node{ .id = .BoolLiteral },
token: TokenIndex,
pub fn iterate(self: *BoolLiteral, index: usize) ?*Node {
pub fn iterate(self: *const BoolLiteral) Node.Iterator {
return .{ .parent_node = &self.base, .index = 0, .node = null };
}
pub fn iterateNext(self: *const BoolLiteral, it: *Node.Iterator) ?*Node {
return null;
}
@@ -2086,7 +2319,11 @@ pub const Node = struct {
base: Node = Node{ .id = .NullLiteral },
token: TokenIndex,
pub fn iterate(self: *NullLiteral, index: usize) ?*Node {
pub fn iterate(self: *const NullLiteral) Node.Iterator {
return .{ .parent_node = &self.base, .index = 0, .node = null };
}
pub fn iterateNext(self: *const NullLiteral, it: *Node.Iterator) ?*Node {
return null;
}
@@ -2103,7 +2340,11 @@ pub const Node = struct {
base: Node = Node{ .id = .UndefinedLiteral },
token: TokenIndex,
pub fn iterate(self: *UndefinedLiteral, index: usize) ?*Node {
pub fn iterate(self: *const UndefinedLiteral) Node.Iterator {
return .{ .parent_node = &self.base, .index = 0, .node = null };
}
pub fn iterateNext(self: *const UndefinedLiteral, it: *Node.Iterator) ?*Node {
return null;
}
@@ -2129,8 +2370,13 @@ pub const Node = struct {
Return: *Node,
};
pub fn iterate(self: *AsmOutput, index: usize) ?*Node {
var i = index;
pub fn iterate(self: *const AsmOutput) Node.Iterator {
return .{ .parent_node = &self.base, .index = 0, .node = null };
}
pub fn iterateNext(self: *const AsmOutput, it: *Node.Iterator) ?*Node {
var i = it.index;
it.index += 1;
if (i < 1) return self.symbolic_name;
i -= 1;
@@ -2169,8 +2415,13 @@ pub const Node = struct {
expr: *Node,
rparen: TokenIndex,
pub fn iterate(self: *AsmInput, index: usize) ?*Node {
var i = index;
pub fn iterate(self: *const AsmInput) Node.Iterator {
return .{ .parent_node = &self.base, .index = 0, .node = null };
}
pub fn iterateNext(self: *const AsmInput, it: *Node.Iterator) ?*Node {
var i = it.index;
it.index += 1;
if (i < 1) return self.symbolic_name;
i -= 1;
@@ -2203,18 +2454,31 @@ pub const Node = struct {
clobbers: ClobberList,
rparen: TokenIndex,
pub const OutputList = SegmentedList(*AsmOutput, 2);
pub const InputList = SegmentedList(*AsmInput, 2);
pub const ClobberList = SegmentedList(*Node, 2);
pub const OutputList = LinkedList(*AsmOutput);
pub const InputList = LinkedList(*AsmInput);
pub const ClobberList = LinkedList(*Node);
pub fn iterate(self: *Asm, index: usize) ?*Node {
var i = index;
pub fn iterate(self: *const Asm) Node.Iterator {
return .{ .parent_node = &self.base, .index = 0, .node = null};
}
if (i < self.outputs.len) return &self.outputs.at(i).*.base;
i -= self.outputs.len;
pub fn iterateNext(self: *const Asm, it: *Node.Iterator) ?*Node {
var i = it.index;
it.index += 1;
if (i < self.inputs.len) return &self.inputs.at(i).*.base;
i -= self.inputs.len;
var output: ?*LinkedList(*AsmOutput).Node = self.outputs.first;
while (output) |o| {
if (i < 1) return &o.data.base;
i -= 1;
output = o.next;
}
var input: ?*LinkedList(*AsmInput).Node = self.inputs.first;
while (input) |o| {
if (i < 1) return &o.data.base;
i -= 1;
input = o.next;
}
return null;
}
@@ -2232,7 +2496,11 @@ pub const Node = struct {
base: Node = Node{ .id = .Unreachable },
token: TokenIndex,
pub fn iterate(self: *Unreachable, index: usize) ?*Node {
pub fn iterate(self: *const Unreachable) Node.Iterator {
return .{ .parent_node = &self.base, .index = 0, .node = null };
}
pub fn iterateNext(self: *const Unreachable, it: *Node.Iterator) ?*Node {
return null;
}
@@ -2249,7 +2517,11 @@ pub const Node = struct {
base: Node = Node{ .id = .ErrorType },
token: TokenIndex,
pub fn iterate(self: *ErrorType, index: usize) ?*Node {
pub fn iterate(self: *const ErrorType) Node.Iterator {
return .{ .parent_node = &self.base, .index = 0, .node = null };
}
pub fn iterateNext(self: *const ErrorType, it: *Node.Iterator) ?*Node {
return null;
}
@@ -2266,7 +2538,11 @@ pub const Node = struct {
base: Node = Node{ .id = .VarType },
token: TokenIndex,
pub fn iterate(self: *VarType, index: usize) ?*Node {
pub fn iterate(self: *const VarType) Node.Iterator {
return .{ .parent_node = &self.base, .index = 0, .node = null };
}
pub fn iterateNext(self: *const VarType, it: *Node.Iterator) ?*Node {
return null;
}
@@ -2283,18 +2559,25 @@ pub const Node = struct {
base: Node = Node{ .id = .DocComment },
lines: LineList,
pub const LineList = SegmentedList(TokenIndex, 4);
pub const LineList = LinkedList(TokenIndex);
pub fn iterate(self: *DocComment, index: usize) ?*Node {
pub fn iterate(self: *const DocComment) Node.Iterator {
return .{ .parent_node = &self.base, .index = 0, .node = null };
}
pub fn iterateNext(self: *const DocComment, it: *Node.Iterator) ?*Node {
return null;
}
pub fn firstToken(self: *const DocComment) TokenIndex {
return self.lines.at(0).*;
return self.lines.first.?.data;
}
pub fn lastToken(self: *const DocComment) TokenIndex {
return self.lines.at(self.lines.len - 1).*;
var node = self.lines.first.?;
while (true) {
node = node.next orelse return node.data;
}
}
};
@@ -2305,8 +2588,13 @@ pub const Node = struct {
name: *Node,
body_node: *Node,
pub fn iterate(self: *TestDecl, index: usize) ?*Node {
var i = index;
pub fn iterate(self: *const TestDecl) Node.Iterator {
return .{ .parent_node = &self.base, .index = 0, .node = null };
}
pub fn iterateNext(self: *const TestDecl, it: *Node.Iterator) ?*Node {
var i = it.index;
it.index += 1;
if (i < 1) return self.body_node;
i -= 1;
@@ -2331,5 +2619,6 @@ test "iterate" {
.eof_token = 0,
};
var base = &root.base;
testing.expect(base.iterate(0) == null);
var it = base.iterate();
testing.expect(it.next() == null);
}
+3117 -3071
View File
@@ -7,3303 +7,3349 @@ const Tree = ast.Tree;
const AstError = ast.Error;
const TokenIndex = ast.TokenIndex;
const Token = std.zig.Token;
const TokenIterator = Tree.TokenList.Iterator;
pub const Error = error{ParseError} || Allocator.Error;
/// Result should be freed with tree.deinit() when there are
/// no more references to any of the tokens or nodes.
pub fn parse(allocator: *Allocator, source: []const u8) Allocator.Error!*Tree {
const tree = blk: {
// This block looks unnecessary, but is a "foot-shield" to prevent the SegmentedLists
// from being initialized with a pointer to this `arena`, which is created on
// the stack. Following code should instead refer to `&tree.arena_allocator`, a
// pointer to data which lives safely on the heap and will outlive `parse`. See:
// https://github.com/ziglang/zig/commit/cb4fb14b6e66bd213575f69eec9598be8394fae6
var arena = std.heap.ArenaAllocator.init(allocator);
errdefer arena.deinit();
const tree = try arena.allocator.create(ast.Tree);
tree.* = .{
.source = source,
.root_node = undefined,
.arena_allocator = arena,
.tokens = undefined,
.errors = undefined,
};
break :blk tree;
};
errdefer tree.deinit();
const arena = &tree.arena_allocator.allocator;
tree.tokens = ast.Tree.TokenList.init(arena);
tree.errors = ast.Tree.ErrorList.init(arena);
pub fn parse(gpa: *Allocator, source: []const u8) Allocator.Error!*Tree {
// TODO optimization idea: ensureCapacity on the tokens list and
// then appendAssumeCapacity inside the loop.
var tokens = std.ArrayList(Token).init(gpa);
defer tokens.deinit();
var tokenizer = std.zig.Tokenizer.init(source);
while (true) {
const tree_token = try tree.tokens.addOne();
const tree_token = try tokens.addOne();
tree_token.* = tokenizer.next();
if (tree_token.id == .Eof) break;
}
var it = tree.tokens.iterator(0);
while (it.peek().?.id == .LineComment) _ = it.next();
var parser: Parser = .{
.source = source,
.arena = std.heap.ArenaAllocator.init(gpa),
.gpa = gpa,
.tokens = tokens.items,
.errors = .{},
.tok_i = 0,
};
defer parser.errors.deinit(gpa);
errdefer parser.arena.deinit();
tree.root_node = try parseRoot(arena, &it, tree);
while (tokens.items[parser.tok_i].id == .LineComment) parser.tok_i += 1;
const root_node = try parser.parseRoot();
const tree = try parser.arena.allocator.create(Tree);
tree.* = .{
.gpa = gpa,
.source = source,
.tokens = tokens.toOwnedSlice(),
.errors = parser.errors.toOwnedSlice(gpa),
.root_node = root_node,
.arena = parser.arena.state,
};
return tree;
}
/// Root <- skip ContainerMembers eof
fn parseRoot(arena: *Allocator, it: *TokenIterator, tree: *Tree) Allocator.Error!*Node.Root {
const node = try arena.create(Node.Root);
node.* = .{
.decls = try parseContainerMembers(arena, it, tree, true),
// parseContainerMembers will try to skip as much
// invalid tokens as it can so this can only be the EOF
.eof_token = eatToken(it, .Eof).?,
};
return node;
}
/// Represents in-progress parsing, will be converted to an ast.Tree after completion.
const Parser = struct {
arena: std.heap.ArenaAllocator,
gpa: *Allocator,
source: []const u8,
tokens: []const Token,
tok_i: TokenIndex,
errors: std.ArrayListUnmanaged(AstError),
/// ContainerMembers
/// <- TestDecl ContainerMembers
/// / TopLevelComptime ContainerMembers
/// / KEYWORD_pub? TopLevelDecl ContainerMembers
/// / ContainerField COMMA ContainerMembers
/// / ContainerField
/// /
fn parseContainerMembers(arena: *Allocator, it: *TokenIterator, tree: *Tree, top_level: bool) !Node.Root.DeclList {
var list = Node.Root.DeclList.init(arena);
/// Root <- skip ContainerMembers eof
fn parseRoot(p: *Parser) Allocator.Error!*Node.Root {
const node = try p.arena.allocator.create(Node.Root);
node.* = .{
.decls = try parseContainerMembers(p, true),
// parseContainerMembers will try to skip as much
// invalid tokens as it can so this can only be the EOF
.eof_token = p.eatToken(.Eof).?,
};
return node;
}
var field_state: union(enum) {
/// no fields have been seen
none,
/// currently parsing fields
seen,
/// saw fields and then a declaration after them.
/// payload is first token of previous declaration.
end: TokenIndex,
/// ther was a declaration between fields, don't report more errors
err,
} = .none;
fn llpush(
p: *Parser,
comptime T: type,
it: *?*std.SinglyLinkedList(T).Node,
data: T,
) !*?*std.SinglyLinkedList(T).Node {
const llnode = try p.arena.allocator.create(std.SinglyLinkedList(T).Node);
llnode.* = .{ .data = data };
it.* = llnode;
return &llnode.next;
}
while (true) {
if (try parseContainerDocComments(arena, it, tree)) |node| {
try list.push(node);
continue;
}
/// ContainerMembers
/// <- TestDecl ContainerMembers
/// / TopLevelComptime ContainerMembers
/// / KEYWORD_pub? TopLevelDecl ContainerMembers
/// / ContainerField COMMA ContainerMembers
/// / ContainerField
/// /
fn parseContainerMembers(p: *Parser, top_level: bool) !Node.Root.DeclList {
var list = Node.Root.DeclList{};
var list_it = &list.first;
const doc_comments = try parseDocComment(arena, it, tree);
var field_state: union(enum) {
/// no fields have been seen
none,
/// currently parsing fields
seen,
/// saw fields and then a declaration after them.
/// payload is first token of previous declaration.
end: TokenIndex,
/// ther was a declaration between fields, don't report more errors
err,
} = .none;
if (parseTestDecl(arena, it, tree) catch |err| switch (err) {
error.OutOfMemory => return error.OutOfMemory,
error.ParseError => {
findNextContainerMember(it);
while (true) {
if (try p.parseContainerDocComments()) |node| {
list_it = try p.llpush(*Node, list_it, node);
continue;
},
}) |node| {
if (field_state == .seen) {
field_state = .{ .end = node.firstToken() };
}
node.cast(Node.TestDecl).?.doc_comments = doc_comments;
try list.push(node);
continue;
}
if (parseTopLevelComptime(arena, it, tree) catch |err| switch (err) {
error.OutOfMemory => return error.OutOfMemory,
error.ParseError => {
findNextContainerMember(it);
const doc_comments = try p.parseDocComment();
if (p.parseTestDecl() catch |err| switch (err) {
error.OutOfMemory => return error.OutOfMemory,
error.ParseError => {
p.findNextContainerMember();
continue;
},
}) |node| {
if (field_state == .seen) {
field_state = .{ .end = node.firstToken() };
}
node.cast(Node.TestDecl).?.doc_comments = doc_comments;
list_it = try p.llpush(*Node, list_it, node);
continue;
},
}) |node| {
if (field_state == .seen) {
field_state = .{ .end = node.firstToken() };
}
node.cast(Node.Comptime).?.doc_comments = doc_comments;
try list.push(node);
continue;
}
const visib_token = eatToken(it, .Keyword_pub);
if (parseTopLevelDecl(arena, it, tree) catch |err| switch (err) {
error.OutOfMemory => return error.OutOfMemory,
error.ParseError => {
findNextContainerMember(it);
if (p.parseTopLevelComptime() catch |err| switch (err) {
error.OutOfMemory => return error.OutOfMemory,
error.ParseError => {
p.findNextContainerMember();
continue;
},
}) |node| {
if (field_state == .seen) {
field_state = .{ .end = node.firstToken() };
}
node.cast(Node.Comptime).?.doc_comments = doc_comments;
list_it = try p.llpush(*Node, list_it, node);
continue;
},
}) |node| {
if (field_state == .seen) {
field_state = .{ .end = visib_token orelse node.firstToken() };
}
switch (node.id) {
.FnProto => {
node.cast(Node.FnProto).?.doc_comments = doc_comments;
node.cast(Node.FnProto).?.visib_token = visib_token;
const visib_token = p.eatToken(.Keyword_pub);
if (p.parseTopLevelDecl() catch |err| switch (err) {
error.OutOfMemory => return error.OutOfMemory,
error.ParseError => {
p.findNextContainerMember();
continue;
},
.VarDecl => {
node.cast(Node.VarDecl).?.doc_comments = doc_comments;
node.cast(Node.VarDecl).?.visib_token = visib_token;
},
.Use => {
node.cast(Node.Use).?.doc_comments = doc_comments;
node.cast(Node.Use).?.visib_token = visib_token;
},
else => unreachable,
}
try list.push(node);
if (try parseAppendedDocComment(arena, it, tree, node.lastToken())) |appended_comment| {
}) |node| {
if (field_state == .seen) {
field_state = .{ .end = visib_token orelse node.firstToken() };
}
switch (node.id) {
.FnProto => {},
.VarDecl => node.cast(Node.VarDecl).?.doc_comments = appended_comment,
.Use => node.cast(Node.Use).?.doc_comments = appended_comment,
.FnProto => {
node.cast(Node.FnProto).?.doc_comments = doc_comments;
node.cast(Node.FnProto).?.visib_token = visib_token;
},
.VarDecl => {
node.cast(Node.VarDecl).?.doc_comments = doc_comments;
node.cast(Node.VarDecl).?.visib_token = visib_token;
},
.Use => {
node.cast(Node.Use).?.doc_comments = doc_comments;
node.cast(Node.Use).?.visib_token = visib_token;
},
else => unreachable,
}
}
continue;
}
if (visib_token != null) {
try tree.errors.push(.{
.ExpectedPubItem = .{ .token = it.index },
});
// ignore this pub
continue;
}
if (parseContainerField(arena, it, tree) catch |err| switch (err) {
error.OutOfMemory => return error.OutOfMemory,
error.ParseError => {
// attempt to recover
findNextContainerMember(it);
list_it = try p.llpush(*Node, list_it, node);
if (try p.parseAppendedDocComment(node.lastToken())) |appended_comment| {
switch (node.id) {
.FnProto => {},
.VarDecl => node.cast(Node.VarDecl).?.doc_comments = appended_comment,
.Use => node.cast(Node.Use).?.doc_comments = appended_comment,
else => unreachable,
}
}
continue;
},
}) |node| {
switch (field_state) {
.none => field_state = .seen,
.err, .seen => {},
.end => |tok| {
try tree.errors.push(.{
.DeclBetweenFields = .{ .token = tok },
});
// continue parsing, error will be reported later
field_state = .err;
},
}
const field = node.cast(Node.ContainerField).?;
field.doc_comments = doc_comments;
try list.push(node);
const comma = eatToken(it, .Comma) orelse {
// try to continue parsing
const index = it.index;
findNextContainerMember(it);
const next = it.peek().?.id;
switch (next) {
.Eof => break,
else => {
if (next == .RBrace) {
if (!top_level) break;
_ = nextToken(it);
}
if (visib_token != null) {
try p.errors.append(p.gpa, .{
.ExpectedPubItem = .{ .token = p.tok_i },
});
// ignore this pub
continue;
}
// add error and continue
try tree.errors.push(.{
.ExpectedToken = .{ .token = index, .expected_id = .Comma },
if (p.parseContainerField() catch |err| switch (err) {
error.OutOfMemory => return error.OutOfMemory,
error.ParseError => {
// attempt to recover
p.findNextContainerMember();
continue;
},
}) |node| {
switch (field_state) {
.none => field_state = .seen,
.err, .seen => {},
.end => |tok| {
try p.errors.append(p.gpa, .{
.DeclBetweenFields = .{ .token = tok },
});
continue;
// continue parsing, error will be reported later
field_state = .err;
},
}
};
if (try parseAppendedDocComment(arena, it, tree, comma)) |appended_comment|
field.doc_comments = appended_comment;
continue;
}
// Dangling doc comment
if (doc_comments != null) {
try tree.errors.push(.{
.UnattachedDocComment = .{ .token = doc_comments.?.firstToken() },
});
}
const field = node.cast(Node.ContainerField).?;
field.doc_comments = doc_comments;
list_it = try p.llpush(*Node, list_it, node);
const comma = p.eatToken(.Comma) orelse {
// try to continue parsing
const index = p.tok_i;
p.findNextContainerMember();
const next = p.tokens[p.tok_i].id;
switch (next) {
.Eof => break,
else => {
if (next == .RBrace) {
if (!top_level) break;
_ = p.nextToken();
}
const next = it.peek().?.id;
switch (next) {
.Eof => break,
.Keyword_comptime => {
_ = nextToken(it);
try tree.errors.push(.{
.ExpectedBlockOrField = .{ .token = it.index },
// add error and continue
try p.errors.append(p.gpa, .{
.ExpectedToken = .{ .token = index, .expected_id = .Comma },
});
continue;
},
}
};
if (try p.parseAppendedDocComment(comma)) |appended_comment|
field.doc_comments = appended_comment;
continue;
}
// Dangling doc comment
if (doc_comments != null) {
try p.errors.append(p.gpa, .{
.UnattachedDocComment = .{ .token = doc_comments.?.firstToken() },
});
},
else => {
const index = it.index;
if (next == .RBrace) {
if (!top_level) break;
_ = nextToken(it);
}
}
// this was likely not supposed to end yet,
// try to find the next declaration
findNextContainerMember(it);
try tree.errors.push(.{
.ExpectedContainerMembers = .{ .token = index },
const next = p.tokens[p.tok_i].id;
switch (next) {
.Eof => break,
.Keyword_comptime => {
_ = p.nextToken();
try p.errors.append(p.gpa, .{
.ExpectedBlockOrField = .{ .token = p.tok_i },
});
},
else => {
const index = p.tok_i;
if (next == .RBrace) {
if (!top_level) break;
_ = p.nextToken();
}
// this was likely not supposed to end yet,
// try to find the next declaration
p.findNextContainerMember();
try p.errors.append(p.gpa, .{
.ExpectedContainerMembers = .{ .token = index },
});
},
}
}
return list;
}
/// Attempts to find next container member by searching for certain tokens
fn findNextContainerMember(p: *Parser) void {
var level: u32 = 0;
while (true) {
const tok = p.nextToken();
switch (tok.ptr.id) {
// any of these can start a new top level declaration
.Keyword_test,
.Keyword_comptime,
.Keyword_pub,
.Keyword_export,
.Keyword_extern,
.Keyword_inline,
.Keyword_noinline,
.Keyword_usingnamespace,
.Keyword_threadlocal,
.Keyword_const,
.Keyword_var,
.Keyword_fn,
.Identifier,
=> {
if (level == 0) {
p.putBackToken(tok.index);
return;
}
},
.Comma, .Semicolon => {
// this decl was likely meant to end here
if (level == 0) {
return;
}
},
.LParen, .LBracket, .LBrace => level += 1,
.RParen, .RBracket => {
if (level != 0) level -= 1;
},
.RBrace => {
if (level == 0) {
// end of container, exit
p.putBackToken(tok.index);
return;
}
level -= 1;
},
.Eof => {
p.putBackToken(tok.index);
return;
},
else => {},
}
}
}
/// Attempts to find the next statement by searching for a semicolon
fn findNextStmt(p: *Parser) void {
var level: u32 = 0;
while (true) {
const tok = p.nextToken();
switch (tok.ptr.id) {
.LBrace => level += 1,
.RBrace => {
if (level == 0) {
p.putBackToken(tok.index);
return;
}
level -= 1;
},
.Semicolon => {
if (level == 0) {
return;
}
},
.Eof => {
p.putBackToken(tok.index);
return;
},
else => {},
}
}
}
/// Eat a multiline container doc comment
fn parseContainerDocComments(p: *Parser) !?*Node {
var lines = Node.DocComment.LineList{};
var lines_it: *?*Node.DocComment.LineList.Node = &lines.first;
while (p.eatToken(.ContainerDocComment)) |line| {
lines_it = try p.llpush(TokenIndex, lines_it, line);
}
if (lines.first == null) return null;
const node = try p.arena.allocator.create(Node.DocComment);
node.* = .{
.lines = lines,
};
return &node.base;
}
/// TestDecl <- KEYWORD_test STRINGLITERALSINGLE Block
fn parseTestDecl(p: *Parser) !?*Node {
const test_token = p.eatToken(.Keyword_test) orelse return null;
const name_node = try p.expectNode(parseStringLiteralSingle, .{
.ExpectedStringLiteral = .{ .token = p.tok_i },
});
const block_node = try p.expectNode(parseBlock, .{
.ExpectedLBrace = .{ .token = p.tok_i },
});
const test_node = try p.arena.allocator.create(Node.TestDecl);
test_node.* = .{
.doc_comments = null,
.test_token = test_token,
.name = name_node,
.body_node = block_node,
};
return &test_node.base;
}
/// TopLevelComptime <- KEYWORD_comptime BlockExpr
fn parseTopLevelComptime(p: *Parser) !?*Node {
const tok = p.eatToken(.Keyword_comptime) orelse return null;
const lbrace = p.eatToken(.LBrace) orelse {
p.putBackToken(tok);
return null;
};
p.putBackToken(lbrace);
const block_node = try p.expectNode(parseBlockExpr, .{
.ExpectedLabelOrLBrace = .{ .token = p.tok_i },
});
const comptime_node = try p.arena.allocator.create(Node.Comptime);
comptime_node.* = .{
.doc_comments = null,
.comptime_token = tok,
.expr = block_node,
};
return &comptime_node.base;
}
/// TopLevelDecl
/// <- (KEYWORD_export / KEYWORD_extern STRINGLITERALSINGLE? / (KEYWORD_inline / KEYWORD_noinline))? FnProto (SEMICOLON / Block)
/// / (KEYWORD_export / KEYWORD_extern STRINGLITERALSINGLE?)? KEYWORD_threadlocal? VarDecl
/// / KEYWORD_usingnamespace Expr SEMICOLON
fn parseTopLevelDecl(p: *Parser) !?*Node {
var lib_name: ?*Node = null;
const extern_export_inline_token = blk: {
if (p.eatToken(.Keyword_export)) |token| break :blk token;
if (p.eatToken(.Keyword_extern)) |token| {
lib_name = try p.parseStringLiteralSingle();
break :blk token;
}
if (p.eatToken(.Keyword_inline)) |token| break :blk token;
if (p.eatToken(.Keyword_noinline)) |token| break :blk token;
break :blk null;
};
if (try p.parseFnProto()) |node| {
const fn_node = node.cast(Node.FnProto).?;
fn_node.*.extern_export_inline_token = extern_export_inline_token;
fn_node.*.lib_name = lib_name;
if (p.eatToken(.Semicolon)) |_| return node;
if (try p.expectNodeRecoverable(parseBlock, .{
// since parseBlock only return error.ParseError on
// a missing '}' we can assume this function was
// supposed to end here.
.ExpectedSemiOrLBrace = .{ .token = p.tok_i },
})) |body_node| {
fn_node.body_node = body_node;
}
return node;
}
if (extern_export_inline_token) |token| {
if (p.tokens[token].id == .Keyword_inline or
p.tokens[token].id == .Keyword_noinline)
{
try p.errors.append(p.gpa, .{
.ExpectedFn = .{ .token = p.tok_i },
});
},
return error.ParseError;
}
}
}
return list;
}
const thread_local_token = p.eatToken(.Keyword_threadlocal);
/// Attempts to find next container member by searching for certain tokens
fn findNextContainerMember(it: *TokenIterator) void {
var level: u32 = 0;
while (true) {
const tok = nextToken(it);
switch (tok.ptr.id) {
// any of these can start a new top level declaration
.Keyword_test,
.Keyword_comptime,
.Keyword_pub,
.Keyword_export,
.Keyword_extern,
.Keyword_inline,
.Keyword_noinline,
.Keyword_usingnamespace,
.Keyword_threadlocal,
.Keyword_const,
.Keyword_var,
.Keyword_fn,
.Identifier,
=> {
if (level == 0) {
putBackToken(it, tok.index);
return;
}
},
.Comma, .Semicolon => {
// this decl was likely meant to end here
if (level == 0) {
return;
}
},
.LParen, .LBracket, .LBrace => level += 1,
.RParen, .RBracket => {
if (level != 0) level -= 1;
},
.RBrace => {
if (level == 0) {
// end of container, exit
putBackToken(it, tok.index);
return;
}
level -= 1;
},
.Eof => {
putBackToken(it, tok.index);
return;
},
else => {},
if (try p.parseVarDecl()) |node| {
var var_decl = node.cast(Node.VarDecl).?;
var_decl.*.thread_local_token = thread_local_token;
var_decl.*.comptime_token = null;
var_decl.*.extern_export_token = extern_export_inline_token;
var_decl.*.lib_name = lib_name;
return node;
}
}
}
/// Attempts to find the next statement by searching for a semicolon
fn findNextStmt(it: *TokenIterator) void {
var level: u32 = 0;
while (true) {
const tok = nextToken(it);
switch (tok.ptr.id) {
.LBrace => level += 1,
.RBrace => {
if (level == 0) {
putBackToken(it, tok.index);
return;
}
level -= 1;
},
.Semicolon => {
if (level == 0) {
return;
}
},
.Eof => {
putBackToken(it, tok.index);
return;
},
else => {},
}
}
}
/// Eat a multiline container doc comment
fn parseContainerDocComments(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node {
var lines = Node.DocComment.LineList.init(arena);
while (eatToken(it, .ContainerDocComment)) |line| {
try lines.push(line);
}
if (lines.len == 0) return null;
const node = try arena.create(Node.DocComment);
node.* = .{
.lines = lines,
};
return &node.base;
}
/// TestDecl <- KEYWORD_test STRINGLITERALSINGLE Block
fn parseTestDecl(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node {
const test_token = eatToken(it, .Keyword_test) orelse return null;
const name_node = try expectNode(arena, it, tree, parseStringLiteralSingle, .{
.ExpectedStringLiteral = .{ .token = it.index },
});
const block_node = try expectNode(arena, it, tree, parseBlock, .{
.ExpectedLBrace = .{ .token = it.index },
});
const test_node = try arena.create(Node.TestDecl);
test_node.* = .{
.doc_comments = null,
.test_token = test_token,
.name = name_node,
.body_node = block_node,
};
return &test_node.base;
}
/// TopLevelComptime <- KEYWORD_comptime BlockExpr
fn parseTopLevelComptime(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node {
const tok = eatToken(it, .Keyword_comptime) orelse return null;
const lbrace = eatToken(it, .LBrace) orelse {
putBackToken(it, tok);
return null;
};
putBackToken(it, lbrace);
const block_node = try expectNode(arena, it, tree, parseBlockExpr, .{
.ExpectedLabelOrLBrace = .{ .token = it.index },
});
const comptime_node = try arena.create(Node.Comptime);
comptime_node.* = .{
.doc_comments = null,
.comptime_token = tok,
.expr = block_node,
};
return &comptime_node.base;
}
/// TopLevelDecl
/// <- (KEYWORD_export / KEYWORD_extern STRINGLITERALSINGLE? / (KEYWORD_inline / KEYWORD_noinline))? FnProto (SEMICOLON / Block)
/// / (KEYWORD_export / KEYWORD_extern STRINGLITERALSINGLE?)? KEYWORD_threadlocal? VarDecl
/// / KEYWORD_usingnamespace Expr SEMICOLON
fn parseTopLevelDecl(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node {
var lib_name: ?*Node = null;
const extern_export_inline_token = blk: {
if (eatToken(it, .Keyword_export)) |token| break :blk token;
if (eatToken(it, .Keyword_extern)) |token| {
lib_name = try parseStringLiteralSingle(arena, it, tree);
break :blk token;
}
if (eatToken(it, .Keyword_inline)) |token| break :blk token;
if (eatToken(it, .Keyword_noinline)) |token| break :blk token;
break :blk null;
};
if (try parseFnProto(arena, it, tree)) |node| {
const fn_node = node.cast(Node.FnProto).?;
fn_node.*.extern_export_inline_token = extern_export_inline_token;
fn_node.*.lib_name = lib_name;
if (eatToken(it, .Semicolon)) |_| return node;
if (try expectNodeRecoverable(arena, it, tree, parseBlock, .{
// since parseBlock only return error.ParseError on
// a missing '}' we can assume this function was
// supposed to end here.
.ExpectedSemiOrLBrace = .{ .token = it.index },
})) |body_node| {
fn_node.body_node = body_node;
}
return node;
}
if (extern_export_inline_token) |token| {
if (tree.tokens.at(token).id == .Keyword_inline or
tree.tokens.at(token).id == .Keyword_noinline)
{
try tree.errors.push(.{
.ExpectedFn = .{ .token = it.index },
if (thread_local_token != null) {
try p.errors.append(p.gpa, .{
.ExpectedVarDecl = .{ .token = p.tok_i },
});
// ignore this and try again;
return error.ParseError;
}
}
const thread_local_token = eatToken(it, .Keyword_threadlocal);
if (try parseVarDecl(arena, it, tree)) |node| {
var var_decl = node.cast(Node.VarDecl).?;
var_decl.*.thread_local_token = thread_local_token;
var_decl.*.comptime_token = null;
var_decl.*.extern_export_token = extern_export_inline_token;
var_decl.*.lib_name = lib_name;
return node;
}
if (thread_local_token != null) {
try tree.errors.push(.{
.ExpectedVarDecl = .{ .token = it.index },
});
// ignore this and try again;
return error.ParseError;
}
if (extern_export_inline_token) |token| {
try tree.errors.push(.{
.ExpectedVarDeclOrFn = .{ .token = it.index },
});
// ignore this and try again;
return error.ParseError;
}
return try parseUse(arena, it, tree);
}
/// FnProto <- KEYWORD_fn IDENTIFIER? LPAREN ParamDeclList RPAREN ByteAlign? LinkSection? EXCLAMATIONMARK? (KEYWORD_var / TypeExpr)
fn parseFnProto(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node {
// TODO: Remove once extern/async fn rewriting is
var is_async = false;
var is_extern = false;
const cc_token: ?usize = blk: {
if (eatToken(it, .Keyword_extern)) |token| {
is_extern = true;
break :blk token;
}
if (eatToken(it, .Keyword_async)) |token| {
is_async = true;
break :blk token;
}
break :blk null;
};
const fn_token = eatToken(it, .Keyword_fn) orelse {
if (cc_token) |token|
putBackToken(it, token);
return null;
};
const name_token = eatToken(it, .Identifier);
const lparen = try expectToken(it, tree, .LParen);
const params = try parseParamDeclList(arena, it, tree);
const rparen = try expectToken(it, tree, .RParen);
const align_expr = try parseByteAlign(arena, it, tree);
const section_expr = try parseLinkSection(arena, it, tree);
const callconv_expr = try parseCallconv(arena, it, tree);
const exclamation_token = eatToken(it, .Bang);
const return_type_expr = (try parseVarType(arena, it, tree)) orelse
try expectNodeRecoverable(arena, it, tree, parseTypeExpr, .{
// most likely the user forgot to specify the return type.
// Mark return type as invalid and try to continue.
.ExpectedReturnType = .{ .token = it.index },
});
// TODO https://github.com/ziglang/zig/issues/3750
const R = Node.FnProto.ReturnType;
const return_type = if (return_type_expr == null)
R{ .Invalid = rparen }
else if (exclamation_token != null)
R{ .InferErrorSet = return_type_expr.? }
else
R{ .Explicit = return_type_expr.? };
const var_args_token = if (params.len > 0) blk: {
const param_type = params.at(params.len - 1).*.cast(Node.ParamDecl).?.param_type;
break :blk if (param_type == .var_args) param_type.var_args else null;
} else
null;
const fn_proto_node = try arena.create(Node.FnProto);
fn_proto_node.* = .{
.doc_comments = null,
.visib_token = null,
.fn_token = fn_token,
.name_token = name_token,
.params = params,
.return_type = return_type,
.var_args_token = var_args_token,
.extern_export_inline_token = null,
.body_node = null,
.lib_name = null,
.align_expr = align_expr,
.section_expr = section_expr,
.callconv_expr = callconv_expr,
.is_extern_prototype = is_extern,
.is_async = is_async,
};
return &fn_proto_node.base;
}
/// VarDecl <- (KEYWORD_const / KEYWORD_var) IDENTIFIER (COLON TypeExpr)? ByteAlign? LinkSection? (EQUAL Expr)? SEMICOLON
fn parseVarDecl(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node {
const mut_token = eatToken(it, .Keyword_const) orelse
eatToken(it, .Keyword_var) orelse
return null;
const name_token = try expectToken(it, tree, .Identifier);
const type_node = if (eatToken(it, .Colon) != null)
try expectNode(arena, it, tree, parseTypeExpr, .{
.ExpectedTypeExpr = .{ .token = it.index },
})
else
null;
const align_node = try parseByteAlign(arena, it, tree);
const section_node = try parseLinkSection(arena, it, tree);
const eq_token = eatToken(it, .Equal);
const init_node = if (eq_token != null) blk: {
break :blk try expectNode(arena, it, tree, parseExpr, .{
.ExpectedExpr = .{ .token = it.index },
});
} else null;
const semicolon_token = try expectToken(it, tree, .Semicolon);
const node = try arena.create(Node.VarDecl);
node.* = .{
.doc_comments = null,
.visib_token = null,
.thread_local_token = null,
.name_token = name_token,
.eq_token = eq_token,
.mut_token = mut_token,
.comptime_token = null,
.extern_export_token = null,
.lib_name = null,
.type_node = type_node,
.align_node = align_node,
.section_node = section_node,
.init_node = init_node,
.semicolon_token = semicolon_token,
};
return &node.base;
}
/// ContainerField <- KEYWORD_comptime? IDENTIFIER (COLON TypeExpr ByteAlign?)? (EQUAL Expr)?
fn parseContainerField(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node {
const comptime_token = eatToken(it, .Keyword_comptime);
const name_token = eatToken(it, .Identifier) orelse {
if (comptime_token) |t| putBackToken(it, t);
return null;
};
var align_expr: ?*Node = null;
var type_expr: ?*Node = null;
if (eatToken(it, .Colon)) |_| {
if (eatToken(it, .Keyword_var)) |var_tok| {
const node = try arena.create(ast.Node.VarType);
node.* = .{ .token = var_tok };
type_expr = &node.base;
} else {
type_expr = try expectNode(arena, it, tree, parseTypeExpr, .{
.ExpectedTypeExpr = .{ .token = it.index },
if (extern_export_inline_token) |token| {
try p.errors.append(p.gpa, .{
.ExpectedVarDeclOrFn = .{ .token = p.tok_i },
});
align_expr = try parseByteAlign(arena, it, tree);
// ignore this and try again;
return error.ParseError;
}
return p.parseUse();
}
const value_expr = if (eatToken(it, .Equal)) |_|
try expectNode(arena, it, tree, parseExpr, .{
.ExpectedExpr = .{ .token = it.index },
})
else
null;
/// FnProto <- KEYWORD_fn IDENTIFIER? LPAREN ParamDeclList RPAREN ByteAlign? LinkSection? EXCLAMATIONMARK? (KEYWORD_var / TypeExpr)
fn parseFnProto(p: *Parser) !?*Node {
// TODO: Remove once extern/async fn rewriting is
var is_async = false;
var is_extern = false;
const cc_token: ?TokenIndex = blk: {
if (p.eatToken(.Keyword_extern)) |token| {
is_extern = true;
break :blk token;
}
if (p.eatToken(.Keyword_async)) |token| {
is_async = true;
break :blk token;
}
break :blk null;
};
const fn_token = p.eatToken(.Keyword_fn) orelse {
if (cc_token) |token|
p.putBackToken(token);
return null;
};
var var_args_token: ?TokenIndex = null;
const name_token = p.eatToken(.Identifier);
const lparen = try p.expectToken(.LParen);
const params = try p.parseParamDeclList(&var_args_token);
const rparen = try p.expectToken(.RParen);
const align_expr = try p.parseByteAlign();
const section_expr = try p.parseLinkSection();
const callconv_expr = try p.parseCallconv();
const exclamation_token = p.eatToken(.Bang);
const node = try arena.create(Node.ContainerField);
node.* = .{
.doc_comments = null,
.comptime_token = comptime_token,
.name_token = name_token,
.type_expr = type_expr,
.value_expr = value_expr,
.align_expr = align_expr,
};
return &node.base;
}
/// Statement
/// <- KEYWORD_comptime? VarDecl
/// / KEYWORD_comptime BlockExprStatement
/// / KEYWORD_nosuspend BlockExprStatement
/// / KEYWORD_suspend (SEMICOLON / BlockExprStatement)
/// / KEYWORD_defer BlockExprStatement
/// / KEYWORD_errdefer Payload? BlockExprStatement
/// / IfStatement
/// / LabeledStatement
/// / SwitchExpr
/// / AssignExpr SEMICOLON
fn parseStatement(arena: *Allocator, it: *TokenIterator, tree: *Tree) Error!?*Node {
const comptime_token = eatToken(it, .Keyword_comptime);
const var_decl_node = try parseVarDecl(arena, it, tree);
if (var_decl_node) |node| {
const var_decl = node.cast(Node.VarDecl).?;
var_decl.comptime_token = comptime_token;
return node;
}
if (comptime_token) |token| {
const block_expr = try expectNode(arena, it, tree, parseBlockExprStatement, .{
.ExpectedBlockOrAssignment = .{ .token = it.index },
const return_type_expr = (try p.parseVarType()) orelse
try p.expectNodeRecoverable(parseTypeExpr, .{
// most likely the user forgot to specify the return type.
// Mark return type as invalid and try to continue.
.ExpectedReturnType = .{ .token = p.tok_i },
});
const node = try arena.create(Node.Comptime);
node.* = .{
// TODO https://github.com/ziglang/zig/issues/3750
const R = Node.FnProto.ReturnType;
const return_type = if (return_type_expr == null)
R{ .Invalid = rparen }
else if (exclamation_token != null)
R{ .InferErrorSet = return_type_expr.? }
else
R{ .Explicit = return_type_expr.? };
const fn_proto_node = try p.arena.allocator.create(Node.FnProto);
fn_proto_node.* = .{
.doc_comments = null,
.comptime_token = token,
.expr = block_expr,
.visib_token = null,
.fn_token = fn_token,
.name_token = name_token,
.params = params,
.return_type = return_type,
.var_args_token = var_args_token,
.extern_export_inline_token = null,
.body_node = null,
.lib_name = null,
.align_expr = align_expr,
.section_expr = section_expr,
.callconv_expr = callconv_expr,
.is_extern_prototype = is_extern,
.is_async = is_async,
};
return &node.base;
return &fn_proto_node.base;
}
if (eatToken(it, .Keyword_nosuspend)) |nosuspend_token| {
const block_expr = try expectNode(arena, it, tree, parseBlockExprStatement, .{
.ExpectedBlockOrAssignment = .{ .token = it.index },
});
/// VarDecl <- (KEYWORD_const / KEYWORD_var) IDENTIFIER (COLON TypeExpr)? ByteAlign? LinkSection? (EQUAL Expr)? SEMICOLON
fn parseVarDecl(p: *Parser) !?*Node {
const mut_token = p.eatToken(.Keyword_const) orelse
p.eatToken(.Keyword_var) orelse
return null;
const node = try arena.create(Node.Nosuspend);
node.* = .{
.nosuspend_token = nosuspend_token,
.expr = block_expr,
};
return &node.base;
}
if (eatToken(it, .Keyword_suspend)) |suspend_token| {
const semicolon = eatToken(it, .Semicolon);
const body_node = if (semicolon == null) blk: {
break :blk try expectNode(arena, it, tree, parseBlockExprStatement, .{
.ExpectedBlockOrExpression = .{ .token = it.index },
});
} else null;
const node = try arena.create(Node.Suspend);
node.* = .{
.suspend_token = suspend_token,
.body = body_node,
};
return &node.base;
}
const defer_token = eatToken(it, .Keyword_defer) orelse eatToken(it, .Keyword_errdefer);
if (defer_token) |token| {
const payload = if (tree.tokens.at(token).id == .Keyword_errdefer)
try parsePayload(arena, it, tree)
const name_token = try p.expectToken(.Identifier);
const type_node = if (p.eatToken(.Colon) != null)
try p.expectNode(parseTypeExpr, .{
.ExpectedTypeExpr = .{ .token = p.tok_i },
})
else
null;
const expr_node = try expectNode(arena, it, tree, parseBlockExprStatement, .{
.ExpectedBlockOrExpression = .{ .token = it.index },
});
const node = try arena.create(Node.Defer);
const align_node = try p.parseByteAlign();
const section_node = try p.parseLinkSection();
const eq_token = p.eatToken(.Equal);
const init_node = if (eq_token != null) blk: {
break :blk try p.expectNode(parseExpr, .{
.ExpectedExpr = .{ .token = p.tok_i },
});
} else null;
const semicolon_token = try p.expectToken(.Semicolon);
const node = try p.arena.allocator.create(Node.VarDecl);
node.* = .{
.defer_token = token,
.expr = expr_node,
.payload = payload,
.doc_comments = null,
.visib_token = null,
.thread_local_token = null,
.name_token = name_token,
.eq_token = eq_token,
.mut_token = mut_token,
.comptime_token = null,
.extern_export_token = null,
.lib_name = null,
.type_node = type_node,
.align_node = align_node,
.section_node = section_node,
.init_node = init_node,
.semicolon_token = semicolon_token,
};
return &node.base;
}
if (try parseIfStatement(arena, it, tree)) |node| return node;
if (try parseLabeledStatement(arena, it, tree)) |node| return node;
if (try parseSwitchExpr(arena, it, tree)) |node| return node;
if (try parseAssignExpr(arena, it, tree)) |node| {
_ = try expectTokenRecoverable(it, tree, .Semicolon);
return node;
}
return null;
}
/// IfStatement
/// <- IfPrefix BlockExpr ( KEYWORD_else Payload? Statement )?
/// / IfPrefix AssignExpr ( SEMICOLON / KEYWORD_else Payload? Statement )
fn parseIfStatement(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node {
const if_node = (try parseIfPrefix(arena, it, tree)) orelse return null;
const if_prefix = if_node.cast(Node.If).?;
const block_expr = (try parseBlockExpr(arena, it, tree));
const assign_expr = if (block_expr == null)
try expectNode(arena, it, tree, parseAssignExpr, .{
.ExpectedBlockOrAssignment = .{ .token = it.index },
})
else
null;
const semicolon = if (assign_expr != null) eatToken(it, .Semicolon) else null;
const else_node = if (semicolon == null) blk: {
const else_token = eatToken(it, .Keyword_else) orelse break :blk null;
const payload = try parsePayload(arena, it, tree);
const else_body = try expectNode(arena, it, tree, parseStatement, .{
.InvalidToken = .{ .token = it.index },
});
const node = try arena.create(Node.Else);
node.* = .{
.else_token = else_token,
.payload = payload,
.body = else_body,
/// ContainerField <- KEYWORD_comptime? IDENTIFIER (COLON TypeExpr ByteAlign?)? (EQUAL Expr)?
fn parseContainerField(p: *Parser) !?*Node {
const comptime_token = p.eatToken(.Keyword_comptime);
const name_token = p.eatToken(.Identifier) orelse {
if (comptime_token) |t| p.putBackToken(t);
return null;
};
break :blk node;
} else null;
var align_expr: ?*Node = null;
var type_expr: ?*Node = null;
if (p.eatToken(.Colon)) |_| {
if (p.eatToken(.Keyword_var)) |var_tok| {
const node = try p.arena.allocator.create(ast.Node.VarType);
node.* = .{ .token = var_tok };
type_expr = &node.base;
} else {
type_expr = try p.expectNode(parseTypeExpr, .{
.ExpectedTypeExpr = .{ .token = p.tok_i },
});
align_expr = try p.parseByteAlign();
}
}
if (block_expr) |body| {
if_prefix.body = body;
if_prefix.@"else" = else_node;
return if_node;
const value_expr = if (p.eatToken(.Equal)) |_|
try p.expectNode(parseExpr, .{
.ExpectedExpr = .{ .token = p.tok_i },
})
else
null;
const node = try p.arena.allocator.create(Node.ContainerField);
node.* = .{
.doc_comments = null,
.comptime_token = comptime_token,
.name_token = name_token,
.type_expr = type_expr,
.value_expr = value_expr,
.align_expr = align_expr,
};
return &node.base;
}
if (assign_expr) |body| {
if_prefix.body = body;
if (semicolon != null) return if_node;
if (else_node != null) {
/// Statement
/// <- KEYWORD_comptime? VarDecl
/// / KEYWORD_comptime BlockExprStatement
/// / KEYWORD_nosuspend BlockExprStatement
/// / KEYWORD_suspend (SEMICOLON / BlockExprStatement)
/// / KEYWORD_defer BlockExprStatement
/// / KEYWORD_errdefer Payload? BlockExprStatement
/// / IfStatement
/// / LabeledStatement
/// / SwitchExpr
/// / AssignExpr SEMICOLON
fn parseStatement(p: *Parser) Error!?*Node {
// `comptime` may prefix either a var decl or a block/expression statement,
// so it is consumed up front and attached to whichever alternative matches.
const comptime_token = p.eatToken(.Keyword_comptime);
const var_decl_node = try p.parseVarDecl();
if (var_decl_node) |node| {
// parseVarDecl leaves the comptime slot null; patch the prefix token in here.
const var_decl = node.cast(Node.VarDecl).?;
var_decl.comptime_token = comptime_token;
return node;
}
if (comptime_token) |token| {
// `comptime` not followed by a var decl must be followed by a block/expr statement.
const block_expr = try p.expectNode(parseBlockExprStatement, .{
.ExpectedBlockOrAssignment = .{ .token = p.tok_i },
});
const node = try p.arena.allocator.create(Node.Comptime);
node.* = .{
.doc_comments = null,
.comptime_token = token,
.expr = block_expr,
};
return &node.base;
}
if (p.eatToken(.Keyword_nosuspend)) |nosuspend_token| {
const block_expr = try p.expectNode(parseBlockExprStatement, .{
.ExpectedBlockOrAssignment = .{ .token = p.tok_i },
});
const node = try p.arena.allocator.create(Node.Nosuspend);
node.* = .{
.nosuspend_token = nosuspend_token,
.expr = block_expr,
};
return &node.base;
}
if (p.eatToken(.Keyword_suspend)) |suspend_token| {
// `suspend;` has no body; otherwise a block/expression statement follows.
const semicolon = p.eatToken(.Semicolon);
const body_node = if (semicolon == null) blk: {
break :blk try p.expectNode(parseBlockExprStatement, .{
.ExpectedBlockOrExpression = .{ .token = p.tok_i },
});
} else null;
const node = try p.arena.allocator.create(Node.Suspend);
node.* = .{
.suspend_token = suspend_token,
.body = body_node,
};
return &node.base;
}
const defer_token = p.eatToken(.Keyword_defer) orelse p.eatToken(.Keyword_errdefer);
if (defer_token) |token| {
// Only `errdefer` may carry a capture payload; distinguish by the token id.
const payload = if (p.tokens[token].id == .Keyword_errdefer)
try p.parsePayload()
else
null;
const expr_node = try p.expectNode(parseBlockExprStatement, .{
.ExpectedBlockOrExpression = .{ .token = p.tok_i },
});
const node = try p.arena.allocator.create(Node.Defer);
node.* = .{
.defer_token = token,
.expr = expr_node,
.payload = payload,
};
return &node.base;
}
// Remaining alternatives are tried in grammar order; the last one requires
// a terminating semicolon (recoverable if missing).
if (try p.parseIfStatement()) |node| return node;
if (try p.parseLabeledStatement()) |node| return node;
if (try p.parseSwitchExpr()) |node| return node;
if (try p.parseAssignExpr()) |node| {
_ = try p.expectTokenRecoverable(.Semicolon);
return node;
}
return null;
}
/// IfStatement
/// <- IfPrefix BlockExpr ( KEYWORD_else Payload? Statement )?
/// / IfPrefix AssignExpr ( SEMICOLON / KEYWORD_else Payload? Statement )
fn parseIfStatement(p: *Parser) !?*Node {
const if_node = (try p.parseIfPrefix()) orelse return null;
const if_prefix = if_node.cast(Node.If).?;
const block_expr = (try p.parseBlockExpr());
const assign_expr = if (block_expr == null)
try p.expectNode(parseAssignExpr, .{
.ExpectedBlockOrAssignment = .{ .token = p.tok_i },
})
else
null;
const semicolon = if (assign_expr != null) p.eatToken(.Semicolon) else null;
const else_node = if (semicolon == null) blk: {
const else_token = p.eatToken(.Keyword_else) orelse break :blk null;
const payload = try p.parsePayload();
const else_body = try p.expectNode(parseStatement, .{
.InvalidToken = .{ .token = p.tok_i },
});
const node = try p.arena.allocator.create(Node.Else);
node.* = .{
.else_token = else_token,
.payload = payload,
.body = else_body,
};
break :blk node;
} else null;
if (block_expr) |body| {
if_prefix.body = body;
if_prefix.@"else" = else_node;
return if_node;
}
try tree.errors.push(.{
.ExpectedSemiOrElse = .{ .token = it.index },
});
if (assign_expr) |body| {
if_prefix.body = body;
if (semicolon != null) return if_node;
if (else_node != null) {
if_prefix.@"else" = else_node;
return if_node;
}
try p.errors.append(p.gpa, .{
.ExpectedSemiOrElse = .{ .token = p.tok_i },
});
}
return if_node;
}
return if_node;
}
/// LabeledStatement <- BlockLabel? (Block / LoopStatement)
fn parseLabeledStatement(p: *Parser) !?*Node {
var colon: TokenIndex = undefined;
const label_token = p.parseBlockLabel(&colon);
/// LabeledStatement <- BlockLabel? (Block / LoopStatement)
fn parseLabeledStatement(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node {
var colon: TokenIndex = undefined;
const label_token = parseBlockLabel(arena, it, tree, &colon);
if (try p.parseBlock()) |node| {
node.cast(Node.Block).?.label = label_token;
return node;
}
if (try parseBlock(arena, it, tree)) |node| {
node.cast(Node.Block).?.label = label_token;
return node;
if (try p.parseLoopStatement()) |node| {
if (node.cast(Node.For)) |for_node| {
for_node.label = label_token;
} else if (node.cast(Node.While)) |while_node| {
while_node.label = label_token;
} else unreachable;
return node;
}
if (label_token != null) {
try p.errors.append(p.gpa, .{
.ExpectedLabelable = .{ .token = p.tok_i },
});
return error.ParseError;
}
return null;
}
if (try parseLoopStatement(arena, it, tree)) |node| {
if (node.cast(Node.For)) |for_node| {
for_node.label = label_token;
} else if (node.cast(Node.While)) |while_node| {
while_node.label = label_token;
} else unreachable;
return node;
}
/// LoopStatement <- KEYWORD_inline? (ForStatement / WhileStatement)
fn parseLoopStatement(p: *Parser) !?*Node {
const inline_token = p.eatToken(.Keyword_inline);
if (label_token != null) {
try tree.errors.push(.{
.ExpectedLabelable = .{ .token = it.index },
if (try p.parseForStatement()) |node| {
node.cast(Node.For).?.inline_token = inline_token;
return node;
}
if (try p.parseWhileStatement()) |node| {
node.cast(Node.While).?.inline_token = inline_token;
return node;
}
if (inline_token == null) return null;
// If we've seen "inline", there should have been a "for" or "while"
try p.errors.append(p.gpa, .{
.ExpectedInlinable = .{ .token = p.tok_i },
});
return error.ParseError;
}
return null;
}
/// ForStatement
/// <- ForPrefix BlockExpr ( KEYWORD_else Statement )?
/// / ForPrefix AssignExpr ( SEMICOLON / KEYWORD_else Statement )
fn parseForStatement(p: *Parser) !?*Node {
const node = (try p.parseForPrefix()) orelse return null;
const for_prefix = node.cast(Node.For).?;
/// LoopStatement <- KEYWORD_inline? (ForStatement / WhileStatement)
fn parseLoopStatement(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node {
const inline_token = eatToken(it, .Keyword_inline);
if (try p.parseBlockExpr()) |block_expr_node| {
for_prefix.body = block_expr_node;
if (try parseForStatement(arena, it, tree)) |node| {
node.cast(Node.For).?.inline_token = inline_token;
return node;
}
if (p.eatToken(.Keyword_else)) |else_token| {
const statement_node = try p.expectNode(parseStatement, .{
.InvalidToken = .{ .token = p.tok_i },
});
if (try parseWhileStatement(arena, it, tree)) |node| {
node.cast(Node.While).?.inline_token = inline_token;
return node;
}
if (inline_token == null) return null;
const else_node = try p.arena.allocator.create(Node.Else);
else_node.* = .{
.else_token = else_token,
.payload = null,
.body = statement_node,
};
for_prefix.@"else" = else_node;
// If we've seen "inline", there should have been a "for" or "while"
try tree.errors.push(.{
.ExpectedInlinable = .{ .token = it.index },
});
return error.ParseError;
}
/// ForStatement
/// <- ForPrefix BlockExpr ( KEYWORD_else Statement )?
/// / ForPrefix AssignExpr ( SEMICOLON / KEYWORD_else Statement )
fn parseForStatement(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node {
const node = (try parseForPrefix(arena, it, tree)) orelse return null;
const for_prefix = node.cast(Node.For).?;
if (try parseBlockExpr(arena, it, tree)) |block_expr_node| {
for_prefix.body = block_expr_node;
if (eatToken(it, .Keyword_else)) |else_token| {
const statement_node = try expectNode(arena, it, tree, parseStatement, .{
.InvalidToken = .{ .token = it.index },
});
const else_node = try arena.create(Node.Else);
else_node.* = .{
.else_token = else_token,
.payload = null,
.body = statement_node,
};
for_prefix.@"else" = else_node;
return node;
}
return node;
}
return node;
}
if (try p.parseAssignExpr()) |assign_expr| {
for_prefix.body = assign_expr;
if (try parseAssignExpr(arena, it, tree)) |assign_expr| {
for_prefix.body = assign_expr;
if (p.eatToken(.Semicolon) != null) return node;
if (eatToken(it, .Semicolon) != null) return node;
if (p.eatToken(.Keyword_else)) |else_token| {
const statement_node = try p.expectNode(parseStatement, .{
.ExpectedStatement = .{ .token = p.tok_i },
});
if (eatToken(it, .Keyword_else)) |else_token| {
const statement_node = try expectNode(arena, it, tree, parseStatement, .{
.ExpectedStatement = .{ .token = it.index },
const else_node = try p.arena.allocator.create(Node.Else);
else_node.* = .{
.else_token = else_token,
.payload = null,
.body = statement_node,
};
for_prefix.@"else" = else_node;
return node;
}
try p.errors.append(p.gpa, .{
.ExpectedSemiOrElse = .{ .token = p.tok_i },
});
const else_node = try arena.create(Node.Else);
else_node.* = .{
.else_token = else_token,
.payload = null,
.body = statement_node,
};
for_prefix.@"else" = else_node;
return node;
}
try tree.errors.push(.{
.ExpectedSemiOrElse = .{ .token = it.index },
});
return node;
}
return null;
}
/// WhileStatement
/// <- WhilePrefix BlockExpr ( KEYWORD_else Payload? Statement )?
/// / WhilePrefix AssignExpr ( SEMICOLON / KEYWORD_else Payload? Statement )
fn parseWhileStatement(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node {
const node = (try parseWhilePrefix(arena, it, tree)) orelse return null;
const while_prefix = node.cast(Node.While).?;
if (try parseBlockExpr(arena, it, tree)) |block_expr_node| {
while_prefix.body = block_expr_node;
if (eatToken(it, .Keyword_else)) |else_token| {
const payload = try parsePayload(arena, it, tree);
const statement_node = try expectNode(arena, it, tree, parseStatement, .{
.InvalidToken = .{ .token = it.index },
});
const else_node = try arena.create(Node.Else);
else_node.* = .{
.else_token = else_token,
.payload = payload,
.body = statement_node,
};
while_prefix.@"else" = else_node;
return node;
}
return node;
return null;
}
if (try parseAssignExpr(arena, it, tree)) |assign_expr_node| {
while_prefix.body = assign_expr_node;
/// WhileStatement
/// <- WhilePrefix BlockExpr ( KEYWORD_else Payload? Statement )?
/// / WhilePrefix AssignExpr ( SEMICOLON / KEYWORD_else Payload? Statement )
fn parseWhileStatement(p: *Parser) !?*Node {
const node = (try p.parseWhilePrefix()) orelse return null;
const while_prefix = node.cast(Node.While).?;
if (eatToken(it, .Semicolon) != null) return node;
if (try p.parseBlockExpr()) |block_expr_node| {
while_prefix.body = block_expr_node;
if (eatToken(it, .Keyword_else)) |else_token| {
const payload = try parsePayload(arena, it, tree);
if (p.eatToken(.Keyword_else)) |else_token| {
const payload = try p.parsePayload();
const statement_node = try expectNode(arena, it, tree, parseStatement, .{
.ExpectedStatement = .{ .token = it.index },
});
const statement_node = try p.expectNode(parseStatement, .{
.InvalidToken = .{ .token = p.tok_i },
});
const else_node = try p.arena.allocator.create(Node.Else);
else_node.* = .{
.else_token = else_token,
.payload = payload,
.body = statement_node,
};
while_prefix.@"else" = else_node;
return node;
}
const else_node = try arena.create(Node.Else);
else_node.* = .{
.else_token = else_token,
.payload = payload,
.body = statement_node,
};
while_prefix.@"else" = else_node;
return node;
}
try tree.errors.push(.{
.ExpectedSemiOrElse = .{ .token = it.index },
});
if (try p.parseAssignExpr()) |assign_expr_node| {
while_prefix.body = assign_expr_node;
return node;
if (p.eatToken(.Semicolon) != null) return node;
if (p.eatToken(.Keyword_else)) |else_token| {
const payload = try p.parsePayload();
const statement_node = try p.expectNode(parseStatement, .{
.ExpectedStatement = .{ .token = p.tok_i },
});
const else_node = try p.arena.allocator.create(Node.Else);
else_node.* = .{
.else_token = else_token,
.payload = payload,
.body = statement_node,
};
while_prefix.@"else" = else_node;
return node;
}
try p.errors.append(p.gpa, .{
.ExpectedSemiOrElse = .{ .token = p.tok_i },
});
return node;
}
return null;
}
return null;
}
/// BlockExprStatement
/// <- BlockExpr
/// / AssignExpr SEMICOLON
/// A block expression needs no terminator; a bare assign-expression
/// statement must be followed by a semicolon (recoverable if missing).
fn parseBlockExprStatement(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node {
    const block = try parseBlockExpr(arena, it, tree);
    if (block) |node| return node;
    const assign = try parseAssignExpr(arena, it, tree);
    if (assign) |node| {
        _ = try expectTokenRecoverable(it, tree, .Semicolon);
        return node;
    }
    return null;
}
/// BlockExpr <- BlockLabel? Block
fn parseBlockExpr(arena: *Allocator, it: *TokenIterator, tree: *Tree) Error!?*Node {
var colon: TokenIndex = undefined;
const label_token = parseBlockLabel(arena, it, tree, &colon);
const block_node = (try parseBlock(arena, it, tree)) orelse {
if (label_token) |label| {
putBackToken(it, label + 1); // ":"
putBackToken(it, label); // IDENTIFIER
/// BlockExprStatement
/// <- BlockExpr
/// / AssignExpr SEMICOLON
fn parseBlockExprStatement(p: *Parser) !?*Node {
if (try p.parseBlockExpr()) |node| return node;
if (try p.parseAssignExpr()) |node| {
_ = try p.expectTokenRecoverable(.Semicolon);
return node;
}
return null;
};
block_node.cast(Node.Block).?.label = label_token;
return block_node;
}
/// AssignExpr <- Expr (AssignOp Expr)?
/// At most one assignment operator may appear, hence `.Once`.
fn parseAssignExpr(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node {
    return parseBinOpExpr(
        arena,
        it,
        tree,
        parseAssignOp,
        parseExpr,
        .Once,
    );
}
/// Expr <- KEYWORD_try* BoolOrExpr
/// Any number of `try` prefixes, then a boolean-or expression.
fn parseExpr(arena: *Allocator, it: *TokenIterator, tree: *Tree) Error!?*Node {
    return parsePrefixOpExpr(
        arena,
        it,
        tree,
        parseTry,
        parseBoolOrExpr,
    );
}
/// BoolOrExpr <- BoolAndExpr (KEYWORD_or BoolAndExpr)*
/// `or` chains may repeat indefinitely.
fn parseBoolOrExpr(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node {
    return parseBinOpExpr(arena, it, tree, SimpleBinOpParseFn(.Keyword_or, Node.InfixOp.Op.BoolOr), parseBoolAndExpr, .Infinitely);
}
/// BoolAndExpr <- CompareExpr (KEYWORD_and CompareExpr)*
/// `and` chains may repeat indefinitely.
fn parseBoolAndExpr(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node {
    return parseBinOpExpr(arena, it, tree, SimpleBinOpParseFn(.Keyword_and, .BoolAnd), parseCompareExpr, .Infinitely);
}
/// CompareExpr <- BitwiseExpr (CompareOp BitwiseExpr)?
/// Comparison operators do not chain, hence `.Once`.
fn parseCompareExpr(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node {
    return parseBinOpExpr(
        arena,
        it,
        tree,
        parseCompareOp,
        parseBitwiseExpr,
        .Once,
    );
}
/// BitwiseExpr <- BitShiftExpr (BitwiseOp BitShiftExpr)*
fn parseBitwiseExpr(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node {
    return parseBinOpExpr(
        arena,
        it,
        tree,
        parseBitwiseOp,
        parseBitShiftExpr,
        .Infinitely,
    );
}
/// BitShiftExpr <- AdditionExpr (BitShiftOp AdditionExpr)*
fn parseBitShiftExpr(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node {
    return parseBinOpExpr(
        arena,
        it,
        tree,
        parseBitShiftOp,
        parseAdditionExpr,
        .Infinitely,
    );
}
/// AdditionExpr <- MultiplyExpr (AdditionOp MultiplyExpr)*
fn parseAdditionExpr(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node {
    return parseBinOpExpr(
        arena,
        it,
        tree,
        parseAdditionOp,
        parseMultiplyExpr,
        .Infinitely,
    );
}
/// MultiplyExpr <- PrefixExpr (MultiplyOp PrefixExpr)*
fn parseMultiplyExpr(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node {
    return parseBinOpExpr(
        arena,
        it,
        tree,
        parseMultiplyOp,
        parsePrefixExpr,
        .Infinitely,
    );
}
/// PrefixExpr <- PrefixOp* PrimaryExpr
fn parsePrefixExpr(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node {
    return parsePrefixOpExpr(
        arena,
        it,
        tree,
        parsePrefixOp,
        parsePrimaryExpr,
    );
}
/// PrimaryExpr
/// <- AsmExpr
/// / IfExpr
/// / KEYWORD_break BreakLabel? Expr?
/// / KEYWORD_comptime Expr
/// / KEYWORD_nosuspend Expr
/// / KEYWORD_continue BreakLabel?
/// / KEYWORD_resume Expr
/// / KEYWORD_return Expr?
/// / BlockLabel? LoopExpr
/// / Block
/// / CurlySuffixExpr
fn parsePrimaryExpr(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node {
if (try parseAsmExpr(arena, it, tree)) |node| return node;
if (try parseIfExpr(arena, it, tree)) |node| return node;
if (eatToken(it, .Keyword_break)) |token| {
// `break` takes an optional label and an optional result expression.
const label = try parseBreakLabel(arena, it, tree);
const expr_node = try parseExpr(arena, it, tree);
const node = try arena.create(Node.ControlFlowExpression);
node.* = .{
.ltoken = token,
.kind = .{ .Break = label },
.rhs = expr_node,
};
return &node.base;
}
if (eatToken(it, .Keyword_comptime)) |token| {
// `comptime` as an expression requires an operand expression.
const expr_node = try expectNode(arena, it, tree, parseExpr, .{
.ExpectedExpr = .{ .token = it.index },
});
const node = try arena.create(Node.Comptime);
node.* = .{
.doc_comments = null,
.comptime_token = token,
.expr = expr_node,
};
return &node.base;
}
if (eatToken(it, .Keyword_nosuspend)) |token| {
const expr_node = try expectNode(arena, it, tree, parseExpr, .{
.ExpectedExpr = .{ .token = it.index },
});
const node = try arena.create(Node.Nosuspend);
node.* = .{
.nosuspend_token = token,
.expr = expr_node,
};
return &node.base;
}
if (eatToken(it, .Keyword_continue)) |token| {
// `continue` takes only an optional label, never a result expression.
const label = try parseBreakLabel(arena, it, tree);
const node = try arena.create(Node.ControlFlowExpression);
node.* = .{
.ltoken = token,
.kind = .{ .Continue = label },
.rhs = null,
};
return &node.base;
}
if (eatToken(it, .Keyword_resume)) |token| {
const expr_node = try expectNode(arena, it, tree, parseExpr, .{
.ExpectedExpr = .{ .token = it.index },
});
const node = try arena.create(Node.PrefixOp);
node.* = .{
.op_token = token,
.op = .Resume,
.rhs = expr_node,
};
return &node.base;
}
if (eatToken(it, .Keyword_return)) |token| {
// The return operand is optional (`return;`).
const expr_node = try parseExpr(arena, it, tree);
const node = try arena.create(Node.ControlFlowExpression);
node.* = .{
.ltoken = token,
.kind = .Return,
.rhs = expr_node,
};
return &node.base;
}
// A block label only applies if a loop expression follows; otherwise the
// label's two tokens (IDENTIFIER and ":") are pushed back onto the stream.
var colon: TokenIndex = undefined;
const label = parseBlockLabel(arena, it, tree, &colon);
if (try parseLoopExpr(arena, it, tree)) |node| {
if (node.cast(Node.For)) |for_node| {
for_node.label = label;
} else if (node.cast(Node.While)) |while_node| {
while_node.label = label;
} else unreachable;
return node;
}
if (label) |token| {
putBackToken(it, token + 1); // ":"
putBackToken(it, token); // IDENTIFIER
}
if (try parseBlock(arena, it, tree)) |node| return node;
if (try parseCurlySuffixExpr(arena, it, tree)) |node| return node;
return null;
}
/// IfExpr <- IfPrefix Expr (KEYWORD_else Payload? Expr)?
/// Shares the generic `parseIf` machinery, using `parseExpr` for the branches.
fn parseIfExpr(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node {
    return parseIf(
        arena,
        it,
        tree,
        parseExpr,
    );
}
/// Block <- LBRACE Statement* RBRACE
fn parseBlock(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node {
const lbrace = eatToken(it, .LBrace) orelse return null;
var statements = Node.Block.StatementList.init(arena);
while (true) {
// On a statement-level parse error, skip ahead to a plausible statement
// boundary and keep parsing, so several errors can be collected in one pass.
const statement = (parseStatement(arena, it, tree) catch |err| switch (err) {
error.OutOfMemory => return error.OutOfMemory,
error.ParseError => {
// try to skip to the next statement
findNextStmt(it);
continue;
},
}) orelse break;
try statements.push(statement);
}
const rbrace = try expectToken(it, tree, .RBrace);
const block_node = try arena.create(Node.Block);
block_node.* = .{
// The label slot is filled in by callers that parsed a BlockLabel prefix.
.label = null,
.lbrace = lbrace,
.statements = statements,
.rbrace = rbrace,
};
return &block_node.base;
}
/// LoopExpr <- KEYWORD_inline? (ForExpr / WhileExpr)
fn parseLoopExpr(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node {
const inline_token = eatToken(it, .Keyword_inline);
if (try parseForExpr(arena, it, tree)) |node| {
// Attach the optional `inline` token to whichever loop kind matched.
node.cast(Node.For).?.inline_token = inline_token;
return node;
}
if (try parseWhileExpr(arena, it, tree)) |node| {
node.cast(Node.While).?.inline_token = inline_token;
return node;
}
if (inline_token == null) return null;
// If we've seen "inline", there should have been a "for" or "while"
try tree.errors.push(.{
.ExpectedInlinable = .{ .token = it.index },
});
return error.ParseError;
}
/// ForExpr <- ForPrefix Expr (KEYWORD_else Expr)?
fn parseForExpr(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node {
const node = (try parseForPrefix(arena, it, tree)) orelse return null;
const for_prefix = node.cast(Node.For).?;
// The loop body expression is mandatory once the for-prefix has matched.
const body_node = try expectNode(arena, it, tree, parseExpr, .{
.ExpectedExpr = .{ .token = it.index },
});
for_prefix.body = body_node;
// Optional `else` branch; unlike while, a for-else takes no payload.
if (eatToken(it, .Keyword_else)) |else_token| {
const body = try expectNode(arena, it, tree, parseExpr, .{
.ExpectedExpr = .{ .token = it.index },
});
const else_node = try arena.create(Node.Else);
else_node.* = .{
.else_token = else_token,
.payload = null,
.body = body,
};
for_prefix.@"else" = else_node;
}
return node;
}
/// WhileExpr <- WhilePrefix Expr (KEYWORD_else Payload? Expr)?
fn parseWhileExpr(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node {
const node = (try parseWhilePrefix(arena, it, tree)) orelse return null;
const while_prefix = node.cast(Node.While).?;
// The loop body expression is mandatory once the while-prefix has matched.
const body_node = try expectNode(arena, it, tree, parseExpr, .{
.ExpectedExpr = .{ .token = it.index },
});
while_prefix.body = body_node;
// Optional `else` branch, which may carry a capture payload.
if (eatToken(it, .Keyword_else)) |else_token| {
const payload = try parsePayload(arena, it, tree);
const body = try expectNode(arena, it, tree, parseExpr, .{
.ExpectedExpr = .{ .token = it.index },
});
const else_node = try arena.create(Node.Else);
else_node.* = .{
.else_token = else_token,
.payload = payload,
.body = body,
};
while_prefix.@"else" = else_node;
}
return node;
}
/// CurlySuffixExpr <- TypeExpr InitList?
/// A type expression optionally followed by a `{...}` initializer list.
fn parseCurlySuffixExpr(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node {
    const lhs = (try parseTypeExpr(arena, it, tree)) orelse return null;
    if (try parseInitList(arena, it, tree)) |init_list| {
        // parseInitList leaves lhs undefined for the caller to fill in.
        init_list.lhs.node = lhs;
        return &init_list.base;
    }
    return lhs;
}
/// InitList
/// <- LBRACE FieldInit (COMMA FieldInit)* COMMA? RBRACE
/// / LBRACE Expr (COMMA Expr)* COMMA? RBRACE
/// / LBRACE RBRACE
fn parseInitList(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node.SuffixOp {
const lbrace = eatToken(it, .LBrace) orelse return null;
var init_list = Node.SuffixOp.Op.InitList.init(arena);
const op: Node.SuffixOp.Op = blk: {
if (try parseFieldInit(arena, it, tree)) |field_init| {
try init_list.push(field_init);
while (eatToken(it, .Comma)) |_| {
const next = (try parseFieldInit(arena, it, tree)) orelse break;
try init_list.push(next);
/// BlockExpr <- BlockLabel? Block
fn parseBlockExpr(p: *Parser) Error!?*Node {
var colon: TokenIndex = undefined;
const label_token = p.parseBlockLabel(&colon);
const block_node = (try p.parseBlock()) orelse {
if (label_token) |label| {
p.putBackToken(label + 1); // ":"
p.putBackToken(label); // IDENTIFIER
}
break :blk .{ .StructInitializer = init_list };
}
if (try parseExpr(arena, it, tree)) |expr| {
try init_list.push(expr);
while (eatToken(it, .Comma)) |_| {
const next = (try parseExpr(arena, it, tree)) orelse break;
try init_list.push(next);
}
break :blk .{ .ArrayInitializer = init_list };
}
break :blk .{ .StructInitializer = init_list };
};
const node = try arena.create(Node.SuffixOp);
node.* = .{
.lhs = .{ .node = undefined }, // set by caller
.op = op,
.rtoken = try expectToken(it, tree, .RBrace),
};
return node;
}
/// TypeExpr <- PrefixTypeOp* ErrorUnionExpr
/// Any number of prefix type operators, then an error-union expression.
fn parseTypeExpr(arena: *Allocator, it: *TokenIterator, tree: *Tree) Error!?*Node {
    return parsePrefixOpExpr(
        arena,
        it,
        tree,
        parsePrefixTypeOp,
        parseErrorUnionExpr,
    );
}
/// ErrorUnionExpr <- SuffixExpr (EXCLAMATIONMARK TypeExpr)?
/// Old-style API. Parses a suffix expression, optionally followed by
/// `!TypeExpr`, producing an error-union InfixOp node.
/// NOTE(review): lines belonging to the new-style `parseBlockExpr` were
/// interleaved here (bad merge) and the `if` was left unclosed; the foreign
/// lines were removed and the branch re-closed.
fn parseErrorUnionExpr(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node {
    const suffix_expr = (try parseSuffixExpr(arena, it, tree)) orelse return null;
    if (try SimpleBinOpParseFn(.Bang, Node.InfixOp.Op.ErrorUnion)(arena, it, tree)) |node| {
        const error_union = node.cast(Node.InfixOp).?;
        const type_expr = try expectNode(arena, it, tree, parseTypeExpr, .{
            .ExpectedTypeExpr = .{ .token = it.index },
        });
        // The bin-op parse fn creates the node; patch in both operands.
        error_union.lhs = suffix_expr;
        error_union.rhs = type_expr;
        return node;
    }
    return suffix_expr;
}
/// AssignExpr <- Expr (AssignOp Expr)?
/// New-style (`*Parser`) API: at most one assignment operator (.Once).
fn parseAssignExpr(p: *Parser) !?*Node {
return p.parseBinOpExpr(parseAssignOp, parseExpr, .Once);
}
/// SuffixExpr
/// <- KEYWORD_async PrimaryTypeExpr SuffixOp* FnCallArguments
/// / PrimaryTypeExpr (SuffixOp / FnCallArguments)*
fn parseSuffixExpr(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node {
const maybe_async = eatToken(it, .Keyword_async);
if (maybe_async) |async_token| {
const token_fn = eatToken(it, .Keyword_fn);
if (token_fn != null) {
// TODO: remove this hack when async fn rewriting is
// HACK: If we see the keyword `fn`, then we assume that
// we are parsing an async fn proto, and not a call.
// We therefore put back all tokens consumed by the async
// prefix...
putBackToken(it, token_fn.?);
putBackToken(it, async_token);
return parsePrimaryTypeExpr(arena, it, tree);
}
var res = try expectNode(arena, it, tree, parsePrimaryTypeExpr, .{
.ExpectedPrimaryTypeExpr = .{ .token = it.index },
});
// NOTE(review): a stray fragment of the old-style parseSuffixExpr loop was
// interleaved between parseExpr and parseBoolOrExpr (bad merge) and was not
// valid at file scope; it has been removed. The wrappers themselves are
// unchanged.

/// Expr <- KEYWORD_try* BoolOrExpr
fn parseExpr(p: *Parser) Error!?*Node {
    return p.parsePrefixOpExpr(parseTry, parseBoolOrExpr);
}

/// BoolOrExpr <- BoolAndExpr (KEYWORD_or BoolAndExpr)*
fn parseBoolOrExpr(p: *Parser) !?*Node {
    return p.parseBinOpExpr(
        SimpleBinOpParseFn(.Keyword_or, Node.InfixOp.Op.BoolOr),
        parseBoolAndExpr,
        .Infinitely,
    );
}

/// BoolAndExpr <- CompareExpr (KEYWORD_and CompareExpr)*
fn parseBoolAndExpr(p: *Parser) !?*Node {
    return p.parseBinOpExpr(
        SimpleBinOpParseFn(.Keyword_and, .BoolAnd),
        parseCompareExpr,
        .Infinitely,
    );
}

/// CompareExpr <- BitwiseExpr (CompareOp BitwiseExpr)?
fn parseCompareExpr(p: *Parser) !?*Node {
    return p.parseBinOpExpr(parseCompareOp, parseBitwiseExpr, .Once);
}

/// BitwiseExpr <- BitShiftExpr (BitwiseOp BitShiftExpr)*
fn parseBitwiseExpr(p: *Parser) !?*Node {
    return p.parseBinOpExpr(parseBitwiseOp, parseBitShiftExpr, .Infinitely);
}

/// BitShiftExpr <- AdditionExpr (BitShiftOp AdditionExpr)*
fn parseBitShiftExpr(p: *Parser) !?*Node {
    return p.parseBinOpExpr(parseBitShiftOp, parseAdditionExpr, .Infinitely);
}

/// AdditionExpr <- MultiplyExpr (AdditionOp MultiplyExpr)*
fn parseAdditionExpr(p: *Parser) !?*Node {
    return p.parseBinOpExpr(parseAdditionOp, parseMultiplyExpr, .Infinitely);
}

/// MultiplyExpr <- PrefixExpr (MultiplyOp PrefixExpr)*
fn parseMultiplyExpr(p: *Parser) !?*Node {
    return p.parseBinOpExpr(parseMultiplyOp, parsePrefixExpr, .Infinitely);
}

/// PrefixExpr <- PrefixOp* PrimaryExpr
fn parsePrefixExpr(p: *Parser) !?*Node {
    return p.parsePrefixOpExpr(parsePrefixOp, parsePrimaryExpr);
}
/// PrimaryExpr
///     <- AsmExpr
///      / IfExpr
///      / KEYWORD_break BreakLabel? Expr?
///      / KEYWORD_comptime Expr
///      / KEYWORD_nosuspend Expr
///      / KEYWORD_continue BreakLabel?
///      / KEYWORD_resume Expr
///      / KEYWORD_return Expr?
///      / BlockLabel? LoopExpr
///      / Block
///      / CurlySuffixExpr
/// Tries each alternative in grammar order; on no match, backtracks any
/// speculatively consumed label tokens and returns null.
/// NOTE(review): old-style parseSuffixExpr fragments were interleaved into the
/// `Keyword_comptime` branch (bad merge); they have been removed.
fn parsePrimaryExpr(p: *Parser) !?*Node {
    if (try p.parseAsmExpr()) |node| return node;
    if (try p.parseIfExpr()) |node| return node;
    if (p.eatToken(.Keyword_break)) |token| {
        const label = try p.parseBreakLabel();
        const expr_node = try p.parseExpr();
        const node = try p.arena.allocator.create(Node.ControlFlowExpression);
        node.* = .{
            .ltoken = token,
            .kind = .{ .Break = label },
            .rhs = expr_node,
        };
        return &node.base;
    }
    if (p.eatToken(.Keyword_comptime)) |token| {
        const expr_node = try p.expectNode(parseExpr, .{
            .ExpectedExpr = .{ .token = p.tok_i },
        });
        const node = try p.arena.allocator.create(Node.Comptime);
        node.* = .{
            .doc_comments = null,
            .comptime_token = token,
            .expr = expr_node,
        };
        return &node.base;
    }
    if (p.eatToken(.Keyword_nosuspend)) |token| {
        const expr_node = try p.expectNode(parseExpr, .{
            .ExpectedExpr = .{ .token = p.tok_i },
        });
        const node = try p.arena.allocator.create(Node.Nosuspend);
        node.* = .{
            .nosuspend_token = token,
            .expr = expr_node,
        };
        return &node.base;
    }
    if (p.eatToken(.Keyword_continue)) |token| {
        const label = try p.parseBreakLabel();
        const node = try p.arena.allocator.create(Node.ControlFlowExpression);
        node.* = .{
            .ltoken = token,
            .kind = .{ .Continue = label },
            .rhs = null,
        };
        return &node.base;
    }
    if (p.eatToken(.Keyword_resume)) |token| {
        const expr_node = try p.expectNode(parseExpr, .{
            .ExpectedExpr = .{ .token = p.tok_i },
        });
        const node = try p.arena.allocator.create(Node.PrefixOp);
        node.* = .{
            .op_token = token,
            .op = .Resume,
            .rhs = expr_node,
        };
        return &node.base;
    }
    if (p.eatToken(.Keyword_return)) |token| {
        const expr_node = try p.parseExpr();
        const node = try p.arena.allocator.create(Node.ControlFlowExpression);
        node.* = .{
            .ltoken = token,
            .kind = .Return,
            .rhs = expr_node,
        };
        return &node.base;
    }
    var colon: TokenIndex = undefined;
    const label = p.parseBlockLabel(&colon);
    if (try p.parseLoopExpr()) |node| {
        if (node.cast(Node.For)) |for_node| {
            for_node.label = label;
        } else if (node.cast(Node.While)) |while_node| {
            while_node.label = label;
        } else unreachable;
        return node;
    }
    // A label without a following loop may still precede a Block; put the
    // label tokens back so parseBlock/CurlySuffixExpr see them.
    if (label) |token| {
        p.putBackToken(token + 1); // ":"
        p.putBackToken(token); // IDENTIFIER
    }
    if (try p.parseBlock()) |node| return node;
    if (try p.parseCurlySuffixExpr()) |node| return node;
    return null;
}
/// IfExpr <- IfPrefix Expr (KEYWORD_else Payload? Expr)?
/// Delegates to the generic if-parser with Expr as the branch parser.
fn parseIfExpr(p: *Parser) !?*Node {
return p.parseIf(parseExpr);
}
/// Block <- LBRACE Statement* RBRACE
/// Parses statements into a singly linked list until the closing brace,
/// recovering from statement-level parse errors by skipping to the next
/// statement. The Block node's label is null; callers set it when labeled.
/// NOTE(review): a stray old-style `parseSuffixOp` line was interleaved into
/// the loop (bad merge), leaving an unclosed `if`; it has been removed.
fn parseBlock(p: *Parser) !?*Node {
    const lbrace = p.eatToken(.LBrace) orelse return null;
    var statements = Node.Block.StatementList{};
    var statements_it = &statements.first;
    while (true) {
        const statement = (p.parseStatement() catch |err| switch (err) {
            error.OutOfMemory => return error.OutOfMemory,
            error.ParseError => {
                // try to skip to the next statement
                p.findNextStmt();
                continue;
            },
        }) orelse break;
        statements_it = try p.llpush(*Node, statements_it, statement);
    }
    const rbrace = try p.expectToken(.RBrace);
    const block_node = try p.arena.allocator.create(Node.Block);
    block_node.* = .{
        .label = null,
        .lbrace = lbrace,
        .statements = statements,
        .rbrace = rbrace,
    };
    return &block_node.base;
}
/// LoopExpr <- KEYWORD_inline? (ForExpr / WhileExpr)
/// Eats an optional `inline`, then requires a for/while; a bare `inline`
/// with no following loop is a parse error.
fn parseLoopExpr(p: *Parser) !?*Node {
const inline_token = p.eatToken(.Keyword_inline);
if (try p.parseForExpr()) |node| {
node.cast(Node.For).?.inline_token = inline_token;
return node;
}
if (try p.parseWhileExpr()) |node| {
node.cast(Node.While).?.inline_token = inline_token;
return node;
}
// No loop and no `inline` consumed: simply not a LoopExpr.
if (inline_token == null) return null;
// If we've seen "inline", there should have been a "for" or "while"
try p.errors.append(p.gpa, .{
.ExpectedInlinable = .{ .token = p.tok_i },
});
return error.ParseError;
}
/// ForExpr <- ForPrefix Expr (KEYWORD_else Expr)?
/// Parses the loop body (required) and an optional else branch.
fn parseForExpr(p: *Parser) !?*Node {
const node = (try p.parseForPrefix()) orelse return null;
const for_prefix = node.cast(Node.For).?;
const body_node = try p.expectNode(parseExpr, .{
.ExpectedExpr = .{ .token = p.tok_i },
});
for_prefix.body = body_node;
if (p.eatToken(.Keyword_else)) |else_token| {
const body = try p.expectNode(parseExpr, .{
.ExpectedExpr = .{ .token = p.tok_i },
});
const else_node = try p.arena.allocator.create(Node.Else);
else_node.* = .{
.else_token = else_token,
// Per grammar, a for's else branch takes no payload.
.payload = null,
.body = body,
};
for_prefix.@"else" = else_node;
}
return node;
}
/// WhileExpr <- WhilePrefix Expr (KEYWORD_else Payload? Expr)?
/// Parses the loop body (required) and an optional else branch with an
/// optional `|payload|` capture.
fn parseWhileExpr(p: *Parser) !?*Node {
const node = (try p.parseWhilePrefix()) orelse return null;
const while_prefix = node.cast(Node.While).?;
const body_node = try p.expectNode(parseExpr, .{
.ExpectedExpr = .{ .token = p.tok_i },
});
while_prefix.body = body_node;
if (p.eatToken(.Keyword_else)) |else_token| {
const payload = try p.parsePayload();
const body = try p.expectNode(parseExpr, .{
.ExpectedExpr = .{ .token = p.tok_i },
});
const else_node = try p.arena.allocator.create(Node.Else);
else_node.* = .{
.else_token = else_token,
.payload = payload,
.body = body,
};
while_prefix.@"else" = else_node;
}
return node;
}
/// CurlySuffixExpr <- TypeExpr InitList?
/// A type expression, optionally followed by a `{...}` initializer list.
fn parseCurlySuffixExpr(p: *Parser) !?*Node {
    const lhs = (try p.parseTypeExpr()) orelse return null;
    const init_list = (try p.parseInitList()) orelse return lhs;
    // parseInitList leaves lhs undefined for its caller to fill in.
    init_list.lhs.node = lhs;
    return &init_list.base;
}
/// InitList
///     <- LBRACE FieldInit (COMMA FieldInit)* COMMA? RBRACE
///      / LBRACE Expr (COMMA Expr)* COMMA? RBRACE
///      / LBRACE RBRACE
/// Parses a `{...}` initializer list into a singly linked list. The returned
/// SuffixOp's `lhs` is left undefined; the caller must set it.
fn parseInitList(p: *Parser) !?*Node.SuffixOp {
const lbrace = p.eatToken(.LBrace) orelse return null;
var init_list = Node.SuffixOp.Op.InitList{};
var init_list_it = &init_list.first;
const op: Node.SuffixOp.Op = blk: {
// A leading FieldInit (`.name = expr`) selects the struct-initializer form.
if (try p.parseFieldInit()) |field_init| {
init_list_it = try p.llpush(*Node, init_list_it, field_init);
while (p.eatToken(.Comma)) |_| {
const next = (try p.parseFieldInit()) orelse break;
init_list_it = try p.llpush(*Node, init_list_it, next);
}
break :blk .{ .StructInitializer = init_list };
}
// Otherwise a leading Expr selects the array-initializer form.
if (try p.parseExpr()) |expr| {
init_list_it = try p.llpush(*Node, init_list_it, expr);
while (p.eatToken(.Comma)) |_| {
const next = (try p.parseExpr()) orelse break;
init_list_it = try p.llpush(*Node, init_list_it, next);
}
break :blk .{ .ArrayInitializer = init_list };
}
// Empty `{}`: treated as an empty struct initializer.
break :blk .{ .StructInitializer = init_list };
};
const node = try p.arena.allocator.create(Node.SuffixOp);
node.* = .{
.lhs = .{ .node = undefined }, // set by caller
.op = op,
.rtoken = try p.expectToken(.RBrace),
};
return node;
}
/// TypeExpr <- PrefixTypeOp* ErrorUnionExpr
/// Delegates to the generic prefix-operator expression parser.
fn parseTypeExpr(p: *Parser) Error!?*Node {
return p.parsePrefixOpExpr(parsePrefixTypeOp, parseErrorUnionExpr);
}
/// ErrorUnionExpr <- SuffixExpr (EXCLAMATIONMARK TypeExpr)?
/// Parses a suffix expression, optionally followed by `!TypeExpr`, producing
/// an error-union InfixOp node.
fn parseErrorUnionExpr(p: *Parser) !?*Node {
const suffix_expr = (try p.parseSuffixExpr()) orelse return null;
if (try SimpleBinOpParseFn(.Bang, Node.InfixOp.Op.ErrorUnion)(p)) |node| {
const error_union = node.cast(Node.InfixOp).?;
const type_expr = try p.expectNode(parseTypeExpr, .{
.ExpectedTypeExpr = .{ .token = p.tok_i },
});
// The bin-op parse fn creates the node; patch in both operands.
error_union.lhs = suffix_expr;
error_union.rhs = type_expr;
return node;
}
return suffix_expr;
}
/// SuffixExpr
///     <- KEYWORD_async PrimaryTypeExpr SuffixOp* FnCallArguments
///      / PrimaryTypeExpr (SuffixOp / FnCallArguments)*
/// In the async branch, a call is mandatory; otherwise suffix ops and calls
/// are applied left-to-right, each new node taking the previous result as lhs.
/// NOTE(review): this body was interleaved with old-style fragments (stray
/// `continue;`, an arena-style call block, an unbalanced struct literal, and a
/// duplicated tail); the function has been reconstructed from its own lines.
fn parseSuffixExpr(p: *Parser) !?*Node {
    const maybe_async = p.eatToken(.Keyword_async);
    if (maybe_async) |async_token| {
        const token_fn = p.eatToken(.Keyword_fn);
        if (token_fn != null) {
            // TODO: remove this hack when async fn rewriting is
            // HACK: If we see the keyword `fn`, then we assume that
            // we are parsing an async fn proto, and not a call.
            // We therefore put back all tokens consumed by the async
            // prefix...
            p.putBackToken(token_fn.?);
            p.putBackToken(async_token);
            return p.parsePrimaryTypeExpr();
        }
        var res = try p.expectNode(parsePrimaryTypeExpr, .{
            .ExpectedPrimaryTypeExpr = .{ .token = p.tok_i },
        });
        while (try p.parseSuffixOp()) |node| {
            switch (node.id) {
                .SuffixOp => node.cast(Node.SuffixOp).?.lhs = .{ .node = res },
                .InfixOp => node.cast(Node.InfixOp).?.lhs = res,
                else => unreachable,
            }
            res = node;
        }
        const params = (try p.parseFnCallArguments()) orelse {
            try p.errors.append(p.gpa, .{
                .ExpectedParamList = .{ .token = p.tok_i },
            });
            // ignore this, continue parsing
            return res;
        };
        const node = try p.arena.allocator.create(Node.SuffixOp);
        node.* = .{
            .lhs = .{ .node = res },
            .op = .{
                .Call = .{
                    .params = params.list,
                    .async_token = async_token,
                },
            },
            .rtoken = params.rparen,
        };
        return &node.base;
    }
    if (try p.parsePrimaryTypeExpr()) |expr| {
        var res = expr;
        while (true) {
            if (try p.parseSuffixOp()) |node| {
                switch (node.id) {
                    .SuffixOp => node.cast(Node.SuffixOp).?.lhs = .{ .node = res },
                    .InfixOp => node.cast(Node.InfixOp).?.lhs = res,
                    else => unreachable,
                }
                res = node;
                continue;
            }
            if (try p.parseFnCallArguments()) |params| {
                const call = try p.arena.allocator.create(Node.SuffixOp);
                call.* = .{
                    .lhs = .{ .node = res },
                    .op = .{
                        .Call = .{
                            .params = params.list,
                            .async_token = null,
                        },
                    },
                    .rtoken = params.rparen,
                };
                res = &call.base;
                continue;
            }
            break;
        }
        return res;
    }
    return null;
}
/// PrimaryTypeExpr
///     <- BUILTINIDENTIFIER FnCallArguments
///      / CHAR_LITERAL
///      / ContainerDecl
///      / DOT IDENTIFIER
///      / ErrorSetDecl
///      / FLOAT
///      / FnProto
///      / GroupedExpr
///      / LabeledTypeExpr
///      / IDENTIFIER
///      / IfTypeExpr
///      / INTEGER
///      / KEYWORD_comptime TypeExpr
///      / KEYWORD_error DOT IDENTIFIER
///      / KEYWORD_false
///      / KEYWORD_null
///      / KEYWORD_anyframe
///      / KEYWORD_true
///      / KEYWORD_undefined
///      / KEYWORD_unreachable
///      / STRINGLITERAL
///      / SwitchExpr
/// Tries each alternative in grammar order.
/// NOTE(review): a duplicate doc comment and the head of the old-style version
/// were interleaved into the `Keyword_error` branch (bad merge); removed.
fn parsePrimaryTypeExpr(p: *Parser) !?*Node {
    if (try p.parseBuiltinCall()) |node| return node;
    if (p.eatToken(.CharLiteral)) |token| {
        const node = try p.arena.allocator.create(Node.CharLiteral);
        node.* = .{
            .token = token,
        };
        return &node.base;
    }
    if (try p.parseContainerDecl()) |node| return node;
    if (try p.parseAnonLiteral()) |node| return node;
    if (try p.parseErrorSetDecl()) |node| return node;
    if (try p.parseFloatLiteral()) |node| return node;
    if (try p.parseFnProto()) |node| return node;
    if (try p.parseGroupedExpr()) |node| return node;
    if (try p.parseLabeledTypeExpr()) |node| return node;
    if (try p.parseIdentifier()) |node| return node;
    if (try p.parseIfTypeExpr()) |node| return node;
    if (try p.parseIntegerLiteral()) |node| return node;
    if (p.eatToken(.Keyword_comptime)) |token| {
        const expr = (try p.parseTypeExpr()) orelse return null;
        const node = try p.arena.allocator.create(Node.Comptime);
        node.* = .{
            .doc_comments = null,
            .comptime_token = token,
            .expr = expr,
        };
        return &node.base;
    }
    if (p.eatToken(.Keyword_error)) |token| {
        const period = try p.expectTokenRecoverable(.Period);
        const identifier = try p.expectNodeRecoverable(parseIdentifier, .{
            .ExpectedIdentifier = .{ .token = p.tok_i },
        });
        const global_error_set = try p.createLiteral(Node.ErrorType, token);
        // If `.name` did not follow, recover as the bare `error` set type.
        if (period == null or identifier == null) return global_error_set;
        const node = try p.arena.allocator.create(Node.InfixOp);
        node.* = .{
            .op_token = period.?,
            .lhs = global_error_set,
            .op = .Period,
            .rhs = identifier.?,
        };
        return &node.base;
    }
    if (p.eatToken(.Keyword_false)) |token| return p.createLiteral(Node.BoolLiteral, token);
    if (p.eatToken(.Keyword_null)) |token| return p.createLiteral(Node.NullLiteral, token);
    if (p.eatToken(.Keyword_anyframe)) |token| {
        const node = try p.arena.allocator.create(Node.AnyFrameType);
        node.* = .{
            .anyframe_token = token,
            .result = null,
        };
        return &node.base;
    }
    if (p.eatToken(.Keyword_true)) |token| return p.createLiteral(Node.BoolLiteral, token);
    if (p.eatToken(.Keyword_undefined)) |token| return p.createLiteral(Node.UndefinedLiteral, token);
    if (p.eatToken(.Keyword_unreachable)) |token| return p.createLiteral(Node.Unreachable, token);
    if (try p.parseStringLiteral()) |node| return node;
    if (try p.parseSwitchExpr()) |node| return node;
    return null;
}
/// ContainerDecl <- (KEYWORD_extern / KEYWORD_packed)? ContainerDeclAuto
fn parseContainerDecl(p: *Parser) !?*Node {
    const layout_token = p.eatToken(.Keyword_extern) orelse p.eatToken(.Keyword_packed);
    if (try p.parseContainerDeclAuto()) |node| {
        node.cast(Node.ContainerDecl).?.*.layout_token = layout_token;
        return node;
    }
    // No container followed; un-consume the layout keyword so callers can retry.
    if (layout_token) |token| p.putBackToken(token);
    return null;
}
/// ErrorSetDecl <- KEYWORD_error LBRACE IdentifierList RBRACE
/// Parses `error { A, B, ... }`. If no `{` follows, backtracks so the
/// `error.Name` form can be parsed later by PrimaryTypeExpr.
/// NOTE(review): a stray `.token = token,` initializer line referenced a name
/// not in scope (bad merge); it has been removed.
fn parseErrorSetDecl(p: *Parser) !?*Node {
    const error_token = p.eatToken(.Keyword_error) orelse return null;
    if (p.eatToken(.LBrace) == null) {
        // Might parse as `KEYWORD_error DOT IDENTIFIER` later in PrimaryTypeExpr, so don't error
        p.putBackToken(error_token);
        return null;
    }
    const decls = try p.parseErrorTagList();
    const rbrace = try p.expectToken(.RBrace);
    const node = try p.arena.allocator.create(Node.ErrorSetDecl);
    node.* = .{
        .error_token = error_token,
        .decls = decls,
        .rbrace_token = rbrace,
    };
    return &node.base;
}
if (try parseContainerDecl(arena, it, tree)) |node| return node;
if (try parseAnonLiteral(arena, it, tree)) |node| return node;
if (try parseErrorSetDecl(arena, it, tree)) |node| return node;
if (try parseFloatLiteral(arena, it, tree)) |node| return node;
if (try parseFnProto(arena, it, tree)) |node| return node;
if (try parseGroupedExpr(arena, it, tree)) |node| return node;
if (try parseLabeledTypeExpr(arena, it, tree)) |node| return node;
if (try parseIdentifier(arena, it, tree)) |node| return node;
if (try parseIfTypeExpr(arena, it, tree)) |node| return node;
if (try parseIntegerLiteral(arena, it, tree)) |node| return node;
if (eatToken(it, .Keyword_comptime)) |token| {
const expr = (try parseTypeExpr(arena, it, tree)) orelse return null;
const node = try arena.create(Node.Comptime);
node.* = .{
.doc_comments = null,
.comptime_token = token,
.expr = expr,
};
return &node.base;
}
if (eatToken(it, .Keyword_error)) |token| {
const period = try expectTokenRecoverable(it, tree, .Period);
const identifier = try expectNodeRecoverable(arena, it, tree, parseIdentifier, .{
.ExpectedIdentifier = .{ .token = it.index },
/// GroupedExpr <- LPAREN Expr RPAREN
/// Parses a parenthesized expression into a GroupedExpression node.
/// NOTE(review): old-style lines (arena-based error-set handling) were
/// interleaved into this body (bad merge); they have been removed.
fn parseGroupedExpr(p: *Parser) !?*Node {
    const lparen = p.eatToken(.LParen) orelse return null;
    const expr = try p.expectNode(parseExpr, .{
        .ExpectedExpr = .{ .token = p.tok_i },
    });
    const rparen = try p.expectToken(.RParen);
    const node = try p.arena.allocator.create(Node.GroupedExpression);
    node.* = .{
        .lparen = lparen,
        .expr = expr,
        .rparen = rparen,
    };
    return &node.base;
}
if (eatToken(it, .Keyword_false)) |token| return createLiteral(arena, Node.BoolLiteral, token);
if (eatToken(it, .Keyword_null)) |token| return createLiteral(arena, Node.NullLiteral, token);
if (eatToken(it, .Keyword_anyframe)) |token| {
const node = try arena.create(Node.AnyFrameType);
node.* = .{
.anyframe_token = token,
.result = null,
};
return &node.base;
/// IfTypeExpr <- IfPrefix TypeExpr (KEYWORD_else Payload? TypeExpr)?
/// Delegates to the generic if-parser with TypeExpr as the branch parser.
fn parseIfTypeExpr(p: *Parser) !?*Node {
return p.parseIf(parseTypeExpr);
}
if (eatToken(it, .Keyword_true)) |token| return createLiteral(arena, Node.BoolLiteral, token);
if (eatToken(it, .Keyword_undefined)) |token| return createLiteral(arena, Node.UndefinedLiteral, token);
if (eatToken(it, .Keyword_unreachable)) |token| return createLiteral(arena, Node.Unreachable, token);
if (try parseStringLiteral(arena, it, tree)) |node| return node;
if (try parseSwitchExpr(arena, it, tree)) |node| return node;
return null;
}
/// LabeledTypeExpr
/// <- BlockLabel Block
/// / BlockLabel? LoopTypeExpr
fn parseLabeledTypeExpr(p: *Parser) !?*Node {
var colon: TokenIndex = undefined;
const label = p.parseBlockLabel(&colon);
/// ContainerDecl <- (KEYWORD_extern / KEYWORD_packed)? ContainerDeclAuto
/// Old-style API. If no container follows an eaten layout keyword, the
/// keyword token is put back so callers can retry other alternatives.
/// NOTE(review): new-style `parseLabeledTypeExpr` lines were interleaved into
/// this body (bad merge); they have been removed.
fn parseContainerDecl(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node {
    const layout_token = eatToken(it, .Keyword_extern) orelse
        eatToken(it, .Keyword_packed);
    const node = (try parseContainerDeclAuto(arena, it, tree)) orelse {
        if (layout_token) |token|
            putBackToken(it, token);
        return null;
    };
    node.cast(Node.ContainerDecl).?.*.layout_token = layout_token;
    return node;
}
/// ErrorSetDecl <- KEYWORD_error LBRACE IdentifierList RBRACE
/// Old-style API. Parses `error { A, B, ... }`; backtracks when no `{`
/// follows so `error.Name` can be parsed later.
fn parseErrorSetDecl(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node {
const error_token = eatToken(it, .Keyword_error) orelse return null;
if (eatToken(it, .LBrace) == null) {
// Might parse as `KEYWORD_error DOT IDENTIFIER` later in PrimaryTypeExpr, so don't error
putBackToken(it, error_token);
return null;
}
const decls = try parseErrorTagList(arena, it, tree);
const rbrace = try expectToken(it, tree, .RBrace);
const node = try arena.create(Node.ErrorSetDecl);
node.* = .{
.error_token = error_token,
.decls = decls,
.rbrace_token = rbrace,
};
return &node.base;
}
/// GroupedExpr <- LPAREN Expr RPAREN
/// Old-style API. Parses a parenthesized expression.
fn parseGroupedExpr(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node {
const lparen = eatToken(it, .LParen) orelse return null;
const expr = try expectNode(arena, it, tree, parseExpr, .{
.ExpectedExpr = .{ .token = it.index },
});
const rparen = try expectToken(it, tree, .RParen);
const node = try arena.create(Node.GroupedExpression);
node.* = .{
.lparen = lparen,
.expr = expr,
.rparen = rparen,
};
return &node.base;
}
/// IfTypeExpr <- IfPrefix TypeExpr (KEYWORD_else Payload? TypeExpr)?
/// Old-style API: delegates to the generic if-parser.
fn parseIfTypeExpr(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node {
return parseIf(arena, it, tree, parseTypeExpr);
}
/// LabeledTypeExpr
///     <- BlockLabel Block
///      / BlockLabel? LoopTypeExpr
/// Old-style API. Tries a labeled block first, then a (possibly labeled)
/// loop type expression; on failure the label tokens are put back.
/// NOTE(review): new-style `p.*` fragments were interleaved into this body
/// (bad merge); the function has been reconstructed from its own lines.
fn parseLabeledTypeExpr(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node {
    var colon: TokenIndex = undefined;
    const label = parseBlockLabel(arena, it, tree, &colon);
    if (label) |token| {
        if (try parseBlock(arena, it, tree)) |node| {
            node.cast(Node.Block).?.label = token;
            return node;
        }
    }
    if (try parseLoopTypeExpr(arena, it, tree)) |node| {
        switch (node.id) {
            .For => node.cast(Node.For).?.label = label,
            .While => node.cast(Node.While).?.label = label,
            else => unreachable,
        }
        return node;
    }
    // Neither a block nor a loop followed the label; un-consume it.
    if (label) |token| {
        putBackToken(it, colon);
        putBackToken(it, token);
    }
    return null;
}
/// LoopTypeExpr <- KEYWORD_inline? (ForTypeExpr / WhileTypeExpr)
fn parseLoopTypeExpr(p: *Parser) !?*Node {
const inline_token = p.eatToken(.Keyword_inline);
/// LoopTypeExpr <- KEYWORD_inline? (ForTypeExpr / WhileTypeExpr)
/// Old-style API. A bare `inline` with no following loop is a parse error.
fn parseLoopTypeExpr(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node {
const inline_token = eatToken(it, .Keyword_inline);
if (try parseForTypeExpr(arena, it, tree)) |node| {
node.cast(Node.For).?.inline_token = inline_token;
return node;
}
if (try parseWhileTypeExpr(arena, it, tree)) |node| {
node.cast(Node.While).?.inline_token = inline_token;
return node;
}
// No loop and no `inline` consumed: simply not a LoopTypeExpr.
if (inline_token == null) return null;
// If we've seen "inline", there should have been a "for" or "while"
try tree.errors.push(.{
.ExpectedInlinable = .{ .token = it.index },
});
return error.ParseError;
}
/// ForTypeExpr <- ForPrefix TypeExpr (KEYWORD_else TypeExpr)?
/// Old-style API. Parses the loop body (required) and an optional else.
fn parseForTypeExpr(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node {
const node = (try parseForPrefix(arena, it, tree)) orelse return null;
const for_prefix = node.cast(Node.For).?;
const type_expr = try expectNode(arena, it, tree, parseTypeExpr, .{
.ExpectedTypeExpr = .{ .token = it.index },
});
for_prefix.body = type_expr;
if (eatToken(it, .Keyword_else)) |else_token| {
const else_expr = try expectNode(arena, it, tree, parseTypeExpr, .{
.ExpectedTypeExpr = .{ .token = it.index },
});
const else_node = try arena.create(Node.Else);
else_node.* = .{
.else_token = else_token,
// Per grammar, a for's else branch takes no payload.
.payload = null,
.body = else_expr,
};
for_prefix.@"else" = else_node;
}
return node;
}
/// WhileTypeExpr <- WhilePrefix TypeExpr (KEYWORD_else Payload? TypeExpr)?
/// Old-style API. Parses the loop body (required) and an optional else branch
/// with an optional `|payload|` capture.
/// FIX(review): the parsed `payload` was being discarded (`.payload = null`),
/// silently dropping the else-capture; store it, matching the grammar and the
/// sibling parseWhileExpr.
fn parseWhileTypeExpr(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node {
    const node = (try parseWhilePrefix(arena, it, tree)) orelse return null;
    const while_prefix = node.cast(Node.While).?;
    const type_expr = try expectNode(arena, it, tree, parseTypeExpr, .{
        .ExpectedTypeExpr = .{ .token = it.index },
    });
    while_prefix.body = type_expr;
    if (eatToken(it, .Keyword_else)) |else_token| {
        const payload = try parsePayload(arena, it, tree);
        const else_expr = try expectNode(arena, it, tree, parseTypeExpr, .{
            .ExpectedTypeExpr = .{ .token = it.index },
        });
        const else_node = try arena.create(Node.Else);
        else_node.* = .{
            .else_token = else_token,
            .payload = payload,
            .body = else_expr,
        };
        while_prefix.@"else" = else_node;
    }
    return node;
}
/// SwitchExpr <- KEYWORD_switch LPAREN Expr RPAREN LBRACE SwitchProngList RBRACE
/// Old-style API. Parses the switch operand and its prong list.
fn parseSwitchExpr(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node {
const switch_token = eatToken(it, .Keyword_switch) orelse return null;
_ = try expectToken(it, tree, .LParen);
const expr_node = try expectNode(arena, it, tree, parseExpr, .{
.ExpectedExpr = .{ .token = it.index },
});
_ = try expectToken(it, tree, .RParen);
_ = try expectToken(it, tree, .LBrace);
const cases = try parseSwitchProngList(arena, it, tree);
const rbrace = try expectToken(it, tree, .RBrace);
const node = try arena.create(Node.Switch);
node.* = .{
.switch_token = switch_token,
.expr = expr_node,
.cases = cases,
.rbrace = rbrace,
};
return &node.base;
}
/// AsmExpr <- KEYWORD_asm KEYWORD_volatile? LPAREN Expr AsmOutput? RPAREN
/// Old-style API. The node is created with empty output/input/clobber lists
/// and an undefined rparen, then filled in by parseAsmOutput and the final
/// expectToken before returning.
fn parseAsmExpr(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node {
const asm_token = eatToken(it, .Keyword_asm) orelse return null;
const volatile_token = eatToken(it, .Keyword_volatile);
_ = try expectToken(it, tree, .LParen);
const template = try expectNode(arena, it, tree, parseExpr, .{
.ExpectedExpr = .{ .token = it.index },
});
const node = try arena.create(Node.Asm);
node.* = .{
.asm_token = asm_token,
.volatile_token = volatile_token,
.template = template,
.outputs = Node.Asm.OutputList.init(arena),
.inputs = Node.Asm.InputList.init(arena),
.clobbers = Node.Asm.ClobberList.init(arena),
.rparen = undefined,
};
// Fills outputs/inputs/clobbers in place via the asm_node pointer.
try parseAsmOutput(arena, it, tree, node);
node.rparen = try expectToken(it, tree, .RParen);
return &node.base;
}
/// DOT IDENTIFIER
/// Old-style API. Parses anonymous literals starting with `.`: either an
/// enum literal (`.name`) or an anonymous container literal (`.{ ... }`).
/// Backtracks the `.` token if neither form follows.
fn parseAnonLiteral(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node {
const dot = eatToken(it, .Period) orelse return null;
// anon enum literal
if (eatToken(it, .Identifier)) |name| {
const node = try arena.create(Node.EnumLiteral);
node.* = .{
.dot = dot,
.name = name,
};
return &node.base;
}
// anon container literal
if (try parseInitList(arena, it, tree)) |node| {
node.lhs = .{ .dot = dot };
return &node.base;
}
putBackToken(it, dot);
return null;
}
/// AsmOutput <- COLON AsmOutputList AsmInput?
/// Old-style API. Fills asm_node.outputs in place, then chains to inputs.
fn parseAsmOutput(arena: *Allocator, it: *TokenIterator, tree: *Tree, asm_node: *Node.Asm) !void {
if (eatToken(it, .Colon) == null) return;
asm_node.outputs = try parseAsmOutputList(arena, it, tree);
try parseAsmInput(arena, it, tree, asm_node);
}
/// AsmOutputItem <- LBRACKET IDENTIFIER RBRACKET STRINGLITERAL LPAREN (MINUSRARROW TypeExpr / IDENTIFIER) RPAREN
/// Old-style API. Parses one `[name] "constraint" (-> T / var)` asm output.
/// NOTE(review): a new-style `parseLoopTypeExpr` fragment was interleaved into
/// the `kind` computation (bad merge), leaving the `if` unclosed; the foreign
/// lines were removed and the branch re-closed.
fn parseAsmOutputItem(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node.AsmOutput {
    const lbracket = eatToken(it, .LBracket) orelse return null;
    const name = try expectNode(arena, it, tree, parseIdentifier, .{
        .ExpectedIdentifier = .{ .token = it.index },
    });
    _ = try expectToken(it, tree, .RBracket);
    const constraint = try expectNode(arena, it, tree, parseStringLiteral, .{
        .ExpectedStringLiteral = .{ .token = it.index },
    });
    _ = try expectToken(it, tree, .LParen);
    const kind: Node.AsmOutput.Kind = blk: {
        // `-> T` means the output is the asm expression's return value.
        if (eatToken(it, .Arrow) != null) {
            const return_ident = try expectNode(arena, it, tree, parseTypeExpr, .{
                .ExpectedTypeExpr = .{ .token = it.index },
            });
            break :blk .{ .Return = return_ident };
        }
        // Otherwise the output is bound to a variable.
        const variable = try expectNode(arena, it, tree, parseIdentifier, .{
            .ExpectedIdentifier = .{ .token = it.index },
        });
        break :blk .{ .Variable = variable.cast(Node.Identifier).? };
    };
    const rparen = try expectToken(it, tree, .RParen);
    const node = try arena.create(Node.AsmOutput);
    node.* = .{
        .lbracket = lbracket,
        .symbolic_name = name,
        .constraint = constraint,
        .kind = kind,
        .rparen = rparen,
    };
    return node;
}
if (try p.parseWhileTypeExpr()) |node| {
node.cast(Node.While).?.inline_token = inline_token;
return node;
}
/// AsmInput <- COLON AsmInputList AsmClobbers?
/// Old-style API. Fills asm_node.inputs in place, then chains to clobbers.
fn parseAsmInput(arena: *Allocator, it: *TokenIterator, tree: *Tree, asm_node: *Node.Asm) !void {
if (eatToken(it, .Colon) == null) return;
asm_node.inputs = try parseAsmInputList(arena, it, tree);
try parseAsmClobbers(arena, it, tree, asm_node);
}
if (inline_token == null) return null;
/// AsmInputItem <- LBRACKET IDENTIFIER RBRACKET STRINGLITERAL LPAREN Expr RPAREN
/// Old-style API. Parses one `[name] "constraint" (expr)` asm input.
fn parseAsmInputItem(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node.AsmInput {
const lbracket = eatToken(it, .LBracket) orelse return null;
const name = try expectNode(arena, it, tree, parseIdentifier, .{
.ExpectedIdentifier = .{ .token = it.index },
});
_ = try expectToken(it, tree, .RBracket);
const constraint = try expectNode(arena, it, tree, parseStringLiteral, .{
.ExpectedStringLiteral = .{ .token = it.index },
});
_ = try expectToken(it, tree, .LParen);
const expr = try expectNode(arena, it, tree, parseExpr, .{
.ExpectedExpr = .{ .token = it.index },
});
const rparen = try expectToken(it, tree, .RParen);
const node = try arena.create(Node.AsmInput);
node.* = .{
.lbracket = lbracket,
.symbolic_name = name,
.constraint = constraint,
.expr = expr,
.rparen = rparen,
};
return node;
}
/// AsmClobbers <- COLON StringList
/// StringList <- (STRINGLITERAL COMMA)* STRINGLITERAL?
/// Old-style API. Fills asm_node.clobbers in place using the generic
/// comma-separated list parser over string literals.
fn parseAsmClobbers(arena: *Allocator, it: *TokenIterator, tree: *Tree, asm_node: *Node.Asm) !void {
if (eatToken(it, .Colon) == null) return;
asm_node.clobbers = try ListParseFn(
Node.Asm.ClobberList,
parseStringLiteral,
)(arena, it, tree);
}
/// BreakLabel <- COLON IDENTIFIER
/// Old-style API. Returns null when there is no `:`; once a `:` is consumed,
/// an identifier is required.
fn parseBreakLabel(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node {
_ = eatToken(it, .Colon) orelse return null;
return try expectNode(arena, it, tree, parseIdentifier, .{
.ExpectedIdentifier = .{ .token = it.index },
});
}
/// BlockLabel <- IDENTIFIER COLON
/// On success returns the identifier token and writes the colon token through
/// `colon_token`; otherwise puts the identifier back and returns null.
fn parseBlockLabel(arena: *Allocator, it: *TokenIterator, tree: *Tree, colon_token: *TokenIndex) ?TokenIndex {
    const label = eatToken(it, .Identifier) orelse return null;
    const colon = eatToken(it, .Colon) orelse {
        // Not a label after all — undo the identifier so another rule can use it.
        putBackToken(it, label);
        return null;
    };
    colon_token.* = colon;
    return label;
}
/// FieldInit <- DOT IDENTIFIER EQUAL Expr
/// Backtracks (puts tokens back in reverse order) when the lookahead turns out
/// to be an anonymous literal `.{` or an enum literal `.Name`.
fn parseFieldInit(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node {
    const period = eatToken(it, .Period) orelse return null;

    const name = eatToken(it, .Identifier) orelse {
        // Because of anon literals `.{` is also valid.
        putBackToken(it, period);
        return null;
    };
    _ = eatToken(it, .Equal) orelse {
        // `.Name` may also be an enum literal, which is a later rule.
        putBackToken(it, name);
        putBackToken(it, period);
        return null;
    };

    const value = try expectNode(arena, it, tree, parseExpr, .{
        .ExpectedExpr = .{ .token = it.index },
    });

    const init_node = try arena.create(Node.FieldInitializer);
    init_node.* = .{
        .period_token = period,
        .name_token = name,
        .expr = value,
    };
    return &init_node.base;
}
/// WhileContinueExpr <- COLON LPAREN AssignExpr RPAREN
/// Returns the parenthesized assign-expression node, or null if no ':' is present.
fn parseWhileContinueExpr(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node {
    _ = eatToken(it, .Colon) orelse return null;
    _ = try expectToken(it, tree, .LParen);
    const assign_expr = try expectNode(arena, it, tree, parseAssignExpr, .{
        .ExpectedExprOrAssignment = .{ .token = it.index },
    });
    _ = try expectToken(it, tree, .RParen);
    return assign_expr;
}
/// LinkSection <- KEYWORD_linksection LPAREN Expr RPAREN
/// Returns the section expression node, or null when the keyword is absent.
fn parseLinkSection(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node {
    _ = eatToken(it, .Keyword_linksection) orelse return null;
    _ = try expectToken(it, tree, .LParen);
    const section_expr = try expectNode(arena, it, tree, parseExpr, .{
        .ExpectedExpr = .{ .token = it.index },
    });
    _ = try expectToken(it, tree, .RParen);
    return section_expr;
}
/// CallConv <- KEYWORD_callconv LPAREN Expr RPAREN
/// Returns the calling-convention expression node, or null when the keyword is absent.
fn parseCallconv(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node {
    _ = eatToken(it, .Keyword_callconv) orelse return null;
    _ = try expectToken(it, tree, .LParen);
    const cc_expr = try expectNode(arena, it, tree, parseExpr, .{
        .ExpectedExpr = .{ .token = it.index },
    });
    _ = try expectToken(it, tree, .RParen);
    return cc_expr;
}
/// ParamDecl <- (KEYWORD_noalias / KEYWORD_comptime)? (IDENTIFIER COLON)? ParamType
///
/// Fix: the error-reporting branch of this function had foreign diff fragments
/// (a `p.errors.append(p.gpa, ...)` call from the new Parser API and a stray
/// closing brace) spliced into it, breaking the syntax. The coherent old-API
/// body is restored below; no behavior beyond the corruption is changed.
fn parseParamDecl(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node {
    const doc_comments = try parseDocComment(arena, it, tree);
    const noalias_token = eatToken(it, .Keyword_noalias);
    // noalias and comptime are mutually exclusive prefixes.
    const comptime_token = if (noalias_token == null) eatToken(it, .Keyword_comptime) else null;
    const name_token = blk: {
        const identifier = eatToken(it, .Identifier) orelse break :blk null;
        if (eatToken(it, .Colon) != null) break :blk identifier;
        putBackToken(it, identifier); // ParamType may also be an identifier
        break :blk null;
    };
    const param_type = (try parseParamType(arena, it, tree)) orelse {
        // Only return cleanly if no keyword, identifier, or doc comment was found
        if (noalias_token == null and
            comptime_token == null and
            name_token == null and
            doc_comments == null) return null;
        try tree.errors.push(.{
            .ExpectedParamType = .{ .token = it.index },
        });
        return error.ParseError;
    };

    const param_decl = try arena.create(Node.ParamDecl);
    param_decl.* = .{
        .doc_comments = doc_comments,
        .comptime_token = comptime_token,
        .noalias_token = noalias_token,
        .name_token = name_token,
        .param_type = param_type,
    };
    return &param_decl.base;
}
/// ForTypeExpr <- ForPrefix TypeExpr (KEYWORD_else TypeExpr)?
// NOTE(review): this function is truncated here — the remainder of its body
// (the TypeExpr parse, `for_prefix.body` assignment, and the else handling)
// appears scattered elsewhere in this file, which looks like merge/diff
// corruption. Kept byte-for-byte; reconcile against the new Parser-based API.
fn parseForTypeExpr(p: *Parser) !?*Node {
    const node = (try p.parseForPrefix()) orelse return null;
    const for_prefix = node.cast(Node.For).?;
/// ParamType
///     <- KEYWORD_var
///      / DOT3
///      / TypeExpr
/// Tries each alternative in grammar order; null when none matches.
fn parseParamType(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?Node.ParamDecl.ParamType {
    // TODO cast from tuple to error union is broken
    const Result = Node.ParamDecl.ParamType;
    if (try parseVarType(arena, it, tree)) |var_node|
        return Result{ .var_type = var_node };
    if (eatToken(it, .Ellipsis3)) |dots|
        return Result{ .var_args = dots };
    if (try parseTypeExpr(arena, it, tree)) |type_node|
        return Result{ .type_expr = type_node };
    return null;
}
/// IfPrefix <- KEYWORD_if LPAREN Expr RPAREN PtrPayload?
/// Produces an If node whose `.body` is left undefined for the caller to fill in.
fn parseIfPrefix(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node {
    const if_token = eatToken(it, .Keyword_if) orelse return null;

    _ = try expectToken(it, tree, .LParen);
    const cond_expr = try expectNode(arena, it, tree, parseExpr, .{
        .ExpectedExpr = .{ .token = it.index },
    });
    _ = try expectToken(it, tree, .RParen);

    const payload_node = try parsePtrPayload(arena, it, tree);

    const if_node = try arena.create(Node.If);
    if_node.* = .{
        .if_token = if_token,
        .condition = cond_expr,
        .payload = payload_node,
        .body = undefined, // set by caller
        .@"else" = null,
    };
    return &if_node.base;
}
/// WhilePrefix <- KEYWORD_while LPAREN Expr RPAREN PtrPayload? WhileContinueExpr?
/// Produces a While node whose `.body` is left undefined for the caller to fill in.
fn parseWhilePrefix(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node {
    const while_token = eatToken(it, .Keyword_while) orelse return null;

    _ = try expectToken(it, tree, .LParen);
    const cond_expr = try expectNode(arena, it, tree, parseExpr, .{
        .ExpectedExpr = .{ .token = it.index },
    });
    _ = try expectToken(it, tree, .RParen);

    const payload_node = try parsePtrPayload(arena, it, tree);
    const continue_expr = try parseWhileContinueExpr(arena, it, tree);

    const while_node = try arena.create(Node.While);
    while_node.* = .{
        .label = null,
        .inline_token = null,
        .while_token = while_token,
        .condition = cond_expr,
        .payload = payload_node,
        .continue_expr = continue_expr,
        .body = undefined, // set by caller
        .@"else" = null,
    };
    return &while_node.base;
}
/// ForPrefix <- KEYWORD_for LPAREN Expr RPAREN PtrIndexPayload
/// Produces a For node whose `.body` is left undefined for the caller to fill in.
fn parseForPrefix(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node {
    const for_token = eatToken(it, .Keyword_for) orelse return null;

    _ = try expectToken(it, tree, .LParen);
    const array_expr = try expectNode(arena, it, tree, parseExpr, .{
        .ExpectedExpr = .{ .token = it.index },
    });
    _ = try expectToken(it, tree, .RParen);

    // Unlike while, the payload is mandatory for a for loop.
    const payload_node = try expectNode(arena, it, tree, parsePtrIndexPayload, .{
        .ExpectedPayload = .{ .token = it.index },
    });

    const for_node = try arena.create(Node.For);
    for_node.* = .{
        .label = null,
        .inline_token = null,
        .for_token = for_token,
        .array_expr = array_expr,
        .payload = payload_node,
        .body = undefined, // set by caller
        .@"else" = null,
    };
    return &for_node.base;
}
/// Payload <- PIPE IDENTIFIER PIPE
fn parsePayload(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node {
    const lpipe = eatToken(it, .Pipe) orelse return null;

    const symbol = try expectNode(arena, it, tree, parseIdentifier, .{
        .ExpectedIdentifier = .{ .token = it.index },
    });
    const rpipe = try expectToken(it, tree, .Pipe);

    const payload = try arena.create(Node.Payload);
    payload.* = .{
        .lpipe = lpipe,
        .error_symbol = symbol,
        .rpipe = rpipe,
    };
    return &payload.base;
}
/// PtrPayload <- PIPE ASTERISK? IDENTIFIER PIPE
/// The optional '*' makes the capture a pointer capture.
fn parsePtrPayload(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node {
    const lpipe = eatToken(it, .Pipe) orelse return null;
    const ptr_token = eatToken(it, .Asterisk);

    const symbol = try expectNode(arena, it, tree, parseIdentifier, .{
        .ExpectedIdentifier = .{ .token = it.index },
    });
    const rpipe = try expectToken(it, tree, .Pipe);

    const payload = try arena.create(Node.PointerPayload);
    payload.* = .{
        .lpipe = lpipe,
        .ptr_token = ptr_token,
        .value_symbol = symbol,
        .rpipe = rpipe,
    };
    return &payload.base;
}
/// PtrIndexPayload <- PIPE ASTERISK? IDENTIFIER (COMMA IDENTIFIER)? PIPE
///
/// Fix: the `index` expression had foreign diff fragments from the new
/// Parser-based parseForTypeExpr (`const type_expr = try p.expectNode(...)`,
/// `for_prefix.body = type_expr;`) spliced into it, breaking the syntax.
/// The coherent old-API body is restored below.
fn parsePtrIndexPayload(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node {
    const lpipe = eatToken(it, .Pipe) orelse return null;
    const asterisk = eatToken(it, .Asterisk);
    const identifier = try expectNode(arena, it, tree, parseIdentifier, .{
        .ExpectedIdentifier = .{ .token = it.index },
    });
    // Optional second capture: the loop index.
    const index = if (eatToken(it, .Comma) == null)
        null
    else
        try expectNode(arena, it, tree, parseIdentifier, .{
            .ExpectedIdentifier = .{ .token = it.index },
        });
    const rpipe = try expectToken(it, tree, .Pipe);

    const node = try arena.create(Node.PointerIndexPayload);
    node.* = .{
        .lpipe = lpipe,
        .ptr_token = asterisk,
        .value_symbol = identifier,
        .index_symbol = index,
        .rpipe = rpipe,
    };
    return &node.base;
}
/// SwitchProng <- SwitchCase EQUALRARROW PtrPayload? AssignExpr
/// Parses the case first, then fills in the arrow/payload/expr fields that
/// parseSwitchCase leaves for its caller.
fn parseSwitchProng(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node {
    const case_node = (try parseSwitchCase(arena, it, tree)) orelse return null;

    const arrow_token = try expectToken(it, tree, .EqualAngleBracketRight);
    const payload_node = try parsePtrPayload(arena, it, tree);
    const expr_node = try expectNode(arena, it, tree, parseAssignExpr, .{
        .ExpectedExprOrAssignment = .{ .token = it.index },
    });

    const switch_case = case_node.cast(Node.SwitchCase).?;
    switch_case.arrow_token = arrow_token;
    switch_case.payload = payload_node;
    switch_case.expr = expr_node;
    return case_node;
}
/// SwitchCase
///     <- SwitchItem (COMMA SwitchItem)* COMMA?
///      / KEYWORD_else
/// Leaves `.arrow_token` and `.expr` undefined for parseSwitchProng to fill in.
fn parseSwitchCase(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node {
    var items = Node.SwitchCase.ItemList.init(arena);

    if (try parseSwitchItem(arena, it, tree)) |first| {
        try items.push(first);
        // Keep consuming `, item` pairs; a trailing comma simply ends the list.
        while (eatToken(it, .Comma)) |_| {
            const item = (try parseSwitchItem(arena, it, tree)) orelse break;
            try items.push(item);
        }
    } else if (eatToken(it, .Keyword_else)) |else_token| {
        const else_node = try arena.create(Node.SwitchElse);
        else_node.* = .{
            .token = else_token,
        };
        try items.push(&else_node.base);
    } else return null;

    const case_node = try arena.create(Node.SwitchCase);
    case_node.* = .{
        .items = items,
        .arrow_token = undefined, // set by caller
        .payload = null,
        .expr = undefined, // set by caller
    };
    return &case_node.base;
}
/// SwitchItem <- Expr (DOT3 Expr)?
/// A bare expression, or a `a...b` range represented as a Range InfixOp.
fn parseSwitchItem(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node {
    const start_expr = (try parseExpr(arena, it, tree)) orelse return null;

    const dots = eatToken(it, .Ellipsis3) orelse return start_expr;

    const end_expr = try expectNode(arena, it, tree, parseExpr, .{
        .ExpectedExpr = .{ .token = it.index },
    });
    const range = try arena.create(Node.InfixOp);
    range.* = .{
        .op_token = dots,
        .lhs = start_expr,
        .op = .Range,
        .rhs = end_expr,
    };
    return &range.base;
}
/// AssignOp
///     <- ASTERISKEQUAL / SLASHEQUAL / PERCENTEQUAL / PLUSEQUAL / MINUSEQUAL
///      / LARROW2EQUAL / RARROW2EQUAL / AMPERSANDEQUAL / CARETEQUAL / PIPEEQUAL
///      / ASTERISKPERCENTEQUAL / PLUSPERCENTEQUAL / MINUSPERCENTEQUAL / EQUAL
/// Consumes one token; puts it back and returns null if it is not an assign op.
fn parseAssignOp(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node {
    const token = nextToken(it);
    const maybe_op: ?Node.InfixOp.Op = switch (token.ptr.id) {
        .AsteriskEqual => .AssignMul,
        .SlashEqual => .AssignDiv,
        .PercentEqual => .AssignMod,
        .PlusEqual => .AssignAdd,
        .MinusEqual => .AssignSub,
        .AngleBracketAngleBracketLeftEqual => .AssignBitShiftLeft,
        .AngleBracketAngleBracketRightEqual => .AssignBitShiftRight,
        .AmpersandEqual => .AssignBitAnd,
        .CaretEqual => .AssignBitXor,
        .PipeEqual => .AssignBitOr,
        .AsteriskPercentEqual => .AssignMulWrap,
        .PlusPercentEqual => .AssignAddWrap,
        .MinusPercentEqual => .AssignSubWrap,
        .Equal => .Assign,
        else => null,
    };
    const op = maybe_op orelse {
        putBackToken(it, token.index);
        return null;
    };

    const op_node = try arena.create(Node.InfixOp);
    op_node.* = .{
        .op_token = token.index,
        .lhs = undefined, // set by caller
        .op = op,
        .rhs = undefined, // set by caller
    };
    return &op_node.base;
}
/// CompareOp
///     <- EQUALEQUAL / EXCLAMATIONMARKEQUAL
///      / LARROW / RARROW / LARROWEQUAL / RARROWEQUAL
/// Consumes one token; puts it back and returns null if it is not a comparison.
fn parseCompareOp(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node {
    const token = nextToken(it);
    const maybe_op: ?Node.InfixOp.Op = switch (token.ptr.id) {
        .EqualEqual => .EqualEqual,
        .BangEqual => .BangEqual,
        .AngleBracketLeft => .LessThan,
        .AngleBracketRight => .GreaterThan,
        .AngleBracketLeftEqual => .LessOrEqual,
        .AngleBracketRightEqual => .GreaterOrEqual,
        else => null,
    };
    const op = maybe_op orelse {
        putBackToken(it, token.index);
        return null;
    };
    return try createInfixOp(arena, token.index, op);
}
/// BitwiseOp
///     <- AMPERSAND / CARET / PIPE
///      / KEYWORD_orelse
///      / KEYWORD_catch Payload?
/// Consumes one token; `catch` additionally parses its optional payload.
fn parseBitwiseOp(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node {
    const token = nextToken(it);
    const maybe_op: ?Node.InfixOp.Op = switch (token.ptr.id) {
        .Ampersand => .BitAnd,
        .Caret => .BitXor,
        .Pipe => .BitOr,
        .Keyword_orelse => .UnwrapOptional,
        .Keyword_catch => .{ .Catch = try parsePayload(arena, it, tree) },
        else => null,
    };
    const op = maybe_op orelse {
        putBackToken(it, token.index);
        return null;
    };
    return try createInfixOp(arena, token.index, op);
}
/// BitShiftOp
///     <- LARROW2
///      / RARROW2
fn parseBitShiftOp(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node {
    const token = nextToken(it);
    const maybe_op: ?Node.InfixOp.Op = switch (token.ptr.id) {
        .AngleBracketAngleBracketLeft => .BitShiftLeft,
        .AngleBracketAngleBracketRight => .BitShiftRight,
        else => null,
    };
    const op = maybe_op orelse {
        putBackToken(it, token.index);
        return null;
    };
    return try createInfixOp(arena, token.index, op);
}
/// AdditionOp
///     <- PLUS / MINUS / PLUS2 / PLUSPERCENT / MINUSPERCENT
fn parseAdditionOp(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node {
    const token = nextToken(it);
    const maybe_op: ?Node.InfixOp.Op = switch (token.ptr.id) {
        .Plus => .Add,
        .Minus => .Sub,
        .PlusPlus => .ArrayCat,
        .PlusPercent => .AddWrap,
        .MinusPercent => .SubWrap,
        else => null,
    };
    const op = maybe_op orelse {
        putBackToken(it, token.index);
        return null;
    };
    return try createInfixOp(arena, token.index, op);
}
/// MultiplyOp
///     <- PIPE2 / ASTERISK / SLASH / PERCENT / ASTERISK2 / ASTERISKPERCENT
fn parseMultiplyOp(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node {
    const token = nextToken(it);
    const maybe_op: ?Node.InfixOp.Op = switch (token.ptr.id) {
        .PipePipe => .MergeErrorSets,
        .Asterisk => .Mul,
        .Slash => .Div,
        .Percent => .Mod,
        .AsteriskAsterisk => .ArrayMult,
        .AsteriskPercent => .MulWrap,
        else => null,
    };
    const op = maybe_op orelse {
        putBackToken(it, token.index);
        return null;
    };
    return try createInfixOp(arena, token.index, op);
}
/// PrefixOp
///     <- EXCLAMATIONMARK / MINUS / TILDE / MINUSPERCENT
///      / AMPERSAND / KEYWORD_try / KEYWORD_await
/// Produces a PrefixOp node whose `.rhs` is left undefined for the caller.
fn parsePrefixOp(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node {
    const token = nextToken(it);
    const maybe_op: ?Node.PrefixOp.Op = switch (token.ptr.id) {
        .Bang => .BoolNot,
        .Minus => .Negation,
        .Tilde => .BitNot,
        .MinusPercent => .NegationWrap,
        .Ampersand => .AddressOf,
        .Keyword_try => .Try,
        .Keyword_await => .Await,
        else => null,
    };
    const op = maybe_op orelse {
        putBackToken(it, token.index);
        return null;
    };

    const op_node = try arena.create(Node.PrefixOp);
    op_node.* = .{
        .op_token = token.index,
        .op = op,
        .rhs = undefined, // set by caller
    };
    return &op_node.base;
}
// TODO: ArrayTypeStart is either an array or a slice, but const/allowzero only work on
// pointers. Consider updating this rule:
//     ...
//     / ArrayTypeStart
//     / SliceTypeStart (ByteAlign / KEYWORD_const / KEYWORD_volatile / KEYWORD_allowzero)*
//     / PtrTypeStart ...
/// PrefixTypeOp
///     <- QUESTIONMARK
///      / KEYWORD_anyframe MINUSRARROW
///      / ArrayTypeStart (ByteAlign / KEYWORD_const / KEYWORD_volatile / KEYWORD_allowzero)*
///      / PtrTypeStart (KEYWORD_align LPAREN Expr (COLON INTEGER COLON INTEGER)? RPAREN / KEYWORD_const / KEYWORD_volatile / KEYWORD_allowzero)*
/// Each alternative produces a node whose `.rhs` (the operand type) is left
/// undefined for the caller to fill in.
fn parsePrefixTypeOp(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node {
    // `?T`: a plain optional-type prefix.
    if (eatToken(it, .QuestionMark)) |token| {
        const node = try arena.create(Node.PrefixOp);
        node.* = .{
            .op_token = token,
            .op = .OptionalType,
            .rhs = undefined, // set by caller
        };
        return &node.base;
    }

    // TODO: Returning a AnyFrameType instead of PrefixOp makes casting and setting .rhs or
    // .return_type more difficult for the caller (see parsePrefixOpExpr helper).
    // Consider making the AnyFrameType a member of PrefixOp and add a
    // PrefixOp.AnyFrameType variant?
    if (eatToken(it, .Keyword_anyframe)) |token| {
        // `anyframe` without `->` is not a prefix type op; put it back.
        const arrow = eatToken(it, .Arrow) orelse {
            putBackToken(it, token);
            return null;
        };
        const node = try arena.create(Node.AnyFrameType);
        node.* = .{
            .anyframe_token = token,
            .result = .{
                .arrow_token = arrow,
                .return_type = undefined, // set by caller
            },
        };
        return &node.base;
    }

    if (try parsePtrTypeStart(arena, it, tree)) |node| {
        // If the token encountered was **, there will be two nodes instead of one.
        // The attributes should be applied to the rightmost operator.
        const prefix_op = node.cast(Node.PrefixOp).?;
        var ptr_info = if (tree.tokens.at(prefix_op.op_token).id == .AsteriskAsterisk)
            &prefix_op.rhs.cast(Node.PrefixOp).?.op.PtrType
        else
            &prefix_op.op.PtrType;

        // Collect pointer qualifiers in any order; duplicates are reported as
        // errors (pointing at the just-consumed token) but parsing continues.
        while (true) {
            if (eatToken(it, .Keyword_align)) |align_token| {
                const lparen = try expectToken(it, tree, .LParen);
                const expr_node = try expectNode(arena, it, tree, parseExpr, .{
                    .ExpectedExpr = .{ .token = it.index },
                });

                // Optional bit range
                const bit_range = if (eatToken(it, .Colon)) |_| bit_range_value: {
                    const range_start = try expectNode(arena, it, tree, parseIntegerLiteral, .{
                        .ExpectedIntegerLiteral = .{ .token = it.index },
                    });
                    _ = try expectToken(it, tree, .Colon);
                    const range_end = try expectNode(arena, it, tree, parseIntegerLiteral, .{
                        .ExpectedIntegerLiteral = .{ .token = it.index },
                    });

                    break :bit_range_value Node.PrefixOp.PtrInfo.Align.BitRange{
                        .start = range_start,
                        .end = range_end,
                    };
                } else null;
                _ = try expectToken(it, tree, .RParen);

                if (ptr_info.align_info != null) {
                    try tree.errors.push(.{
                        .ExtraAlignQualifier = .{ .token = it.index - 1 },
                    });
                    continue;
                }

                ptr_info.align_info = Node.PrefixOp.PtrInfo.Align{
                    .node = expr_node,
                    .bit_range = bit_range,
                };

                continue;
            }
            if (eatToken(it, .Keyword_const)) |const_token| {
                if (ptr_info.const_token != null) {
                    try tree.errors.push(.{
                        .ExtraConstQualifier = .{ .token = it.index - 1 },
                    });
                    continue;
                }
                ptr_info.const_token = const_token;
                continue;
            }
            if (eatToken(it, .Keyword_volatile)) |volatile_token| {
                if (ptr_info.volatile_token != null) {
                    try tree.errors.push(.{
                        .ExtraVolatileQualifier = .{ .token = it.index - 1 },
                    });
                    continue;
                }
                ptr_info.volatile_token = volatile_token;
                continue;
            }
            if (eatToken(it, .Keyword_allowzero)) |allowzero_token| {
                if (ptr_info.allowzero_token != null) {
                    try tree.errors.push(.{
                        .ExtraAllowZeroQualifier = .{ .token = it.index - 1 },
                    });
                    continue;
                }
                ptr_info.allowzero_token = allowzero_token;
                continue;
            }
            break;
        }

        return node;
    }

    if (try parseArrayTypeStart(arena, it, tree)) |node| {
        switch (node.cast(Node.PrefixOp).?.op) {
            // Qualifiers are only meaningful on slices, not fixed arrays.
            .ArrayType => {},
            .SliceType => |*slice_type| {
                // Collect pointer qualifiers in any order, but disallow duplicates
                while (true) {
                    if (try parseByteAlign(arena, it, tree)) |align_expr| {
                        if (slice_type.align_info != null) {
                            try tree.errors.push(.{
                                .ExtraAlignQualifier = .{ .token = it.index - 1 },
                            });
                            continue;
                        }
                        slice_type.align_info = Node.PrefixOp.PtrInfo.Align{
                            .node = align_expr,
                            .bit_range = null,
                        };
                        continue;
                    }
                    if (eatToken(it, .Keyword_const)) |const_token| {
                        if (slice_type.const_token != null) {
                            try tree.errors.push(.{
                                .ExtraConstQualifier = .{ .token = it.index - 1 },
                            });
                            continue;
                        }
                        slice_type.const_token = const_token;
                        continue;
                    }
                    if (eatToken(it, .Keyword_volatile)) |volatile_token| {
                        if (slice_type.volatile_token != null) {
                            try tree.errors.push(.{
                                .ExtraVolatileQualifier = .{ .token = it.index - 1 },
                            });
                            continue;
                        }
                        slice_type.volatile_token = volatile_token;
                        continue;
                    }
                    if (eatToken(it, .Keyword_allowzero)) |allowzero_token| {
                        if (slice_type.allowzero_token != null) {
                            try tree.errors.push(.{
                                .ExtraAllowZeroQualifier = .{ .token = it.index - 1 },
                            });
                            continue;
                        }
                        slice_type.allowzero_token = allowzero_token;
                        continue;
                    }
                    break;
                }
            },
            else => unreachable,
        }
        return node;
    }

    return null;
}
/// SuffixOp
///     <- LBRACKET Expr (DOT2 (Expr (COLON Expr)?)?)? RBRACKET
///      / DOT IDENTIFIER
///      / DOTASTERISK
///      / DOTQUESTIONMARK
///
/// Fix: this function had fragments of the new Parser-based parseWhileTypeExpr
/// and parseForTypeExpr (else-branch handling, `Node.Else` construction,
/// `return node;` tails) interleaved into its body, and the closing braces of
/// the LBracket branch and the error-report call were lost. Reconstructed from
/// the in-order surviving old-API lines; no intentional behavior change.
fn parseSuffixOp(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node {
    const OpAndToken = struct {
        op: Node.SuffixOp.Op,
        token: TokenIndex,
    };
    const op_and_token: OpAndToken = blk: {
        if (eatToken(it, .LBracket)) |_| {
            const index_expr = try expectNode(arena, it, tree, parseExpr, .{
                .ExpectedExpr = .{ .token = it.index },
            });

            if (eatToken(it, .Ellipsis2) != null) {
                // `a[x..]`, `a[x..y]`, or `a[x..y :s]` — a slice operation.
                const end_expr = try parseExpr(arena, it, tree);
                const sentinel: ?*ast.Node = if (eatToken(it, .Colon) != null)
                    try parseExpr(arena, it, tree)
                else
                    null;
                break :blk .{
                    .op = .{
                        .Slice = .{
                            .start = index_expr,
                            .end = end_expr,
                            .sentinel = sentinel,
                        },
                    },
                    .token = try expectToken(it, tree, .RBracket),
                };
            }

            break :blk .{
                .op = .{ .ArrayAccess = index_expr },
                .token = try expectToken(it, tree, .RBracket),
            };
        }

        if (eatToken(it, .PeriodAsterisk)) |period_asterisk| {
            break :blk .{ .op = .Deref, .token = period_asterisk };
        }

        if (eatToken(it, .Period)) |period| {
            if (try parseIdentifier(arena, it, tree)) |identifier| {
                // TODO: It's a bit weird to return an InfixOp from the SuffixOp parser.
                // Should there be an ast.Node.SuffixOp.FieldAccess variant? Or should
                // this grammar rule be altered?
                const node = try arena.create(Node.InfixOp);
                node.* = .{
                    .op_token = period,
                    .lhs = undefined, // set by caller
                    .op = .Period,
                    .rhs = identifier,
                };
                return &node.base;
            }
            if (eatToken(it, .QuestionMark)) |question_mark| {
                break :blk .{ .op = .UnwrapOptional, .token = question_mark };
            }
            try tree.errors.push(.{
                .ExpectedSuffixOp = .{ .token = it.index },
            });
            return null;
        }

        return null;
    };

    const node = try arena.create(Node.SuffixOp);
    node.* = .{
        .lhs = undefined, // set by caller
        .op = op_and_token.op,
        .rtoken = op_and_token.token,
    };
    return &node.base;
}
/// SwitchExpr <- KEYWORD_switch LPAREN Expr RPAREN LBRACE SwitchProngList RBRACE
// NOTE(review): this function is truncated here — the `Node.Switch`
// construction and the closing brace do not follow; matching fragments appear
// further down in this file, which looks like merge/diff corruption. Kept
// byte-for-byte; reconcile against the new Parser-based API.
fn parseSwitchExpr(p: *Parser) !?*Node {
    const switch_token = p.eatToken(.Keyword_switch) orelse return null;
    _ = try p.expectToken(.LParen);
    const expr_node = try p.expectNode(parseExpr, .{
        .ExpectedExpr = .{ .token = p.tok_i },
    });
    _ = try p.expectToken(.RParen);
    _ = try p.expectToken(.LBrace);
    const cases = try p.parseSwitchProngList();
    const rbrace = try p.expectToken(.RBrace);
/// FnCallArguments <- LPAREN ExprList RPAREN
/// ExprList <- (Expr COMMA)* Expr?
/// Returns the argument list together with the ')' token, or null when there
/// is no opening parenthesis.
fn parseFnCallArguments(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?AnnotatedParamList {
    _ = eatToken(it, .LParen) orelse return null;
    const parseExprList = ListParseFn(Node.FnProto.ParamList, parseExpr);
    const args = try parseExprList(arena, it, tree);
    const rparen = try expectToken(it, tree, .RParen);
    return AnnotatedParamList{ .list = args, .rparen = rparen };
}
/// Pairs a parsed expression list with the closing ')' token, since callers
/// (e.g. parseFnCallArguments) need both.
const AnnotatedParamList = struct {
    list: Node.FnProto.ParamList, // NOTE: may also be any other type SegmentedList(*Node, 2)
    rparen: TokenIndex,
};
/// ArrayTypeStart <- LBRACKET Expr? RBRACKET
/// `[N]T` becomes an ArrayType op, `[]T` a SliceType op; either may carry a
/// `:sentinel`. The `.rhs` operand is left undefined for the caller.
fn parseArrayTypeStart(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node {
    const lbracket = eatToken(it, .LBracket) orelse return null;

    // The length expression is optional: absent means a slice type.
    const len_expr = try parseExpr(arena, it, tree);
    const sentinel = if (eatToken(it, .Colon)) |_|
        try expectNode(arena, it, tree, parseExpr, .{
            .ExpectedExpr = .{ .token = it.index },
        })
    else
        null;
    _ = try expectToken(it, tree, .RBracket);

    const op: Node.PrefixOp.Op = if (len_expr) |expr| .{
        .ArrayType = .{
            .len_expr = expr,
            .sentinel = sentinel,
        },
    } else .{
        .SliceType = Node.PrefixOp.PtrInfo{
            .allowzero_token = null,
            .align_info = null,
            .const_token = null,
            .volatile_token = null,
            .sentinel = sentinel,
        },
    };

    const result = try arena.create(Node.PrefixOp);
    result.* = .{
        .op_token = lbracket,
        .op = op,
        .rhs = undefined, // set by caller
    };
    return &result.base;
}
/// PtrTypeStart
///     <- ASTERISK
///      / ASTERISK2
///      / PTRUNKNOWN
///      / PTRC
///
/// Fix: the node constructions in this function had foreign diff fragments
/// from the new Parser-based parseSwitchExpr (`Node.Switch` fields) and
/// parseAsmExpr (`Node.Asm` creation and fields) spliced into them, breaking
/// the syntax. Reconstructed from the surviving old-API lines.
fn parsePtrTypeStart(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node {
    // `*T`, optionally `*:s T`.
    if (eatToken(it, .Asterisk)) |asterisk| {
        const sentinel = if (eatToken(it, .Colon)) |_|
            try expectNode(arena, it, tree, parseExpr, .{
                .ExpectedExpr = .{ .token = it.index },
            })
        else
            null;
        const node = try arena.create(Node.PrefixOp);
        node.* = .{
            .op_token = asterisk,
            .op = .{ .PtrType = .{ .sentinel = sentinel } },
            .rhs = undefined, // set by caller
        };
        return &node.base;
    }

    // `**T` — a single token that stands for two pointer levels.
    if (eatToken(it, .AsteriskAsterisk)) |double_asterisk| {
        const node = try arena.create(Node.PrefixOp);
        node.* = .{
            .op_token = double_asterisk,
            .op = .{ .PtrType = .{} },
            .rhs = undefined, // set by caller
        };

        // Special case for **, which is its own token
        const child = try arena.create(Node.PrefixOp);
        child.* = .{
            .op_token = double_asterisk,
            .op = .{ .PtrType = .{} },
            .rhs = undefined, // set by caller
        };
        node.rhs = &child.base;

        return &node.base;
    }

    // `[*]T`, `[*c]T`, or `[*:s]T`.
    if (eatToken(it, .LBracket)) |lbracket| {
        const asterisk = eatToken(it, .Asterisk) orelse {
            putBackToken(it, lbracket);
            return null;
        };
        if (eatToken(it, .Identifier)) |ident| {
            // Only the identifier `c` is valid here (C pointer); anything else
            // is put back so the sentinel/`]` path can handle it.
            if (!std.mem.eql(u8, tree.tokenSlice(ident), "c")) {
                putBackToken(it, ident);
            } else {
                _ = try expectToken(it, tree, .RBracket);
                const node = try arena.create(Node.PrefixOp);
                node.* = .{
                    .op_token = lbracket,
                    .op = .{ .PtrType = .{} },
                    .rhs = undefined, // set by caller
                };
                return &node.base;
            }
        }
        const sentinel = if (eatToken(it, .Colon)) |_|
            try expectNode(arena, it, tree, parseExpr, .{
                .ExpectedExpr = .{ .token = it.index },
            })
        else
            null;
        _ = try expectToken(it, tree, .RBracket);
        const node = try arena.create(Node.PrefixOp);
        node.* = .{
            .op_token = lbracket,
            .op = .{ .PtrType = .{ .sentinel = sentinel } },
            .rhs = undefined, // set by caller
        };
        return &node.base;
    }
    return null;
}
/// ContainerDeclAuto <- ContainerDeclType LBRACE ContainerMembers RBRACE
///
/// Fix: the header of the new Parser-based parseAnonLiteral was spliced into
/// the middle of this function by diff corruption; removed. The ContainerDecl
/// node comes back from parseContainerDeclType with its brace/member fields
/// undefined, so they are filled in here.
fn parseContainerDeclAuto(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node {
    const node = (try parseContainerDeclType(arena, it, tree)) orelse return null;
    const lbrace = try expectToken(it, tree, .LBrace);
    const members = try parseContainerMembers(arena, it, tree, false);
    const rbrace = try expectToken(it, tree, .RBrace);

    const decl_type = node.cast(Node.ContainerDecl).?;
    decl_type.fields_and_decls = members;
    decl_type.lbrace_token = lbrace;
    decl_type.rbrace_token = rbrace;
    return node;
}
/// ContainerDeclType
///     <- KEYWORD_struct
///      / KEYWORD_enum (LPAREN Expr RPAREN)?
///      / KEYWORD_union (LPAREN (KEYWORD_enum (LPAREN Expr RPAREN)? / Expr) RPAREN)?
/// Parses only the container keyword and its optional init argument; the
/// braces, members, and layout token are filled in by the caller
/// (see parseContainerDeclAuto), so those fields are left undefined here.
fn parseContainerDeclType(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node {
    const kind_token = nextToken(it);

    const init_arg_expr = switch (kind_token.ptr.id) {
        .Keyword_struct => Node.ContainerDecl.InitArg{ .None = {} },
        .Keyword_enum => blk: {
            // Optional tag-type argument: `enum(T)`.
            if (eatToken(it, .LParen) != null) {
                const expr = try expectNode(arena, it, tree, parseExpr, .{
                    .ExpectedExpr = .{ .token = it.index },
                });
                _ = try expectToken(it, tree, .RParen);
                break :blk Node.ContainerDecl.InitArg{ .Type = expr };
            }
            break :blk Node.ContainerDecl.InitArg{ .None = {} };
        },
        .Keyword_union => blk: {
            // `union(enum)`, `union(enum(T))`, or `union(T)`.
            if (eatToken(it, .LParen) != null) {
                if (eatToken(it, .Keyword_enum) != null) {
                    if (eatToken(it, .LParen) != null) {
                        const expr = try expectNode(arena, it, tree, parseExpr, .{
                            .ExpectedExpr = .{ .token = it.index },
                        });
                        _ = try expectToken(it, tree, .RParen);
                        _ = try expectToken(it, tree, .RParen);
                        break :blk Node.ContainerDecl.InitArg{ .Enum = expr };
                    }
                    _ = try expectToken(it, tree, .RParen);
                    break :blk Node.ContainerDecl.InitArg{ .Enum = null };
                }
                const expr = try expectNode(arena, it, tree, parseExpr, .{
                    .ExpectedExpr = .{ .token = it.index },
                });
                _ = try expectToken(it, tree, .RParen);
                break :blk Node.ContainerDecl.InitArg{ .Type = expr };
            }
            break :blk Node.ContainerDecl.InitArg{ .None = {} };
        },
        else => {
            // Not a container keyword; undo the token consumption.
            putBackToken(it, kind_token.index);
            return null;
        },
    };

    const node = try arena.create(Node.ContainerDecl);
    node.* = .{
        .layout_token = null,
        .kind_token = kind_token.index,
        .init_arg_expr = init_arg_expr,
        .fields_and_decls = undefined, // set by caller
        .lbrace_token = undefined, // set by caller
        .rbrace_token = undefined, // set by caller
    };
    return &node.base;
}
/// ByteAlign <- KEYWORD_align LPAREN Expr RPAREN
/// Returns the alignment expression node, or null when the keyword is absent.
fn parseByteAlign(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node {
    _ = eatToken(it, .Keyword_align) orelse return null;
    _ = try expectToken(it, tree, .LParen);
    const align_expr = try expectNode(arena, it, tree, parseExpr, .{
        .ExpectedExpr = .{ .token = it.index },
    });
    _ = try expectToken(it, tree, .RParen);
    return align_expr;
}
/// IdentifierList <- (IDENTIFIER COMMA)* IDENTIFIER?
/// Only ErrorSetDecl parses an IdentifierList
fn parseErrorTagList(arena: *Allocator, it: *TokenIterator, tree: *Tree) !Node.ErrorSetDecl.DeclList {
    const parseList = ListParseFn(Node.ErrorSetDecl.DeclList, parseErrorTag);
    return try parseList(arena, it, tree);
}
/// SwitchProngList <- (SwitchProng COMMA)* SwitchProng?
fn parseSwitchProngList(arena: *Allocator, it: *TokenIterator, tree: *Tree) !Node.Switch.CaseList {
    const parseList = ListParseFn(Node.Switch.CaseList, parseSwitchProng);
    return try parseList(arena, it, tree);
}
/// AsmOutputList <- (AsmOutputItem COMMA)* AsmOutputItem?
fn parseAsmOutputList(arena: *Allocator, it: *TokenIterator, tree: *Tree) Error!Node.Asm.OutputList {
    const parseList = ListParseFn(Node.Asm.OutputList, parseAsmOutputItem);
    return try parseList(arena, it, tree);
}
/// AsmInputList <- (AsmInputItem COMMA)* AsmInputItem?
fn parseAsmInputList(arena: *Allocator, it: *TokenIterator, tree: *Tree) Error!Node.Asm.InputList {
    const parseList = ListParseFn(Node.Asm.InputList, parseAsmInputItem);
    return try parseList(arena, it, tree);
}
/// ParamDeclList <- (ParamDecl COMMA)* ParamDecl?
fn parseParamDeclList(arena: *Allocator, it: *TokenIterator, tree: *Tree) !Node.FnProto.ParamList {
    const parseList = ListParseFn(Node.FnProto.ParamList, parseParamDecl);
    return try parseList(arena, it, tree);
}
/// Function-type constructor: the signature shared by all list-producing
/// parse functions in this file (arena, token iterator, tree -> Error!T).
fn ParseFn(comptime T: type) type {
    return fn (*Allocator, *TokenIterator, *Tree) Error!T;
}

// The common signature of the node-producing parse functions in this file.
const NodeParseFn = fn (*Allocator, *TokenIterator, *Tree) Error!?*Node;
/// Returns a parse function that repeatedly applies `nodeParseFn` and collects
/// the results into a list of type `L`, separated by commas. A missing comma
/// before anything other than a known delimiter is reported as an error, but
/// parsing of the list continues (error recovery).
fn ListParseFn(comptime L: type, comptime nodeParseFn: var) ParseFn(L) {
    return struct {
        pub fn parse(arena: *Allocator, it: *TokenIterator, tree: *Tree) !L {
            var list = L.init(arena);
            while (try nodeParseFn(arena, it, tree)) |node| {
                try list.push(node);

                switch (it.peek().?.id) {
                    .Comma => _ = nextToken(it),
                    // all possible delimiters
                    .Colon, .RParen, .RBrace, .RBracket => break,
                    else => {
                        // this is likely just a missing comma,
                        // continue parsing this list and give an error
                        try tree.errors.push(.{
                            .ExpectedToken = .{ .token = it.index, .expected_id = .Comma },
                        });
                    },
                }
            }
            return list;
        }
    }.parse;
}
// NOTE(review): this region appears to contain interleaved text from two
// revisions (an old-API SimpleBinOpParseFn and a new-API anon enum/container
// literal parser). `p` and `dot` are not defined in this scope, and the
// closing braces of the returned struct are missing — needs manual repair
// against the intended revision before this file can compile.
fn SimpleBinOpParseFn(comptime token: Token.Id, comptime op: Node.InfixOp.Op) NodeParseFn {
    return struct {
        pub fn parse(arena: *Allocator, it: *TokenIterator, tree: *Tree) Error!?*Node {
            // `and` gets special treatment so `&&` can be diagnosed as invalid.
            const op_token = if (token == .Keyword_and) switch (it.peek().?.id) {
                .Keyword_and => nextToken(it).index,
                .Invalid_ampersands => blk: {
                    try tree.errors.push(.{
                        .InvalidAnd = .{ .token = it.index },
                    });
                    break :blk nextToken(it).index;
                },
                else => return null,
            } else eatToken(it, token) orelse return null;
            const node = try arena.create(Node.InfixOp);
            // NOTE(review): merge seam — everything below belongs to a
            // different function (new `p: *Parser` API, anon literal parsing).
            // anon enum literal
            if (p.eatToken(.Identifier)) |name| {
                const node = try p.arena.allocator.create(Node.EnumLiteral);
                node.* = .{
                    .op_token = op_token,
                    .lhs = undefined, // set by caller
                    .op = op,
                    .dot = dot,
                    .name = name,
                };
                return &node.base;
            }
            // anon container literal
            if (try p.parseInitList()) |node| {
                node.lhs = .{ .dot = dot };
                return &node.base;
            }
            p.putBackToken(dot);
            return null;
        }
/// AsmOutput <- COLON AsmOutputList AsmInput?
fn parseAsmOutput(p: *Parser, asm_node: *Node.Asm) !void {
    // The output section is optional; it is introduced by a colon.
    const colon = p.eatToken(.Colon);
    if (colon == null) return;
    asm_node.outputs = try p.parseAsmOutputList();
    // An input section may follow the outputs.
    try p.parseAsmInput(asm_node);
}
/// AsmOutputItem <- LBRACKET IDENTIFIER RBRACKET STRINGLITERAL LPAREN (MINUSRARROW TypeExpr / IDENTIFIER) RPAREN
fn parseAsmOutputItem(p: *Parser) !?*Node.AsmOutput {
    const lbracket = p.eatToken(.LBracket) orelse return null;
    // `[name]` — symbolic name of the output operand.
    const symbolic_name = try p.expectNode(parseIdentifier, .{
        .ExpectedIdentifier = .{ .token = p.tok_i },
    });
    _ = try p.expectToken(.RBracket);
    // The constraint string, e.g. "=r".
    const constraint = try p.expectNode(parseStringLiteral, .{
        .ExpectedStringLiteral = .{ .token = p.tok_i },
    });
    _ = try p.expectToken(.LParen);
    // `-> T` binds the output to the asm return value; otherwise it names a variable.
    const kind: Node.AsmOutput.Kind = kind_blk: {
        if (p.eatToken(.Arrow) == null) {
            const variable = try p.expectNode(parseIdentifier, .{
                .ExpectedIdentifier = .{ .token = p.tok_i },
            });
            break :kind_blk .{ .Variable = variable.cast(Node.Identifier).? };
        }
        const return_type = try p.expectNode(parseTypeExpr, .{
            .ExpectedTypeExpr = .{ .token = p.tok_i },
        });
        break :kind_blk .{ .Return = return_type };
    };
    const rparen = try p.expectToken(.RParen);
    const result = try p.arena.allocator.create(Node.AsmOutput);
    result.* = .{
        .lbracket = lbracket,
        .symbolic_name = symbolic_name,
        .constraint = constraint,
        .kind = kind,
        .rparen = rparen,
    };
    return result;
}
/// AsmInput <- COLON AsmInputList AsmClobbers?
fn parseAsmInput(p: *Parser, asm_node: *Node.Asm) !void {
    // The input section is optional; it is introduced by a colon.
    const colon = p.eatToken(.Colon);
    if (colon == null) return;
    asm_node.inputs = try p.parseAsmInputList();
    // A clobbers section may follow the inputs.
    try p.parseAsmClobbers(asm_node);
}
/// AsmInputItem <- LBRACKET IDENTIFIER RBRACKET STRINGLITERAL LPAREN Expr RPAREN
fn parseAsmInputItem(p: *Parser) !?*Node.AsmInput {
    const lbracket = p.eatToken(.LBracket) orelse return null;
    // `[name]` — symbolic name of the input operand.
    const symbolic_name = try p.expectNode(parseIdentifier, .{
        .ExpectedIdentifier = .{ .token = p.tok_i },
    });
    _ = try p.expectToken(.RBracket);
    // The constraint string, e.g. "r".
    const constraint = try p.expectNode(parseStringLiteral, .{
        .ExpectedStringLiteral = .{ .token = p.tok_i },
    });
    _ = try p.expectToken(.LParen);
    // The expression whose value feeds this input.
    const input_expr = try p.expectNode(parseExpr, .{
        .ExpectedExpr = .{ .token = p.tok_i },
    });
    const rparen = try p.expectToken(.RParen);
    const result = try p.arena.allocator.create(Node.AsmInput);
    result.* = .{
        .lbracket = lbracket,
        .symbolic_name = symbolic_name,
        .constraint = constraint,
        .expr = input_expr,
        .rparen = rparen,
    };
    return result;
}
/// AsmClobbers <- COLON StringList
/// StringList <- (STRINGLITERAL COMMA)* STRINGLITERAL?
fn parseAsmClobbers(p: *Parser, asm_node: *Node.Asm) !void {
    // The clobbers section is optional; it is introduced by a colon.
    if (p.eatToken(.Colon) == null) return;
    const parseClobberList = ListParseFn(
        Node.Asm.ClobberList,
        parseStringLiteral,
    );
    asm_node.clobbers = try parseClobberList(p);
}
/// BreakLabel <- COLON IDENTIFIER
fn parseBreakLabel(p: *Parser) !?*Node {
    // A break label is only present when a colon follows `break`.
    if (p.eatToken(.Colon) == null) return null;
    // Once the colon is consumed, the identifier is mandatory.
    return p.expectNode(parseIdentifier, .{
        .ExpectedIdentifier = .{ .token = p.tok_i },
    });
}
/// BlockLabel <- IDENTIFIER COLON
fn parseBlockLabel(p: *Parser, colon_token: *TokenIndex) ?TokenIndex {
    const name = p.eatToken(.Identifier) orelse return null;
    const maybe_colon = p.eatToken(.Colon);
    if (maybe_colon) |colon| {
        colon_token.* = colon;
        return name;
    }
    // No colon, so this identifier is not a label; rewind.
    p.putBackToken(name);
    return null;
}
/// FieldInit <- DOT IDENTIFIER EQUAL Expr
fn parseFieldInit(p: *Parser) !?*Node {
    const period_token = p.eatToken(.Period) orelse return null;
    const name_token = p.eatToken(.Identifier) orelse {
        // Because of anon literals `.{` is also valid.
        p.putBackToken(period_token);
        return null;
    };
    if (p.eatToken(.Equal) == null) {
        // `.Name` may also be an enum literal, which is a later rule.
        p.putBackToken(name_token);
        p.putBackToken(period_token);
        return null;
    }
    // After `.name =`, the value expression is mandatory.
    const value_expr = try p.expectNode(parseExpr, .{
        .ExpectedExpr = .{ .token = p.tok_i },
    });
    const result = try p.arena.allocator.create(Node.FieldInitializer);
    result.* = .{
        .period_token = period_token,
        .name_token = name_token,
        .expr = value_expr,
    };
    return &result.base;
}
/// WhileContinueExpr <- COLON LPAREN AssignExpr RPAREN
fn parseWhileContinueExpr(p: *Parser) !?*Node {
    // The continue expression is optional and introduced by a colon.
    if (p.eatToken(.Colon) == null) return null;
    _ = try p.expectToken(.LParen);
    const continue_expr = try p.expectNode(parseAssignExpr, .{
        .ExpectedExprOrAssignment = .{ .token = p.tok_i },
    });
    _ = try p.expectToken(.RParen);
    return continue_expr;
}
/// LinkSection <- KEYWORD_linksection LPAREN Expr RPAREN
fn parseLinkSection(p: *Parser) !?*Node {
    // Only applies when the `linksection` keyword is present.
    if (p.eatToken(.Keyword_linksection) == null) return null;
    _ = try p.expectToken(.LParen);
    const section_expr = try p.expectNode(parseExpr, .{
        .ExpectedExpr = .{ .token = p.tok_i },
    });
    _ = try p.expectToken(.RParen);
    return section_expr;
}
/// CallConv <- KEYWORD_callconv LPAREN Expr RPAREN
fn parseCallconv(p: *Parser) !?*Node {
    // Only applies when the `callconv` keyword is present.
    if (p.eatToken(.Keyword_callconv) == null) return null;
    _ = try p.expectToken(.LParen);
    const callconv_expr = try p.expectNode(parseExpr, .{
        .ExpectedExpr = .{ .token = p.tok_i },
    });
    _ = try p.expectToken(.RParen);
    return callconv_expr;
}
/// ParamDecl <- (KEYWORD_noalias / KEYWORD_comptime)? (IDENTIFIER COLON)? ParamType
fn parseParamDecl(p: *Parser) !?*Node {
    const doc_comments = try p.parseDocComment();
    const noalias_token = p.eatToken(.Keyword_noalias);
    // `noalias` and `comptime` are mutually exclusive prefixes.
    const comptime_token = if (noalias_token == null) p.eatToken(.Keyword_comptime) else null;
    const name_token = name_blk: {
        const ident = p.eatToken(.Identifier) orelse break :name_blk null;
        if (p.eatToken(.Colon) != null) break :name_blk ident;
        // ParamType may also be a bare identifier, so rewind.
        p.putBackToken(ident);
        break :name_blk null;
    };
    const param_type = (try p.parseParamType()) orelse {
        // Only return cleanly if no keyword, identifier, or doc comment was found.
        const consumed_nothing = noalias_token == null and
            comptime_token == null and
            name_token == null and
            doc_comments == null;
        if (consumed_nothing) return null;
        try p.errors.append(p.gpa, .{
            .ExpectedParamType = .{ .token = p.tok_i },
        });
        return error.ParseError;
    };
    const param_decl = try p.arena.allocator.create(Node.ParamDecl);
    param_decl.* = .{
        .doc_comments = doc_comments,
        .comptime_token = comptime_token,
        .noalias_token = noalias_token,
        .name_token = name_token,
        .param_type = param_type,
    };
    return &param_decl.base;
}
/// ParamType
///     <- KEYWORD_var
///      / DOT3
///      / TypeExpr
fn parseParamType(p: *Parser) !?Node.ParamDecl.ParamType {
    // TODO cast from tuple to error union is broken
    const P = Node.ParamDecl.ParamType;
    // Try each alternative in grammar order.
    if (try p.parseVarType()) |var_node| return P{ .var_type = var_node };
    if (p.eatToken(.Ellipsis3)) |dots| return P{ .var_args = dots };
    if (try p.parseTypeExpr()) |type_node| return P{ .type_expr = type_node };
    return null;
}
/// IfPrefix <- KEYWORD_if LPAREN Expr RPAREN PtrPayload?
fn parseIfPrefix(p: *Parser) !?*Node {
    const if_token = p.eatToken(.Keyword_if) orelse return null;
    _ = try p.expectToken(.LParen);
    const condition_expr = try p.expectNode(parseExpr, .{
        .ExpectedExpr = .{ .token = p.tok_i },
    });
    _ = try p.expectToken(.RParen);
    const maybe_payload = try p.parsePtrPayload();
    const result = try p.arena.allocator.create(Node.If);
    result.* = .{
        .if_token = if_token,
        .condition = condition_expr,
        .payload = maybe_payload,
        .body = undefined, // set by caller
        .@"else" = null,
    };
    return &result.base;
}
/// WhilePrefix <- KEYWORD_while LPAREN Expr RPAREN PtrPayload? WhileContinueExpr?
fn parseWhilePrefix(p: *Parser) !?*Node {
    const while_token = p.eatToken(.Keyword_while) orelse return null;
    _ = try p.expectToken(.LParen);
    const condition_expr = try p.expectNode(parseExpr, .{
        .ExpectedExpr = .{ .token = p.tok_i },
    });
    _ = try p.expectToken(.RParen);
    const maybe_payload = try p.parsePtrPayload();
    const maybe_continue_expr = try p.parseWhileContinueExpr();
    const result = try p.arena.allocator.create(Node.While);
    result.* = .{
        .label = null,
        .inline_token = null,
        .while_token = while_token,
        .condition = condition_expr,
        .payload = maybe_payload,
        .continue_expr = maybe_continue_expr,
        .body = undefined, // set by caller
        .@"else" = null,
    };
    return &result.base;
}
/// ForPrefix <- KEYWORD_for LPAREN Expr RPAREN PtrIndexPayload
fn parseForPrefix(p: *Parser) !?*Node {
    const for_token = p.eatToken(.Keyword_for) orelse return null;
    _ = try p.expectToken(.LParen);
    const iterated_expr = try p.expectNode(parseExpr, .{
        .ExpectedExpr = .{ .token = p.tok_i },
    });
    _ = try p.expectToken(.RParen);
    // Unlike `while`, the payload of a `for` is mandatory.
    const payload_node = try p.expectNode(parsePtrIndexPayload, .{
        .ExpectedPayload = .{ .token = p.tok_i },
    });
    const result = try p.arena.allocator.create(Node.For);
    result.* = .{
        .label = null,
        .inline_token = null,
        .for_token = for_token,
        .array_expr = iterated_expr,
        .payload = payload_node,
        .body = undefined, // set by caller
        .@"else" = null,
    };
    return &result.base;
}
/// Payload <- PIPE IDENTIFIER PIPE
fn parsePayload(p: *Parser) !?*Node {
    const lpipe = p.eatToken(.Pipe) orelse return null;
    // The captured symbol between the pipes is mandatory.
    const symbol = try p.expectNode(parseIdentifier, .{
        .ExpectedIdentifier = .{ .token = p.tok_i },
    });
    const rpipe = try p.expectToken(.Pipe);
    const result = try p.arena.allocator.create(Node.Payload);
    result.* = .{
        .lpipe = lpipe,
        .error_symbol = symbol,
        .rpipe = rpipe,
    };
    return &result.base;
}
/// PtrPayload <- PIPE ASTERISK? IDENTIFIER PIPE
fn parsePtrPayload(p: *Parser) !?*Node {
    const lpipe = p.eatToken(.Pipe) orelse return null;
    // An optional `*` captures by pointer rather than by value.
    const maybe_asterisk = p.eatToken(.Asterisk);
    const symbol = try p.expectNode(parseIdentifier, .{
        .ExpectedIdentifier = .{ .token = p.tok_i },
    });
    const rpipe = try p.expectToken(.Pipe);
    const result = try p.arena.allocator.create(Node.PointerPayload);
    result.* = .{
        .lpipe = lpipe,
        .ptr_token = maybe_asterisk,
        .value_symbol = symbol,
        .rpipe = rpipe,
    };
    return &result.base;
}
/// PtrIndexPayload <- PIPE ASTERISK? IDENTIFIER (COMMA IDENTIFIER)? PIPE
fn parsePtrIndexPayload(p: *Parser) !?*Node {
    const lpipe = p.eatToken(.Pipe) orelse return null;
    // An optional `*` captures the value by pointer.
    const maybe_asterisk = p.eatToken(.Asterisk);
    const value_symbol = try p.expectNode(parseIdentifier, .{
        .ExpectedIdentifier = .{ .token = p.tok_i },
    });
    // An optional `, index` capture follows the value symbol.
    var index_symbol: ?*Node = null;
    if (p.eatToken(.Comma) != null) {
        index_symbol = try p.expectNode(parseIdentifier, .{
            .ExpectedIdentifier = .{ .token = p.tok_i },
        });
    }
    const rpipe = try p.expectToken(.Pipe);
    const result = try p.arena.allocator.create(Node.PointerIndexPayload);
    result.* = .{
        .lpipe = lpipe,
        .ptr_token = maybe_asterisk,
        .value_symbol = value_symbol,
        .index_symbol = index_symbol,
        .rpipe = rpipe,
    };
    return &result.base;
}
/// SwitchProng <- SwitchCase EQUALRARROW PtrPayload? AssignExpr
fn parseSwitchProng(p: *Parser) !?*Node {
    const case_node = (try p.parseSwitchCase()) orelse return null;
    const arrow_token = try p.expectToken(.EqualAngleBracketRight);
    const maybe_payload = try p.parsePtrPayload();
    const body_expr = try p.expectNode(parseAssignExpr, .{
        .ExpectedExprOrAssignment = .{ .token = p.tok_i },
    });
    // parseSwitchCase left these fields undefined; fill them in now.
    const switch_case = case_node.cast(Node.SwitchCase).?;
    switch_case.arrow_token = arrow_token;
    switch_case.payload = maybe_payload;
    switch_case.expr = body_expr;
    return case_node;
}
/// SwitchCase
///     <- SwitchItem (COMMA SwitchItem)* COMMA?
///      / KEYWORD_else
fn parseSwitchCase(p: *Parser) !?*Node {
    // Items go into a singly linked list; `list_it` starts at `&list.first`
    // and is re-pointed by each llpush call — presumably it tracks the tail's
    // `next` slot so appends are O(1). TODO(review): confirm llpush contract.
    var list = Node.SwitchCase.ItemList{};
    var list_it = &list.first;
    if (try p.parseSwitchItem()) |first_item| {
        list_it = try p.llpush(*Node, list_it, first_item);
        while (p.eatToken(.Comma) != null) {
            // A comma not followed by another item ends the list (trailing comma).
            const next_item = (try p.parseSwitchItem()) orelse break;
            list_it = try p.llpush(*Node, list_it, next_item);
        }
    } else if (p.eatToken(.Keyword_else)) |else_token| {
        // `else` prong: the list holds a single SwitchElse node.
        const else_node = try p.arena.allocator.create(Node.SwitchElse);
        else_node.* = .{
            .token = else_token,
        };
        list_it = try p.llpush(*Node, list_it, &else_node.base);
    } else return null;
    const node = try p.arena.allocator.create(Node.SwitchCase);
    node.* = .{
        .items = list,
        .arrow_token = undefined, // set by caller
        .payload = null,
        .expr = undefined, // set by caller
    };
    return &node.base;
}
/// SwitchItem <- Expr (DOT3 Expr)?
fn parseSwitchItem(p: *Parser) !?*Node {
    const start_expr = (try p.parseExpr()) orelse return null;
    // Without `...` the item is just the expression itself.
    const ellipsis = p.eatToken(.Ellipsis3) orelse return start_expr;
    // `a...b` becomes a Range infix node.
    const end_expr = try p.expectNode(parseExpr, .{
        .ExpectedExpr = .{ .token = p.tok_i },
    });
    const range_node = try p.arena.allocator.create(Node.InfixOp);
    range_node.* = .{
        .op_token = ellipsis,
        .lhs = start_expr,
        .op = .Range,
        .rhs = end_expr,
    };
    return &range_node.base;
}
/// AssignOp
///     <- ASTERISKEQUAL
///      / SLASHEQUAL
///      / PERCENTEQUAL
///      / PLUSEQUAL
///      / MINUSEQUAL
///      / LARROW2EQUAL
///      / RARROW2EQUAL
///      / AMPERSANDEQUAL
///      / CARETEQUAL
///      / PIPEEQUAL
///      / ASTERISKPERCENTEQUAL
///      / PLUSPERCENTEQUAL
///      / MINUSPERCENTEQUAL
///      / EQUAL
fn parseAssignOp(p: *Parser) !?*Node {
    const token = p.nextToken();
    const op: Node.InfixOp.Op = switch (token.ptr.id) {
        .AsteriskEqual => .AssignMul,
        .SlashEqual => .AssignDiv,
        .PercentEqual => .AssignMod,
        .PlusEqual => .AssignAdd,
        .MinusEqual => .AssignSub,
        .AngleBracketAngleBracketLeftEqual => .AssignBitShiftLeft,
        .AngleBracketAngleBracketRightEqual => .AssignBitShiftRight,
        .AmpersandEqual => .AssignBitAnd,
        .CaretEqual => .AssignBitXor,
        .PipeEqual => .AssignBitOr,
        .AsteriskPercentEqual => .AssignMulWrap,
        .PlusPercentEqual => .AssignAddWrap,
        .MinusPercentEqual => .AssignSubWrap,
        .Equal => .Assign,
        else => {
            // Not an assignment operator; rewind so callers can try other rules.
            p.putBackToken(token.index);
            return null;
        },
    };
    // Use the shared helper for node construction, for consistency with the
    // sibling operator parsers (parseCompareOp, parseBitwiseOp, parseBitShiftOp,
    // parseAdditionOp, parseMultiplyOp). lhs/rhs are set by the caller.
    return p.createInfixOp(token.index, op);
}
/// CompareOp
///     <- EQUALEQUAL
///      / EXCLAMATIONMARKEQUAL
///      / LARROW
///      / RARROW
///      / LARROWEQUAL
///      / RARROWEQUAL
fn parseCompareOp(p: *Parser) !?*Node {
    const tok = p.nextToken();
    // Map the token onto a comparison operator, if it is one.
    const op: Node.InfixOp.Op = switch (tok.ptr.id) {
        .EqualEqual => .EqualEqual,
        .BangEqual => .BangEqual,
        .AngleBracketLeft => .LessThan,
        .AngleBracketRight => .GreaterThan,
        .AngleBracketLeftEqual => .LessOrEqual,
        .AngleBracketRightEqual => .GreaterOrEqual,
        else => {
            // Not a comparison operator; rewind.
            p.putBackToken(tok.index);
            return null;
        },
    };
    return p.createInfixOp(tok.index, op);
}
/// BitwiseOp
///     <- AMPERSAND
///      / CARET
///      / PIPE
///      / KEYWORD_orelse
///      / KEYWORD_catch Payload?
fn parseBitwiseOp(p: *Parser) !?*Node {
    const tok = p.nextToken();
    // Map the token onto a bitwise-level operator, if it is one.
    const op: Node.InfixOp.Op = switch (tok.ptr.id) {
        .Ampersand => .BitAnd,
        .Caret => .BitXor,
        .Pipe => .BitOr,
        .Keyword_orelse => .UnwrapOptional,
        // `catch` may carry an optional `|err|` payload.
        .Keyword_catch => .{ .Catch = try p.parsePayload() },
        else => {
            // Not a bitwise-level operator; rewind.
            p.putBackToken(tok.index);
            return null;
        },
    };
    return p.createInfixOp(tok.index, op);
}
/// BitShiftOp
///     <- LARROW2
///      / RARROW2
fn parseBitShiftOp(p: *Parser) !?*Node {
    const tok = p.nextToken();
    // Map the token onto a shift operator, if it is one.
    const op: Node.InfixOp.Op = switch (tok.ptr.id) {
        .AngleBracketAngleBracketLeft => .BitShiftLeft,
        .AngleBracketAngleBracketRight => .BitShiftRight,
        else => {
            // Not a shift operator; rewind.
            p.putBackToken(tok.index);
            return null;
        },
    };
    return p.createInfixOp(tok.index, op);
}
/// AdditionOp
///     <- PLUS
///      / MINUS
///      / PLUS2
///      / PLUSPERCENT
///      / MINUSPERCENT
fn parseAdditionOp(p: *Parser) !?*Node {
    const tok = p.nextToken();
    // Map the token onto an addition-level operator, if it is one.
    const op: Node.InfixOp.Op = switch (tok.ptr.id) {
        .Plus => .Add,
        .Minus => .Sub,
        .PlusPlus => .ArrayCat,
        .PlusPercent => .AddWrap,
        .MinusPercent => .SubWrap,
        else => {
            // Not an addition-level operator; rewind.
            p.putBackToken(tok.index);
            return null;
        },
    };
    return p.createInfixOp(tok.index, op);
}
/// MultiplyOp
///     <- PIPE2
///      / ASTERISK
///      / SLASH
///      / PERCENT
///      / ASTERISK2
///      / ASTERISKPERCENT
fn parseMultiplyOp(p: *Parser) !?*Node {
    const tok = p.nextToken();
    // Map the token onto a multiplication-level operator, if it is one.
    const op: Node.InfixOp.Op = switch (tok.ptr.id) {
        .PipePipe => .MergeErrorSets,
        .Asterisk => .Mul,
        .Slash => .Div,
        .Percent => .Mod,
        .AsteriskAsterisk => .ArrayMult,
        .AsteriskPercent => .MulWrap,
        else => {
            // Not a multiplication-level operator; rewind.
            p.putBackToken(tok.index);
            return null;
        },
    };
    return p.createInfixOp(tok.index, op);
}
/// PrefixOp
///     <- EXCLAMATIONMARK
///      / MINUS
///      / TILDE
///      / MINUSPERCENT
///      / AMPERSAND
///      / KEYWORD_try
///      / KEYWORD_await
fn parsePrefixOp(p: *Parser) !?*Node {
    const tok = p.nextToken();
    // Map the token onto a prefix operator, if it is one.
    const op: Node.PrefixOp.Op = switch (tok.ptr.id) {
        .Bang => .BoolNot,
        .Minus => .Negation,
        .Tilde => .BitNot,
        .MinusPercent => .NegationWrap,
        .Ampersand => .AddressOf,
        .Keyword_try => .Try,
        .Keyword_await => .Await,
        else => {
            // Not a prefix operator; rewind.
            p.putBackToken(tok.index);
            return null;
        },
    };
    const result = try p.arena.allocator.create(Node.PrefixOp);
    result.* = .{
        .op_token = tok.index,
        .op = op,
        .rhs = undefined, // set by caller
    };
    return &result.base;
}
// TODO: ArrayTypeStart is either an array or a slice, but const/allowzero only work on
// pointers. Consider updating this rule:
//  ...
//  / ArrayTypeStart
//  / SliceTypeStart (ByteAlign / KEYWORD_const / KEYWORD_volatile / KEYWORD_allowzero)*
//  / PtrTypeStart ...
/// PrefixTypeOp
///     <- QUESTIONMARK
///      / KEYWORD_anyframe MINUSRARROW
///      / ArrayTypeStart (ByteAlign / KEYWORD_const / KEYWORD_volatile / KEYWORD_allowzero)*
///      / PtrTypeStart (KEYWORD_align LPAREN Expr (COLON INTEGER COLON INTEGER)? RPAREN / KEYWORD_const / KEYWORD_volatile / KEYWORD_allowzero)*
// NOTE(review): merge damage — only the QUESTIONMARK alternative of this rule
// is present, and the trailing `}.parse;` belongs to a different (old-API)
// function body. Braces do not match; needs manual repair against the
// intended revision.
fn parsePrefixTypeOp(p: *Parser) !?*Node {
    // `?T` — optional type.
    if (p.eatToken(.QuestionMark)) |token| {
        const node = try p.arena.allocator.create(Node.PrefixOp);
        node.* = .{
            .op_token = token,
            .op = .OptionalType,
            .rhs = undefined, // set by caller
        };
        return &node.base;
    }
}.parse;
}
// Helper parsers not included in the grammar
fn parseBuiltinCall(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node {
    const builtin_token = eatToken(it, .Builtin) orelse return null;
    const maybe_params = try parseFnCallArguments(arena, it, tree);
    if (maybe_params) |params| {
        const call = try arena.create(Node.BuiltinCall);
        call.* = .{
            .builtin_token = builtin_token,
            .params = params.list,
            .rparen_token = params.rparen,
        };
        return &call.base;
    }
    // No argument list after the builtin token: report the error, then
    // pretend this was an identifier so we can continue parsing.
    try tree.errors.push(.{
        .ExpectedParamList = .{ .token = it.index },
    });
    const ident = try arena.create(Node.Identifier);
    ident.* = .{
        .token = builtin_token,
    };
    return &ident.base;
}
// Parses a single error-set member: optional doc comment plus an identifier.
fn parseErrorTag(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node {
    const doc_comments = try parseDocComment(arena, it, tree); // no need to rewind on failure
    const name_token = eatToken(it, .Identifier) orelse return null;
    const tag = try arena.create(Node.ErrorTag);
    tag.* = .{
        .doc_comments = doc_comments,
        .name_token = name_token,
    };
    return &tag.base;
}
// Parses a bare identifier into an Identifier node.
fn parseIdentifier(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node {
    const ident_token = eatToken(it, .Identifier) orelse return null;
    const result = try arena.create(Node.Identifier);
    result.* = .{
        .token = ident_token,
    };
    return &result.base;
}
// Parses the `var` keyword used as a type into a VarType node.
fn parseVarType(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node {
    const var_token = eatToken(it, .Keyword_var) orelse return null;
    const result = try arena.create(Node.VarType);
    result.* = .{
        .token = var_token,
    };
    return &result.base;
}
// Generic helper: allocate a literal node of type T wrapping `token`.
fn createLiteral(arena: *Allocator, comptime T: type, token: TokenIndex) !*Node {
    const literal = try arena.create(T);
    literal.* = T{
        .base = Node{ .id = Node.typeToId(T) },
        .token = token,
    };
    return &literal.base;
}
// Parses a single (non-multiline) string literal token.
fn parseStringLiteralSingle(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node {
    const str_token = eatToken(it, .StringLiteral) orelse return null;
    const result = try arena.create(Node.StringLiteral);
    result.* = .{
        .token = str_token,
    };
    return &result.base;
}
// string literal or multiline string literal
fn parseStringLiteral(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node {
    // Plain string literal takes precedence.
    if (try parseStringLiteralSingle(arena, it, tree)) |node| return node;
    // Otherwise collect consecutive `\\` lines into one multiline literal.
    const first_line = eatToken(it, .MultilineStringLiteralLine) orelse return null;
    const result = try arena.create(Node.MultilineStringLiteral);
    result.* = .{
        .lines = Node.MultilineStringLiteral.LineList.init(arena),
    };
    try result.lines.push(first_line);
    while (eatToken(it, .MultilineStringLiteralLine)) |line| {
        try result.lines.push(line);
    }
    return &result.base;
}
// Parses an integer literal token into an IntegerLiteral node.
fn parseIntegerLiteral(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node {
    const int_token = eatToken(it, .IntegerLiteral) orelse return null;
    const result = try arena.create(Node.IntegerLiteral);
    result.* = .{
        .token = int_token,
    };
    return &result.base;
}
// Parses a float literal token into a FloatLiteral node.
fn parseFloatLiteral(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node {
    const float_token = eatToken(it, .FloatLiteral) orelse return null;
    const result = try arena.create(Node.FloatLiteral);
    result.* = .{
        .token = float_token,
    };
    return &result.base;
}
// Parses a `try` prefix into a PrefixOp node; the operand is set by the caller.
fn parseTry(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node {
    const try_token = eatToken(it, .Keyword_try) orelse return null;
    const result = try arena.create(Node.PrefixOp);
    result.* = .{
        .op_token = try_token,
        .op = .Try,
        .rhs = undefined, // set by caller
    };
    return &result.base;
}
// Parses a `usingnamespace` declaration; doc comment and visibility are
// attached by the caller afterwards.
fn parseUse(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node {
    const use_token = eatToken(it, .Keyword_usingnamespace) orelse return null;
    const result = try arena.create(Node.Use);
    // Field initializers run in order: the expression, then the semicolon.
    result.* = .{
        .doc_comments = null,
        .visib_token = null,
        .use_token = use_token,
        .expr = try expectNode(arena, it, tree, parseExpr, .{
            .ExpectedExpr = .{ .token = it.index },
        }),
        .semicolon_token = try expectToken(it, tree, .Semicolon),
    };
    return &result.base;
}
/// IfPrefix Body (KEYWORD_else Payload? Body)?
fn parseIf(arena: *Allocator, it: *TokenIterator, tree: *Tree, bodyParseFn: NodeParseFn) !?*Node {
    const node = (try parseIfPrefix(arena, it, tree)) orelse return null;
    const if_node = node.cast(Node.If).?;
    // The then-branch body is mandatory once the prefix matched.
    if_node.body = try expectNode(arena, it, tree, bodyParseFn, .{
        .InvalidToken = .{ .token = it.index },
    });
    // Without `else`, we are done.
    const else_token = eatToken(it, .Keyword_else) orelse return node;
    const maybe_payload = try parsePayload(arena, it, tree);
    const else_body = try expectNode(arena, it, tree, bodyParseFn, .{
        .InvalidToken = .{ .token = it.index },
    });
    const else_node = try arena.create(Node.Else);
    else_node.* = .{
        .else_token = else_token,
        .payload = maybe_payload,
        .body = else_body,
    };
    if_node.@"else" = else_node;
    return node;
}
/// Eat a multiline doc comment
fn parseDocComment(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node.DocComment {
    // Gather consecutive doc-comment tokens.
    var lines = Node.DocComment.LineList.init(arena);
    while (eatToken(it, .DocComment)) |line| try lines.push(line);
    // No doc comment present at all.
    if (lines.len == 0) return null;
    const result = try arena.create(Node.DocComment);
    result.* = .{
        .lines = lines,
    };
    return result;
}
/// Eat a single-line doc comment on the same line as another node
fn parseAppendedDocComment(arena: *Allocator, it: *TokenIterator, tree: *Tree, after_token: TokenIndex) !?*Node.DocComment {
    const comment_token = eatToken(it, .DocComment) orelse return null;
    if (!tree.tokensOnSameLine(after_token, comment_token)) {
        // On a different line, so it documents whatever follows instead; rewind.
        putBackToken(it, comment_token);
        return null;
    }
    const result = try arena.create(Node.DocComment);
    result.* = .{
        .lines = Node.DocComment.LineList.init(arena),
    };
    try result.lines.push(comment_token);
    return result;
}
/// Op* Child
// NOTE(review): merge damage — this function interleaves the old
// (arena, it, tree) revision of parsePrefixOpExpr with new `p: *Parser` API
// code for anyframe/pointer/array type parsing. `p` is not defined in this
// scope and several braces do not match; needs manual repair against the
// intended revision before this file can compile.
fn parsePrefixOpExpr(
    arena: *Allocator,
    it: *TokenIterator,
    tree: *Tree,
    opParseFn: NodeParseFn,
    childParseFn: NodeParseFn,
) Error!?*Node {
    if (try opParseFn(arena, it, tree)) |first_op| {
        var rightmost_op = first_op;
        while (true) {
            switch (rightmost_op.id) {
                .PrefixOp => {
                    var prefix_op = rightmost_op.cast(Node.PrefixOp).?;
                    // If the token encountered was **, there will be two nodes
                    if (tree.tokens.at(prefix_op.op_token).id == .AsteriskAsterisk) {
                        rightmost_op = prefix_op.rhs;
                        prefix_op = rightmost_op.cast(Node.PrefixOp).?;
                    }
                    if (try opParseFn(arena, it, tree)) |rhs| {
                        prefix_op.rhs = rhs;
                        rightmost_op = rhs;
                    } else break;
                    // NOTE(review): merge seam — the code below uses the new
                    // `p: *Parser` API and belongs to a different function.
                    // TODO: Returning a AnyFrameType instead of PrefixOp makes casting and setting .rhs or
                    // .return_type more difficult for the caller (see parsePrefixOpExpr helper).
                    // Consider making the AnyFrameType a member of PrefixOp and add a
                    // PrefixOp.AnyFrameType variant?
                    if (p.eatToken(.Keyword_anyframe)) |token| {
                        const arrow = p.eatToken(.Arrow) orelse {
                            p.putBackToken(token);
                            return null;
                        };
                        const node = try p.arena.allocator.create(Node.AnyFrameType);
                        node.* = .{
                            .anyframe_token = token,
                            .result = .{
                                .arrow_token = arrow,
                                .return_type = undefined, // set by caller
                            },
                            .AnyFrameType => {
                                const prom = rightmost_op.cast(Node.AnyFrameType).?;
                                if (try opParseFn(arena, it, tree)) |rhs| {
                                    prom.result.?.return_type = rhs;
                                    rightmost_op = rhs;
                                } else break;
                            };
                        return &node.base;
                    }
                    if (try p.parsePtrTypeStart()) |node| {
                        // If the token encountered was **, there will be two nodes instead of one.
                        // The attributes should be applied to the rightmost operator.
                        const prefix_op = node.cast(Node.PrefixOp).?;
                        var ptr_info = if (p.tokens[prefix_op.op_token].id == .AsteriskAsterisk)
                            &prefix_op.rhs.cast(Node.PrefixOp).?.op.PtrType
                        else
                            &prefix_op.op.PtrType;
                        // Collect pointer qualifiers in any order, disallowing duplicates.
                        while (true) {
                            if (p.eatToken(.Keyword_align)) |align_token| {
                                const lparen = try p.expectToken(.LParen);
                                const expr_node = try p.expectNode(parseExpr, .{
                                    .ExpectedExpr = .{ .token = p.tok_i },
                                });
                                // Optional bit range
                                const bit_range = if (p.eatToken(.Colon)) |_| bit_range_value: {
                                    const range_start = try p.expectNode(parseIntegerLiteral, .{
                                        .ExpectedIntegerLiteral = .{ .token = p.tok_i },
                                    });
                                    _ = try p.expectToken(.Colon);
                                    const range_end = try p.expectNode(parseIntegerLiteral, .{
                                        .ExpectedIntegerLiteral = .{ .token = p.tok_i },
                                    });
                                    break :bit_range_value Node.PrefixOp.PtrInfo.Align.BitRange{
                                        .start = range_start,
                                        .end = range_end,
                                    };
                                } else null;
                                _ = try p.expectToken(.RParen);
                                if (ptr_info.align_info != null) {
                                    try p.errors.append(p.gpa, .{
                                        .ExtraAlignQualifier = .{ .token = p.tok_i - 1 },
                                    });
                                    continue;
                                }
                                ptr_info.align_info = Node.PrefixOp.PtrInfo.Align{
                                    .node = expr_node,
                                    .bit_range = bit_range,
                                };
                                continue;
                            }
                            if (p.eatToken(.Keyword_const)) |const_token| {
                                if (ptr_info.const_token != null) {
                                    try p.errors.append(p.gpa, .{
                                        .ExtraConstQualifier = .{ .token = p.tok_i - 1 },
                                    });
                                    continue;
                                }
                                ptr_info.const_token = const_token;
                                continue;
                            }
                            if (p.eatToken(.Keyword_volatile)) |volatile_token| {
                                if (ptr_info.volatile_token != null) {
                                    try p.errors.append(p.gpa, .{
                                        .ExtraVolatileQualifier = .{ .token = p.tok_i - 1 },
                                    });
                                    continue;
                                }
                                ptr_info.volatile_token = volatile_token;
                                continue;
                            }
                            if (p.eatToken(.Keyword_allowzero)) |allowzero_token| {
                                if (ptr_info.allowzero_token != null) {
                                    try p.errors.append(p.gpa, .{
                                        .ExtraAllowZeroQualifier = .{ .token = p.tok_i - 1 },
                                    });
                                    continue;
                                }
                                ptr_info.allowzero_token = allowzero_token;
                                continue;
                            }
                            break;
                        }
                        return node;
                    }
                    if (try p.parseArrayTypeStart()) |node| {
                        switch (node.cast(Node.PrefixOp).?.op) {
                            .ArrayType => {},
                            .SliceType => |*slice_type| {
                                // Collect pointer qualifiers in any order, but disallow duplicates
                                while (true) {
                                    if (try p.parseByteAlign()) |align_expr| {
                                        if (slice_type.align_info != null) {
                                            try p.errors.append(p.gpa, .{
                                                .ExtraAlignQualifier = .{ .token = p.tok_i - 1 },
                                            });
                                            continue;
                                        }
                                        slice_type.align_info = Node.PrefixOp.PtrInfo.Align{
                                            .node = align_expr,
                                            .bit_range = null,
                                        };
                                        continue;
                                    }
                                    if (p.eatToken(.Keyword_const)) |const_token| {
                                        if (slice_type.const_token != null) {
                                            try p.errors.append(p.gpa, .{
                                                .ExtraConstQualifier = .{ .token = p.tok_i - 1 },
                                            });
                                            continue;
                                        }
                                        slice_type.const_token = const_token;
                                        continue;
                                    }
                                    if (p.eatToken(.Keyword_volatile)) |volatile_token| {
                                        if (slice_type.volatile_token != null) {
                                            try p.errors.append(p.gpa, .{
                                                .ExtraVolatileQualifier = .{ .token = p.tok_i - 1 },
                                            });
                                            continue;
                                        }
                                        slice_type.volatile_token = volatile_token;
                                        continue;
                                    }
                                    if (p.eatToken(.Keyword_allowzero)) |allowzero_token| {
                                        if (slice_type.allowzero_token != null) {
                                            try p.errors.append(p.gpa, .{
                                                .ExtraAllowZeroQualifier = .{ .token = p.tok_i - 1 },
                                            });
                                            continue;
                                        }
                                        slice_type.allowzero_token = allowzero_token;
                                        continue;
                                    }
                                    break;
                                }
                            },
                            else => unreachable,
                        }
                        return node;
                    }
                    // NOTE(review): merge seam — from here the old-API
                    // revision of parsePrefixOpExpr resumes.
                    // If any prefix op existed, a child node on the RHS is required
                    switch (rightmost_op.id) {
                        .PrefixOp => {
                            const prefix_op = rightmost_op.cast(Node.PrefixOp).?;
                            prefix_op.rhs = try expectNode(arena, it, tree, childParseFn, .{
                                .InvalidToken = .{ .token = it.index },
                            });
                        },
                        .AnyFrameType => {
                            const prom = rightmost_op.cast(Node.AnyFrameType).?;
                            prom.result.?.return_type = try expectNode(arena, it, tree, childParseFn, .{
                                .InvalidToken = .{ .token = it.index },
                            });
                        },
                        else => unreachable,
                    }
                    return first_op;
                }
                // Otherwise, the child node is optional
                return try childParseFn(arena, it, tree);
}
/// Child (Op Child)*
/// Child (Op Child)?
fn parseBinOpExpr(
    arena: *Allocator,
    it: *TokenIterator,
    tree: *Tree,
    opParseFn: NodeParseFn,
    childParseFn: NodeParseFn,
    chain: enum {
        Once,
        Infinitely,
    },
) Error!?*Node {
    // No leftmost child means the whole rule does not apply.
    var result = (try childParseFn(arena, it, tree)) orelse return null;
    while (try opParseFn(arena, it, tree)) |op_node| {
        // Once an operator is consumed, a right-hand child is mandatory.
        const rhs = try expectNode(arena, it, tree, childParseFn, .{
            .InvalidToken = .{ .token = it.index },
        });
        // Wire the operands into the infix node and make it the new root,
        // yielding a left-associative tree.
        const infix = op_node.cast(Node.InfixOp).?;
        infix.lhs = result;
        infix.rhs = rhs;
        result = op_node;
        if (chain == .Once) break;
    }
    return result;
}
// Allocates an InfixOp node for `op`; lhs/rhs are wired up by the caller
// once both operands are known.
fn createInfixOp(arena: *Allocator, index: TokenIndex, op: Node.InfixOp.Op) !*Node {
    const infix = try arena.create(Node.InfixOp);
    infix.* = .{
        .op_token = index,
        .lhs = undefined, // set by caller
        .op = op,
        .rhs = undefined, // set by caller
    };
    return &infix.base;
}
// Consumes the next token if it matches `id`, returning just its index.
fn eatToken(it: *TokenIterator, id: Token.Id) ?TokenIndex {
    const annotated = eatAnnotatedToken(it, id) orelse return null;
    return annotated.index;
}
// Consumes the next token only when it matches `id`; otherwise leaves the
// iterator untouched.
fn eatAnnotatedToken(it: *TokenIterator, id: Token.Id) ?AnnotatedToken {
    if (it.peek().?.id != id) return null;
    return nextToken(it);
}
// Like expectTokenRecoverable, but turns a mismatch into a hard ParseError.
fn expectToken(it: *TokenIterator, tree: *Tree, id: Token.Id) Error!TokenIndex {
    const maybe_index = try expectTokenRecoverable(it, tree, id);
    return maybe_index orelse error.ParseError;
}
// Consumes the next token if it matches `id`. On mismatch, records an
// ExpectedToken error, rewinds, and returns null so the caller can recover.
fn expectTokenRecoverable(it: *TokenIterator, tree: *Tree, id: Token.Id) !?TokenIndex {
    const token = nextToken(it);
    if (token.ptr.id == id) return token.index;
    try tree.errors.push(.{
        .ExpectedToken = .{ .token = token.index, .expected_id = id },
    });
    // go back so that we can recover properly
    putBackToken(it, token.index);
    return null;
}
// NOTE(review): merge damage — the body of nextToken is interrupted here by
// unrelated functions; its continuation (the line-comment-skipping loop)
// appears further down in the file. Needs manual repair.
fn nextToken(it: *TokenIterator) AnnotatedToken {
    const result = AnnotatedToken{
        .index = it.index,
        .ptr = it.next().?,
/// SuffixOp
///     <- LBRACKET Expr (DOT2 (Expr (COLON Expr)?)?)? RBRACKET
///      / DOT IDENTIFIER
///      / DOTASTERISK
///      / DOTQUESTIONMARK
fn parseSuffixOp(p: *Parser) !?*Node {
    const OpAndToken = struct {
        op: Node.SuffixOp.Op,
        token: TokenIndex,
    };
    const op_and_token: OpAndToken = blk: {
        // `[` starts either an array access or a slice.
        if (p.eatToken(.LBracket)) |_| {
            const index_expr = try p.expectNode(parseExpr, .{
                .ExpectedExpr = .{ .token = p.tok_i },
            });
            if (p.eatToken(.Ellipsis2) != null) {
                // Slice form: the end expression and `:sentinel` are optional.
                const end_expr = try p.parseExpr();
                const sentinel: ?*ast.Node = if (p.eatToken(.Colon) != null)
                    try p.parseExpr()
                else
                    null;
                break :blk .{
                    .op = .{
                        .Slice = .{
                            .start = index_expr,
                            .end = end_expr,
                            .sentinel = sentinel,
                        },
                    },
                    .token = try p.expectToken(.RBracket),
                };
            }
            // No `..` — plain array access.
            break :blk .{
                .op = .{ .ArrayAccess = index_expr },
                .token = try p.expectToken(.RBracket),
            };
        }
        // `.*` — pointer dereference.
        if (p.eatToken(.PeriodAsterisk)) |period_asterisk| {
            break :blk .{ .op = .Deref, .token = period_asterisk };
        }
        if (p.eatToken(.Period)) |period| {
            // `.name` — field access (returned directly as an InfixOp).
            if (try p.parseIdentifier()) |identifier| {
                // TODO: It's a bit weird to return an InfixOp from the SuffixOp parser.
                // Should there be an ast.Node.SuffixOp.FieldAccess variant? Or should
                // this grammar rule be altered?
                const node = try p.arena.allocator.create(Node.InfixOp);
                node.* = .{
                    .op_token = period,
                    .lhs = undefined, // set by caller
                    .op = .Period,
                    .rhs = identifier,
                };
                return &node.base;
            }
            // `.?` — optional unwrap.
            if (p.eatToken(.QuestionMark)) |question_mark| {
                break :blk .{ .op = .UnwrapOptional, .token = question_mark };
            }
            // A lone `.` followed by neither identifier nor `?` is an error,
            // but recoverable: report and return null.
            try p.errors.append(p.gpa, .{
                .ExpectedSuffixOp = .{ .token = p.tok_i },
            });
            return null;
        }
        return null;
    };
    const node = try p.arena.allocator.create(Node.SuffixOp);
    node.* = .{
        .lhs = undefined, // set by caller
        .op = op_and_token.op,
        .rtoken = op_and_token.token,
    };
    return &node.base;
}
/// FnCallArguments <- LPAREN ExprList RPAREN
/// ExprList <- (Expr COMMA)* Expr?
/// Returns null when the current token is not `(`; otherwise parses the
/// comma-separated argument list and records the closing paren's index.
fn parseFnCallArguments(p: *Parser) !?AnnotatedParamList {
    _ = p.eatToken(.LParen) orelse return null;
    const args = try ListParseFn(Node.FnProto.ParamList, parseExpr)(p);
    return AnnotatedParamList{
        .list = args,
        .rparen = try p.expectToken(.RParen),
    };
}
/// Result of parseFnCallArguments: the argument list together with the
/// index of the closing right paren.
const AnnotatedParamList = struct {
list: Node.FnProto.ParamList, // NOTE: may also be any other list type with *Node data (formerly SegmentedList(*Node, 2))
rparen: TokenIndex,
};
assert(result.ptr.id != .LineComment);
while (true) {
const next_tok = it.peek() orelse return result;
if (next_tok.id != .LineComment) return result;
_ = it.next();
/// ArrayTypeStart <- LBRACKET Expr? RBRACKET
/// With a length expression this yields an ArrayType PrefixOp (e.g. `[N]T`,
/// `[N:0]T`), otherwise a SliceType (`[]T`, `[:0]T`). The element type
/// (`rhs`) is filled in by the caller.
fn parseArrayTypeStart(p: *Parser) !?*Node {
    const lbracket = p.eatToken(.LBracket) orelse return null;
    const expr = try p.parseExpr();
    // Optional `:sentinel` before the closing bracket.
    const sentinel = if (p.eatToken(.Colon)) |_|
        try p.expectNode(parseExpr, .{
            .ExpectedExpr = .{ .token = p.tok_i },
        })
    else
        null;
    // The closing bracket's index is not needed (rtoken lives on the
    // eventual type expression), so discard it instead of binding an
    // unused local.
    _ = try p.expectToken(.RBracket);
    const op: Node.PrefixOp.Op = if (expr) |len_expr|
        .{
            .ArrayType = .{
                .len_expr = len_expr,
                .sentinel = sentinel,
            },
        }
    else
        .{
            .SliceType = Node.PrefixOp.PtrInfo{
                .allowzero_token = null,
                .align_info = null,
                .const_token = null,
                .volatile_token = null,
                .sentinel = sentinel,
            },
        };
    const node = try p.arena.allocator.create(Node.PrefixOp);
    node.* = .{
        .op_token = lbracket,
        .op = op,
        .rhs = undefined, // set by caller
    };
    return &node.base;
}
}
fn putBackToken(it: *TokenIterator, putting_back: TokenIndex) void {
while (true) {
const prev_tok = it.prev() orelse return;
if (prev_tok.id == .LineComment) continue;
assert(it.list.at(putting_back) == prev_tok);
return;
/// PtrTypeStart
/// <- ASTERISK
/// / ASTERISK2
/// / PTRUNKNOWN
/// / PTRC
/// Builds the PrefixOp node(s) for a pointer type prefix; in every case
/// `rhs` (the pointee type) is set by the caller. Returns null (with the
/// token stream rewound) if no pointer prefix starts here.
fn parsePtrTypeStart(p: *Parser) !?*Node {
if (p.eatToken(.Asterisk)) |asterisk| {
// `*T`, with an optional `:sentinel` as parsed here.
const sentinel = if (p.eatToken(.Colon)) |_|
try p.expectNode(parseExpr, .{
.ExpectedExpr = .{ .token = p.tok_i },
})
else
null;
const node = try p.arena.allocator.create(Node.PrefixOp);
node.* = .{
.op_token = asterisk,
.op = .{ .PtrType = .{ .sentinel = sentinel } },
.rhs = undefined, // set by caller
};
return &node.base;
}
if (p.eatToken(.AsteriskAsterisk)) |double_asterisk| {
const node = try p.arena.allocator.create(Node.PrefixOp);
node.* = .{
.op_token = double_asterisk,
.op = .{ .PtrType = .{} },
.rhs = undefined, // set by caller
};
// Special case for **, which is its own token
// `**T` is represented as two nested single-pointer nodes sharing the
// same op_token; the inner node's rhs is still set by the caller.
const child = try p.arena.allocator.create(Node.PrefixOp);
child.* = .{
.op_token = double_asterisk,
.op = .{ .PtrType = .{} },
.rhs = undefined, // set by caller
};
node.rhs = &child.base;
return &node.base;
}
if (p.eatToken(.LBracket)) |lbracket| {
// `[*]T`, `[*:s]T`, or `[*c]T`. A lone `[` is not a pointer prefix,
// so rewind and let the array/slice parser handle it.
const asterisk = p.eatToken(.Asterisk) orelse {
p.putBackToken(lbracket);
return null;
};
if (p.eatToken(.Identifier)) |ident| {
// Only the identifier `c` is meaningful here (`[*c]` C pointer);
// anything else is put back for the sentinel/`]` path below.
const token_slice = p.source[p.tokens[ident].start..p.tokens[ident].end];
if (!std.mem.eql(u8, token_slice, "c")) {
p.putBackToken(ident);
} else {
_ = try p.expectToken(.RBracket);
const node = try p.arena.allocator.create(Node.PrefixOp);
node.* = .{
.op_token = lbracket,
.op = .{ .PtrType = .{} },
.rhs = undefined, // set by caller
};
return &node.base;
}
}
const sentinel = if (p.eatToken(.Colon)) |_|
try p.expectNode(parseExpr, .{
.ExpectedExpr = .{ .token = p.tok_i },
})
else
null;
_ = try p.expectToken(.RBracket);
const node = try p.arena.allocator.create(Node.PrefixOp);
node.* = .{
.op_token = lbracket,
.op = .{ .PtrType = .{ .sentinel = sentinel } },
.rhs = undefined, // set by caller
};
return &node.base;
}
return null;
}
}
const AnnotatedToken = struct {
index: TokenIndex,
ptr: *Token,
/// ContainerDeclAuto <- ContainerDeclType LBRACE ContainerMembers RBRACE
/// Parses the `{ ... }` body for a container whose kind/init-arg header was
/// produced by parseContainerDeclType, filling in the fields that header
/// left undefined.
fn parseContainerDeclAuto(p: *Parser) !?*Node {
    const base_node = (try p.parseContainerDeclType()) orelse return null;
    const container = base_node.cast(Node.ContainerDecl).?;
    container.lbrace_token = try p.expectToken(.LBrace);
    container.fields_and_decls = try p.parseContainerMembers(false);
    container.rbrace_token = try p.expectToken(.RBrace);
    return base_node;
}
/// ContainerDeclType
/// <- KEYWORD_struct
/// / KEYWORD_enum (LPAREN Expr RPAREN)?
/// / KEYWORD_union (LPAREN (KEYWORD_enum (LPAREN Expr RPAREN)? / Expr) RPAREN)?
/// Parses only the container header; lbrace/members/rbrace are filled in by
/// parseContainerDeclAuto. Returns null (token put back) for other tokens.
fn parseContainerDeclType(p: *Parser) !?*Node {
const kind_token = p.nextToken();
const init_arg_expr = switch (kind_token.ptr.id) {
// `struct` takes no init argument.
.Keyword_struct => Node.ContainerDecl.InitArg{ .None = {} },
// `enum` or `enum(TagType)`.
.Keyword_enum => blk: {
if (p.eatToken(.LParen) != null) {
const expr = try p.expectNode(parseExpr, .{
.ExpectedExpr = .{ .token = p.tok_i },
});
_ = try p.expectToken(.RParen);
break :blk Node.ContainerDecl.InitArg{ .Type = expr };
}
break :blk Node.ContainerDecl.InitArg{ .None = {} };
},
// `union`, `union(enum)`, `union(enum(TagType))`, or `union(TagType)`.
.Keyword_union => blk: {
if (p.eatToken(.LParen) != null) {
if (p.eatToken(.Keyword_enum) != null) {
if (p.eatToken(.LParen) != null) {
const expr = try p.expectNode(parseExpr, .{
.ExpectedExpr = .{ .token = p.tok_i },
});
_ = try p.expectToken(.RParen);
_ = try p.expectToken(.RParen);
break :blk Node.ContainerDecl.InitArg{ .Enum = expr };
}
_ = try p.expectToken(.RParen);
break :blk Node.ContainerDecl.InitArg{ .Enum = null };
}
const expr = try p.expectNode(parseExpr, .{
.ExpectedExpr = .{ .token = p.tok_i },
});
_ = try p.expectToken(.RParen);
break :blk Node.ContainerDecl.InitArg{ .Type = expr };
}
break :blk Node.ContainerDecl.InitArg{ .None = {} };
},
else => {
// Not a container keyword: rewind and report "no match".
p.putBackToken(kind_token.index);
return null;
},
};
const node = try p.arena.allocator.create(Node.ContainerDecl);
node.* = .{
.layout_token = null,
.kind_token = kind_token.index,
.init_arg_expr = init_arg_expr,
.fields_and_decls = undefined, // set by caller
.lbrace_token = undefined, // set by caller
.rbrace_token = undefined, // set by caller
};
return &node.base;
}
/// ByteAlign <- KEYWORD_align LPAREN Expr RPAREN
/// Returns the alignment expression, or null when `align` is absent.
fn parseByteAlign(p: *Parser) !?*Node {
    if (p.eatToken(.Keyword_align) == null) return null;
    _ = try p.expectToken(.LParen);
    const align_expr = try p.expectNode(parseExpr, .{
        .ExpectedExpr = .{ .token = p.tok_i },
    });
    _ = try p.expectToken(.RParen);
    return align_expr;
}
/// IdentifierList <- (IDENTIFIER COMMA)* IDENTIFIER?
/// Only ErrorSetDecl parses an IdentifierList
fn parseErrorTagList(p: *Parser) !Node.ErrorSetDecl.DeclList {
    const parseFn = ListParseFn(Node.ErrorSetDecl.DeclList, parseErrorTag);
    return parseFn(p);
}
/// SwitchProngList <- (SwitchProng COMMA)* SwitchProng?
fn parseSwitchProngList(p: *Parser) !Node.Switch.CaseList {
    const parseFn = ListParseFn(Node.Switch.CaseList, parseSwitchProng);
    return parseFn(p);
}
/// AsmOutputList <- (AsmOutputItem COMMA)* AsmOutputItem?
fn parseAsmOutputList(p: *Parser) Error!Node.Asm.OutputList {
    const parseFn = ListParseFn(Node.Asm.OutputList, parseAsmOutputItem);
    return parseFn(p);
}
/// AsmInputList <- (AsmInputItem COMMA)* AsmInputItem?
fn parseAsmInputList(p: *Parser) Error!Node.Asm.InputList {
    const parseFn = ListParseFn(Node.Asm.InputList, parseAsmInputItem);
    return parseFn(p);
}
/// ParamDeclList <- (ParamDecl COMMA)* ParamDecl?
/// Like ListParseFn, but hand-rolled so the last parsed param can be
/// inspected: if it is `...`, its token is reported through `var_args_token`.
fn parseParamDeclList(p: *Parser, var_args_token: *?TokenIndex) !Node.FnProto.ParamList {
var list = Node.FnProto.ParamList{};
// Tail slot of the singly linked list; llpush appends and advances it.
var list_it = &list.first;
var last: ?*Node = null;
while (try p.parseParamDecl()) |node| {
last = node;
list_it = try p.llpush(*Node, list_it, node);
switch (p.tokens[p.tok_i].id) {
.Comma => _ = p.nextToken(),
// all possible delimiters
.Colon, .RParen, .RBrace, .RBracket => break,
else => {
// this is likely just a missing comma,
// continue parsing this list and give an error
try p.errors.append(p.gpa, .{
.ExpectedToken = .{ .token = p.tok_i, .expected_id = .Comma },
});
},
}
}
// Only the final param may be `...`; surface its token to the caller.
if (last) |node| {
const param_type = node.cast(Node.ParamDecl).?.param_type;
if (param_type == .var_args) {
var_args_token.* = param_type.var_args;
}
}
return list;
}
/// Common signature shared by all parsers that yield a single AST node (or null on no-match).
const NodeParseFn = fn (p: *Parser) Error!?*Node;
/// Comptime factory: returns a parse function that repeatedly applies
/// `nodeParseFn`, collecting results into the singly-linked-list type `L`,
/// with items separated by commas and terminated by a closing delimiter.
fn ListParseFn(comptime L: type, comptime nodeParseFn: var) ParseFn(L) {
return struct {
pub fn parse(p: *Parser) !L {
var list = L{};
// Tail slot of the list; llpush appends a node and advances it.
var list_it = &list.first;
while (try nodeParseFn(p)) |node| {
list_it = try p.llpush(L.Node.Data, list_it, node);
switch (p.tokens[p.tok_i].id) {
.Comma => _ = p.nextToken(),
// all possible delimiters
.Colon, .RParen, .RBrace, .RBracket => break,
else => {
// this is likely just a missing comma,
// continue parsing this list and give an error
try p.errors.append(p.gpa, .{
.ExpectedToken = .{ .token = p.tok_i, .expected_id = .Comma },
});
},
}
}
return list;
}
}.parse;
}
/// Comptime factory for single-token binary operators: eats `token` and
/// returns an InfixOp node with `op`; lhs/rhs are set by parseBinOpExpr.
/// `and` is special-cased so that `&&` (Invalid_ampersands) is accepted
/// with a diagnostic instead of failing the parse.
fn SimpleBinOpParseFn(comptime token: Token.Id, comptime op: Node.InfixOp.Op) NodeParseFn {
return struct {
pub fn parse(p: *Parser) Error!?*Node {
const op_token = if (token == .Keyword_and) switch (p.tokens[p.tok_i].id) {
.Keyword_and => p.nextToken().index,
.Invalid_ampersands => blk: {
// Report `&&` but keep parsing as if it were `and`.
try p.errors.append(p.gpa, .{
.InvalidAnd = .{ .token = p.tok_i },
});
break :blk p.nextToken().index;
},
else => return null,
} else p.eatToken(token) orelse return null;
const node = try p.arena.allocator.create(Node.InfixOp);
node.* = .{
.op_token = op_token,
.lhs = undefined, // set by caller
.op = op,
.rhs = undefined, // set by caller
};
return &node.base;
}
}.parse;
}
// Helper parsers not included in the grammar
/// `@name(args...)`. On a missing argument list an error is recorded and an
/// Identifier node is returned so parsing can continue.
fn parseBuiltinCall(p: *Parser) !?*Node {
const token = p.eatToken(.Builtin) orelse return null;
const params = (try p.parseFnCallArguments()) orelse {
try p.errors.append(p.gpa, .{
.ExpectedParamList = .{ .token = p.tok_i },
});
// lets pretend this was an identifier so we can continue parsing
const node = try p.arena.allocator.create(Node.Identifier);
node.* = .{
.token = token,
};
return &node.base;
};
const node = try p.arena.allocator.create(Node.BuiltinCall);
node.* = .{
.builtin_token = token,
.params = params.list,
.rparen_token = params.rparen,
};
return &node.base;
}
/// A single identifier inside an error set declaration, with any preceding
/// doc comments attached.
fn parseErrorTag(p: *Parser) !?*Node {
    const doc_comments = try p.parseDocComment(); // no need to rewind on failure
    const name_token = p.eatToken(.Identifier) orelse return null;
    const tag = try p.arena.allocator.create(Node.ErrorTag);
    tag.* = .{
        .doc_comments = doc_comments,
        .name_token = name_token,
    };
    return &tag.base;
}
/// A bare identifier expression, or null if the current token is not one.
fn parseIdentifier(p: *Parser) !?*Node {
    const name_token = p.eatToken(.Identifier) orelse return null;
    const ident = try p.arena.allocator.create(Node.Identifier);
    ident.* = .{ .token = name_token };
    return &ident.base;
}
/// The `var` type keyword used as a type expression.
fn parseVarType(p: *Parser) !?*Node {
    const var_token = p.eatToken(.Keyword_var) orelse return null;
    const result = try p.arena.allocator.create(Node.VarType);
    result.* = .{ .token = var_token };
    return &result.base;
}
/// Allocate a literal node of type `T` (which must have `base` and `token`
/// fields) for the given token.
fn createLiteral(p: *Parser, comptime T: type, token: TokenIndex) !*Node {
    const lit = try p.arena.allocator.create(T);
    lit.* = T{
        .base = Node{ .id = Node.typeToId(T) },
        .token = token,
    };
    return &lit.base;
}
/// A single (non-multiline) string literal, or null if the current token
/// is not one.
fn parseStringLiteralSingle(p: *Parser) !?*Node {
    const str_token = p.eatToken(.StringLiteral) orelse return null;
    const str = try p.arena.allocator.create(Node.StringLiteral);
    str.* = .{ .token = str_token };
    return &str.base;
}
// string literal or multiline string literal
/// Multiline literals collect every consecutive `\\` line into a singly
/// linked list of token indices.
fn parseStringLiteral(p: *Parser) !?*Node {
if (try p.parseStringLiteralSingle()) |node| return node;
if (p.eatToken(.MultilineStringLiteralLine)) |first_line| {
const node = try p.arena.allocator.create(Node.MultilineStringLiteral);
node.* = .{
.lines = Node.MultilineStringLiteral.LineList{},
};
// Tail slot of the line list; llpush appends and advances it.
var lines_it = &node.lines.first;
lines_it = try p.llpush(TokenIndex, lines_it, first_line);
while (p.eatToken(.MultilineStringLiteralLine)) |line|
lines_it = try p.llpush(TokenIndex, lines_it, line);
return &node.base;
}
return null;
}
/// An integer literal token wrapped in an AST node, or null on no match.
fn parseIntegerLiteral(p: *Parser) !?*Node {
    const int_token = p.eatToken(.IntegerLiteral) orelse return null;
    const lit = try p.arena.allocator.create(Node.IntegerLiteral);
    lit.* = .{ .token = int_token };
    return &lit.base;
}
/// A float literal token wrapped in an AST node, or null on no match.
fn parseFloatLiteral(p: *Parser) !?*Node {
    const float_token = p.eatToken(.FloatLiteral) orelse return null;
    const lit = try p.arena.allocator.create(Node.FloatLiteral);
    lit.* = .{ .token = float_token };
    return &lit.base;
}
/// KEYWORD_try as a prefix operator; the operand (`rhs`) is set by the caller.
fn parseTry(p: *Parser) !?*Node {
    const try_token = p.eatToken(.Keyword_try) orelse return null;
    const prefix = try p.arena.allocator.create(Node.PrefixOp);
    prefix.* = .{
        .op_token = try_token,
        .op = .Try,
        .rhs = undefined, // set by caller
    };
    return &prefix.base;
}
/// `usingnamespace Expr;` — doc comments and visibility are filled in by
/// the caller if present.
fn parseUse(p: *Parser) !?*Node {
const token = p.eatToken(.Keyword_usingnamespace) orelse return null;
const node = try p.arena.allocator.create(Node.Use);
node.* = .{
.doc_comments = null,
.visib_token = null,
.use_token = token,
.expr = try p.expectNode(parseExpr, .{
.ExpectedExpr = .{ .token = p.tok_i },
}),
.semicolon_token = try p.expectToken(.Semicolon),
};
return &node.base;
}
/// IfPrefix Body (KEYWORD_else Payload? Body)?
/// Shared by expression and statement forms: `bodyParseFn` decides what a
/// branch body may contain.
fn parseIf(p: *Parser, bodyParseFn: NodeParseFn) !?*Node {
const node = (try p.parseIfPrefix()) orelse return null;
const if_prefix = node.cast(Node.If).?;
if_prefix.body = try p.expectNode(bodyParseFn, .{
.InvalidToken = .{ .token = p.tok_i },
});
// No `else`: the If node is complete as-is.
const else_token = p.eatToken(.Keyword_else) orelse return node;
const payload = try p.parsePayload();
const else_expr = try p.expectNode(bodyParseFn, .{
.InvalidToken = .{ .token = p.tok_i },
});
const else_node = try p.arena.allocator.create(Node.Else);
else_node.* = .{
.else_token = else_token,
.payload = payload,
.body = else_expr,
};
if_prefix.@"else" = else_node;
return node;
}
/// Eat a multiline doc comment
/// Collects consecutive `///` lines into a DocComment node, or returns null
/// if there are none at the current position.
fn parseDocComment(p: *Parser) !?*Node.DocComment {
var lines = Node.DocComment.LineList{};
// Tail slot of the line list; llpush appends and advances it.
var lines_it = &lines.first;
while (p.eatToken(.DocComment)) |line| {
lines_it = try p.llpush(TokenIndex, lines_it, line);
}
if (lines.first == null) return null;
const node = try p.arena.allocator.create(Node.DocComment);
node.* = .{
.lines = lines,
};
return node;
}
/// True when no newline occurs in the source between the end of `token1`
/// and the start of `token2`.
fn tokensOnSameLine(p: *Parser, token1: TokenIndex, token2: TokenIndex) bool {
    const between = p.source[p.tokens[token1].end..p.tokens[token2].start];
    return std.mem.indexOfScalar(u8, between, '\n') == null;
}
/// Eat a single-line doc comment on the same line as another node
/// If the next `///` is on a different line it belongs to whatever follows,
/// so it is put back and null is returned.
fn parseAppendedDocComment(p: *Parser, after_token: TokenIndex) !?*Node.DocComment {
const comment_token = p.eatToken(.DocComment) orelse return null;
if (p.tokensOnSameLine(after_token, comment_token)) {
var lines = Node.DocComment.LineList{};
// Single-element line list.
_ = try p.llpush(TokenIndex, &lines.first, comment_token);
const node = try p.arena.allocator.create(Node.DocComment);
node.* = .{ .lines = lines };
return node;
}
p.putBackToken(comment_token);
return null;
}
/// Op* Child
/// Parses a chain of prefix operators followed by a child expression.
/// Prefix op nodes are created with `rhs = undefined`, so this function
/// threads each newly parsed op (and finally the child) into the previous
/// op's rhs, returning the outermost op.
fn parsePrefixOpExpr(p: *Parser, opParseFn: NodeParseFn, childParseFn: NodeParseFn) Error!?*Node {
if (try opParseFn(p)) |first_op| {
var rightmost_op = first_op;
while (true) {
switch (rightmost_op.id) {
.PrefixOp => {
var prefix_op = rightmost_op.cast(Node.PrefixOp).?;
// If the token encountered was **, there will be two nodes
if (p.tokens[prefix_op.op_token].id == .AsteriskAsterisk) {
rightmost_op = prefix_op.rhs;
prefix_op = rightmost_op.cast(Node.PrefixOp).?;
}
if (try opParseFn(p)) |rhs| {
prefix_op.rhs = rhs;
rightmost_op = rhs;
} else break;
},
.AnyFrameType => {
// anyframe->T: the op result slot lives in `result.?.return_type`.
const prom = rightmost_op.cast(Node.AnyFrameType).?;
if (try opParseFn(p)) |rhs| {
prom.result.?.return_type = rhs;
rightmost_op = rhs;
} else break;
},
else => unreachable,
}
}
// If any prefix op existed, a child node on the RHS is required
switch (rightmost_op.id) {
.PrefixOp => {
const prefix_op = rightmost_op.cast(Node.PrefixOp).?;
prefix_op.rhs = try p.expectNode(childParseFn, .{
.InvalidToken = .{ .token = p.tok_i },
});
},
.AnyFrameType => {
const prom = rightmost_op.cast(Node.AnyFrameType).?;
prom.result.?.return_type = try p.expectNode(childParseFn, .{
.InvalidToken = .{ .token = p.tok_i },
});
},
else => unreachable,
}
return first_op;
}
// Otherwise, the child node is optional
return childParseFn(p);
}
/// Child (Op Child)*
/// Child (Op Child)?
/// Left-associative binary expression parser: each parsed op node becomes
/// the new result, with the previous result as lhs and the next child as
/// rhs. `chain` selects whether at most one op (`Once`) or any number
/// (`Infinitely`) is consumed.
fn parseBinOpExpr(
p: *Parser,
opParseFn: NodeParseFn,
childParseFn: NodeParseFn,
chain: enum {
Once,
Infinitely,
},
) Error!?*Node {
var res = (try childParseFn(p)) orelse return null;
while (try opParseFn(p)) |node| {
const right = try p.expectNode(childParseFn, .{
.InvalidToken = .{ .token = p.tok_i },
});
const left = res;
res = node;
const op = node.cast(Node.InfixOp).?;
op.*.lhs = left;
op.*.rhs = right;
switch (chain) {
.Once => break,
.Infinitely => continue,
}
}
return res;
}
/// Allocate an InfixOp node for token `index` with operator `op`;
/// lhs and rhs are filled in by the caller.
fn createInfixOp(p: *Parser, index: TokenIndex, op: Node.InfixOp.Op) !*Node {
    const infix = try p.arena.allocator.create(Node.InfixOp);
    infix.* = .{
        .op_token = index,
        .lhs = undefined, // set by caller
        .op = op,
        .rhs = undefined, // set by caller
    };
    return &infix.base;
}
/// Consume and return the index of the next token if it matches `id`;
/// otherwise leave the stream untouched and return null.
fn eatToken(p: *Parser, id: Token.Id) ?TokenIndex {
    const annotated = p.eatAnnotatedToken(id) orelse return null;
    return annotated.index;
}
/// Like eatToken, but returns the index/pointer pair instead of just the index.
fn eatAnnotatedToken(p: *Parser, id: Token.Id) ?AnnotatedToken {
    if (p.tokens[p.tok_i].id != id) return null;
    return p.nextToken();
}
/// Consume a token of the given id, or fail the parse. The recoverable
/// variant has already recorded a diagnostic by the time we error out.
fn expectToken(p: *Parser, id: Token.Id) Error!TokenIndex {
    const index = (try p.expectTokenRecoverable(id)) orelse return error.ParseError;
    return index;
}
/// Consume a token of the given id. On mismatch, record an ExpectedToken
/// error, rewind the stream, and return null so the caller can recover.
fn expectTokenRecoverable(p: *Parser, id: Token.Id) !?TokenIndex {
const token = p.nextToken();
if (token.ptr.id != id) {
try p.errors.append(p.gpa, .{
.ExpectedToken = .{ .token = token.index, .expected_id = id },
});
// go back so that we can recover properly
p.putBackToken(token.index);
return null;
}
return token.index;
}
/// Advance to the next significant token and return the one at the current
/// position. Line comments are skipped; at Eof the index does not advance,
/// so repeated calls keep returning the Eof token.
fn nextToken(p: *Parser) AnnotatedToken {
const result = AnnotatedToken{
.index = p.tok_i,
.ptr = &p.tokens[p.tok_i],
};
if (p.tokens[p.tok_i].id == .Eof) {
return result;
}
p.tok_i += 1;
// Callers never see line comments; they must already have been skipped.
assert(result.ptr.id != .LineComment);
while (true) {
const next_tok = p.tokens[p.tok_i];
if (next_tok.id != .LineComment) return result;
p.tok_i += 1;
}
}
/// Rewind the stream so that `putting_back` becomes the current token,
/// stepping back over any line comments nextToken skipped. Asserts that
/// the first non-comment token reached is indeed the one being put back.
fn putBackToken(p: *Parser, putting_back: TokenIndex) void {
while (p.tok_i > 0) {
p.tok_i -= 1;
const prev_tok = p.tokens[p.tok_i];
if (prev_tok.id == .LineComment) continue;
assert(putting_back == p.tok_i);
return;
}
}
/// A token's index paired with a pointer to it in `p.tokens`, as returned
/// by nextToken/eatAnnotatedToken.
const AnnotatedToken = struct {
index: TokenIndex,
ptr: *const Token,
};
/// Run `parseFn` and require a node; a null result has already recorded
/// `err` (via the recoverable variant) and becomes error.ParseError here.
fn expectNode(
p: *Parser,
parseFn: NodeParseFn,
/// if parsing fails
err: AstError,
) Error!*Node {
return (try p.expectNodeRecoverable(parseFn, err)) orelse return error.ParseError;
}
/// Run `parseFn`; on a null result record `err` and return null so the
/// caller can continue parsing.
fn expectNodeRecoverable(
p: *Parser,
parseFn: NodeParseFn,
/// if parsing fails
err: AstError,
) !?*Node {
return (try parseFn(p)) orelse {
try p.errors.append(p.gpa, err);
return null;
};
}
};
fn expectNode(
arena: *Allocator,
it: *TokenIterator,
tree: *Tree,
parseFn: NodeParseFn,
err: AstError, // if parsing fails
) Error!*Node {
return (try expectNodeRecoverable(arena, it, tree, parseFn, err)) orelse
return error.ParseError;
/// Comptime helper: the function type of a parser returning `T` (used as the
/// return type of ListParseFn).
fn ParseFn(comptime T: type) type {
return fn (p: *Parser) Error!T;
}
fn expectNodeRecoverable(
arena: *Allocator,
it: *TokenIterator,
tree: *Tree,
parseFn: NodeParseFn,
err: AstError, // if parsing fails
) !?*Node {
return (try parseFn(arena, it, tree)) orelse {
try tree.errors.push(err);
return null;
};
}
test "std.zig.parser" {
_ = @import("parser_test.zig");
+3 -6
View File
@@ -3180,9 +3180,8 @@ fn testParse(source: []const u8, allocator: *mem.Allocator, anything_changed: *b
const tree = try std.zig.parse(allocator, source);
defer tree.deinit();
var error_it = tree.errors.iterator(0);
while (error_it.next()) |parse_error| {
const token = tree.tokens.at(parse_error.loc());
for (tree.errors) |*parse_error| {
const token = tree.tokens[parse_error.loc()];
const loc = tree.tokenLocation(0, parse_error.loc());
try stderr.print("(memory buffer):{}:{}: error: ", .{ loc.line + 1, loc.column + 1 });
try tree.renderError(parse_error, stderr);
@@ -3271,8 +3270,6 @@ fn testError(source: []const u8, expected_errors: []const Error) !void {
std.testing.expect(tree.errors.len == expected_errors.len);
for (expected_errors) |expected, i| {
const err = tree.errors.at(i);
std.testing.expect(expected == err.*);
std.testing.expect(expected == tree.errors[i]);
}
}
+62 -65
View File
@@ -67,24 +67,21 @@ fn renderRoot(
stream: var,
tree: *ast.Tree,
) (@TypeOf(stream).Error || Error)!void {
var tok_it = tree.tokens.iterator(0);
// render all the line comments at the beginning of the file
while (tok_it.next()) |token| {
for (tree.tokens) |*token, i| {
if (token.id != .LineComment) break;
try stream.print("{}\n", .{mem.trimRight(u8, tree.tokenSlicePtr(token), " ")});
if (tok_it.peek()) |next_token| {
const loc = tree.tokenLocationPtr(token.end, next_token);
if (loc.line >= 2) {
try stream.writeByte('\n');
}
const next_token = &tree.tokens[i + 1];
const loc = tree.tokenLocationPtr(token.end, next_token.*);
if (loc.line >= 2) {
try stream.writeByte('\n');
}
}
var start_col: usize = 0;
var it = tree.root_node.decls.iterator(0);
var it = tree.root_node.decls.first orelse return;
while (true) {
var decl = (it.next() orelse return).*;
var decl = it.data;
// This loop does the following:
//
@@ -103,7 +100,7 @@ fn renderRoot(
while (token_index != 0) {
token_index -= 1;
const token = tree.tokens.at(token_index);
const token = tree.tokens[token_index];
switch (token.id) {
.LineComment => {},
.DocComment => {
@@ -133,17 +130,18 @@ fn renderRoot(
token_index = decl.firstToken();
while (!fmt_active) {
decl = (it.next() orelse {
it = it.next orelse {
// If there's no next reformatted `decl`, just copy the
// remaining input tokens and bail out.
const start = tree.tokens.at(copy_start_token_index).start;
const start = tree.tokens[copy_start_token_index].start;
try copyFixingWhitespace(stream, tree.source[start..]);
return;
}).*;
};
decl = it.data;
var decl_first_token_index = decl.firstToken();
while (token_index < decl_first_token_index) : (token_index += 1) {
const token = tree.tokens.at(token_index);
const token = tree.tokens[token_index];
switch (token.id) {
.LineComment => {},
.Eof => unreachable,
@@ -163,7 +161,7 @@ fn renderRoot(
token_index = copy_end_token_index;
while (token_index != 0) {
token_index -= 1;
const token = tree.tokens.at(token_index);
const token = tree.tokens[token_index];
switch (token.id) {
.LineComment => {},
.DocComment => {
@@ -174,15 +172,14 @@ fn renderRoot(
}
}
const start = tree.tokens.at(copy_start_token_index).start;
const end = tree.tokens.at(copy_end_token_index).start;
const start = tree.tokens[copy_start_token_index].start;
const end = tree.tokens[copy_end_token_index].start;
try copyFixingWhitespace(stream, tree.source[start..end]);
}
try renderTopLevelDecl(allocator, stream, tree, 0, &start_col, decl);
if (it.peek()) |next_decl| {
try renderExtraNewline(tree, stream, &start_col, next_decl.*);
}
it = it.next orelse return;
try renderExtraNewline(tree, stream, &start_col, it.data);
}
}
@@ -191,13 +188,13 @@ fn renderExtraNewline(tree: *ast.Tree, stream: var, start_col: *usize, node: *as
var prev_token = first_token;
if (prev_token == 0) return;
var newline_threshold: usize = 2;
while (tree.tokens.at(prev_token - 1).id == .DocComment) {
if (tree.tokenLocation(tree.tokens.at(prev_token - 1).end, prev_token).line == 1) {
while (tree.tokens[prev_token - 1].id == .DocComment) {
if (tree.tokenLocation(tree.tokens[prev_token - 1].end, prev_token).line == 1) {
newline_threshold += 1;
}
prev_token -= 1;
}
const prev_token_end = tree.tokens.at(prev_token - 1).end;
const prev_token_end = tree.tokens[prev_token - 1].end;
const loc = tree.tokenLocation(prev_token_end, first_token);
if (loc.line >= newline_threshold) {
try stream.writeByte('\n');
@@ -262,7 +259,7 @@ fn renderContainerDecl(allocator: *mem.Allocator, stream: var, tree: *ast.Tree,
const src_has_trailing_comma = blk: {
const maybe_comma = tree.nextToken(field.lastToken());
break :blk tree.tokens.at(maybe_comma).id == .Comma;
break :blk tree.tokens[maybe_comma].id == .Comma;
};
// The trailing comma is emitted at the end, but if it's not present
@@ -426,13 +423,13 @@ fn renderExpression(
try renderExpression(allocator, stream, tree, indent, start_col, infix_op_node.lhs, op_space);
const after_op_space = blk: {
const loc = tree.tokenLocation(tree.tokens.at(infix_op_node.op_token).end, tree.nextToken(infix_op_node.op_token));
const loc = tree.tokenLocation(tree.tokens[infix_op_node.op_token].end, tree.nextToken(infix_op_node.op_token));
break :blk if (loc.line == 0) op_space else Space.Newline;
};
try renderToken(tree, stream, infix_op_node.op_token, indent, start_col, after_op_space);
if (after_op_space == Space.Newline and
tree.tokens.at(tree.nextToken(infix_op_node.op_token)).id != .MultilineStringLiteralLine)
tree.tokens[tree.nextToken(infix_op_node.op_token)].id != .MultilineStringLiteralLine)
{
try stream.writeByteNTimes(' ', indent + indent_delta);
start_col.* = indent + indent_delta;
@@ -453,10 +450,10 @@ fn renderExpression(
switch (prefix_op_node.op) {
.PtrType => |ptr_info| {
const op_tok_id = tree.tokens.at(prefix_op_node.op_token).id;
const op_tok_id = tree.tokens[prefix_op_node.op_token].id;
switch (op_tok_id) {
.Asterisk, .AsteriskAsterisk => try stream.writeByte('*'),
.LBracket => if (tree.tokens.at(prefix_op_node.op_token + 2).id == .Identifier)
.LBracket => if (tree.tokens[prefix_op_node.op_token + 2].id == .Identifier)
try stream.writeAll("[*c")
else
try stream.writeAll("[*"),
@@ -568,8 +565,8 @@ fn renderExpression(
try renderToken(tree, stream, lbracket, indent, start_col, Space.None); // [
const starts_with_comment = tree.tokens.at(lbracket + 1).id == .LineComment;
const ends_with_comment = tree.tokens.at(rbracket - 1).id == .LineComment;
const starts_with_comment = tree.tokens[lbracket + 1].id == .LineComment;
const ends_with_comment = tree.tokens[rbracket - 1].id == .LineComment;
const new_indent = if (ends_with_comment) indent + indent_delta else indent;
const new_space = if (ends_with_comment) Space.Newline else Space.None;
try renderExpression(allocator, stream, tree, new_indent, start_col, array_info.len_expr, new_space);
@@ -630,7 +627,7 @@ fn renderExpression(
const src_has_trailing_comma = blk: {
const maybe_comma = tree.prevToken(suffix_op.rtoken);
break :blk tree.tokens.at(maybe_comma).id == .Comma;
break :blk tree.tokens[maybe_comma].id == .Comma;
};
if (src_has_trailing_comma) {
@@ -682,8 +679,8 @@ fn renderExpression(
try renderExpression(allocator, stream, tree, indent, start_col, suffix_op.lhs.node, Space.None);
try renderToken(tree, stream, lbracket, indent, start_col, Space.None); // [
const starts_with_comment = tree.tokens.at(lbracket + 1).id == .LineComment;
const ends_with_comment = tree.tokens.at(rbracket - 1).id == .LineComment;
const starts_with_comment = tree.tokens[lbracket + 1].id == .LineComment;
const ends_with_comment = tree.tokens[rbracket - 1].id == .LineComment;
const new_indent = if (ends_with_comment) indent + indent_delta else indent;
const new_space = if (ends_with_comment) Space.Newline else Space.None;
try renderExpression(allocator, stream, tree, new_indent, start_col, index_expr, new_space);
@@ -750,11 +747,11 @@ fn renderExpression(
const src_has_trailing_comma = blk: {
const maybe_comma = tree.prevToken(suffix_op.rtoken);
break :blk tree.tokens.at(maybe_comma).id == .Comma;
break :blk tree.tokens[maybe_comma].id == .Comma;
};
const src_same_line = blk: {
const loc = tree.tokenLocation(tree.tokens.at(lbrace).end, suffix_op.rtoken);
const loc = tree.tokenLocation(tree.tokens[lbrace].end, suffix_op.rtoken);
break :blk loc.line == 0;
};
@@ -858,7 +855,7 @@ fn renderExpression(
try renderToken(tree, stream, lbrace, indent, start_col, Space.None);
return renderToken(tree, stream, suffix_op.rtoken, indent, start_col, space);
}
if (exprs.len == 1 and tree.tokens.at(exprs.at(0).*.lastToken() + 1).id == .RBrace) {
if (exprs.len == 1 and tree.tokens[exprs.at(0).*.lastToken() + 1].id == .RBrace) {
const expr = exprs.at(0).*;
switch (suffix_op.lhs) {
@@ -883,17 +880,17 @@ fn renderExpression(
const expr = it.next().?.*;
if (it.peek()) |next_expr| {
const expr_last_token = expr.*.lastToken() + 1;
const loc = tree.tokenLocation(tree.tokens.at(expr_last_token).end, next_expr.*.firstToken());
const loc = tree.tokenLocation(tree.tokens[expr_last_token].end, next_expr.*.firstToken());
if (loc.line != 0) break :blk count;
count += 1;
} else {
const expr_last_token = expr.*.lastToken();
const loc = tree.tokenLocation(tree.tokens.at(expr_last_token).end, suffix_op.rtoken);
const loc = tree.tokenLocation(tree.tokens[expr_last_token].end, suffix_op.rtoken);
if (loc.line == 0) {
// all on one line
const src_has_trailing_comma = trailblk: {
const maybe_comma = tree.prevToken(suffix_op.rtoken);
break :trailblk tree.tokens.at(maybe_comma).id == .Comma;
break :trailblk tree.tokens[maybe_comma].id == .Comma;
};
if (src_has_trailing_comma) {
break :blk 1; // force row size 1
@@ -933,7 +930,7 @@ fn renderExpression(
var new_indent = indent + indent_delta;
if (tree.tokens.at(tree.nextToken(lbrace)).id != .MultilineStringLiteralLine) {
if (tree.tokens[tree.nextToken(lbrace)].id != .MultilineStringLiteralLine) {
try renderToken(tree, stream, lbrace, new_indent, start_col, Space.Newline);
try stream.writeByteNTimes(' ', new_indent);
} else {
@@ -961,7 +958,7 @@ fn renderExpression(
}
col = 1;
if (tree.tokens.at(tree.nextToken(comma)).id != .MultilineStringLiteralLine) {
if (tree.tokens[tree.nextToken(comma)].id != .MultilineStringLiteralLine) {
try renderToken(tree, stream, comma, new_indent, start_col, Space.Newline); // ,
} else {
try renderToken(tree, stream, comma, new_indent, start_col, Space.None); // ,
@@ -1188,9 +1185,9 @@ fn renderExpression(
var maybe_comma = tree.prevToken(container_decl.lastToken());
// Doc comments for a field may also appear after the comma, eg.
// field_name: T, // comment attached to field_name
if (tree.tokens.at(maybe_comma).id == .DocComment)
if (tree.tokens[maybe_comma].id == .DocComment)
maybe_comma = tree.prevToken(maybe_comma);
break :blk tree.tokens.at(maybe_comma).id == .Comma;
break :blk tree.tokens[maybe_comma].id == .Comma;
};
// Check if the first declaration and the { are on the same line
@@ -1285,7 +1282,7 @@ fn renderExpression(
const src_has_trailing_comma = blk: {
const maybe_comma = tree.prevToken(err_set_decl.rbrace_token);
break :blk tree.tokens.at(maybe_comma).id == .Comma;
break :blk tree.tokens[maybe_comma].id == .Comma;
};
if (src_has_trailing_comma) {
@@ -1317,7 +1314,7 @@ fn renderExpression(
try renderExpression(allocator, stream, tree, indent, start_col, node.*, Space.None);
const comma_token = tree.nextToken(node.*.lastToken());
assert(tree.tokens.at(comma_token).id == .Comma);
assert(tree.tokens[comma_token].id == .Comma);
try renderToken(tree, stream, comma_token, indent, start_col, Space.Space); // ,
try renderExtraNewline(tree, stream, start_col, next_node.*);
} else {
@@ -1342,7 +1339,7 @@ fn renderExpression(
const multiline_str_literal = @fieldParentPtr(ast.Node.MultilineStringLiteral, "base", base);
var skip_first_indent = true;
if (tree.tokens.at(multiline_str_literal.firstToken() - 1).id != .LineComment) {
if (tree.tokens[multiline_str_literal.firstToken() - 1].id != .LineComment) {
try stream.print("\n", .{});
skip_first_indent = false;
}
@@ -1372,7 +1369,7 @@ fn renderExpression(
if (builtin_call.params.len < 2) break :blk false;
const last_node = builtin_call.params.at(builtin_call.params.len - 1).*;
const maybe_comma = tree.nextToken(last_node.lastToken());
break :blk tree.tokens.at(maybe_comma).id == .Comma;
break :blk tree.tokens[maybe_comma].id == .Comma;
};
const lparen = tree.nextToken(builtin_call.builtin_token);
@@ -1410,7 +1407,7 @@ fn renderExpression(
const fn_proto = @fieldParentPtr(ast.Node.FnProto, "base", base);
if (fn_proto.visib_token) |visib_token_index| {
const visib_token = tree.tokens.at(visib_token_index);
const visib_token = tree.tokens[visib_token_index];
assert(visib_token.id == .Keyword_pub or visib_token.id == .Keyword_export);
try renderToken(tree, stream, visib_token_index, indent, start_col, Space.Space); // pub
@@ -1433,7 +1430,7 @@ fn renderExpression(
try renderToken(tree, stream, fn_proto.fn_token, indent, start_col, Space.Space); // fn
break :blk tree.nextToken(fn_proto.fn_token);
};
assert(tree.tokens.at(lparen).id == .LParen);
assert(tree.tokens[lparen].id == .LParen);
const rparen = tree.prevToken(
// the first token for the annotation expressions is the left
@@ -1449,10 +1446,10 @@ fn renderExpression(
.InferErrorSet => |node| tree.prevToken(node.firstToken()),
.Invalid => unreachable,
});
assert(tree.tokens.at(rparen).id == .RParen);
assert(tree.tokens[rparen].id == .RParen);
const src_params_trailing_comma = blk: {
const maybe_comma = tree.tokens.at(rparen - 1).id;
const maybe_comma = tree.tokens[rparen - 1].id;
break :blk maybe_comma == .Comma or maybe_comma == .LineComment;
};
@@ -1591,7 +1588,7 @@ fn renderExpression(
const src_has_trailing_comma = blk: {
const last_node = switch_case.items.at(switch_case.items.len - 1).*;
const maybe_comma = tree.nextToken(last_node.lastToken());
break :blk tree.tokens.at(maybe_comma).id == .Comma;
break :blk tree.tokens[maybe_comma].id == .Comma;
};
if (switch_case.items.len == 1 or !src_has_trailing_comma) {
@@ -1940,7 +1937,7 @@ fn renderExpression(
try renderExpression(allocator, stream, tree, indent_extra, start_col, node, Space.Newline);
try stream.writeByteNTimes(' ', indent_once);
const comma_or_colon = tree.nextToken(node.lastToken());
break :blk switch (tree.tokens.at(comma_or_colon).id) {
break :blk switch (tree.tokens[comma_or_colon].id) {
.Comma => tree.nextToken(comma_or_colon),
else => comma_or_colon,
};
@@ -1978,7 +1975,7 @@ fn renderExpression(
try renderExpression(allocator, stream, tree, indent_extra, start_col, node, Space.Newline);
try stream.writeByteNTimes(' ', indent_once);
const comma_or_colon = tree.nextToken(node.lastToken());
break :blk switch (tree.tokens.at(comma_or_colon).id) {
break :blk switch (tree.tokens[comma_or_colon].id) {
.Comma => tree.nextToken(comma_or_colon),
else => comma_or_colon,
};
@@ -2174,7 +2171,7 @@ fn renderStatement(
try renderExpression(allocator, stream, tree, indent, start_col, base, Space.None);
const semicolon_index = tree.nextToken(base.lastToken());
assert(tree.tokens.at(semicolon_index).id == .Semicolon);
assert(tree.tokens[semicolon_index].id == .Semicolon);
try renderToken(tree, stream, semicolon_index, indent, start_col, Space.Newline);
} else {
try renderExpression(allocator, stream, tree, indent, start_col, base, Space.Newline);
@@ -2212,13 +2209,13 @@ fn renderTokenOffset(
return;
}
var token = tree.tokens.at(token_index);
var token = tree.tokens[token_index];
try stream.writeAll(mem.trimRight(u8, tree.tokenSlicePtr(token)[token_skip_bytes..], " "));
if (space == Space.NoComment)
return;
var next_token = tree.tokens.at(token_index + 1);
var next_token = tree.tokens[token_index + 1];
if (space == Space.Comma) switch (next_token.id) {
.Comma => return renderToken(tree, stream, token_index + 1, indent, start_col, Space.Newline),
@@ -2227,7 +2224,7 @@ fn renderTokenOffset(
return renderToken(tree, stream, token_index + 1, indent, start_col, Space.Newline);
},
else => {
if (token_index + 2 < tree.tokens.len and tree.tokens.at(token_index + 2).id == .MultilineStringLiteralLine) {
if (token_index + 2 < tree.tokens.len and tree.tokens[token_index + 2].id == .MultilineStringLiteralLine) {
try stream.writeAll(",");
return;
} else {
@@ -2244,7 +2241,7 @@ fn renderTokenOffset(
const loc = tree.tokenLocationPtr(token.end, next_token);
if (loc.line == 0) {
offset += 1;
next_token = tree.tokens.at(token_index + offset);
next_token = tree.tokens[token_index + offset];
}
}
@@ -2277,7 +2274,7 @@ fn renderTokenOffset(
Space.Newline => {
offset += 1;
token = next_token;
next_token = tree.tokens.at(token_index + offset);
next_token = tree.tokens[token_index + offset];
if (next_token.id != .LineComment) {
try stream.writeByte('\n');
start_col.* = 0;
@@ -2296,12 +2293,12 @@ fn renderTokenOffset(
try stream.print(" {}", .{mem.trimRight(u8, tree.tokenSlicePtr(next_token), " ")});
offset = 2;
token = next_token;
next_token = tree.tokens.at(token_index + offset);
next_token = tree.tokens[token_index + offset];
if (next_token.id != .LineComment) {
switch (space) {
Space.None, Space.Space => {
try stream.writeByte('\n');
const after_comment_token = tree.tokens.at(token_index + offset);
const after_comment_token = tree.tokens[token_index + offset];
const next_line_indent = switch (after_comment_token.id) {
.RParen, .RBrace, .RBracket => indent,
else => indent + indent_delta,
@@ -2342,7 +2339,7 @@ fn renderTokenOffset(
offset += 1;
token = next_token;
next_token = tree.tokens.at(token_index + offset);
next_token = tree.tokens[token_index + offset];
if (next_token.id != .LineComment) {
switch (space) {
Space.Newline => {
@@ -2357,7 +2354,7 @@ fn renderTokenOffset(
Space.None, Space.Space => {
try stream.writeByte('\n');
const after_comment_token = tree.tokens.at(token_index + offset);
const after_comment_token = tree.tokens[token_index + offset];
const next_line_indent = switch (after_comment_token.id) {
.RParen, .RBrace, .RBracket => blk: {
if (indent > indent_delta) {