Merge remote-tracking branch 'origin/master' into llvm8

This commit is contained in:
Andrew Kelley
2018-11-17 02:18:56 -05:00
230 changed files with 94493 additions and 93890 deletions
+7 -6
View File
@@ -54,8 +54,7 @@ that counts as "freestanding" for the purposes of this table.
|-------------|--------------|---------|---------|---------|---------|
|i386 | OK | planned | OK | planned | planned |
|x86_64 | OK | OK | OK | OK | planned |
|arm | OK | planned | planned | N/A | planned |
|aarch64 | OK | planned | N/A | planned | planned |
|arm | OK | planned | planned | planned | planned |
|bpf | OK | planned | N/A | N/A | planned |
|hexagon | OK | planned | N/A | N/A | planned |
|mips | OK | planned | N/A | N/A | planned |
@@ -64,20 +63,22 @@ that counts as "freestanding" for the purposes of this table.
|amdgcn | OK | planned | N/A | N/A | planned |
|sparc | OK | planned | N/A | N/A | planned |
|s390x | OK | planned | N/A | N/A | planned |
|thumb | OK | planned | N/A | N/A | planned |
|spir | OK | planned | N/A | N/A | planned |
|lanai | OK | planned | N/A | N/A | planned |
|wasm32 | planned | N/A | N/A | N/A | N/A |
|wasm64 | planned | N/A | N/A | N/A | N/A |
|riscv32 | planned | planned | N/A | N/A | planned |
|riscv64 | planned | planned | N/A | N/A | planned |
## Community
* IRC: `#zig` on Freenode ([Channel Logs](https://irclog.whitequark.org/zig/)).
* Reddit: [/r/zig](https://www.reddit.com/r/zig)
* Email list: [ziglang@googlegroups.com](https://groups.google.com/forum/#!forum/ziglang)
* Email list: [~andrewrk/ziglang@lists.sr.ht](https://lists.sr.ht/%7Eandrewrk/ziglang)
## Building
[![Build Status](https://travis-ci.org/ziglang/zig.svg?branch=master)](https://travis-ci.org/ziglang/zig)
[![Build status](https://ci.appveyor.com/api/projects/status/4t80mk2dmucrc38i/branch/master?svg=true)](https://ci.appveyor.com/project/andrewrk/zig-d3l86/branch/master)
[![Build Status](https://dev.azure.com/ziglang/zig/_apis/build/status/ziglang.zig?branchName=master)](https://dev.azure.com/ziglang/zig/_build/latest?definitionId=1&branchName=master)
Note that you can
[download a binary of master branch](https://ziglang.org/download/#release-master).
+13 -13
View File
@@ -17,7 +17,7 @@ pub fn build(b: *Builder) !void {
const rel_zig_exe = try os.path.relative(b.allocator, b.build_root, b.zig_exe);
const langref_out_path = os.path.join(b.allocator, b.cache_root, "langref.html") catch unreachable;
var docgen_cmd = b.addCommand(null, b.env_map, [][]const u8.{
var docgen_cmd = b.addCommand(null, b.env_map, [][]const u8{
docgen_exe.getOutputPath(),
rel_zig_exe,
"doc" ++ os.path.sep_str ++ "langref.html.in",
@@ -31,12 +31,12 @@ pub fn build(b: *Builder) !void {
const test_step = b.step("test", "Run all the tests");
// find the stage0 build artifacts because we're going to re-use config.h and zig_cpp library
const build_info = try b.exec([][]const u8.{
const build_info = try b.exec([][]const u8{
b.zig_exe,
"BUILD_INFO",
});
var index: usize = 0;
var ctx = Context.{
var ctx = Context{
.cmake_binary_dir = nextValue(&index, build_info),
.cxx_compiler = nextValue(&index, build_info),
.llvm_config_exe = nextValue(&index, build_info),
@@ -162,7 +162,7 @@ fn addCppLib(b: *Builder, lib_exe_obj: var, cmake_binary_dir: []const u8, lib_na
lib_exe_obj.addObjectFile(os.path.join(b.allocator, cmake_binary_dir, "zig_cpp", b.fmt("{}{}{}", lib_prefix, lib_name, lib_exe_obj.target.libFileExt())) catch unreachable);
}
const LibraryDep = struct.{
const LibraryDep = struct {
prefix: []const u8,
libdirs: ArrayList([]const u8),
libs: ArrayList([]const u8),
@@ -171,24 +171,24 @@ const LibraryDep = struct.{
};
fn findLLVM(b: *Builder, llvm_config_exe: []const u8) !LibraryDep {
const shared_mode = try b.exec([][]const u8.{ llvm_config_exe, "--shared-mode" });
const shared_mode = try b.exec([][]const u8{ llvm_config_exe, "--shared-mode" });
const is_static = mem.startsWith(u8, shared_mode, "static");
const libs_output = if (is_static)
try b.exec([][]const u8.{
try b.exec([][]const u8{
llvm_config_exe,
"--libfiles",
"--system-libs",
})
else
try b.exec([][]const u8.{
try b.exec([][]const u8{
llvm_config_exe,
"--libs",
});
const includes_output = try b.exec([][]const u8.{ llvm_config_exe, "--includedir" });
const libdir_output = try b.exec([][]const u8.{ llvm_config_exe, "--libdir" });
const prefix_output = try b.exec([][]const u8.{ llvm_config_exe, "--prefix" });
const includes_output = try b.exec([][]const u8{ llvm_config_exe, "--includedir" });
const libdir_output = try b.exec([][]const u8{ llvm_config_exe, "--libdir" });
const prefix_output = try b.exec([][]const u8{ llvm_config_exe, "--prefix" });
var result = LibraryDep.{
var result = LibraryDep{
.prefix = mem.split(prefix_output, " \r\n").next().?,
.libs = ArrayList([]const u8).init(b.allocator),
.system_libs = ArrayList([]const u8).init(b.allocator),
@@ -328,7 +328,7 @@ fn addCxxKnownPath(
objname: []const u8,
errtxt: ?[]const u8,
) !void {
const path_padded = try b.exec([][]const u8.{
const path_padded = try b.exec([][]const u8{
ctx.cxx_compiler,
b.fmt("-print-file-name={}", objname),
});
@@ -344,7 +344,7 @@ fn addCxxKnownPath(
exe.addObjectFile(path_unpadded);
}
const Context = struct.{
const Context = struct {
cmake_binary_dir: []const u8,
cxx_compiler: []const u8,
llvm_config_exe: []const u8,
+1 -1
View File
@@ -22,7 +22,7 @@ mkdir $TMPDIR
cd $HOME
HAVE_CACHE="true"
wget "https://ziglang.org/builds/$CACHE_BASENAME.tar.xz" || HAVE_CACHE="false"
wget -nv "https://ziglang.org/builds/$CACHE_BASENAME.tar.xz" || HAVE_CACHE="false"
if [ "${HAVE_CACHE}" = "true" ]; then
tar xf "$CACHE_BASENAME.tar.xz"
else
+2 -2
View File
@@ -3,7 +3,7 @@
set -x
set -e
pacman -S --needed --noconfirm wget zip python3-pip
pacman -S --needed --noconfirm wget p7zip python3-pip
pip install s3cmd
wget "https://ziglang.org/deps/llvm%2bclang-8.0.0-win64-msvc-release.tar.xz"
wget -nv "https://ziglang.org/deps/llvm%2bclang-8.0.0-win64-msvc-release.tar.xz"
tar xf llvm+clang-8.0.0-win64-msvc-release.tar.xz
+2 -1
View File
@@ -6,6 +6,7 @@ set -e
if [ "${BUILD_REASON}" != "PullRequest" ]; then
cd "$ZIGBUILDDIR"
rm release/*.lib
mv ../LICENSE release/
mv ../zig-cache/langref.html release/
mv release/bin/zig.exe release/
@@ -15,7 +16,7 @@ if [ "${BUILD_REASON}" != "PullRequest" ]; then
DIRNAME="zig-windows-x86_64-$VERSION"
TARBALL="$DIRNAME.zip"
mv release "$DIRNAME"
zip -r "$TARBALL" "$DIRNAME"
7z a "$TARBALL" "$DIRNAME"
mv "$DOWNLOADSECUREFILE_SECUREFILEPATH" "$HOME/.s3cfg"
s3cmd put -P "$TARBALL" s3://ziglang.org/builds/
+49 -48
View File
@@ -58,12 +58,12 @@ pub fn main() !void {
try buffered_out_stream.flush();
}
const Token = struct.{
const Token = struct {
id: Id,
start: usize,
end: usize,
const Id = enum.{
const Id = enum {
Invalid,
Content,
BracketOpen,
@@ -74,14 +74,14 @@ const Token = struct.{
};
};
const Tokenizer = struct.{
const Tokenizer = struct {
buffer: []const u8,
index: usize,
state: State,
source_file_name: []const u8,
code_node_count: usize,
const State = enum.{
const State = enum {
Start,
LBracket,
Hash,
@@ -90,7 +90,7 @@ const Tokenizer = struct.{
};
fn init(source_file_name: []const u8, buffer: []const u8) Tokenizer {
return Tokenizer.{
return Tokenizer{
.buffer = buffer,
.index = 0,
.state = State.Start,
@@ -100,7 +100,7 @@ const Tokenizer = struct.{
}
fn next(self: *Tokenizer) Token {
var result = Token.{
var result = Token{
.id = Token.Id.Eof,
.start = self.index,
.end = undefined,
@@ -184,7 +184,7 @@ const Tokenizer = struct.{
return result;
}
const Location = struct.{
const Location = struct {
line: usize,
column: usize,
line_start: usize,
@@ -192,7 +192,7 @@ const Tokenizer = struct.{
};
fn getTokenLocation(self: *Tokenizer, token: Token) Location {
var loc = Location.{
var loc = Location{
.line = 0,
.column = 0,
.line_start = 0,
@@ -216,7 +216,7 @@ const Tokenizer = struct.{
}
};
fn parseError(tokenizer: *Tokenizer, token: Token, comptime fmt: []const u8, args: ...) error {
fn parseError(tokenizer: *Tokenizer, token: Token, comptime fmt: []const u8, args: ...) anyerror {
const loc = tokenizer.getTokenLocation(token);
warn("{}:{}:{}: error: " ++ fmt ++ "\n", tokenizer.source_file_name, loc.line + 1, loc.column + 1, args);
if (loc.line_start <= loc.line_end) {
@@ -251,23 +251,23 @@ fn eatToken(tokenizer: *Tokenizer, id: Token.Id) !Token {
return token;
}
const HeaderOpen = struct.{
const HeaderOpen = struct {
name: []const u8,
url: []const u8,
n: usize,
};
const SeeAlsoItem = struct.{
const SeeAlsoItem = struct {
name: []const u8,
token: Token,
};
const ExpectedOutcome = enum.{
const ExpectedOutcome = enum {
Succeed,
Fail,
};
const Code = struct.{
const Code = struct {
id: Id,
name: []const u8,
source_token: Token,
@@ -277,7 +277,7 @@ const Code = struct.{
target_windows: bool,
link_libc: bool,
const Id = union(enum).{
const Id = union(enum) {
Test,
TestError: []const u8,
TestSafety: []const u8,
@@ -286,13 +286,13 @@ const Code = struct.{
};
};
const Link = struct.{
const Link = struct {
url: []const u8,
name: []const u8,
token: Token,
};
const Node = union(enum).{
const Node = union(enum) {
Content: []const u8,
Nav,
Builtin: Token,
@@ -303,13 +303,13 @@ const Node = union(enum).{
Syntax: Token,
};
const Toc = struct.{
const Toc = struct {
nodes: []Node,
toc: []u8,
urls: std.HashMap([]const u8, Token, mem.hash_slice_u8, mem.eql_slice_u8),
};
const Action = enum.{
const Action = enum {
Open,
Close,
};
@@ -343,7 +343,7 @@ fn genToc(allocator: *mem.Allocator, tokenizer: *Tokenizer) !Toc {
break;
},
Token.Id.Content => {
try nodes.append(Node.{ .Content = tokenizer.buffer[token.start..token.end] });
try nodes.append(Node{ .Content = tokenizer.buffer[token.start..token.end] });
},
Token.Id.BracketOpen => {
const tag_token = try eatToken(tokenizer, Token.Id.TagContent);
@@ -355,7 +355,7 @@ fn genToc(allocator: *mem.Allocator, tokenizer: *Tokenizer) !Toc {
try nodes.append(Node.Nav);
} else if (mem.eql(u8, tag_name, "builtin")) {
_ = try eatToken(tokenizer, Token.Id.BracketClose);
try nodes.append(Node.{ .Builtin = tag_token });
try nodes.append(Node{ .Builtin = tag_token });
} else if (mem.eql(u8, tag_name, "header_open")) {
_ = try eatToken(tokenizer, Token.Id.Separator);
const content_token = try eatToken(tokenizer, Token.Id.TagContent);
@@ -365,8 +365,8 @@ fn genToc(allocator: *mem.Allocator, tokenizer: *Tokenizer) !Toc {
header_stack_size += 1;
const urlized = try urlize(allocator, content);
try nodes.append(Node.{
.HeaderOpen = HeaderOpen.{
try nodes.append(Node{
.HeaderOpen = HeaderOpen{
.name = content,
.url = urlized,
.n = header_stack_size,
@@ -409,14 +409,14 @@ fn genToc(allocator: *mem.Allocator, tokenizer: *Tokenizer) !Toc {
switch (see_also_tok.id) {
Token.Id.TagContent => {
const content = tokenizer.buffer[see_also_tok.start..see_also_tok.end];
try list.append(SeeAlsoItem.{
try list.append(SeeAlsoItem{
.name = content,
.token = see_also_tok,
});
},
Token.Id.Separator => {},
Token.Id.BracketClose => {
try nodes.append(Node.{ .SeeAlso = list.toOwnedSlice() });
try nodes.append(Node{ .SeeAlso = list.toOwnedSlice() });
break;
},
else => return parseError(tokenizer, see_also_tok, "invalid see_also token"),
@@ -440,8 +440,8 @@ fn genToc(allocator: *mem.Allocator, tokenizer: *Tokenizer) !Toc {
}
};
try nodes.append(Node.{
.Link = Link.{
try nodes.append(Node{
.Link = Link{
.url = try urlize(allocator, url_name),
.name = name,
.token = name_tok,
@@ -465,24 +465,24 @@ fn genToc(allocator: *mem.Allocator, tokenizer: *Tokenizer) !Toc {
var code_kind_id: Code.Id = undefined;
var is_inline = false;
if (mem.eql(u8, code_kind_str, "exe")) {
code_kind_id = Code.Id.{ .Exe = ExpectedOutcome.Succeed };
code_kind_id = Code.Id{ .Exe = ExpectedOutcome.Succeed };
} else if (mem.eql(u8, code_kind_str, "exe_err")) {
code_kind_id = Code.Id.{ .Exe = ExpectedOutcome.Fail };
code_kind_id = Code.Id{ .Exe = ExpectedOutcome.Fail };
} else if (mem.eql(u8, code_kind_str, "test")) {
code_kind_id = Code.Id.Test;
} else if (mem.eql(u8, code_kind_str, "test_err")) {
code_kind_id = Code.Id.{ .TestError = name };
code_kind_id = Code.Id{ .TestError = name };
name = "test";
} else if (mem.eql(u8, code_kind_str, "test_safety")) {
code_kind_id = Code.Id.{ .TestSafety = name };
code_kind_id = Code.Id{ .TestSafety = name };
name = "test";
} else if (mem.eql(u8, code_kind_str, "obj")) {
code_kind_id = Code.Id.{ .Obj = null };
code_kind_id = Code.Id{ .Obj = null };
} else if (mem.eql(u8, code_kind_str, "obj_err")) {
code_kind_id = Code.Id.{ .Obj = name };
code_kind_id = Code.Id{ .Obj = name };
name = "test";
} else if (mem.eql(u8, code_kind_str, "syntax")) {
code_kind_id = Code.Id.{ .Obj = null };
code_kind_id = Code.Id{ .Obj = null };
is_inline = true;
} else {
return parseError(tokenizer, code_kind_tok, "unrecognized code kind: {}", code_kind_str);
@@ -518,8 +518,8 @@ fn genToc(allocator: *mem.Allocator, tokenizer: *Tokenizer) !Toc {
_ = try eatToken(tokenizer, Token.Id.BracketClose);
} else
unreachable; // TODO issue #707
try nodes.append(Node.{
.Code = Code.{
try nodes.append(Node{
.Code = Code{
.id = code_kind_id,
.name = name,
.source_token = source_token,
@@ -541,7 +541,7 @@ fn genToc(allocator: *mem.Allocator, tokenizer: *Tokenizer) !Toc {
return parseError(tokenizer, end_syntax_tag, "invalid token inside syntax: {}", end_tag_name);
}
_ = try eatToken(tokenizer, Token.Id.BracketClose);
try nodes.append(Node.{ .Syntax = content_tok });
try nodes.append(Node{ .Syntax = content_tok });
} else {
return parseError(tokenizer, tag_token, "unrecognized tag name: {}", tag_name);
}
@@ -550,7 +550,7 @@ fn genToc(allocator: *mem.Allocator, tokenizer: *Tokenizer) !Toc {
}
}
return Toc.{
return Toc{
.nodes = nodes.toOwnedSlice(),
.toc = toc_buf.toOwnedSlice(),
.urls = urls,
@@ -606,7 +606,7 @@ fn writeEscaped(out: var, input: []const u8) !void {
//#define VT_BOLD "\x1b[0;1m"
//#define VT_RESET "\x1b[0m"
const TermState = enum.{
const TermState = enum {
Start,
Escape,
LBracket,
@@ -703,7 +703,7 @@ fn termColor(allocator: *mem.Allocator, input: []const u8) ![]u8 {
return buf.toOwnedSlice();
}
const builtin_types = [][]const u8.{
const builtin_types = [][]const u8{
"f16", "f32", "f64", "f128", "c_longdouble", "c_short",
"c_ushort", "c_int", "c_uint", "c_long", "c_ulong", "c_longlong",
"c_ulonglong", "c_char", "c_void", "void", "bool", "isize",
@@ -735,6 +735,7 @@ fn tokenizeAndPrintRaw(docgen_tokenizer: *Tokenizer, out: var, source_token: Tok
std.zig.Token.Id.Keyword_align,
std.zig.Token.Id.Keyword_and,
std.zig.Token.Id.Keyword_anyerror,
std.zig.Token.Id.Keyword_asm,
std.zig.Token.Id.Keyword_async,
std.zig.Token.Id.Keyword_await,
@@ -763,7 +764,7 @@ fn tokenizeAndPrintRaw(docgen_tokenizer: *Tokenizer, out: var, source_token: Tok
std.zig.Token.Id.Keyword_pub,
std.zig.Token.Id.Keyword_resume,
std.zig.Token.Id.Keyword_return,
std.zig.Token.Id.Keyword_section,
std.zig.Token.Id.Keyword_linksection,
std.zig.Token.Id.Keyword_stdcallcc,
std.zig.Token.Id.Keyword_struct,
std.zig.Token.Id.Keyword_suspend,
@@ -998,7 +999,7 @@ fn genHtml(allocator: *mem.Allocator, tokenizer: *Tokenizer, toc: *Toc, out: var
const tmp_bin_file_name = try os.path.join(allocator, tmp_dir_name, name_plus_bin_ext);
var build_args = std.ArrayList([]const u8).init(allocator);
defer build_args.deinit();
try build_args.appendSlice([][]const u8.{
try build_args.appendSlice([][]const u8{
zig_exe,
"build-exe",
tmp_source_file_name,
@@ -1035,7 +1036,7 @@ fn genHtml(allocator: *mem.Allocator, tokenizer: *Tokenizer, toc: *Toc, out: var
}
_ = exec(allocator, &env_map, build_args.toSliceConst()) catch return parseError(tokenizer, code.source_token, "example failed to compile");
const run_args = [][]const u8.{tmp_bin_file_name};
const run_args = [][]const u8{tmp_bin_file_name};
const result = if (expected_outcome == ExpectedOutcome.Fail) blk: {
const result = try os.ChildProcess.exec(allocator, run_args, null, &env_map, max_doc_file_size);
@@ -1069,7 +1070,7 @@ fn genHtml(allocator: *mem.Allocator, tokenizer: *Tokenizer, toc: *Toc, out: var
var test_args = std.ArrayList([]const u8).init(allocator);
defer test_args.deinit();
try test_args.appendSlice([][]const u8.{
try test_args.appendSlice([][]const u8{
zig_exe,
"test",
tmp_source_file_name,
@@ -1093,7 +1094,7 @@ fn genHtml(allocator: *mem.Allocator, tokenizer: *Tokenizer, toc: *Toc, out: var
},
}
if (code.target_windows) {
try test_args.appendSlice([][]const u8.{
try test_args.appendSlice([][]const u8{
"--target-os",
"windows",
"--target-arch",
@@ -1111,7 +1112,7 @@ fn genHtml(allocator: *mem.Allocator, tokenizer: *Tokenizer, toc: *Toc, out: var
var test_args = std.ArrayList([]const u8).init(allocator);
defer test_args.deinit();
try test_args.appendSlice([][]const u8.{
try test_args.appendSlice([][]const u8{
zig_exe,
"test",
"--color",
@@ -1170,7 +1171,7 @@ fn genHtml(allocator: *mem.Allocator, tokenizer: *Tokenizer, toc: *Toc, out: var
var test_args = std.ArrayList([]const u8).init(allocator);
defer test_args.deinit();
try test_args.appendSlice([][]const u8.{
try test_args.appendSlice([][]const u8{
zig_exe,
"test",
tmp_source_file_name,
@@ -1222,7 +1223,7 @@ fn genHtml(allocator: *mem.Allocator, tokenizer: *Tokenizer, toc: *Toc, out: var
const name_plus_h_ext = try std.fmt.allocPrint(allocator, "{}.h", code.name);
const output_h_file_name = try os.path.join(allocator, tmp_dir_name, name_plus_h_ext);
try build_args.appendSlice([][]const u8.{
try build_args.appendSlice([][]const u8{
zig_exe,
"build-obj",
tmp_source_file_name,
@@ -1332,7 +1333,7 @@ fn exec(allocator: *mem.Allocator, env_map: *std.BufMap, args: []const []const u
}
fn getBuiltinCode(allocator: *mem.Allocator, env_map: *std.BufMap, zig_exe: []const u8) ![]const u8 {
const result = try exec(allocator, env_map, []const []const u8.{
const result = try exec(allocator, env_map, []const []const u8{
zig_exe,
"builtin",
});
+604 -262
View File
@@ -8,6 +8,7 @@
body{
background-color:#111;
color: #bbb;
font-family: sans-serif;
}
a {
color: #88f;
@@ -211,7 +212,7 @@ test "comments" {
{#code_begin|syntax|doc_comments#}
/// A structure for storing a timestamp, with nanosecond precision (this is a
/// multiline doc comment).
const Timestamp = struct.{
const Timestamp = struct {
/// The number of seconds since the epoch (this is also a doc comment).
seconds: i64, // signed so we can represent pre-1970 (not a doc comment)
/// The number of nanoseconds past the second (doc comment again).
@@ -220,7 +221,7 @@ const Timestamp = struct.{
/// Returns a `Timestamp` struct representing the Unix epoch; that is, the
/// moment of 1970 Jan 1 00:00:00 UTC (this is a doc comment too).
pub fn unixEpoch() Timestamp {
return Timestamp.{
return Timestamp{
.seconds = 0,
.nanos = 0,
};
@@ -270,7 +271,7 @@ pub fn main() void {
@typeName(@typeOf(optional_value)), optional_value);
// error union
var number_or_error: error!i32 = error.ArgNotFound;
var number_or_error: anyerror!i32 = error.ArgNotFound;
warn("\nerror union 1\ntype: {}\nvalue: {}\n",
@typeName(@typeOf(number_or_error)), number_or_error);
@@ -448,7 +449,7 @@ pub fn main() void {
<td>the type of types</td>
</tr>
<tr>
<td>{#syntax#}error.{#endsyntax#}</td>
<td>{#syntax#}anyerror{#endsyntax#}</td>
<td>(none)</td>
<td>an error code</td>
</tr>
@@ -467,9 +468,10 @@ pub fn main() void {
<p>
In addition to the integer types above, arbitrary bit-width integers can be referenced by using
an identifier of <code>i</code> or </code>u</code> followed by digits. For example, the identifier
{#syntax#}i7{#endsyntax#} refers to a signed 7-bit integer.
{#syntax#}i7{#endsyntax#} refers to a signed 7-bit integer. The maximum allowed bit-width of an
integer type is {#syntax#}65535{#endsyntax#}.
</p>
{#see_also|Integers|Floats|void|Errors#}
{#see_also|Integers|Floats|void|Errors|@IntType#}
{#header_close#}
{#header_open|Primitive Values#}
<div class="table-wrapper">
@@ -952,7 +954,7 @@ a /= b{#endsyntax#}</pre></td>
<li>{#link|Floats#}</li>
</ul>
</td>
<td>Divison.
<td>Division.
<ul>
<li>Can cause {#link|overflow|Default Operations#} for integers.</li>
<li>Can cause {#link|Division by Zero#} for integers.</li>
@@ -1137,14 +1139,14 @@ a catch |err| b{#endsyntax#}</pre></td>
<li>{#link|Error Unions|Errors#}</li>
</ul>
</td>
<td>If {#syntax#}a{#endsyntax#} is an {#syntax#}error.{#endsyntax#},
<td>If {#syntax#}a{#endsyntax#} is an {#syntax#}error{#endsyntax#},
returns {#syntax#}b{#endsyntax#} ("default value"),
otherwise returns the unwrapped value of {#syntax#}a{#endsyntax#}.
Note that {#syntax#}b{#endsyntax#} may be a value of type {#link|noreturn#}.
{#syntax#}err{#endsyntax#} is the {#syntax#}error.{#endsyntax#} and is in scope of the expression {#syntax#}b{#endsyntax#}.
{#syntax#}err{#endsyntax#} is the {#syntax#}error{#endsyntax#} and is in scope of the expression {#syntax#}b{#endsyntax#}.
</td>
<td>
<pre>{#syntax#}const value: error!u32 = error.Broken;
<pre>{#syntax#}const value: anyerror!u32 = error.Broken;
const unwrapped = value catch 1234;
unwrapped == 1234{#endsyntax#}</pre>
</td>
@@ -1389,9 +1391,9 @@ x.* == 1234{#endsyntax#}</pre>
{#link|Merging Error Sets#}
</td>
<td>
<pre>{#syntax#}const A = error.{One};
const B = error.{Two};
(A || B) == error.{One, Two}{#endsyntax#}</pre>
<pre>{#syntax#}const A = error{One};
const B = error{Two};
(A || B) == error{One, Two}{#endsyntax#}</pre>
</td>
</tr>
</table>
@@ -1421,7 +1423,7 @@ const assert = @import("std").debug.assert;
const mem = @import("std").mem;
// array literal
const message = []u8.{ 'h', 'e', 'l', 'l', 'o' };
const message = []u8{ 'h', 'e', 'l', 'l', 'o' };
// get the size of an array
comptime {
@@ -1457,11 +1459,11 @@ test "modify an array" {
// array concatenation works if the values are known
// at compile time
const part_one = []i32.{ 1, 2, 3, 4 };
const part_two = []i32.{ 5, 6, 7, 8 };
const part_one = []i32{ 1, 2, 3, 4 };
const part_two = []i32{ 5, 6, 7, 8 };
const all_of_it = part_one ++ part_two;
comptime {
assert(mem.eql(i32, all_of_it, []i32.{ 1, 2, 3, 4, 5, 6, 7, 8 }));
assert(mem.eql(i32, all_of_it, []i32{ 1, 2, 3, 4, 5, 6, 7, 8 }));
}
// remember that string literals are arrays
@@ -1479,7 +1481,7 @@ comptime {
}
// initialize an array to zero
const all_zero = []u16.{0} ** 10;
const all_zero = []u16{0} ** 10;
comptime {
assert(all_zero.len == 10);
@@ -1490,14 +1492,14 @@ comptime {
var fancy_array = init: {
var initial_value: [10]Point = undefined;
for (initial_value) |*pt, i| {
pt.* = Point.{
pt.* = Point{
.x = @intCast(i32, i),
.y = @intCast(i32, i) * 2,
};
}
break :init initial_value;
};
const Point = struct.{
const Point = struct {
x: i32,
y: i32,
};
@@ -1508,9 +1510,9 @@ test "compile-time array initalization" {
}
// call a function to initialize an array
var more_points = []Point.{makePoint(3)} ** 10;
var more_points = []Point{makePoint(3)} ** 10;
fn makePoint(x: i32) Point {
return Point.{
return Point{
.x = x,
.y = x * 2,
};
@@ -1589,7 +1591,7 @@ test "pointer array access" {
// Taking an address of an individual element gives a
// pointer to a single item. This kind of pointer
// does not support pointer arithmetic.
var array = []u8.{ 1, 2, 3, 4, 5, 6, 7, 8, 9, 10 };
var array = []u8{ 1, 2, 3, 4, 5, 6, 7, 8, 9, 10 };
const ptr = &array[2];
assert(@typeOf(ptr) == *u8);
@@ -1611,7 +1613,7 @@ test "pointer array access" {
const assert = @import("std").debug.assert;
test "pointer slicing" {
var array = []u8.{ 1, 2, 3, 4, 5, 6, 7, 8, 9, 10 };
var array = []u8{ 1, 2, 3, 4, 5, 6, 7, 8, 9, 10 };
const slice = array[2..4];
assert(slice.len == 2);
@@ -1692,7 +1694,7 @@ test "volatile" {
const assert = @import("std").debug.assert;
test "pointer casting" {
const bytes align(@alignOf(u32)) = []u8.{ 0x12, 0x12, 0x12, 0x12 };
const bytes align(@alignOf(u32)) = []u8{ 0x12, 0x12, 0x12, 0x12 };
const u32_ptr = @ptrCast(*const u32, &bytes);
assert(u32_ptr.* == 0x12121212);
@@ -1781,7 +1783,7 @@ test "function alignment" {
const assert = @import("std").debug.assert;
test "pointer alignment safety" {
var array align(4) = []u32.{ 0x11111111, 0x11111111 };
var array align(4) = []u32{ 0x11111111, 0x11111111 };
const bytes = @sliceToBytes(array[0..]);
assert(foo(bytes) == 0x11111111);
}
@@ -1798,7 +1800,7 @@ fn foo(bytes: []u8) u32 {
const assert = @import("std").debug.assert;
test "basic slices" {
var array = []i32.{ 1, 2, 3, 4 };
var array = []i32{ 1, 2, 3, 4 };
// A slice is a pointer and a length. The difference between an array and
// a slice is that the array's length is part of the type and known at
// compile-time, whereas the slice's length is known at runtime.
@@ -1866,7 +1868,7 @@ test "slice pointer" {
test "slice widening" {
// Zig supports slice widening and slice narrowing. Cast a slice of u8
// to a slice of anything else, and Zig will perform the length conversion.
const array align(@alignOf(u32)) = []u8.{ 0x12, 0x12, 0x12, 0x12, 0x13, 0x13, 0x13, 0x13 };
const array align(@alignOf(u32)) = []u8{ 0x12, 0x12, 0x12, 0x12, 0x13, 0x13, 0x13, 0x13 };
const slice = @bytesToSlice(u32, array[0..]);
assert(slice.len == 2);
assert(slice[0] == 0x12121212);
@@ -1880,27 +1882,27 @@ test "slice widening" {
// Declare a struct.
// Zig gives no guarantees about the order of fields and whether or
// not there will be padding.
const Point = struct.{
const Point = struct {
x: f32,
y: f32,
};
// Maybe we want to pass it to OpenGL so we want to be particular about
// how the bytes are arranged.
const Point2 = packed struct.{
const Point2 = packed struct {
x: f32,
y: f32,
};
// Declare an instance of a struct.
const p = Point.{
const p = Point {
.x = 0.12,
.y = 0.34,
};
// Maybe we're not ready to fill out some of the fields.
var p2 = Point.{
var p2 = Point {
.x = 0.12,
.y = undefined,
};
@@ -1908,13 +1910,13 @@ var p2 = Point.{
// Structs can have methods
// Struct methods are not special, they are only namespaced
// functions that you can call with dot syntax.
const Vec3 = struct.{
const Vec3 = struct {
x: f32,
y: f32,
z: f32,
pub fn init(x: f32, y: f32, z: f32) Vec3 {
return Vec3.{
return Vec3 {
.x = x,
.y = y,
.z = z,
@@ -1940,7 +1942,7 @@ test "dot product" {
// Structs can have global declarations.
// Structs can have 0 fields.
const Empty = struct.{
const Empty = struct {
pub const PI = 3.14;
};
test "struct namespaced variable" {
@@ -1948,7 +1950,7 @@ test "struct namespaced variable" {
assert(@sizeOf(Empty) == 0);
// you can still instantiate an empty struct
const does_nothing = Empty.{};
const does_nothing = Empty {};
}
// struct field order is determined by the compiler for optimal performance.
@@ -1958,7 +1960,7 @@ fn setYBasedOnX(x: *f32, y: f32) void {
point.y = y;
}
test "field parent pointer" {
var point = Point.{
var point = Point {
.x = 0.1234,
.y = 0.5678,
};
@@ -1969,8 +1971,8 @@ test "field parent pointer" {
// You can return a struct from a function. This is how we do generics
// in Zig:
fn LinkedList(comptime T: type) type {
return struct.{
pub const Node = struct.{
return struct {
pub const Node = struct {
prev: ?*Node,
next: ?*Node,
data: T,
@@ -1987,7 +1989,7 @@ test "linked list" {
// do this:
assert(LinkedList(i32) == LinkedList(i32));
var list = LinkedList(i32).{
var list = LinkedList(i32) {
.first = null,
.last = null,
.len = 0,
@@ -1999,12 +2001,12 @@ test "linked list" {
const ListOfInts = LinkedList(i32);
assert(ListOfInts == LinkedList(i32));
var node = ListOfInts.Node.{
var node = ListOfInts.Node {
.prev = null,
.next = null,
.data = 1234,
};
var list2 = LinkedList(i32).{
var list2 = LinkedList(i32) {
.first = &node,
.last = &node,
.len = 1,
@@ -2034,14 +2036,14 @@ test "linked list" {
const std = @import("std");
pub fn main() void {
const Foo = struct.{};
const Foo = struct {};
std.debug.warn("variable: {}\n", @typeName(Foo));
std.debug.warn("anonymous: {}\n", @typeName(struct.{}));
std.debug.warn("anonymous: {}\n", @typeName(struct {}));
std.debug.warn("function: {}\n", @typeName(List(i32)));
}
fn List(comptime T: type) type {
return struct.{
return struct {
x: T,
};
}
@@ -2055,7 +2057,7 @@ const assert = @import("std").debug.assert;
const mem = @import("std").mem;
// Declare an enum.
const Type = enum.{
const Type = enum {
Ok,
NotOk,
};
@@ -2065,7 +2067,7 @@ const c = Type.Ok;
// If you want access to the ordinal value of an enum, you
// can specify the tag type.
const Value = enum(u2).{
const Value = enum(u2) {
Zero,
One,
Two,
@@ -2080,7 +2082,7 @@ test "enum ordinal value" {
}
// You can override the ordinal value for an enum.
const Value2 = enum(u32).{
const Value2 = enum(u32) {
Hundred = 100,
Thousand = 1000,
Million = 1000000,
@@ -2094,7 +2096,7 @@ test "set enum ordinal value" {
// Enums can have methods, the same as structs and unions.
// Enum methods are not special, they are only namespaced
// functions that you can call with dot syntax.
const Suit = enum.{
const Suit = enum {
Clubs,
Spades,
Diamonds,
@@ -2110,7 +2112,7 @@ test "enum method" {
}
// An enum variant of different types can be switched upon.
const Foo = enum.{
const Foo = enum {
String,
Number,
None,
@@ -2126,7 +2128,7 @@ test "enum variant switch" {
}
// @TagType can be used to access the integer tag type of an enum.
const Small = enum.{
const Small = enum {
One,
Two,
Three,
@@ -2156,14 +2158,14 @@ test "@tagName" {
By default, enums are not guaranteed to be compatible with the C ABI:
</p>
{#code_begin|obj_err|parameter of type 'Foo' not allowed in function with calling convention 'ccc'#}
const Foo = enum.{ A, B, C };
const Foo = enum { A, B, C };
export fn entry(foo: Foo) void { }
{#code_end#}
<p>
For a C-ABI-compatible enum, use {#syntax#}extern enum{#endsyntax#}:
</p>
{#code_begin|obj#}
const Foo = extern enum.{ A, B, C };
const Foo = extern enum { A, B, C };
export fn entry(foo: Foo) void { }
{#code_end#}
{#header_close#}
@@ -2175,7 +2177,7 @@ export fn entry(foo: Foo) void { }
const std = @import("std");
test "packed enum" {
const Number = packed enum(u8).{
const Number = packed enum(u8) {
One,
Two,
Three,
@@ -2192,30 +2194,30 @@ const assert = @import("std").debug.assert;
const mem = @import("std").mem;
// A union has only 1 active field at a time.
const Payload = union.{
const Payload = union {
Int: i64,
Float: f64,
Bool: bool,
};
test "simple union" {
var payload = Payload.{.Int = 1234};
var payload = Payload {.Int = 1234};
// payload.Float = 12.34; // ERROR! field not active
assert(payload.Int == 1234);
// You can activate another field by assigning the entire union.
payload = Payload.{.Float = 12.34};
payload = Payload {.Float = 12.34};
assert(payload.Float == 12.34);
}
// Unions can be given an enum tag type:
const ComplexTypeTag = enum.{ Ok, NotOk };
const ComplexType = union(ComplexTypeTag).{
const ComplexTypeTag = enum { Ok, NotOk };
const ComplexType = union(ComplexTypeTag) {
Ok: u8,
NotOk: void,
};
// Declare a specific instance of the union variant.
test "declare union value" {
const c = ComplexType.{ .Ok = 0 };
const c = ComplexType { .Ok = 0 };
assert(ComplexTypeTag(c) == ComplexTypeTag.Ok);
}
@@ -2225,7 +2227,7 @@ test "@TagType" {
}
// Unions can be made to infer the enum tag type.
const Foo = union(enum).{
const Foo = union(enum) {
String: []const u8,
Number: u64,
@@ -2233,7 +2235,7 @@ const Foo = union(enum).{
None,
};
test "union variant switch" {
const p = Foo.{ .Number = 54 };
const p = Foo { .Number = 54 };
const what_is_it = switch (p) {
// Capture by reference
Foo.String => |*x| blk: {
@@ -2255,7 +2257,7 @@ test "union variant switch" {
// Unions can have methods just like structs and enums:
const Variant = union(enum).{
const Variant = union(enum) {
Int: i32,
Bool: bool,
@@ -2268,15 +2270,15 @@ const Variant = union(enum).{
};
test "union method" {
var v1 = Variant.{ .Int = 1 };
var v2 = Variant.{ .Bool = false };
var v1 = Variant { .Int = 1 };
var v2 = Variant { .Bool = false };
assert(v1.truthy());
assert(!v2.truthy());
}
const Small = union.{
const Small = union {
A: i32,
B: bool,
C: u8,
@@ -2294,7 +2296,7 @@ test "@memberName" {
// @tagName gives a []const u8 representation of an enum value,
// but only if the union has an enum tag type.
const Small2 = union(enum).{
const Small2 = union(enum) {
A: i32,
B: bool,
C: u8,
@@ -2388,13 +2390,13 @@ test "switch simple" {
}
test "switch enum" {
const Item = union(enum).{
const Item = union(enum) {
A: u32,
C: struct.{ x: u8, y: u8 },
C: struct { x: u8, y: u8 },
D,
};
var a = Item.{ .A = 3 };
var a = Item { .A = 3 };
// Switching on more complex enums is allowed.
const b = switch (a) {
@@ -2629,7 +2631,7 @@ test "while error union capture" {
var numbers_left: u32 = undefined;
fn eventuallyErrorSequence() error!u32 {
fn eventuallyErrorSequence() anyerror!u32 {
return if (numbers_left == 0) error.ReachedZero else blk: {
numbers_left -= 1;
break :blk numbers_left;
@@ -2683,7 +2685,7 @@ fn typeNameLength(comptime T: type) usize {
const assert = @import("std").debug.assert;
test "for basics" {
const items = []i32.{ 4, 5, 3, 4, 0 };
const items = []i32 { 4, 5, 3, 4, 0 };
var sum: i32 = 0;
// For loops iterate over slices and arrays.
@@ -2713,7 +2715,7 @@ test "for basics" {
}
test "for reference" {
var items = []i32.{ 3, 4, 2 };
var items = []i32 { 3, 4, 2 };
// Iterate over the slice by reference by
// specifying that the capture value is a pointer.
@@ -2728,7 +2730,7 @@ test "for reference" {
test "for else" {
// For allows an else attached to it, the same as a while loop.
var items = []?i32.{ 3, 4, null, 5 };
var items = []?i32 { 3, 4, null, 5 };
// For loops can also be used as expressions.
var sum: i32 = 0;
@@ -2753,8 +2755,8 @@ const assert = std.debug.assert;
test "nested break" {
var count: usize = 0;
outer: for ([]i32.{ 1, 2, 3, 4, 5 }) |_| {
for ([]i32.{ 1, 2, 3, 4, 5 }) |_| {
outer: for ([]i32{ 1, 2, 3, 4, 5 }) |_| {
for ([]i32{ 1, 2, 3, 4, 5 }) |_| {
count += 1;
break :outer;
}
@@ -2764,8 +2766,8 @@ test "nested break" {
test "nested continue" {
var count: usize = 0;
outer: for ([]i32.{ 1, 2, 3, 4, 5, 6, 7, 8 }) |_| {
for ([]i32.{ 1, 2, 3, 4, 5 }) |_| {
outer: for ([]i32{ 1, 2, 3, 4, 5, 6, 7, 8 }) |_| {
for ([]i32{ 1, 2, 3, 4, 5 }) |_| {
count += 1;
continue :outer;
}
@@ -2787,7 +2789,7 @@ test "nested continue" {
const assert = @import("std").debug.assert;
test "inline for loop" {
const nums = []i32.{2, 4, 6};
const nums = []i32{2, 4, 6};
var sum: usize = 0;
inline for (nums) |i| {
const T = switch (i) {
@@ -2822,7 +2824,7 @@ fn typeNameLength(comptime T: type) usize {
// If expressions have three uses, corresponding to the three types:
// * bool
// * ?T
// * error!T
// * anyerror!T
const assert = @import("std").debug.assert;
@@ -2887,14 +2889,14 @@ test "if error union" {
// If expressions test for errors.
// Note the |err| capture on the else.
const a: error!u32 = 0;
const a: anyerror!u32 = 0;
if (a) |value| {
assert(value == 0);
} else |err| {
unreachable;
}
const b: error!u32 = error.BadValue;
const b: anyerror!u32 = error.BadValue;
if (b) |value| {
unreachable;
} else |err| {
@@ -2912,7 +2914,7 @@ test "if error union" {
}
// Access the value by reference using a pointer capture.
var c: error!u32 = 3;
var c: anyerror!u32 = 3;
if (c) |*value| {
value.* = 9;
} else |err| {
@@ -3089,7 +3091,7 @@ test "foo" {
assert(value == 1234);
}
fn bar() error!u32 {
fn bar() anyerror!u32 {
return 1234;
}
@@ -3164,7 +3166,7 @@ fn foo() void { }
In Zig, structs, unions, and enums with payloads can be passed directly to a function:
</p>
{#code_begin|test#}
const Point = struct.{
const Point = struct {
x: i32,
y: i32,
};
@@ -3176,7 +3178,7 @@ fn foo(point: Point) i32 {
const assert = @import("std").debug.assert;
test "pass aggregate type by non-copy value to function" {
assert(foo(Point.{ .x = 1, .y = 2 }) == 3);
assert(foo(Point{ .x = 1, .y = 2 }) == 3);
}
{#code_end#}
<p>
@@ -3216,13 +3218,13 @@ test "fn reflection" {
{#code_begin|test#}
const std = @import("std");
const FileOpenError = error.{
const FileOpenError = error {
AccessDenied,
OutOfMemory,
FileNotFound,
};
const AllocationError = error.{
const AllocationError = error {
OutOfMemory,
};
@@ -3239,13 +3241,13 @@ fn foo(err: AllocationError) FileOpenError {
But you cannot implicitly cast an error from a superset to a subset:
</p>
{#code_begin|test_err|not a member of destination error set#}
const FileOpenError = error.{
const FileOpenError = error {
AccessDenied,
OutOfMemory,
FileNotFound,
};
const AllocationError = error.{
const AllocationError = error {
OutOfMemory,
};
@@ -3265,13 +3267,13 @@ const err = error.FileNotFound;
{#code_end#}
<p>This is equivalent to:</p>
{#code_begin|syntax#}
const err = (error.{FileNotFound}).FileNotFound;
const err = (error {FileNotFound}).FileNotFound;
{#code_end#}
<p>
This becomes useful when using {#link|Inferred Error Sets#}.
</p>
{#header_open|The Global Error Set#}
<p>{#syntax#}error.{#endsyntax#} refers to the global error set.
<p>{#syntax#}error{#endsyntax#} refers to the global error set.
This is the error set that contains all errors in the entire compilation unit.
It is a superset of all other error sets and a subset of none of them.
</p>
@@ -3347,7 +3349,7 @@ test "parse u64" {
<p>
Within the function definition, you can see some return statements that return
an error, and at the bottom a return statement that returns a {#syntax#}u64{#endsyntax#}.
Both types {#link|implicitly cast|Implicit Casts#} to {#syntax#}error!u64{#endsyntax#}.
Both types {#link|implicitly cast|Implicit Casts#} to {#syntax#}anyerror!u64{#endsyntax#}.
</p>
<p>
What it looks like to use this function varies depending on what you're
@@ -3472,7 +3474,7 @@ fn createFoo(param: i32) !Foo {
</li>
<li>
Since Zig understands error types, it can pre-weight branches in favor of
errors not occuring. Just a small optimization benefit that is not available
errors not occurring. Just a small optimization benefit that is not available
in other languages.
</li>
</ul>
@@ -3484,7 +3486,7 @@ fn createFoo(param: i32) !Foo {
const assert = @import("std").debug.assert;
test "error union" {
var foo: error!i32 = undefined;
var foo: anyerror!i32 = undefined;
// Implicitly cast from child type of an error union:
foo = 1234;
@@ -3496,7 +3498,7 @@ test "error union" {
comptime assert(@typeOf(foo).Payload == i32);
// Use compile-time reflection to access the error set type of an error union:
comptime assert(@typeOf(foo).ErrorSet == error);
comptime assert(@typeOf(foo).ErrorSet == anyerror);
}
{#code_end#}
{#header_open|Merging Error Sets#}
@@ -3513,13 +3515,13 @@ test "error union" {
files.
</p>
{#code_begin|test#}
const A = error.{
const A = error{
NotDir,
/// A doc comment
PathNotFound,
};
const B = error.{
const B = error{
OutOfMemory,
/// B doc comment
@@ -3561,7 +3563,7 @@ pub fn add_explicit(comptime T: type, a: T, b: T) Error!T {
return if (@addWithOverflow(T, a, b, &answer)) error.Overflow else answer;
}
const Error = error.{
const Error = error {
Overflow,
};
@@ -3720,11 +3722,11 @@ fn bang2() void {
<li>when returning errors</li>
</ul>
<p>
For the case when no errors are returned, the cost is a single memory write operation, only in the first non-failable function in the call graph that calls a failable function, i.e. when a function returning {#syntax#}void{#endsyntax#} calls a function returning {#syntax#}error.{#endsyntax#}.
For the case when no errors are returned, the cost is a single memory write operation, only in the first non-failable function in the call graph that calls a failable function, i.e. when a function returning {#syntax#}void{#endsyntax#} calls a function returning {#syntax#}error{#endsyntax#}.
This is to initialize this struct in the stack memory:
</p>
{#code_begin|syntax#}
pub const StackTrace = struct.{
pub const StackTrace = struct {
index: usize,
instruction_addresses: [N]usize,
};
@@ -3754,7 +3756,7 @@ fn __zig_return_error(stack_trace: *StackTrace) void {
<p>
As for code size cost, 1 function call before a return statement is no big deal. Even so,
I have <a href="https://github.com/ziglang/zig/issues/690">a plan</a> to make the call to
{#syntax#}__zig_return_error.{#endsyntax#} a tail call, which brings the code size cost down to actually zero. What is a return statement in code without error return tracing can become a jump instruction in code with error return tracing.
{#syntax#}__zig_return_error{#endsyntax#} a tail call, which brings the code size cost down to actually zero. What is a return statement in code without error return tracing can become a jump instruction in code with error return tracing.
</p>
{#header_close#}
{#header_close#}
@@ -3975,7 +3977,7 @@ const assert = std.debug.assert;
const mem = std.mem;
test "cast *[1][*]const u8 to [*]const ?[*]const u8" {
const window_name = [1][*]const u8.{c"window name"};
const window_name = [1][*]const u8{c"window name"};
const x: [*]const ?[*]const u8 = &window_name;
assert(mem.eql(u8, std.cstr.toSliceConst(x[0].?), "window name"));
}
@@ -4158,12 +4160,12 @@ test "peer type resolution: [0]u8 and []const u8" {
}
fn peerTypeEmptyArrayAndSlice(a: bool, slice: []const u8) []const u8 {
if (a) {
return []const u8.{};
return []const u8{};
}
return slice[0..1];
}
test "peer type resolution: [0]u8, []const u8, and error![]u8" {
test "peer type resolution: [0]u8, []const u8, and anyerror![]u8" {
{
var data = "hi";
const slice = data[0..];
@@ -4177,9 +4179,9 @@ test "peer type resolution: [0]u8, []const u8, and error![]u8" {
assert((try peerTypeEmptyArrayAndSliceAndError(false, slice)).len == 1);
}
}
fn peerTypeEmptyArrayAndSliceAndError(a: bool, slice: []u8) error![]u8 {
fn peerTypeEmptyArrayAndSliceAndError(a: bool, slice: []u8) anyerror![]u8 {
if (a) {
return []u8.{};
return []u8{};
}
return slice[0..1];
@@ -4402,15 +4404,15 @@ fn max(a: bool, b: bool) bool {
{#code_begin|test|comptime_vars#}
const assert = @import("std").debug.assert;
const CmdFn = struct.{
const CmdFn = struct {
name: []const u8,
func: fn(i32) i32,
};
const cmd_fns = []CmdFn.{
CmdFn.{.name = "one", .func = one},
CmdFn.{.name = "two", .func = two},
CmdFn.{.name = "three", .func = three},
const cmd_fns = []CmdFn{
CmdFn {.name = "one", .func = one},
CmdFn {.name = "two", .func = two},
CmdFn {.name = "three", .func = three},
};
fn one(value: i32) i32 { return value + 1; }
fn two(value: i32) i32 { return value + 2; }
@@ -4668,7 +4670,7 @@ test "variable values" {
</p>
{#code_begin|syntax#}
fn List(comptime T: type) type {
return struct.{
return struct {
items: []T,
len: usize,
};
@@ -4684,7 +4686,7 @@ fn List(comptime T: type) type {
a name, we assign it to a constant:
</p>
{#code_begin|syntax#}
const Node = struct.{
const Node = struct {
next: *Node,
name: []u8,
};
@@ -4717,8 +4719,8 @@ pub fn main() void {
{#code_begin|syntax#}
/// Calls print and then flushes the buffer.
pub fn printf(self: *OutStream, comptime format: []const u8, args: ...) error!void {
const State = enum.{
pub fn printf(self: *OutStream, comptime format: []const u8, args: ...) anyerror!void {
const State = enum {
Start,
OpenBrace,
CloseBrace,
@@ -4963,7 +4965,7 @@ async fn testAsyncSeq() void {
suspend;
seq('d');
}
var points = []u8.{0} ** "abcdefg".len;
var points = []u8{0} ** "abcdefg".len;
var index: usize = 0;
fn seq(c: u8) void {
@@ -5101,7 +5103,7 @@ async fn another() i32 {
return 1234;
}
var seq_points = []u8.{0} ** "abcdefghi".len;
var seq_points = []u8{0} ** "abcdefghi".len;
var seq_index: usize = 0;
fn seq(c: u8) void {
@@ -5784,7 +5786,7 @@ fn add(a: i32, b: i32) i32 { return a + b; }
{#header_close#}
{#header_open|@intToError#}
<pre>{#syntax#}@intToError(value: @IntType(false, @sizeOf(error) * 8)) error.{#endsyntax#}</pre>
<pre>{#syntax#}@intToError(value: @IntType(false, @sizeOf(anyerror) * 8)) anyerror{#endsyntax#}</pre>
<p>
Converts from the integer representation of an error into the global error set type.
</p>
@@ -5814,9 +5816,10 @@ fn add(a: i32, b: i32) i32 { return a + b; }
{#header_close#}
{#header_open|@IntType#}
<pre>{#syntax#}@IntType(comptime is_signed: bool, comptime bit_count: u32) type{#endsyntax#}</pre>
<pre>{#syntax#}@IntType(comptime is_signed: bool, comptime bit_count: u16) type{#endsyntax#}</pre>
<p>
This function returns an integer type with the given signness and bit count.
This function returns an integer type with the given signness and bit count. The maximum
bit count for an integer type is {#syntax#}65535{#endsyntax#}.
</p>
{#header_close#}
{#header_open|@memberCount#}
@@ -6114,7 +6117,7 @@ test "foo" {
Sets the floating point mode of the current scope. Possible values are:
</p>
{#code_begin|syntax#}
pub const FloatMode = enum.{
pub const FloatMode = enum {
Strict,
Optimized,
};
@@ -6249,13 +6252,13 @@ const std = @import("std");
const assert = std.debug.assert;
test "@This()" {
var items = []i32.{ 1, 2, 3, 4 };
const list = List(i32).{ .items = items[0..] };
var items = []i32{ 1, 2, 3, 4 };
const list = List(i32){ .items = items[0..] };
assert(list.length() == 4);
}
fn List(comptime T: type) type {
return struct.{
return struct {
const Self = @This();
items: []T,
@@ -6303,7 +6306,7 @@ const b: u8 = @truncate(u8, a);
Returns which kind of type something is. Possible values:
</p>
{#code_begin|syntax#}
pub const TypeId = enum.{
pub const TypeId = enum {
Type,
Void,
Bool,
@@ -6337,7 +6340,7 @@ pub const TypeId = enum.{
Returns information on the type. Returns a value of the following union:
</p>
{#code_begin|syntax#}
pub const TypeInfo = union(TypeId).{
pub const TypeInfo = union(TypeId) {
Type: void,
Void: void,
Bool: void,
@@ -6364,96 +6367,96 @@ pub const TypeInfo = union(TypeId).{
Promise: Promise,
pub const Int = struct.{
pub const Int = struct {
is_signed: bool,
bits: u8,
};
pub const Float = struct.{
pub const Float = struct {
bits: u8,
};
pub const Pointer = struct.{
pub const Pointer = struct {
size: Size,
is_const: bool,
is_volatile: bool,
alignment: u32,
child: type,
pub const Size = enum.{
pub const Size = enum {
One,
Many,
Slice,
};
};
pub const Array = struct.{
pub const Array = struct {
len: usize,
child: type,
};
pub const ContainerLayout = enum.{
pub const ContainerLayout = enum {
Auto,
Extern,
Packed,
};
pub const StructField = struct.{
pub const StructField = struct {
name: []const u8,
offset: ?usize,
field_type: type,
};
pub const Struct = struct.{
pub const Struct = struct {
layout: ContainerLayout,
fields: []StructField,
defs: []Definition,
};
pub const Optional = struct.{
pub const Optional = struct {
child: type,
};
pub const ErrorUnion = struct.{
pub const ErrorUnion = struct {
error_set: type,
payload: type,
};
pub const Error = struct.{
pub const Error = struct {
name: []const u8,
value: usize,
};
pub const ErrorSet = struct.{
pub const ErrorSet = struct {
errors: []Error,
};
pub const EnumField = struct.{
pub const EnumField = struct {
name: []const u8,
value: usize,
};
pub const Enum = struct.{
pub const Enum = struct {
layout: ContainerLayout,
tag_type: type,
fields: []EnumField,
defs: []Definition,
};
pub const UnionField = struct.{
pub const UnionField = struct {
name: []const u8,
enum_field: ?EnumField,
field_type: type,
};
pub const Union = struct.{
pub const Union = struct {
layout: ContainerLayout,
tag_type: ?type,
fields: []UnionField,
defs: []Definition,
};
pub const CallingConvention = enum.{
pub const CallingConvention = enum {
Unspecified,
C,
Cold,
@@ -6462,13 +6465,13 @@ pub const TypeInfo = union(TypeId).{
Async,
};
pub const FnArg = struct.{
pub const FnArg = struct {
is_generic: bool,
is_noalias: bool,
arg_type: ?type,
};
pub const Fn = struct.{
pub const Fn = struct {
calling_convention: CallingConvention,
is_generic: bool,
is_var_args: bool,
@@ -6477,21 +6480,21 @@ pub const TypeInfo = union(TypeId).{
args: []FnArg,
};
pub const Promise = struct.{
pub const Promise = struct {
child: ?type,
};
pub const Definition = struct.{
pub const Definition = struct {
name: []const u8,
is_pub: bool,
data: Data,
pub const Data = union(enum).{
pub const Data = union(enum) {
Type: type,
Var: type,
Fn: FnDef,
pub const FnDef = struct.{
pub const FnDef = struct {
fn_type: type,
inline_type: Inline,
calling_convention: CallingConvention,
@@ -6502,7 +6505,7 @@ pub const TypeInfo = union(TypeId).{
return_type: type,
arg_names: [][] const u8,
pub const Inline = enum.{
pub const Inline = enum {
Auto,
Always,
Never,
@@ -6913,7 +6916,7 @@ pub fn main() void {
<p>At compile-time:</p>
{#code_begin|test_err|unable to convert#}
comptime {
var bytes = [5]u8.{ 1, 2, 3, 4, 5 };
var bytes = [5]u8{ 1, 2, 3, 4, 5 };
var slice = @bytesToSlice(u32, bytes);
}
{#code_end#}
@@ -6922,7 +6925,7 @@ comptime {
const std = @import("std");
pub fn main() void {
var bytes = [5]u8.{ 1, 2, 3, 4, 5 };
var bytes = [5]u8{ 1, 2, 3, 4, 5 };
var slice = @bytesToSlice(u32, bytes[0..]);
std.debug.warn("value: {}\n", slice[0]);
}
@@ -7031,7 +7034,7 @@ pub fn main() void {
{#header_open|Invalid Enum Cast#}
<p>At compile-time:</p>
{#code_begin|test_err|has no tag matching integer value 3#}
const Foo = enum.{
const Foo = enum {
A,
B,
C,
@@ -7045,7 +7048,7 @@ comptime {
{#code_begin|exe_err#}
const std = @import("std");
const Foo = enum.{
const Foo = enum {
A,
B,
C,
@@ -7062,11 +7065,11 @@ pub fn main() void {
{#header_open|Invalid Error Set Cast#}
<p>At compile-time:</p>
{#code_begin|test_err|error.B not a member of error set 'Set2'#}
const Set1 = error.{
const Set1 = error{
A,
B,
};
const Set2 = error.{
const Set2 = error{
A,
C,
};
@@ -7078,11 +7081,11 @@ comptime {
{#code_begin|exe_err#}
const std = @import("std");
const Set1 = error.{
const Set1 = error{
A,
B,
};
const Set2 = error.{
const Set2 = error{
A,
C,
};
@@ -7107,7 +7110,7 @@ comptime {
<p>At runtime:</p>
{#code_begin|exe_err#}
pub fn main() !void {
var array align(4) = []u32.{ 0x11111111, 0x11111111 };
var array align(4) = []u32{ 0x11111111, 0x11111111 };
const bytes = @sliceToBytes(array[0..]);
if (foo(bytes) != 0x11111111) return error.Wrong;
}
@@ -7122,11 +7125,11 @@ fn foo(bytes: []u8) u32 {
<p>At compile-time:</p>
{#code_begin|test_err|accessing union field 'float' while field 'int' is set#}
comptime {
var f = Foo.{ .int = 42 };
var f = Foo{ .int = 42 };
f.float = 12.34;
}
const Foo = union.{
const Foo = union {
float: f32,
int: u32,
};
@@ -7135,13 +7138,13 @@ const Foo = union.{
{#code_begin|exe_err#}
const std = @import("std");
const Foo = union.{
const Foo = union {
float: f32,
int: u32,
};
pub fn main() void {
var f = Foo.{ .int = 42 };
var f = Foo{ .int = 42 };
bar(&f);
}
@@ -7159,18 +7162,18 @@ fn bar(f: *Foo) void {
{#code_begin|exe#}
const std = @import("std");
const Foo = union.{
const Foo = union {
float: f32,
int: u32,
};
pub fn main() void {
var f = Foo.{ .int = 42 };
var f = Foo{ .int = 42 };
bar(&f);
}
fn bar(f: *Foo) void {
f.* = Foo.{ .float = 12.34 };
f.* = Foo{ .float = 12.34 };
std.debug.warn("value: {}\n", f.float);
}
{#code_end#}
@@ -7181,14 +7184,14 @@ fn bar(f: *Foo) void {
{#code_begin|exe#}
const std = @import("std");
const Foo = union.{
const Foo = union {
float: f32,
int: u32,
};
pub fn main() void {
var f = Foo.{ .int = 42 };
f = Foo.{ .float = undefined };
var f = Foo{ .int = 42 };
f = Foo{ .float = undefined };
bar(&f);
std.debug.warn("value: {}\n", f.float);
}
@@ -7367,13 +7370,13 @@ pub fn build(b: *Builder) void {
const lib = b.addSharedLibrary("mathtest", "mathtest.zig", b.version(1, 0, 0));
const exe = b.addCExecutable("test");
exe.addCompileFlags([][]const u8.{"-std=c99"});
exe.addCompileFlags([][]const u8{"-std=c99"});
exe.addSourceFile("test.c");
exe.linkLibrary(lib);
b.default_step.dependOn(&exe.step);
const run_cmd = b.addCommand(".", b.env_map, [][]const u8.{exe.getOutputPath()});
const run_cmd = b.addCommand(".", b.env_map, [][]const u8{exe.getOutputPath()});
run_cmd.step.dependOn(&exe.step);
const test_step = b.step("test", "Test the program");
@@ -7433,7 +7436,7 @@ pub fn build(b: *Builder) void {
const obj = b.addObject("base64", "base64.zig");
const exe = b.addCExecutable("test");
exe.addCompileFlags([][]const u8.{
exe.addCompileFlags([][]const u8 {
"-std=c99",
});
exe.addSourceFile("test.c");
@@ -7611,7 +7614,7 @@ coding style.
Open braces on same line, unless you need to wrap.
</li>
<li>If a list of things is longer than 2, put each item on its own line and
exercise the abilty to put an extra comma at the end.
exercise the ability to put an extra comma at the end.
</li>
<li>
Line length: aim for 100; use common sense.
@@ -7661,7 +7664,7 @@ const const_name = 42;
const primitive_type_alias = f32;
const string_alias = []u8;
const StructName = struct.{};
const StructName = struct {};
const StructAlias = StructName;
fn functionName(param_name: TypeName) void {
@@ -7677,7 +7680,7 @@ fn ListTemplateFunction(comptime ChildType: type, comptime fixed_size: usize) ty
}
fn ShortList(comptime T: type, comptime n: usize) type {
return struct.{
return struct {
field_name: [n]T,
fn methodName() void {}
};
@@ -7689,7 +7692,7 @@ const xml_document =
\\<document>
\\</document>
;
const XmlParser = struct.{};
const XmlParser = struct {};
// The initials BE (Big Endian) are just another word in Zig identifier names.
fn readU32Be() u32 {}
@@ -7710,167 +7713,506 @@ fn readU32Be() u32 {}
<p>For some discussion on the rationale behind these design decisions, see <a href="https://github.com/ziglang/zig/issues/663">issue #663</a></p>
{#header_close#}
{#header_open|Grammar#}
<pre><code class="nohighlight">Root = many(TopLevelItem) EOF
<pre><code>Root &lt;- skip ContainerMembers eof
TopLevelItem = CompTimeExpression(Block) | TopLevelDecl | TestDecl
# *** Top level ***
ContainerMembers
&lt;- TestDecl ContainerMembers
/ TopLevelComptime ContainerMembers
/ KEYWORD_pub? TopLevelDecl ContainerMembers
/ KEYWORD_pub? ContainerField COMMA ContainerMembers
/ KEYWORD_pub? ContainerField
/
TestDecl = "test" String Block
TestDecl &lt;- KEYWORD_test STRINGLITERAL Block
TopLevelDecl = option("pub") (FnDef | ExternDecl | GlobalVarDecl | UseDecl)
TopLevelComptime &lt;- KEYWORD_comptime BlockExpr
GlobalVarDecl = option("export") VariableDeclaration ";"
TopLevelDecl
&lt;- (KEYWORD_export / KEYWORD_extern STRINGLITERAL? / KEYWORD_inline)? FnProto (SEMICOLON / Block)
/ (KEYWORD_export / KEYWORD_extern STRINGLITERAL?)? VarDecl
/ KEYWORD_use Expr SEMICOLON
LocalVarDecl = option("comptime") VariableDeclaration
FnProto &lt;- FnCC? KEYWORD_fn IDENTIFIER? LPAREN ParamDeclList RPAREN ByteAlign? LinkSection? EXCLAMATIONMARK? (KEYWORD_var / TypeExpr)
VariableDeclaration = ("var" | "const") Symbol option(":" TypeExpr) option("align" "(" Expression ")") option("section" "(" Expression ")") "=" Expression
VarDecl &lt;- (KEYWORD_const / KEYWORD_var) IDENTIFIER (COLON TypeExpr)? ByteAlign? LinkSection? (EQUAL Expr)? SEMICOLON
ContainerMember = (ContainerField | FnDef | GlobalVarDecl)
ContainerField &lt;- IDENTIFIER (COLON TypeExpr)? (EQUAL Expr)?
ContainerField = Symbol option(":" PrefixOpExpression) option("=" PrefixOpExpression) ","
# *** Block Level ***
Statement
&lt;- KEYWORD_comptime? VarDecl
/ KEYWORD_comptime BlockExprStatement
/ KEYWORD_suspend (SEMICOLON / BlockExprStatement)
/ KEYWORD_defer BlockExprStatement
/ KEYWORD_errdefer BlockExprStatement
/ IfStatement
/ LabeledStatement
/ SwitchExpr
/ AssignExpr SEMICOLON
UseDecl = "use" Expression ";"
IfStatement
&lt;- IfPrefix BlockExpr ( KEYWORD_else Payload? Statement )?
/ IfPrefix AssignExpr ( SEMICOLON / KEYWORD_else Payload? Statement )
ExternDecl = "extern" option(String) (FnProto | VariableDeclaration) ";"
LabeledStatement &lt;- BlockLabel? (Block / LoopStatement)
FnProto = option("nakedcc" | "stdcallcc" | "extern" | ("async" option("&lt;" Expression "&gt;"))) "fn" option(Symbol) ParamDeclList option("align" "(" Expression ")") option("section" "(" Expression ")") option("!") (TypeExpr | "var")
LoopStatement &lt;- KEYWORD_inline? (ForStatement / WhileStatement)
FnDef = option("inline" | "export") FnProto Block
ForStatement
&lt;- ForPrefix BlockExpr ( KEYWORD_else Statement )?
/ ForPrefix AssignExpr ( SEMICOLON / KEYWORD_else Statement )
ParamDeclList = "(" list(ParamDecl, ",") ")"
WhileStatement
&lt;- WhilePrefix BlockExpr ( KEYWORD_else Payload? Statement )?
/ WhilePrefix AssignExpr ( SEMICOLON / KEYWORD_else Payload? Statement )
ParamDecl = option("noalias" | "comptime") option(Symbol ":") (TypeExpr | "var" | "...")
BlockExprStatement
&lt;- BlockExpr
/ AssignExpr SEMICOLON
Block = option(Symbol ":") "{" many(Statement) "}"
BlockExpr &lt;- BlockLabel? Block
Statement = LocalVarDecl ";" | Defer(Block) | Defer(Expression) ";" | BlockExpression(Block) | Expression ";" | ";"
# *** Expression Level ***
AssignExpr &lt;- Expr (AssignOp Expr)?
TypeExpr = (PrefixOpExpression "!" PrefixOpExpression) | PrefixOpExpression
Expr &lt;- KEYWORD_try* BoolOrExpr
BlockOrExpression = Block | Expression
BoolOrExpr &lt;- BoolAndExpr (KEYWORD_or BoolAndExpr)*
Expression = TryExpression | ReturnExpression | BreakExpression | AssignmentExpression | CancelExpression | ResumeExpression
BoolAndExpr &lt;- CompareExpr (KEYWORD_and CompareExpr)*
AsmExpression = "asm" option("volatile") "(" String option(AsmOutput) ")"
CompareExpr &lt;- BitwiseExpr (CompareOp BitwiseExpr)?
AsmOutput = ":" list(AsmOutputItem, ",") option(AsmInput)
BitwiseExpr &lt;- BitShiftExpr (BitwiseOp BitShiftExpr)*
AsmInput = ":" list(AsmInputItem, ",") option(AsmClobbers)
BitShiftExpr &lt;- AdditionExpr (BitShiftOp AdditionExpr)*
AsmOutputItem = "[" Symbol "]" String "(" (Symbol | "-&gt;" TypeExpr) ")"
AdditionExpr &lt;- MultiplyExpr (AdditionOp MultiplyExpr)*
AsmInputItem = "[" Symbol "]" String "(" Expression ")"
MultiplyExpr &lt;- PrefixExpr (MultiplyOp PrefixExpr)*
AsmClobbers= ":" list(String, ",")
PrefixExpr &lt;- PrefixOp* PrimaryExpr
UnwrapExpression = BoolOrExpression (UnwrapOptional | UnwrapError) | BoolOrExpression
PrimaryExpr
&lt;- AsmExpr
/ IfExpr
/ KEYWORD_break BreakLabel? Expr?
/ KEYWORD_cancel Expr
/ KEYWORD_comptime Expr
/ KEYWORD_continue BreakLabel?
/ KEYWORD_resume Expr
/ KEYWORD_return Expr?
/ LabeledExpr
/ CurlySuffixExpr
UnwrapOptional = "orelse" Expression
IfExpr &lt;- IfPrefix Expr (KEYWORD_else Payload? Expr)?
UnwrapError = "catch" option("|" Symbol "|") Expression
LabeledExpr &lt;- BlockLabel? (Block / LoopExpr)
AssignmentExpression = UnwrapExpression AssignmentOperator UnwrapExpression | UnwrapExpression
Block &lt;- LBRACE Statement* RBRACE
AssignmentOperator = "=" | "*=" | "/=" | "%=" | "+=" | "-=" | "&lt;&lt;=" | "&gt;&gt;=" | "&amp;=" | "^=" | "|=" | "*%=" | "+%=" | "-%="
LoopExpr &lt;- KEYWORD_inline? (ForExpr / WhileExpr)
BlockExpression(body) = Block | IfExpression(body) | IfErrorExpression(body) | TestExpression(body) | WhileExpression(body) | ForExpression(body) | SwitchExpression | CompTimeExpression(body) | SuspendExpression(body)
ForExpr &lt;- ForPrefix Expr (KEYWORD_else Expr)?
CompTimeExpression(body) = "comptime" body
WhileExpr &lt;- WhilePrefix Expr (KEYWORD_else Payload? Expr)?
SwitchExpression = "switch" "(" Expression ")" "{" many(SwitchProng) "}"
CurlySuffixExpr &lt;- TypeExpr InitList?
SwitchProng = (list(SwitchItem, ",") | "else") "=&gt;" option("|" option("*") Symbol "|") Expression ","
InitList
&lt;- LBRACE FieldInit (COMMA FieldInit)* COMMA? RBRACE
/ LBRACE Expr (COMMA Expr)* COMMA? RBRACE
/ LBRACE RBRACE
SwitchItem = Expression | (Expression "..." Expression)
TypeExpr &lt;- PrefixTypeOp* ErrorUnionExpr
ForExpression(body) = option(Symbol ":") option("inline") "for" "(" Expression ")" option("|" option("*") Symbol option("," Symbol) "|") body option("else" BlockExpression(body))
ErrorUnionExpr &lt;- SuffixExpr (EXCLAMATIONMARK TypeExpr)?
BoolOrExpression = BoolAndExpression "or" BoolOrExpression | BoolAndExpression
SuffixExpr
&lt;- AsyncPrefix PrimaryTypeExpr SuffixOp* FnCallArgumnets
/ PrimaryTypeExpr (SuffixOp / FnCallArgumnets)*
ReturnExpression = "return" option(Expression)
PrimaryTypeExpr
&lt;- BUILTININDENTIFIER FnCallArgumnets
/ CHAR_LITERAL
/ ContainerDecl
/ ErrorSetDecl
/ FLOAT
/ FnProto
/ GroupedExpr
/ LabeledTypeExpr
/ IDENTIFIER
/ IfTypeExpr
/ INTEGER
/ KEYWORD_anyerror
/ KEYWORD_comptime TypeExpr
/ KEYWORD_error DOT IDENTIFIER
/ KEYWORD_false
/ KEYWORD_null
/ KEYWORD_promise
/ KEYWORD_true
/ KEYWORD_undefined
/ KEYWORD_unreachable
/ STRINGLITERAL
/ SwitchExpr
TryExpression = "try" Expression
ContainerDecl &lt;- (KEYWORD_extern / KEYWORD_packed)? ContainerDeclAuto
AwaitExpression = "await" Expression
ErrorSetDecl &lt;- KEYWORD_error LBRACE IdentifierList RBRACE
BreakExpression = "break" option(":" Symbol) option(Expression)
GroupedExpr &lt;- LPAREN Expr RPAREN
CancelExpression = "cancel" Expression;
IfTypeExpr &lt;- IfPrefix TypeExpr (KEYWORD_else Payload? TypeExpr)?
ResumeExpression = "resume" Expression;
LabeledTypeExpr
&lt;- BlockLabel Block
/ BlockLabel? LoopTypeExpr
Defer(body) = ("defer" | "errdefer") body
LoopTypeExpr &lt;- KEYWORD_inline? (ForTypeExpr / WhileTypeExpr)
IfExpression(body) = "if" "(" Expression ")" body option("else" BlockExpression(body))
ForTypeExpr &lt;- ForPrefix TypeExpr (KEYWORD_else TypeExpr)?
SuspendExpression(body) = "suspend" option( body )
WhileTypeExpr &lt;- WhilePrefix TypeExpr (KEYWORD_else Payload? TypeExpr)?
IfErrorExpression(body) = "if" "(" Expression ")" option("|" option("*") Symbol "|") body "else" "|" Symbol "|" BlockExpression(body)
SwitchExpr &lt;- KEYWORD_switch LPAREN Expr RPAREN LBRACE SwitchProngList RBRACE
TestExpression(body) = "if" "(" Expression ")" option("|" option("*") Symbol "|") body option("else" BlockExpression(body))
# *** Assembly ***
AsmExpr &lt;- KEYWORD_asm KEYWORD_volatile? LPAREN STRINGLITERAL AsmOutput? RPAREN
WhileExpression(body) = option(Symbol ":") option("inline") "while" "(" Expression ")" option("|" option("*") Symbol "|") option(":" "(" Expression ")") body option("else" option("|" Symbol "|") BlockExpression(body))
AsmOutput &lt;- COLON AsmOutputList AsmInput?
BoolAndExpression = ComparisonExpression "and" BoolAndExpression | ComparisonExpression
AsmOutputItem &lt;- LBRACKET IDENTIFIER RBRACKET STRINGLITERAL LPAREN (MINUSRARROW TypeExpr / IDENTIFIER) RPAREN
ComparisonExpression = BinaryOrExpression ComparisonOperator BinaryOrExpression | BinaryOrExpression
AsmInput &lt;- COLON AsmInputList AsmCloppers?
ComparisonOperator = "==" | "!=" | "&lt;" | "&gt;" | "&lt;=" | "&gt;="
AsmInputItem &lt;- LBRACKET IDENTIFIER RBRACKET STRINGLITERAL LPAREN Expr RPAREN
BinaryOrExpression = BinaryXorExpression "|" BinaryOrExpression | BinaryXorExpression
AsmCloppers &lt;- COLON StringList
BinaryXorExpression = BinaryAndExpression "^" BinaryXorExpression | BinaryAndExpression
# *** Helper grammar ***
BreakLabel &lt;- COLON IDENTIFIER
BinaryAndExpression = BitShiftExpression "&amp;" BinaryAndExpression | BitShiftExpression
BlockLabel &lt;- IDENTIFIER COLON
BitShiftExpression = AdditionExpression BitShiftOperator BitShiftExpression | AdditionExpression
FieldInit &lt;- DOT IDENTIFIER EQUAL Expr
BitShiftOperator = "&lt;&lt;" | "&gt;&gt;"
WhileContinueExpr &lt;- COLON LPAREN AssignExpr RPAREN
AdditionExpression = MultiplyExpression AdditionOperator AdditionExpression | MultiplyExpression
LinkSection &lt;- KEYWORD_linksection LPAREN Expr RPAREN
AdditionOperator = "+" | "-" | "++" | "+%" | "-%"
# Fn specific
FnCC
&lt;- KEYWORD_nakedcc
/ KEYWORD_stdcallcc
/ KEYWORD_extern
/ KEYWORD_async (LARROW TypeExpr RARROW)?
MultiplyExpression = CurlySuffixExpression MultiplyOperator MultiplyExpression | CurlySuffixExpression
ParamDecl &lt;- (KEYWORD_noalias / KEYWORD_comptime)? (IDENTIFIER COLON)? ParamType
CurlySuffixExpression = TypeExpr option(ContainerInitExpression)
ParamType
&lt;- KEYWORD_var
/ DOT3
/ TypeExpr
MultiplyOperator = "||" | "*" | "/" | "%" | "**" | "*%"
# Control flow prefixes
IfPrefix &lt;- KEYWORD_if LPAREN Expr RPAREN PtrPayload?
PrefixOpExpression = PrefixOp TypeExpr | SuffixOpExpression
WhilePrefix &lt;- KEYWORD_while LPAREN Expr RPAREN PtrPayload? WhileContinueExpr?
SuffixOpExpression = ("async" option("&lt;" SuffixOpExpression "&gt;") SuffixOpExpression FnCallExpression) | PrimaryExpression option(FnCallExpression | ArrayAccessExpression | FieldAccessExpression | SliceExpression | ".*" | ".?")
ForPrefix &lt;- KEYWORD_for LPAREN Expr RPAREN PtrIndexPayload
FieldAccessExpression = "." Symbol
# Payloads
Payload &lt;- PIPE IDENTIFIER PIPE
FnCallExpression = "(" list(Expression, ",") ")"
PtrPayload &lt;- PIPE ASTERISK? IDENTIFIER PIPE
ArrayAccessExpression = "[" Expression "]"
PtrIndexPayload &lt;- PIPE ASTERISK? IDENTIFIER (COMMA IDENTIFIER)? PIPE
SliceExpression = "[" Expression ".." option(Expression) "]"
ContainerInitExpression = "." "{" ContainerInitBody "}"
# Switch specific
SwitchProng &lt;- SwitchCase EQUALRARROW PtrPayload? AssignExpr
ContainerInitBody = list(StructLiteralField, ",") | list(Expression, ",")
SwitchCase
&lt;- SwitchItem (COMMA SwitchItem)* COMMA?
/ KEYWORD_else
StructLiteralField = "." Symbol "=" Expression
SwitchItem &lt;- Expr (DOT3 Expr)?
PrefixOp = "!" | "-" | "~" | (("*" | "[*]") option("align" "(" Expression option(":" Integer ":" Integer) ")" ) option("const") option("volatile")) | "?" | "-%" | "try" | "await"
# Operators
AssignOp
&lt;- ASTERISKEQUAL
/ SLASHEQUAL
/ PERCENTEQUAL
/ PLUSEQUAL
/ MINUSEQUAL
/ LARROW2EQUAL
/ RARROW2EQUAL
/ AMPERSANDEQUAL
/ CARETEQUAL
/ PIPEEQUAL
/ ASTERISKPERCENTEQUAL
/ PLUSPERCENTEQUAL
/ MINUSPERCENTEQUAL
/ EQUAL
PrimaryExpression = Integer | Float | String | CharLiteral | KeywordLiteral | GroupedExpression | BlockExpression(BlockOrExpression) | Symbol | ("@" Symbol FnCallExpression) | ArrayType | FnProto | AsmExpression | ContainerDecl | ("continue" option(":" Symbol)) | ErrorSetDecl | PromiseType
CompareOp
&lt;- EQUALEQUAL
/ EXCLAMATIONMARKEQUAL
/ LARROW
/ RARROW
/ LARROWEQUAL
/ RARROWEQUAL
PromiseType = "promise" option("-&gt;" TypeExpr)
BitwiseOp
&lt;- AMPERSAND
/ CARET
/ PIPE
/ KEYWORD_orelse
/ KEYWORD_catch Payload?
ArrayType = "[" option(Expression) "]" option("align" "(" Expression option(":" Integer ":" Integer) ")") option("const") option("volatile") TypeExpr
BitShiftOp
&lt;- LARROW2
/ RARROW2
GroupedExpression = "(" Expression ")"
KeywordLiteral = "true" | "false" | "null" | "undefined" | "error" | "unreachable" | "suspend"
ErrorSetDecl = "error" "." "{" list(Symbol, ",") "}"
ContainerDecl = option("extern" | "packed")
("struct" "." option(GroupedExpression) | "union" "." option("enum" option(GroupedExpression) | GroupedExpression) | ("enum" "." option(GroupedExpression)))
"{" many(ContainerMember) "}"
AdditionOp
&lt;- PLUS
/ MINUS
/ PLUS2
/ PLUSPERCENT
/ MINUSPERCENT
MultiplyOp
&lt;- PIPE2
/ ASTERISK
/ SLASH
/ PERCENT
/ ASTERISK2
/ ASTERISKPERCENT
PrefixOp
&lt;- EXCLAMATIONMARK
/ MINUS
/ TILDE
/ MINUSPERCENT
/ AMPERSAND
/ KEYWORD_try
/ KEYWORD_await
PrefixTypeOp
&lt;- QUESTIONMARK
/ KEYWORD_promise MINUSRARROW
/ ArrayTypeStart (ByteAlign / KEYWORD_const / KEYWORD_volatile)*
/ PtrTypeStart (KEYWORD_align LPAREN Expr (COLON INTEGER COLON INTEGER)? RPAREN / KEYWORD_const / KEYWORD_volatile)*
SuffixOp
&lt;- LBRACKET Expr (DOT2 Expr?)? RBRACKET
/ DOT IDENTIFIER
/ DOTASTERISK
/ DOTQUESTIONMARK
AsyncPrefix &lt;- KEYWORD_async (LARROW PrefixExpr RARROW)?
FnCallArguments &lt;- LPAREN ExprList RPAREN
# Ptr specific
ArrayTypeStart &lt;- LBRACKET Expr? RBRACKET
PtrTypeStart
&lt;- ASTERISK
/ ASTERISK2
/ LBRACKET ASTERISK RBRACKET
# ContainerDecl specific
ContainerDeclAuto &lt;- ContainerDeclType LBRACE ContainerMembers RBRACE
ContainerDeclType
&lt;- (KEYWORD_struct / KEYWORD_enum) (LPAREN Expr RPAREN)?
/ KEYWORD_union (LPAREN (KEYWORD_enum (LPAREN Expr RPAREN)? / Expr) RPAREN)?
# Alignment
ByteAlign &lt;- KEYWORD_align LPAREN Expr RPAREN
# Lists
IdentifierList &lt;- (IDENTIFIER COMMA)* IDENTIFIER?
SwitchProngList &lt;- (SwitchProng COMMA)* SwitchProng?
AsmOutputList &lt;- (AsmOutputItem COMMA)* AsmOutputItem?
AsmInputList &lt;- (AsmInputItem COMMA)* AsmInputItem?
StringList &lt;- (STRINGLITERAL COMMA)* STRINGLITERAL?
ParamDeclList &lt;- (ParamDecl COMMA)* ParamDecl?
ExprList &lt;- (Expr COMMA)* Expr?
# *** Tokens ***
eof &lt;- !.
hex &lt;- [0-9a-fA-F]
char_escape
&lt;- "\\x" hex hex
/ "\\u" hex hex hex hex
/ "\\U" hex hex hex hex hex hex
/ "\\" [nr\\t'"]
char_char
&lt;- char_escape
/ [^\\'\n]
string_char
&lt;- char_escape
/ [^\\"\n]
line_comment &lt;- '//'[^\n]*
line_string &lt;- ("\\\\" [^\n]* [ \n]*)+
line_cstring &lt;- ("c\\\\" [^\n]* [ \n]*)+
skip &lt;- ([ \n] / line_comment)*
CHAR_LITERAL &lt;- "'" char_char "'" skip
FLOAT
&lt;- "0b" [01]+ "." [01]+ ([eE] [-+]? [01]+)? skip
/ "0o" [0-7]+ "." [0-7]+ ([eE] [-+]? [0-7]+)? skip
/ "0x" hex+ "." hex+ ([pP] [-+]? hex+)? skip
/ [0-9]+ "." [0-9]+ ([eE] [-+]? [0-9]+)? skip
/ "0b" [01]+ "."? [eE] [-+]? [01]+ skip
/ "0o" [0-7]+ "."? [eE] [-+]? [0-7]+ skip
/ "0x" hex+ "."? [pP] [-+]? hex+ skip
/ [0-9]+ "."? [eE] [-+]? [0-9]+ skip
INTEGER
&lt;- "0b" [01]+ skip
/ "0o" [0-7]+ skip
/ "0x" hex+ skip
/ [0-9]+ skip
STRINGLITERAL
&lt;- "c"? "\"" string_char* "\"" skip
/ line_string skip
/ line_cstring skip
IDENTIFIER
&lt;- !keyword ("c" !["\\] / [A-Zabd-z_]) [A-Za-z0-9_]* skip
/ "@\"" string_char* "\"" skip
BUILTININDENTIFIER &lt;- "@"[A-Za-z_][A-Za-z0-9_]* skip
AMPERSAND &lt;- '&' ![=] skip
AMPERSANDEQUAL &lt;- '&=' skip
ASTERISK &lt;- '*' ![*%=] skip
ASTERISK2 &lt;- '**' skip
ASTERISKEQUAL &lt;- '*=' skip
ASTERISKPERCENT &lt;- '*%' ![=] skip
ASTERISKPERCENTEQUAL &lt;- '*%=' skip
CARET &lt;- '^' ![=] skip
CARETEQUAL &lt;- '^=' skip
COLON &lt;- ':' skip
COMMA &lt;- ',' skip
DOT &lt;- '.' ![*.?] skip
DOT2 &lt;- '..' ![.] skip
DOT3 &lt;- '...' skip
DOTASTERISK &lt;- '.*' skip
DOTQUESTIONMARK &lt;- '.?' skip
EQUAL &lt;- '=' ![>=] skip
EQUALEQUAL &lt;- '==' skip
EQUALRARROW &lt;- '=>' skip
EXCLAMATIONMARK &lt;- '!' ![=] skip
EXCLAMATIONMARKEQUAL &lt;- '!=' skip
LARROW &lt;- '&lt;' ![&lt;=] skip
LARROW2 &lt;- '&lt;&lt;' ![=] skip
LARROW2EQUAL &lt;- '&lt;&lt;=' skip
LARROWEQUAL &lt;- '&lt;=' skip
LBRACE &lt;- '{' skip
LBRACKET &lt;- '[' skip
LPAREN &lt;- '(' skip
MINUS &lt;- '-' ![%=>] skip
MINUSEQUAL &lt;- '-=' skip
MINUSPERCENT &lt;- '-%' ![=] skip
MINUSPERCENTEQUAL &lt;- '-%=' skip
MINUSRARROW &lt;- '->' skip
PERCENT &lt;- '%' ![=] skip
PERCENTEQUAL &lt;- '%=' skip
PIPE &lt;- '|' ![|=] skip
PIPE2 &lt;- '||' skip
PIPEEQUAL &lt;- '|=' skip
PLUS &lt;- '+' ![%+=] skip
PLUS2 &lt;- '++' skip
PLUSEQUAL &lt;- '+=' skip
PLUSPERCENT &lt;- '+%' ![=] skip
PLUSPERCENTEQUAL &lt;- '+%=' skip
QUESTIONMARK &lt;- '?' skip
RARROW &lt;- '>' ![>=] skip
RARROW2 &lt;- '>>' ![=] skip
RARROW2EQUAL &lt;- '>>=' skip
RARROWEQUAL &lt;- '>=' skip
RBRACE &lt;- '}' skip
RBRACKET &lt;- ']' skip
RPAREN &lt;- ')' skip
SEMICOLON &lt;- ';' skip
SLASH &lt;- '/' ![=] skip
SLASHEQUAL &lt;- '/=' skip
TILDE &lt;- '~' skip
end_of_word &lt;- ![a-zA-Z0-9_] skip
KEYWORD_align &lt;- 'align' end_of_word
KEYWORD_and &lt;- 'and' end_of_word
KEYWORD_anyerror &lt;- 'anyerror' end_of_word
KEYWORD_asm &lt;- 'asm' end_of_word
KEYWORD_async &lt;- 'async' end_of_word
KEYWORD_await &lt;- 'await' end_of_word
KEYWORD_break &lt;- 'break' end_of_word
KEYWORD_cancel &lt;- 'cancel' end_of_word
KEYWORD_catch &lt;- 'catch' end_of_word
KEYWORD_comptime &lt;- 'comptime' end_of_word
KEYWORD_const &lt;- 'const' end_of_word
KEYWORD_continue &lt;- 'continue' end_of_word
KEYWORD_defer &lt;- 'defer' end_of_word
KEYWORD_else &lt;- 'else' end_of_word
KEYWORD_enum &lt;- 'enum' end_of_word
KEYWORD_errdefer &lt;- 'errdefer' end_of_word
KEYWORD_error &lt;- 'error' end_of_word
KEYWORD_export &lt;- 'export' end_of_word
KEYWORD_extern &lt;- 'extern' end_of_word
KEYWORD_false &lt;- 'false' end_of_word
KEYWORD_fn &lt;- 'fn' end_of_word
KEYWORD_for &lt;- 'for' end_of_word
KEYWORD_if &lt;- 'if' end_of_word
KEYWORD_inline &lt;- 'inline' end_of_word
KEYWORD_nakedcc &lt;- 'nakedcc' end_of_word
KEYWORD_noalias &lt;- 'noalias' end_of_word
KEYWORD_null &lt;- 'null' end_of_word
KEYWORD_or &lt;- 'or' end_of_word
KEYWORD_orelse &lt;- 'orelse' end_of_word
KEYWORD_packed &lt;- 'packed' end_of_word
KEYWORD_promise &lt;- 'promise' end_of_word
KEYWORD_pub &lt;- 'pub' end_of_word
KEYWORD_resume &lt;- 'resume' end_of_word
KEYWORD_return &lt;- 'return' end_of_word
KEYWORD_linksection &lt;- 'linksection' end_of_word
KEYWORD_stdcallcc &lt;- 'stdcallcc' end_of_word
KEYWORD_struct &lt;- 'struct' end_of_word
KEYWORD_suspend &lt;- 'suspend' end_of_word
KEYWORD_switch &lt;- 'switch' end_of_word
KEYWORD_test &lt;- 'test' end_of_word
KEYWORD_true &lt;- 'true' end_of_word
KEYWORD_try &lt;- 'try' end_of_word
KEYWORD_undefined &lt;- 'undefined' end_of_word
KEYWORD_union &lt;- 'union' end_of_word
KEYWORD_unreachable &lt;- 'unreachable' end_of_word
KEYWORD_use &lt;- 'use' end_of_word
KEYWORD_var &lt;- 'var' end_of_word
KEYWORD_volatile &lt;- 'volatile' end_of_word
KEYWORD_while &lt;- 'while' end_of_word
keyword &lt;- KEYWORD_align / KEYWORD_and / KEYWORD_anyerror / KEYWORD_asm
/ KEYWORD_async / KEYWORD_await / KEYWORD_break / KEYWORD_cancel
/ KEYWORD_catch / KEYWORD_comptime / KEYWORD_const / KEYWORD_continue
/ KEYWORD_defer / KEYWORD_else / KEYWORD_enum / KEYWORD_errdefer
/ KEYWORD_error / KEYWORD_export / KEYWORD_extern / KEYWORD_false
/ KEYWORD_fn / KEYWORD_for / KEYWORD_if / KEYWORD_inline
/ KEYWORD_nakedcc / KEYWORD_noalias / KEYWORD_null / KEYWORD_or
/ KEYWORD_orelse / KEYWORD_packed / KEYWORD_promise / KEYWORD_pub
/ KEYWORD_resume / KEYWORD_return / KEYWORD_linksection
/ KEYWORD_stdcallcc / KEYWORD_struct / KEYWORD_suspend
/ KEYWORD_switch / KEYWORD_test / KEYWORD_true / KEYWORD_try
/ KEYWORD_undefined / KEYWORD_union / KEYWORD_unreachable
      / KEYWORD_use / KEYWORD_var / KEYWORD_volatile / KEYWORD_while
{#header_close#}
{#header_open|Zen#}
<ul>
-2
View File
@@ -1,7 +1,5 @@
# Zig Examples
## Working Examples
* **Tetris** - A simple Tetris clone written in Zig. See
[andrewrk/tetris](https://github.com/andrewrk/tetris).
* **hello_world** - demonstration of a printing a single line to stdout.
+1 -1
View File
@@ -61,7 +61,7 @@ fn cat_file(stdout: *os.File, file: *os.File) !void {
}
}
fn unwrapArg(arg: error![]u8) ![]u8 {
fn unwrapArg(arg: anyerror![]u8) ![]u8 {
return arg catch |err| {
warn("Unable to parse command line: {}\n", err);
return err;
+2 -2
View File
@@ -4,13 +4,13 @@ pub fn build(b: *Builder) void {
const obj = b.addObject("base64", "base64.zig");
const exe = b.addCExecutable("test");
exe.addCompileFlags([][]const u8.{"-std=c99"});
exe.addCompileFlags([][]const u8{"-std=c99"});
exe.addSourceFile("test.c");
exe.addObject(obj);
b.default_step.dependOn(&exe.step);
const run_cmd = b.addCommand(".", b.env_map, [][]const u8.{exe.getOutputPath()});
const run_cmd = b.addCommand(".", b.env_map, [][]const u8{exe.getOutputPath()});
run_cmd.step.dependOn(&exe.step);
const test_step = b.step("test", "Test the program");
+2 -2
View File
@@ -4,13 +4,13 @@ pub fn build(b: *Builder) void {
const lib = b.addSharedLibrary("mathtest", "mathtest.zig", b.version(1, 0, 0));
const exe = b.addCExecutable("test");
exe.addCompileFlags([][]const u8.{"-std=c99"});
exe.addCompileFlags([][]const u8{"-std=c99"});
exe.addSourceFile("test.c");
exe.linkLibrary(lib);
b.default_step.dependOn(&exe.step);
const run_cmd = b.addCommand(".", b.env_map, [][]const u8.{exe.getOutputPath()});
const run_cmd = b.addCommand(".", b.env_map, [][]const u8{exe.getOutputPath()});
run_cmd.step.dependOn(&exe.step);
const test_step = b.step("test", "Test the program");
+1 -3
View File
@@ -5,9 +5,7 @@ comptime {
@export("__mh_execute_header", _mh_execute_header, builtin.GlobalLinkage.Weak);
}
}
var _mh_execute_header = extern struct.{
x: usize,
}.{ .x = 0 };
var _mh_execute_header = extern struct {x: usize}{.x = 0};
export fn add(a: i32, b: i32) i32 {
return a + b;
+16 -16
View File
@@ -32,7 +32,7 @@ fn argInAllowedSet(maybe_set: ?[]const []const u8, arg: []const u8) bool {
// Modifies the current argument index during iteration
fn readFlagArguments(allocator: *Allocator, args: []const []const u8, required: usize, allowed_set: ?[]const []const u8, index: *usize) !FlagArg {
switch (required) {
0 => return FlagArg.{ .None = undefined }, // TODO: Required to force non-tag but value?
0 => return FlagArg{ .None = undefined }, // TODO: Required to force non-tag but value?
1 => {
if (index.* + 1 >= args.len) {
return error.MissingFlagArguments;
@@ -45,7 +45,7 @@ fn readFlagArguments(allocator: *Allocator, args: []const []const u8, required:
return error.ArgumentNotInAllowedSet;
}
return FlagArg.{ .Single = arg };
return FlagArg{ .Single = arg };
},
else => |needed| {
var extra = ArrayList([]const u8).init(allocator);
@@ -67,7 +67,7 @@ fn readFlagArguments(allocator: *Allocator, args: []const []const u8, required:
try extra.append(arg);
}
return FlagArg.{ .Many = extra };
return FlagArg{ .Many = extra };
},
}
}
@@ -75,12 +75,12 @@ fn readFlagArguments(allocator: *Allocator, args: []const []const u8, required:
const HashMapFlags = HashMap([]const u8, FlagArg, std.hash.Fnv1a_32.hash, mem.eql_slice_u8);
// A store for querying found flags and positional arguments.
pub const Args = struct.{
pub const Args = struct {
flags: HashMapFlags,
positionals: ArrayList([]const u8),
pub fn parse(allocator: *Allocator, comptime spec: []const Flag, args: []const []const u8) !Args {
var parsed = Args.{
var parsed = Args{
.flags = HashMapFlags.init(allocator),
.positionals = ArrayList([]const u8).init(allocator),
};
@@ -123,7 +123,7 @@ pub const Args = struct.{
FlagArg.Many => |inner| try prev.appendSlice(inner.toSliceConst()),
}
_ = try parsed.flags.put(flag_name_trimmed, FlagArg.{ .Many = prev });
_ = try parsed.flags.put(flag_name_trimmed, FlagArg{ .Many = prev });
} else {
_ = try parsed.flags.put(flag_name_trimmed, flag_args);
}
@@ -149,7 +149,7 @@ pub const Args = struct.{
}
// e.g. --help
pub fn present(self: *Args, name: []const u8) bool {
pub fn present(self: *const Args, name: []const u8) bool {
return self.flags.contains(name);
}
@@ -177,20 +177,20 @@ pub const Args = struct.{
else => @panic("attempted to retrieve flag with wrong type"),
}
} else {
return []const []const u8.{};
return []const []const u8{};
}
}
};
// Arguments for a flag. e.g. arg1, arg2 in `--command arg1 arg2`.
const FlagArg = union(enum).{
const FlagArg = union(enum) {
None,
Single: []const u8,
Many: ArrayList([]const u8),
};
// Specification for how a flag should be parsed.
pub const Flag = struct.{
pub const Flag = struct {
name: []const u8,
required: usize,
mergable: bool,
@@ -205,7 +205,7 @@ pub const Flag = struct.{
}
pub fn ArgN(comptime name: []const u8, comptime n: usize) Flag {
return Flag.{
return Flag{
.name = name,
.required = n,
.mergable = false,
@@ -218,7 +218,7 @@ pub const Flag = struct.{
@compileError("n must be greater than 0");
}
return Flag.{
return Flag{
.name = name,
.required = n,
.mergable = true,
@@ -227,7 +227,7 @@ pub const Flag = struct.{
}
pub fn Option(comptime name: []const u8, comptime set: []const []const u8) Flag {
return Flag.{
return Flag{
.name = name,
.required = 1,
.mergable = false,
@@ -237,11 +237,11 @@ pub const Flag = struct.{
};
test "parse arguments" {
const spec1 = comptime []const Flag.{
const spec1 = comptime []const Flag{
Flag.Bool("--help"),
Flag.Bool("--init"),
Flag.Arg1("--build-file"),
Flag.Option("--color", []const []const u8.{
Flag.Option("--color", []const []const u8{
"on",
"off",
"auto",
@@ -251,7 +251,7 @@ test "parse arguments" {
Flag.ArgN("--library", 1),
};
const cliargs = []const []const u8.{
const cliargs = []const []const u8{
"build",
"--help",
"pos1",
+11 -11
View File
@@ -1,10 +1,10 @@
pub const CInt = struct.{
pub const CInt = struct {
id: Id,
zig_name: []const u8,
c_name: []const u8,
is_signed: bool,
pub const Id = enum.{
pub const Id = enum {
Short,
UShort,
Int,
@@ -15,50 +15,50 @@ pub const CInt = struct.{
ULongLong,
};
pub const list = []CInt.{
CInt.{
pub const list = []CInt{
CInt{
.id = Id.Short,
.zig_name = "c_short",
.c_name = "short",
.is_signed = true,
},
CInt.{
CInt{
.id = Id.UShort,
.zig_name = "c_ushort",
.c_name = "unsigned short",
.is_signed = false,
},
CInt.{
CInt{
.id = Id.Int,
.zig_name = "c_int",
.c_name = "int",
.is_signed = true,
},
CInt.{
CInt{
.id = Id.UInt,
.zig_name = "c_uint",
.c_name = "unsigned int",
.is_signed = false,
},
CInt.{
CInt{
.id = Id.Long,
.zig_name = "c_long",
.c_name = "long",
.is_signed = true,
},
CInt.{
CInt{
.id = Id.ULong,
.zig_name = "c_ulong",
.c_name = "unsigned long",
.is_signed = false,
},
CInt.{
CInt{
.id = Id.LongLong,
.zig_name = "c_longlong",
.c_name = "long long",
.is_signed = true,
},
CInt.{
CInt{
.id = Id.ULongLong,
.zig_name = "c_ulonglong",
.c_name = "unsigned long long",
+2 -2
View File
@@ -73,7 +73,7 @@ pub async fn renderToLlvm(comp: *Compilation, fn_val: *Value.Fn, code: *ir.Code)
!comp.strip,
) orelse return error.OutOfMemory;
var ofile = ObjectFile.{
var ofile = ObjectFile{
.comp = comp,
.module = module,
.builder = builder,
@@ -135,7 +135,7 @@ pub async fn renderToLlvm(comp: *Compilation, fn_val: *Value.Fn, code: *ir.Code)
}
}
pub const ObjectFile = struct.{
pub const ObjectFile = struct {
comp: *Compilation,
module: llvm.ModuleRef,
builder: llvm.BuilderRef,
+69 -69
View File
@@ -35,7 +35,7 @@ const fs = event.fs;
const max_src_size = 2 * 1024 * 1024 * 1024; // 2 GiB
/// Data that is local to the event loop.
pub const ZigCompiler = struct.{
pub const ZigCompiler = struct {
loop: *event.Loop,
llvm_handle_pool: std.atomic.Stack(llvm.ContextRef),
lld_lock: event.Lock,
@@ -57,7 +57,7 @@ pub const ZigCompiler = struct.{
try std.os.getRandomBytes(seed_bytes[0..]);
const seed = std.mem.readInt(seed_bytes, u64, builtin.Endian.Big);
return ZigCompiler.{
return ZigCompiler{
.loop = loop,
.lld_lock = event.Lock.init(loop),
.llvm_handle_pool = std.atomic.Stack(llvm.ContextRef).init(),
@@ -78,18 +78,18 @@ pub const ZigCompiler = struct.{
/// Gets an exclusive handle on any LlvmContext.
/// Caller must release the handle when done.
pub fn getAnyLlvmContext(self: *ZigCompiler) !LlvmHandle {
if (self.llvm_handle_pool.pop()) |node| return LlvmHandle.{ .node = node };
if (self.llvm_handle_pool.pop()) |node| return LlvmHandle{ .node = node };
const context_ref = c.LLVMContextCreate() orelse return error.OutOfMemory;
errdefer c.LLVMContextDispose(context_ref);
const node = try self.loop.allocator.create(std.atomic.Stack(llvm.ContextRef).Node.{
const node = try self.loop.allocator.create(std.atomic.Stack(llvm.ContextRef).Node{
.next = undefined,
.data = context_ref,
});
errdefer self.loop.allocator.destroy(node);
return LlvmHandle.{ .node = node };
return LlvmHandle{ .node = node };
}
pub async fn getNativeLibC(self: *ZigCompiler) !*LibCInstallation {
@@ -102,8 +102,8 @@ pub const ZigCompiler = struct.{
/// Must be called only once, ever. Sets global state.
pub fn setLlvmArgv(allocator: *Allocator, llvm_argv: []const []const u8) !void {
if (llvm_argv.len != 0) {
var c_compatible_args = try std.cstr.NullTerminated2DArray.fromSlices(allocator, [][]const []const u8.{
[][]const u8.{"zig (LLVM option parsing)"},
var c_compatible_args = try std.cstr.NullTerminated2DArray.fromSlices(allocator, [][]const []const u8{
[][]const u8{"zig (LLVM option parsing)"},
llvm_argv,
});
defer c_compatible_args.deinit();
@@ -112,7 +112,7 @@ pub const ZigCompiler = struct.{
}
};
pub const LlvmHandle = struct.{
pub const LlvmHandle = struct {
node: *std.atomic.Stack(llvm.ContextRef).Node,
pub fn release(self: LlvmHandle, zig_compiler: *ZigCompiler) void {
@@ -120,7 +120,7 @@ pub const LlvmHandle = struct.{
}
};
pub const Compilation = struct.{
pub const Compilation = struct {
zig_compiler: *ZigCompiler,
loop: *event.Loop,
name: Buffer,
@@ -254,7 +254,7 @@ pub const Compilation = struct.{
const CompileErrList = std.ArrayList(*Msg);
// TODO handle some of these earlier and report them in a way other than error codes
pub const BuildError = error.{
pub const BuildError = error{
OutOfMemory,
EndOfStream,
IsDir,
@@ -302,25 +302,25 @@ pub const Compilation = struct.{
BadPathName,
};
pub const Event = union(enum).{
pub const Event = union(enum) {
Ok,
Error: BuildError,
Fail: []*Msg,
};
pub const DarwinVersionMin = union(enum).{
pub const DarwinVersionMin = union(enum) {
None,
MacOS: []const u8,
Ios: []const u8,
};
pub const Kind = enum.{
pub const Kind = enum {
Exe,
Lib,
Obj,
};
pub const LinkLib = struct.{
pub const LinkLib = struct {
name: []const u8,
path: ?[]const u8,
@@ -329,7 +329,7 @@ pub const Compilation = struct.{
provided_explicitly: bool,
};
pub const Emit = enum.{
pub const Emit = enum {
Binary,
Assembly,
LlvmIr,
@@ -380,7 +380,7 @@ pub const Compilation = struct.{
}
const loop = zig_compiler.loop;
var comp = Compilation.{
var comp = Compilation{
.loop = loop,
.arena_allocator = std.heap.ArenaAllocator.init(loop.allocator),
.zig_compiler = zig_compiler,
@@ -419,20 +419,20 @@ pub const Compilation = struct.{
.strip = false,
.is_static = is_static,
.linker_rdynamic = false,
.clang_argv = [][]const u8.{},
.lib_dirs = [][]const u8.{},
.rpath_list = [][]const u8.{},
.assembly_files = [][]const u8.{},
.link_objects = [][]const u8.{},
.clang_argv = [][]const u8{},
.lib_dirs = [][]const u8{},
.rpath_list = [][]const u8{},
.assembly_files = [][]const u8{},
.link_objects = [][]const u8{},
.fn_link_set = event.Locked(FnLinkSet).init(loop, FnLinkSet.init()),
.windows_subsystem_windows = false,
.windows_subsystem_console = false,
.link_libs_list = undefined,
.libc_link_lib = null,
.err_color = errmsg.Color.Auto,
.darwin_frameworks = [][]const u8.{},
.darwin_frameworks = [][]const u8{},
.darwin_version_min = DarwinVersionMin.None,
.test_filters = [][]const u8.{},
.test_filters = [][]const u8{},
.test_name_prefix = null,
.emit_file_type = Emit.Binary,
.link_out_file = null,
@@ -575,7 +575,7 @@ pub const Compilation = struct.{
error.Overflow => return error.Overflow,
error.InvalidCharacter => unreachable, // we just checked the characters above
};
const int_type = try await (async Type.Int.get(comp, Type.Int.Key.{
const int_type = try await (async Type.Int.get(comp, Type.Int.Key{
.bit_count = bit_count,
.is_signed = is_signed,
}) catch unreachable);
@@ -595,10 +595,10 @@ pub const Compilation = struct.{
}
fn initTypes(comp: *Compilation) !void {
comp.meta_type = try comp.arena().create(Type.MetaType.{
.base = Type.{
comp.meta_type = try comp.arena().create(Type.MetaType{
.base = Type{
.name = "type",
.base = Value.{
.base = Value{
.id = Value.Id.Type,
.typ = undefined,
.ref_count = std.atomic.Int(usize).init(3), // 3 because it references itself twice
@@ -612,10 +612,10 @@ pub const Compilation = struct.{
comp.meta_type.base.base.typ = &comp.meta_type.base;
assert((try comp.primitive_type_table.put(comp.meta_type.base.name, &comp.meta_type.base)) == null);
comp.void_type = try comp.arena().create(Type.Void.{
.base = Type.{
comp.void_type = try comp.arena().create(Type.Void{
.base = Type{
.name = "void",
.base = Value.{
.base = Value{
.id = Value.Id.Type,
.typ = &Type.MetaType.get(comp).base,
.ref_count = std.atomic.Int(usize).init(1),
@@ -626,10 +626,10 @@ pub const Compilation = struct.{
});
assert((try comp.primitive_type_table.put(comp.void_type.base.name, &comp.void_type.base)) == null);
comp.noreturn_type = try comp.arena().create(Type.NoReturn.{
.base = Type.{
comp.noreturn_type = try comp.arena().create(Type.NoReturn{
.base = Type{
.name = "noreturn",
.base = Value.{
.base = Value{
.id = Value.Id.Type,
.typ = &Type.MetaType.get(comp).base,
.ref_count = std.atomic.Int(usize).init(1),
@@ -640,10 +640,10 @@ pub const Compilation = struct.{
});
assert((try comp.primitive_type_table.put(comp.noreturn_type.base.name, &comp.noreturn_type.base)) == null);
comp.comptime_int_type = try comp.arena().create(Type.ComptimeInt.{
.base = Type.{
comp.comptime_int_type = try comp.arena().create(Type.ComptimeInt{
.base = Type{
.name = "comptime_int",
.base = Value.{
.base = Value{
.id = Value.Id.Type,
.typ = &Type.MetaType.get(comp).base,
.ref_count = std.atomic.Int(usize).init(1),
@@ -654,10 +654,10 @@ pub const Compilation = struct.{
});
assert((try comp.primitive_type_table.put(comp.comptime_int_type.base.name, &comp.comptime_int_type.base)) == null);
comp.bool_type = try comp.arena().create(Type.Bool.{
.base = Type.{
comp.bool_type = try comp.arena().create(Type.Bool{
.base = Type{
.name = "bool",
.base = Value.{
.base = Value{
.id = Value.Id.Type,
.typ = &Type.MetaType.get(comp).base,
.ref_count = std.atomic.Int(usize).init(1),
@@ -668,16 +668,16 @@ pub const Compilation = struct.{
});
assert((try comp.primitive_type_table.put(comp.bool_type.base.name, &comp.bool_type.base)) == null);
comp.void_value = try comp.arena().create(Value.Void.{
.base = Value.{
comp.void_value = try comp.arena().create(Value.Void{
.base = Value{
.id = Value.Id.Void,
.typ = &Type.Void.get(comp).base,
.ref_count = std.atomic.Int(usize).init(1),
},
});
comp.true_value = try comp.arena().create(Value.Bool.{
.base = Value.{
comp.true_value = try comp.arena().create(Value.Bool{
.base = Value{
.id = Value.Id.Bool,
.typ = &Type.Bool.get(comp).base,
.ref_count = std.atomic.Int(usize).init(1),
@@ -685,8 +685,8 @@ pub const Compilation = struct.{
.x = true,
});
comp.false_value = try comp.arena().create(Value.Bool.{
.base = Value.{
comp.false_value = try comp.arena().create(Value.Bool{
.base = Value{
.id = Value.Id.Bool,
.typ = &Type.Bool.get(comp).base,
.ref_count = std.atomic.Int(usize).init(1),
@@ -694,8 +694,8 @@ pub const Compilation = struct.{
.x = false,
});
comp.noreturn_value = try comp.arena().create(Value.NoReturn.{
.base = Value.{
comp.noreturn_value = try comp.arena().create(Value.NoReturn{
.base = Value{
.id = Value.Id.NoReturn,
.typ = &Type.NoReturn.get(comp).base,
.ref_count = std.atomic.Int(usize).init(1),
@@ -703,10 +703,10 @@ pub const Compilation = struct.{
});
for (CInt.list) |cint, i| {
const c_int_type = try comp.arena().create(Type.Int.{
.base = Type.{
const c_int_type = try comp.arena().create(Type.Int{
.base = Type{
.name = cint.zig_name,
.base = Value.{
.base = Value{
.id = Value.Id.Type,
.typ = &Type.MetaType.get(comp).base,
.ref_count = std.atomic.Int(usize).init(1),
@@ -714,7 +714,7 @@ pub const Compilation = struct.{
.id = builtin.TypeId.Int,
.abi_alignment = Type.AbiAlignment.init(comp.loop),
},
.key = Type.Int.Key.{
.key = Type.Int.Key{
.is_signed = cint.is_signed,
.bit_count = comp.target.cIntTypeSizeInBits(cint.id),
},
@@ -723,10 +723,10 @@ pub const Compilation = struct.{
comp.c_int_types[i] = c_int_type;
assert((try comp.primitive_type_table.put(cint.zig_name, &c_int_type.base)) == null);
}
comp.u8_type = try comp.arena().create(Type.Int.{
.base = Type.{
comp.u8_type = try comp.arena().create(Type.Int{
.base = Type{
.name = "u8",
.base = Value.{
.base = Value{
.id = Value.Id.Type,
.typ = &Type.MetaType.get(comp).base,
.ref_count = std.atomic.Int(usize).init(1),
@@ -734,7 +734,7 @@ pub const Compilation = struct.{
.id = builtin.TypeId.Int,
.abi_alignment = Type.AbiAlignment.init(comp.loop),
},
.key = Type.Int.Key.{
.key = Type.Int.Key{
.is_signed = false,
.bit_count = 8,
},
@@ -777,13 +777,13 @@ pub const Compilation = struct.{
if (compile_errors.len == 0) {
await (async self.events.put(Event.Ok) catch unreachable);
} else {
await (async self.events.put(Event.{ .Fail = compile_errors }) catch unreachable);
await (async self.events.put(Event{ .Fail = compile_errors }) catch unreachable);
}
} else |err| {
// if there's an error then the compile errors have dangling references
self.gpa().free(compile_errors);
await (async self.events.put(Event.{ .Error = err }) catch unreachable);
await (async self.events.put(Event{ .Error = err }) catch unreachable);
}
// First, get an item from the watch channel, waiting on the channel.
@@ -894,7 +894,7 @@ pub const Compilation = struct.{
const fn_proto = @fieldParentPtr(ast.Node.FnProto, "base", decl);
const name = if (fn_proto.name_token) |name_token| tree_scope.tree.tokenSlice(name_token) else {
try self.addCompileError(tree_scope, Span.{
try self.addCompileError(tree_scope, Span{
.first = fn_proto.fn_token,
.last = fn_proto.fn_token + 1,
}, "missing function name");
@@ -924,8 +924,8 @@ pub const Compilation = struct.{
}
} else {
// add new decl
const fn_decl = try self.gpa().create(Decl.Fn.{
.base = Decl.{
const fn_decl = try self.gpa().create(Decl.Fn{
.base = Decl{
.id = Decl.Id.Fn,
.name = name,
.visib = parseVisibToken(tree_scope.tree, fn_proto.visib_token),
@@ -933,7 +933,7 @@ pub const Compilation = struct.{
.parent_scope = &decl_scope.base,
.tree_scope = tree_scope,
},
.value = Decl.Fn.Val.{ .Unresolved = {} },
.value = Decl.Fn.Val{ .Unresolved = {} },
.fn_proto = fn_proto,
});
tree_scope.base.ref();
@@ -1139,7 +1139,7 @@ pub const Compilation = struct.{
}
}
const link_lib = try self.gpa().create(LinkLib.{
const link_lib = try self.gpa().create(LinkLib{
.name = name,
.path = null,
.provided_explicitly = provided_explicitly,
@@ -1307,7 +1307,7 @@ async fn generateDeclFn(comp: *Compilation, fn_decl: *Decl.Fn) !void {
// The Decl.Fn owns the initial 1 reference count
const fn_val = try Value.Fn.create(comp, fn_type, fndef_scope, symbol_name);
fn_decl.value = Decl.Fn.Val.{ .Fn = fn_val };
fn_decl.value = Decl.Fn.Val{ .Fn = fn_val };
symbol_name_consumed = true;
// Define local parameter variables
@@ -1315,7 +1315,7 @@ async fn generateDeclFn(comp: *Compilation, fn_decl: *Decl.Fn) !void {
//AstNode *param_decl_node = get_param_decl_node(fn_table_entry, i);
const param_decl = @fieldParentPtr(ast.Node.ParamDecl, "base", fn_decl.fn_proto.params.at(i).*);
const name_token = param_decl.name_token orelse {
try comp.addCompileError(tree_scope, Span.{
try comp.addCompileError(tree_scope, Span{
.first = param_decl.firstToken(),
.last = param_decl.type_node.firstToken(),
}, "missing parameter name");
@@ -1402,17 +1402,17 @@ async fn analyzeFnType(
const param_node = param_node_ptr.*.cast(ast.Node.ParamDecl).?;
const param_type = try await (async comp.analyzeTypeExpr(tree_scope, scope, param_node.type_node) catch unreachable);
errdefer param_type.base.deref(comp);
try params.append(Type.Fn.Param.{
try params.append(Type.Fn.Param{
.typ = param_type,
.is_noalias = param_node.noalias_token != null,
});
}
}
const key = Type.Fn.Key.{
const key = Type.Fn.Key{
.alignment = null,
.data = Type.Fn.Key.Data.{
.Normal = Type.Fn.Key.Normal.{
.data = Type.Fn.Key.Data{
.Normal = Type.Fn.Key.Normal{
.return_type = return_type,
.params = params.toOwnedSlice(),
.is_var_args = false, // TODO
@@ -1451,7 +1451,7 @@ async fn generateDeclFnProto(comp: *Compilation, fn_decl: *Decl.Fn) !void {
// The Decl.Fn owns the initial 1 reference count
const fn_proto_val = try Value.FnProto.create(comp, fn_type, symbol_name);
fn_decl.value = Decl.Fn.Val.{ .FnProto = fn_proto_val };
fn_decl.value = Decl.Fn.Val{ .FnProto = fn_proto_val };
symbol_name_consumed = true;
}
+7 -7
View File
@@ -10,7 +10,7 @@ const errmsg = @import("errmsg.zig");
const Scope = @import("scope.zig").Scope;
const Compilation = @import("compilation.zig").Compilation;
pub const Decl = struct.{
pub const Decl = struct {
id: Id,
name: []const u8,
visib: Visib,
@@ -44,7 +44,7 @@ pub const Decl = struct.{
const fn_proto = fn_decl.fn_proto;
const start = fn_proto.fn_token;
const end = fn_proto.name_token orelse start;
return errmsg.Span.{
return errmsg.Span{
.first = start,
.last = end + 1,
};
@@ -57,23 +57,23 @@ pub const Decl = struct.{
return base.parent_scope.findRoot();
}
pub const Id = enum.{
pub const Id = enum {
Var,
Fn,
CompTime,
};
pub const Var = struct.{
pub const Var = struct {
base: Decl,
};
pub const Fn = struct.{
pub const Fn = struct {
base: Decl,
value: Val,
fn_proto: *ast.Node.FnProto,
// TODO https://github.com/ziglang/zig/issues/683 and then make this anonymous
pub const Val = union(enum).{
pub const Val = union(enum) {
Unresolved: void,
Fn: *Value.Fn,
FnProto: *Value.FnProto,
@@ -99,7 +99,7 @@ pub const Decl = struct.{
}
};
pub const CompTime = struct.{
pub const CompTime = struct {
base: Decl,
};
};
+23 -23
View File
@@ -7,55 +7,55 @@ const TokenIndex = std.zig.ast.TokenIndex;
const Compilation = @import("compilation.zig").Compilation;
const Scope = @import("scope.zig").Scope;
pub const Color = enum.{
pub const Color = enum {
Auto,
Off,
On,
};
pub const Span = struct.{
pub const Span = struct {
first: ast.TokenIndex,
last: ast.TokenIndex,
pub fn token(i: TokenIndex) Span {
return Span.{
return Span{
.first = i,
.last = i,
};
}
pub fn node(n: *ast.Node) Span {
return Span.{
return Span{
.first = n.firstToken(),
.last = n.lastToken(),
};
}
};
pub const Msg = struct.{
pub const Msg = struct {
text: []u8,
realpath: []u8,
data: Data,
const Data = union(enum).{
const Data = union(enum) {
Cli: Cli,
PathAndTree: PathAndTree,
ScopeAndComp: ScopeAndComp,
};
const PathAndTree = struct.{
const PathAndTree = struct {
span: Span,
tree: *ast.Tree,
allocator: *mem.Allocator,
};
const ScopeAndComp = struct.{
const ScopeAndComp = struct {
span: Span,
tree_scope: *Scope.AstTree,
compilation: *Compilation,
};
const Cli = struct.{
const Cli = struct {
allocator: *mem.Allocator,
};
@@ -118,11 +118,11 @@ pub const Msg = struct.{
const realpath = try mem.dupe(comp.gpa(), u8, tree_scope.root().realpath);
errdefer comp.gpa().free(realpath);
const msg = try comp.gpa().create(Msg.{
const msg = try comp.gpa().create(Msg{
.text = text,
.realpath = realpath,
.data = Data.{
.ScopeAndComp = ScopeAndComp.{
.data = Data{
.ScopeAndComp = ScopeAndComp{
.tree_scope = tree_scope,
.compilation = comp,
.span = span,
@@ -139,11 +139,11 @@ pub const Msg = struct.{
const realpath_copy = try mem.dupe(comp.gpa(), u8, realpath);
errdefer comp.gpa().free(realpath_copy);
const msg = try comp.gpa().create(Msg.{
const msg = try comp.gpa().create(Msg{
.text = text,
.realpath = realpath_copy,
.data = Data.{
.Cli = Cli.{ .allocator = comp.gpa() },
.data = Data{
.Cli = Cli{ .allocator = comp.gpa() },
},
});
return msg;
@@ -164,14 +164,14 @@ pub const Msg = struct.{
var out_stream = &std.io.BufferOutStream.init(&text_buf).stream;
try parse_error.render(&tree_scope.tree.tokens, out_stream);
const msg = try comp.gpa().create(Msg.{
const msg = try comp.gpa().create(Msg{
.text = undefined,
.realpath = realpath_copy,
.data = Data.{
.ScopeAndComp = ScopeAndComp.{
.data = Data{
.ScopeAndComp = ScopeAndComp{
.tree_scope = tree_scope,
.compilation = comp,
.span = Span.{
.span = Span{
.first = loc_token,
.last = loc_token,
},
@@ -203,14 +203,14 @@ pub const Msg = struct.{
var out_stream = &std.io.BufferOutStream.init(&text_buf).stream;
try parse_error.render(&tree.tokens, out_stream);
const msg = try allocator.create(Msg.{
const msg = try allocator.create(Msg{
.text = undefined,
.realpath = realpath_copy,
.data = Data.{
.PathAndTree = PathAndTree.{
.data = Data{
.PathAndTree = PathAndTree{
.allocator = allocator,
.tree = tree,
.span = Span.{
.span = Span{
.first = loc_token,
.last = loc_token,
},
+104 -104
View File
@@ -15,17 +15,17 @@ const ObjectFile = codegen.ObjectFile;
const Decl = @import("decl.zig").Decl;
const mem = std.mem;
pub const LVal = enum.{
pub const LVal = enum {
None,
Ptr,
};
pub const IrVal = union(enum).{
pub const IrVal = union(enum) {
Unknown,
KnownType: *Type,
KnownValue: *Value,
const Init = enum.{
const Init = enum {
Unknown,
NoReturn,
Void,
@@ -48,7 +48,7 @@ pub const IrVal = union(enum).{
}
};
pub const Inst = struct.{
pub const Inst = struct {
id: Id,
scope: *Scope,
debug_id: usize,
@@ -129,7 +129,7 @@ pub const Inst = struct.{
}
}
pub fn render(base: *Inst, ofile: *ObjectFile, fn_val: *Value.Fn) (error.{OutOfMemory}!?llvm.ValueRef) {
pub fn render(base: *Inst, ofile: *ObjectFile, fn_val: *Value.Fn) (error{OutOfMemory}!?llvm.ValueRef) {
switch (base.id) {
Id.Return => return @fieldParentPtr(Return, "base", base).render(ofile, fn_val),
Id.Const => return @fieldParentPtr(Const, "base", base).render(ofile, fn_val),
@@ -242,7 +242,7 @@ pub const Inst = struct.{
parent.child = self;
}
pub const Id = enum.{
pub const Id = enum {
Return,
Const,
Ref,
@@ -258,11 +258,11 @@ pub const Inst = struct.{
LoadPtr,
};
pub const Call = struct.{
pub const Call = struct {
base: Inst,
params: Params,
const Params = struct.{
const Params = struct {
fn_ref: *Inst,
args: []*Inst,
};
@@ -305,11 +305,11 @@ pub const Inst = struct.{
for (self.params.args) |arg, i| {
args[i] = try arg.getAsParam();
}
const new_inst = try ira.irb.build(Call, self.base.scope, self.base.span, Params.{
const new_inst = try ira.irb.build(Call, self.base.scope, self.base.span, Params{
.fn_ref = fn_ref,
.args = args,
});
new_inst.val = IrVal.{ .KnownType = fn_type.key.data.Normal.return_type };
new_inst.val = IrVal{ .KnownType = fn_type.key.data.Normal.return_type };
return new_inst;
}
@@ -336,11 +336,11 @@ pub const Inst = struct.{
}
};
pub const Const = struct.{
pub const Const = struct {
base: Inst,
params: Params,
const Params = struct.{};
const Params = struct {};
// Use Builder.buildConst* methods, or, after building a Const instruction,
// manually set the ir_val field.
@@ -355,8 +355,8 @@ pub const Inst = struct.{
}
pub fn analyze(self: *const Const, ira: *Analyze) !*Inst {
const new_inst = try ira.irb.build(Const, self.base.scope, self.base.span, Params.{});
new_inst.val = IrVal.{ .KnownValue = self.base.val.KnownValue.getRef() };
const new_inst = try ira.irb.build(Const, self.base.scope, self.base.span, Params{});
new_inst.val = IrVal{ .KnownValue = self.base.val.KnownValue.getRef() };
return new_inst;
}
@@ -365,11 +365,11 @@ pub const Inst = struct.{
}
};
pub const Return = struct.{
pub const Return = struct {
base: Inst,
params: Params,
const Params = struct.{
const Params = struct {
return_value: *Inst,
};
@@ -389,7 +389,7 @@ pub const Inst = struct.{
// TODO detect returning local variable address
return ira.irb.build(Return, self.base.scope, self.base.span, Params.{ .return_value = casted_value });
return ira.irb.build(Return, self.base.scope, self.base.span, Params{ .return_value = casted_value });
}
pub fn render(self: *Return, ofile: *ObjectFile, fn_val: *Value.Fn) !?llvm.ValueRef {
@@ -405,11 +405,11 @@ pub const Inst = struct.{
}
};
pub const Ref = struct.{
pub const Ref = struct {
base: Inst,
params: Params,
const Params = struct.{
const Params = struct {
target: *Inst,
mut: Type.Pointer.Mut,
volatility: Type.Pointer.Vol,
@@ -435,13 +435,13 @@ pub const Inst = struct.{
);
}
const new_inst = try ira.irb.build(Ref, self.base.scope, self.base.span, Params.{
const new_inst = try ira.irb.build(Ref, self.base.scope, self.base.span, Params{
.target = target,
.mut = self.params.mut,
.volatility = self.params.volatility,
});
const elem_type = target.getKnownType();
const ptr_type = try await (async Type.Pointer.get(ira.irb.comp, Type.Pointer.Key.{
const ptr_type = try await (async Type.Pointer.get(ira.irb.comp, Type.Pointer.Key{
.child_type = elem_type,
.mut = self.params.mut,
.vol = self.params.volatility,
@@ -450,17 +450,17 @@ pub const Inst = struct.{
}) catch unreachable);
// TODO: potentially set the hint that this is a stack pointer. But it might not be - this
// could be a ref of a global, for example
new_inst.val = IrVal.{ .KnownType = &ptr_type.base };
new_inst.val = IrVal{ .KnownType = &ptr_type.base };
// TODO potentially add an alloca entry here
return new_inst;
}
};
pub const DeclRef = struct.{
pub const DeclRef = struct {
base: Inst,
params: Params,
const Params = struct.{
const Params = struct {
decl: *Decl,
lval: LVal,
};
@@ -499,11 +499,11 @@ pub const Inst = struct.{
}
};
pub const VarPtr = struct.{
pub const VarPtr = struct {
base: Inst,
params: Params,
const Params = struct.{
const Params = struct {
var_scope: *Scope.Var,
};
@@ -525,16 +525,16 @@ pub const Inst = struct.{
Inst.VarPtr,
self.base.scope,
self.base.span,
Inst.VarPtr.Params.{ .var_scope = self.params.var_scope },
Inst.VarPtr.Params{ .var_scope = self.params.var_scope },
);
const ptr_type = try await (async Type.Pointer.get(ira.irb.comp, Type.Pointer.Key.{
const ptr_type = try await (async Type.Pointer.get(ira.irb.comp, Type.Pointer.Key{
.child_type = param.typ,
.mut = Type.Pointer.Mut.Const,
.vol = Type.Pointer.Vol.Non,
.size = Type.Pointer.Size.One,
.alignment = Type.Pointer.Align.Abi,
}) catch unreachable);
new_inst.val = IrVal.{ .KnownType = &ptr_type.base };
new_inst.val = IrVal{ .KnownType = &ptr_type.base };
return new_inst;
},
}
@@ -548,11 +548,11 @@ pub const Inst = struct.{
}
};
pub const LoadPtr = struct.{
pub const LoadPtr = struct {
base: Inst,
params: Params,
const Params = struct.{
const Params = struct {
target: *Inst,
};
@@ -590,9 +590,9 @@ pub const Inst = struct.{
Inst.LoadPtr,
self.base.scope,
self.base.span,
Inst.LoadPtr.Params.{ .target = target },
Inst.LoadPtr.Params{ .target = target },
);
new_inst.val = IrVal.{ .KnownType = ptr_type.key.child_type };
new_inst.val = IrVal{ .KnownType = ptr_type.key.child_type };
return new_inst;
}
@@ -626,11 +626,11 @@ pub const Inst = struct.{
}
};
pub const PtrType = struct.{
pub const PtrType = struct {
base: Inst,
params: Params,
const Params = struct.{
const Params = struct {
child_type: *Inst,
mut: Type.Pointer.Mut,
vol: Type.Pointer.Vol,
@@ -657,11 +657,11 @@ pub const Inst = struct.{
// }
const alignment = if (self.params.alignment) |align_inst| blk: {
const amt = try align_inst.getAsConstAlign(ira);
break :blk Type.Pointer.Align.{ .Override = amt };
break :blk Type.Pointer.Align{ .Override = amt };
} else blk: {
break :blk Type.Pointer.Align.{ .Abi = {} };
break :blk Type.Pointer.Align{ .Abi = {} };
};
const ptr_type = try await (async Type.Pointer.get(ira.irb.comp, Type.Pointer.Key.{
const ptr_type = try await (async Type.Pointer.get(ira.irb.comp, Type.Pointer.Key{
.child_type = child_type,
.mut = self.params.mut,
.vol = self.params.vol,
@@ -674,11 +674,11 @@ pub const Inst = struct.{
}
};
pub const DeclVar = struct.{
pub const DeclVar = struct {
base: Inst,
params: Params,
const Params = struct.{
const Params = struct {
variable: *Variable,
};
@@ -695,11 +695,11 @@ pub const Inst = struct.{
}
};
pub const CheckVoidStmt = struct.{
pub const CheckVoidStmt = struct {
base: Inst,
params: Params,
const Params = struct.{
const Params = struct {
target: *Inst,
};
@@ -723,11 +723,11 @@ pub const Inst = struct.{
}
};
pub const Phi = struct.{
pub const Phi = struct {
base: Inst,
params: Params,
const Params = struct.{
const Params = struct {
incoming_blocks: []*BasicBlock,
incoming_values: []*Inst,
};
@@ -745,11 +745,11 @@ pub const Inst = struct.{
}
};
pub const Br = struct.{
pub const Br = struct {
base: Inst,
params: Params,
const Params = struct.{
const Params = struct {
dest_block: *BasicBlock,
is_comptime: *Inst,
};
@@ -767,11 +767,11 @@ pub const Inst = struct.{
}
};
pub const CondBr = struct.{
pub const CondBr = struct {
base: Inst,
params: Params,
const Params = struct.{
const Params = struct {
condition: *Inst,
then_block: *BasicBlock,
else_block: *BasicBlock,
@@ -791,11 +791,11 @@ pub const Inst = struct.{
}
};
pub const AddImplicitReturnType = struct.{
pub const AddImplicitReturnType = struct {
base: Inst,
params: Params,
pub const Params = struct.{
pub const Params = struct {
target: *Inst,
};
@@ -816,11 +816,11 @@ pub const Inst = struct.{
}
};
pub const TestErr = struct.{
pub const TestErr = struct {
base: Inst,
params: Params,
pub const Params = struct.{
pub const Params = struct {
target: *Inst,
};
@@ -878,11 +878,11 @@ pub const Inst = struct.{
}
};
pub const TestCompTime = struct.{
pub const TestCompTime = struct {
base: Inst,
params: Params,
pub const Params = struct.{
pub const Params = struct {
target: *Inst,
};
@@ -902,11 +902,11 @@ pub const Inst = struct.{
}
};
pub const SaveErrRetAddr = struct.{
pub const SaveErrRetAddr = struct {
base: Inst,
params: Params,
const Params = struct.{};
const Params = struct {};
const ir_val_init = IrVal.Init.Unknown;
@@ -917,16 +917,16 @@ pub const Inst = struct.{
}
pub fn analyze(self: *const SaveErrRetAddr, ira: *Analyze) !*Inst {
return ira.irb.build(Inst.SaveErrRetAddr, self.base.scope, self.base.span, Params.{});
return ira.irb.build(Inst.SaveErrRetAddr, self.base.scope, self.base.span, Params{});
}
};
};
pub const Variable = struct.{
pub const Variable = struct {
child_scope: *Scope,
};
pub const BasicBlock = struct.{
pub const BasicBlock = struct {
ref_count: usize,
name_hint: [*]const u8, // must be a C string literal
debug_id: usize,
@@ -957,7 +957,7 @@ pub const BasicBlock = struct.{
};
/// Stuff that survives longer than Builder
pub const Code = struct.{
pub const Code = struct {
basic_block_list: std.ArrayList(*BasicBlock),
arena: std.heap.ArenaAllocator,
return_type: ?*Type,
@@ -1009,7 +1009,7 @@ pub const Code = struct.{
}
};
pub const Builder = struct.{
pub const Builder = struct {
comp: *Compilation,
code: *Code,
current_basic_block: *BasicBlock,
@@ -1021,7 +1021,7 @@ pub const Builder = struct.{
pub const Error = Analyze.Error;
pub fn init(comp: *Compilation, tree_scope: *Scope.AstTree, begin_scope: ?*Scope) !Builder {
const code = try comp.gpa().create(Code.{
const code = try comp.gpa().create(Code{
.basic_block_list = undefined,
.arena = std.heap.ArenaAllocator.init(comp.gpa()),
.return_type = null,
@@ -1030,7 +1030,7 @@ pub const Builder = struct.{
code.basic_block_list = std.ArrayList(*BasicBlock).init(&code.arena.allocator);
errdefer code.destroy(comp.gpa());
return Builder.{
return Builder{
.comp = comp,
.current_basic_block = undefined,
.code = code,
@@ -1052,7 +1052,7 @@ pub const Builder = struct.{
/// No need to clean up resources thanks to the arena allocator.
pub fn createBasicBlock(self: *Builder, scope: *Scope, name_hint: [*]const u8) !*BasicBlock {
const basic_block = try self.arena().create(BasicBlock.{
const basic_block = try self.arena().create(BasicBlock{
.ref_count = 0,
.name_hint = name_hint,
.debug_id = self.next_debug_id,
@@ -1208,7 +1208,7 @@ pub const Builder = struct.{
// }
//}
return irb.build(Inst.Call, scope, Span.token(suffix_op.rtoken), Inst.Call.Params.{
return irb.build(Inst.Call, scope, Span.token(suffix_op.rtoken), Inst.Call.Params{
.fn_ref = fn_ref,
.args = args,
});
@@ -1272,7 +1272,7 @@ pub const Builder = struct.{
// return irb->codegen->invalid_instruction;
//}
return irb.build(Inst.PtrType, scope, Span.node(&prefix_op.base), Inst.PtrType.Params.{
return irb.build(Inst.PtrType, scope, Span.node(&prefix_op.base), Inst.PtrType.Params{
.child_type = child_type,
.mut = Type.Pointer.Mut.Mut,
.vol = Type.Pointer.Vol.Non,
@@ -1336,8 +1336,8 @@ pub const Builder = struct.{
};
errdefer int_val.base.deref(irb.comp);
const inst = try irb.build(Inst.Const, scope, Span.token(int_lit.token), Inst.Const.Params.{});
inst.val = IrVal.{ .KnownValue = &int_val.base };
const inst = try irb.build(Inst.Const, scope, Span.token(int_lit.token), Inst.Const.Params{});
inst.val = IrVal{ .KnownValue = &int_val.base };
return inst;
}
@@ -1455,11 +1455,11 @@ pub const Builder = struct.{
_ = irb.build(
Inst.CheckVoidStmt,
child_scope,
Span.{
Span{
.first = statement_node.firstToken(),
.last = statement_node.lastToken(),
},
Inst.CheckVoidStmt.Params.{ .target = statement_value },
Inst.CheckVoidStmt.Params{ .target = statement_value },
);
}
}
@@ -1471,7 +1471,7 @@ pub const Builder = struct.{
}
try irb.setCursorAtEndAndAppendBlock(block_scope.end_block);
return irb.build(Inst.Phi, parent_scope, Span.token(block.rbrace), Inst.Phi.Params.{
return irb.build(Inst.Phi, parent_scope, Span.token(block.rbrace), Inst.Phi.Params{
.incoming_blocks = block_scope.incoming_blocks.toOwnedSlice(),
.incoming_values = block_scope.incoming_values.toOwnedSlice(),
});
@@ -1484,14 +1484,14 @@ pub const Builder = struct.{
);
_ = try await (async irb.genDefersForBlock(child_scope, outer_block_scope, Scope.Defer.Kind.ScopeExit) catch unreachable);
_ = try irb.buildGen(Inst.Br, parent_scope, Span.token(block.rbrace), Inst.Br.Params.{
_ = try irb.buildGen(Inst.Br, parent_scope, Span.token(block.rbrace), Inst.Br.Params{
.dest_block = block_scope.end_block,
.is_comptime = block_scope.is_comptime,
});
try irb.setCursorAtEndAndAppendBlock(block_scope.end_block);
return irb.build(Inst.Phi, parent_scope, Span.token(block.rbrace), Inst.Phi.Params.{
return irb.build(Inst.Phi, parent_scope, Span.token(block.rbrace), Inst.Phi.Params{
.incoming_blocks = block_scope.incoming_blocks.toOwnedSlice(),
.incoming_values = block_scope.incoming_values.toOwnedSlice(),
});
@@ -1553,12 +1553,12 @@ pub const Builder = struct.{
Inst.TestErr,
scope,
src_span,
Inst.TestErr.Params.{ .target = return_value },
Inst.TestErr.Params{ .target = return_value },
);
const err_is_comptime = try irb.buildTestCompTime(scope, src_span, is_err);
_ = try irb.buildGen(Inst.CondBr, scope, src_span, Inst.CondBr.Params.{
_ = try irb.buildGen(Inst.CondBr, scope, src_span, Inst.CondBr.Params{
.condition = is_err,
.then_block = err_block,
.else_block = ok_block,
@@ -1572,9 +1572,9 @@ pub const Builder = struct.{
_ = try await (async irb.genDefersForBlock(scope, outer_scope, Scope.Defer.Kind.ErrorExit) catch unreachable);
}
if (irb.comp.have_err_ret_tracing and !irb.isCompTime(scope)) {
_ = try irb.build(Inst.SaveErrRetAddr, scope, src_span, Inst.SaveErrRetAddr.Params.{});
_ = try irb.build(Inst.SaveErrRetAddr, scope, src_span, Inst.SaveErrRetAddr.Params{});
}
_ = try irb.build(Inst.Br, scope, src_span, Inst.Br.Params.{
_ = try irb.build(Inst.Br, scope, src_span, Inst.Br.Params{
.dest_block = ret_stmt_block,
.is_comptime = err_is_comptime,
});
@@ -1583,7 +1583,7 @@ pub const Builder = struct.{
if (have_err_defers) {
_ = try await (async irb.genDefersForBlock(scope, outer_scope, Scope.Defer.Kind.ScopeExit) catch unreachable);
}
_ = try irb.build(Inst.Br, scope, src_span, Inst.Br.Params.{
_ = try irb.build(Inst.Br, scope, src_span, Inst.Br.Params{
.dest_block = ret_stmt_block,
.is_comptime = err_is_comptime,
});
@@ -1631,17 +1631,17 @@ pub const Builder = struct.{
switch (await (async irb.findIdent(scope, name) catch unreachable)) {
Ident.Decl => |decl| {
return irb.build(Inst.DeclRef, scope, src_span, Inst.DeclRef.Params.{
return irb.build(Inst.DeclRef, scope, src_span, Inst.DeclRef.Params{
.decl = decl,
.lval = lval,
});
},
Ident.VarScope => |var_scope| {
const var_ptr = try irb.build(Inst.VarPtr, scope, src_span, Inst.VarPtr.Params.{ .var_scope = var_scope });
const var_ptr = try irb.build(Inst.VarPtr, scope, src_span, Inst.VarPtr.Params{ .var_scope = var_scope });
switch (lval) {
LVal.Ptr => return var_ptr,
LVal.None => {
return irb.build(Inst.LoadPtr, scope, src_span, Inst.LoadPtr.Params.{ .target = var_ptr });
return irb.build(Inst.LoadPtr, scope, src_span, Inst.LoadPtr.Params{ .target = var_ptr });
},
}
},
@@ -1661,13 +1661,13 @@ pub const Builder = struct.{
return error.SemanticAnalysisFailed;
}
const DeferCounts = struct.{
const DeferCounts = struct {
scope_exit: usize,
error_exit: usize,
};
fn countDefers(irb: *Builder, inner_scope: *Scope, outer_scope: *Scope) DeferCounts {
var result = DeferCounts.{ .scope_exit = 0, .error_exit = 0 };
var result = DeferCounts{ .scope_exit = 0, .error_exit = 0 };
var scope = inner_scope;
while (scope != outer_scope) {
@@ -1726,7 +1726,7 @@ pub const Builder = struct.{
Inst.CheckVoidStmt,
&defer_expr_scope.base,
Span.token(defer_expr_scope.expr_node.lastToken()),
Inst.CheckVoidStmt.Params.{ .target = instruction },
Inst.CheckVoidStmt.Params{ .target = instruction },
);
}
}
@@ -1753,7 +1753,7 @@ pub const Builder = struct.{
LVal.Ptr => {
// We needed a pointer to a value, but we got a value. So we create
// an instruction which just makes a const pointer of it.
return irb.build(Inst.Ref, scope, instruction.span, Inst.Ref.Params.{
return irb.build(Inst.Ref, scope, instruction.span, Inst.Ref.Params{
.target = instruction,
.mut = Type.Pointer.Mut.Const,
.volatility = Type.Pointer.Vol.Non,
@@ -1774,16 +1774,16 @@ pub const Builder = struct.{
params: I.Params,
is_generated: bool,
) !*Inst {
const inst = try self.arena().create(I.{
.base = Inst.{
const inst = try self.arena().create(I{
.base = Inst{
.id = Inst.typeToId(I),
.is_generated = is_generated,
.scope = scope,
.debug_id = self.next_debug_id,
.val = switch (I.ir_val_init) {
IrVal.Init.Unknown => IrVal.Unknown,
IrVal.Init.NoReturn => IrVal.{ .KnownValue = &Value.NoReturn.get(self.comp).base },
IrVal.Init.Void => IrVal.{ .KnownValue = &Value.Void.get(self.comp).base },
IrVal.Init.NoReturn => IrVal{ .KnownValue = &Value.NoReturn.get(self.comp).base },
IrVal.Init.Void => IrVal{ .KnownValue = &Value.Void.get(self.comp).base },
},
.ref_count = 0,
.span = span,
@@ -1852,20 +1852,20 @@ pub const Builder = struct.{
}
fn buildConstBool(self: *Builder, scope: *Scope, span: Span, x: bool) !*Inst {
const inst = try self.build(Inst.Const, scope, span, Inst.Const.Params.{});
inst.val = IrVal.{ .KnownValue = &Value.Bool.get(self.comp, x).base };
const inst = try self.build(Inst.Const, scope, span, Inst.Const.Params{});
inst.val = IrVal{ .KnownValue = &Value.Bool.get(self.comp, x).base };
return inst;
}
fn buildConstVoid(self: *Builder, scope: *Scope, span: Span, is_generated: bool) !*Inst {
const inst = try self.buildExtra(Inst.Const, scope, span, Inst.Const.Params.{}, is_generated);
inst.val = IrVal.{ .KnownValue = &Value.Void.get(self.comp).base };
const inst = try self.buildExtra(Inst.Const, scope, span, Inst.Const.Params{}, is_generated);
inst.val = IrVal{ .KnownValue = &Value.Void.get(self.comp).base };
return inst;
}
fn buildConstValue(self: *Builder, scope: *Scope, span: Span, v: *Value) !*Inst {
const inst = try self.build(Inst.Const, scope, span, Inst.Const.Params.{});
inst.val = IrVal.{ .KnownValue = v.getRef() };
const inst = try self.build(Inst.Const, scope, span, Inst.Const.Params{});
inst.val = IrVal{ .KnownValue = v.getRef() };
return inst;
}
@@ -1879,7 +1879,7 @@ pub const Builder = struct.{
Inst.TestCompTime,
scope,
span,
Inst.TestCompTime.Params.{ .target = target },
Inst.TestCompTime.Params{ .target = target },
);
}
}
@@ -1889,7 +1889,7 @@ pub const Builder = struct.{
Inst.AddImplicitReturnType,
scope,
span,
Inst.AddImplicitReturnType.Params.{ .target = result },
Inst.AddImplicitReturnType.Params{ .target = result },
);
if (!irb.is_async) {
@@ -1897,7 +1897,7 @@ pub const Builder = struct.{
Inst.Return,
scope,
span,
Inst.Return.Params.{ .return_value = result },
Inst.Return.Params{ .return_value = result },
is_gen,
);
}
@@ -1919,7 +1919,7 @@ pub const Builder = struct.{
//// the above blocks are rendered by ir_gen after the rest of codegen
}
const Ident = union(enum).{
const Ident = union(enum) {
NotFound,
Decl: *Decl,
VarScope: *Scope.Var,
@@ -1935,13 +1935,13 @@ pub const Builder = struct.{
const locked_table = await (async decls.table.acquireRead() catch unreachable);
defer locked_table.release();
if (locked_table.value.get(name)) |entry| {
return Ident.{ .Decl = entry.value };
return Ident{ .Decl = entry.value };
}
},
Scope.Id.Var => {
const var_scope = @fieldParentPtr(Scope.Var, "base", s);
if (mem.eql(u8, var_scope.name, name)) {
return Ident.{ .VarScope = var_scope };
return Ident{ .VarScope = var_scope };
}
},
else => {},
@@ -1951,7 +1951,7 @@ pub const Builder = struct.{
}
};
const Analyze = struct.{
const Analyze = struct {
irb: Builder,
old_bb_index: usize,
const_predecessor_bb: ?*BasicBlock,
@@ -1960,7 +1960,7 @@ const Analyze = struct.{
src_implicit_return_type_list: std.ArrayList(*Inst),
explicit_return_type: ?*Type,
pub const Error = error.{
pub const Error = error{
/// This is only for when we have already reported a compile error. It is the poison value.
SemanticAnalysisFailed,
@@ -1975,7 +1975,7 @@ const Analyze = struct.{
var irb = try Builder.init(comp, tree_scope, null);
errdefer irb.abort();
return Analyze.{
return Analyze{
.irb = irb,
.old_bb_index = 0,
.const_predecessor_bb = null,
+15 -15
View File
@@ -5,7 +5,7 @@ const Target = @import("target.zig").Target;
const c = @import("c.zig");
/// See the render function implementation for documentation of the fields.
pub const LibCInstallation = struct.{
pub const LibCInstallation = struct {
include_dir: []const u8,
lib_dir: ?[]const u8,
static_lib_dir: ?[]const u8,
@@ -13,7 +13,7 @@ pub const LibCInstallation = struct.{
kernel32_lib_dir: ?[]const u8,
dynamic_linker_path: ?[]const u8,
pub const FindError = error.{
pub const FindError = error{
OutOfMemory,
FileSystem,
UnableToSpawnCCompiler,
@@ -34,7 +34,7 @@ pub const LibCInstallation = struct.{
) !void {
self.initEmpty();
const keys = []const []const u8.{
const keys = []const []const u8{
"include_dir",
"lib_dir",
"static_lib_dir",
@@ -42,11 +42,11 @@ pub const LibCInstallation = struct.{
"kernel32_lib_dir",
"dynamic_linker_path",
};
const FoundKey = struct.{
const FoundKey = struct {
found: bool,
allocated: ?[]u8,
};
var found_keys = [1]FoundKey.{FoundKey.{ .found = false, .allocated = null }} ** keys.len;
var found_keys = [1]FoundKey{FoundKey{ .found = false, .allocated = null }} ** keys.len;
errdefer {
self.initEmpty();
for (found_keys) |found_key| {
@@ -182,7 +182,7 @@ pub const LibCInstallation = struct.{
async fn findNativeIncludeDirLinux(self: *LibCInstallation, loop: *event.Loop) !void {
const cc_exe = std.os.getEnvPosix("CC") orelse "cc";
const argv = []const []const u8.{
const argv = []const []const u8{
cc_exe,
"-E",
"-Wp,-v",
@@ -302,12 +302,12 @@ pub const LibCInstallation = struct.{
}
async fn findNativeDynamicLinker(self: *LibCInstallation, loop: *event.Loop) FindError!void {
var dyn_tests = []DynTest.{
DynTest.{
var dyn_tests = []DynTest{
DynTest{
.name = "ld-linux-x86-64.so.2",
.result = null,
},
DynTest.{
DynTest{
.name = "ld-musl-x86_64.so.1",
.result = null,
},
@@ -326,7 +326,7 @@ pub const LibCInstallation = struct.{
}
}
const DynTest = struct.{
const DynTest = struct {
name: []const u8,
result: ?[]const u8,
};
@@ -369,7 +369,7 @@ pub const LibCInstallation = struct.{
}
fn initEmpty(self: *LibCInstallation) void {
self.* = LibCInstallation.{
self.* = LibCInstallation{
.include_dir = ([*]const u8)(undefined)[0..0],
.lib_dir = null,
.static_lib_dir = null,
@@ -385,7 +385,7 @@ async fn ccPrintFileName(loop: *event.Loop, o_file: []const u8, want_dirname: bo
const cc_exe = std.os.getEnvPosix("CC") orelse "cc";
const arg1 = try std.fmt.allocPrint(loop.allocator, "-print-file-name={}", o_file);
defer loop.allocator.free(arg1);
const argv = []const []const u8.{ cc_exe, arg1 };
const argv = []const []const u8{ cc_exe, arg1 };
// TODO This simulates evented I/O for the child process exec
await (async loop.yield() catch unreachable);
@@ -421,7 +421,7 @@ async fn ccPrintFileName(loop: *event.Loop, o_file: []const u8, want_dirname: bo
}
}
const Search = struct.{
const Search = struct {
path: []const u8,
version: []const u8,
};
@@ -430,7 +430,7 @@ fn fillSearch(search_buf: *[2]Search, sdk: *c.ZigWindowsSDK) []Search {
var search_end: usize = 0;
if (sdk.path10_ptr) |path10_ptr| {
if (sdk.version10_ptr) |ver10_ptr| {
search_buf[search_end] = Search.{
search_buf[search_end] = Search{
.path = path10_ptr[0..sdk.path10_len],
.version = ver10_ptr[0..sdk.version10_len],
};
@@ -439,7 +439,7 @@ fn fillSearch(search_buf: *[2]Search, sdk: *c.ZigWindowsSDK) []Search {
}
if (sdk.path81_ptr) |path81_ptr| {
if (sdk.version81_ptr) |ver81_ptr| {
search_buf[search_end] = Search.{
search_buf[search_end] = Search{
.path = path81_ptr[0..sdk.path81_len],
.version = ver81_ptr[0..sdk.version81_len],
};
+6 -6
View File
@@ -8,13 +8,13 @@ const Target = @import("target.zig").Target;
const LibCInstallation = @import("libc_installation.zig").LibCInstallation;
const assert = std.debug.assert;
const Context = struct.{
const Context = struct {
comp: *Compilation,
arena: std.heap.ArenaAllocator,
args: std.ArrayList([*]const u8),
link_in_crt: bool,
link_err: error.{OutOfMemory}!void,
link_err: error{OutOfMemory}!void,
link_msg: std.Buffer,
libc: *LibCInstallation,
@@ -22,7 +22,7 @@ const Context = struct.{
};
pub async fn link(comp: *Compilation) !void {
var ctx = Context.{
var ctx = Context{
.comp = comp,
.arena = std.heap.ArenaAllocator.init(comp.gpa()),
.args = undefined,
@@ -648,13 +648,13 @@ fn addFnObjects(ctx: *Context) !void {
}
}
const DarwinPlatform = struct.{
const DarwinPlatform = struct {
kind: Kind,
major: u32,
minor: u32,
micro: u32,
const Kind = enum.{
const Kind = enum {
MacOS,
IPhoneOS,
IPhoneOSSimulator,
@@ -726,7 +726,7 @@ fn darwinGetReleaseVersion(str: []const u8, major: *u32, minor: *u32, micro: *u3
return error.InvalidDarwinVersionString;
var start_pos: usize = 0;
for ([]*u32.{ major, minor, micro }) |v| {
for ([]*u32{ major, minor, micro }) |v| {
const dot_pos = mem.indexOfScalarPos(u8, str, start_pos, '.');
const end_pos = dot_pos orelse str.len;
v.* = std.fmt.parseUnsigned(u32, str[start_pos..end_pos], 10) catch return error.InvalidDarwinVersionString;
+1 -1
View File
@@ -183,7 +183,7 @@ pub const X86StdcallCallConv = c.LLVMX86StdcallCallConv;
pub const X86FastcallCallConv = c.LLVMX86FastcallCallConv;
pub const CallConv = c.LLVMCallConv;
pub const FnInline = extern enum.{
pub const FnInline = extern enum {
Auto,
Always,
Never,
+59 -39
View File
@@ -43,9 +43,9 @@ const usage =
\\
;
const Command = struct.{
const Command = struct {
name: []const u8,
exec: fn (*Allocator, []const []const u8) error!void,
exec: fn (*Allocator, []const []const u8) anyerror!void,
};
pub fn main() !void {
@@ -72,46 +72,46 @@ pub fn main() !void {
os.exit(1);
}
const commands = []Command.{
Command.{
const commands = []Command{
Command{
.name = "build-exe",
.exec = cmdBuildExe,
},
Command.{
Command{
.name = "build-lib",
.exec = cmdBuildLib,
},
Command.{
Command{
.name = "build-obj",
.exec = cmdBuildObj,
},
Command.{
Command{
.name = "fmt",
.exec = cmdFmt,
},
Command.{
Command{
.name = "libc",
.exec = cmdLibC,
},
Command.{
Command{
.name = "targets",
.exec = cmdTargets,
},
Command.{
Command{
.name = "version",
.exec = cmdVersion,
},
Command.{
Command{
.name = "zen",
.exec = cmdZen,
},
// undocumented commands
Command.{
Command{
.name = "help",
.exec = cmdHelp,
},
Command.{
Command{
.name = "internal",
.exec = cmdInternal,
},
@@ -190,14 +190,14 @@ const usage_build_generic =
\\
;
const args_build_generic = []Flag.{
const args_build_generic = []Flag{
Flag.Bool("--help"),
Flag.Option("--color", []const []const u8.{
Flag.Option("--color", []const []const u8{
"auto",
"off",
"on",
}),
Flag.Option("--mode", []const []const u8.{
Flag.Option("--mode", []const []const u8{
"debug",
"release-fast",
"release-safe",
@@ -205,7 +205,7 @@ const args_build_generic = []Flag.{
}),
Flag.ArgMergeN("--assembly", 1),
Flag.Option("--emit", []const []const u8.{
Flag.Option("--emit", []const []const u8{
"asm",
"bin",
"llvm-ir",
@@ -456,10 +456,10 @@ fn buildOutputType(allocator: *Allocator, args: []const []const u8, out_type: Co
}
if (flags.single("mmacosx-version-min")) |ver| {
comp.darwin_version_min = Compilation.DarwinVersionMin.{ .MacOS = ver };
comp.darwin_version_min = Compilation.DarwinVersionMin{ .MacOS = ver };
}
if (flags.single("mios-version-min")) |ver| {
comp.darwin_version_min = Compilation.DarwinVersionMin.{ .Ios = ver };
comp.darwin_version_min = Compilation.DarwinVersionMin{ .Ios = ver };
}
comp.emit_file_type = emit_type;
@@ -514,18 +514,23 @@ const usage_fmt =
\\usage: zig fmt [file]...
\\
\\ Formats the input files and modifies them in-place.
\\ Arguments can be files or directories, which are searched
\\ recursively.
\\
\\Options:
\\ --help Print this help and exit
\\ --color [auto|off|on] Enable or disable colored error messages
\\ --stdin Format code from stdin
\\ --stdin Format code from stdin; output to stdout
\\ --check List non-conforming files and exit with an error
\\ if the list is non-empty
\\
\\
;
const args_fmt_spec = []Flag.{
const args_fmt_spec = []Flag{
Flag.Bool("--help"),
Flag.Option("--color", []const []const u8.{
Flag.Bool("--check"),
Flag.Option("--color", []const []const u8{
"auto",
"off",
"on",
@@ -533,7 +538,7 @@ const args_fmt_spec = []Flag.{
Flag.Bool("--stdin"),
};
const Fmt = struct.{
const Fmt = struct {
seen: event.Locked(SeenMap),
any_error: bool,
color: errmsg.Color,
@@ -640,6 +645,11 @@ fn cmdFmt(allocator: *Allocator, args: []const []const u8) !void {
if (tree.errors.len != 0) {
os.exit(1);
}
if (flags.present("check")) {
const anything_changed = try std.zig.render(allocator, io.null_out_stream, &tree);
const code = if (anything_changed) u8(1) else u8(0);
os.exit(code);
}
_ = try std.zig.render(allocator, stdout, &tree);
return;
@@ -675,7 +685,7 @@ async fn asyncFmtMainChecked(
result.* = await (async asyncFmtMain(loop, flags, color) catch unreachable);
}
const FmtError = error.{
const FmtError = error{
SystemResources,
OperationAborted,
IoPending,
@@ -704,16 +714,18 @@ async fn asyncFmtMain(
suspend {
resume @handle();
}
var fmt = Fmt.{
var fmt = Fmt{
.seen = event.Locked(Fmt.SeenMap).init(loop, Fmt.SeenMap.init(loop.allocator)),
.any_error = false,
.color = color,
.loop = loop,
};
const check_mode = flags.present("check");
var group = event.Group(FmtError!void).init(loop);
for (flags.positionals.toSliceConst()) |file_path| {
try group.call(fmtPath, &fmt, file_path);
try group.call(fmtPath, &fmt, file_path, check_mode);
}
try await (async group.wait() catch unreachable);
if (fmt.any_error) {
@@ -721,7 +733,7 @@ async fn asyncFmtMain(
}
}
async fn fmtPath(fmt: *Fmt, file_path_ref: []const u8) FmtError!void {
async fn fmtPath(fmt: *Fmt, file_path_ref: []const u8, check_mode: bool) FmtError!void {
const file_path = try std.mem.dupe(fmt.loop.allocator, u8, file_path_ref);
defer fmt.loop.allocator.free(file_path);
@@ -746,7 +758,7 @@ async fn fmtPath(fmt: *Fmt, file_path_ref: []const u8) FmtError!void {
while (try dir.next()) |entry| {
if (entry.kind == std.os.Dir.Entry.Kind.Directory or mem.endsWith(u8, entry.name, ".zig")) {
const full_path = try os.path.join(fmt.loop.allocator, file_path, entry.name);
try group.call(fmtPath, fmt, full_path);
try group.call(fmtPath, fmt, full_path, check_mode);
}
}
return await (async group.wait() catch unreachable);
@@ -779,14 +791,22 @@ async fn fmtPath(fmt: *Fmt, file_path_ref: []const u8) FmtError!void {
return;
}
// TODO make this evented
const baf = try io.BufferedAtomicFile.create(fmt.loop.allocator, file_path);
defer baf.destroy();
if (check_mode) {
const anything_changed = try std.zig.render(fmt.loop.allocator, io.null_out_stream, &tree);
if (anything_changed) {
try stderr.print("{}\n", file_path);
fmt.any_error = true;
}
} else {
// TODO make this evented
const baf = try io.BufferedAtomicFile.create(fmt.loop.allocator, file_path);
defer baf.destroy();
const anything_changed = try std.zig.render(fmt.loop.allocator, baf.stream(), &tree);
if (anything_changed) {
try stderr.print("{}\n", file_path);
try baf.finish();
const anything_changed = try std.zig.render(fmt.loop.allocator, baf.stream(), &tree);
if (anything_changed) {
try stderr.print("{}\n", file_path);
try baf.finish();
}
}
}
@@ -836,7 +856,7 @@ fn cmdVersion(allocator: *Allocator, args: []const []const u8) !void {
try stdout.print("{}\n", std.cstr.toSliceConst(c.ZIG_VERSION_STRING));
}
const args_test_spec = []Flag.{Flag.Bool("--help")};
const args_test_spec = []Flag{Flag.Bool("--help")};
fn cmdHelp(allocator: *Allocator, args: []const []const u8) !void {
try stdout.write(usage);
@@ -878,7 +898,7 @@ fn cmdInternal(allocator: *Allocator, args: []const []const u8) !void {
os.exit(1);
}
const sub_commands = []Command.{Command.{
const sub_commands = []Command{Command{
.name = "build-info",
.exec = cmdInternalBuildInfo,
}};
@@ -917,14 +937,14 @@ fn cmdInternalBuildInfo(allocator: *Allocator, args: []const []const u8) !void {
);
}
const CliPkg = struct.{
const CliPkg = struct {
name: []const u8,
path: []const u8,
children: ArrayList(*CliPkg),
parent: ?*CliPkg,
pub fn init(allocator: *mem.Allocator, name: []const u8, path: []const u8, parent: ?*CliPkg) !*CliPkg {
var pkg = try allocator.create(CliPkg.{
var pkg = try allocator.create(CliPkg{
.name = name,
.path = path,
.children = ArrayList(*CliPkg).init(allocator),
+2 -2
View File
@@ -3,7 +3,7 @@ const mem = std.mem;
const assert = std.debug.assert;
const Buffer = std.Buffer;
pub const Package = struct.{
pub const Package = struct {
root_src_dir: Buffer,
root_src_path: Buffer,
@@ -15,7 +15,7 @@ pub const Package = struct.{
/// makes internal copies of root_src_dir and root_src_path
/// allocator should be an arena allocator because Package never frees anything
pub fn create(allocator: *mem.Allocator, root_src_dir: []const u8, root_src_path: []const u8) !*Package {
return allocator.create(Package.{
return allocator.create(Package{
.root_src_dir = try Buffer.init(allocator, root_src_dir),
.root_src_path = try Buffer.init(allocator, root_src_path),
.table = Table.init(allocator),
+30 -30
View File
@@ -13,7 +13,7 @@ const assert = std.debug.assert;
const event = std.event;
const llvm = @import("llvm.zig");
pub const Scope = struct.{
pub const Scope = struct {
id: Id,
parent: ?*Scope,
ref_count: std.atomic.Int(usize),
@@ -92,7 +92,7 @@ pub const Scope = struct.{
}
fn init(base: *Scope, id: Id, parent: *Scope) void {
base.* = Scope.{
base.* = Scope{
.id = id,
.parent = parent,
.ref_count = std.atomic.Int(usize).init(1),
@@ -100,7 +100,7 @@ pub const Scope = struct.{
parent.ref();
}
pub const Id = enum.{
pub const Id = enum {
Root,
AstTree,
Decls,
@@ -112,7 +112,7 @@ pub const Scope = struct.{
Var,
};
pub const Root = struct.{
pub const Root = struct {
base: Scope,
realpath: []const u8,
decls: *Decls,
@@ -121,8 +121,8 @@ pub const Scope = struct.{
/// Takes ownership of realpath
pub fn create(comp: *Compilation, realpath: []u8) !*Root {
const self = try comp.gpa().createOne(Root);
self.* = Root.{
.base = Scope.{
self.* = Root{
.base = Scope{
.id = Id.Root,
.parent = null,
.ref_count = std.atomic.Int(usize).init(1),
@@ -143,7 +143,7 @@ pub const Scope = struct.{
}
};
pub const AstTree = struct.{
pub const AstTree = struct {
base: Scope,
tree: *ast.Tree,
@@ -151,7 +151,7 @@ pub const Scope = struct.{
/// Takes ownership of tree, will deinit and destroy when done.
pub fn create(comp: *Compilation, tree: *ast.Tree, root_scope: *Root) !*AstTree {
const self = try comp.gpa().createOne(AstTree);
self.* = AstTree.{
self.* = AstTree{
.base = undefined,
.tree = tree,
};
@@ -172,7 +172,7 @@ pub const Scope = struct.{
}
};
pub const Decls = struct.{
pub const Decls = struct {
base: Scope,
/// This table remains Write Locked when the names are incomplete or possibly outdated.
@@ -183,7 +183,7 @@ pub const Scope = struct.{
/// Creates a Decls scope with 1 reference
pub fn create(comp: *Compilation, parent: *Scope) !*Decls {
const self = try comp.gpa().createOne(Decls);
self.* = Decls.{
self.* = Decls{
.base = undefined,
.table = event.RwLocked(Decl.Table).init(comp.loop, Decl.Table.init(comp.gpa())),
};
@@ -197,7 +197,7 @@ pub const Scope = struct.{
}
};
pub const Block = struct.{
pub const Block = struct {
base: Scope,
incoming_values: std.ArrayList(*ir.Inst),
incoming_blocks: std.ArrayList(*ir.BasicBlock),
@@ -206,11 +206,11 @@ pub const Scope = struct.{
safety: Safety,
const Safety = union(enum).{
const Safety = union(enum) {
Auto,
Manual: Manual,
const Manual = struct.{
const Manual = struct {
/// the source span that disabled the safety value
span: Span,
@@ -236,7 +236,7 @@ pub const Scope = struct.{
/// Creates a Block scope with 1 reference
pub fn create(comp: *Compilation, parent: *Scope) !*Block {
const self = try comp.gpa().createOne(Block);
self.* = Block.{
self.* = Block{
.base = undefined,
.incoming_values = undefined,
.incoming_blocks = undefined,
@@ -253,7 +253,7 @@ pub const Scope = struct.{
}
};
pub const FnDef = struct.{
pub const FnDef = struct {
base: Scope,
/// This reference is not counted so that the scope can get destroyed with the function
@@ -263,7 +263,7 @@ pub const Scope = struct.{
/// Must set the fn_val later
pub fn create(comp: *Compilation, parent: *Scope) !*FnDef {
const self = try comp.gpa().createOne(FnDef);
self.* = FnDef.{
self.* = FnDef{
.base = undefined,
.fn_val = null,
};
@@ -276,13 +276,13 @@ pub const Scope = struct.{
}
};
pub const CompTime = struct.{
pub const CompTime = struct {
base: Scope,
/// Creates a CompTime scope with 1 reference
pub fn create(comp: *Compilation, parent: *Scope) !*CompTime {
const self = try comp.gpa().createOne(CompTime);
self.* = CompTime.{ .base = undefined };
self.* = CompTime{ .base = undefined };
self.base.init(Id.CompTime, parent);
return self;
}
@@ -292,12 +292,12 @@ pub const Scope = struct.{
}
};
pub const Defer = struct.{
pub const Defer = struct {
base: Scope,
defer_expr_scope: *DeferExpr,
kind: Kind,
pub const Kind = enum.{
pub const Kind = enum {
ScopeExit,
ErrorExit,
};
@@ -310,7 +310,7 @@ pub const Scope = struct.{
defer_expr_scope: *DeferExpr,
) !*Defer {
const self = try comp.gpa().createOne(Defer);
self.* = Defer.{
self.* = Defer{
.base = undefined,
.defer_expr_scope = defer_expr_scope,
.kind = kind,
@@ -326,7 +326,7 @@ pub const Scope = struct.{
}
};
pub const DeferExpr = struct.{
pub const DeferExpr = struct {
base: Scope,
expr_node: *ast.Node,
reported_err: bool,
@@ -334,7 +334,7 @@ pub const Scope = struct.{
/// Creates a DeferExpr scope with 1 reference
pub fn create(comp: *Compilation, parent: *Scope, expr_node: *ast.Node) !*DeferExpr {
const self = try comp.gpa().createOne(DeferExpr);
self.* = DeferExpr.{
self.* = DeferExpr{
.base = undefined,
.expr_node = expr_node,
.reported_err = false,
@@ -348,18 +348,18 @@ pub const Scope = struct.{
}
};
pub const Var = struct.{
pub const Var = struct {
base: Scope,
name: []const u8,
src_node: *ast.Node,
data: Data,
pub const Data = union(enum).{
pub const Data = union(enum) {
Param: Param,
Const: *Value,
};
pub const Param = struct.{
pub const Param = struct {
index: usize,
typ: *Type,
llvm_value: llvm.ValueRef,
@@ -374,8 +374,8 @@ pub const Scope = struct.{
param_type: *Type,
) !*Var {
const self = try create(comp, parent, name, src_node);
self.data = Data.{
.Param = Param.{
self.data = Data{
.Param = Param{
.index = param_index,
.typ = param_type,
.llvm_value = undefined,
@@ -392,14 +392,14 @@ pub const Scope = struct.{
value: *Value,
) !*Var {
const self = try create(comp, parent, name, src_node);
self.data = Data.{ .Const = value };
self.data = Data{ .Const = value };
value.ref();
return self;
}
fn create(comp: *Compilation, parent: *Scope, name: []const u8, src_node: *ast.Node) !*Var {
const self = try comp.gpa().createOne(Var);
self.* = Var.{
self.* = Var{
.base = undefined,
.name = name,
.src_node = src_node,
+3 -3
View File
@@ -3,17 +3,17 @@ const builtin = @import("builtin");
const llvm = @import("llvm.zig");
const CInt = @import("c_int.zig").CInt;
pub const FloatAbi = enum.{
pub const FloatAbi = enum {
Hard,
Soft,
SoftFp,
};
pub const Target = union(enum).{
pub const Target = union(enum) {
Native,
Cross: Cross,
pub const Cross = struct.{
pub const Cross = struct {
arch: builtin.Arch,
os: builtin.Os,
environ: builtin.Environ,
+6 -6
View File
@@ -23,18 +23,18 @@ test "stage2" {
const file1 = "1.zig";
const allocator = std.heap.c_allocator;
pub const TestContext = struct.{
pub const TestContext = struct {
loop: std.event.Loop,
zig_compiler: ZigCompiler,
zig_lib_dir: []u8,
file_index: std.atomic.Int(usize),
group: std.event.Group(error!void),
any_err: error!void,
group: std.event.Group(anyerror!void),
any_err: anyerror!void,
const tmp_dir_name = "stage2_test_tmp";
fn init(self: *TestContext) !void {
self.* = TestContext.{
self.* = TestContext{
.any_err = {},
.loop = undefined,
.zig_compiler = undefined,
@@ -49,7 +49,7 @@ pub const TestContext = struct.{
self.zig_compiler = try ZigCompiler.init(&self.loop);
errdefer self.zig_compiler.deinit();
self.group = std.event.Group(error!void).init(&self.loop);
self.group = std.event.Group(anyerror!void).init(&self.loop);
errdefer self.group.deinit();
self.zig_lib_dir = try introspect.resolveZigLibDir(allocator);
@@ -162,7 +162,7 @@ pub const TestContext = struct.{
switch (build_event) {
Compilation.Event.Ok => {
const argv = []const []const u8.{exe_file_2};
const argv = []const []const u8{exe_file_2};
// TODO use event loop
const child = try std.os.ChildProcess.exec(allocator, argv, null, null, 1024 * 1024);
switch (child.term) {
+55 -55
View File
@@ -8,13 +8,13 @@ const event = std.event;
const Allocator = std.mem.Allocator;
const assert = std.debug.assert;
pub const Type = struct.{
pub const Type = struct {
base: Value,
id: Id,
name: []const u8,
abi_alignment: AbiAlignment,
pub const AbiAlignment = event.Future(error.{OutOfMemory}!u32);
pub const AbiAlignment = event.Future(error{OutOfMemory}!u32);
pub const Id = builtin.TypeId;
@@ -51,7 +51,7 @@ pub const Type = struct.{
base: *Type,
allocator: *Allocator,
llvm_context: llvm.ContextRef,
) (error.{OutOfMemory}!llvm.TypeRef) {
) (error{OutOfMemory}!llvm.TypeRef) {
switch (base.id) {
Id.Struct => return @fieldParentPtr(Struct, "base", base).getLlvmType(allocator, llvm_context),
Id.Fn => return @fieldParentPtr(Fn, "base", base).getLlvmType(allocator, llvm_context),
@@ -162,8 +162,8 @@ pub const Type = struct.{
}
fn init(base: *Type, comp: *Compilation, id: Id, name: []const u8) void {
base.* = Type.{
.base = Value.{
base.* = Type{
.base = Value{
.id = Value.Id.Type,
.typ = &MetaType.get(comp).base,
.ref_count = std.atomic.Int(usize).init(1),
@@ -206,7 +206,7 @@ pub const Type = struct.{
return @intCast(u32, llvm.ABIAlignmentOfType(comp.target_data_ref, llvm_type));
}
pub const Struct = struct.{
pub const Struct = struct {
base: Type,
decls: *Scope.Decls,
@@ -219,47 +219,47 @@ pub const Type = struct.{
}
};
pub const Fn = struct.{
pub const Fn = struct {
base: Type,
key: Key,
non_key: NonKey,
garbage_node: std.atomic.Stack(*Fn).Node,
pub const Kind = enum.{
pub const Kind = enum {
Normal,
Generic,
};
pub const NonKey = union.{
pub const NonKey = union {
Normal: Normal,
Generic: void,
pub const Normal = struct.{
pub const Normal = struct {
variable_list: std.ArrayList(*Scope.Var),
};
};
pub const Key = struct.{
pub const Key = struct {
data: Data,
alignment: ?u32,
pub const Data = union(Kind).{
pub const Data = union(Kind) {
Generic: Generic,
Normal: Normal,
};
pub const Normal = struct.{
pub const Normal = struct {
params: []Param,
return_type: *Type,
is_var_args: bool,
cc: CallingConvention,
};
pub const Generic = struct.{
pub const Generic = struct {
param_count: usize,
cc: CC,
pub const CC = union(CallingConvention).{
pub const CC = union(CallingConvention) {
Auto,
C,
Cold,
@@ -362,7 +362,7 @@ pub const Type = struct.{
}
};
pub const CallingConvention = enum.{
pub const CallingConvention = enum {
Auto,
C,
Cold,
@@ -371,7 +371,7 @@ pub const Type = struct.{
Async,
};
pub const Param = struct.{
pub const Param = struct {
is_noalias: bool,
typ: *Type,
};
@@ -410,7 +410,7 @@ pub const Type = struct.{
errdefer key.deref(comp);
const self = try comp.gpa().createOne(Fn);
self.* = Fn.{
self.* = Fn{
.base = undefined,
.key = key,
.non_key = undefined,
@@ -425,7 +425,7 @@ pub const Type = struct.{
switch (key.data) {
Kind.Generic => |generic| {
self.non_key = NonKey.{ .Generic = {} };
self.non_key = NonKey{ .Generic = {} };
switch (generic.cc) {
CallingConvention.Async => |async_allocator_type| {
try name_stream.print("async<{}> ", async_allocator_type.name);
@@ -448,8 +448,8 @@ pub const Type = struct.{
try name_stream.write(" var");
},
Kind.Normal => |normal| {
self.non_key = NonKey.{
.Normal = NonKey.Normal.{ .variable_list = std.ArrayList(*Scope.Var).init(comp.gpa()) },
self.non_key = NonKey{
.Normal = NonKey.Normal{ .variable_list = std.ArrayList(*Scope.Var).init(comp.gpa()) },
};
const cc_str = ccFnTypeStr(normal.cc);
try name_stream.print("{}fn(", cc_str);
@@ -513,7 +513,7 @@ pub const Type = struct.{
}
};
pub const MetaType = struct.{
pub const MetaType = struct {
base: Type,
value: *Type,
@@ -528,7 +528,7 @@ pub const Type = struct.{
}
};
pub const Void = struct.{
pub const Void = struct {
base: Type,
/// Adds 1 reference to the resulting type
@@ -542,7 +542,7 @@ pub const Type = struct.{
}
};
pub const Bool = struct.{
pub const Bool = struct {
base: Type,
/// Adds 1 reference to the resulting type
@@ -560,7 +560,7 @@ pub const Type = struct.{
}
};
pub const NoReturn = struct.{
pub const NoReturn = struct {
base: Type,
/// Adds 1 reference to the resulting type
@@ -574,12 +574,12 @@ pub const Type = struct.{
}
};
pub const Int = struct.{
pub const Int = struct {
base: Type,
key: Key,
garbage_node: std.atomic.Stack(*Int).Node,
pub const Key = struct.{
pub const Key = struct {
bit_count: u32,
is_signed: bool,
@@ -611,7 +611,7 @@ pub const Type = struct.{
}
}
const self = try comp.gpa().create(Int.{
const self = try comp.gpa().create(Int{
.base = undefined,
.key = key,
.garbage_node = undefined,
@@ -634,7 +634,7 @@ pub const Type = struct.{
}
pub fn destroy(self: *Int, comp: *Compilation) void {
self.garbage_node = std.atomic.Stack(*Int).Node.{
self.garbage_node = std.atomic.Stack(*Int).Node{
.data = self,
.next = undefined,
};
@@ -658,7 +658,7 @@ pub const Type = struct.{
}
};
pub const Float = struct.{
pub const Float = struct {
base: Type,
pub fn destroy(self: *Float, comp: *Compilation) void {
@@ -669,12 +669,12 @@ pub const Type = struct.{
@panic("TODO");
}
};
pub const Pointer = struct.{
pub const Pointer = struct {
base: Type,
key: Key,
garbage_node: std.atomic.Stack(*Pointer).Node,
pub const Key = struct.{
pub const Key = struct {
child_type: *Type,
mut: Mut,
vol: Vol,
@@ -710,17 +710,17 @@ pub const Type = struct.{
}
};
pub const Mut = enum.{
pub const Mut = enum {
Mut,
Const,
};
pub const Vol = enum.{
pub const Vol = enum {
Non,
Volatile,
};
pub const Align = union(enum).{
pub const Align = union(enum) {
Abi,
Override: u32,
};
@@ -728,7 +728,7 @@ pub const Type = struct.{
pub const Size = builtin.TypeInfo.Pointer.Size;
pub fn destroy(self: *Pointer, comp: *Compilation) void {
self.garbage_node = std.atomic.Stack(*Pointer).Node.{
self.garbage_node = std.atomic.Stack(*Pointer).Node{
.data = self,
.next = undefined,
};
@@ -777,7 +777,7 @@ pub const Type = struct.{
}
}
const self = try comp.gpa().create(Pointer.{
const self = try comp.gpa().create(Pointer{
.base = undefined,
.key = normal_key,
.garbage_node = undefined,
@@ -835,12 +835,12 @@ pub const Type = struct.{
}
};
pub const Array = struct.{
pub const Array = struct {
base: Type,
key: Key,
garbage_node: std.atomic.Stack(*Array).Node,
pub const Key = struct.{
pub const Key = struct {
elem_type: *Type,
len: usize,
@@ -875,7 +875,7 @@ pub const Type = struct.{
}
}
const self = try comp.gpa().create(Array.{
const self = try comp.gpa().create(Array{
.base = undefined,
.key = key,
.garbage_node = undefined,
@@ -902,7 +902,7 @@ pub const Type = struct.{
}
};
pub const ComptimeFloat = struct.{
pub const ComptimeFloat = struct {
base: Type,
pub fn destroy(self: *ComptimeFloat, comp: *Compilation) void {
@@ -910,7 +910,7 @@ pub const Type = struct.{
}
};
pub const ComptimeInt = struct.{
pub const ComptimeInt = struct {
base: Type,
/// Adds 1 reference to the resulting type
@@ -924,7 +924,7 @@ pub const Type = struct.{
}
};
pub const Undefined = struct.{
pub const Undefined = struct {
base: Type,
pub fn destroy(self: *Undefined, comp: *Compilation) void {
@@ -932,7 +932,7 @@ pub const Type = struct.{
}
};
pub const Null = struct.{
pub const Null = struct {
base: Type,
pub fn destroy(self: *Null, comp: *Compilation) void {
@@ -940,7 +940,7 @@ pub const Type = struct.{
}
};
pub const Optional = struct.{
pub const Optional = struct {
base: Type,
pub fn destroy(self: *Optional, comp: *Compilation) void {
@@ -952,7 +952,7 @@ pub const Type = struct.{
}
};
pub const ErrorUnion = struct.{
pub const ErrorUnion = struct {
base: Type,
pub fn destroy(self: *ErrorUnion, comp: *Compilation) void {
@@ -964,7 +964,7 @@ pub const Type = struct.{
}
};
pub const ErrorSet = struct.{
pub const ErrorSet = struct {
base: Type,
pub fn destroy(self: *ErrorSet, comp: *Compilation) void {
@@ -976,7 +976,7 @@ pub const Type = struct.{
}
};
pub const Enum = struct.{
pub const Enum = struct {
base: Type,
pub fn destroy(self: *Enum, comp: *Compilation) void {
@@ -988,7 +988,7 @@ pub const Type = struct.{
}
};
pub const Union = struct.{
pub const Union = struct {
base: Type,
pub fn destroy(self: *Union, comp: *Compilation) void {
@@ -1000,7 +1000,7 @@ pub const Type = struct.{
}
};
pub const Namespace = struct.{
pub const Namespace = struct {
base: Type,
pub fn destroy(self: *Namespace, comp: *Compilation) void {
@@ -1008,7 +1008,7 @@ pub const Type = struct.{
}
};
pub const BoundFn = struct.{
pub const BoundFn = struct {
base: Type,
pub fn destroy(self: *BoundFn, comp: *Compilation) void {
@@ -1020,7 +1020,7 @@ pub const Type = struct.{
}
};
pub const ArgTuple = struct.{
pub const ArgTuple = struct {
base: Type,
pub fn destroy(self: *ArgTuple, comp: *Compilation) void {
@@ -1028,7 +1028,7 @@ pub const Type = struct.{
}
};
pub const Opaque = struct.{
pub const Opaque = struct {
base: Type,
pub fn destroy(self: *Opaque, comp: *Compilation) void {
@@ -1040,7 +1040,7 @@ pub const Type = struct.{
}
};
pub const Promise = struct.{
pub const Promise = struct {
base: Type,
pub fn destroy(self: *Promise, comp: *Compilation) void {
@@ -1074,7 +1074,7 @@ fn hashAny(x: var, comptime seed: u64) u32 {
builtin.TypeId.Enum => return hashAny(@enumToInt(x), seed),
builtin.TypeId.Bool => {
comptime var rng = comptime std.rand.DefaultPrng.init(seed);
const vals = comptime [2]u32.{ rng.random.scalar(u32), rng.random.scalar(u32) };
const vals = comptime [2]u32{ rng.random.scalar(u32), rng.random.scalar(u32) };
return vals[@boolToInt(x)];
},
builtin.TypeId.Optional => {
+41 -41
View File
@@ -9,7 +9,7 @@ const assert = std.debug.assert;
/// Values are ref-counted, heap-allocated, and copy-on-write
/// If there is only 1 ref then write need not copy
pub const Value = struct.{
pub const Value = struct {
id: Id,
typ: *Type,
ref_count: std.atomic.Int(usize),
@@ -57,7 +57,7 @@ pub const Value = struct.{
std.debug.warn("{}", @tagName(base.id));
}
pub fn getLlvmConst(base: *Value, ofile: *ObjectFile) (error.{OutOfMemory}!?llvm.ValueRef) {
pub fn getLlvmConst(base: *Value, ofile: *ObjectFile) (error{OutOfMemory}!?llvm.ValueRef) {
switch (base.id) {
Id.Type => unreachable,
Id.Fn => return @fieldParentPtr(Fn, "base", base).getLlvmConst(ofile),
@@ -71,7 +71,7 @@ pub const Value = struct.{
}
}
pub fn derefAndCopy(self: *Value, comp: *Compilation) (error.{OutOfMemory}!*Value) {
pub fn derefAndCopy(self: *Value, comp: *Compilation) (error{OutOfMemory}!*Value) {
if (self.ref_count.get() == 1) {
// ( ͡° ͜ʖ ͡°)
return self;
@@ -81,7 +81,7 @@ pub const Value = struct.{
return self.copy(comp);
}
pub fn copy(base: *Value, comp: *Compilation) (error.{OutOfMemory}!*Value) {
pub fn copy(base: *Value, comp: *Compilation) (error{OutOfMemory}!*Value) {
switch (base.id) {
Id.Type => unreachable,
Id.Fn => unreachable,
@@ -95,25 +95,25 @@ pub const Value = struct.{
}
}
pub const Parent = union(enum).{
pub const Parent = union(enum) {
None,
BaseStruct: BaseStruct,
BaseArray: BaseArray,
BaseUnion: *Value,
BaseScalar: *Value,
pub const BaseStruct = struct.{
pub const BaseStruct = struct {
val: *Value,
field_index: usize,
};
pub const BaseArray = struct.{
pub const BaseArray = struct {
val: *Value,
elem_index: usize,
};
};
pub const Id = enum.{
pub const Id = enum {
Type,
Fn,
Void,
@@ -127,7 +127,7 @@ pub const Value = struct.{
pub const Type = @import("type.zig").Type;
pub const FnProto = struct.{
pub const FnProto = struct {
base: Value,
/// The main external name that is used in the .o file.
@@ -135,8 +135,8 @@ pub const Value = struct.{
symbol_name: Buffer,
pub fn create(comp: *Compilation, fn_type: *Type.Fn, symbol_name: Buffer) !*FnProto {
const self = try comp.gpa().create(FnProto.{
.base = Value.{
const self = try comp.gpa().create(FnProto{
.base = Value{
.id = Value.Id.FnProto,
.typ = &fn_type.base,
.ref_count = std.atomic.Int(usize).init(1),
@@ -166,7 +166,7 @@ pub const Value = struct.{
}
};
pub const Fn = struct.{
pub const Fn = struct {
base: Value,
/// The main external name that is used in the .o file.
@@ -190,15 +190,15 @@ pub const Value = struct.{
/// Creates a Fn value with 1 ref
/// Takes ownership of symbol_name
pub fn create(comp: *Compilation, fn_type: *Type.Fn, fndef_scope: *Scope.FnDef, symbol_name: Buffer) !*Fn {
const link_set_node = try comp.gpa().create(Compilation.FnLinkSet.Node.{
const link_set_node = try comp.gpa().create(Compilation.FnLinkSet.Node{
.data = null,
.next = undefined,
.prev = undefined,
});
errdefer comp.gpa().destroy(link_set_node);
const self = try comp.gpa().create(Fn.{
.base = Value.{
const self = try comp.gpa().create(Fn{
.base = Value{
.id = Value.Id.Fn,
.typ = &fn_type.base,
.ref_count = std.atomic.Int(usize).init(1),
@@ -249,7 +249,7 @@ pub const Value = struct.{
}
};
pub const Void = struct.{
pub const Void = struct {
base: Value,
pub fn get(comp: *Compilation) *Void {
@@ -262,7 +262,7 @@ pub const Value = struct.{
}
};
pub const Bool = struct.{
pub const Bool = struct {
base: Value,
x: bool,
@@ -290,7 +290,7 @@ pub const Value = struct.{
}
};
pub const NoReturn = struct.{
pub const NoReturn = struct {
base: Value,
pub fn get(comp: *Compilation) *NoReturn {
@@ -303,18 +303,18 @@ pub const Value = struct.{
}
};
pub const Ptr = struct.{
pub const Ptr = struct {
base: Value,
special: Special,
mut: Mut,
pub const Mut = enum.{
pub const Mut = enum {
CompTimeConst,
CompTimeVar,
RunTime,
};
pub const Special = union(enum).{
pub const Special = union(enum) {
Scalar: *Value,
BaseArray: BaseArray,
BaseStruct: BaseStruct,
@@ -322,12 +322,12 @@ pub const Value = struct.{
Discard,
};
pub const BaseArray = struct.{
pub const BaseArray = struct {
val: *Value,
elem_index: usize,
};
pub const BaseStruct = struct.{
pub const BaseStruct = struct {
val: *Value,
field_index: usize,
};
@@ -343,7 +343,7 @@ pub const Value = struct.{
errdefer array_val.base.deref(comp);
const elem_type = array_val.base.typ.cast(Type.Array).?.key.elem_type;
const ptr_type = try await (async Type.Pointer.get(comp, Type.Pointer.Key.{
const ptr_type = try await (async Type.Pointer.get(comp, Type.Pointer.Key{
.child_type = elem_type,
.mut = mut,
.vol = Type.Pointer.Vol.Non,
@@ -353,14 +353,14 @@ pub const Value = struct.{
var ptr_type_consumed = false;
errdefer if (!ptr_type_consumed) ptr_type.base.base.deref(comp);
const self = try comp.gpa().create(Value.Ptr.{
.base = Value.{
const self = try comp.gpa().create(Value.Ptr{
.base = Value{
.id = Value.Id.Ptr,
.typ = &ptr_type.base,
.ref_count = std.atomic.Int(usize).init(1),
},
.special = Special.{
.BaseArray = BaseArray.{
.special = Special{
.BaseArray = BaseArray{
.val = &array_val.base,
.elem_index = 0,
},
@@ -387,7 +387,7 @@ pub const Value = struct.{
const array_llvm_value = (try base_array.val.getLlvmConst(ofile)).?;
const ptr_bit_count = ofile.comp.target_ptr_bits;
const usize_llvm_type = llvm.IntTypeInContext(ofile.context, ptr_bit_count) orelse return error.OutOfMemory;
const indices = []llvm.ValueRef.{
const indices = []llvm.ValueRef{
llvm.ConstNull(usize_llvm_type) orelse return error.OutOfMemory,
llvm.ConstInt(usize_llvm_type, base_array.elem_index, 0) orelse return error.OutOfMemory,
};
@@ -404,17 +404,17 @@ pub const Value = struct.{
}
};
pub const Array = struct.{
pub const Array = struct {
base: Value,
special: Special,
pub const Special = union(enum).{
pub const Special = union(enum) {
Undefined,
OwnedBuffer: []u8,
Explicit: Data,
};
pub const Data = struct.{
pub const Data = struct {
parent: Parent,
elements: []*Value,
};
@@ -424,19 +424,19 @@ pub const Value = struct.{
const u8_type = Type.Int.get_u8(comp);
defer u8_type.base.base.deref(comp);
const array_type = try await (async Type.Array.get(comp, Type.Array.Key.{
const array_type = try await (async Type.Array.get(comp, Type.Array.Key{
.elem_type = &u8_type.base,
.len = buffer.len,
}) catch unreachable);
errdefer array_type.base.base.deref(comp);
const self = try comp.gpa().create(Value.Array.{
.base = Value.{
const self = try comp.gpa().create(Value.Array{
.base = Value{
.id = Value.Id.Array,
.typ = &array_type.base,
.ref_count = std.atomic.Int(usize).init(1),
},
.special = Special.{ .OwnedBuffer = buffer },
.special = Special{ .OwnedBuffer = buffer },
});
errdefer comp.gpa().destroy(self);
@@ -504,13 +504,13 @@ pub const Value = struct.{
}
};
pub const Int = struct.{
pub const Int = struct {
base: Value,
big_int: std.math.big.Int,
pub fn createFromString(comp: *Compilation, typ: *Type, base: u8, value: []const u8) !*Int {
const self = try comp.gpa().create(Value.Int.{
.base = Value.{
const self = try comp.gpa().create(Value.Int{
.base = Value{
.id = Value.Id.Int,
.typ = typ,
.ref_count = std.atomic.Int(usize).init(1),
@@ -557,8 +557,8 @@ pub const Value = struct.{
old.base.typ.base.ref();
errdefer old.base.typ.base.deref(comp);
const new = try comp.gpa().create(Value.Int.{
.base = Value.{
const new = try comp.gpa().create(Value.Int{
.base = Value{
.id = Value.Id.Int,
.typ = old.base.typ,
.ref_count = std.atomic.Int(usize).init(1),
+1 -1
View File
@@ -1,4 +1,4 @@
pub const Visib = enum.{
pub const Visib = enum {
Private,
Pub,
};
+2 -16
View File
@@ -387,7 +387,6 @@ struct TypeUnionField {
};
enum NodeType {
NodeTypeRoot,
NodeTypeFnProto,
NodeTypeFnDef,
NodeTypeParamDecl,
@@ -443,10 +442,6 @@ enum NodeType {
NodeTypePromiseType,
};
struct AstNodeRoot {
ZigList<AstNode *> top_level_decls;
};
enum CallingConvention {
CallingConventionUnspecified,
CallingConventionC,
@@ -812,7 +807,7 @@ struct AstNodeContainerDecl {
ZigList<AstNode *> decls;
ContainerLayout layout;
AstNode *init_arg_expr; // enum(T), struct(endianness), or union(T), or union(enum(T))
bool auto_enum; // union(enum)
bool auto_enum, is_root; // union(enum)
};
struct AstNodeErrorSetDecl {
@@ -922,7 +917,6 @@ struct AstNode {
size_t column;
ImportTableEntry *owner;
union {
AstNodeRoot root;
AstNodeFnDef fn_def;
AstNodeFnProto fn_proto;
AstNodeParamDecl param_decl;
@@ -1222,7 +1216,6 @@ struct ZigType {
ZigLLVMDIType *di_type;
bool zero_bits; // this is denormalized data
bool is_copyable;
bool gen_h_loop_flag;
union {
@@ -1864,7 +1857,7 @@ struct Scope {
// This scope comes from global declarations or from
// declarations in a container declaration
// NodeTypeRoot, NodeTypeContainerDecl
// NodeTypeContainerDecl
struct ScopeDecls {
Scope base;
@@ -2078,7 +2071,6 @@ enum IrInstructionId {
IrInstructionIdCInclude,
IrInstructionIdCDefine,
IrInstructionIdCUndef,
IrInstructionIdArrayLen,
IrInstructionIdRef,
IrInstructionIdCompileErr,
IrInstructionIdCompileLog,
@@ -2591,12 +2583,6 @@ struct IrInstructionImport {
IrInstruction *name;
};
struct IrInstructionArrayLen {
IrInstruction base;
IrInstruction *array_value;
};
struct IrInstructionRef {
IrInstruction base;
+43 -61
View File
@@ -100,7 +100,7 @@ void init_scope(CodeGen *g, Scope *dest, ScopeId id, AstNode *source_node, Scope
}
ScopeDecls *create_decls_scope(CodeGen *g, AstNode *node, Scope *parent, ZigType *container_type, ImportTableEntry *import) {
assert(node == nullptr || node->type == NodeTypeRoot || node->type == NodeTypeContainerDecl || node->type == NodeTypeFnCallExpr);
assert(node == nullptr || node->type == NodeTypeContainerDecl || node->type == NodeTypeFnCallExpr);
ScopeDecls *scope = allocate<ScopeDecls>(1);
init_scope(g, &scope->base, ScopeIdDecls, node, parent);
scope->decl_table.init(4);
@@ -181,7 +181,6 @@ ScopeFnDef *create_fndef_scope(CodeGen *g, AstNode *node, Scope *parent, ZigFn *
}
Scope *create_comptime_scope(CodeGen *g, AstNode *node, Scope *parent) {
assert(node->type == NodeTypeCompTime || node->type == NodeTypeSwitchExpr);
ScopeCompTime *scope = allocate<ScopeCompTime>(1);
init_scope(g, &scope->base, ScopeIdCompTime, node, parent);
return &scope->base;
@@ -367,23 +366,6 @@ uint64_t type_size_bits(CodeGen *g, ZigType *type_entry) {
return LLVMSizeOfTypeInBits(g->target_data_ref, type_entry->type_ref);
}
Result<bool> type_is_copyable(CodeGen *g, ZigType *type_entry) {
Error err;
if ((err = type_resolve(g, type_entry, ResolveStatusZeroBitsKnown)))
return err;
if (!type_has_bits(type_entry))
return true;
if (!handle_is_ptr(type_entry))
return true;
if ((err = ensure_complete_type(g, type_entry)))
return err;
return type_entry->is_copyable;
}
static bool is_slice(ZigType *type) {
return type->id == ZigTypeIdStruct && type->data.structure.is_slice;
}
@@ -465,7 +447,6 @@ ZigType *get_pointer_to_type_extra(CodeGen *g, ZigType *child_type, bool is_cons
assert(type_is_resolved(child_type, ResolveStatusZeroBitsKnown));
ZigType *entry = new_type_table_entry(ZigTypeIdPointer);
entry->is_copyable = true;
const char *star_str = ptr_len == PtrLenSingle ? "*" : "[*]";
const char *const_str = is_const ? "const " : "";
@@ -581,7 +562,6 @@ ZigType *get_optional_type(CodeGen *g, ZigType *child_type) {
ZigType *entry = new_type_table_entry(ZigTypeIdOptional);
assert(child_type->type_ref || child_type->zero_bits);
entry->is_copyable = type_is_copyable(g, child_type).unwrap();
buf_resize(&entry->name, 0);
buf_appendf(&entry->name, "?%s", buf_ptr(&child_type->name));
@@ -671,7 +651,6 @@ ZigType *get_error_union_type(CodeGen *g, ZigType *err_set_type, ZigType *payloa
}
ZigType *entry = new_type_table_entry(ZigTypeIdErrorUnion);
entry->is_copyable = true;
assert(payload_type->di_type);
assert(type_is_complete(payload_type));
@@ -766,7 +745,6 @@ ZigType *get_array_type(CodeGen *g, ZigType *child_type, uint64_t array_size) {
ZigType *entry = new_type_table_entry(ZigTypeIdArray);
entry->zero_bits = (array_size == 0) || child_type->zero_bits;
entry->is_copyable = false;
buf_resize(&entry->name, 0);
buf_appendf(&entry->name, "[%" ZIG_PRI_u64 "]%s", array_size, buf_ptr(&child_type->name));
@@ -831,7 +809,6 @@ ZigType *get_slice_type(CodeGen *g, ZigType *ptr_type) {
}
ZigType *entry = new_type_table_entry(ZigTypeIdStruct);
entry->is_copyable = true;
// replace the & with [] to go from a ptr type name to a slice type name
buf_resize(&entry->name, 0);
@@ -986,7 +963,6 @@ ZigType *get_opaque_type(CodeGen *g, Scope *scope, AstNode *source_node, const c
ImportTableEntry *import = scope ? get_scope_import(scope) : nullptr;
unsigned line = source_node ? (unsigned)(source_node->line + 1) : 0;
entry->is_copyable = false;
entry->type_ref = LLVMInt8Type();
entry->di_type = ZigLLVMCreateDebugForwardDeclType(g->dbuilder,
ZigLLVMTag_DW_structure_type(), buf_ptr(&entry->name),
@@ -1005,7 +981,6 @@ ZigType *get_bound_fn_type(CodeGen *g, ZigFn *fn_entry) {
return fn_type->data.fn.bound_fn_parent;
ZigType *bound_fn_type = new_type_table_entry(ZigTypeIdBoundFn);
bound_fn_type->is_copyable = false;
bound_fn_type->data.bound_fn.fn_type = fn_type;
bound_fn_type->zero_bits = true;
@@ -1105,7 +1080,6 @@ ZigType *get_fn_type(CodeGen *g, FnTypeId *fn_type_id) {
}
ZigType *fn_type = new_type_table_entry(ZigTypeIdFn);
fn_type->is_copyable = true;
fn_type->data.fn.fn_type_id = *fn_type_id;
bool skip_debug_info = false;
@@ -1318,7 +1292,6 @@ ZigType *analyze_type_expr(CodeGen *g, Scope *scope, AstNode *node) {
ZigType *get_generic_fn_type(CodeGen *g, FnTypeId *fn_type_id) {
ZigType *fn_type = new_type_table_entry(ZigTypeIdFn);
fn_type->is_copyable = false;
buf_resize(&fn_type->name, 0);
if (fn_type->data.fn.fn_type_id.cc == CallingConventionAsync) {
const char *async_allocator_type_str = (fn_type->data.fn.fn_type_id.async_allocator_type == nullptr) ?
@@ -1526,7 +1499,6 @@ ZigType *get_auto_err_set_type(CodeGen *g, ZigFn *fn_entry) {
ZigType *err_set_type = new_type_table_entry(ZigTypeIdErrorSet);
buf_resize(&err_set_type->name, 0);
buf_appendf(&err_set_type->name, "@typeOf(%s).ReturnType.ErrorSet", buf_ptr(&fn_entry->symbol_name));
err_set_type->is_copyable = true;
err_set_type->type_ref = g->builtin_types.entry_global_error_set->type_ref;
err_set_type->di_type = g->builtin_types.entry_global_error_set->di_type;
err_set_type->data.error_set.err_count = 0;
@@ -2846,7 +2818,6 @@ static Error resolve_union_zero_bits(CodeGen *g, ZigType *union_type) {
tag_type = new_type_table_entry(ZigTypeIdEnum);
buf_resize(&tag_type->name, 0);
buf_appendf(&tag_type->name, "@TagType(%s)", buf_ptr(&union_type->name));
tag_type->is_copyable = true;
tag_type->type_ref = tag_int_type->type_ref;
tag_type->zero_bits = tag_int_type->zero_bits;
@@ -3366,10 +3337,10 @@ static void add_top_level_decl(CodeGen *g, ScopeDecls *decls_scope, Tld *tld) {
}
{
ZigType *type = get_primitive_type(g, tld->name);
if (type != nullptr) {
ZigType *type;
if (get_primitive_type(g, tld->name, &type) != ErrorPrimitiveTypeNotFound) {
add_node_error(g, tld->source_node,
buf_sprintf("declaration shadows type '%s'", buf_ptr(&type->name)));
buf_sprintf("declaration shadows primitive type '%s'", buf_ptr(tld->name)));
}
}
}
@@ -3428,9 +3399,9 @@ void update_compile_var(CodeGen *g, Buf *name, ConstExprValue *value) {
void scan_decls(CodeGen *g, ScopeDecls *decls_scope, AstNode *node) {
switch (node->type) {
case NodeTypeRoot:
for (size_t i = 0; i < node->data.root.top_level_decls.length; i += 1) {
AstNode *child = node->data.root.top_level_decls.at(i);
case NodeTypeContainerDecl:
for (size_t i = 0; i < node->data.container_decl.decls.length; i += 1) {
AstNode *child = node->data.container_decl.decls.at(i);
scan_decls(g, decls_scope, child);
}
break;
@@ -3477,7 +3448,6 @@ void scan_decls(CodeGen *g, ScopeDecls *decls_scope, AstNode *node) {
case NodeTypeCompTime:
preview_comptime_decl(g, node, decls_scope);
break;
case NodeTypeContainerDecl:
case NodeTypeParamDecl:
case NodeTypeReturnExpr:
case NodeTypeDefer:
@@ -3613,10 +3583,10 @@ ZigVar *add_variable(CodeGen *g, AstNode *source_node, Scope *parent_scope, Buf
add_error_note(g, msg, existing_var->decl_node, buf_sprintf("previous declaration is here"));
variable_entry->value->type = g->builtin_types.entry_invalid;
} else {
ZigType *type = get_primitive_type(g, name);
if (type != nullptr) {
ZigType *type;
if (get_primitive_type(g, name, &type) != ErrorPrimitiveTypeNotFound) {
add_node_error(g, source_node,
buf_sprintf("variable shadows type '%s'", buf_ptr(&type->name)));
buf_sprintf("variable shadows primitive type '%s'", buf_ptr(name)));
variable_entry->value->type = g->builtin_types.entry_invalid;
} else {
Scope *search_scope = nullptr;
@@ -4366,9 +4336,9 @@ ImportTableEntry *add_source_file(CodeGen *g, PackageTableEntry *package, Buf *r
import_entry->decls_scope = create_decls_scope(g, import_entry->root, nullptr, nullptr, import_entry);
assert(import_entry->root->type == NodeTypeRoot);
for (size_t decl_i = 0; decl_i < import_entry->root->data.root.top_level_decls.length; decl_i += 1) {
AstNode *top_level_decl = import_entry->root->data.root.top_level_decls.at(decl_i);
assert(import_entry->root->type == NodeTypeContainerDecl);
for (size_t decl_i = 0; decl_i < import_entry->root->data.container_decl.decls.length; decl_i += 1) {
AstNode *top_level_decl = import_entry->root->data.container_decl.decls.at(decl_i);
if (top_level_decl->type == NodeTypeFnDef) {
AstNode *proto_node = top_level_decl->data.fn_def.fn_proto;
@@ -4435,6 +4405,7 @@ void semantic_analyze(CodeGen *g) {
}
ZigType *get_int_type(CodeGen *g, bool is_signed, uint32_t size_in_bits) {
assert(size_in_bits <= 65535);
TypeId type_id = {};
type_id.id = ZigTypeIdInt;
type_id.data.integer.is_signed = is_signed;
@@ -4515,7 +4486,7 @@ static ZigWindowsSDK *get_windows_sdk(CodeGen *g) {
}
Buf *get_linux_libc_lib_path(const char *o_file) {
static Buf *get_linux_libc_lib_path(const char *o_file) {
const char *cc_exe = getenv("CC");
cc_exe = (cc_exe == nullptr) ? "cc" : cc_exe;
ZigList<const char *> args = {};
@@ -4523,7 +4494,7 @@ Buf *get_linux_libc_lib_path(const char *o_file) {
Termination term;
Buf *out_stderr = buf_alloc();
Buf *out_stdout = buf_alloc();
int err;
Error err;
if ((err = os_exec_process(cc_exe, args, &term, out_stderr, out_stdout))) {
zig_panic("unable to determine libc lib path: executing C compiler: %s", err_str(err));
}
@@ -4541,7 +4512,7 @@ Buf *get_linux_libc_lib_path(const char *o_file) {
return result;
}
Buf *get_linux_libc_include_path(void) {
static Buf *get_posix_libc_include_path(void) {
const char *cc_exe = getenv("CC");
cc_exe = (cc_exe == nullptr) ? "cc" : cc_exe;
ZigList<const char *> args = {};
@@ -4552,7 +4523,7 @@ Buf *get_linux_libc_include_path(void) {
Termination term;
Buf *out_stderr = buf_alloc();
Buf *out_stdout = buf_alloc();
int err;
Error err;
if ((err = os_exec_process(cc_exe, args, &term, out_stderr, out_stdout))) {
zig_panic("unable to determine libc include path: executing C compiler: %s", err_str(err));
}
@@ -4596,6 +4567,10 @@ Buf *get_linux_libc_include_path(void) {
void find_libc_include_path(CodeGen *g) {
if (g->libc_include_dir == nullptr) {
if (!g->is_native_target) {
fprintf(stderr, "Unable to determine libc include path. --libc-include-dir");
exit(1);
}
if (g->zig_target.os == OsWindows) {
ZigWindowsSDK *sdk = get_windows_sdk(g);
@@ -4604,13 +4579,13 @@ void find_libc_include_path(CodeGen *g) {
fprintf(stderr, "Unable to determine libc include path. --libc-include-dir");
exit(1);
}
} else if (g->zig_target.os == OsLinux) {
g->libc_include_dir = get_linux_libc_include_path();
} else if (g->zig_target.os == OsMacOSX) {
g->libc_include_dir = buf_create_from_str("/usr/include");
} else if (g->zig_target.os == OsLinux || g->zig_target.os == OsMacOSX) {
g->libc_include_dir = get_posix_libc_include_path();
} else {
// TODO find libc at runtime for other operating systems
zig_panic("Unable to determine libc include path.");
fprintf(stderr, "Unable to determine libc include path.\n"
"TODO: implement finding libc at runtime for other operating systems.\n"
"in the meantime, you can use as a workaround: --libc-include-dir\n");
exit(1);
}
}
assert(buf_len(g->libc_include_dir) != 0);
@@ -5977,8 +5952,8 @@ void render_const_value(CodeGen *g, Buf *buf, ConstExprValue *const_val) {
}
ZigType *make_int_type(CodeGen *g, bool is_signed, uint32_t size_in_bits) {
assert(size_in_bits <= 65535);
ZigType *entry = new_type_table_entry(ZigTypeIdInt);
entry->is_copyable = true;
entry->type_ref = (size_in_bits == 0) ? LLVMVoidType() : LLVMIntType(size_in_bits);
entry->zero_bits = (size_in_bits == 0);
@@ -6455,7 +6430,10 @@ bool fn_type_can_fail(FnTypeId *fn_type_id) {
return type_can_fail(fn_type_id->return_type) || fn_type_id->cc == CallingConventionAsync;
}
ZigType *get_primitive_type(CodeGen *g, Buf *name) {
// ErrorNone - result pointer has the type
// ErrorOverflow - an integer primitive type has too large a bit width
// ErrorPrimitiveTypeNotFound - result pointer unchanged
Error get_primitive_type(CodeGen *g, Buf *name, ZigType **result) {
if (buf_len(name) >= 2) {
uint8_t first_c = buf_ptr(name)[0];
if (first_c == 'i' || first_c == 'u') {
@@ -6466,18 +6444,22 @@ ZigType *get_primitive_type(CodeGen *g, Buf *name) {
}
}
bool is_signed = (first_c == 'i');
uint32_t bit_count = atoi(buf_ptr(name) + 1);
return get_int_type(g, is_signed, bit_count);
unsigned long int bit_count = strtoul(buf_ptr(name) + 1, nullptr, 10);
// strtoul returns ULONG_MAX on errors, so this comparison catches that as well.
if (bit_count >= 65536) return ErrorOverflow;
*result = get_int_type(g, is_signed, bit_count);
return ErrorNone;
}
}
not_integer:
auto primitive_table_entry = g->primitive_type_table.maybe_get(name);
if (primitive_table_entry != nullptr) {
return primitive_table_entry->value;
}
return nullptr;
if (primitive_table_entry == nullptr)
return ErrorPrimitiveTypeNotFound;
*result = primitive_table_entry->value;
return ErrorNone;
}
Error file_fetch(CodeGen *g, Buf *resolved_path, Buf *contents) {
+1 -3
View File
@@ -9,7 +9,6 @@
#define ZIG_ANALYZE_HPP
#include "all_types.hpp"
#include "result.hpp"
void semantic_analyze(CodeGen *g);
ErrorMsg *add_node_error(CodeGen *g, AstNode *node, Buf *msg);
@@ -180,7 +179,6 @@ ZigTypeId type_id_at_index(size_t index);
size_t type_id_len();
size_t type_id_index(ZigType *entry);
ZigType *get_generic_fn_type(CodeGen *g, FnTypeId *fn_type_id);
Result<bool> type_is_copyable(CodeGen *g, ZigType *type_entry);
LinkLib *create_link_lib(Buf *name);
LinkLib *add_link_lib(CodeGen *codegen, Buf *lib);
@@ -204,7 +202,7 @@ bool type_can_fail(ZigType *type_entry);
bool fn_eval_cacheable(Scope *scope, ZigType *return_type);
AstNode *type_decl_node(ZigType *type_entry);
ZigType *get_primitive_type(CodeGen *g, Buf *name);
Error get_primitive_type(CodeGen *g, Buf *name, ZigType **result);
bool calling_convention_allows_zig_types(CallingConvention cc);
const char *calling_convention_name(CallingConvention cc);
+76 -40
View File
@@ -143,8 +143,6 @@ const char *container_string(ContainerKind kind) {
static const char *node_type_str(NodeType node_type) {
switch (node_type) {
case NodeTypeRoot:
return "Root";
case NodeTypeFnDef:
return "FnDef";
case NodeTypeFnProto:
@@ -379,6 +377,38 @@ static void print_symbol(AstRender *ar, Buf *symbol) {
fprintf(ar->f, "@\"%s\"", buf_ptr(&escaped));
}
static bool statement_terminates_without_semicolon(AstNode *node) {
switch (node->type) {
case NodeTypeIfBoolExpr:
if (node->data.if_bool_expr.else_node)
return statement_terminates_without_semicolon(node->data.if_bool_expr.else_node);
return node->data.if_bool_expr.then_block->type == NodeTypeBlock;
case NodeTypeIfErrorExpr:
if (node->data.if_err_expr.else_node)
return statement_terminates_without_semicolon(node->data.if_err_expr.else_node);
return node->data.if_err_expr.then_node->type == NodeTypeBlock;
case NodeTypeTestExpr:
if (node->data.test_expr.else_node)
return statement_terminates_without_semicolon(node->data.test_expr.else_node);
return node->data.test_expr.then_node->type == NodeTypeBlock;
case NodeTypeWhileExpr:
return node->data.while_expr.body->type == NodeTypeBlock;
case NodeTypeForExpr:
return node->data.for_expr.body->type == NodeTypeBlock;
case NodeTypeCompTime:
return node->data.comptime_expr.expr->type == NodeTypeBlock;
case NodeTypeDefer:
return node->data.defer.expr->type == NodeTypeBlock;
case NodeTypeSuspend:
return node->data.suspend.block != nullptr && node->data.suspend.block->type == NodeTypeBlock;
case NodeTypeSwitchExpr:
case NodeTypeBlock:
return true;
default:
return false;
}
}
static void render_node_extra(AstRender *ar, AstNode *node, bool grouped);
static void render_node_grouped(AstRender *ar, AstNode *node) {
@@ -395,21 +425,6 @@ static void render_node_extra(AstRender *ar, AstNode *node, bool grouped) {
case NodeTypeSwitchRange:
case NodeTypeStructValueField:
zig_unreachable();
case NodeTypeRoot:
for (size_t i = 0; i < node->data.root.top_level_decls.length; i += 1) {
AstNode *child = node->data.root.top_level_decls.at(i);
print_indent(ar);
render_node_grouped(ar, child);
if (child->type == NodeTypeUse ||
child->type == NodeTypeVariableDeclaration ||
child->type == NodeTypeFnProto)
{
fprintf(ar->f, ";");
}
fprintf(ar->f, "\n");
}
break;
case NodeTypeFnProto:
{
const char *pub_str = visib_mod_string(node->data.fn_proto.visib_mod);
@@ -698,7 +713,11 @@ static void render_node_extra(AstRender *ar, AstNode *node, bool grouped) {
{
AstNode *lhs = node->data.field_access_expr.struct_expr;
Buf *rhs = node->data.field_access_expr.field_name;
render_node_ungrouped(ar, lhs);
if (lhs->type == NodeTypeErrorType) {
fprintf(ar->f, "error");
} else {
render_node_ungrouped(ar, lhs);
}
fprintf(ar->f, ".");
print_symbol(ar, rhs);
break;
@@ -722,23 +741,25 @@ static void render_node_extra(AstRender *ar, AstNode *node, bool grouped) {
break;
case NodeTypeContainerDecl:
{
const char *layout_str = layout_string(node->data.container_decl.layout);
const char *container_str = container_string(node->data.container_decl.kind);
fprintf(ar->f, "%s%s", layout_str, container_str);
if (node->data.container_decl.auto_enum) {
fprintf(ar->f, "(enum");
}
if (node->data.container_decl.init_arg_expr != nullptr) {
fprintf(ar->f, "(");
render_node_grouped(ar, node->data.container_decl.init_arg_expr);
fprintf(ar->f, ")");
}
if (node->data.container_decl.auto_enum) {
fprintf(ar->f, ")");
}
if (!node->data.container_decl.is_root) {
const char *layout_str = layout_string(node->data.container_decl.layout);
const char *container_str = container_string(node->data.container_decl.kind);
fprintf(ar->f, "%s%s", layout_str, container_str);
if (node->data.container_decl.auto_enum) {
fprintf(ar->f, "(enum");
}
if (node->data.container_decl.init_arg_expr != nullptr) {
fprintf(ar->f, "(");
render_node_grouped(ar, node->data.container_decl.init_arg_expr);
fprintf(ar->f, ")");
}
if (node->data.container_decl.auto_enum) {
fprintf(ar->f, ")");
}
fprintf(ar->f, ".{\n");
ar->indent += ar->indent_size;
fprintf(ar->f, " {\n");
ar->indent += ar->indent_size;
}
for (size_t field_i = 0; field_i < node->data.container_decl.fields.length; field_i += 1) {
AstNode *field_node = node->data.container_decl.fields.at(field_i);
assert(field_node->type == NodeTypeStructField);
@@ -755,18 +776,33 @@ static void render_node_extra(AstRender *ar, AstNode *node, bool grouped) {
fprintf(ar->f, ",\n");
}
ar->indent -= ar->indent_size;
print_indent(ar);
fprintf(ar->f, "}");
for (size_t decl_i = 0; decl_i < node->data.container_decl.decls.length; decl_i += 1) {
AstNode *decls_node = node->data.container_decl.decls.at(decl_i);
render_node_grouped(ar, decls_node);
if (decls_node->type == NodeTypeUse ||
decls_node->type == NodeTypeVariableDeclaration ||
decls_node->type == NodeTypeFnProto)
{
fprintf(ar->f, ";");
}
fprintf(ar->f, "\n");
}
if (!node->data.container_decl.is_root) {
ar->indent -= ar->indent_size;
print_indent(ar);
fprintf(ar->f, "}");
}
break;
}
case NodeTypeContainerInitExpr:
render_node_ungrouped(ar, node->data.container_init_expr.type);
if (node->data.container_init_expr.kind == ContainerInitKindStruct) {
fprintf(ar->f, ".{\n");
fprintf(ar->f, "{\n");
ar->indent += ar->indent_size;
} else {
fprintf(ar->f, ".{");
fprintf(ar->f, "{");
}
for (size_t i = 0; i < node->data.container_init_expr.entries.length; i += 1) {
AstNode *entry = node->data.container_init_expr.entries.at(i);
@@ -812,7 +848,7 @@ static void render_node_extra(AstRender *ar, AstNode *node, bool grouped) {
break;
}
case NodeTypeErrorType:
fprintf(ar->f, "error");
fprintf(ar->f, "anyerror");
break;
case NodeTypeAsmExpr:
{
+1 -1
View File
@@ -294,7 +294,7 @@ Error cache_hit(CacheHash *ch, Buf *out_digest) {
chf = &ch->files.at(file_i);
} else if (any_file_changed) {
// cache miss.
// keep the the manifest file open with the rw lock
// keep the manifest file open with the rw lock
// reset the hash
rc = blake2b_init(&ch->blake, 48);
assert(rc == 0);
+42 -43
View File
@@ -5102,7 +5102,6 @@ static LLVMValueRef ir_render_instruction(CodeGen *g, IrExecutable *executable,
case IrInstructionIdContainerInitFields:
case IrInstructionIdCompileErr:
case IrInstructionIdCompileLog:
case IrInstructionIdArrayLen:
case IrInstructionIdImport:
case IrInstructionIdCImport:
case IrInstructionIdCInclude:
@@ -6603,7 +6602,7 @@ static void define_builtin_types(CodeGen *g) {
{
ZigType *entry = new_type_table_entry(ZigTypeIdErrorSet);
buf_init_from_str(&entry->name, "error");
buf_init_from_str(&entry->name, "anyerror");
entry->data.error_set.err_count = UINT32_MAX;
// TODO allow overriding this type and keep track of max value and emit an
@@ -6614,7 +6613,7 @@ static void define_builtin_types(CodeGen *g) {
entry->type_ref = g->err_tag_type->type_ref;
entry->di_type = ZigLLVMCreateReplaceableCompositeType(g->dbuilder,
ZigLLVMTag_DW_enumeration_type(), "error",
ZigLLVMTag_DW_enumeration_type(), "anyerror",
ZigLLVMCompileUnitToScope(g->compile_unit), nullptr, 0);
// reserve index 0 to indicate no error
@@ -6746,14 +6745,14 @@ Buf *codegen_generate_builtin_source(CodeGen *g) {
// Modifications to this struct must be coordinated with code that does anything with
// g->stack_trace_type. There are hard-coded references to the field indexes.
buf_append_str(contents,
"pub const StackTrace = struct.{\n"
"pub const StackTrace = struct {\n"
" index: usize,\n"
" instruction_addresses: []usize,\n"
"};\n\n");
const char *cur_os = nullptr;
{
buf_appendf(contents, "pub const Os = enum.{\n");
buf_appendf(contents, "pub const Os = enum {\n");
uint32_t field_count = (uint32_t)target_os_count();
for (uint32_t i = 0; i < field_count; i += 1) {
Os os_type = get_target_os(i);
@@ -6771,7 +6770,7 @@ Buf *codegen_generate_builtin_source(CodeGen *g) {
const char *cur_arch = nullptr;
{
buf_appendf(contents, "pub const Arch = enum.{\n");
buf_appendf(contents, "pub const Arch = enum {\n");
uint32_t field_count = (uint32_t)target_arch_count();
for (uint32_t i = 0; i < field_count; i += 1) {
const ArchType *arch_type = get_target_arch(i);
@@ -6795,7 +6794,7 @@ Buf *codegen_generate_builtin_source(CodeGen *g) {
const char *cur_environ = nullptr;
{
buf_appendf(contents, "pub const Environ = enum.{\n");
buf_appendf(contents, "pub const Environ = enum {\n");
uint32_t field_count = (uint32_t)target_environ_count();
for (uint32_t i = 0; i < field_count; i += 1) {
ZigLLVM_EnvironmentType environ_type = get_target_environ(i);
@@ -6813,7 +6812,7 @@ Buf *codegen_generate_builtin_source(CodeGen *g) {
const char *cur_obj_fmt = nullptr;
{
buf_appendf(contents, "pub const ObjectFormat = enum.{\n");
buf_appendf(contents, "pub const ObjectFormat = enum {\n");
uint32_t field_count = (uint32_t)target_oformat_count();
for (uint32_t i = 0; i < field_count; i += 1) {
ZigLLVM_ObjectFormatType oformat = get_target_oformat(i);
@@ -6831,7 +6830,7 @@ Buf *codegen_generate_builtin_source(CodeGen *g) {
assert(cur_obj_fmt != nullptr);
{
buf_appendf(contents, "pub const GlobalLinkage = enum.{\n");
buf_appendf(contents, "pub const GlobalLinkage = enum {\n");
uint32_t field_count = array_length(global_linkage_values);
for (uint32_t i = 0; i < field_count; i += 1) {
const GlobalLinkageValue *value = &global_linkage_values[i];
@@ -6841,7 +6840,7 @@ Buf *codegen_generate_builtin_source(CodeGen *g) {
}
{
buf_appendf(contents,
"pub const AtomicOrder = enum.{\n"
"pub const AtomicOrder = enum {\n"
" Unordered,\n"
" Monotonic,\n"
" Acquire,\n"
@@ -6852,7 +6851,7 @@ Buf *codegen_generate_builtin_source(CodeGen *g) {
}
{
buf_appendf(contents,
"pub const AtomicRmwOp = enum.{\n"
"pub const AtomicRmwOp = enum {\n"
" Xchg,\n"
" Add,\n"
" Sub,\n"
@@ -6866,7 +6865,7 @@ Buf *codegen_generate_builtin_source(CodeGen *g) {
}
{
buf_appendf(contents,
"pub const Mode = enum.{\n"
"pub const Mode = enum {\n"
" Debug,\n"
" ReleaseSafe,\n"
" ReleaseFast,\n"
@@ -6874,7 +6873,7 @@ Buf *codegen_generate_builtin_source(CodeGen *g) {
"};\n\n");
}
{
buf_appendf(contents, "pub const TypeId = enum.{\n");
buf_appendf(contents, "pub const TypeId = enum {\n");
size_t field_count = type_id_len();
for (size_t i = 0; i < field_count; i += 1) {
const ZigTypeId id = type_id_at_index(i);
@@ -6884,7 +6883,7 @@ Buf *codegen_generate_builtin_source(CodeGen *g) {
}
{
buf_appendf(contents,
"pub const TypeInfo = union(TypeId).{\n"
"pub const TypeInfo = union(TypeId) {\n"
" Type: void,\n"
" Void: void,\n"
" Bool: void,\n"
@@ -6910,96 +6909,96 @@ Buf *codegen_generate_builtin_source(CodeGen *g) {
" Opaque: void,\n"
" Promise: Promise,\n"
"\n\n"
" pub const Int = struct.{\n"
" pub const Int = struct {\n"
" is_signed: bool,\n"
" bits: u8,\n"
" };\n"
"\n"
" pub const Float = struct.{\n"
" pub const Float = struct {\n"
" bits: u8,\n"
" };\n"
"\n"
" pub const Pointer = struct.{\n"
" pub const Pointer = struct {\n"
" size: Size,\n"
" is_const: bool,\n"
" is_volatile: bool,\n"
" alignment: u32,\n"
" child: type,\n"
"\n"
" pub const Size = enum.{\n"
" pub const Size = enum {\n"
" One,\n"
" Many,\n"
" Slice,\n"
" };\n"
" };\n"
"\n"
" pub const Array = struct.{\n"
" pub const Array = struct {\n"
" len: usize,\n"
" child: type,\n"
" };\n"
"\n"
" pub const ContainerLayout = enum.{\n"
" pub const ContainerLayout = enum {\n"
" Auto,\n"
" Extern,\n"
" Packed,\n"
" };\n"
"\n"
" pub const StructField = struct.{\n"
" pub const StructField = struct {\n"
" name: []const u8,\n"
" offset: ?usize,\n"
" field_type: type,\n"
" };\n"
"\n"
" pub const Struct = struct.{\n"
" pub const Struct = struct {\n"
" layout: ContainerLayout,\n"
" fields: []StructField,\n"
" defs: []Definition,\n"
" };\n"
"\n"
" pub const Optional = struct.{\n"
" pub const Optional = struct {\n"
" child: type,\n"
" };\n"
"\n"
" pub const ErrorUnion = struct.{\n"
" pub const ErrorUnion = struct {\n"
" error_set: type,\n"
" payload: type,\n"
" };\n"
"\n"
" pub const Error = struct.{\n"
" pub const Error = struct {\n"
" name: []const u8,\n"
" value: usize,\n"
" };\n"
"\n"
" pub const ErrorSet = struct.{\n"
" pub const ErrorSet = struct {\n"
" errors: []Error,\n"
" };\n"
"\n"
" pub const EnumField = struct.{\n"
" pub const EnumField = struct {\n"
" name: []const u8,\n"
" value: usize,\n"
" };\n"
"\n"
" pub const Enum = struct.{\n"
" pub const Enum = struct {\n"
" layout: ContainerLayout,\n"
" tag_type: type,\n"
" fields: []EnumField,\n"
" defs: []Definition,\n"
" };\n"
"\n"
" pub const UnionField = struct.{\n"
" pub const UnionField = struct {\n"
" name: []const u8,\n"
" enum_field: ?EnumField,\n"
" field_type: type,\n"
" };\n"
"\n"
" pub const Union = struct.{\n"
" pub const Union = struct {\n"
" layout: ContainerLayout,\n"
" tag_type: ?type,\n"
" fields: []UnionField,\n"
" defs: []Definition,\n"
" };\n"
"\n"
" pub const CallingConvention = enum.{\n"
" pub const CallingConvention = enum {\n"
" Unspecified,\n"
" C,\n"
" Cold,\n"
@@ -7008,13 +7007,13 @@ Buf *codegen_generate_builtin_source(CodeGen *g) {
" Async,\n"
" };\n"
"\n"
" pub const FnArg = struct.{\n"
" pub const FnArg = struct {\n"
" is_generic: bool,\n"
" is_noalias: bool,\n"
" arg_type: ?type,\n"
" };\n"
"\n"
" pub const Fn = struct.{\n"
" pub const Fn = struct {\n"
" calling_convention: CallingConvention,\n"
" is_generic: bool,\n"
" is_var_args: bool,\n"
@@ -7023,21 +7022,21 @@ Buf *codegen_generate_builtin_source(CodeGen *g) {
" args: []FnArg,\n"
" };\n"
"\n"
" pub const Promise = struct.{\n"
" pub const Promise = struct {\n"
" child: ?type,\n"
" };\n"
"\n"
" pub const Definition = struct.{\n"
" pub const Definition = struct {\n"
" name: []const u8,\n"
" is_pub: bool,\n"
" data: Data,\n"
"\n"
" pub const Data = union(enum).{\n"
" pub const Data = union(enum) {\n"
" Type: type,\n"
" Var: type,\n"
" Fn: FnDef,\n"
"\n"
" pub const FnDef = struct.{\n"
" pub const FnDef = struct {\n"
" fn_type: type,\n"
" inline_type: Inline,\n"
" calling_convention: CallingConvention,\n"
@@ -7048,7 +7047,7 @@ Buf *codegen_generate_builtin_source(CodeGen *g) {
" return_type: type,\n"
" arg_names: [][] const u8,\n"
"\n"
" pub const Inline = enum.{\n"
" pub const Inline = enum {\n"
" Auto,\n"
" Always,\n"
" Never,\n"
@@ -7074,7 +7073,7 @@ Buf *codegen_generate_builtin_source(CodeGen *g) {
}
{
buf_appendf(contents,
"pub const FloatMode = enum.{\n"
"pub const FloatMode = enum {\n"
" Strict,\n"
" Optimized,\n"
"};\n\n");
@@ -7083,7 +7082,7 @@ Buf *codegen_generate_builtin_source(CodeGen *g) {
}
{
buf_appendf(contents,
"pub const Endian = enum.{\n"
"pub const Endian = enum {\n"
" Big,\n"
" Little,\n"
"};\n\n");
@@ -7326,7 +7325,7 @@ void codegen_translate_c(CodeGen *g, Buf *full_path) {
import->di_file = ZigLLVMCreateFile(g->dbuilder, buf_ptr(src_basename), buf_ptr(src_dirname));
ZigList<ErrorMsg *> errors = {0};
int err = parse_h_file(import, &errors, buf_ptr(full_path), g, nullptr);
Error err = parse_h_file(import, &errors, buf_ptr(full_path), g, nullptr);
if (err == ErrorCCompileErrors && errors.length > 0) {
for (size_t i = 0; i < errors.length; i += 1) {
@@ -7446,7 +7445,7 @@ static void gen_root_source(CodeGen *g) {
return;
Buf *source_code = buf_alloc();
int err;
Error err;
// No need for using the caching system for this file fetch because it is handled
// separately.
if ((err = os_fetch_file_path(resolved_path, source_code, true))) {
@@ -7514,7 +7513,7 @@ void codegen_add_assembly(CodeGen *g, Buf *path) {
static void gen_global_asm(CodeGen *g) {
Buf contents = BUF_INIT;
int err;
Error err;
for (size_t i = 0; i < g->assembly_files.length; i += 1) {
Buf *asm_file = g->assembly_files.at(i);
// No need to use the caching system for these fetches because they
+3 -2
View File
@@ -7,8 +7,8 @@
#include "error.hpp"
const char *err_str(int err) {
switch ((enum Error)err) {
const char *err_str(Error err) {
switch (err) {
case ErrorNone: return "(no error)";
case ErrorNoMem: return "out of memory";
case ErrorInvalidFormat: return "invalid format";
@@ -32,6 +32,7 @@ const char *err_str(int err) {
case ErrorUnsupportedOperatingSystem: return "unsupported operating system";
case ErrorSharingViolation: return "sharing violation";
case ErrorPipeBusy: return "pipe busy";
case ErrorPrimitiveTypeNotFound: return "primitive type not found";
}
return "(invalid error)";
}
+8 -1
View File
@@ -8,6 +8,8 @@
#ifndef ERROR_HPP
#define ERROR_HPP
#include <assert.h>
enum Error {
ErrorNone,
ErrorNoMem,
@@ -32,8 +34,13 @@ enum Error {
ErrorUnsupportedOperatingSystem,
ErrorSharingViolation,
ErrorPipeBusy,
ErrorPrimitiveTypeNotFound,
};
const char *err_str(int err);
const char *err_str(Error err);
static inline void assertNoError(Error err) {
assert(err == ErrorNone);
}
#endif
+42 -99
View File
@@ -479,10 +479,6 @@ static constexpr IrInstructionId ir_instruction_id(IrInstructionCUndef *) {
return IrInstructionIdCUndef;
}
static constexpr IrInstructionId ir_instruction_id(IrInstructionArrayLen *) {
return IrInstructionIdArrayLen;
}
static constexpr IrInstructionId ir_instruction_id(IrInstructionRef *) {
return IrInstructionIdRef;
}
@@ -1663,15 +1659,6 @@ static IrInstruction *ir_build_import(IrBuilder *irb, Scope *scope, AstNode *sou
return &instruction->base;
}
static IrInstruction *ir_build_array_len(IrBuilder *irb, Scope *scope, AstNode *source_node, IrInstruction *array_value) {
IrInstructionArrayLen *instruction = ir_build_instruction<IrInstructionArrayLen>(irb, scope, source_node);
instruction->array_value = array_value;
ir_ref_instruction(array_value, irb->current_basic_block);
return &instruction->base;
}
static IrInstruction *ir_build_ref(IrBuilder *irb, Scope *scope, AstNode *source_node, IrInstruction *value,
bool is_const, bool is_volatile)
{
@@ -3055,10 +3042,10 @@ static ZigVar *create_local_var(CodeGen *codegen, AstNode *node, Scope *parent_s
add_error_note(codegen, msg, existing_var->decl_node, buf_sprintf("previous declaration is here"));
variable_entry->value->type = codegen->builtin_types.entry_invalid;
} else {
ZigType *type = get_primitive_type(codegen, name);
if (type != nullptr) {
ZigType *type;
if (get_primitive_type(codegen, name, &type) != ErrorPrimitiveTypeNotFound) {
add_node_error(codegen, node,
buf_sprintf("variable shadows type '%s'", buf_ptr(&type->name)));
buf_sprintf("variable shadows primitive type '%s'", buf_ptr(name)));
variable_entry->value->type = codegen->builtin_types.entry_invalid;
} else {
Tld *tld = find_decl(codegen, parent_scope, name);
@@ -3182,8 +3169,13 @@ static IrInstruction *ir_gen_block(IrBuilder *irb, Scope *parent_scope, AstNode
}
static IrInstruction *ir_gen_bin_op_id(IrBuilder *irb, Scope *scope, AstNode *node, IrBinOp op_id) {
IrInstruction *op1 = ir_gen_node(irb, node->data.bin_op_expr.op1, scope);
IrInstruction *op2 = ir_gen_node(irb, node->data.bin_op_expr.op2, scope);
Scope *inner_scope = scope;
if (op_id == IrBinOpArrayCat || op_id == IrBinOpArrayMult) {
inner_scope = create_comptime_scope(irb->codegen, node, scope);
}
IrInstruction *op1 = ir_gen_node(irb, node->data.bin_op_expr.op1, inner_scope);
IrInstruction *op2 = ir_gen_node(irb, node->data.bin_op_expr.op2, inner_scope);
if (op1 == irb->codegen->invalid_instruction || op2 == irb->codegen->invalid_instruction)
return irb->codegen->invalid_instruction;
@@ -3488,6 +3480,7 @@ static IrInstruction *ir_gen_null_literal(IrBuilder *irb, Scope *scope, AstNode
}
static IrInstruction *ir_gen_symbol(IrBuilder *irb, Scope *scope, AstNode *node, LVal lval) {
Error err;
assert(node->type == NodeTypeSymbol);
Buf *variable_name = node->data.symbol_expr.symbol;
@@ -3501,8 +3494,15 @@ static IrInstruction *ir_gen_symbol(IrBuilder *irb, Scope *scope, AstNode *node,
return &const_instruction->base;
}
ZigType *primitive_type = get_primitive_type(irb->codegen, variable_name);
if (primitive_type != nullptr) {
ZigType *primitive_type;
if ((err = get_primitive_type(irb->codegen, variable_name, &primitive_type))) {
if (err == ErrorOverflow) {
add_node_error(irb->codegen, node,
buf_sprintf("primitive integer type '%s' exceeds maximum bit width of 65535",
buf_ptr(variable_name)));
return irb->codegen->invalid_instruction;
}
} else {
IrInstruction *value = ir_build_const_type(irb, scope, node, primitive_type);
if (lval == LValPtr) {
return ir_build_ref(irb, scope, node, value, false, false);
@@ -5338,7 +5338,9 @@ static IrInstruction *ir_gen_for_expr(IrBuilder *irb, Scope *parent_scope, AstNo
IrBasicBlock *else_block = else_node ? ir_create_basic_block(irb, child_scope, "ForElse") : end_block;
IrBasicBlock *continue_block = ir_create_basic_block(irb, child_scope, "ForContinue");
IrInstruction *len_val = ir_build_array_len(irb, child_scope, node, array_val);
Buf *len_field_name = buf_create_from_str("len");
IrInstruction *len_ref = ir_build_field_ptr(irb, child_scope, node, array_val_ptr, len_field_name);
IrInstruction *len_val = ir_build_load_ptr(irb, child_scope, node, len_ref);
ir_build_br(irb, child_scope, node, cond_block, is_comptime);
ir_set_cursor_at_end_and_append_block(irb, cond_block);
@@ -5425,6 +5427,7 @@ static IrInstruction *ir_gen_array_type(IrBuilder *irb, Scope *scope, AstNode *n
bool is_volatile = node->data.array_type.is_volatile;
AstNode *align_expr = node->data.array_type.align_expr;
Scope *comptime_scope = create_comptime_scope(irb->codegen, node, scope);
if (size_node) {
if (is_const) {
add_node_error(irb->codegen, node, buf_create_from_str("const qualifier invalid on array type"));
@@ -5439,11 +5442,11 @@ static IrInstruction *ir_gen_array_type(IrBuilder *irb, Scope *scope, AstNode *n
return irb->codegen->invalid_instruction;
}
IrInstruction *size_value = ir_gen_node(irb, size_node, scope);
IrInstruction *size_value = ir_gen_node(irb, size_node, comptime_scope);
if (size_value == irb->codegen->invalid_instruction)
return size_value;
IrInstruction *child_type = ir_gen_node(irb, child_type_node, scope);
IrInstruction *child_type = ir_gen_node(irb, child_type_node, comptime_scope);
if (child_type == irb->codegen->invalid_instruction)
return child_type;
@@ -5451,14 +5454,14 @@ static IrInstruction *ir_gen_array_type(IrBuilder *irb, Scope *scope, AstNode *n
} else {
IrInstruction *align_value;
if (align_expr != nullptr) {
align_value = ir_gen_node(irb, align_expr, scope);
align_value = ir_gen_node(irb, align_expr, comptime_scope);
if (align_value == irb->codegen->invalid_instruction)
return align_value;
} else {
align_value = nullptr;
}
IrInstruction *child_type = ir_gen_node(irb, child_type_node, scope);
IrInstruction *child_type = ir_gen_node(irb, child_type_node, comptime_scope);
if (child_type == irb->codegen->invalid_instruction)
return child_type;
@@ -6288,7 +6291,7 @@ static ZigType *get_error_set_union(CodeGen *g, ErrorTableEntry **errors, ZigTyp
ZigType *err_set_type = new_type_table_entry(ZigTypeIdErrorSet);
buf_resize(&err_set_type->name, 0);
buf_appendf(&err_set_type->name, "error.{");
buf_appendf(&err_set_type->name, "error{");
for (uint32_t i = 0, count = set1->data.error_set.err_count; i < count; i += 1) {
assert(errors[set1->data.error_set.errors[i]->value] == set1->data.error_set.errors[i]);
@@ -6302,7 +6305,6 @@ static ZigType *get_error_set_union(CodeGen *g, ErrorTableEntry **errors, ZigTyp
}
}
err_set_type->is_copyable = true;
err_set_type->type_ref = g->builtin_types.entry_global_error_set->type_ref;
err_set_type->di_type = g->builtin_types.entry_global_error_set->di_type;
err_set_type->data.error_set.err_count = count;
@@ -6340,8 +6342,7 @@ static ZigType *make_err_set_with_one_item(CodeGen *g, Scope *parent_scope, AstN
{
ZigType *err_set_type = new_type_table_entry(ZigTypeIdErrorSet);
buf_resize(&err_set_type->name, 0);
buf_appendf(&err_set_type->name, "error.{%s}", buf_ptr(&err_entry->name));
err_set_type->is_copyable = true;
buf_appendf(&err_set_type->name, "error{%s}", buf_ptr(&err_entry->name));
err_set_type->type_ref = g->builtin_types.entry_global_error_set->type_ref;
err_set_type->di_type = g->builtin_types.entry_global_error_set->di_type;
err_set_type->data.error_set.err_count = 1;
@@ -6362,7 +6363,6 @@ static IrInstruction *ir_gen_err_set_decl(IrBuilder *irb, Scope *parent_scope, A
Buf *type_name = get_anon_type_name(irb->codegen, irb->exec, "error set", node);
ZigType *err_set_type = new_type_table_entry(ZigTypeIdErrorSet);
buf_init_from_buf(&err_set_type->name, type_name);
err_set_type->is_copyable = true;
err_set_type->data.error_set.err_count = err_count;
err_set_type->type_ref = irb->codegen->builtin_types.entry_global_error_set->type_ref;
err_set_type->di_type = irb->codegen->builtin_types.entry_global_error_set->di_type;
@@ -6906,7 +6906,6 @@ static IrInstruction *ir_gen_node_raw(IrBuilder *irb, AstNode *node, Scope *scop
assert(scope);
switch (node->type) {
case NodeTypeStructValueField:
case NodeTypeRoot:
case NodeTypeParamDecl:
case NodeTypeUse:
case NodeTypeSwitchProng:
@@ -8196,7 +8195,7 @@ static ZigType *get_error_set_intersection(IrAnalyze *ira, ZigType *set1, ZigTyp
ZigType *err_set_type = new_type_table_entry(ZigTypeIdErrorSet);
buf_resize(&err_set_type->name, 0);
buf_appendf(&err_set_type->name, "error.{");
buf_appendf(&err_set_type->name, "error{");
for (uint32_t i = 0; i < set2->data.error_set.err_count; i += 1) {
ErrorTableEntry *error_entry = set2->data.error_set.errors[i];
@@ -8208,7 +8207,6 @@ static ZigType *get_error_set_intersection(IrAnalyze *ira, ZigType *set1, ZigTyp
}
free(errors);
err_set_type->is_copyable = true;
err_set_type->type_ref = ira->codegen->builtin_types.entry_global_error_set->type_ref;
err_set_type->di_type = ira->codegen->builtin_types.entry_global_error_set->di_type;
err_set_type->data.error_set.err_count = intersection_list.length;
@@ -9441,12 +9439,6 @@ static IrInstruction *ir_const_unsigned(IrAnalyze *ira, IrInstruction *source_in
return result;
}
// Build a comptime-known IR constant of the target's `usize` type holding `value`.
static IrInstruction *ir_const_usize(IrAnalyze *ira, IrInstruction *source_instruction, uint64_t value) {
// Allocate a const instruction typed as the builtin usize.
IrInstruction *result = ir_const(ira, source_instruction, ira->codegen->builtin_types.entry_usize);
// Store the unsigned value into the constant's bigint payload.
bigint_init_unsigned(&result->value.data.x_bigint, value);
return result;
}
static IrInstruction *ir_get_const_ptr(IrAnalyze *ira, IrInstruction *instruction,
ConstExprValue *pointee, ZigType *pointee_type,
ConstPtrMut ptr_mut, bool ptr_is_const, bool ptr_is_volatile, uint32_t ptr_align)
@@ -10416,25 +10408,6 @@ static IrInstruction *ir_analyze_cast(IrAnalyze *ira, IrInstruction *source_inst
}
}
// cast from *const [N]T to []const T
if (is_slice(wanted_type) &&
actual_type->id == ZigTypeIdPointer &&
actual_type->data.pointer.is_const &&
actual_type->data.pointer.child_type->id == ZigTypeIdArray)
{
ZigType *ptr_type = wanted_type->data.structure.fields[slice_ptr_index].type_entry;
assert(ptr_type->id == ZigTypeIdPointer);
ZigType *array_type = actual_type->data.pointer.child_type;
if ((ptr_type->data.pointer.is_const || array_type->data.array.len == 0) &&
types_match_const_cast_only(ira, ptr_type->data.pointer.child_type, array_type->data.array.child_type,
source_node, false).id == ConstCastResultIdOk)
{
return ir_analyze_array_to_slice(ira, source_instr, value, wanted_type);
}
}
// cast from [N]T to ?[]const T
if (wanted_type->id == ZigTypeIdOptional &&
is_slice(wanted_type->data.maybe.child_type) &&
@@ -14981,8 +14954,14 @@ static IrInstruction *ir_analyze_instruction_to_ptr_type(IrAnalyze *ira,
ZigType *ptr_type;
if (type_entry->id == ZigTypeIdArray) {
// TODO: Allow capturing pointer to const array.
// const a = "123"; for (a) |*c| continue;
// error: expected type '*u8', found '*const u8'
ptr_type = get_pointer_to_type(ira->codegen, type_entry->data.array.child_type, false);
} else if (is_slice(type_entry)) {
} else if (is_array_ref(type_entry)) {
ptr_type = get_pointer_to_type(ira->codegen,
type_entry->data.pointer.child_type->data.array.child_type, type_entry->data.pointer.is_const);
} else if (is_slice(type_entry)) {
ptr_type = adjust_ptr_len(ira->codegen, type_entry->data.structure.fields[0].type_entry, PtrLenSingle);
} else if (type_entry->id == ZigTypeIdArgTuple) {
ConstExprValue *arg_tuple_val = ir_resolve_const(ira, value, UndefBad);
@@ -15969,39 +15948,6 @@ static IrInstruction *ir_analyze_instruction_import(IrAnalyze *ira, IrInstructio
return result;
}
// Analyze an ArrayLen instruction: produce the `.len` of the operand.
// - arrays: the length is part of the type, so the result folds to a
//   comptime usize constant;
// - slices: fold to a constant when the slice value (and its len field)
//   is comptime-known, otherwise emit a runtime field-ptr + load of the
//   slice's `len` field;
// - anything else: compile error ("has no field 'len'").
static IrInstruction *ir_analyze_instruction_array_len(IrAnalyze *ira,
IrInstructionArrayLen *array_len_instruction)
{
IrInstruction *array_value = array_len_instruction->array_value->child;
ZigType *type_entry = array_value->value.type;
if (type_is_invalid(type_entry)) {
// Operand already failed analysis; propagate the poison value.
return ira->codegen->invalid_instruction;
} else if (type_entry->id == ZigTypeIdArray) {
// Array length is a property of the type itself.
return ir_const_usize(ira, &array_len_instruction->base,
type_entry->data.array.len);
} else if (is_slice(type_entry)) {
if (array_value->value.special != ConstValSpecialRuntime) {
// Slice value is comptime-known; try to fold its len field too.
ConstExprValue *len_val = &array_value->value.data.x_struct.fields[slice_len_index];
if (len_val->special != ConstValSpecialRuntime) {
return ir_const_usize(ira, &array_len_instruction->base,
bigint_as_unsigned(&len_val->data.x_bigint));
}
}
// Runtime slice: load the len field through a struct-field pointer.
TypeStructField *field = &type_entry->data.structure.fields[slice_len_index];
IrInstruction *len_ptr = ir_build_struct_field_ptr(&ira->new_irb, array_len_instruction->base.scope,
array_len_instruction->base.source_node, array_value, field);
// Pointer to the len field is *const usize.
len_ptr->value.type = get_pointer_to_type(ira->codegen, ira->codegen->builtin_types.entry_usize, true);
IrInstruction *result = ir_build_load_ptr(&ira->new_irb,
array_len_instruction->base.scope, array_len_instruction->base.source_node, len_ptr);
result->value.type = ira->codegen->builtin_types.entry_usize;
return result;
} else {
ir_add_error_node(ira, array_len_instruction->base.source_node,
buf_sprintf("type '%s' has no field 'len'", buf_ptr(&array_value->value.type->name)));
return ira->codegen->invalid_instruction;
}
}
static IrInstruction *ir_analyze_instruction_ref(IrAnalyze *ira, IrInstructionRef *ref_instruction) {
IrInstruction *value = ref_instruction->value->child;
if (type_is_invalid(value->value.type))
@@ -17650,7 +17596,7 @@ static IrInstruction *ir_analyze_instruction_c_import(IrAnalyze *ira, IrInstruct
ZigList<ErrorMsg *> errors = {0};
int err;
Error err;
if ((err = parse_h_buf(child_import, &errors, &cimport_scope->buf, ira->codegen, node))) {
if (err != ErrorCCompileErrors) {
ir_add_error_node(ira, node, buf_sprintf("C import failed: %s", err_str(err)));
@@ -17766,7 +17712,7 @@ static IrInstruction *ir_analyze_instruction_embed_file(IrAnalyze *ira, IrInstru
// load from file system into const expr
Buf *file_contents = buf_alloc();
int err;
Error err;
if ((err = file_fetch(ira->codegen, file_path, file_contents))) {
if (err == ErrorFileNotFound) {
ir_add_error(ira, instruction->name, buf_sprintf("unable to find '%s'", buf_ptr(file_path)));
@@ -18252,7 +18198,7 @@ static IrInstruction *ir_analyze_instruction_int_type(IrAnalyze *ira, IrInstruct
IrInstruction *bit_count_value = instruction->bit_count->child;
uint64_t bit_count;
if (!ir_resolve_unsigned(ira, bit_count_value, ira->codegen->builtin_types.entry_u32, &bit_count))
if (!ir_resolve_unsigned(ira, bit_count_value, ira->codegen->builtin_types.entry_u16, &bit_count))
return ira->codegen->invalid_instruction;
return ir_const_type(ira, &instruction->base, get_int_type(ira->codegen, is_signed, (uint32_t)bit_count));
@@ -19481,7 +19427,7 @@ static IrInstruction *ir_analyze_instruction_check_switch_prongs(IrAnalyze *ira,
if (!instruction->have_else_prong) {
if (type_is_global_error_set(switch_type)) {
ir_add_error(ira, &instruction->base,
buf_sprintf("else prong required when switching on type 'error'"));
buf_sprintf("else prong required when switching on type 'anyerror'"));
return ira->codegen->invalid_instruction;
} else {
for (uint32_t i = 0; i < switch_type->data.error_set.err_count; i += 1) {
@@ -20903,8 +20849,6 @@ static IrInstruction *ir_analyze_instruction_nocast(IrAnalyze *ira, IrInstructio
return ir_analyze_instruction_union_tag(ira, (IrInstructionUnionTag *)instruction);
case IrInstructionIdImport:
return ir_analyze_instruction_import(ira, (IrInstructionImport *)instruction);
case IrInstructionIdArrayLen:
return ir_analyze_instruction_array_len(ira, (IrInstructionArrayLen *)instruction);
case IrInstructionIdRef:
return ir_analyze_instruction_ref(ira, (IrInstructionRef *)instruction);
case IrInstructionIdContainerInitList:
@@ -21238,7 +21182,6 @@ bool ir_has_side_effects(IrInstruction *instruction) {
case IrInstructionIdTypeOf:
case IrInstructionIdToPtrType:
case IrInstructionIdPtrTypeChild:
case IrInstructionIdArrayLen:
case IrInstructionIdStructFieldPtr:
case IrInstructionIdUnionFieldPtr:
case IrInstructionIdArrayType:
-8
View File
@@ -553,11 +553,6 @@ static void ir_print_import(IrPrint *irp, IrInstructionImport *instruction) {
fprintf(irp->f, ")");
}
// Pretty-print an ArrayLen IR instruction as "<operand>.len".
static void ir_print_array_len(IrPrint *irp, IrInstructionArrayLen *instruction) {
ir_print_other_instruction(irp, instruction->array_value);
fprintf(irp->f, ".len");
}
static void ir_print_ref(IrPrint *irp, IrInstructionRef *instruction) {
const char *const_str = instruction->is_const ? "const " : "";
const char *volatile_str = instruction->is_volatile ? "volatile " : "";
@@ -1462,9 +1457,6 @@ static void ir_print_instruction(IrPrint *irp, IrInstruction *instruction) {
case IrInstructionIdImport:
ir_print_import(irp, (IrInstructionImport *)instruction);
break;
case IrInstructionIdArrayLen:
ir_print_array_len(irp, (IrInstructionArrayLen *)instruction);
break;
case IrInstructionIdRef:
ir_print_ref(irp, (IrInstructionRef *)instruction);
break;
+19 -23
View File
@@ -793,7 +793,7 @@ Error os_file_exists(Buf *full_path, bool *result) {
}
#if defined(ZIG_OS_POSIX)
static int os_exec_process_posix(const char *exe, ZigList<const char *> &args,
static Error os_exec_process_posix(const char *exe, ZigList<const char *> &args,
Termination *term, Buf *out_stderr, Buf *out_stdout)
{
int stdin_pipe[2];
@@ -872,7 +872,7 @@ static int os_exec_process_posix(const char *exe, ZigList<const char *> &args,
// LocalFree(messageBuffer);
//}
static int os_exec_process_windows(const char *exe, ZigList<const char *> &args,
static Error os_exec_process_windows(const char *exe, ZigList<const char *> &args,
Termination *term, Buf *out_stderr, Buf *out_stdout)
{
Buf command_line = BUF_INIT;
@@ -983,7 +983,7 @@ static int os_exec_process_windows(const char *exe, ZigList<const char *> &args,
CloseHandle(piProcInfo.hProcess);
CloseHandle(piProcInfo.hThread);
return 0;
return ErrorNone;
}
#endif
@@ -1003,7 +1003,7 @@ Error os_execv(const char *exe, const char **argv) {
#endif
}
int os_exec_process(const char *exe, ZigList<const char *> &args,
Error os_exec_process(const char *exe, ZigList<const char *> &args,
Termination *term, Buf *out_stderr, Buf *out_stdout)
{
#if defined(ZIG_OS_WINDOWS)
@@ -1027,7 +1027,7 @@ void os_write_file(Buf *full_path, Buf *contents) {
zig_panic("close failed");
}
int os_copy_file(Buf *src_path, Buf *dest_path) {
Error os_copy_file(Buf *src_path, Buf *dest_path) {
FILE *src_f = fopen(buf_ptr(src_path), "rb");
if (!src_f) {
int err = errno;
@@ -1074,7 +1074,7 @@ int os_copy_file(Buf *src_path, Buf *dest_path) {
if (feof(src_f)) {
fclose(src_f);
fclose(dest_f);
return 0;
return ErrorNone;
}
}
}
@@ -1128,9 +1128,6 @@ Error os_get_cwd(Buf *out_cwd) {
#define is_wprefix(s, prefix) \
(wcsncmp((s), (prefix), sizeof(prefix) / sizeof(WCHAR) - 1) == 0)
static bool is_stderr_cyg_pty(void) {
#if defined(__MINGW32__)
return false;
#else
HANDLE stderr_handle = GetStdHandle(STD_ERROR_HANDLE);
if (stderr_handle == INVALID_HANDLE_VALUE)
return false;
@@ -1182,7 +1179,6 @@ static bool is_stderr_cyg_pty(void) {
}
free(nameinfo);
return (p != NULL);
#endif
}
#endif
@@ -1197,7 +1193,7 @@ bool os_stderr_tty(void) {
}
#if defined(ZIG_OS_POSIX)
static int os_buf_to_tmp_file_posix(Buf *contents, Buf *suffix, Buf *out_tmp_path) {
static Error os_buf_to_tmp_file_posix(Buf *contents, Buf *suffix, Buf *out_tmp_path) {
const char *tmp_dir = getenv("TMPDIR");
if (!tmp_dir) {
tmp_dir = P_tmpdir;
@@ -1221,12 +1217,12 @@ static int os_buf_to_tmp_file_posix(Buf *contents, Buf *suffix, Buf *out_tmp_pat
if (fclose(f))
zig_panic("close failed");
return 0;
return ErrorNone;
}
#endif
#if defined(ZIG_OS_WINDOWS)
static int os_buf_to_tmp_file_windows(Buf *contents, Buf *suffix, Buf *out_tmp_path) {
static Error os_buf_to_tmp_file_windows(Buf *contents, Buf *suffix, Buf *out_tmp_path) {
char tmp_dir[MAX_PATH + 1];
if (GetTempPath(MAX_PATH, tmp_dir) == 0) {
zig_panic("GetTempPath failed");
@@ -1255,11 +1251,11 @@ static int os_buf_to_tmp_file_windows(Buf *contents, Buf *suffix, Buf *out_tmp_p
if (fclose(f)) {
zig_panic("fclose failed");
}
return 0;
return ErrorNone;
}
#endif
int os_buf_to_tmp_file(Buf *contents, Buf *suffix, Buf *out_tmp_path) {
Error os_buf_to_tmp_file(Buf *contents, Buf *suffix, Buf *out_tmp_path) {
#if defined(ZIG_OS_WINDOWS)
return os_buf_to_tmp_file_windows(contents, suffix, out_tmp_path);
#elif defined(ZIG_OS_POSIX)
@@ -1269,17 +1265,17 @@ int os_buf_to_tmp_file(Buf *contents, Buf *suffix, Buf *out_tmp_path) {
#endif
}
int os_delete_file(Buf *path) {
Error os_delete_file(Buf *path) {
if (remove(buf_ptr(path))) {
return ErrorFileSystem;
} else {
return 0;
return ErrorNone;
}
}
int os_rename(Buf *src_path, Buf *dest_path) {
Error os_rename(Buf *src_path, Buf *dest_path) {
if (buf_eql_buf(src_path, dest_path)) {
return 0;
return ErrorNone;
}
#if defined(ZIG_OS_WINDOWS)
if (!MoveFileExA(buf_ptr(src_path), buf_ptr(dest_path), MOVEFILE_REPLACE_EXISTING)) {
@@ -1290,7 +1286,7 @@ int os_rename(Buf *src_path, Buf *dest_path) {
return ErrorFileSystem;
}
#endif
return 0;
return ErrorNone;
}
double os_get_time(void) {
@@ -1540,7 +1536,7 @@ int os_get_win32_ucrt_lib_path(ZigWindowsSDK *sdk, Buf* output_buf, ZigLLVM_Arch
buf_append_str(output_buf, "arm\\");
break;
default:
zig_panic("Attemped to use vcruntime for non-supported platform.");
zig_panic("Attempted to use vcruntime for non-supported platform.");
}
Buf* tmp_buf = buf_alloc();
buf_init_from_buf(tmp_buf, output_buf);
@@ -1589,7 +1585,7 @@ int os_get_win32_kern32_path(ZigWindowsSDK *sdk, Buf* output_buf, ZigLLVM_ArchTy
buf_append_str(output_buf, "arm\\");
break;
default:
zig_panic("Attemped to use vcruntime for non-supported platform.");
zig_panic("Attempted to use vcruntime for non-supported platform.");
}
Buf* tmp_buf = buf_alloc();
buf_init_from_buf(tmp_buf, output_buf);
@@ -1612,7 +1608,7 @@ int os_get_win32_kern32_path(ZigWindowsSDK *sdk, Buf* output_buf, ZigLLVM_ArchTy
buf_append_str(output_buf, "arm\\");
break;
default:
zig_panic("Attemped to use vcruntime for non-supported platform.");
zig_panic("Attempted to use vcruntime for non-supported platform.");
}
Buf* tmp_buf = buf_alloc();
buf_init_from_buf(tmp_buf, output_buf);
+5 -6
View File
@@ -13,7 +13,6 @@
#include "error.hpp"
#include "zig_llvm.h"
#include "windows_sdk.h"
#include "result.hpp"
#include <stdio.h>
#include <inttypes.h>
@@ -85,7 +84,7 @@ struct OsTimeStamp {
int os_init(void);
void os_spawn_process(const char *exe, ZigList<const char *> &args, Termination *term);
int os_exec_process(const char *exe, ZigList<const char *> &args,
Error os_exec_process(const char *exe, ZigList<const char *> &args,
Termination *term, Buf *out_stderr, Buf *out_stdout);
Error os_execv(const char *exe, const char **argv);
@@ -109,7 +108,7 @@ Error ATTRIBUTE_MUST_USE os_file_overwrite(OsFile file, Buf *contents);
void os_file_close(OsFile file);
void os_write_file(Buf *full_path, Buf *contents);
int os_copy_file(Buf *src_path, Buf *dest_path);
Error os_copy_file(Buf *src_path, Buf *dest_path);
Error ATTRIBUTE_MUST_USE os_fetch_file(FILE *file, Buf *out_contents, bool skip_shebang);
Error ATTRIBUTE_MUST_USE os_fetch_file_path(Buf *full_path, Buf *out_contents, bool skip_shebang);
@@ -119,12 +118,12 @@ Error ATTRIBUTE_MUST_USE os_get_cwd(Buf *out_cwd);
bool os_stderr_tty(void);
void os_stderr_set_color(TermColor color);
int os_buf_to_tmp_file(Buf *contents, Buf *suffix, Buf *out_tmp_path);
int os_delete_file(Buf *path);
Error os_buf_to_tmp_file(Buf *contents, Buf *suffix, Buf *out_tmp_path);
Error os_delete_file(Buf *path);
Error ATTRIBUTE_MUST_USE os_file_exists(Buf *full_path, bool *result);
int os_rename(Buf *src_path, Buf *dest_path);
Error os_rename(Buf *src_path, Buf *dest_path);
double os_get_time(void);
bool os_is_sep(uint8_t c);
+2725 -2753
View File
@@ -16,35 +16,110 @@
// Mutable state threaded through every ast_parse_* routine: the raw
// source, the token stream with a cursor into it, and the owning import
// used when attributing errors.
struct ParseContext {
// Raw source text; token start/end positions index into this buffer.
Buf *buf;
// Root of the AST being built (presumably set by ast_parse_root — confirm).
AstNode *root;
// Cursor into `tokens`; advanced by eat_token, rewound by put_back_token.
size_t current_token;
ZigList<Token> *tokens;
// Owning import; supplies path/source/line offsets for error messages.
ImportTableEntry *owner;
ErrColor err_color;
// These buffers are used frequently so we preallocate them once here.
Buf *void_buf;
};
ATTRIBUTE_PRINTF(4, 5)
ATTRIBUTE_NORETURN
static void ast_asm_error(ParseContext *pc, AstNode *node, size_t offset, const char *format, ...) {
assert(node->type == NodeTypeAsmExpr);
// Parsed payload capture of the form |*name|.
// NOTE(review): field roles inferred from names — `asterisk` appears to be
// non-null when the payload binds by pointer; confirm against ast_parse_ptr_payload.
struct PtrPayload {
Token *asterisk;
Token *payload;
};
// Parsed payload capture of the form |*name, index| (e.g. for loops).
// NOTE(review): field roles inferred from names — confirm against
// ast_parse_ptr_index_payload.
struct PtrIndexPayload {
Token *asterisk;
Token *payload;
Token *index;
};
// TODO calculate or otherwise keep track of originating line/column number for strings
//SrcPos pos = node->data.asm_expr.offset_map.at(offset);
SrcPos pos = { node->line, node->column };
va_list ap;
va_start(ap, format);
Buf *msg = buf_vprintf(format, ap);
va_end(ap);
ErrorMsg *err = err_msg_create_with_line(pc->owner->path, pos.line, pos.column,
pc->owner->source_code, pc->owner->line_offsets, msg);
print_err_msg(err, pc->err_color);
exit(EXIT_FAILURE);
}
static AstNode *ast_parse_root(ParseContext *pc);
static AstNodeContainerDecl ast_parse_container_members(ParseContext *pc);
static AstNode *ast_parse_test_decl(ParseContext *pc);
static AstNode *ast_parse_top_level_comptime(ParseContext *pc);
static AstNode *ast_parse_top_level_decl(ParseContext *pc, VisibMod visib_mod);
static AstNode *ast_parse_fn_proto(ParseContext *pc);
static AstNode *ast_parse_var_decl(ParseContext *pc);
static AstNode *ast_parse_container_field(ParseContext *pc);
static AstNode *ast_parse_statement(ParseContext *pc);
static AstNode *ast_parse_if_statement(ParseContext *pc);
static AstNode *ast_parse_labeled_statement(ParseContext *pc);
static AstNode *ast_parse_loop_statement(ParseContext *pc);
static AstNode *ast_parse_for_statement(ParseContext *pc);
static AstNode *ast_parse_while_statement(ParseContext *pc);
static AstNode *ast_parse_block_expr_statement(ParseContext *pc);
static AstNode *ast_parse_block_expr(ParseContext *pc);
static AstNode *ast_parse_assign_expr(ParseContext *pc);
static AstNode *ast_parse_expr(ParseContext *pc);
static AstNode *ast_parse_bool_or_expr(ParseContext *pc);
static AstNode *ast_parse_bool_and_expr(ParseContext *pc);
static AstNode *ast_parse_compare_expr(ParseContext *pc);
static AstNode *ast_parse_bitwise_expr(ParseContext *pc);
static AstNode *ast_parse_bit_shit_expr(ParseContext *pc);
static AstNode *ast_parse_addition_expr(ParseContext *pc);
static AstNode *ast_parse_multiply_expr(ParseContext *pc);
static AstNode *ast_parse_prefix_expr(ParseContext *pc);
static AstNode *ast_parse_primary_expr(ParseContext *pc);
static AstNode *ast_parse_if_expr(ParseContext *pc);
static AstNode *ast_parse_labeled_expr(ParseContext *pc);
static AstNode *ast_parse_block(ParseContext *pc);
static AstNode *ast_parse_loop_expr(ParseContext *pc);
static AstNode *ast_parse_for_expr(ParseContext *pc);
static AstNode *ast_parse_while_expr(ParseContext *pc);
static AstNode *ast_parse_curly_suffix_expr(ParseContext *pc);
static AstNode *ast_parse_init_list(ParseContext *pc);
static AstNode *ast_parse_type_expr(ParseContext *pc);
static AstNode *ast_parse_error_union_expr(ParseContext *pc);
static AstNode *ast_parse_suffix_expr(ParseContext *pc);
static AstNode *ast_parse_primary_type_expr(ParseContext *pc);
static AstNode *ast_parse_container_decl(ParseContext *pc);
static AstNode *ast_parse_error_set_decl(ParseContext *pc);
static AstNode *ast_parse_grouped_expr(ParseContext *pc);
static AstNode *ast_parse_if_type_expr(ParseContext *pc);
static AstNode *ast_parse_labeled_type_expr(ParseContext *pc);
static AstNode *ast_parse_loop_type_expr(ParseContext *pc);
static AstNode *ast_parse_for_type_expr(ParseContext *pc);
static AstNode *ast_parse_while_type_expr(ParseContext *pc);
static AstNode *ast_parse_switch_expr(ParseContext *pc);
static AstNode *ast_parse_asm_expr(ParseContext *pc);
static AstNode *ast_parse_asm_output(ParseContext *pc);
static AsmOutput *ast_parse_asm_output_item(ParseContext *pc);
static AstNode *ast_parse_asm_input(ParseContext *pc);
static AsmInput *ast_parse_asm_input_item(ParseContext *pc);
static AstNode *ast_parse_asm_cloppers(ParseContext *pc);
static Token *ast_parse_break_label(ParseContext *pc);
static Token *ast_parse_block_label(ParseContext *pc);
static AstNode *ast_parse_field_init(ParseContext *pc);
static AstNode *ast_parse_while_continue_expr(ParseContext *pc);
static AstNode *ast_parse_section(ParseContext *pc);
static Optional<AstNodeFnProto> ast_parse_fn_cc(ParseContext *pc);
static AstNode *ast_parse_param_decl(ParseContext *pc);
static AstNode *ast_parse_param_type(ParseContext *pc);
static AstNode *ast_parse_if_prefix(ParseContext *pc);
static AstNode *ast_parse_while_prefix(ParseContext *pc);
static AstNode *ast_parse_for_prefix(ParseContext *pc);
static Token *ast_parse_payload(ParseContext *pc);
static Optional<PtrPayload> ast_parse_ptr_payload(ParseContext *pc);
static Optional<PtrIndexPayload> ast_parse_ptr_index_payload(ParseContext *pc);
static AstNode *ast_parse_switch_prong(ParseContext *pc);
static AstNode *ast_parse_switch_case(ParseContext *pc);
static AstNode *ast_parse_switch_item(ParseContext *pc);
static AstNode *ast_parse_assign_op(ParseContext *pc);
static AstNode *ast_parse_compare_op(ParseContext *pc);
static AstNode *ast_parse_bitwise_op(ParseContext *pc);
static AstNode *ast_parse_bit_shift_op(ParseContext *pc);
static AstNode *ast_parse_addition_op(ParseContext *pc);
static AstNode *ast_parse_multiply_op(ParseContext *pc);
static AstNode *ast_parse_prefix_op(ParseContext *pc);
static AstNode *ast_parse_prefix_type_op(ParseContext *pc);
static AstNode *ast_parse_suffix_op(ParseContext *pc);
static AstNode *ast_parse_async_prefix(ParseContext *pc);
static AstNode *ast_parse_fn_call_argumnets(ParseContext *pc);
static AstNode *ast_parse_array_type_start(ParseContext *pc);
static AstNode *ast_parse_ptr_type_start(ParseContext *pc);
static AstNode *ast_parse_container_decl_auto(ParseContext *pc);
static AstNode *ast_parse_container_decl_type(ParseContext *pc);
static AstNode *ast_parse_byte_align(ParseContext *pc);
ATTRIBUTE_PRINTF(3, 4)
ATTRIBUTE_NORETURN
@@ -64,6 +139,34 @@ static void ast_error(ParseContext *pc, Token *token, const char *format, ...) {
exit(EXIT_FAILURE);
}
// Fatal: report a printf-style error on an inline-asm expression node and
// exit the compiler. The `offset` parameter (position within the asm
// template) is currently unused; the error is attributed to the asm
// node's own line/column.
ATTRIBUTE_PRINTF(4, 5)
ATTRIBUTE_NORETURN
static void ast_asm_error(ParseContext *pc, AstNode *node, size_t offset, const char *format, ...) {
assert(node->type == NodeTypeAsmExpr);
va_list ap;
va_start(ap, format);
Buf *msg = buf_vprintf(format, ap);
va_end(ap);
// Attach file/line context from the owning import, then print and die.
ErrorMsg *err = err_msg_create_with_line(pc->owner->path, node->line, node->column,
pc->owner->source_code, pc->owner->line_offsets, msg);
print_err_msg(err, pc->err_color);
exit(EXIT_FAILURE);
}
// Copy the raw text that `token` spans in the source buffer `input`
// into a fresh Buf, returned by value.
static Buf ast_token_str(Buf *input, Token *token) {
    Buf result = BUF_INIT;
    const char *begin = buf_ptr(input) + token->start_pos;
    size_t length = token->end_pos - token->start_pos;
    buf_init_from_mem(&result, begin, length);
    return result;
}
// Fatal: report the given token as unparseable ("invalid token: '...'")
// and abort compilation via ast_error (which does not return).
ATTRIBUTE_NORETURN
static void ast_invalid_token_error(ParseContext *pc, Token *token) {
Buf token_value = ast_token_str(pc->buf, token);
ast_error(pc, token, "invalid token: '%s'", buf_ptr(&token_value));
}
static AstNode *ast_create_node_no_line_info(ParseContext *pc, NodeType type) {
AstNode *node = allocate<AstNode>(1);
node->type = type;
@@ -71,21 +174,316 @@ static AstNode *ast_create_node_no_line_info(ParseContext *pc, NodeType type) {
return node;
}
// Stamp `node` with the source position of `first_token`.
static void ast_update_node_line_info(AstNode *node, Token *first_token) {
assert(first_token);
node->line = first_token->start_line;
node->column = first_token->start_column;
}
static AstNode *ast_create_node(ParseContext *pc, NodeType type, Token *first_token) {
assert(first_token);
AstNode *node = ast_create_node_no_line_info(pc, type);
ast_update_node_line_info(node, first_token);
node->line = first_token->start_line;
node->column = first_token->start_column;
return node;
}
// Allocate a new AST node of the given type, inheriting its source
// position from an existing node (used when a parsed node is rewritten
// into a different node kind).
static AstNode *ast_create_node_copy_line_info(ParseContext *pc, NodeType type, AstNode *from) {
assert(from);
AstNode *node = ast_create_node_no_line_info(pc, type);
node->line = from->line;
node->column = from->column;
return node;
}
static void parse_asm_template(ParseContext *pc, AstNode *node) {
// Look ahead `i` tokens past the cursor without consuming anything.
static Token *peek_token_i(ParseContext *pc, size_t i) {
    size_t index = pc->current_token + i;
    return &pc->tokens->at(index);
}
// The token currently under the cursor (zero lookahead), not consumed.
static Token *peek_token(ParseContext *pc) {
    return &pc->tokens->at(pc->current_token);
}
// Consume the current token: return it and advance the cursor by one.
static Token *eat_token(ParseContext *pc) {
    Token *consumed = peek_token(pc);
    pc->current_token += 1;
    return consumed;
}
// Consume and return the current token only if it has the requested id;
// otherwise leave the cursor alone and return nullptr.
static Token *eat_token_if(ParseContext *pc, TokenId id) {
    if (peek_token(pc)->id != id)
        return nullptr;
    return eat_token(pc);
}
// Consume the next token, requiring it to be `id`; on mismatch this
// raises a fatal parse error via ast_error (which does not return).
static Token *expect_token(ParseContext *pc, TokenId id) {
    Token *token = eat_token(pc);
    if (token->id != id)
        ast_error(pc, token, "expected token '%s', found '%s'", token_name(id), token_name(token->id));
    return token;
}
// Rewind the token cursor by one, undoing the most recent eat_token().
static void put_back_token(ParseContext *pc) {
pc->current_token -= 1;
}
// The string contents backing a symbol or string-literal token.
// A null token maps to a null Buf so optional payload tokens can be
// threaded straight through.
static Buf *token_buf(Token *token) {
    if (token == nullptr)
        return nullptr;
    assert(token->id == TokenIdSymbol || token->id == TokenIdStringLiteral);
    return &token->data.str_lit.str;
}
// The parsed big-integer value of an integer-literal token.
static BigInt *token_bigint(Token *token) {
assert(token->id == TokenIdIntLiteral);
return &token->data.int_lit.bigint;
}
// Wrap a symbol token in a NodeTypeSymbol AST node carrying its name.
static AstNode *token_symbol(ParseContext *pc, Token *token) {
assert(token->id == TokenIdSymbol);
AstNode *res = ast_create_node(pc, NodeTypeSymbol, token);
res->data.symbol_expr.symbol = token_buf(token);
return res;
}
// (Rule SEP)* Rule?
// Parse a `sep`-separated sequence of items produced by `parser`,
// tolerating a trailing separator. Stops at the first item `parser`
// declines to produce.
template<typename T>
static ZigList<T *> ast_parse_list(ParseContext *pc, TokenId sep, T *(*parser)(ParseContext*)) {
    ZigList<T *> items = {};
    for (T *item = parser(pc); item != nullptr; item = parser(pc)) {
        items.append(item);
        // No separator after this item means the list is over.
        if (eat_token_if(pc, sep) == nullptr)
            break;
    }
    return items;
}
// Run `parser` and require success: a null result is reported as an
// invalid token (fatal), so this never returns nullptr to the caller.
static AstNode *ast_expect(ParseContext *pc, AstNode *(*parser)(ParseContext*)) {
    AstNode *node = parser(pc);
    if (node == nullptr)
        ast_invalid_token_error(pc, peek_token(pc));
    return node;
}
// Controls whether ast_parse_bin_op_expr applies the operator parser
// once (non-chaining) or keeps folding operands left-associatively for
// as long as operators keep matching.
enum BinOpChain {
BinOpChainOnce,
BinOpChainInf,
};
// Op* Child
// Parse a chain of prefix operators followed by a mandatory child
// expression. `right` always points at the slot (initially `res`, then
// the just-parsed prefix node's child field) where the next node in the
// chain must be written — this threading is what links the operators
// together right-to-left as they are consumed.
static AstNode *ast_parse_prefix_op_expr(
ParseContext *pc,
AstNode *(*op_parser)(ParseContext *),
AstNode *(*child_parser)(ParseContext *)
) {
AstNode *res = nullptr;
AstNode **right = &res;
while (true) {
AstNode *prefix = op_parser(pc);
if (prefix == nullptr)
break;
// Hook this operator into the slot left open by the previous one,
// then point `right` at this operator's own child slot.
*right = prefix;
switch (prefix->type) {
case NodeTypePrefixOpExpr:
right = &prefix->data.prefix_op_expr.primary_expr;
break;
case NodeTypeReturnExpr:
right = &prefix->data.return_expr.expr;
break;
case NodeTypeAwaitExpr:
right = &prefix->data.await_expr.expr;
break;
case NodeTypePromiseType:
right = &prefix->data.promise_type.payload_type;
break;
case NodeTypeArrayType:
right = &prefix->data.array_type.child_type;
break;
case NodeTypePointerType: {
// We might get two pointers from *_ptr_type_start
AstNode *child = prefix->data.pointer_type.op_expr;
if (child == nullptr)
child = prefix;
right = &child->data.pointer_type.op_expr;
break;
}
default:
zig_unreachable();
}
}
// If we have already consumed a token, and determined that
// this node is a prefix op, then we expect that the node has
// a child.
if (res != nullptr) {
*right = ast_expect(pc, child_parser);
} else {
// Otherwise, if we didn't consume a token, then we can return
// null, if the child expr did.
*right = child_parser(pc);
if (*right == nullptr)
return nullptr;
}
return res;
}
// Child (Op Child)(*/?)
// Parse a binary-operator expression: a child, then zero or more
// (operator, child) pairs. Each iteration re-roots `res` at the new
// operator node with the previous tree as its left operand, which
// yields left-associative nesting. `chain` selects whether at most one
// pair (BinOpChainOnce) or arbitrarily many (BinOpChainInf) are folded.
static AstNode *ast_parse_bin_op_expr(
ParseContext *pc,
BinOpChain chain,
AstNode *(*op_parse)(ParseContext*),
AstNode *(*child_parse)(ParseContext*)
) {
AstNode *res = child_parse(pc);
if (res == nullptr)
return nullptr;
do {
AstNode *op = op_parse(pc);
if (op == nullptr)
break;
// The operator node becomes the new root: previous tree on the
// left, mandatory freshly-parsed child on the right.
AstNode *left = res;
AstNode *right = ast_expect(pc, child_parse);
res = op;
switch (op->type) {
case NodeTypeBinOpExpr:
op->data.bin_op_expr.op1 = left;
op->data.bin_op_expr.op2 = right;
break;
case NodeTypeUnwrapErrorExpr:
op->data.unwrap_err_expr.op1 = left;
op->data.unwrap_err_expr.op2 = right;
break;
default:
zig_unreachable();
}
} while (chain == BinOpChainInf);
return res;
}
// IfPrefix Body (KEYWORD_else Payload? Body)?
// Parse the body/else of an if expression whose prefix was parsed as a
// generic NodeTypeTestExpr, then rewrite that node in place into its
// final kind: IfErrorExpr when the else arm carries an error payload,
// TestExpr when the prefix captured an optional payload, or plain
// IfBoolExpr otherwise.
static AstNode *ast_parse_if_expr_helper(ParseContext *pc, AstNode *(*body_parser)(ParseContext*)) {
AstNode *res = ast_parse_if_prefix(pc);
if (res == nullptr)
return nullptr;
AstNode *body = ast_expect(pc, body_parser);
Token *err_payload = nullptr;
AstNode *else_body = nullptr;
if (eat_token_if(pc, TokenIdKeywordElse) != nullptr) {
err_payload = ast_parse_payload(pc);
else_body = ast_expect(pc, body_parser);
}
assert(res->type == NodeTypeTestExpr);
if (err_payload != nullptr) {
// `else |err|` form: rewrite into an if-error expression, copying
// the old test-expr fields out BEFORE overwriting the union.
AstNodeTestExpr old = res->data.test_expr;
res->type = NodeTypeIfErrorExpr;
res->data.if_err_expr.target_node = old.target_node;
res->data.if_err_expr.var_is_ptr = old.var_is_ptr;
res->data.if_err_expr.var_symbol = old.var_symbol;
res->data.if_err_expr.then_node = body;
res->data.if_err_expr.err_symbol = token_buf(err_payload);
res->data.if_err_expr.else_node = else_body;
return res;
}
if (res->data.test_expr.var_symbol != nullptr) {
// Optional-payload form (`if (x) |v|`): keep the test-expr node.
res->data.test_expr.then_node = body;
res->data.test_expr.else_node = else_body;
return res;
}
// Plain boolean condition: rewrite into an if-bool expression.
AstNodeTestExpr old = res->data.test_expr;
res->type = NodeTypeIfBoolExpr;
res->data.if_bool_expr.condition = old.target_node;
res->data.if_bool_expr.then_block = body;
res->data.if_bool_expr.else_node = else_body;
return res;
}
// KEYWORD_inline? (ForLoop / WhileLoop)
// Parses an optionally-inline loop, trying `for` before `while`. A dangling
// "inline" with no loop after it is a hard parse error.
static AstNode *ast_parse_loop_expr_helper(
    ParseContext *pc,
    AstNode *(*for_parser)(ParseContext *),
    AstNode *(*while_parser)(ParseContext *)
) {
    Token *inline_token = eat_token_if(pc, TokenIdKeywordInline);
    bool is_inline = inline_token != nullptr;

    AstNode *res = for_parser(pc);
    if (res != nullptr) {
        assert(res->type == NodeTypeForExpr);
        res->data.for_expr.is_inline = is_inline;
        return res;
    }

    res = while_parser(pc);
    if (res != nullptr) {
        assert(res->type == NodeTypeWhileExpr);
        res->data.while_expr.is_inline = is_inline;
        return res;
    }

    // "inline" must be followed by a loop; anything else is invalid.
    if (is_inline)
        ast_invalid_token_error(pc, peek_token(pc));
    return nullptr;
}
// ForPrefix Body (KEYWORD_else Body)?
// Attaches body and optional else-branch to a parsed for-prefix node.
// Unlike "while", "for" takes no payload on its else branch.
static AstNode *ast_parse_for_expr_helper(ParseContext *pc, AstNode *(*body_parser)(ParseContext*)) {
    AstNode *res = ast_parse_for_prefix(pc);
    if (res == nullptr)
        return nullptr;

    AstNode *body = ast_expect(pc, body_parser);
    AstNode *else_body = (eat_token_if(pc, TokenIdKeywordElse) != nullptr)
        ? ast_expect(pc, body_parser)
        : nullptr;

    assert(res->type == NodeTypeForExpr);
    res->data.for_expr.body = body;
    res->data.for_expr.else_node = else_body;
    return res;
}
// WhilePrefix Body (KEYWORD_else Payload? Body)?
// Attaches body, optional else-branch and optional |err| payload to a parsed
// while-prefix node.
static AstNode *ast_parse_while_expr_helper(ParseContext *pc, AstNode *(*body_parser)(ParseContext*)) {
    AstNode *res = ast_parse_while_prefix(pc);
    if (res == nullptr)
        return nullptr;

    AstNode *body = ast_expect(pc, body_parser);

    // Optional else branch with an optional |err| payload capture.
    Token *err_payload = nullptr;
    AstNode *else_body = nullptr;
    if (eat_token_if(pc, TokenIdKeywordElse) != nullptr) {
        err_payload = ast_parse_payload(pc);
        else_body = ast_expect(pc, body_parser);
    }

    assert(res->type == NodeTypeWhileExpr);
    res->data.while_expr.body = body;
    // Fix: err_payload is null when there is no else branch or no payload;
    // token_buf (defined above) asserts on token->id and must not be handed
    // a null token, so guard the call instead of passing unconditionally.
    res->data.while_expr.err_symbol = (err_payload != nullptr) ? token_buf(err_payload) : nullptr;
    res->data.while_expr.else_node = else_body;
    return res;
}
// Parses a single binary-operator token of kind `id` and wraps it in a
// NodeTypeBinOpExpr node tagged with `op`. Returns null when the next token
// does not match, consuming nothing.
template<TokenId id, BinOpType op>
AstNode *ast_parse_bin_op_simple(ParseContext *pc) {
    Token *token = eat_token_if(pc, id);
    if (token == nullptr)
        return nullptr;

    AstNode *node = ast_create_node(pc, NodeTypeBinOpExpr, token);
    node->data.bin_op_expr.bin_op = op;
    return node;
}
// Presumably scans the asm expression's template string — TODO confirm.
// NOTE(review): this definition is truncated merge residue — a stray diff
// hunk header ("@@ ... @@") sits where the State enum members and the
// scanning logic should be. Restore the full implementation from history;
// note also that ast_parse_asm_expr below calls `parse_asm_template`, the
// name shown in the hunk header, not `ast_parse_asm_template`.
static void ast_parse_asm_template(ParseContext *pc, AstNode *node) {
    Buf *asm_template = node->data.asm_expr.asm_template;
    enum State {
        @@ -172,2733 +570,2310 @@ static void parse_asm_template(ParseContext *pc, AstNode *node) {
    }
}
// Returns the string contents of a string-literal or symbol token.
// Fix: tolerate a null token (returning null) — callers with optional
// payload tokens pass it straight through (see the unconditional
// token_buf(err_payload) call in ast_parse_while_expr_helper above).
// Backward compatible: behavior for non-null tokens is unchanged.
static Buf *token_buf(Token *token) {
    if (token == nullptr)
        return nullptr;
    assert(token->id == TokenIdStringLiteral || token->id == TokenIdSymbol);
    return &token->data.str_lit.str;
}
// Accessor for the parsed big-integer payload of an integer-literal token.
static BigInt *token_bigint(Token *token) {
    assert(token->id == TokenIdIntLiteral);
    BigInt *value = &token->data.int_lit.bigint;
    return value;
}
// Accessor for the parsed big-float payload of a float-literal token.
static BigFloat *token_bigfloat(Token *token) {
    assert(token->id == TokenIdFloatLiteral);
    BigFloat *value = &token->data.float_lit.bigfloat;
    return value;
}
// Accessor for the byte value of a character-literal token.
static uint8_t token_char_lit(Token *token) {
    assert(token->id == TokenIdCharLiteral);
    uint8_t value = token->data.char_lit.c;
    return value;
}
// Initializes `buf` with the textual value of `token`: symbols use the
// token's interned buffer, everything else copies the raw source slice
// [start_pos, end_pos) out of the parse context's source buffer.
static void ast_buf_from_token(ParseContext *pc, Token *token, Buf *buf) {
    if (token->id != TokenIdSymbol) {
        size_t len = token->end_pos - token->start_pos;
        buf_init_from_mem(buf, buf_ptr(pc->buf) + token->start_pos, len);
    } else {
        buf_init_from_buf(buf, token_buf(token));
    }
}
// Reports a fatal "invalid token" parse error at `token`; does not return.
ATTRIBUTE_NORETURN
static void ast_invalid_token_error(ParseContext *pc, Token *token) {
    Buf bad_token_text = BUF_INIT;
    ast_buf_from_token(pc, token, &bad_token_text);
    ast_error(pc, token, "invalid token: '%s'", buf_ptr(&bad_token_text));
}
static AstNode *ast_parse_block_or_expression(ParseContext *pc, size_t *token_index, bool mandatory);
static AstNode *ast_parse_block_expr_or_expression(ParseContext *pc, size_t *token_index, bool mandatory);
static AstNode *ast_parse_expression(ParseContext *pc, size_t *token_index, bool mandatory);
static AstNode *ast_parse_block(ParseContext *pc, size_t *token_index, bool mandatory);
static AstNode *ast_parse_if_try_test_expr(ParseContext *pc, size_t *token_index, bool mandatory);
static AstNode *ast_parse_block_expr(ParseContext *pc, size_t *token_index, bool mandatory);
static AstNode *ast_parse_unwrap_expr(ParseContext *pc, size_t *token_index, bool mandatory);
static AstNode *ast_parse_prefix_op_expr(ParseContext *pc, size_t *token_index, bool mandatory);
static AstNode *ast_parse_fn_proto(ParseContext *pc, size_t *token_index, bool mandatory, VisibMod visib_mod);
static AstNode *ast_parse_return_expr(ParseContext *pc, size_t *token_index);
static AstNode *ast_parse_grouped_expr(ParseContext *pc, size_t *token_index, bool mandatory);
static AstNode *ast_parse_container_decl(ParseContext *pc, size_t *token_index, bool mandatory);
static AstNode *ast_parse_primary_expr(ParseContext *pc, size_t *token_index, bool mandatory);
static AstNode *ast_parse_try_expr(ParseContext *pc, size_t *token_index);
static AstNode *ast_parse_await_expr(ParseContext *pc, size_t *token_index);
static AstNode *ast_parse_symbol(ParseContext *pc, size_t *token_index);
// Verifies that `token` is of kind `token_id`; reports a parse error naming
// the expected and actual token kinds otherwise.
static void ast_expect_token(ParseContext *pc, Token *token, TokenId token_id) {
    if (token->id == token_id) {
        return;
    }

    // Fix: removed a dead (and leaked) Buf built via ast_buf_from_token —
    // the error message only uses token names, never the token's text.
    ast_error(pc, token, "expected token '%s', found '%s'", token_name(token_id), token_name(token->id));
}
// Consumes and returns the current token, which must be of kind `token_id`
// (a mismatch is reported via ast_expect_token).
static Token *ast_eat_token(ParseContext *pc, size_t *token_index, TokenId token_id) {
    Token *tok = &pc->tokens->at(*token_index);
    ast_expect_token(pc, tok, token_id);
    *token_index += 1;
    return tok;
}
/*
ErrorSetExpr = (PrefixOpExpression "!" PrefixOpExpression) | PrefixOpExpression
*/
static AstNode *ast_parse_error_set_expr(ParseContext *pc, size_t *token_index, bool mandatory) {
    AstNode *lhs = ast_parse_prefix_op_expr(pc, token_index, mandatory);
    if (lhs == nullptr)
        return nullptr;

    Token *bang = &pc->tokens->at(*token_index);
    if (bang->id != TokenIdBang)
        return lhs;

    // "A!B": build an error-union binary op; the right operand is mandatory.
    *token_index += 1;
    AstNode *node = ast_create_node(pc, NodeTypeBinOpExpr, bang);
    node->data.bin_op_expr.op1 = lhs;
    node->data.bin_op_expr.bin_op = BinOpTypeErrorUnion;
    node->data.bin_op_expr.op2 = ast_parse_prefix_op_expr(pc, token_index, true);
    return node;
}
/*
TypeExpr = ErrorSetExpr
*/
static AstNode *ast_parse_type_expr(ParseContext *pc, size_t *token_index, bool mandatory) {
    // Type expressions are currently just error-set expressions.
    return ast_parse_error_set_expr(pc, token_index, mandatory);
}
/*
ParamDecl = option("noalias" | "comptime") option(Symbol ":") (TypeExpr | "var" | "...")
*/
static AstNode *ast_parse_param_decl(ParseContext *pc, size_t *token_index) {
    Token *token = &pc->tokens->at(*token_index);
    AstNode *node = ast_create_node(pc, NodeTypeParamDecl, token);

    // Optional qualifier: "noalias" or "comptime" (at most one).
    if (token->id == TokenIdKeywordNoAlias) {
        node->data.param_decl.is_noalias = true;
        *token_index += 1;
        token = &pc->tokens->at(*token_index);
    } else if (token->id == TokenIdKeywordCompTime) {
        node->data.param_decl.is_inline = true;
        *token_index += 1;
        token = &pc->tokens->at(*token_index);
    }

    // Optional "name :" prefix — consumed only when the ":" is present.
    node->data.param_decl.name = nullptr;
    if (token->id == TokenIdSymbol &&
        pc->tokens->at(*token_index + 1).id == TokenIdColon)
    {
        node->data.param_decl.name = token_buf(token);
        *token_index += 2;
    }

    // The parameter's "type": "...", "var", or an actual type expression.
    Token *type_tok = &pc->tokens->at(*token_index);
    switch (type_tok->id) {
        case TokenIdEllipsis3:
            *token_index += 1;
            node->data.param_decl.is_var_args = true;
            break;
        case TokenIdKeywordVar:
            *token_index += 1;
            node->data.param_decl.var_token = type_tok;
            break;
        default:
            node->data.param_decl.type = ast_parse_type_expr(pc, token_index, true);
            break;
    }
    return node;
}
// Parses "(" list(ParamDecl, ",") ")" into `params`; sets *is_var_args when
// the final declaration is "..." (which must terminate the list).
static void ast_parse_param_decl_list(ParseContext *pc, size_t *token_index,
        ZigList<AstNode *> *params, bool *is_var_args)
{
    *is_var_args = false;
    ast_eat_token(pc, token_index, TokenIdLParen);
    for (;;) {
        Token *token = &pc->tokens->at(*token_index);
        if (token->id == TokenIdRParen) {
            *token_index += 1;
            return;
        }

        AstNode *param_decl_node = ast_parse_param_decl(pc, token_index);
        assert(param_decl_node);
        params->append(param_decl_node);

        bool expect_end = param_decl_node->data.param_decl.is_var_args;
        *is_var_args = expect_end;

        // After a declaration: ")" ends the list, otherwise "," continues it.
        token = &pc->tokens->at(*token_index);
        *token_index += 1;
        if (token->id == TokenIdRParen)
            return;
        ast_expect_token(pc, token, TokenIdComma);
        if (expect_end) {
            // "..." was just parsed; nothing may follow except ")".
            ast_eat_token(pc, token_index, TokenIdRParen);
            return;
        }
    }
    zig_unreachable();
}
// Parses list(Expression, ",") ")" into `params` — the opening "(" has
// already been consumed by the caller.
static void ast_parse_fn_call_param_list(ParseContext *pc, size_t *token_index, ZigList<AstNode*> *params) {
    for (;;) {
        Token *token = &pc->tokens->at(*token_index);
        if (token->id == TokenIdRParen) {
            *token_index += 1;
            return;
        }

        params->append(ast_parse_expression(pc, token_index, true));

        // After an argument: ")" ends the list, otherwise "," is required.
        token = &pc->tokens->at(*token_index);
        *token_index += 1;
        if (token->id == TokenIdRParen)
            return;
        ast_expect_token(pc, token, TokenIdComma);
    }
    zig_unreachable();
}
/*
GroupedExpression : token(LParen) Expression token(RParen)
*/
static AstNode *ast_parse_grouped_expr(ParseContext *pc, size_t *token_index, bool mandatory) {
    Token *l_paren = &pc->tokens->at(*token_index);
    if (l_paren->id != TokenIdLParen) {
        if (mandatory) {
            ast_expect_token(pc, l_paren, TokenIdLParen);
        } else {
            return nullptr;
        }
    }

    *token_index += 1;
    AstNode *node = ast_create_node(pc, NodeTypeGroupedExpr, l_paren);
    node->data.grouped_expr = ast_parse_expression(pc, token_index, true);
    // Consistency fix: use ast_eat_token (check, then advance) instead of
    // manually advancing the index before validating the token — same net
    // behavior, same error token, matches the rest of this parser.
    ast_eat_token(pc, token_index, TokenIdRParen);
    return node;
}
/*
ArrayType : "[" option(Expression) "]" option("align" "(" Expression option(":" Integer ":" Integer) ")")) option("const") option("volatile") TypeExpr
*/
static AstNode *ast_parse_array_type_expr(ParseContext *pc, size_t *token_index, bool mandatory) {
    Token *l_bracket = &pc->tokens->at(*token_index);
    if (l_bracket->id != TokenIdLBracket) {
        if (!mandatory)
            return nullptr;
        ast_expect_token(pc, l_bracket, TokenIdLBracket);
    }
    *token_index += 1;

    AstNode *node = ast_create_node(pc, NodeTypeArrayType, l_bracket);
    node->data.array_type.size = ast_parse_expression(pc, token_index, false);
    ast_eat_token(pc, token_index, TokenIdRBracket);

    // Optional "align(expr)" qualifier.
    Token *token = &pc->tokens->at(*token_index);
    if (token->id == TokenIdKeywordAlign) {
        *token_index += 1;
        ast_eat_token(pc, token_index, TokenIdLParen);
        node->data.array_type.align_expr = ast_parse_expression(pc, token_index, true);
        ast_eat_token(pc, token_index, TokenIdRParen);
        token = &pc->tokens->at(*token_index);
    }
    // Optional "const", then optional "volatile" — accepted in that order only.
    if (token->id == TokenIdKeywordConst) {
        *token_index += 1;
        node->data.array_type.is_const = true;
        token = &pc->tokens->at(*token_index);
    }
    if (token->id == TokenIdKeywordVolatile) {
        *token_index += 1;
        node->data.array_type.is_volatile = true;
    }
    node->data.array_type.child_type = ast_parse_type_expr(pc, token_index, true);
    return node;
}
/*
AsmInputItem : token(LBracket) token(Symbol) token(RBracket) token(String) token(LParen) Expression token(RParen)
*/
static void ast_parse_asm_input_item(ParseContext *pc, size_t *token_index, AstNode *node) {
    // "[alias] "constraint" (expr)"
    ast_eat_token(pc, token_index, TokenIdLBracket);
    Token *alias = ast_eat_token(pc, token_index, TokenIdSymbol);
    ast_eat_token(pc, token_index, TokenIdRBracket);
    Token *constraint = ast_eat_token(pc, token_index, TokenIdStringLiteral);

    ast_eat_token(pc, token_index, TokenIdLParen);
    AstNode *expr_node = ast_parse_expression(pc, token_index, true);
    ast_eat_token(pc, token_index, TokenIdRParen);

    AsmInput *asm_input = allocate<AsmInput>(1);
    asm_input->asm_symbolic_name = token_buf(alias);
    asm_input->constraint = token_buf(constraint);
    asm_input->expr = expr_node;
    node->data.asm_expr.input_list.append(asm_input);
}
/*
AsmOutputItem : "[" "Symbol" "]" "String" "(" ("Symbol" | "->" PrefixOpExpression) ")"
*/
static void ast_parse_asm_output_item(ParseContext *pc, size_t *token_index, AstNode *node) {
    ast_eat_token(pc, token_index, TokenIdLBracket);
    Token *alias = ast_eat_token(pc, token_index, TokenIdSymbol);
    ast_eat_token(pc, token_index, TokenIdRBracket);
    Token *constraint = ast_eat_token(pc, token_index, TokenIdStringLiteral);

    AsmOutput *asm_output = allocate<AsmOutput>(1);
    ast_eat_token(pc, token_index, TokenIdLParen);

    // The output target is either a plain variable name or "-> Type".
    Token *token = &pc->tokens->at(*token_index);
    *token_index += 1;
    switch (token->id) {
        case TokenIdSymbol:
            asm_output->variable_name = token_buf(token);
            break;
        case TokenIdArrow:
            asm_output->return_type = ast_parse_type_expr(pc, token_index, true);
            break;
        default:
            ast_invalid_token_error(pc, token);
    }
    ast_eat_token(pc, token_index, TokenIdRParen);

    asm_output->asm_symbolic_name = token_buf(alias);
    asm_output->constraint = token_buf(constraint);
    node->data.asm_expr.output_list.append(asm_output);
}
/*
AsmClobbers: token(Colon) list(token(String), token(Comma))
*/
static void ast_parse_asm_clobbers(ParseContext *pc, size_t *token_index, AstNode *node) {
    // The clobber section is optional and introduced by ":".
    if (pc->tokens->at(*token_index).id != TokenIdColon)
        return;
    *token_index += 1;

    for (;;) {
        Token *string_tok = &pc->tokens->at(*token_index);
        ast_expect_token(pc, string_tok, TokenIdStringLiteral);
        *token_index += 1;
        node->data.asm_expr.clobber_list.append(token_buf(string_tok));

        // Continue only on a "," that is not immediately followed by ")".
        if (pc->tokens->at(*token_index).id != TokenIdComma)
            break;
        *token_index += 1;
        if (pc->tokens->at(*token_index).id == TokenIdRParen)
            break;
    }
}
/*
AsmInput : token(Colon) list(AsmInputItem, token(Comma)) option(AsmClobbers)
*/
static void ast_parse_asm_input(ParseContext *pc, size_t *token_index, AstNode *node) {
    // The input section is optional and introduced by ":".
    if (pc->tokens->at(*token_index).id != TokenIdColon)
        return;
    *token_index += 1;

    // "::" means an empty input list followed directly by clobbers.
    if (pc->tokens->at(*token_index).id == TokenIdColon) {
        ast_parse_asm_clobbers(pc, token_index, node);
        return;
    }

    for (;;) {
        ast_parse_asm_input_item(pc, token_index, node);
        if (pc->tokens->at(*token_index).id != TokenIdComma)
            break;
        *token_index += 1;
        // A "," followed by ":" or ")" ends the list (trailing comma).
        TokenId next_id = pc->tokens->at(*token_index).id;
        if (next_id == TokenIdColon || next_id == TokenIdRParen)
            break;
    }
    ast_parse_asm_clobbers(pc, token_index, node);
}
/*
AsmOutput : token(Colon) list(AsmOutputItem, token(Comma)) option(AsmInput)
*/
static void ast_parse_asm_output(ParseContext *pc, size_t *token_index, AstNode *node) {
    // The output section is optional and introduced by ":".
    if (pc->tokens->at(*token_index).id != TokenIdColon)
        return;
    *token_index += 1;

    // "::" means an empty output list followed directly by the input section.
    if (pc->tokens->at(*token_index).id == TokenIdColon) {
        ast_parse_asm_input(pc, token_index, node);
        return;
    }

    for (;;) {
        ast_parse_asm_output_item(pc, token_index, node);
        if (pc->tokens->at(*token_index).id != TokenIdComma)
            break;
        *token_index += 1;
        // A "," followed by ":" or ")" ends the list (trailing comma).
        TokenId next_id = pc->tokens->at(*token_index).id;
        if (next_id == TokenIdColon || next_id == TokenIdRParen)
            break;
    }
    ast_parse_asm_input(pc, token_index, node);
}
/*
AsmExpression : token(Asm) option(token(Volatile)) token(LParen) token(String) option(AsmOutput) token(RParen)
*/
static AstNode *ast_parse_asm_expr(ParseContext *pc, size_t *token_index, bool mandatory) {
    Token *asm_token = &pc->tokens->at(*token_index);
    if (asm_token->id != TokenIdKeywordAsm) {
        if (!mandatory)
            return nullptr;
        ast_expect_token(pc, asm_token, TokenIdKeywordAsm);
    }

    AstNode *node = ast_create_node(pc, NodeTypeAsmExpr, asm_token);
    *token_index += 1;

    // Optional "volatile" before the parenthesized template.
    Token *lparen_tok = &pc->tokens->at(*token_index);
    if (lparen_tok->id == TokenIdKeywordVolatile) {
        node->data.asm_expr.is_volatile = true;
        *token_index += 1;
        lparen_tok = &pc->tokens->at(*token_index);
    }
    ast_expect_token(pc, lparen_tok, TokenIdLParen);
    *token_index += 1;

    Token *template_tok = ast_eat_token(pc, token_index, TokenIdStringLiteral);
    node->data.asm_expr.asm_template = token_buf(template_tok);
    // NOTE(review): this calls parse_asm_template while the sibling helper
    // defined above is named ast_parse_asm_template — confirm which symbol
    // is intended after the merge.
    parse_asm_template(pc, node);
    ast_parse_asm_output(pc, token_index, node);
    ast_eat_token(pc, token_index, TokenIdRParen);
    return node;
}
/*
SuspendExpression(body) = "suspend" option( body )
*/
static AstNode *ast_parse_suspend_block(ParseContext *pc, size_t *token_index, bool mandatory) {
    Token *suspend_token = &pc->tokens->at(*token_index);
    if (suspend_token->id != TokenIdKeywordSuspend) {
        if (!mandatory)
            return nullptr;
        ast_expect_token(pc, suspend_token, TokenIdKeywordSuspend);
        zig_unreachable();
    }
    *token_index += 1;

    // Only "suspend {" forms a suspend block here; a bare "suspend" is left
    // for primary-expression parsing, so back up over the keyword and decline.
    Token *lbrace = &pc->tokens->at(*token_index);
    if (lbrace->id == TokenIdLBrace) {
        AstNode *node = ast_create_node(pc, NodeTypeSuspend, suspend_token);
        node->data.suspend.block = ast_parse_block(pc, token_index, true);
        return node;
    }
    if (mandatory) {
        ast_expect_token(pc, lbrace, TokenIdLBrace);
        zig_unreachable();
    }
    *token_index -= 1;
    return nullptr;
}
/*
CompTimeExpression(body) = "comptime" body
*/
static AstNode *ast_parse_comptime_expr(ParseContext *pc, size_t *token_index, bool require_block_body, bool mandatory) {
    Token *comptime_token = &pc->tokens->at(*token_index);
    if (comptime_token->id != TokenIdKeywordCompTime) {
        if (!mandatory)
            return nullptr;
        ast_expect_token(pc, comptime_token, TokenIdKeywordCompTime);
        zig_unreachable();
    }
    *token_index += 1;

    AstNode *node = ast_create_node(pc, NodeTypeCompTime, comptime_token);
    // Callers that need a statement form require a block body; expression
    // position allows either a block or a plain expression.
    node->data.comptime_expr.expr = require_block_body
        ? ast_parse_block(pc, token_index, true)
        : ast_parse_block_or_expression(pc, token_index, true);
    return node;
}
/*
PrimaryExpression = Integer | Float | String | CharLiteral | KeywordLiteral | GroupedExpression | BlockExpression(BlockOrExpression) | Symbol | ("@" Symbol FnCallExpression) | ArrayType | FnProto | AsmExpression | ContainerDecl | ("continue" option(":" Symbol)) | ErrorSetDecl | PromiseType
KeywordLiteral = "true" | "false" | "null" | "undefined" | "error" | "unreachable" | "suspend"
ErrorSetDecl = "error" Token(Dot) "{" list(Symbol, ",") "}"
*/
static AstNode *ast_parse_primary_expr(ParseContext *pc, size_t *token_index, bool mandatory) {
    Token *token = &pc->tokens->at(*token_index);
    // Literal tokens: each maps directly to a dedicated leaf node kind.
    if (token->id == TokenIdIntLiteral) {
        AstNode *node = ast_create_node(pc, NodeTypeIntLiteral, token);
        node->data.int_literal.bigint = token_bigint(token);
        *token_index += 1;
        return node;
    } else if (token->id == TokenIdFloatLiteral) {
        AstNode *node = ast_create_node(pc, NodeTypeFloatLiteral, token);
        node->data.float_literal.bigfloat = token_bigfloat(token);
        node->data.float_literal.overflow = token->data.float_lit.overflow;
        *token_index += 1;
        return node;
    } else if (token->id == TokenIdStringLiteral) {
        AstNode *node = ast_create_node(pc, NodeTypeStringLiteral, token);
        node->data.string_literal.buf = token_buf(token);
        node->data.string_literal.c = token->data.str_lit.is_c_str;
        *token_index += 1;
        return node;
    } else if (token->id == TokenIdCharLiteral) {
        AstNode *node = ast_create_node(pc, NodeTypeCharLiteral, token);
        node->data.char_literal.value = token_char_lit(token);
        *token_index += 1;
        return node;
    } else if (token->id == TokenIdKeywordTrue) {
        AstNode *node = ast_create_node(pc, NodeTypeBoolLiteral, token);
        node->data.bool_literal.value = true;
        *token_index += 1;
        return node;
    } else if (token->id == TokenIdKeywordFalse) {
        AstNode *node = ast_create_node(pc, NodeTypeBoolLiteral, token);
        node->data.bool_literal.value = false;
        *token_index += 1;
        return node;
    } else if (token->id == TokenIdKeywordNull) {
        AstNode *node = ast_create_node(pc, NodeTypeNullLiteral, token);
        *token_index += 1;
        return node;
    } else if (token->id == TokenIdKeywordContinue) {
        // "continue" with an optional ":label" target.
        AstNode *node = ast_create_node(pc, NodeTypeContinue, token);
        *token_index += 1;
        Token *maybe_colon_token = &pc->tokens->at(*token_index);
        if (maybe_colon_token->id == TokenIdColon) {
            *token_index += 1;
            Token *name = ast_eat_token(pc, token_index, TokenIdSymbol);
            node->data.continue_expr.name = token_buf(name);
        }
        return node;
    } else if (token->id == TokenIdKeywordUndefined) {
        AstNode *node = ast_create_node(pc, NodeTypeUndefinedLiteral, token);
        *token_index += 1;
        return node;
    } else if (token->id == TokenIdKeywordUnreachable) {
        AstNode *node = ast_create_node(pc, NodeTypeUnreachable, token);
        *token_index += 1;
        return node;
    } else if (token->id == TokenIdKeywordSuspend) {
        // Bare "suspend" (no block) — "suspend { ... }" is handled elsewhere.
        AstNode *node = ast_create_node(pc, NodeTypeSuspend, token);
        *token_index += 1;
        return node;
    } else if (token->id == TokenIdKeywordPromise) {
        // "promise" with an optional "->PayloadType" suffix.
        AstNode *node = ast_create_node(pc, NodeTypePromiseType, token);
        *token_index += 1;
        Token *arrow_tok = &pc->tokens->at(*token_index);
        if (arrow_tok->id == TokenIdArrow) {
            *token_index += 1;
            node->data.promise_type.payload_type = ast_parse_type_expr(pc, token_index, true);
        }
        return node;
    } else if (token->id == TokenIdKeywordError) {
        // Two-token lookahead distinguishes the bare "error" type from an
        // "error.{A, B, ...}" error-set declaration.
        Token *dot_token = &pc->tokens->at(*token_index + 1);
        Token *brace_token = &pc->tokens->at(*token_index + 2);
        if (dot_token->id != TokenIdDot || brace_token->id != TokenIdLBrace) {
            AstNode *node = ast_create_node(pc, NodeTypeErrorType, token);
            *token_index += 1;
            return node;
        }
        AstNode *node = ast_create_node(pc, NodeTypeErrorSetDecl, token);
        *token_index += 3;
        // Comma-separated symbol list terminated by "}"; trailing comma allowed.
        for (;;) {
            Token *item_tok = &pc->tokens->at(*token_index);
            if (item_tok->id == TokenIdRBrace) {
                *token_index += 1;
                return node;
            } else if (item_tok->id == TokenIdSymbol) {
                AstNode *symbol_node = ast_parse_symbol(pc, token_index);
                node->data.err_set_decl.decls.append(symbol_node);
                Token *opt_comma_tok = &pc->tokens->at(*token_index);
                if (opt_comma_tok->id == TokenIdComma) {
                    *token_index += 1;
                }
            } else {
                ast_invalid_token_error(pc, item_tok);
            }
        }
    } else if (token->id == TokenIdAtSign) {
        // "@name(...)": builtin function call. "export" is a keyword token
        // but is still accepted as a builtin name here.
        *token_index += 1;
        Token *name_tok = &pc->tokens->at(*token_index);
        Buf *name_buf;
        if (name_tok->id == TokenIdKeywordExport) {
            name_buf = buf_create_from_str("export");
            *token_index += 1;
        } else if (name_tok->id == TokenIdSymbol) {
            name_buf = token_buf(name_tok);
            *token_index += 1;
        } else {
            ast_expect_token(pc, name_tok, TokenIdSymbol);
            zig_unreachable();
        }
        AstNode *name_node = ast_create_node(pc, NodeTypeSymbol, name_tok);
        name_node->data.symbol_expr.symbol = name_buf;
        AstNode *node = ast_create_node(pc, NodeTypeFnCallExpr, token);
        node->data.fn_call_expr.fn_ref_expr = name_node;
        ast_eat_token(pc, token_index, TokenIdLParen);
        ast_parse_fn_call_param_list(pc, token_index, &node->data.fn_call_expr.params);
        node->data.fn_call_expr.is_builtin = true;
        return node;
    }
    // Fallthrough: try each remaining alternative in order, non-mandatory.
    AstNode *block_expr_node = ast_parse_block_expr(pc, token_index, false);
    if (block_expr_node) {
        return block_expr_node;
    }
    if (token->id == TokenIdSymbol) {
        *token_index += 1;
        AstNode *node = ast_create_node(pc, NodeTypeSymbol, token);
        node->data.symbol_expr.symbol = token_buf(token);
        return node;
    }
    AstNode *grouped_expr_node = ast_parse_grouped_expr(pc, token_index, false);
    if (grouped_expr_node) {
        return grouped_expr_node;
    }
    AstNode *array_type_node = ast_parse_array_type_expr(pc, token_index, false);
    if (array_type_node) {
        return array_type_node;
    }
    AstNode *fn_proto_node = ast_parse_fn_proto(pc, token_index, false, VisibModPrivate);
    if (fn_proto_node) {
        return fn_proto_node;
    }
    AstNode *asm_expr = ast_parse_asm_expr(pc, token_index, false);
    if (asm_expr) {
        return asm_expr;
    }
    AstNode *container_decl = ast_parse_container_decl(pc, token_index, false);
    if (container_decl)
        return container_decl;
    // Nothing matched: fail hard only when a primary expression was required.
    if (!mandatory)
        return nullptr;
    ast_invalid_token_error(pc, token);
}
/*
CurlySuffixExpression : PrefixOpExpression option(ContainerInitExpression)
ContainerInitExpression : token(Dot) token(LBrace) ContainerInitBody token(RBrace)
ContainerInitBody : list(StructLiteralField, token(Comma)) | list(Expression, token(Comma))
*/
static AstNode *ast_parse_curly_suffix_expr(ParseContext *pc, size_t *token_index, bool mandatory) {
    AstNode *prefix_op_expr = ast_parse_prefix_op_expr(pc, token_index, mandatory);
    if (!prefix_op_expr) {
        return nullptr;
    }
    // Each iteration may wrap the current expression in one more ".{...}"
    // container-init suffix; the loop exits when no ".{" follows.
    while (true) {
        Token *first_token = &pc->tokens->at(*token_index);
        Token *second_token = &pc->tokens->at(*token_index + 1);
        if (first_token->id == TokenIdDot && second_token->id == TokenIdLBrace) {
            *token_index += 2;
            AstNode *node = ast_create_node(pc, NodeTypeContainerInitExpr, first_token);
            node->data.container_init_expr.type = prefix_op_expr;
            // A leading "." after "{" selects struct-style (".field = expr")
            // initialization; anything else is array-style (bare expressions).
            Token *token = &pc->tokens->at(*token_index);
            if (token->id == TokenIdDot) {
                node->data.container_init_expr.kind = ContainerInitKindStruct;
                for (;;) {
                    if (token->id == TokenIdDot) {
                        // ".name = expr" field initializer.
                        ast_eat_token(pc, token_index, TokenIdDot);
                        Token *field_name_tok = ast_eat_token(pc, token_index, TokenIdSymbol);
                        ast_eat_token(pc, token_index, TokenIdEq);
                        AstNode *field_node = ast_create_node(pc, NodeTypeStructValueField, token);
                        field_node->data.struct_val_field.name = token_buf(field_name_tok);
                        field_node->data.struct_val_field.expr = ast_parse_expression(pc, token_index, true);
                        node->data.container_init_expr.entries.append(field_node);
                        // After a field: "," continues (re-reading `token`
                        // for the next iteration), "}" ends the literal.
                        Token *comma_tok = &pc->tokens->at(*token_index);
                        if (comma_tok->id == TokenIdComma) {
                            *token_index += 1;
                            token = &pc->tokens->at(*token_index);
                            continue;
                        } else if (comma_tok->id != TokenIdRBrace) {
                            ast_expect_token(pc, comma_tok, TokenIdRBrace);
                        } else {
                            *token_index += 1;
                            break;
                        }
                    } else if (token->id == TokenIdRBrace) {
                        *token_index += 1;
                        break;
                    } else {
                        ast_invalid_token_error(pc, token);
                    }
                }
            } else {
                node->data.container_init_expr.kind = ContainerInitKindArray;
                for (;;) {
                    if (token->id == TokenIdRBrace) {
                        *token_index += 1;
                        break;
                    } else {
                        AstNode *elem_node = ast_parse_expression(pc, token_index, true);
                        node->data.container_init_expr.entries.append(elem_node);
                        // Same comma/right-brace handling as the struct case.
                        Token *comma_tok = &pc->tokens->at(*token_index);
                        if (comma_tok->id == TokenIdComma) {
                            *token_index += 1;
                            token = &pc->tokens->at(*token_index);
                            continue;
                        } else if (comma_tok->id != TokenIdRBrace) {
                            ast_expect_token(pc, comma_tok, TokenIdRBrace);
                        } else {
                            *token_index += 1;
                            break;
                        }
                    }
                }
            }
            // The init expression becomes the base for any further suffixes.
            prefix_op_expr = node;
        } else {
            return prefix_op_expr;
        }
    }
}
// Parses the remainder of a function prototype after the "fn" token:
// optional name, parameter list, optional align/section clauses, and the
// return type (which may be "var", "!T", or the bare "error" type).
// The caller supplies visibility, calling convention, extern-ness and an
// optional async allocator type node already parsed from the prefix.
static AstNode *ast_parse_fn_proto_partial(ParseContext *pc, size_t *token_index, Token *fn_token,
        AstNode *async_allocator_type_node, CallingConvention cc, bool is_extern, VisibMod visib_mod)
{
    AstNode *node = ast_create_node(pc, NodeTypeFnProto, fn_token);
    node->data.fn_proto.visib_mod = visib_mod;
    node->data.fn_proto.cc = cc;
    node->data.fn_proto.is_extern = is_extern;
    node->data.fn_proto.async_allocator_type = async_allocator_type_node;
    // Function name is optional (e.g. function types / anonymous protos).
    Token *fn_name = &pc->tokens->at(*token_index);
    if (fn_name->id == TokenIdSymbol) {
        *token_index += 1;
        node->data.fn_proto.name = token_buf(fn_name);
    } else {
        node->data.fn_proto.name = nullptr;
    }
    ast_parse_param_decl_list(pc, token_index, &node->data.fn_proto.params, &node->data.fn_proto.is_var_args);
    // Optional "align(expr)" clause.
    Token *next_token = &pc->tokens->at(*token_index);
    if (next_token->id == TokenIdKeywordAlign) {
        *token_index += 1;
        ast_eat_token(pc, token_index, TokenIdLParen);
        node->data.fn_proto.align_expr = ast_parse_expression(pc, token_index, true);
        ast_eat_token(pc, token_index, TokenIdRParen);
        next_token = &pc->tokens->at(*token_index);
    }
    // Optional "section(expr)" clause.
    if (next_token->id == TokenIdKeywordSection) {
        *token_index += 1;
        ast_eat_token(pc, token_index, TokenIdLParen);
        node->data.fn_proto.section_expr = ast_parse_expression(pc, token_index, true);
        ast_eat_token(pc, token_index, TokenIdRParen);
        next_token = &pc->tokens->at(*token_index);
    }
    if (next_token->id == TokenIdKeywordVar) {
        // "var" return type: record the token instead of a type node.
        node->data.fn_proto.return_var_token = next_token;
        *token_index += 1;
        next_token = &pc->tokens->at(*token_index);
    } else {
        if (next_token->id == TokenIdKeywordError) {
            Token *maybe_lbrace_tok = &pc->tokens->at(*token_index + 1);
            if (maybe_lbrace_tok->id == TokenIdLBrace) {
                // "error {" — consume only the "error" keyword as the return
                // type, leaving "{" for the caller (presumably the function
                // body); "error.{...}" set declarations take the other path.
                *token_index += 1;
                node->data.fn_proto.return_type = ast_create_node(pc, NodeTypeErrorType, next_token);
                return node;
            }
        } else if (next_token->id == TokenIdBang) {
            // "!T": inferred error set on the return type.
            *token_index += 1;
            node->data.fn_proto.auto_err_set = true;
            next_token = &pc->tokens->at(*token_index);
        }
        node->data.fn_proto.return_type = ast_parse_type_expr(pc, token_index, true);
    }
    return node;
}
/*
SuffixOpExpression = ("async" option("<" SuffixOpExpression ">") SuffixOpExpression FnCallExpression) | PrimaryExpression option(FnCallExpression | ArrayAccessExpression | FieldAccessExpression | SliceExpression | ".*" | ".?")
FnCallExpression : token(LParen) list(Expression, token(Comma)) token(RParen)
ArrayAccessExpression : token(LBracket) Expression token(RBracket)
SliceExpression = "[" Expression ".." option(Expression) "]"
FieldAccessExpression : token(Dot) token(Symbol)
StructLiteralField : token(Dot) token(Symbol) token(Eq) Expression
*/
static AstNode *ast_parse_suffix_op_expr(ParseContext *pc, size_t *token_index, bool mandatory) {
    AstNode *primary_expr;
    Token *async_token = &pc->tokens->at(*token_index);
    if (async_token->id == TokenIdKeywordAsync) {
        // "async" prefix: optional "<allocator>" then either an async fn
        // prototype ("async fn ...") or a call expression to mark async.
        *token_index += 1;
        AstNode *allocator_expr_node = nullptr;
        Token *async_lparen_tok = &pc->tokens->at(*token_index);
        if (async_lparen_tok->id == TokenIdCmpLessThan) {
            *token_index += 1;
            allocator_expr_node = ast_parse_prefix_op_expr(pc, token_index, true);
            ast_eat_token(pc, token_index, TokenIdCmpGreaterThan);
        }
        Token *fncall_token = &pc->tokens->at(*token_index);
        if (fncall_token->id == TokenIdKeywordFn) {
            *token_index += 1;
            return ast_parse_fn_proto_partial(pc, token_index, fncall_token, allocator_expr_node, CallingConventionAsync,
                    false, VisibModPrivate);
        }
        AstNode *node = ast_parse_suffix_op_expr(pc, token_index, true);
        // Whatever follows "async" must ultimately be a call expression.
        if (node->type != NodeTypeFnCallExpr) {
            ast_error(pc, fncall_token, "expected function call, found '%s'", token_name(fncall_token->id));
        }
        node->data.fn_call_expr.is_async = true;
        node->data.fn_call_expr.async_allocator = allocator_expr_node;
        assert(node->data.fn_call_expr.fn_ref_expr != nullptr);
        primary_expr = node;
    } else {
        primary_expr = ast_parse_primary_expr(pc, token_index, mandatory);
        if (!primary_expr)
            return nullptr;
    }
    // Repeatedly wrap the expression in suffix operators until none match.
    while (true) {
        Token *first_token = &pc->tokens->at(*token_index);
        if (first_token->id == TokenIdLParen) {
            // Call: expr(args...)
            *token_index += 1;
            AstNode *node = ast_create_node(pc, NodeTypeFnCallExpr, first_token);
            node->data.fn_call_expr.fn_ref_expr = primary_expr;
            ast_parse_fn_call_param_list(pc, token_index, &node->data.fn_call_expr.params);
            primary_expr = node;
        } else if (first_token->id == TokenIdLBracket) {
            // Either an index (expr[i]) or a slice (expr[a..b?]), decided by
            // whether ".." follows the first bracketed expression.
            *token_index += 1;
            AstNode *expr_node = ast_parse_expression(pc, token_index, true);
            Token *ellipsis_or_r_bracket = &pc->tokens->at(*token_index);
            if (ellipsis_or_r_bracket->id == TokenIdEllipsis2) {
                *token_index += 1;
                AstNode *node = ast_create_node(pc, NodeTypeSliceExpr, first_token);
                node->data.slice_expr.array_ref_expr = primary_expr;
                node->data.slice_expr.start = expr_node;
                node->data.slice_expr.end = ast_parse_expression(pc, token_index, false);
                ast_eat_token(pc, token_index, TokenIdRBracket);
                primary_expr = node;
            } else if (ellipsis_or_r_bracket->id == TokenIdRBracket) {
                *token_index += 1;
                AstNode *node = ast_create_node(pc, NodeTypeArrayAccessExpr, first_token);
                node->data.array_access_expr.array_ref_expr = primary_expr;
                node->data.array_access_expr.subscript = expr_node;
                primary_expr = node;
            } else {
                ast_invalid_token_error(pc, ellipsis_or_r_bracket);
            }
        } else if (first_token->id == TokenIdDot) {
            // ".name" field access, ".*" pointer deref, ".?" optional unwrap.
            Token *token = &pc->tokens->at(*token_index + 1);
            if (token->id == TokenIdSymbol) {
                *token_index += 2;
                AstNode *node = ast_create_node(pc, NodeTypeFieldAccessExpr, first_token);
                node->data.field_access_expr.struct_expr = primary_expr;
                node->data.field_access_expr.field_name = token_buf(token);
                primary_expr = node;
            } else if (token->id == TokenIdStar) {
                *token_index += 2;
                AstNode *node = ast_create_node(pc, NodeTypePtrDeref, first_token);
                node->data.ptr_deref_expr.target = primary_expr;
                primary_expr = node;
            } else if (token->id == TokenIdQuestion) {
                *token_index += 2;
                AstNode *node = ast_create_node(pc, NodeTypeUnwrapOptional, first_token);
                node->data.unwrap_optional.expr = primary_expr;
                primary_expr = node;
            } else {
                // A "." not followed by symbol/*/? is not ours (e.g. ".{").
                return primary_expr;
            }
        } else {
            return primary_expr;
        }
    }
}
// Maps a prefix-operator token to its PrefixOp value, or PrefixOpInvalid
// for any token that is not a prefix operator.
static PrefixOp tok_to_prefix_op(Token *token) {
    switch (token->id) {
        case TokenIdBang:
            return PrefixOpBoolNot;
        case TokenIdDash:
            return PrefixOpNegation;
        case TokenIdMinusPercent:
            return PrefixOpNegationWrap;
        case TokenIdTilde:
            return PrefixOpBinNot;
        case TokenIdQuestion:
            return PrefixOpOptional;
        case TokenIdAmpersand:
            return PrefixOpAddrOf;
        default:
            return PrefixOpInvalid;
    }
}
// Parses the remainder of a pointer type after the caller has already
// consumed the leading '*' (or '[*]') token, which is passed as star_tok:
//   option("align" "(" Expression option(":" Integer ":" Integer) ")")
//   option("const") option("volatile") PrefixOpExpression
static AstNode *ast_parse_pointer_type(ParseContext *pc, size_t *token_index, Token *star_tok) {
    AstNode *node = ast_create_node(pc, NodeTypePointerType, star_tok);
    node->data.pointer_type.star_token = star_tok;
    Token *token = &pc->tokens->at(*token_index);
    if (token->id == TokenIdKeywordAlign) {
        *token_index += 1;
        ast_eat_token(pc, token_index, TokenIdLParen);
        node->data.pointer_type.align_expr = ast_parse_expression(pc, token_index, true);
        token = &pc->tokens->at(*token_index);
        // Optional bit-range suffix inside the parens: align(N:bit_offset:host_int_bytes)
        if (token->id == TokenIdColon) {
            *token_index += 1;
            Token *bit_offset_start_tok = ast_eat_token(pc, token_index, TokenIdIntLiteral);
            ast_eat_token(pc, token_index, TokenIdColon);
            Token *host_int_bytes_tok = ast_eat_token(pc, token_index, TokenIdIntLiteral);
            node->data.pointer_type.bit_offset_start = token_bigint(bit_offset_start_tok);
            node->data.pointer_type.host_int_bytes = token_bigint(host_int_bytes_tok);
        }
        ast_eat_token(pc, token_index, TokenIdRParen);
        // Re-read: ast_eat_token advanced the index past ')'.
        token = &pc->tokens->at(*token_index);
    }
    if (token->id == TokenIdKeywordConst) {
        *token_index += 1;
        node->data.pointer_type.is_const = true;
        token = &pc->tokens->at(*token_index);
    }
    if (token->id == TokenIdKeywordVolatile) {
        *token_index += 1;
        node->data.pointer_type.is_volatile = true;
    }
    // Finally, the pointee expression itself (mandatory).
    node->data.pointer_type.op_expr = ast_parse_prefix_op_expr(pc, token_index, true);
    return node;
}
/*
PrefixOpExpression = PrefixOp ErrorSetExpr | SuffixOpExpression
PrefixOp = "!" | "-" | "~" | (("*" | "[*]") option("align" "(" Expression option(":" Integer ":" Integer) ")" ) option("const") option("volatile")) | "?" | "??" | "-%" | "try" | "await"
*/
// Parses a prefix-operator expression (see grammar comment above). Pointer
// types, "try", and "await" get dedicated handling; all other prefix
// operators go through tok_to_prefix_op. Falls back to a suffix-op
// expression when no prefix operator is present.
static AstNode *ast_parse_prefix_op_expr(ParseContext *pc, size_t *token_index, bool mandatory) {
    Token *token = &pc->tokens->at(*token_index);
    // '*' and '[*]' begin a pointer type.
    if (token->id == TokenIdStar || token->id == TokenIdBracketStarBracket) {
        *token_index += 1;
        return ast_parse_pointer_type(pc, token_index, token);
    }
    // '**' is lexed as a single token but denotes a pointer to a pointer,
    // so build two nested pointer-type nodes from the one token.
    if (token->id == TokenIdStarStar) {
        *token_index += 1;
        AstNode *child_node = ast_parse_pointer_type(pc, token_index, token);
        // The inner '*' is one column to the right of the token start.
        child_node->column += 1;
        AstNode *parent_node = ast_create_node(pc, NodeTypePointerType, token);
        parent_node->data.pointer_type.star_token = token;
        parent_node->data.pointer_type.op_expr = child_node;
        return parent_node;
    }
    if (token->id == TokenIdKeywordTry) {
        return ast_parse_try_expr(pc, token_index);
    }
    if (token->id == TokenIdKeywordAwait) {
        return ast_parse_await_expr(pc, token_index);
    }
    PrefixOp prefix_op = tok_to_prefix_op(token);
    if (prefix_op == PrefixOpInvalid) {
        // Not a prefix operator; delegate (mandatory-ness passes through).
        return ast_parse_suffix_op_expr(pc, token_index, mandatory);
    }
    *token_index += 1;
    AstNode *node = ast_create_node(pc, NodeTypePrefixOpExpr, token);
    // The operand of a prefix op is an error-set expression (mandatory).
    AstNode *prefix_op_expr = ast_parse_error_set_expr(pc, token_index, true);
    node->data.prefix_op_expr.primary_expr = prefix_op_expr;
    node->data.prefix_op_expr.prefix_op = prefix_op;
    return node;
}
// Maps a token to its multiplicative binary operator, or BinOpTypeInvalid
// when the token is not one. Note "||" (error set merge) and "!" (error
// union) bind at multiplicative precedence in this grammar.
static BinOpType tok_to_mult_op(Token *token) {
    BinOpType op;
    switch (token->id) {
        case TokenIdStar:         op = BinOpTypeMult;           break;
        case TokenIdTimesPercent: op = BinOpTypeMultWrap;       break;
        case TokenIdStarStar:     op = BinOpTypeArrayMult;      break;
        case TokenIdSlash:        op = BinOpTypeDiv;            break;
        case TokenIdPercent:      op = BinOpTypeMod;            break;
        case TokenIdBang:         op = BinOpTypeErrorUnion;     break;
        case TokenIdBarBar:       op = BinOpTypeMergeErrorSets; break;
        default:                  op = BinOpTypeInvalid;        break;
    }
    return op;
}
/*
MultiplyOperator = "||" | "*" | "/" | "%" | "**" | "*%"
*/
// Consumes and returns a multiplicative operator. When the current token is
// not one: errors out if mandatory, otherwise returns BinOpTypeInvalid
// without consuming anything.
static BinOpType ast_parse_mult_op(ParseContext *pc, size_t *token_index, bool mandatory) {
    Token *op_tok = &pc->tokens->at(*token_index);
    BinOpType op = tok_to_mult_op(op_tok);
    if (op == BinOpTypeInvalid) {
        if (!mandatory)
            return BinOpTypeInvalid;
        ast_invalid_token_error(pc, op_tok);
    }
    *token_index += 1;
    return op;
}
/*
MultiplyExpression : CurlySuffixExpression MultiplyOperator MultiplyExpression | CurlySuffixExpression
*/
// Parses a left-associative chain of multiplicative operations
// (see grammar comment above). Returns nullptr only when the first operand
// is absent and not mandatory.
static AstNode *ast_parse_mult_expr(ParseContext *pc, size_t *token_index, bool mandatory) {
    AstNode *lhs = ast_parse_curly_suffix_expr(pc, token_index, mandatory);
    if (lhs == nullptr)
        return nullptr;
    for (;;) {
        Token *op_tok = &pc->tokens->at(*token_index);
        BinOpType op = ast_parse_mult_op(pc, token_index, false);
        if (op == BinOpTypeInvalid)
            return lhs;
        AstNode *rhs = ast_parse_curly_suffix_expr(pc, token_index, true);
        AstNode *bin_op_node = ast_create_node(pc, NodeTypeBinOpExpr, op_tok);
        bin_op_node->data.bin_op_expr.op1 = lhs;
        bin_op_node->data.bin_op_expr.bin_op = op;
        bin_op_node->data.bin_op_expr.op2 = rhs;
        lhs = bin_op_node;
    }
}
// Maps a token to its additive binary operator ("++" is array
// concatenation), or BinOpTypeInvalid when the token is not one.
static BinOpType tok_to_add_op(Token *token) {
    BinOpType op;
    switch (token->id) {
        case TokenIdPlus:         op = BinOpTypeAdd;      break;
        case TokenIdPlusPercent:  op = BinOpTypeAddWrap;  break;
        case TokenIdDash:         op = BinOpTypeSub;      break;
        case TokenIdMinusPercent: op = BinOpTypeSubWrap;  break;
        case TokenIdPlusPlus:     op = BinOpTypeArrayCat; break;
        default:                  op = BinOpTypeInvalid;  break;
    }
    return op;
}
/*
AdditionOperator = "+" | "-" | "++" | "+%" | "-%"
*/
// Consumes and returns an additive operator. When the current token is not
// one: errors out if mandatory, otherwise returns BinOpTypeInvalid without
// consuming anything.
static BinOpType ast_parse_add_op(ParseContext *pc, size_t *token_index, bool mandatory) {
    Token *op_tok = &pc->tokens->at(*token_index);
    BinOpType op = tok_to_add_op(op_tok);
    if (op == BinOpTypeInvalid) {
        if (!mandatory)
            return BinOpTypeInvalid;
        ast_invalid_token_error(pc, op_tok);
    }
    *token_index += 1;
    return op;
}
/*
AdditionExpression : MultiplyExpression AdditionOperator AdditionExpression | MultiplyExpression
*/
// Parses a left-associative chain of additive operations
// (see grammar comment above).
static AstNode *ast_parse_add_expr(ParseContext *pc, size_t *token_index, bool mandatory) {
    AstNode *lhs = ast_parse_mult_expr(pc, token_index, mandatory);
    if (lhs == nullptr)
        return nullptr;
    for (;;) {
        Token *op_tok = &pc->tokens->at(*token_index);
        BinOpType op = ast_parse_add_op(pc, token_index, false);
        if (op == BinOpTypeInvalid)
            return lhs;
        AstNode *rhs = ast_parse_mult_expr(pc, token_index, true);
        AstNode *bin_op_node = ast_create_node(pc, NodeTypeBinOpExpr, op_tok);
        bin_op_node->data.bin_op_expr.op1 = lhs;
        bin_op_node->data.bin_op_expr.bin_op = op;
        bin_op_node->data.bin_op_expr.op2 = rhs;
        lhs = bin_op_node;
    }
}
// Maps a token to its bit-shift operator, or BinOpTypeInvalid when the
// token is not one.
static BinOpType tok_to_bit_shift_op(Token *token) {
    if (token->id == TokenIdBitShiftLeft)
        return BinOpTypeBitShiftLeft;
    if (token->id == TokenIdBitShiftRight)
        return BinOpTypeBitShiftRight;
    return BinOpTypeInvalid;
}
/*
BitShiftOperator = "<<" | ">>"
*/
// Consumes and returns a bit-shift operator. When the current token is not
// one: errors out if mandatory, otherwise returns BinOpTypeInvalid without
// consuming anything.
static BinOpType ast_parse_bit_shift_op(ParseContext *pc, size_t *token_index, bool mandatory) {
    Token *op_tok = &pc->tokens->at(*token_index);
    BinOpType op = tok_to_bit_shift_op(op_tok);
    if (op == BinOpTypeInvalid) {
        if (!mandatory)
            return BinOpTypeInvalid;
        ast_invalid_token_error(pc, op_tok);
    }
    *token_index += 1;
    return op;
}
/*
BitShiftExpression : AdditionExpression BitShiftOperator BitShiftExpression | AdditionExpression
*/
// Parses a left-associative chain of bit-shift operations
// (see grammar comment above).
static AstNode *ast_parse_bit_shift_expr(ParseContext *pc, size_t *token_index, bool mandatory) {
    AstNode *lhs = ast_parse_add_expr(pc, token_index, mandatory);
    if (lhs == nullptr)
        return nullptr;
    for (;;) {
        Token *op_tok = &pc->tokens->at(*token_index);
        BinOpType op = ast_parse_bit_shift_op(pc, token_index, false);
        if (op == BinOpTypeInvalid)
            return lhs;
        AstNode *rhs = ast_parse_add_expr(pc, token_index, true);
        AstNode *bin_op_node = ast_create_node(pc, NodeTypeBinOpExpr, op_tok);
        bin_op_node->data.bin_op_expr.op1 = lhs;
        bin_op_node->data.bin_op_expr.bin_op = op;
        bin_op_node->data.bin_op_expr.op2 = rhs;
        lhs = bin_op_node;
    }
}
/*
BinaryAndExpression : BitShiftExpression token(Ampersand) BinaryAndExpression | BitShiftExpression
*/
// Parses a left-associative chain of bitwise-and operations
// (see grammar comment above).
static AstNode *ast_parse_bin_and_expr(ParseContext *pc, size_t *token_index, bool mandatory) {
    AstNode *lhs = ast_parse_bit_shift_expr(pc, token_index, mandatory);
    if (lhs == nullptr)
        return nullptr;
    for (;;) {
        Token *op_tok = &pc->tokens->at(*token_index);
        if (op_tok->id != TokenIdAmpersand)
            return lhs;
        *token_index += 1;
        AstNode *rhs = ast_parse_bit_shift_expr(pc, token_index, true);
        AstNode *bin_op_node = ast_create_node(pc, NodeTypeBinOpExpr, op_tok);
        bin_op_node->data.bin_op_expr.op1 = lhs;
        bin_op_node->data.bin_op_expr.bin_op = BinOpTypeBinAnd;
        bin_op_node->data.bin_op_expr.op2 = rhs;
        lhs = bin_op_node;
    }
}
/*
BinaryXorExpression : BinaryAndExpression token(BinXor) BinaryXorExpression | BinaryAndExpression
*/
// Parses a left-associative chain of bitwise-xor operations
// (see grammar comment above).
static AstNode *ast_parse_bin_xor_expr(ParseContext *pc, size_t *token_index, bool mandatory) {
    AstNode *lhs = ast_parse_bin_and_expr(pc, token_index, mandatory);
    if (lhs == nullptr)
        return nullptr;
    for (;;) {
        Token *op_tok = &pc->tokens->at(*token_index);
        if (op_tok->id != TokenIdBinXor)
            return lhs;
        *token_index += 1;
        AstNode *rhs = ast_parse_bin_and_expr(pc, token_index, true);
        AstNode *bin_op_node = ast_create_node(pc, NodeTypeBinOpExpr, op_tok);
        bin_op_node->data.bin_op_expr.op1 = lhs;
        bin_op_node->data.bin_op_expr.bin_op = BinOpTypeBinXor;
        bin_op_node->data.bin_op_expr.op2 = rhs;
        lhs = bin_op_node;
    }
}
/*
BinaryOrExpression : BinaryXorExpression token(BinOr) BinaryOrExpression | BinaryXorExpression
*/
// Parses a left-associative chain of bitwise-or operations
// (see grammar comment above).
static AstNode *ast_parse_bin_or_expr(ParseContext *pc, size_t *token_index, bool mandatory) {
    AstNode *lhs = ast_parse_bin_xor_expr(pc, token_index, mandatory);
    if (lhs == nullptr)
        return nullptr;
    for (;;) {
        Token *op_tok = &pc->tokens->at(*token_index);
        if (op_tok->id != TokenIdBinOr)
            return lhs;
        *token_index += 1;
        AstNode *rhs = ast_parse_bin_xor_expr(pc, token_index, true);
        AstNode *bin_op_node = ast_create_node(pc, NodeTypeBinOpExpr, op_tok);
        bin_op_node->data.bin_op_expr.op1 = lhs;
        bin_op_node->data.bin_op_expr.bin_op = BinOpTypeBinOr;
        bin_op_node->data.bin_op_expr.op2 = rhs;
        lhs = bin_op_node;
    }
}
// Maps a token to its comparison operator, or BinOpTypeInvalid when the
// token is not one.
static BinOpType tok_to_cmp_op(Token *token) {
    BinOpType op;
    switch (token->id) {
        case TokenIdCmpEq:          op = BinOpTypeCmpEq;          break;
        case TokenIdCmpNotEq:       op = BinOpTypeCmpNotEq;       break;
        case TokenIdCmpLessThan:    op = BinOpTypeCmpLessThan;    break;
        case TokenIdCmpGreaterThan: op = BinOpTypeCmpGreaterThan; break;
        case TokenIdCmpLessOrEq:    op = BinOpTypeCmpLessOrEq;    break;
        case TokenIdCmpGreaterOrEq: op = BinOpTypeCmpGreaterOrEq; break;
        default:                    op = BinOpTypeInvalid;        break;
    }
    return op;
}
// Consumes and returns a comparison operator. When the current token is not
// one: errors out if mandatory, otherwise returns BinOpTypeInvalid without
// consuming anything.
static BinOpType ast_parse_comparison_operator(ParseContext *pc, size_t *token_index, bool mandatory) {
    Token *op_tok = &pc->tokens->at(*token_index);
    BinOpType op = tok_to_cmp_op(op_tok);
    if (op == BinOpTypeInvalid) {
        if (!mandatory)
            return BinOpTypeInvalid;
        ast_invalid_token_error(pc, op_tok);
    }
    *token_index += 1;
    return op;
}
/*
ComparisonExpression : BinaryOrExpression ComparisonOperator BinaryOrExpression | BinaryOrExpression
*/
// Parses a comparison (see grammar comment above). Unlike the other binary
// operator levels this one does not loop: comparisons are non-chaining, so
// at most one operator is consumed.
static AstNode *ast_parse_comparison_expr(ParseContext *pc, size_t *token_index, bool mandatory) {
    AstNode *lhs = ast_parse_bin_or_expr(pc, token_index, mandatory);
    if (lhs == nullptr)
        return nullptr;
    Token *op_tok = &pc->tokens->at(*token_index);
    BinOpType op = ast_parse_comparison_operator(pc, token_index, false);
    if (op == BinOpTypeInvalid)
        return lhs;
    AstNode *rhs = ast_parse_bin_or_expr(pc, token_index, true);
    AstNode *bin_op_node = ast_create_node(pc, NodeTypeBinOpExpr, op_tok);
    bin_op_node->data.bin_op_expr.op1 = lhs;
    bin_op_node->data.bin_op_expr.bin_op = op;
    bin_op_node->data.bin_op_expr.op2 = rhs;
    return bin_op_node;
}
/*
BoolAndExpression = ComparisonExpression "and" BoolAndExpression | ComparisonExpression
*/
// Parses a left-associative chain of "and" operations
// (see grammar comment above).
static AstNode *ast_parse_bool_and_expr(ParseContext *pc, size_t *token_index, bool mandatory) {
    AstNode *lhs = ast_parse_comparison_expr(pc, token_index, mandatory);
    if (lhs == nullptr)
        return nullptr;
    for (;;) {
        Token *op_tok = &pc->tokens->at(*token_index);
        if (op_tok->id != TokenIdKeywordAnd)
            return lhs;
        *token_index += 1;
        AstNode *rhs = ast_parse_comparison_expr(pc, token_index, true);
        AstNode *bin_op_node = ast_create_node(pc, NodeTypeBinOpExpr, op_tok);
        bin_op_node->data.bin_op_expr.op1 = lhs;
        bin_op_node->data.bin_op_expr.bin_op = BinOpTypeBoolAnd;
        bin_op_node->data.bin_op_expr.op2 = rhs;
        lhs = bin_op_node;
    }
}
/*
IfExpression(body) = "if" "(" Expression ")" body option("else" BlockExpression(body))
TryExpression(body) = "if" "(" Expression ")" option("|" option("*") Symbol "|") body "else" "|" Symbol "|" BlockExpression(body)
TestExpression(body) = "if" "(" Expression ")" "|" option("*") Symbol "|" body option("else" BlockExpression(body))
*/
// Parses the three "if" forms in one pass (see grammar comment above) and
// decides afterwards which AST node to build, based on which captures
// actually appeared:
//   - an else-branch error capture (|err|)  => NodeTypeIfErrorExpr
//   - a then-branch payload capture only    => NodeTypeTestExpr
//   - no captures                           => NodeTypeIfBoolExpr
static AstNode *ast_parse_if_try_test_expr(ParseContext *pc, size_t *token_index, bool mandatory) {
    Token *if_token = &pc->tokens->at(*token_index);
    if (if_token->id == TokenIdKeywordIf) {
        *token_index += 1;
    } else if (mandatory) {
        ast_expect_token(pc, if_token, TokenIdKeywordIf);
        zig_unreachable();
    } else {
        return nullptr;
    }
    // "(" Expression ")"
    ast_eat_token(pc, token_index, TokenIdLParen);
    AstNode *condition = ast_parse_expression(pc, token_index, true);
    ast_eat_token(pc, token_index, TokenIdRParen);
    // option("|" option("*") Symbol "|") -- payload capture on the then branch
    Token *open_bar_tok = &pc->tokens->at(*token_index);
    Token *var_name_tok = nullptr;
    bool var_is_ptr = false;
    if (open_bar_tok->id == TokenIdBinOr) {
        *token_index += 1;
        Token *star_tok = &pc->tokens->at(*token_index);
        if (star_tok->id == TokenIdStar) {
            *token_index += 1;
            var_is_ptr = true;  // |*x| captures the payload by pointer
        }
        var_name_tok = ast_eat_token(pc, token_index, TokenIdSymbol);
        ast_eat_token(pc, token_index, TokenIdBinOr);
    }
    AstNode *body_node = ast_parse_block_or_expression(pc, token_index, true);
    // option("else" option("|" Symbol "|") BlockExpression)
    Token *else_tok = &pc->tokens->at(*token_index);
    AstNode *else_node = nullptr;
    Token *err_name_tok = nullptr;
    if (else_tok->id == TokenIdKeywordElse) {
        *token_index += 1;
        Token *else_bar_tok = &pc->tokens->at(*token_index);
        if (else_bar_tok->id == TokenIdBinOr) {
            *token_index += 1;
            err_name_tok = ast_eat_token(pc, token_index, TokenIdSymbol);
            ast_eat_token(pc, token_index, TokenIdBinOr);
        }
        else_node = ast_parse_block_expr_or_expression(pc, token_index, true);
    }
    if (err_name_tok != nullptr) {
        // else |err| present: this is an error-union unwrap.
        AstNode *node = ast_create_node(pc, NodeTypeIfErrorExpr, if_token);
        node->data.if_err_expr.target_node = condition;
        node->data.if_err_expr.var_is_ptr = var_is_ptr;
        if (var_name_tok != nullptr) {
            node->data.if_err_expr.var_symbol = token_buf(var_name_tok);
        }
        node->data.if_err_expr.then_node = body_node;
        node->data.if_err_expr.err_symbol = token_buf(err_name_tok);
        node->data.if_err_expr.else_node = else_node;
        return node;
    } else if (var_name_tok != nullptr) {
        // payload capture without an error capture: optional unwrap test.
        AstNode *node = ast_create_node(pc, NodeTypeTestExpr, if_token);
        node->data.test_expr.target_node = condition;
        node->data.test_expr.var_is_ptr = var_is_ptr;
        node->data.test_expr.var_symbol = token_buf(var_name_tok);
        node->data.test_expr.then_node = body_node;
        node->data.test_expr.else_node = else_node;
        return node;
    } else {
        // plain boolean if.
        AstNode *node = ast_create_node(pc, NodeTypeIfBoolExpr, if_token);
        node->data.if_bool_expr.condition = condition;
        node->data.if_bool_expr.then_block = body_node;
        node->data.if_bool_expr.else_node = else_node;
        return node;
    }
}
/*
ReturnExpression : "return" option(Expression)
*/
// Parses "return" option(Expression); returns nullptr (consuming nothing)
// when the current token is not the "return" keyword.
static AstNode *ast_parse_return_expr(ParseContext *pc, size_t *token_index) {
    Token *return_tok = &pc->tokens->at(*token_index);
    if (return_tok->id != TokenIdKeywordReturn)
        return nullptr;
    *token_index += 1;
    AstNode *node = ast_create_node(pc, NodeTypeReturnExpr, return_tok);
    node->data.return_expr.kind = ReturnKindUnconditional;
    // The operand is optional (e.g. a bare "return").
    node->data.return_expr.expr = ast_parse_expression(pc, token_index, false);
    return node;
}
/*
TryExpression : "try" Expression
*/
// Parses "try" Expression. "try" is modeled as a conditional return
// (ReturnKindError on a NodeTypeReturnExpr). Returns nullptr (consuming
// nothing) when the current token is not the "try" keyword.
static AstNode *ast_parse_try_expr(ParseContext *pc, size_t *token_index) {
    Token *try_tok = &pc->tokens->at(*token_index);
    if (try_tok->id != TokenIdKeywordTry)
        return nullptr;
    *token_index += 1;
    AstNode *node = ast_create_node(pc, NodeTypeReturnExpr, try_tok);
    node->data.return_expr.kind = ReturnKindError;
    node->data.return_expr.expr = ast_parse_expression(pc, token_index, true);
    return node;
}
/*
AwaitExpression : "await" Expression
*/
// Parses "await" Expression; returns nullptr (consuming nothing) when the
// current token is not the "await" keyword.
static AstNode *ast_parse_await_expr(ParseContext *pc, size_t *token_index) {
    Token *await_tok = &pc->tokens->at(*token_index);
    if (await_tok->id != TokenIdKeywordAwait)
        return nullptr;
    *token_index += 1;
    AstNode *node = ast_create_node(pc, NodeTypeAwaitExpr, await_tok);
    node->data.await_expr.expr = ast_parse_expression(pc, token_index, true);
    return node;
}
/*
BreakExpression = "break" option(":" Symbol) option(Expression)
*/
// Parses "break" option(":" Symbol) option(Expression); the optional label
// names the enclosing block/loop to break out of. Returns nullptr
// (consuming nothing) when the current token is not "break".
static AstNode *ast_parse_break_expr(ParseContext *pc, size_t *token_index) {
    Token *break_tok = &pc->tokens->at(*token_index);
    if (break_tok->id != TokenIdKeywordBreak)
        return nullptr;
    *token_index += 1;
    AstNode *node = ast_create_node(pc, NodeTypeBreak, break_tok);
    Token *maybe_colon_token = &pc->tokens->at(*token_index);
    if (maybe_colon_token->id == TokenIdColon) {
        *token_index += 1;
        Token *label_tok = ast_eat_token(pc, token_index, TokenIdSymbol);
        node->data.break_expr.name = token_buf(label_tok);
    }
    // Optional result value for the broken-out-of block.
    node->data.break_expr.expr = ast_parse_expression(pc, token_index, false);
    return node;
}
/*
CancelExpression = "cancel" Expression;
*/
// Parses "cancel" Expression; returns nullptr (consuming nothing) when the
// current token is not the "cancel" keyword.
static AstNode *ast_parse_cancel_expr(ParseContext *pc, size_t *token_index) {
    Token *cancel_tok = &pc->tokens->at(*token_index);
    if (cancel_tok->id != TokenIdKeywordCancel)
        return nullptr;
    *token_index += 1;
    AstNode *node = ast_create_node(pc, NodeTypeCancel, cancel_tok);
    node->data.cancel_expr.expr = ast_parse_expression(pc, token_index, false);
    return node;
}
/*
ResumeExpression = "resume" Expression;
*/
// Parses "resume" Expression; returns nullptr (consuming nothing) when the
// current token is not the "resume" keyword.
static AstNode *ast_parse_resume_expr(ParseContext *pc, size_t *token_index) {
    Token *resume_tok = &pc->tokens->at(*token_index);
    if (resume_tok->id != TokenIdKeywordResume)
        return nullptr;
    *token_index += 1;
    AstNode *node = ast_create_node(pc, NodeTypeResume, resume_tok);
    node->data.resume_expr.expr = ast_parse_expression(pc, token_index, false);
    return node;
}
/*
Defer(body) = ("defer" | "errdefer") body
*/
// Parses ("defer" | "errdefer") body. Both keywords produce a NodeTypeDefer
// node; they differ only in the ReturnKind stored on it. Returns nullptr
// (consuming nothing) for any other token.
static AstNode *ast_parse_defer_expr(ParseContext *pc, size_t *token_index) {
    Token *defer_tok = &pc->tokens->at(*token_index);
    ReturnKind kind;
    if (defer_tok->id == TokenIdKeywordDefer) {
        kind = ReturnKindUnconditional;
    } else if (defer_tok->id == TokenIdKeywordErrdefer) {
        kind = ReturnKindError;  // runs only on error-path exits
    } else {
        return nullptr;
    }
    *token_index += 1;
    AstNode *node = ast_create_node(pc, NodeTypeDefer, defer_tok);
    node->data.defer.kind = kind;
    node->data.defer.expr = ast_parse_block_or_expression(pc, token_index, true);
    return node;
}
/*
VariableDeclaration = ("var" | "const") Symbol option(":" TypeExpr) option("align" "(" Expression ")") "=" Expression
*/
// Parses a variable declaration (see grammar comment above). The caller has
// already handled any leading "export"/"comptime"/visibility modifiers and
// passes them in; when not mandatory and the current token is neither "var"
// nor "const", returns nullptr without consuming anything.
static AstNode *ast_parse_variable_declaration_expr(ParseContext *pc, size_t *token_index, bool mandatory,
        VisibMod visib_mod, bool is_comptime, bool is_export)
{
    Token *first_token = &pc->tokens->at(*token_index);
    Token *var_token;
    bool is_const;
    if (first_token->id == TokenIdKeywordVar) {
        is_const = false;
        var_token = first_token;
        *token_index += 1;
    } else if (first_token->id == TokenIdKeywordConst) {
        is_const = true;
        var_token = first_token;
        *token_index += 1;
    } else if (mandatory) {
        ast_invalid_token_error(pc, first_token);
    } else {
        return nullptr;
    }
    AstNode *node = ast_create_node(pc, NodeTypeVariableDeclaration, var_token);
    node->data.variable_declaration.is_comptime = is_comptime;
    node->data.variable_declaration.is_export = is_export;
    node->data.variable_declaration.is_const = is_const;
    node->data.variable_declaration.visib_mod = visib_mod;
    Token *name_token = ast_eat_token(pc, token_index, TokenIdSymbol);
    node->data.variable_declaration.symbol = token_buf(name_token);
    // option(":" TypeExpr)
    Token *next_token = &pc->tokens->at(*token_index);
    if (next_token->id == TokenIdColon) {
        *token_index += 1;
        node->data.variable_declaration.type = ast_parse_type_expr(pc, token_index, true);
        next_token = &pc->tokens->at(*token_index);
    }
    // option("align" "(" Expression ")")
    if (next_token->id == TokenIdKeywordAlign) {
        *token_index += 1;
        ast_eat_token(pc, token_index, TokenIdLParen);
        node->data.variable_declaration.align_expr = ast_parse_expression(pc, token_index, true);
        ast_eat_token(pc, token_index, TokenIdRParen);
        next_token = &pc->tokens->at(*token_index);
    }
    // option("section" "(" Expression ")") -- not in the grammar comment above,
    // but accepted here.
    if (next_token->id == TokenIdKeywordSection) {
        *token_index += 1;
        ast_eat_token(pc, token_index, TokenIdLParen);
        node->data.variable_declaration.section_expr = ast_parse_expression(pc, token_index, true);
        ast_eat_token(pc, token_index, TokenIdRParen);
        next_token = &pc->tokens->at(*token_index);
    }
    // option("=" Expression) -- the initializer itself is optional here.
    if (next_token->id == TokenIdEq) {
        *token_index += 1;
        node->data.variable_declaration.expr = ast_parse_expression(pc, token_index, true);
        next_token = &pc->tokens->at(*token_index);
    }
    // peek ahead and ensure that all variable declarations are followed by a semicolon
    ast_expect_token(pc, next_token, TokenIdSemicolon);
    return node;
}
/*
GlobalVarDecl = option("export") VariableDeclaration ";"
*/
// Parses option("export") VariableDeclaration (see grammar comment above).
// Returns nullptr when no variable declaration follows; in that case any
// consumed "export" token is put back so the caller can try other
// productions.
static AstNode *ast_parse_global_var_decl(ParseContext *pc, size_t *token_index, VisibMod visib_mod) {
    Token *first_token = &pc->tokens->at(*token_index);
    bool is_export = false;  // was "false;;" -- stray extra semicolon removed
    if (first_token->id == TokenIdKeywordExport) {
        *token_index += 1;
        is_export = true;
    }
    AstNode *node = ast_parse_variable_declaration_expr(pc, token_index, false, visib_mod, false, is_export);
    if (node == nullptr) {
        if (is_export) {
            // Backtrack over "export" so it can be re-parsed elsewhere.
            *token_index -= 1;
        }
        return nullptr;
    }
    return node;
}
/*
LocalVarDecl = option("comptime") VariableDeclaration
*/
// Parses option("comptime") VariableDeclaration (see grammar comment above).
// Returns nullptr when no variable declaration follows; in that case any
// consumed "comptime" token is put back so the caller can try other
// productions.
static AstNode *ast_parse_local_var_decl(ParseContext *pc, size_t *token_index) {
    Token *first_token = &pc->tokens->at(*token_index);
    bool is_comptime = false;  // was "false;;" -- stray extra semicolon removed
    if (first_token->id == TokenIdKeywordCompTime) {
        *token_index += 1;
        is_comptime = true;
    }
    AstNode *node = ast_parse_variable_declaration_expr(pc, token_index, false, VisibModPrivate, is_comptime, false);
    if (node == nullptr) {
        if (is_comptime) {
            // Backtrack over "comptime" so it can be re-parsed elsewhere.
            *token_index -= 1;
        }
        return nullptr;
    }
    return node;
}
/*
BoolOrExpression = BoolAndExpression "or" BoolOrExpression | BoolAndExpression
*/
// Parses a left-associative chain of "or" operations
// (see grammar comment above).
static AstNode *ast_parse_bool_or_expr(ParseContext *pc, size_t *token_index, bool mandatory) {
    AstNode *lhs = ast_parse_bool_and_expr(pc, token_index, mandatory);
    if (lhs == nullptr)
        return nullptr;
    for (;;) {
        Token *op_tok = &pc->tokens->at(*token_index);
        if (op_tok->id != TokenIdKeywordOr)
            return lhs;
        *token_index += 1;
        AstNode *rhs = ast_parse_bool_and_expr(pc, token_index, true);
        AstNode *bin_op_node = ast_create_node(pc, NodeTypeBinOpExpr, op_tok);
        bin_op_node->data.bin_op_expr.op1 = lhs;
        bin_op_node->data.bin_op_expr.bin_op = BinOpTypeBoolOr;
        bin_op_node->data.bin_op_expr.op2 = rhs;
        lhs = bin_op_node;
    }
}
/*
WhileExpression(body) = option(Symbol ":") option("inline") "while" "(" Expression ")" option("|" option("*") Symbol "|") option(":" "(" Expression ")") body option("else" option("|" Symbol "|") BlockExpression(body))
*/
// Parses a while expression (see grammar comment above). The optional
// "label:" prefix is ambiguous with other symbol-led productions, so the
// function records orig_token_index and backtracks to it when the lookahead
// turns out not to be a while expression (non-mandatory case).
static AstNode *ast_parse_while_expr(ParseContext *pc, size_t *token_index, bool mandatory) {
    size_t orig_token_index = *token_index;
    Token *name_token = nullptr;
    Token *token = &pc->tokens->at(*token_index);
    // option(Symbol ":") -- loop label
    if (token->id == TokenIdSymbol) {
        *token_index += 1;
        Token *colon_token = &pc->tokens->at(*token_index);
        if (colon_token->id == TokenIdColon) {
            *token_index += 1;
            name_token = token;
            token = &pc->tokens->at(*token_index);
        } else if (mandatory) {
            ast_expect_token(pc, colon_token, TokenIdColon);
            zig_unreachable();
        } else {
            // Symbol without ':' -- not a label; undo and bail.
            *token_index = orig_token_index;
            return nullptr;
        }
    }
    // option("inline")
    bool is_inline = false;
    if (token->id == TokenIdKeywordInline) {
        is_inline = true;
        *token_index += 1;
        token = &pc->tokens->at(*token_index);
    }
    Token *while_token;
    if (token->id == TokenIdKeywordWhile) {
        while_token = token;
        *token_index += 1;
    } else if (mandatory) {
        ast_expect_token(pc, token, TokenIdKeywordWhile);
        zig_unreachable();
    } else {
        *token_index = orig_token_index;
        return nullptr;
    }
    AstNode *node = ast_create_node(pc, NodeTypeWhileExpr, while_token);
    if (name_token != nullptr) {
        node->data.while_expr.name = token_buf(name_token);
    }
    node->data.while_expr.is_inline = is_inline;
    // "(" Expression ")"
    ast_eat_token(pc, token_index, TokenIdLParen);
    node->data.while_expr.condition = ast_parse_expression(pc, token_index, true);
    ast_eat_token(pc, token_index, TokenIdRParen);
    // option("|" option("*") Symbol "|") -- payload capture
    Token *open_bar_tok = &pc->tokens->at(*token_index);
    if (open_bar_tok->id == TokenIdBinOr) {
        *token_index += 1;
        Token *star_tok = &pc->tokens->at(*token_index);
        if (star_tok->id == TokenIdStar) {
            *token_index += 1;
            node->data.while_expr.var_is_ptr = true;  // |*x| captures by pointer
        }
        Token *var_name_tok = ast_eat_token(pc, token_index, TokenIdSymbol);
        node->data.while_expr.var_symbol = token_buf(var_name_tok);
        ast_eat_token(pc, token_index, TokenIdBinOr);
    }
    // option(":" "(" Expression ")") -- continue expression
    Token *colon_tok = &pc->tokens->at(*token_index);
    if (colon_tok->id == TokenIdColon) {
        *token_index += 1;
        ast_eat_token(pc, token_index, TokenIdLParen);
        node->data.while_expr.continue_expr = ast_parse_expression(pc, token_index, true);
        ast_eat_token(pc, token_index, TokenIdRParen);
    }
    node->data.while_expr.body = ast_parse_block_or_expression(pc, token_index, true);
    // option("else" option("|" Symbol "|") BlockExpression)
    Token *else_tok = &pc->tokens->at(*token_index);
    if (else_tok->id == TokenIdKeywordElse) {
        *token_index += 1;
        Token *else_bar_tok = &pc->tokens->at(*token_index);
        if (else_bar_tok->id == TokenIdBinOr) {
            *token_index += 1;
            Token *err_name_tok = ast_eat_token(pc, token_index, TokenIdSymbol);
            node->data.while_expr.err_symbol = token_buf(err_name_tok);
            ast_eat_token(pc, token_index, TokenIdBinOr);
        }
        node->data.while_expr.else_node = ast_parse_block_or_expression(pc, token_index, true);
    }
    return node;
}
// Consumes a mandatory Symbol token and wraps it in a NodeTypeSymbol node.
static AstNode *ast_parse_symbol(ParseContext *pc, size_t *token_index) {
    Token *symbol_tok = ast_eat_token(pc, token_index, TokenIdSymbol);
    AstNode *node = ast_create_node(pc, NodeTypeSymbol, symbol_tok);
    node->data.symbol_expr.symbol = token_buf(symbol_tok);
    return node;
}
/*
ForExpression(body) = option(Symbol ":") option("inline") "for" "(" Expression ")" option("|" option("*") Symbol option("," Symbol) "|") body option("else" BlockExpression(body))
*/
// Parses a for expression (see grammar comment above). Like
// ast_parse_while_expr, the optional "label:" prefix requires lookahead, so
// the function records orig_token_index and backtracks to it when the input
// turns out not to be a for expression (non-mandatory case).
static AstNode *ast_parse_for_expr(ParseContext *pc, size_t *token_index, bool mandatory) {
    size_t orig_token_index = *token_index;
    Token *name_token = nullptr;
    Token *token = &pc->tokens->at(*token_index);
    // option(Symbol ":") -- loop label
    if (token->id == TokenIdSymbol) {
        *token_index += 1;
        Token *colon_token = &pc->tokens->at(*token_index);
        if (colon_token->id == TokenIdColon) {
            *token_index += 1;
            name_token = token;
            token = &pc->tokens->at(*token_index);
        } else if (mandatory) {
            ast_expect_token(pc, colon_token, TokenIdColon);
            zig_unreachable();
        } else {
            // Symbol without ':' -- not a label; undo and bail.
            *token_index = orig_token_index;
            return nullptr;
        }
    }
    // option("inline")
    bool is_inline = false;
    if (token->id == TokenIdKeywordInline) {
        is_inline = true;
        *token_index += 1;
        token = &pc->tokens->at(*token_index);
    }
    Token *for_token;
    if (token->id == TokenIdKeywordFor) {
        for_token = token;
        *token_index += 1;
    } else if (mandatory) {
        ast_expect_token(pc, token, TokenIdKeywordFor);
        zig_unreachable();
    } else {
        *token_index = orig_token_index;
        return nullptr;
    }
    AstNode *node = ast_create_node(pc, NodeTypeForExpr, for_token);
    if (name_token != nullptr) {
        node->data.for_expr.name = token_buf(name_token);
    }
    node->data.for_expr.is_inline = is_inline;
    // "(" Expression ")" -- the iterated expression
    ast_eat_token(pc, token_index, TokenIdLParen);
    node->data.for_expr.array_expr = ast_parse_expression(pc, token_index, true);
    ast_eat_token(pc, token_index, TokenIdRParen);
    // option("|" option("*") Symbol option("," Symbol) "|") -- element and index captures
    Token *maybe_bar = &pc->tokens->at(*token_index);
    if (maybe_bar->id == TokenIdBinOr) {
        *token_index += 1;
        Token *maybe_star = &pc->tokens->at(*token_index);
        if (maybe_star->id == TokenIdStar) {
            *token_index += 1;
            node->data.for_expr.elem_is_ptr = true;  // |*x| captures the element by pointer
        }
        node->data.for_expr.elem_node = ast_parse_symbol(pc, token_index);
        Token *maybe_comma = &pc->tokens->at(*token_index);
        if (maybe_comma->id == TokenIdComma) {
            *token_index += 1;
            node->data.for_expr.index_node = ast_parse_symbol(pc, token_index);
        }
        ast_eat_token(pc, token_index, TokenIdBinOr);
    }
    node->data.for_expr.body = ast_parse_block_or_expression(pc, token_index, true);
    // option("else" BlockExpression)
    Token *else_tok = &pc->tokens->at(*token_index);
    if (else_tok->id == TokenIdKeywordElse) {
        *token_index += 1;
        node->data.for_expr.else_node = ast_parse_block_or_expression(pc, token_index, true);
    }
    return node;
}
/*
SwitchExpression = "switch" "(" Expression ")" "{" many(SwitchProng) "}"
SwitchProng = (list(SwitchItem, ",") | "else") "=>" option("|" option("*") Symbol "|") Expression ","
SwitchItem = Expression | (Expression "..." Expression)
*/
// Parses a switch expression (see grammar comment above). The outer loop
// parses one prong per iteration; the inner loop parses that prong's
// comma-separated item list (or a bare "else").
static AstNode *ast_parse_switch_expr(ParseContext *pc, size_t *token_index, bool mandatory) {
    Token *switch_token = &pc->tokens->at(*token_index);
    if (switch_token->id == TokenIdKeywordSwitch) {
        *token_index += 1;
    } else if (mandatory) {
        ast_expect_token(pc, switch_token, TokenIdKeywordSwitch);
        zig_unreachable();
    } else {
        return nullptr;
    }
    AstNode *node = ast_create_node(pc, NodeTypeSwitchExpr, switch_token);
    // "(" Expression ")" "{"
    ast_eat_token(pc, token_index, TokenIdLParen);
    node->data.switch_expr.expr = ast_parse_expression(pc, token_index, true);
    ast_eat_token(pc, token_index, TokenIdRParen);
    ast_eat_token(pc, token_index, TokenIdLBrace);
    for (;;) {
        Token *token = &pc->tokens->at(*token_index);
        if (token->id == TokenIdRBrace) {
            *token_index += 1;
            return node;
        }
        AstNode *prong_node = ast_create_node(pc, NodeTypeSwitchProng, token);
        node->data.switch_expr.prongs.append(prong_node);
        if (token->id == TokenIdKeywordElse) {
            // "else" prong: no items at all.
            *token_index += 1;
        } else for (;;) {
            // One item per iteration: either Expression or Expression "..." Expression.
            AstNode *expr1 = ast_parse_expression(pc, token_index, true);
            Token *ellipsis_tok = &pc->tokens->at(*token_index);
            if (ellipsis_tok->id == TokenIdEllipsis3) {
                *token_index += 1;
                AstNode *range_node = ast_create_node(pc, NodeTypeSwitchRange, ellipsis_tok);
                prong_node->data.switch_prong.items.append(range_node);
                range_node->data.switch_range.start = expr1;
                range_node->data.switch_range.end = ast_parse_expression(pc, token_index, true);
                prong_node->data.switch_prong.any_items_are_range = true;
            } else {
                prong_node->data.switch_prong.items.append(expr1);
            }
            Token *comma_tok = &pc->tokens->at(*token_index);
            if (comma_tok->id == TokenIdComma) {
                *token_index += 1;
                // A trailing comma before "=>" ends the item list; otherwise
                // another item follows.
                Token *token = &pc->tokens->at(*token_index);
                if (token->id == TokenIdFatArrow) {
                    break;
                } else {
                    continue;
                }
            }
            break;
        }
        ast_eat_token(pc, token_index, TokenIdFatArrow);
        // option("|" option("*") Symbol "|") -- prong payload capture
        Token *maybe_bar = &pc->tokens->at(*token_index);
        if (maybe_bar->id == TokenIdBinOr) {
            *token_index += 1;
            Token *star_or_symbol = &pc->tokens->at(*token_index);
            AstNode *var_symbol_node;
            bool var_is_ptr;
            if (star_or_symbol->id == TokenIdStar) {
                *token_index += 1;
                var_is_ptr = true;  // |*x| captures by pointer
                var_symbol_node = ast_parse_symbol(pc, token_index);
            } else {
                var_is_ptr = false;
                var_symbol_node = ast_parse_symbol(pc, token_index);
            }
            prong_node->data.switch_prong.var_symbol = var_symbol_node;
            prong_node->data.switch_prong.var_is_ptr = var_is_ptr;
            ast_eat_token(pc, token_index, TokenIdBinOr);
        }
        prong_node->data.switch_prong.expr = ast_parse_expression(pc, token_index, true);
        // After a prong: either the closing brace or a comma before the next prong.
        Token *trailing_token = &pc->tokens->at(*token_index);
        if (trailing_token->id == TokenIdRBrace) {
            *token_index += 1;
            return node;
        } else {
            ast_eat_token(pc, token_index, TokenIdComma);
        }
    }
}
/*
BlockExpression(body) = Block | IfExpression(body) | IfErrorExpression(body) | TestExpression(body) | WhileExpression(body) | ForExpression(body) | SwitchExpression | CompTimeExpression(body) | SuspendExpression(body)
*/
// Parses a block expression by trying each alternative in order (see
// grammar comment above). The first alternative that parses wins; when
// none does, errors if mandatory, otherwise returns nullptr.
static AstNode *ast_parse_block_expr(ParseContext *pc, size_t *token_index, bool mandatory) {
    Token *first_token = &pc->tokens->at(*token_index);
    AstNode *result;
    if ((result = ast_parse_if_try_test_expr(pc, token_index, false)) != nullptr)
        return result;
    if ((result = ast_parse_while_expr(pc, token_index, false)) != nullptr)
        return result;
    if ((result = ast_parse_for_expr(pc, token_index, false)) != nullptr)
        return result;
    if ((result = ast_parse_switch_expr(pc, token_index, false)) != nullptr)
        return result;
    if ((result = ast_parse_block(pc, token_index, false)) != nullptr)
        return result;
    if ((result = ast_parse_comptime_expr(pc, token_index, false, false)) != nullptr)
        return result;
    if ((result = ast_parse_suspend_block(pc, token_index, false)) != nullptr)
        return result;
    if (mandatory)
        ast_invalid_token_error(pc, first_token);
    return nullptr;
}
static BinOpType tok_to_ass_op(Token *token) {
    // Translate an assignment-operator token into the matching BinOpType.
    // Any token that is not an assignment operator maps to BinOpTypeInvalid.
    const auto id = token->id;
    if (id == TokenIdEq) return BinOpTypeAssign;
    if (id == TokenIdTimesEq) return BinOpTypeAssignTimes;
    if (id == TokenIdTimesPercentEq) return BinOpTypeAssignTimesWrap;
    if (id == TokenIdDivEq) return BinOpTypeAssignDiv;
    if (id == TokenIdModEq) return BinOpTypeAssignMod;
    if (id == TokenIdPlusEq) return BinOpTypeAssignPlus;
    if (id == TokenIdPlusPercentEq) return BinOpTypeAssignPlusWrap;
    if (id == TokenIdMinusEq) return BinOpTypeAssignMinus;
    if (id == TokenIdMinusPercentEq) return BinOpTypeAssignMinusWrap;
    if (id == TokenIdBitShiftLeftEq) return BinOpTypeAssignBitShiftLeft;
    if (id == TokenIdBitShiftRightEq) return BinOpTypeAssignBitShiftRight;
    if (id == TokenIdBitAndEq) return BinOpTypeAssignBitAnd;
    if (id == TokenIdBitXorEq) return BinOpTypeAssignBitXor;
    if (id == TokenIdBitOrEq) return BinOpTypeAssignBitOr;
    return BinOpTypeInvalid;
}
/*
AssignmentOperator = "=" | "*=" | "/=" | "%=" | "+=" | "-=" | "<<=" | ">>=" | "&=" | "^=" | "|=" | "*%=" | "+%=" | "-%="
*/
static BinOpType ast_parse_ass_op(ParseContext *pc, size_t *token_index, bool mandatory) {
    // Consume an assignment operator and return its BinOpType. When the
    // current token is not an assignment operator: report an error if
    // mandatory, otherwise return BinOpTypeInvalid without consuming.
    Token *op_token = &pc->tokens->at(*token_index);
    BinOpType op = tok_to_ass_op(op_token);
    if (op != BinOpTypeInvalid) {
        *token_index += 1;
        return op;
    }
    if (mandatory)
        ast_invalid_token_error(pc, op_token);
    return BinOpTypeInvalid;
}
/*
UnwrapExpression : BoolOrExpression (UnwrapOptional | UnwrapError) | BoolOrExpression
UnwrapOptional = "orelse" Expression
UnwrapError = "catch" option("|" Symbol "|") Expression
*/
static AstNode *ast_parse_unwrap_expr(ParseContext *pc, size_t *token_index, bool mandatory) {
    // UnwrapExpression : BoolOrExpression (UnwrapOptional | UnwrapError) | BoolOrExpression
    AstNode *op1 = ast_parse_bool_or_expr(pc, token_index, mandatory);
    if (op1 == nullptr)
        return nullptr;
    Token *op_token = &pc->tokens->at(*token_index);
    if (op_token->id == TokenIdKeywordOrElse) {
        // "orelse" Expression -> binary unwrap-optional node.
        *token_index += 1;
        AstNode *op2 = ast_parse_expression(pc, token_index, true);
        AstNode *result = ast_create_node(pc, NodeTypeBinOpExpr, op_token);
        result->data.bin_op_expr.op1 = op1;
        result->data.bin_op_expr.bin_op = BinOpTypeUnwrapOptional;
        result->data.bin_op_expr.op2 = op2;
        return result;
    }
    if (op_token->id != TokenIdKeywordCatch) {
        // No unwrap operator follows; the lhs stands alone.
        return op1;
    }
    // "catch" option("|" Symbol "|") Expression
    *token_index += 1;
    AstNode *result = ast_create_node(pc, NodeTypeUnwrapErrorExpr, op_token);
    result->data.unwrap_err_expr.op1 = op1;
    Token *bar_token = &pc->tokens->at(*token_index);
    if (bar_token->id == TokenIdBinOr) {
        // Optional error-payload capture: |err|
        *token_index += 1;
        result->data.unwrap_err_expr.symbol = ast_parse_symbol(pc, token_index);
        ast_eat_token(pc, token_index, TokenIdBinOr);
    }
    result->data.unwrap_err_expr.op2 = ast_parse_expression(pc, token_index, true);
    return result;
}
/*
AssignmentExpression : UnwrapExpression AssignmentOperator UnwrapExpression | UnwrapExpression
*/
static AstNode *ast_parse_ass_expr(ParseContext *pc, size_t *token_index, bool mandatory) {
    // AssignmentExpression : UnwrapExpression AssignmentOperator UnwrapExpression | UnwrapExpression
    AstNode *target = ast_parse_unwrap_expr(pc, token_index, mandatory);
    if (target == nullptr)
        return nullptr;
    Token *op_token = &pc->tokens->at(*token_index);
    BinOpType op = ast_parse_ass_op(pc, token_index, false);
    if (op == BinOpTypeInvalid) {
        // No assignment operator: the unwrap-expression stands alone.
        return target;
    }
    AstNode *rhs_node = ast_parse_unwrap_expr(pc, token_index, true);
    AstNode *result = ast_create_node(pc, NodeTypeBinOpExpr, op_token);
    result->data.bin_op_expr.op1 = target;
    result->data.bin_op_expr.bin_op = op;
    result->data.bin_op_expr.op2 = rhs_node;
    return result;
}
static AstNode *ast_parse_block_expr_or_expression(ParseContext *pc, size_t *token_index, bool mandatory) {
    // Prefer a block-style expression; otherwise fall back to a plain expression.
    AstNode *result = ast_parse_block_expr(pc, token_index, false);
    return (result != nullptr) ? result : ast_parse_expression(pc, token_index, mandatory);
}
/*
BlockOrExpression = Block | Expression
*/
static AstNode *ast_parse_block_or_expression(ParseContext *pc, size_t *token_index, bool mandatory) {
    // BlockOrExpression = Block | Expression
    AstNode *result = ast_parse_block(pc, token_index, false);
    return (result != nullptr) ? result : ast_parse_expression(pc, token_index, mandatory);
}
/*
Expression = TryExpression | ReturnExpression | BreakExpression | AssignmentExpression | CancelExpression | ResumeExpression
*/
static AstNode *ast_parse_expression(ParseContext *pc, size_t *token_index, bool mandatory) {
    // Expression = TryExpression | ReturnExpression | BreakExpression |
    //              AssignmentExpression | CancelExpression | ResumeExpression
    // Alternatives are tried in this fixed order; a failed attempt consumes nothing.
    Token *first_token = &pc->tokens->at(*token_index);
    AstNode *result;
    if ((result = ast_parse_return_expr(pc, token_index)) != nullptr)
        return result;
    if ((result = ast_parse_try_expr(pc, token_index)) != nullptr)
        return result;
    if ((result = ast_parse_break_expr(pc, token_index)) != nullptr)
        return result;
    if ((result = ast_parse_cancel_expr(pc, token_index)) != nullptr)
        return result;
    if ((result = ast_parse_resume_expr(pc, token_index)) != nullptr)
        return result;
    if ((result = ast_parse_ass_expr(pc, token_index, false)) != nullptr)
        return result;
    if (mandatory)
        ast_invalid_token_error(pc, first_token);
    return nullptr;
}
bool statement_terminates_without_semicolon(AstNode *node) {
    // A statement that syntactically ends in "}" needs no trailing ";".
    // For if/else chains, what matters is how the final arm terminates.
    switch (node->type) {
        case NodeTypeIfBoolExpr:
            return (node->data.if_bool_expr.else_node != nullptr)
                ? statement_terminates_without_semicolon(node->data.if_bool_expr.else_node)
                : node->data.if_bool_expr.then_block->type == NodeTypeBlock;
        case NodeTypeIfErrorExpr:
            return (node->data.if_err_expr.else_node != nullptr)
                ? statement_terminates_without_semicolon(node->data.if_err_expr.else_node)
                : node->data.if_err_expr.then_node->type == NodeTypeBlock;
        case NodeTypeTestExpr:
            return (node->data.test_expr.else_node != nullptr)
                ? statement_terminates_without_semicolon(node->data.test_expr.else_node)
                : node->data.test_expr.then_node->type == NodeTypeBlock;
        case NodeTypeWhileExpr:
            return node->data.while_expr.body->type == NodeTypeBlock;
        case NodeTypeForExpr:
            return node->data.for_expr.body->type == NodeTypeBlock;
        case NodeTypeCompTime:
            return node->data.comptime_expr.expr->type == NodeTypeBlock;
        case NodeTypeDefer:
            return node->data.defer.expr->type == NodeTypeBlock;
        case NodeTypeSuspend: {
            // suspend may have no block at all (plain "suspend;").
            AstNode *suspend_block = node->data.suspend.block;
            return suspend_block != nullptr && suspend_block->type == NodeTypeBlock;
        }
        case NodeTypeSwitchExpr:
        case NodeTypeBlock:
            return true;
        default:
            return false;
    }
}
/*
Block = option(Symbol ":") "{" many(Statement) "}"
Statement = Label | VariableDeclaration ";" | Defer(Block) | Defer(Expression) ";" | BlockExpression(Block) | Expression ";" | ";" | ExportDecl
*/
// Parses an optionally labeled block: option(Symbol ":") "{" many(Statement) "}".
// Returns the NodeTypeBlock node; when not mandatory and no block starts here,
// restores *token_index and returns nullptr. When mandatory, reports a parse error.
static AstNode *ast_parse_block(ParseContext *pc, size_t *token_index, bool mandatory) {
    size_t orig_token_index = *token_index; // saved for backtracking when optional
    Token *name_token = nullptr;
    Token *last_token = &pc->tokens->at(*token_index);
    // Optional block label: Symbol ":" preceding the "{".
    if (last_token->id == TokenIdSymbol) {
        *token_index += 1;
        Token *colon_token = &pc->tokens->at(*token_index);
        if (colon_token->id == TokenIdColon) {
            *token_index += 1;
            name_token = last_token;
            last_token = &pc->tokens->at(*token_index);
        } else if (mandatory) {
            ast_expect_token(pc, colon_token, TokenIdColon);
            zig_unreachable();
        } else {
            // A lone symbol is not a block; rewind and let the caller try other rules.
            *token_index = orig_token_index;
            return nullptr;
        }
    }
    if (last_token->id != TokenIdLBrace) {
        if (mandatory) {
            ast_expect_token(pc, last_token, TokenIdLBrace);
        } else {
            *token_index = orig_token_index;
            return nullptr;
        }
    }
    *token_index += 1;
    AstNode *node = ast_create_node(pc, NodeTypeBlock, last_token);
    if (name_token != nullptr) {
        node->data.block.name = token_buf(name_token);
    }
    // Accumulate statements until the closing "}".
    for (;;) {
        last_token = &pc->tokens->at(*token_index);
        if (last_token->id == TokenIdRBrace) {
            *token_index += 1;
            return node;
        }
        // Try each statement form in order; each attempt consumes nothing on failure.
        AstNode *statement_node = ast_parse_local_var_decl(pc, token_index);
        if (!statement_node)
            statement_node = ast_parse_defer_expr(pc, token_index);
        if (!statement_node)
            statement_node = ast_parse_block_expr(pc, token_index, false);
        if (!statement_node)
            statement_node = ast_parse_expression(pc, token_index, false);
        if (!statement_node) {
            ast_invalid_token_error(pc, last_token);
        }
        node->data.block.statements.append(statement_node);
        // Block-shaped statements (if/while/for/blocks/...) need no trailing ";".
        if (!statement_terminates_without_semicolon(statement_node)) {
            ast_eat_token(pc, token_index, TokenIdSemicolon);
        }
    }
    zig_unreachable();
}
/*
FnProto = option("nakedcc" | "stdcallcc" | "extern" | ("async" option("(" Expression ")"))) "fn" option(Symbol) ParamDeclList option("align" "(" Expression ")") option("section" "(" Expression ")") option("!") (TypeExpr | "var")
*/
// Parses the calling-convention prefix and "fn" keyword of a function
// prototype, then delegates the rest to ast_parse_fn_proto_partial.
// Returns nullptr when not mandatory and no prototype starts here.
static AstNode *ast_parse_fn_proto(ParseContext *pc, size_t *token_index, bool mandatory, VisibMod visib_mod) {
    Token *first_token = &pc->tokens->at(*token_index);
    Token *fn_token;
    CallingConvention cc;
    bool is_extern = false;
    AstNode *async_allocator_type_node = nullptr;
    // Dispatch on the optional prefix before "fn".
    if (first_token->id == TokenIdKeywordNakedCC) {
        *token_index += 1;
        fn_token = ast_eat_token(pc, token_index, TokenIdKeywordFn);
        cc = CallingConventionNaked;
    } else if (first_token->id == TokenIdKeywordAsync) {
        *token_index += 1;
        Token *next_token = &pc->tokens->at(*token_index);
        // Optional async allocator annotation: async<AllocatorType> fn ...
        if (next_token->id == TokenIdCmpLessThan) {
            *token_index += 1;
            async_allocator_type_node = ast_parse_type_expr(pc, token_index, true);
            ast_eat_token(pc, token_index, TokenIdCmpGreaterThan);
        }
        fn_token = ast_eat_token(pc, token_index, TokenIdKeywordFn);
        cc = CallingConventionAsync;
    } else if (first_token->id == TokenIdKeywordStdcallCC) {
        *token_index += 1;
        fn_token = ast_eat_token(pc, token_index, TokenIdKeywordFn);
        cc = CallingConventionStdcall;
    } else if (first_token->id == TokenIdKeywordExtern) {
        is_extern = true;
        *token_index += 1;
        Token *next_token = &pc->tokens->at(*token_index);
        if (next_token->id == TokenIdKeywordFn) {
            fn_token = next_token;
            *token_index += 1;
        } else if (mandatory) {
            ast_expect_token(pc, next_token, TokenIdKeywordFn);
            zig_unreachable();
        } else {
            // "extern" not followed by "fn" belongs to another rule
            // (e.g. extern variable/container decl): rewind the one token.
            *token_index -= 1;
            return nullptr;
        }
        cc = CallingConventionC;
    } else if (first_token->id == TokenIdKeywordFn) {
        fn_token = first_token;
        *token_index += 1;
        cc = CallingConventionUnspecified;
    } else if (mandatory) {
        ast_expect_token(pc, first_token, TokenIdKeywordFn);
        zig_unreachable();
    } else {
        return nullptr;
    }
    return ast_parse_fn_proto_partial(pc, token_index, fn_token, async_allocator_type_node, cc, is_extern, visib_mod);
}
/*
FnDef = option("inline" | "export") FnProto Block
*/
// Parses option("inline" | "export") FnProto followed by either ";" (a bare
// declaration, returned as the FnProto node) or a Block (wrapped in a FnDef node).
static AstNode *ast_parse_fn_def(ParseContext *pc, size_t *token_index, bool mandatory, VisibMod visib_mod) {
    Token *first_token = &pc->tokens->at(*token_index);
    bool is_inline;
    bool is_export;
    // Optional "inline" / "export" prefix (at most one).
    if (first_token->id == TokenIdKeywordInline) {
        *token_index += 1;
        is_inline = true;
        is_export = false;
    } else if (first_token->id == TokenIdKeywordExport) {
        *token_index += 1;
        is_export = true;
        is_inline = false;
    } else {
        is_inline = false;
        is_export = false;
    }
    AstNode *fn_proto = ast_parse_fn_proto(pc, token_index, mandatory, visib_mod);
    if (!fn_proto) {
        // Rewind the single prefix token so the caller can try other rules.
        // NOTE(review): assumes ast_parse_fn_proto consumed nothing when it
        // returned nullptr in the non-mandatory case — confirm.
        if (is_inline || is_export) {
            *token_index -= 1;
        }
        return nullptr;
    }
    fn_proto->data.fn_proto.is_inline = is_inline;
    fn_proto->data.fn_proto.is_export = is_export;
    // A ";" after the prototype means a declaration without a body.
    Token *semi_token = &pc->tokens->at(*token_index);
    if (semi_token->id == TokenIdSemicolon) {
        *token_index += 1;
        return fn_proto;
    }
    // Otherwise a block body is required; link proto and def in both directions.
    AstNode *node = ast_create_node(pc, NodeTypeFnDef, first_token);
    node->data.fn_def.fn_proto = fn_proto;
    node->data.fn_def.body = ast_parse_block(pc, token_index, true);
    fn_proto->data.fn_proto.fn_def_node = node;
    return node;
}
/*
ExternDecl = "extern" option(String) (FnProto | VariableDeclaration) ";"
*/
// Parses: "extern" option(String) (FnProto | VariableDeclaration) ";"
// The optional string literal is the library name the symbol comes from.
static AstNode *ast_parse_extern_decl(ParseContext *pc, size_t *token_index, bool mandatory, VisibMod visib_mod) {
    Token *extern_kw = &pc->tokens->at(*token_index);
    if (extern_kw->id != TokenIdKeywordExtern) {
        if (mandatory) {
            ast_expect_token(pc, extern_kw, TokenIdKeywordExtern);
        } else {
            return nullptr;
        }
    }
    *token_index += 1;
    // Optional library name, e.g. extern "c" fn ...
    Token *lib_name_tok = &pc->tokens->at(*token_index);
    Buf *lib_name = nullptr;
    if (lib_name_tok->id == TokenIdStringLiteral) {
        lib_name = token_buf(lib_name_tok);
        *token_index += 1;
    }
    // Try a function prototype first, then a variable declaration.
    AstNode *fn_proto_node = ast_parse_fn_proto(pc, token_index, false, visib_mod);
    if (fn_proto_node) {
        ast_eat_token(pc, token_index, TokenIdSemicolon);
        fn_proto_node->data.fn_proto.is_extern = true;
        fn_proto_node->data.fn_proto.lib_name = lib_name;
        return fn_proto_node;
    }
    AstNode *var_decl_node = ast_parse_variable_declaration_expr(pc, token_index, false, visib_mod, false, false);
    if (var_decl_node) {
        ast_eat_token(pc, token_index, TokenIdSemicolon);
        var_decl_node->data.variable_declaration.is_extern = true;
        var_decl_node->data.variable_declaration.lib_name = lib_name;
        return var_decl_node;
    }
    // Neither form matched: report the error.
    // NOTE(review): this is the last statement of a non-void function, so
    // ast_invalid_token_error is presumably noreturn — confirm.
    Token *token = &pc->tokens->at(*token_index);
    ast_invalid_token_error(pc, token);
}
/*
UseDecl = "use" Expression ";"
*/
static AstNode *ast_parse_use(ParseContext *pc, size_t *token_index, VisibMod visib_mod) {
    // UseDecl = "use" Expression ";"
    Token *use_token = &pc->tokens->at(*token_index);
    if (use_token->id != TokenIdKeywordUse)
        return nullptr;
    *token_index += 1;
    AstNode *result = ast_create_node(pc, NodeTypeUse, use_token);
    result->data.use.visib_mod = visib_mod;
    result->data.use.expr = ast_parse_expression(pc, token_index, true);
    ast_eat_token(pc, token_index, TokenIdSemicolon);
    return result;
}
/*
ContainerDecl = option("extern" | "packed")
("struct" option(GroupedExpression) | "union" option("enum" option(GroupedExpression) | GroupedExpression) | ("enum" option(GroupedExpression)))
"{" many(ContainerMember) "}"
ContainerMember = (ContainerField | FnDef | GlobalVarDecl)
ContainerField = Symbol option(":" PrefixOpExpression option("=" PrefixOpExpression ","
*/
// Parses a container declaration per the grammar above:
//   option("extern" | "packed") ("struct" | "union" | "enum") [init arg]
//   "{" many(ContainerMember) "}"
// Returns nullptr when not mandatory and no container starts here (without
// consuming tokens); otherwise returns the NodeTypeContainerDecl node.
static AstNode *ast_parse_container_decl(ParseContext *pc, size_t *token_index, bool mandatory) {
    Token *first_token = &pc->tokens->at(*token_index);
    Token *container_kind_token;
    ContainerLayout layout;
    // Optional layout prefix; the container keyword then sits one token later.
    if (first_token->id == TokenIdKeywordExtern) {
        container_kind_token = &pc->tokens->at(*token_index + 1);
        layout = ContainerLayoutExtern;
    } else if (first_token->id == TokenIdKeywordPacked) {
        container_kind_token = &pc->tokens->at(*token_index + 1);
        layout = ContainerLayoutPacked;
    } else {
        container_kind_token = first_token;
        layout = ContainerLayoutAuto;
    }
    ContainerKind kind;
    if (container_kind_token->id == TokenIdKeywordStruct) {
        kind = ContainerKindStruct;
    } else if (container_kind_token->id == TokenIdKeywordEnum) {
        kind = ContainerKindEnum;
    } else if (container_kind_token->id == TokenIdKeywordUnion) {
        kind = ContainerKindUnion;
    } else if (mandatory) {
        ast_invalid_token_error(pc, container_kind_token);
    } else {
        return nullptr;
    }
    // Consume the layout prefix (if any) plus the container keyword.
    *token_index += (layout == ContainerLayoutAuto) ? 1 : 2;
    AstNode *node = ast_create_node(pc, NodeTypeContainerDecl, first_token);
    node->data.container_decl.layout = layout;
    node->data.container_decl.kind = kind;
    if (kind == ContainerKindUnion) {
        // Detect "union(enum)" / "union(enum(TagType))": an auto-enum union.
        Token *lparen_token = &pc->tokens->at(*token_index);
        if (lparen_token->id == TokenIdLParen) {
            Token *enum_token = &pc->tokens->at(*token_index + 1);
            if (enum_token->id == TokenIdKeywordEnum) {
                Token *paren_token = &pc->tokens->at(*token_index + 2);
                if (paren_token->id == TokenIdLParen) {
                    // union(enum(TagType)): skip "(" "enum", parse "(TagType)",
                    // then eat the outer ")".
                    node->data.container_decl.auto_enum = true;
                    *token_index += 2;
                    node->data.container_decl.init_arg_expr = ast_parse_grouped_expr(pc, token_index, true);
                    ast_eat_token(pc, token_index, TokenIdRParen);
                } else if (paren_token->id == TokenIdRParen) {
                    // union(enum): skip "(" "enum" ")".
                    node->data.container_decl.auto_enum = true;
                    *token_index += 3;
                }
            }
        }
    }
    if (!node->data.container_decl.auto_enum) {
        // Optional init argument, e.g. enum(u8) or union(TagType).
        node->data.container_decl.init_arg_expr = ast_parse_grouped_expr(pc, token_index, false);
    }
    // BUG FIX: previously a stray TokenIdDot was eaten here before the "{",
    // which contradicts the documented grammar ('"{" many(ContainerMember) "}"')
    // and would reject every valid container declaration.
    ast_eat_token(pc, token_index, TokenIdLBrace);
    for (;;) {
        // Each member may be prefixed with "pub".
        Token *visib_tok = &pc->tokens->at(*token_index);
        VisibMod visib_mod;
        if (visib_tok->id == TokenIdKeywordPub) {
            *token_index += 1;
            visib_mod = VisibModPub;
        } else {
            visib_mod = VisibModPrivate;
        }
        // Member alternatives, in order: fn def, var decl, "}" terminator, field.
        AstNode *fn_def_node = ast_parse_fn_def(pc, token_index, false, visib_mod);
        if (fn_def_node) {
            node->data.container_decl.decls.append(fn_def_node);
            continue;
        }
        AstNode *var_decl_node = ast_parse_global_var_decl(pc, token_index, visib_mod);
        if (var_decl_node) {
            ast_eat_token(pc, token_index, TokenIdSemicolon);
            node->data.container_decl.decls.append(var_decl_node);
            continue;
        }
        Token *token = &pc->tokens->at(*token_index);
        if (token->id == TokenIdRBrace) {
            *token_index += 1;
            break;
        } else if (token->id == TokenIdSymbol) {
            // ContainerField = Symbol option(":" type) option("=" value)
            AstNode *field_node = ast_create_node(pc, NodeTypeStructField, token);
            *token_index += 1;
            node->data.container_decl.fields.append(field_node);
            field_node->data.struct_field.visib_mod = visib_mod;
            field_node->data.struct_field.name = token_buf(token);
            Token *colon_token = &pc->tokens->at(*token_index);
            if (colon_token->id == TokenIdColon) {
                *token_index += 1;
                field_node->data.struct_field.type = ast_parse_type_expr(pc, token_index, true);
            }
            Token *eq_token = &pc->tokens->at(*token_index);
            if (eq_token->id == TokenIdEq) {
                *token_index += 1;
                field_node->data.struct_field.value = ast_parse_expression(pc, token_index, true);
            }
            // A field is followed by "," (more members) or "}" (end of container).
            Token *next_token = &pc->tokens->at(*token_index);
            if (next_token->id == TokenIdComma) {
                *token_index += 1;
                continue;
            }
            if (next_token->id == TokenIdRBrace) {
                *token_index += 1;
                break;
            }
            ast_invalid_token_error(pc, next_token);
        } else {
            ast_invalid_token_error(pc, token);
        }
    }
    return node;
}
/*
TestDecl = "test" String Block
*/
static AstNode *ast_parse_test_decl_node(ParseContext *pc, size_t *token_index) {
    // TestDecl = "test" String Block
    Token *test_token = &pc->tokens->at(*token_index);
    if (test_token->id != TokenIdKeywordTest)
        return nullptr;
    *token_index += 1;
    Token *name_token = ast_eat_token(pc, token_index, TokenIdStringLiteral);
    AstNode *result = ast_create_node(pc, NodeTypeTestDecl, test_token);
    result->data.test_decl.name = token_buf(name_token);
    result->data.test_decl.body = ast_parse_block(pc, token_index, true);
    return result;
}
/*
TopLevelItem = ErrorValueDecl | CompTimeExpression(Block) | TopLevelDecl | TestDecl
TopLevelDecl = option("pub") (FnDef | ExternDecl | GlobalVarDecl | UseDecl)
*/
static void ast_parse_top_level_decls(ParseContext *pc, size_t *token_index, ZigList<AstNode *> *top_level_decls) {
    // Repeatedly parse top-level items until nothing matches.
    // TopLevelItem = ErrorValueDecl | CompTimeExpression(Block) | TopLevelDecl | TestDecl
    for (;;) {
        // comptime blocks and test decls take no "pub" prefix.
        AstNode *item = ast_parse_comptime_expr(pc, token_index, true, false);
        if (item == nullptr)
            item = ast_parse_test_decl_node(pc, token_index);
        if (item != nullptr) {
            top_level_decls->append(item);
            continue;
        }
        // Everything below may be prefixed with "pub".
        Token *pub_token = &pc->tokens->at(*token_index);
        VisibMod visib_mod = VisibModPrivate;
        if (pub_token->id == TokenIdKeywordPub) {
            visib_mod = VisibModPub;
            *token_index += 1;
        }
        item = ast_parse_fn_def(pc, token_index, false, visib_mod);
        if (item == nullptr)
            item = ast_parse_extern_decl(pc, token_index, false, visib_mod);
        if (item == nullptr)
            item = ast_parse_use(pc, token_index, visib_mod);
        if (item != nullptr) {
            top_level_decls->append(item);
            continue;
        }
        // Global variable declarations carry their own trailing ";".
        item = ast_parse_global_var_decl(pc, token_index, visib_mod);
        if (item != nullptr) {
            ast_eat_token(pc, token_index, TokenIdSemicolon);
            top_level_decls->append(item);
            continue;
        }
        return;
    }
    zig_unreachable();
}
/*
Root = many(TopLevelItem) "EOF"
*/
static AstNode *ast_parse_root(ParseContext *pc, size_t *token_index) {
    // Root = many(TopLevelItem) "EOF"
    AstNode *root_node = ast_create_node(pc, NodeTypeRoot, &pc->tokens->at(*token_index));
    ast_parse_top_level_decls(pc, token_index, &root_node->data.root.top_level_decls);
    // Everything but the trailing EOF token must have been consumed.
    if (*token_index != pc->tokens->length - 1)
        ast_invalid_token_error(pc, &pc->tokens->at(*token_index));
    return root_node;
}
// Parser entry point: parses `tokens` (lexed from `buf`) into an AST and
// returns the root node. `owner` and `err_color` are threaded through the
// ParseContext for node attribution and error reporting.
AstNode *ast_parse(Buf *buf, ZigList<Token> *tokens, ImportTableEntry *owner,
        ErrColor err_color)
{
    // BUG FIX: this body previously contained an unresolved merge conflict —
    // `pc` was declared twice (a compile error) and two dead return paths
    // remained. Resolved in favor of the new recursive-descent entry point
    // ast_parse_root(pc), which tracks position via pc.current_token
    // (zero-initialized below) instead of an explicit token index.
    ParseContext pc = {};
    pc.void_buf = buf_create_from_str("void");
    pc.err_color = err_color;
    pc.owner = owner;
    pc.buf = buf;
    pc.tokens = tokens;
    return ast_parse_root(&pc);
}
// Root <- skip ContainerMembers eof
static AstNode *ast_parse_root(ParseContext *pc) {
    Token *first_token = peek_token(pc);
    AstNodeContainerDecl members = ast_parse_container_members(pc);
    // The only token allowed to remain afterwards is the trailing EOF.
    if (pc->current_token != pc->tokens->length - 1)
        ast_invalid_token_error(pc, peek_token(pc));
    // A file's root is modeled as an implicit auto-layout struct container.
    AstNode *root_node = ast_create_node(pc, NodeTypeContainerDecl, first_token);
    root_node->data.container_decl.fields = members.fields;
    root_node->data.container_decl.decls = members.decls;
    root_node->data.container_decl.layout = ContainerLayoutAuto;
    root_node->data.container_decl.kind = ContainerKindStruct;
    root_node->data.container_decl.is_root = true;
    return root_node;
}
// ContainerMembers
// <- TestDecl ContainerMembers
// / TopLevelComptime ContainerMembers
// / KEYWORD_pub? TopLevelDecl ContainerMembers
// / KEYWORD_pub? ContainerField COMMA ContainerMembers
// / KEYWORD_pub? ContainerField
// /
static AstNodeContainerDecl ast_parse_container_members(ParseContext *pc) {
    AstNodeContainerDecl result = {};
    for (;;) {
        // Test declarations and top-level comptime blocks take no "pub" prefix.
        AstNode *member = ast_parse_test_decl(pc);
        if (member == nullptr)
            member = ast_parse_top_level_comptime(pc);
        if (member != nullptr) {
            result.decls.append(member);
            continue;
        }
        // Everything below may be prefixed with "pub".
        Token *visib_token = eat_token_if(pc, TokenIdKeywordPub);
        VisibMod visib_mod = (visib_token != nullptr) ? VisibModPub : VisibModPrivate;
        AstNode *decl = ast_parse_top_level_decl(pc, visib_mod);
        if (decl != nullptr) {
            result.decls.append(decl);
            continue;
        }
        AstNode *field = ast_parse_container_field(pc);
        if (field != nullptr) {
            assert(field->type == NodeTypeStructField);
            field->data.struct_field.visib_mod = visib_mod;
            result.fields.append(field);
            // Fields are comma separated; a missing comma ends the member list.
            if (eat_token_if(pc, TokenIdComma) == nullptr)
                break;
            continue;
        }
        // If visib_token wasn't eaten, no token of this rule has been consumed
        // yet, so it is safe to stop and let the caller continue parsing.
        if (visib_token == nullptr)
            break;
        ast_invalid_token_error(pc, peek_token(pc));
    }
    return result;
}
// TestDecl <- KEYWORD_test STRINGLITERAL Block
static AstNode *ast_parse_test_decl(ParseContext *pc) {
    // KEYWORD_test STRINGLITERAL Block
    Token *test_token = eat_token_if(pc, TokenIdKeywordTest);
    if (test_token == nullptr)
        return nullptr;
    Token *name_token = expect_token(pc, TokenIdStringLiteral);
    AstNode *body = ast_expect(pc, ast_parse_block);
    AstNode *result = ast_create_node(pc, NodeTypeTestDecl, test_token);
    result->data.test_decl.name = token_buf(name_token);
    result->data.test_decl.body = body;
    return result;
}
// TopLevelComptime <- KEYWORD_comptime BlockExpr
static AstNode *ast_parse_top_level_comptime(ParseContext *pc) {
    // KEYWORD_comptime BlockExpr
    Token *comptime_token = eat_token_if(pc, TokenIdKeywordCompTime);
    if (comptime_token == nullptr)
        return nullptr;
    AstNode *body = ast_expect(pc, ast_parse_block_expr);
    AstNode *result = ast_create_node(pc, NodeTypeCompTime, comptime_token);
    result->data.comptime_expr.expr = body;
    return result;
}
// TopLevelDecl
// <- (KEYWORD_export / KEYWORD_extern STRINGLITERAL? / KEYWORD_inline)? FnProto (SEMICOLON / Block)
// / (KEYWORD_export / KEYWORD_extern STRINGLITERAL?)? VarDecl
// / KEYWORD_use Expr SEMICOLON
// Parses one top-level declaration (see grammar comment above):
// an optionally export/extern/inline-prefixed FnProto or VarDecl, or a use decl.
// Returns nullptr (consuming nothing) when no alternative matches.
static AstNode *ast_parse_top_level_decl(ParseContext *pc, VisibMod visib_mod) {
    // Optional prefix keyword; at most one of export/extern/inline.
    Token *first = eat_token_if(pc, TokenIdKeywordExport);
    if (first == nullptr)
        first = eat_token_if(pc, TokenIdKeywordExtern);
    if (first == nullptr)
        first = eat_token_if(pc, TokenIdKeywordInline);
    if (first != nullptr) {
        // Only "extern" may carry a library-name string.
        Token *lib_name = nullptr;
        if (first->id == TokenIdKeywordExtern)
            lib_name = eat_token_if(pc, TokenIdStringLiteral);
        // "inline" applies only to functions, so skip VarDecl in that case.
        if (first->id != TokenIdKeywordInline) {
            AstNode *var_decl = ast_parse_var_decl(pc);
            if (var_decl != nullptr) {
                assert(var_decl->type == NodeTypeVariableDeclaration);
                // Attribute the decl to the prefix keyword's position.
                var_decl->line = first->start_line;
                var_decl->column = first->start_column;
                var_decl->data.variable_declaration.visib_mod = visib_mod;
                var_decl->data.variable_declaration.is_extern = first->id == TokenIdKeywordExtern;
                var_decl->data.variable_declaration.is_export = first->id == TokenIdKeywordExport;
                // NOTE(review): lib_name may be nullptr here; token_buf
                // presumably tolerates a null token — confirm.
                var_decl->data.variable_declaration.lib_name = token_buf(lib_name);
                return var_decl;
            }
        }
        AstNode *fn_proto = ast_parse_fn_proto(pc);
        if (fn_proto != nullptr) {
            // A prototype is followed by a Block body or a ";".
            AstNode *body = ast_parse_block(pc);
            if (body == nullptr)
                expect_token(pc, TokenIdSemicolon);
            assert(fn_proto->type == NodeTypeFnProto);
            fn_proto->line = first->start_line;
            fn_proto->column = first->start_column;
            fn_proto->data.fn_proto.visib_mod = visib_mod;
            fn_proto->data.fn_proto.is_extern = first->id == TokenIdKeywordExtern;
            fn_proto->data.fn_proto.is_export = first->id == TokenIdKeywordExport;
            fn_proto->data.fn_proto.is_inline = first->id == TokenIdKeywordInline;
            fn_proto->data.fn_proto.lib_name = token_buf(lib_name);
            AstNode *res = fn_proto;
            if (body != nullptr) {
                // With a body, wrap proto + body in a FnDef node, linked both ways.
                res = ast_create_node_copy_line_info(pc, NodeTypeFnDef, fn_proto);
                res->data.fn_def.fn_proto = fn_proto;
                res->data.fn_def.body = body;
                fn_proto->data.fn_proto.fn_def_node = res;
            }
            return res;
        }
        // Prefix keyword with neither a var decl nor a fn proto after it.
        ast_invalid_token_error(pc, peek_token(pc));
    }
    // No prefix keyword: plain VarDecl, FnProto (with optional body), or use decl.
    AstNode *var_decl = ast_parse_var_decl(pc);
    if (var_decl != nullptr) {
        assert(var_decl->type == NodeTypeVariableDeclaration);
        var_decl->data.variable_declaration.visib_mod = visib_mod;
        return var_decl;
    }
    AstNode *fn_proto = ast_parse_fn_proto(pc);
    if (fn_proto != nullptr) {
        AstNode *body = ast_parse_block(pc);
        if (body == nullptr)
            expect_token(pc, TokenIdSemicolon);
        assert(fn_proto->type == NodeTypeFnProto);
        fn_proto->data.fn_proto.visib_mod = visib_mod;
        AstNode *res = fn_proto;
        if (body != nullptr) {
            res = ast_create_node_copy_line_info(pc, NodeTypeFnDef, fn_proto);
            res->data.fn_def.fn_proto = fn_proto;
            res->data.fn_def.body = body;
            fn_proto->data.fn_proto.fn_def_node = res;
        }
        return res;
    }
    // KEYWORD_use Expr SEMICOLON
    Token *use = eat_token_if(pc, TokenIdKeywordUse);
    if (use != nullptr) {
        AstNode *expr = ast_expect(pc, ast_parse_expr);
        expect_token(pc, TokenIdSemicolon);
        AstNode *res = ast_create_node(pc, NodeTypeUse, use);
        res->data.use.visib_mod = visib_mod;
        res->data.use.expr = expr;
        return res;
    }
    return nullptr;
}
// FnProto <- FnCC? KEYWORD_fn IDENTIFIER? LPAREN ParamDeclList RPAREN ByteAlign? Section? EXCLAMATIONMARK? (KEYWORD_var / TypeExpr)
// Parses a function prototype per the grammar comment above. Returns the
// NodeTypeFnProto node, or nullptr (consuming nothing) if no prototype
// starts at the current token.
static AstNode *ast_parse_fn_proto(ParseContext *pc) {
    Token *first = peek_token(pc);
    AstNodeFnProto fn_cc;
    Token *fn;
    if (ast_parse_fn_cc(pc).unwrap(&fn_cc)) {
        // The "extern" keyword that selects the C calling convention is also
        // the prefix of extern container decls. If no "fn" follows, put the
        // token back and let the container-decl rule consume it later.
        if (fn_cc.cc == CallingConventionC) {
            fn = eat_token_if(pc, TokenIdKeywordFn);
            if (fn == nullptr) {
                put_back_token(pc);
                return nullptr;
            }
        } else {
            fn = expect_token(pc, TokenIdKeywordFn);
        }
    } else {
        fn_cc = {};
        fn = eat_token_if(pc, TokenIdKeywordFn);
        if (fn == nullptr)
            return nullptr;
    }
    Token *identifier = eat_token_if(pc, TokenIdSymbol);
    expect_token(pc, TokenIdLParen);
    ZigList<AstNode *> params = ast_parse_list(pc, TokenIdComma, ast_parse_param_decl);
    expect_token(pc, TokenIdRParen);
    AstNode *align_expr = ast_parse_byte_align(pc);
    AstNode *section_expr = ast_parse_section(pc);
    // Return type is either the keyword "var" or an optionally
    // "!"-prefixed (inferred error set) type expression.
    Token *var = eat_token_if(pc, TokenIdKeywordVar);
    Token *exmark = nullptr;
    AstNode *return_type = nullptr;
    if (var == nullptr) {
        exmark = eat_token_if(pc, TokenIdBang);
        return_type = ast_expect(pc, ast_parse_type_expr);
    }
    AstNode *res = ast_create_node(pc, NodeTypeFnProto, first);
    res->data.fn_proto = fn_cc;
    res->data.fn_proto.name = token_buf(identifier);
    res->data.fn_proto.params = params;
    res->data.fn_proto.align_expr = align_expr;
    res->data.fn_proto.section_expr = section_expr;
    res->data.fn_proto.return_var_token = var;
    res->data.fn_proto.auto_err_set = exmark != nullptr;
    res->data.fn_proto.return_type = return_type;
    // The compiler expects varargs to be the last parameter in the decl list.
    // This is not encoded in the grammar (and varargs is slated for removal),
    // so enforce the "varargs is always last" rule here.
    for (size_t i = 0; i < params.length; i++) {
        AstNode *param_decl = params.at(i);
        assert(param_decl->type == NodeTypeParamDecl);
        if (param_decl->data.param_decl.is_var_args)
            res->data.fn_proto.is_var_args = true;
        if (i != params.length - 1 && res->data.fn_proto.is_var_args)
            // BUG FIX: previous message was garbled ("have varargs as a none
            // last paramter").
            ast_error(pc, first, "Function prototype has varargs as a non-last parameter.");
    }
    return res;
}
// VarDecl <- (KEYWORD_const / KEYWORD_var) IDENTIFIER (COLON TypeExpr)? ByteAlign? Section? (EQUAL Expr)? SEMICOLON
static AstNode *ast_parse_var_decl(ParseContext *pc) {
    // (KEYWORD_const / KEYWORD_var) IDENTIFIER (COLON TypeExpr)?
    // ByteAlign? Section? (EQUAL Expr)? SEMICOLON
    Token *mut_token = eat_token_if(pc, TokenIdKeywordConst);
    if (mut_token == nullptr)
        mut_token = eat_token_if(pc, TokenIdKeywordVar);
    if (mut_token == nullptr)
        return nullptr;
    Token *name_token = expect_token(pc, TokenIdSymbol);
    AstNode *type_expr = nullptr;
    if (eat_token_if(pc, TokenIdColon) != nullptr)
        type_expr = ast_expect(pc, ast_parse_type_expr);
    AstNode *align_expr = ast_parse_byte_align(pc);
    AstNode *section_expr = ast_parse_section(pc);
    AstNode *init_expr = nullptr;
    if (eat_token_if(pc, TokenIdEq) != nullptr)
        init_expr = ast_expect(pc, ast_parse_expr);
    expect_token(pc, TokenIdSemicolon);
    AstNode *result = ast_create_node(pc, NodeTypeVariableDeclaration, mut_token);
    result->data.variable_declaration.is_const = (mut_token->id == TokenIdKeywordConst);
    result->data.variable_declaration.symbol = token_buf(name_token);
    result->data.variable_declaration.type = type_expr;
    result->data.variable_declaration.align_expr = align_expr;
    result->data.variable_declaration.section_expr = section_expr;
    result->data.variable_declaration.expr = init_expr;
    return result;
}
// ContainerField <- IDENTIFIER (COLON TypeExpr)? (EQUAL Expr)?
static AstNode *ast_parse_container_field(ParseContext *pc) {
    // IDENTIFIER (COLON TypeExpr)? (EQUAL Expr)?
    Token *name_token = eat_token_if(pc, TokenIdSymbol);
    if (name_token == nullptr)
        return nullptr;
    AstNode *type_expr = nullptr;
    if (eat_token_if(pc, TokenIdColon) != nullptr)
        type_expr = ast_expect(pc, ast_parse_type_expr);
    AstNode *default_expr = nullptr;
    if (eat_token_if(pc, TokenIdEq) != nullptr)
        default_expr = ast_expect(pc, ast_parse_expr);
    AstNode *result = ast_create_node(pc, NodeTypeStructField, name_token);
    result->data.struct_field.name = token_buf(name_token);
    result->data.struct_field.type = type_expr;
    result->data.struct_field.value = default_expr;
    return result;
}
// Statement
// <- KEYWORD_comptime? VarDecl
// / KEYWORD_comptime BlockExprStatement
// / KEYWORD_suspend (SEMICOLON / BlockExprStatement)
// / KEYWORD_defer BlockExprStatement
// / KEYWORD_errdefer BlockExprStatement
// / IfStatement
// / LabeledStatement
// / SwitchExpr
// / AssignExpr SEMICOLON
// Parses a single statement per the grammar comment above. Returns nullptr
// (having consumed at most a leading "comptime" handled below) when no
// statement alternative matches.
static AstNode *ast_parse_statement(ParseContext *pc) {
    // A leading "comptime" may prefix either a VarDecl or a block-expr statement.
    Token *comptime = eat_token_if(pc, TokenIdKeywordCompTime);
    AstNode *var_decl = ast_parse_var_decl(pc);
    if (var_decl != nullptr) {
        assert(var_decl->type == NodeTypeVariableDeclaration);
        var_decl->data.variable_declaration.is_comptime = comptime != nullptr;
        return var_decl;
    }
    if (comptime != nullptr) {
        // "comptime" without a var decl must be followed by a statement.
        AstNode *statement = ast_expect(pc, ast_parse_block_expr_statement);
        AstNode *res = ast_create_node(pc, NodeTypeCompTime, comptime);
        res->data.comptime_expr.expr = statement;
        return res;
    }
    // KEYWORD_suspend (SEMICOLON / BlockExprStatement)
    Token *suspend = eat_token_if(pc, TokenIdKeywordSuspend);
    if (suspend != nullptr) {
        AstNode *statement = nullptr;
        // Bare "suspend;" leaves the block null.
        if (eat_token_if(pc, TokenIdSemicolon) == nullptr)
            statement = ast_expect(pc, ast_parse_block_expr_statement);
        AstNode *res = ast_create_node(pc, NodeTypeSuspend, suspend);
        res->data.suspend.block = statement;
        return res;
    }
    // KEYWORD_defer / KEYWORD_errdefer BlockExprStatement
    Token *defer = eat_token_if(pc, TokenIdKeywordDefer);
    if (defer == nullptr)
        defer = eat_token_if(pc, TokenIdKeywordErrdefer);
    if (defer != nullptr) {
        AstNode *statement = ast_expect(pc, ast_parse_block_expr_statement);
        AstNode *res = ast_create_node(pc, NodeTypeDefer, defer);
        res->data.defer.kind = ReturnKindUnconditional;
        res->data.defer.expr = statement;
        // errdefer runs only on the error path.
        if (defer->id == TokenIdKeywordErrdefer)
            res->data.defer.kind = ReturnKindError;
        return res;
    }
    // Remaining alternatives, tried in grammar order.
    AstNode *if_statement = ast_parse_if_statement(pc);
    if (if_statement != nullptr)
        return if_statement;
    AstNode *labeled_statement = ast_parse_labeled_statement(pc);
    if (labeled_statement != nullptr)
        return labeled_statement;
    AstNode *switch_expr = ast_parse_switch_expr(pc);
    if (switch_expr != nullptr)
        return switch_expr;
    // AssignExpr requires a terminating ";".
    AstNode *assign = ast_parse_assign_expr(pc);
    if (assign != nullptr) {
        expect_token(pc, TokenIdSemicolon);
        return assign;
    }
    return nullptr;
}
// IfStatement
// <- IfPrefix BlockExpr ( KEYWORD_else Payload? Statement )?
// / IfPrefix AssignExpr ( SEMICOLON / KEYWORD_else Payload? Statement )
static AstNode *ast_parse_if_statement(ParseContext *pc) {
    // ast_parse_if_prefix yields a NodeTypeTestExpr carrying the condition
    // and the optional `|payload|` capture; once the branches are parsed,
    // that node is re-tagged in place into the specific if flavor.
    AstNode *res = ast_parse_if_prefix(pc);
    if (res == nullptr)
        return nullptr;
    // Body is either a block expr, or an assign expr that must be followed
    // by ';' unless an else branch is present.
    AstNode *body = ast_parse_block_expr(pc);
    bool requires_semi = false;
    if (body == nullptr) {
        requires_semi = true;
        body = ast_parse_assign_expr(pc);
    }
    Token *err_payload = nullptr;
    AstNode *else_body = nullptr;
    if (eat_token_if(pc, TokenIdKeywordElse) != nullptr) {
        err_payload = ast_parse_payload(pc);
        else_body = ast_expect(pc, ast_parse_statement);
    }
    if (requires_semi && else_body == nullptr)
        expect_token(pc, TokenIdSemicolon);
    assert(res->type == NodeTypeTestExpr);
    // An `else |err|` payload means this is an error-unwrapping if:
    // rewrite the TestExpr node into an IfErrorExpr.
    if (err_payload != nullptr) {
        AstNodeTestExpr old = res->data.test_expr;
        res->type = NodeTypeIfErrorExpr;
        res->data.if_err_expr.target_node = old.target_node;
        res->data.if_err_expr.var_is_ptr = old.var_is_ptr;
        res->data.if_err_expr.var_symbol = old.var_symbol;
        res->data.if_err_expr.then_node = body;
        res->data.if_err_expr.err_symbol = token_buf(err_payload);
        res->data.if_err_expr.else_node = else_body;
        return res;
    }
    // A capture in the prefix (`if (x) |v|`) keeps the TestExpr
    // (optional-unwrapping) form.
    if (res->data.test_expr.var_symbol != nullptr) {
        res->data.test_expr.then_node = body;
        res->data.test_expr.else_node = else_body;
        return res;
    }
    // No payloads anywhere: a plain boolean if.
    AstNodeTestExpr old = res->data.test_expr;
    res->type = NodeTypeIfBoolExpr;
    res->data.if_bool_expr.condition = old.target_node;
    res->data.if_bool_expr.then_block = body;
    res->data.if_bool_expr.else_node = else_body;
    return res;
}
// LabeledStatement <- BlockLabel? (Block / LoopStatement)
static AstNode *ast_parse_labeled_statement(ParseContext *pc) {
    // An optional "name:" label may precede either alternative.
    Token *label_token = ast_parse_block_label(pc);
    // First alternative: a plain block.
    AstNode *block_node = ast_parse_block(pc);
    if (block_node != nullptr) {
        assert(block_node->type == NodeTypeBlock);
        block_node->data.block.name = token_buf(label_token);
        return block_node;
    }
    // Second alternative: a for/while loop statement.
    AstNode *loop_node = ast_parse_loop_statement(pc);
    if (loop_node != nullptr) {
        if (loop_node->type == NodeTypeForExpr) {
            loop_node->data.for_expr.name = token_buf(label_token);
        } else if (loop_node->type == NodeTypeWhileExpr) {
            loop_node->data.while_expr.name = token_buf(label_token);
        } else {
            zig_unreachable();
        }
        return loop_node;
    }
    // A label with nothing to attach to is a parse error.
    if (label_token != nullptr)
        ast_invalid_token_error(pc, peek_token(pc));
    return nullptr;
}
// LoopStatement <- KEYWORD_inline? (ForStatement / WhileStatement)
static AstNode *ast_parse_loop_statement(ParseContext *pc) {
    // NOTE(review): the grammar line above has no BlockLabel, and the only
    // visible caller (ast_parse_labeled_statement) consumes any label before
    // calling here, so this label parse normally yields nullptr — confirm
    // whether it is intentionally redundant before removing it.
    Token *label = ast_parse_block_label(pc);
    // `first` remembers the earliest token consumed so an error can be
    // reported when a label/inline prefix is not followed by a loop.
    Token *first = label;
    Token *inline_token = eat_token_if(pc, TokenIdKeywordInline);
    if (first == nullptr)
        first = inline_token;
    AstNode *for_statement = ast_parse_for_statement(pc);
    if (for_statement != nullptr) {
        assert(for_statement->type == NodeTypeForExpr);
        for_statement->data.for_expr.name = token_buf(label);
        for_statement->data.for_expr.is_inline = inline_token != nullptr;
        return for_statement;
    }
    AstNode *while_statement = ast_parse_while_statement(pc);
    if (while_statement != nullptr) {
        assert(while_statement->type == NodeTypeWhileExpr);
        while_statement->data.while_expr.name = token_buf(label);
        while_statement->data.while_expr.is_inline = inline_token != nullptr;
        return while_statement;
    }
    // A consumed prefix with no loop following is a parse error.
    if (first != nullptr)
        ast_invalid_token_error(pc, peek_token(pc));
    return nullptr;
}
// ForStatement
//     <- ForPrefix BlockExpr ( KEYWORD_else Statement )?
//      / ForPrefix AssignExpr ( SEMICOLON / KEYWORD_else Statement )
static AstNode *ast_parse_for_statement(ParseContext *pc) {
    AstNode *node = ast_parse_for_prefix(pc);
    if (node == nullptr)
        return nullptr;
    // The body is either a block expression, or an assign expression that
    // must be terminated by ';' unless an else branch follows.
    bool need_semicolon = false;
    AstNode *body = ast_parse_block_expr(pc);
    if (body == nullptr) {
        need_semicolon = true;
        body = ast_parse_assign_expr(pc);
    }
    AstNode *else_node = nullptr;
    if (eat_token_if(pc, TokenIdKeywordElse) != nullptr)
        else_node = ast_expect(pc, ast_parse_statement);
    if (need_semicolon && else_node == nullptr)
        expect_token(pc, TokenIdSemicolon);
    assert(node->type == NodeTypeForExpr);
    node->data.for_expr.body = body;
    node->data.for_expr.else_node = else_node;
    return node;
}
// WhileStatement
//     <- WhilePrefix BlockExpr ( KEYWORD_else Payload? Statement )?
//      / WhilePrefix AssignExpr ( SEMICOLON / KEYWORD_else Payload? Statement )
static AstNode *ast_parse_while_statement(ParseContext *pc) {
    AstNode *node = ast_parse_while_prefix(pc);
    if (node == nullptr)
        return nullptr;
    // Body: a block expression, or an assign expression needing a ';' when
    // no else branch follows.
    bool need_semicolon = false;
    AstNode *body = ast_parse_block_expr(pc);
    if (body == nullptr) {
        need_semicolon = true;
        body = ast_parse_assign_expr(pc);
    }
    // Optional else branch with an optional error payload capture.
    Token *err_payload = nullptr;
    AstNode *else_node = nullptr;
    if (eat_token_if(pc, TokenIdKeywordElse) != nullptr) {
        err_payload = ast_parse_payload(pc);
        else_node = ast_expect(pc, ast_parse_statement);
    }
    if (need_semicolon && else_node == nullptr)
        expect_token(pc, TokenIdSemicolon);
    assert(node->type == NodeTypeWhileExpr);
    node->data.while_expr.body = body;
    node->data.while_expr.err_symbol = token_buf(err_payload);
    node->data.while_expr.else_node = else_node;
    return node;
}
// BlockExprStatement
//     <- BlockExpr
//      / AssignExpr SEMICOLON
static AstNode *ast_parse_block_expr_statement(ParseContext *pc) {
    // A block expression stands on its own and needs no semicolon.
    AstNode *res = ast_parse_block_expr(pc);
    if (res == nullptr) {
        // Otherwise an assign expression followed by ';' is required.
        res = ast_parse_assign_expr(pc);
        if (res == nullptr)
            return nullptr;
        expect_token(pc, TokenIdSemicolon);
    }
    return res;
}
// BlockExpr <- BlockLabel? Block
static AstNode *ast_parse_block_expr(ParseContext *pc) {
    Token *label_token = ast_parse_block_label(pc);
    if (label_token == nullptr)
        return ast_parse_block(pc);
    // Once a label has been consumed, the block itself is mandatory.
    AstNode *block_node = ast_expect(pc, ast_parse_block);
    assert(block_node->type == NodeTypeBlock);
    block_node->data.block.name = token_buf(label_token);
    return block_node;
}
// AssignExpr <- Expr (AssignOp Expr)?
static AstNode *ast_parse_assign_expr(ParseContext *pc) {
    // At most one assignment operator may appear, hence chain-once.
    return ast_parse_bin_op_expr(
        pc,
        BinOpChainOnce,
        ast_parse_assign_op,
        ast_parse_expr
    );
}
// Expr <- KEYWORD_try* BoolOrExpr
static AstNode *ast_parse_expr(ParseContext *pc) {
    // Each leading `try` becomes a ReturnKindError return-expr node that
    // wraps the remainder of the expression.
    return ast_parse_prefix_op_expr(
        pc,
        [](ParseContext *ctx) {
            Token *try_token = eat_token_if(ctx, TokenIdKeywordTry);
            if (try_token == nullptr)
                return (AstNode*)nullptr;
            AstNode *node = ast_create_node(ctx, NodeTypeReturnExpr, try_token);
            node->data.return_expr.kind = ReturnKindError;
            return node;
        },
        ast_parse_bool_or_expr
    );
}
// BoolOrExpr <- BoolAndExpr (KEYWORD_or BoolAndExpr)*
static AstNode *ast_parse_bool_or_expr(ParseContext *pc) {
    // `or` chains indefinitely (left associative).
    return ast_parse_bin_op_expr(pc, BinOpChainInf,
            ast_parse_bin_op_simple<TokenIdKeywordOr, BinOpTypeBoolOr>, ast_parse_bool_and_expr);
}
// BoolAndExpr <- CompareExpr (KEYWORD_and CompareExpr)*
static AstNode *ast_parse_bool_and_expr(ParseContext *pc) {
    // `and` chains indefinitely (left associative).
    return ast_parse_bin_op_expr(pc, BinOpChainInf,
            ast_parse_bin_op_simple<TokenIdKeywordAnd, BinOpTypeBoolAnd>, ast_parse_compare_expr);
}
// CompareExpr <- BitwiseExpr (CompareOp BitwiseExpr)?
static AstNode *ast_parse_compare_expr(ParseContext *pc) {
    // Comparison operators do not chain — the grammar allows at most one
    // CompareOp here (the trailing `?`), so this must be BinOpChainOnce.
    // BinOpChainInf would silently accept `a == b == c`.
    return ast_parse_bin_op_expr(pc, BinOpChainOnce, ast_parse_compare_op, ast_parse_bitwise_expr);
}
// BitwiseExpr <- BitShiftExpr (BitwiseOp BitShiftExpr)*
static AstNode *ast_parse_bitwise_expr(ParseContext *pc) {
    // Bitwise operators chain indefinitely (left associative).
    return ast_parse_bin_op_expr(
        pc,
        BinOpChainInf,
        ast_parse_bitwise_op,
        ast_parse_bit_shit_expr
    );
}
// BitShiftExpr <- AdditionExpr (BitShiftOp AdditionExpr)*
// NOTE(review): the function name has a typo ("shit" for "shift"); kept
// as-is because callers elsewhere reference this spelling.
static AstNode *ast_parse_bit_shit_expr(ParseContext *pc) {
    return ast_parse_bin_op_expr(
        pc,
        BinOpChainInf,
        ast_parse_bit_shift_op,
        ast_parse_addition_expr
    );
}
// AdditionExpr <- MultiplyExpr (AdditionOp MultiplyExpr)*
static AstNode *ast_parse_addition_expr(ParseContext *pc) {
    // Additive operators chain indefinitely (left associative).
    return ast_parse_bin_op_expr(
        pc,
        BinOpChainInf,
        ast_parse_addition_op,
        ast_parse_multiply_expr
    );
}
// MultiplyExpr <- PrefixExpr (MultiplyOp PrefixExpr)*
static AstNode *ast_parse_multiply_expr(ParseContext *pc) {
    // Multiplicative operators chain indefinitely (left associative).
    return ast_parse_bin_op_expr(
        pc,
        BinOpChainInf,
        ast_parse_multiply_op,
        ast_parse_prefix_expr
    );
}
// PrefixExpr <- PrefixOp* PrimaryExpr
static AstNode *ast_parse_prefix_expr(ParseContext *pc) {
    // Any number of prefix operators may wrap the primary expression.
    return ast_parse_prefix_op_expr(pc, ast_parse_prefix_op, ast_parse_primary_expr);
}
// PrimaryExpr
// <- AsmExpr
// / IfExpr
// / KEYWORD_break BreakLabel? Expr?
// / KEYWORD_cancel Expr
// / KEYWORD_comptime Expr
// / KEYWORD_continue BreakLabel?
// / KEYWORD_resume Expr
// / KEYWORD_return Expr?
// / LabeledExpr
// / CurlySuffixExpr
static AstNode *ast_parse_primary_expr(ParseContext *pc) {
    // Tries each alternative in grammar order; sub-parsers consume nothing
    // when their leading token does not match.
    AstNode *asm_expr = ast_parse_asm_expr(pc);
    if (asm_expr != nullptr)
        return asm_expr;
    AstNode *if_expr = ast_parse_if_expr(pc);
    if (if_expr != nullptr)
        return if_expr;
    // `break :label expr` — both the label and the value are optional.
    Token *break_token = eat_token_if(pc, TokenIdKeywordBreak);
    if (break_token != nullptr) {
        Token *label = ast_parse_break_label(pc);
        AstNode *expr = ast_parse_expr(pc);
        AstNode *res = ast_create_node(pc, NodeTypeBreak, break_token);
        res->data.break_expr.name = token_buf(label);
        res->data.break_expr.expr = expr;
        return res;
    }
    // `cancel expr` — the operand is mandatory.
    Token *cancel = eat_token_if(pc, TokenIdKeywordCancel);
    if (cancel != nullptr) {
        AstNode *expr = ast_expect(pc, ast_parse_expr);
        AstNode *res = ast_create_node(pc, NodeTypeCancel, cancel);
        res->data.cancel_expr.expr = expr;
        return res;
    }
    Token *comptime = eat_token_if(pc, TokenIdKeywordCompTime);
    if (comptime != nullptr) {
        AstNode *expr = ast_expect(pc, ast_parse_expr);
        AstNode *res = ast_create_node(pc, NodeTypeCompTime, comptime);
        res->data.comptime_expr.expr = expr;
        return res;
    }
    // `continue :label` — label optional, no value.
    Token *continue_token = eat_token_if(pc, TokenIdKeywordContinue);
    if (continue_token != nullptr) {
        Token *label = ast_parse_break_label(pc);
        AstNode *res = ast_create_node(pc, NodeTypeContinue, continue_token);
        res->data.continue_expr.name = token_buf(label);
        return res;
    }
    Token *resume = eat_token_if(pc, TokenIdKeywordResume);
    if (resume != nullptr) {
        AstNode *expr = ast_expect(pc, ast_parse_expr);
        AstNode *res = ast_create_node(pc, NodeTypeResume, resume);
        res->data.resume_expr.expr = expr;
        return res;
    }
    // `return expr?` — a bare `return` leaves the expr null.
    Token *return_token = eat_token_if(pc, TokenIdKeywordReturn);
    if (return_token != nullptr) {
        AstNode *expr = ast_parse_expr(pc);
        AstNode *res = ast_create_node(pc, NodeTypeReturnExpr, return_token);
        res->data.return_expr.expr = expr;
        return res;
    }
    AstNode *labeled_expr = ast_parse_labeled_expr(pc);
    if (labeled_expr != nullptr)
        return labeled_expr;
    AstNode *curly_suffix = ast_parse_curly_suffix_expr(pc);
    if (curly_suffix != nullptr)
        return curly_suffix;
    return nullptr;
}
// IfExpr <- IfPrefix Expr (KEYWORD_else Payload? Expr)?
static AstNode *ast_parse_if_expr(ParseContext *pc) {
    // Shares the generic if parser, with Expr branches.
    AstNode *res = ast_parse_if_expr_helper(pc, ast_parse_expr);
    return res;
}
// LabeledExpr <- BlockLabel? (Block / LoopExpr)
static AstNode *ast_parse_labeled_expr(ParseContext *pc) {
    // An optional "name:" label may precede either alternative.
    Token *label_token = ast_parse_block_label(pc);
    // First alternative: a plain block.
    AstNode *block_node = ast_parse_block(pc);
    if (block_node != nullptr) {
        assert(block_node->type == NodeTypeBlock);
        block_node->data.block.name = token_buf(label_token);
        return block_node;
    }
    // Second alternative: a for/while loop expression.
    AstNode *loop_node = ast_parse_loop_expr(pc);
    if (loop_node != nullptr) {
        if (loop_node->type == NodeTypeForExpr) {
            loop_node->data.for_expr.name = token_buf(label_token);
        } else if (loop_node->type == NodeTypeWhileExpr) {
            loop_node->data.while_expr.name = token_buf(label_token);
        } else {
            zig_unreachable();
        }
        return loop_node;
    }
    // A label with nothing to attach to is a parse error.
    if (label_token != nullptr)
        ast_invalid_token_error(pc, peek_token(pc));
    return nullptr;
}
// Block <- LBRACE Statement* RBRACE
static AstNode *ast_parse_block(ParseContext *pc) {
    Token *lbrace = eat_token_if(pc, TokenIdLBrace);
    if (lbrace == nullptr)
        return nullptr;
    // Collect statements until none can be parsed, then require '}'.
    ZigList<AstNode *> statements = {};
    for (AstNode *stmt = ast_parse_statement(pc); stmt != nullptr; stmt = ast_parse_statement(pc))
        statements.append(stmt);
    expect_token(pc, TokenIdRBrace);
    AstNode *res = ast_create_node(pc, NodeTypeBlock, lbrace);
    res->data.block.statements = statements;
    return res;
}
// LoopExpr <- KEYWORD_inline? (ForExpr / WhileExpr)
static AstNode *ast_parse_loop_expr(ParseContext *pc) {
    // Delegates to the shared loop parser with expression-level bodies.
    return ast_parse_loop_expr_helper(pc, ast_parse_for_expr, ast_parse_while_expr);
}
// ForExpr <- ForPrefix Expr (KEYWORD_else Expr)?
static AstNode *ast_parse_for_expr(ParseContext *pc) {
    // Same shape as ForStatement, but with Expr bodies.
    AstNode *res = ast_parse_for_expr_helper(pc, ast_parse_expr);
    return res;
}
// WhileExpr <- WhilePrefix Expr (KEYWORD_else Payload? Expr)?
static AstNode *ast_parse_while_expr(ParseContext *pc) {
    // Same shape as WhileStatement, but with Expr bodies.
    AstNode *res = ast_parse_while_expr_helper(pc, ast_parse_expr);
    return res;
}
// CurlySuffixExpr <- TypeExpr InitList?
static AstNode *ast_parse_curly_suffix_expr(ParseContext *pc) {
    AstNode *type_expr = ast_parse_type_expr(pc);
    if (type_expr == nullptr)
        return nullptr;
    // Without a following init list this is just the type expression.
    AstNode *init_list = ast_parse_init_list(pc);
    if (init_list == nullptr)
        return type_expr;
    assert(init_list->type == NodeTypeContainerInitExpr);
    init_list->data.container_init_expr.type = type_expr;
    return init_list;
}
// InitList
// <- LBRACE FieldInit (COMMA FieldInit)* COMMA? RBRACE
// / LBRACE Expr (COMMA Expr)* COMMA? RBRACE
// / LBRACE RBRACE
static AstNode *ast_parse_init_list(ParseContext *pc) {
    // Parses a `{...}` container initializer. The first element decides the
    // kind: a `.field = ...` init makes this a struct init, anything else
    // (including nothing) an array init. A trailing comma is tolerated
    // because the element loops break when nothing follows a comma.
    Token *lbrace = eat_token_if(pc, TokenIdLBrace);
    if (lbrace == nullptr)
        return nullptr;
    AstNode *first = ast_parse_field_init(pc);
    if (first != nullptr) {
        AstNode *res = ast_create_node(pc, NodeTypeContainerInitExpr, lbrace);
        res->data.container_init_expr.kind = ContainerInitKindStruct;
        res->data.container_init_expr.entries.append(first);
        while (eat_token_if(pc, TokenIdComma) != nullptr) {
            AstNode *field_init = ast_parse_field_init(pc);
            if (field_init == nullptr)
                break;
            res->data.container_init_expr.entries.append(field_init);
        }
        expect_token(pc, TokenIdRBrace);
        return res;
    }
    // Array init; `{}` yields an empty entry list of array kind.
    AstNode *res = ast_create_node(pc, NodeTypeContainerInitExpr, lbrace);
    res->data.container_init_expr.kind = ContainerInitKindArray;
    first = ast_parse_expr(pc);
    if (first != nullptr) {
        res->data.container_init_expr.entries.append(first);
        while (eat_token_if(pc, TokenIdComma) != nullptr) {
            AstNode *expr = ast_parse_expr(pc);
            if (expr == nullptr)
                break;
            res->data.container_init_expr.entries.append(expr);
        }
        expect_token(pc, TokenIdRBrace);
        return res;
    }
    expect_token(pc, TokenIdRBrace);
    return res;
}
// TypeExpr <- PrefixTypeOp* ErrorUnionExpr
static AstNode *ast_parse_type_expr(ParseContext *pc) {
    // Any number of prefix type operators may wrap the error union expr.
    return ast_parse_prefix_op_expr(pc, ast_parse_prefix_type_op, ast_parse_error_union_expr);
}
// ErrorUnionExpr <- SuffixExpr (EXCLAMATIONMARK TypeExpr)?
static AstNode *ast_parse_error_union_expr(ParseContext *pc) {
    AstNode *left = ast_parse_suffix_expr(pc);
    if (left == nullptr)
        return nullptr;
    // A '!' after the suffix expression forms an error-union binary op;
    // the right-hand type expression is then mandatory.
    AstNode *bang_op = ast_parse_bin_op_simple<TokenIdBang, BinOpTypeErrorUnion>(pc);
    if (bang_op == nullptr)
        return left;
    AstNode *right = ast_expect(pc, ast_parse_type_expr);
    assert(bang_op->type == NodeTypeBinOpExpr);
    bang_op->data.bin_op_expr.op1 = left;
    bang_op->data.bin_op_expr.op2 = right;
    return bang_op;
}
// SuffixExpr
// <- AsyncPrefix PrimaryTypeExpr SuffixOp* FnCallArgumnets
// / PrimaryTypeExpr (SuffixOp / FnCallArgumnets)*
static AstNode *ast_parse_suffix_expr(ParseContext *pc) {
    // Parses a primary type expression plus any number of suffixes
    // (indexing, slicing, field access, `.?`, `.*`, call arguments). The
    // async branch requires exactly one trailing call.
    AstNode *async_call = ast_parse_async_prefix(pc);
    if (async_call != nullptr) {
        if (eat_token_if(pc, TokenIdKeywordFn) != nullptr) {
            // HACK: If we see the keyword `fn`, then we assume that
            // we are parsing an async fn proto, and not a call.
            // We therefore put back all tokens consumed by the async
            // prefix...
            // HACK: This loop is not actually enough to put back all the
            // tokens. Let's hope this is fine for most code right now
            // and wait till we get the async rework for a syntax update.
            do {
                put_back_token(pc);
            } while (peek_token(pc)->id != TokenIdKeywordAsync);
            return ast_parse_primary_type_expr(pc);
        }
        AstNode *child = ast_expect(pc, ast_parse_primary_type_expr);
        // Each suffix node wraps the expression parsed so far; `child`
        // always tracks the outermost node built.
        while (true) {
            AstNode *suffix = ast_parse_suffix_op(pc);
            if (suffix == nullptr)
                break;
            switch (suffix->type) {
                case NodeTypeSliceExpr:
                    suffix->data.slice_expr.array_ref_expr = child;
                    break;
                case NodeTypeArrayAccessExpr:
                    suffix->data.array_access_expr.array_ref_expr = child;
                    break;
                case NodeTypeFieldAccessExpr:
                    suffix->data.field_access_expr.struct_expr = child;
                    break;
                case NodeTypeUnwrapOptional:
                    suffix->data.unwrap_optional.expr = child;
                    break;
                case NodeTypePtrDeref:
                    suffix->data.ptr_deref_expr.target = child;
                    break;
                default:
                    zig_unreachable();
            }
            child = suffix;
        }
        // TODO: Both *_async_prefix and *_fn_call_argumnets returns an
        //       AstNode *. All we really want here is the arguments of
        //       the call we parse. We therefor "leak" the node for now.
        //       Wait till we get async rework to fix this.
        AstNode *args = ast_parse_fn_call_argumnets(pc);
        if (args == nullptr)
            ast_invalid_token_error(pc, peek_token(pc));
        assert(args->type == NodeTypeFnCallExpr);
        // Only the params are taken from the parsed call node; the async
        // prefix node becomes the actual call.
        async_call->data.fn_call_expr.fn_ref_expr = child;
        async_call->data.fn_call_expr.params = args->data.fn_call_expr.params;
        async_call->data.fn_call_expr.is_builtin = false;
        return async_call;
    }
    // Non-async path: suffix ops and calls may interleave freely.
    AstNode *res = ast_parse_primary_type_expr(pc);
    if (res == nullptr)
        return nullptr;
    while (true) {
        AstNode *suffix = ast_parse_suffix_op(pc);
        if (suffix != nullptr) {
            switch (suffix->type) {
                case NodeTypeSliceExpr:
                    suffix->data.slice_expr.array_ref_expr = res;
                    break;
                case NodeTypeArrayAccessExpr:
                    suffix->data.array_access_expr.array_ref_expr = res;
                    break;
                case NodeTypeFieldAccessExpr:
                    suffix->data.field_access_expr.struct_expr = res;
                    break;
                case NodeTypeUnwrapOptional:
                    suffix->data.unwrap_optional.expr = res;
                    break;
                case NodeTypePtrDeref:
                    suffix->data.ptr_deref_expr.target = res;
                    break;
                default:
                    zig_unreachable();
            }
            res = suffix;
            continue;
        }
        AstNode * call = ast_parse_fn_call_argumnets(pc);
        if (call != nullptr) {
            assert(call->type == NodeTypeFnCallExpr);
            call->data.fn_call_expr.fn_ref_expr = res;
            res = call;
            continue;
        }
        break;
    }
    return res;
}
// PrimaryTypeExpr
// <- BUILTININDENTIFIER FnCallArgumnets
// / CHAR_LITERAL
// / ContainerDecl
// / ErrorSetDecl
// / FLOAT
// / FnProto
// / GroupedExpr
// / LabeledTypeExpr
// / IDENTIFIER
// / IfTypeExpr
// / INTEGER
// / KEYWORD_anyerror
// / KEYWORD_comptime TypeExpr
// / KEYWORD_error DOT IDENTIFIER
// / KEYWORD_false
// / KEYWORD_null
// / KEYWORD_promise
// / KEYWORD_true
// / KEYWORD_undefined
// / KEYWORD_unreachable
// / STRINGLITERAL
// / SwitchExpr
static AstNode *ast_parse_primary_type_expr(ParseContext *pc) {
    // Tries each atom in grammar order; sub-parsers consume nothing when
    // their leading token does not match.
    // TODO: This is not in line with the grammar.
    //       Because the prev stage 1 tokenizer does not parse
    //       @[a-zA-Z_][a-zA-Z0-9_] as one token, it has to do a
    //       hack, where it accepts '@' (IDENTIFIER / KEYWORD_export).
    //       I'd say that it's better if '@' is part of the builtin
    //       identifier token.
    Token *at_sign = eat_token_if(pc, TokenIdAtSign);
    if (at_sign != nullptr) {
        // Builtin call: `@name(args...)`. `@export` needs special casing
        // since `export` lexes as a keyword, not a symbol.
        Buf *name;
        Token *token = eat_token_if(pc, TokenIdKeywordExport);
        if (token == nullptr) {
            token = expect_token(pc, TokenIdSymbol);
            name = token_buf(token);
        } else {
            name = buf_create_from_str("export");
        }
        AstNode *res = ast_expect(pc, ast_parse_fn_call_argumnets);
        AstNode *name_sym = ast_create_node(pc, NodeTypeSymbol, token);
        name_sym->data.symbol_expr.symbol = name;
        assert(res->type == NodeTypeFnCallExpr);
        // Point the call's line info at the '@' rather than the '('.
        res->line = at_sign->start_line;
        res->column = at_sign->start_column;
        res->data.fn_call_expr.fn_ref_expr = name_sym;
        res->data.fn_call_expr.is_builtin = true;
        return res;
    }
    Token *char_lit = eat_token_if(pc, TokenIdCharLiteral);
    if (char_lit != nullptr) {
        AstNode *res = ast_create_node(pc, NodeTypeCharLiteral, char_lit);
        res->data.char_literal.value = char_lit->data.char_lit.c;
        return res;
    }
    AstNode *container_decl = ast_parse_container_decl(pc);
    if (container_decl != nullptr)
        return container_decl;
    AstNode *error_set_decl = ast_parse_error_set_decl(pc);
    if (error_set_decl != nullptr)
        return error_set_decl;
    Token *float_lit = eat_token_if(pc, TokenIdFloatLiteral);
    if (float_lit != nullptr) {
        AstNode *res = ast_create_node(pc, NodeTypeFloatLiteral, float_lit);
        res->data.float_literal.bigfloat = &float_lit->data.float_lit.bigfloat;
        res->data.float_literal.overflow = float_lit->data.float_lit.overflow;
        return res;
    }
    AstNode *fn_proto = ast_parse_fn_proto(pc);
    if (fn_proto != nullptr)
        return fn_proto;
    AstNode *grouped_expr = ast_parse_grouped_expr(pc);
    if (grouped_expr != nullptr)
        return grouped_expr;
    AstNode *labeled_type_expr = ast_parse_labeled_type_expr(pc);
    if (labeled_type_expr != nullptr)
        return labeled_type_expr;
    Token *identifier = eat_token_if(pc, TokenIdSymbol);
    if (identifier != nullptr)
        return token_symbol(pc, identifier);
    AstNode *if_type_expr = ast_parse_if_type_expr(pc);
    if (if_type_expr != nullptr)
        return if_type_expr;
    Token *int_lit = eat_token_if(pc, TokenIdIntLiteral);
    if (int_lit != nullptr) {
        AstNode *res = ast_create_node(pc, NodeTypeIntLiteral, int_lit);
        res->data.int_literal.bigint = &int_lit->data.int_lit.bigint;
        return res;
    }
    Token *error_type = eat_token_if(pc, TokenIdKeywordAnyerror);
    if (error_type != nullptr)
        return ast_create_node(pc, NodeTypeErrorType, error_type);
    Token *comptime = eat_token_if(pc, TokenIdKeywordCompTime);
    if (comptime != nullptr) {
        AstNode *expr = ast_expect(pc, ast_parse_type_expr);
        AstNode *res = ast_create_node(pc, NodeTypeCompTime, comptime);
        res->data.comptime_expr.expr = expr;
        return res;
    }
    // `error.Name` — a single error value, modeled as a field access on
    // the error type node. (A bare `error {` was already claimed by
    // ast_parse_error_set_decl above.)
    Token *error = eat_token_if(pc, TokenIdKeywordError);
    if (error != nullptr) {
        Token *dot = expect_token(pc, TokenIdDot);
        Token *name = expect_token(pc, TokenIdSymbol);
        AstNode *left = ast_create_node(pc, NodeTypeErrorType, error);
        AstNode *res = ast_create_node(pc, NodeTypeFieldAccessExpr, dot);
        res->data.field_access_expr.struct_expr = left;
        res->data.field_access_expr.field_name = token_buf(name);
        return res;
    }
    Token *false_token = eat_token_if(pc, TokenIdKeywordFalse);
    if (false_token != nullptr) {
        AstNode *res = ast_create_node(pc, NodeTypeBoolLiteral, false_token);
        res->data.bool_literal.value = false;
        return res;
    }
    Token *null = eat_token_if(pc, TokenIdKeywordNull);
    if (null != nullptr)
        return ast_create_node(pc, NodeTypeNullLiteral, null);
    Token *promise = eat_token_if(pc, TokenIdKeywordPromise);
    if (promise != nullptr)
        return ast_create_node(pc, NodeTypePromiseType, promise);
    Token *true_token = eat_token_if(pc, TokenIdKeywordTrue);
    if (true_token != nullptr) {
        AstNode *res = ast_create_node(pc, NodeTypeBoolLiteral, true_token);
        res->data.bool_literal.value = true;
        return res;
    }
    Token *undefined = eat_token_if(pc, TokenIdKeywordUndefined);
    if (undefined != nullptr)
        return ast_create_node(pc, NodeTypeUndefinedLiteral, undefined);
    Token *unreachable = eat_token_if(pc, TokenIdKeywordUnreachable);
    if (unreachable != nullptr)
        return ast_create_node(pc, NodeTypeUnreachable, unreachable);
    Token *string_lit = eat_token_if(pc, TokenIdStringLiteral);
    if (string_lit != nullptr) {
        AstNode *res = ast_create_node(pc, NodeTypeStringLiteral, string_lit);
        res->data.string_literal.buf = token_buf(string_lit);
        res->data.string_literal.c = string_lit->data.str_lit.is_c_str;
        return res;
    }
    AstNode *switch_expr = ast_parse_switch_expr(pc);
    if (switch_expr != nullptr)
        return switch_expr;
    return nullptr;
}
// ContainerDecl <- (KEYWORD_extern / KEYWORD_packed)? ContainerDeclAuto
static AstNode *ast_parse_container_decl(ParseContext *pc) {
    // `extern` is not committal: when no container follows, the keyword is
    // handed back to the token stream (it may belong to some other
    // construct), whereas after `packed` a container is mandatory.
    Token *extern_token = eat_token_if(pc, TokenIdKeywordExtern);
    if (extern_token != nullptr) {
        AstNode *res = ast_parse_container_decl_auto(pc);
        if (res == nullptr) {
            put_back_token(pc);
            return nullptr;
        }
        assert(res->type == NodeTypeContainerDecl);
        // Line info points at the layout keyword, not the container keyword.
        res->line = extern_token->start_line;
        res->column = extern_token->start_column;
        res->data.container_decl.layout = ContainerLayoutExtern;
        return res;
    }
    Token *packed_token = eat_token_if(pc, TokenIdKeywordPacked);
    if (packed_token != nullptr) {
        AstNode *res = ast_expect(pc, ast_parse_container_decl_auto);
        assert(res->type == NodeTypeContainerDecl);
        res->line = packed_token->start_line;
        res->column = packed_token->start_column;
        res->data.container_decl.layout = ContainerLayoutPacked;
        return res;
    }
    return ast_parse_container_decl_auto(pc);
}
// ErrorSetDecl <- KEYWORD_error LBRACE IdentifierList RBRACE
static AstNode *ast_parse_error_set_decl(ParseContext *pc) {
    Token *error_kw = eat_token_if(pc, TokenIdKeywordError);
    if (error_kw == nullptr)
        return nullptr;
    if (eat_token_if(pc, TokenIdLBrace) == nullptr) {
        // No '{' follows (e.g. `error.Foo`), so hand the keyword back.
        put_back_token(pc);
        return nullptr;
    }
    // Comma separated identifiers become symbol nodes in the decl list.
    ZigList<AstNode *> decls = ast_parse_list<AstNode>(pc, TokenIdComma, [](ParseContext *ctx) {
        Token *ident = eat_token_if(ctx, TokenIdSymbol);
        if (ident == nullptr)
            return (AstNode*)nullptr;
        return token_symbol(ctx, ident);
    });
    expect_token(pc, TokenIdRBrace);
    AstNode *res = ast_create_node(pc, NodeTypeErrorSetDecl, error_kw);
    res->data.err_set_decl.decls = decls;
    return res;
}
// GroupedExpr <- LPAREN Expr RPAREN
static AstNode *ast_parse_grouped_expr(ParseContext *pc) {
    Token *lparen = eat_token_if(pc, TokenIdLParen);
    if (lparen == nullptr)
        return nullptr;
    // The inner expression is mandatory once '(' has been consumed.
    AstNode *inner = ast_expect(pc, ast_parse_expr);
    expect_token(pc, TokenIdRParen);
    AstNode *res = ast_create_node(pc, NodeTypeGroupedExpr, lparen);
    res->data.grouped_expr = inner;
    return res;
}
// IfTypeExpr <- IfPrefix TypeExpr (KEYWORD_else Payload? TypeExpr)?
static AstNode *ast_parse_if_type_expr(ParseContext *pc) {
    // Shares the generic if parser, with TypeExpr branches.
    AstNode *res = ast_parse_if_expr_helper(pc, ast_parse_type_expr);
    return res;
}
// LabeledTypeExpr
//     <- BlockLabel Block
//      / BlockLabel? LoopTypeExpr
static AstNode *ast_parse_labeled_type_expr(ParseContext *pc) {
    Token *label_token = ast_parse_block_label(pc);
    // Unlike LabeledExpr, a bare (unlabeled) block is not an alternative
    // here, so blocks are only attempted when a label was consumed.
    if (label_token != nullptr) {
        AstNode *block_node = ast_parse_block(pc);
        if (block_node != nullptr) {
            assert(block_node->type == NodeTypeBlock);
            block_node->data.block.name = token_buf(label_token);
            return block_node;
        }
    }
    AstNode *loop_node = ast_parse_loop_type_expr(pc);
    if (loop_node != nullptr) {
        if (loop_node->type == NodeTypeForExpr) {
            loop_node->data.for_expr.name = token_buf(label_token);
        } else if (loop_node->type == NodeTypeWhileExpr) {
            loop_node->data.while_expr.name = token_buf(label_token);
        } else {
            zig_unreachable();
        }
        return loop_node;
    }
    // A label with nothing to attach to is a parse error.
    if (label_token != nullptr)
        ast_invalid_token_error(pc, peek_token(pc));
    return nullptr;
}
// LoopTypeExpr <- KEYWORD_inline? (ForTypeExpr / WhileTypeExpr)
static AstNode *ast_parse_loop_type_expr(ParseContext *pc) {
    // Delegates to the shared loop parser with type-level bodies.
    return ast_parse_loop_expr_helper(pc, ast_parse_for_type_expr, ast_parse_while_type_expr);
}
// ForTypeExpr <- ForPrefix TypeExpr (KEYWORD_else TypeExpr)?
static AstNode *ast_parse_for_type_expr(ParseContext *pc) {
    // Same shape as ForExpr, but with TypeExpr bodies.
    AstNode *res = ast_parse_for_expr_helper(pc, ast_parse_type_expr);
    return res;
}
// WhileTypeExpr <- WhilePrefix TypeExpr (KEYWORD_else Payload? TypeExpr)?
static AstNode *ast_parse_while_type_expr(ParseContext *pc) {
    // Same shape as WhileExpr, but with TypeExpr bodies.
    AstNode *res = ast_parse_while_expr_helper(pc, ast_parse_type_expr);
    return res;
}
// SwitchExpr <- KEYWORD_switch LPAREN Expr RPAREN LBRACE SwitchProngList RBRACE
static AstNode *ast_parse_switch_expr(ParseContext *pc) {
    Token *switch_token = eat_token_if(pc, TokenIdKeywordSwitch);
    if (switch_token == nullptr)
        return nullptr;
    // switch (target) { prong, prong, ... }
    expect_token(pc, TokenIdLParen);
    AstNode *target = ast_expect(pc, ast_parse_expr);
    expect_token(pc, TokenIdRParen);
    expect_token(pc, TokenIdLBrace);
    ZigList<AstNode *> prongs = ast_parse_list(pc, TokenIdComma, ast_parse_switch_prong);
    expect_token(pc, TokenIdRBrace);
    AstNode *res = ast_create_node(pc, NodeTypeSwitchExpr, switch_token);
    res->data.switch_expr.expr = target;
    res->data.switch_expr.prongs = prongs;
    return res;
}
// AsmExpr <- KEYWORD_asm KEYWORD_volatile? LPAREN STRINGLITERAL AsmOutput? RPAREN
static AstNode *ast_parse_asm_expr(ParseContext *pc) {
    Token *asm_token = eat_token_if(pc, TokenIdKeywordAsm);
    if (asm_token == nullptr)
        return nullptr;
    Token *volatile_token = eat_token_if(pc, TokenIdKeywordVolatile);
    expect_token(pc, TokenIdLParen);
    Token *asm_template = expect_token(pc, TokenIdStringLiteral);
    // The output/input/clobber sub-parsers build and share one AsmExpr node
    // (each was created without line info); when there are no outputs a
    // fresh node is created here instead.
    AstNode *res = ast_parse_asm_output(pc);
    if (res == nullptr)
        res = ast_create_node_no_line_info(pc, NodeTypeAsmExpr);
    expect_token(pc, TokenIdRParen);
    // Either way, line info is patched to point at the `asm` keyword.
    res->line = asm_token->start_line;
    res->column = asm_token->start_column;
    res->data.asm_expr.is_volatile = volatile_token != nullptr;
    res->data.asm_expr.asm_template = token_buf(asm_template);
    // NOTE(review): ast_parse_asm_template presumably post-processes the
    // template string on the node — its definition is not visible here.
    ast_parse_asm_template(pc, res);
    return res;
}
// AsmOutput <- COLON AsmOutputList AsmInput?
static AstNode *ast_parse_asm_output(ParseContext *pc) {
    // Returns the NodeTypeAsmExpr node that accumulates the asm pieces:
    // the nested input parser may already have created one; otherwise a
    // node without line info is created here (the caller fills that in).
    if (eat_token_if(pc, TokenIdColon) == nullptr)
        return nullptr;
    ZigList<AsmOutput *> output_list = ast_parse_list(pc, TokenIdComma, ast_parse_asm_output_item);
    AstNode *res = ast_parse_asm_input(pc);
    if (res == nullptr)
        res = ast_create_node_no_line_info(pc, NodeTypeAsmExpr);
    res->data.asm_expr.output_list = output_list;
    return res;
}
// AsmOutputItem <- LBRACKET IDENTIFIER RBRACKET STRINGLITERAL LPAREN (MINUSRARROW TypeExpr / IDENTIFIER) RPAREN
static AsmOutput *ast_parse_asm_output_item(ParseContext *pc) {
    if (eat_token_if(pc, TokenIdLBracket) == nullptr)
        return nullptr;
    // [name] "constraint" (variable)  or  [name] "constraint" (-> type)
    Token *symbolic_name = expect_token(pc, TokenIdSymbol);
    expect_token(pc, TokenIdRBracket);
    Token *constraint_str = expect_token(pc, TokenIdStringLiteral);
    expect_token(pc, TokenIdLParen);
    Token *var_name = eat_token_if(pc, TokenIdSymbol);
    AstNode *return_type = nullptr;
    if (var_name == nullptr) {
        // No variable name, so `-> type` is required.
        expect_token(pc, TokenIdArrow);
        return_type = ast_expect(pc, ast_parse_type_expr);
    }
    expect_token(pc, TokenIdRParen);
    AsmOutput *res = allocate<AsmOutput>(1);
    res->asm_symbolic_name = token_buf(symbolic_name);
    res->constraint = token_buf(constraint_str);
    res->variable_name = token_buf(var_name);
    res->return_type = return_type;
    return res;
}
// AsmInput <- COLON AsmInputList AsmCloppers?
static AstNode *ast_parse_asm_input(ParseContext *pc) {
    // Same node-threading scheme as ast_parse_asm_output: reuse the node
    // created by the clobber parser when present, otherwise create one
    // without line info.
    if (eat_token_if(pc, TokenIdColon) == nullptr)
        return nullptr;
    ZigList<AsmInput *> input_list = ast_parse_list(pc, TokenIdComma, ast_parse_asm_input_item);
    AstNode *res = ast_parse_asm_cloppers(pc);
    if (res == nullptr)
        res = ast_create_node_no_line_info(pc, NodeTypeAsmExpr);
    res->data.asm_expr.input_list = input_list;
    return res;
}
// AsmInputItem <- LBRACKET IDENTIFIER RBRACKET STRINGLITERAL LPAREN Expr RPAREN
static AsmInput *ast_parse_asm_input_item(ParseContext *pc) {
    if (eat_token_if(pc, TokenIdLBracket) == nullptr)
        return nullptr;
    // [name] "constraint" (expr)
    Token *symbolic_name = expect_token(pc, TokenIdSymbol);
    expect_token(pc, TokenIdRBracket);
    Token *constraint_str = expect_token(pc, TokenIdStringLiteral);
    expect_token(pc, TokenIdLParen);
    AstNode *value = ast_expect(pc, ast_parse_expr);
    expect_token(pc, TokenIdRParen);
    AsmInput *res = allocate<AsmInput>(1);
    res->asm_symbolic_name = token_buf(symbolic_name);
    res->constraint = token_buf(constraint_str);
    res->expr = value;
    return res;
}
// AsmCloppers <- COLON StringList
static AstNode *ast_parse_asm_cloppers(ParseContext *pc) {
    if (eat_token_if(pc, TokenIdColon) == nullptr)
        return nullptr;
    // Comma separated string literals naming the clobbered registers.
    ZigList<Buf *> clobbers = ast_parse_list<Buf>(pc, TokenIdComma, [](ParseContext *ctx) {
        Token *str = eat_token_if(ctx, TokenIdStringLiteral);
        if (str == nullptr)
            return (Buf*)nullptr;
        return token_buf(str);
    });
    AstNode *res = ast_create_node_no_line_info(pc, NodeTypeAsmExpr);
    res->data.asm_expr.clobber_list = clobbers;
    return res;
}
// BreakLabel <- COLON IDENTIFIER
static Token *ast_parse_break_label(ParseContext *pc) {
    Token *colon = eat_token_if(pc, TokenIdColon);
    if (colon == nullptr)
        return nullptr;
    // After the ':' an identifier is mandatory.
    return expect_token(pc, TokenIdSymbol);
}
// BlockLabel <- IDENTIFIER COLON
static Token *ast_parse_block_label(ParseContext *pc) {
    Token *name = eat_token_if(pc, TokenIdSymbol);
    if (name == nullptr)
        return nullptr;
    // Two-token lookahead: without a following ':' this was an ordinary
    // identifier, so it is handed back instead of raising an error.
    if (eat_token_if(pc, TokenIdColon) == nullptr) {
        put_back_token(pc);
        return nullptr;
    }
    return name;
}
// FieldInit <- DOT IDENTIFIER EQUAL Expr
static AstNode *ast_parse_field_init(ParseContext *pc) {
    Token *dot = eat_token_if(pc, TokenIdDot);
    if (dot == nullptr)
        return nullptr;
    // .name = expr
    Token *field_name = expect_token(pc, TokenIdSymbol);
    expect_token(pc, TokenIdEq);
    AstNode *value = ast_expect(pc, ast_parse_expr);
    AstNode *res = ast_create_node(pc, NodeTypeStructValueField, dot);
    res->data.struct_val_field.name = token_buf(field_name);
    res->data.struct_val_field.expr = value;
    return res;
}
// WhileContinueExpr <- COLON LPAREN AssignExpr RPAREN
static AstNode *ast_parse_while_continue_expr(ParseContext *pc) {
    if (eat_token_if(pc, TokenIdColon) == nullptr)
        return nullptr;
    // Only the inner expression is returned; the surrounding ':' and
    // parentheses carry no information of their own.
    expect_token(pc, TokenIdLParen);
    AstNode *expr = ast_expect(pc, ast_parse_assign_expr);
    expect_token(pc, TokenIdRParen);
    return expr;
}
// Section <- KEYWORD_section LPAREN Expr RPAREN
static AstNode *ast_parse_section(ParseContext *pc) {
    if (eat_token_if(pc, TokenIdKeywordLinkSection) == nullptr)
        return nullptr;
    // Only the inner expression is returned; the keyword and parentheses
    // carry no information of their own.
    expect_token(pc, TokenIdLParen);
    AstNode *expr = ast_expect(pc, ast_parse_expr);
    expect_token(pc, TokenIdRParen);
    return expr;
}
// FnCC
//     <- KEYWORD_nakedcc
//      / KEYWORD_stdcallcc
//      / KEYWORD_extern
//      / KEYWORD_async (LARROW TypeExpr RARROW)?
static Optional<AstNodeFnProto> ast_parse_fn_cc(ParseContext *pc) {
    // Fills only the calling-convention related fields of an AstNodeFnProto;
    // returns none() when no CC keyword is present.
    AstNodeFnProto res = {};
    if (eat_token_if(pc, TokenIdKeywordNakedCC) != nullptr) {
        res.cc = CallingConventionNaked;
    } else if (eat_token_if(pc, TokenIdKeywordStdcallCC) != nullptr) {
        res.cc = CallingConventionStdcall;
    } else if (eat_token_if(pc, TokenIdKeywordExtern) != nullptr) {
        res.cc = CallingConventionC;
    } else if (eat_token_if(pc, TokenIdKeywordAsync) != nullptr) {
        res.cc = CallingConventionAsync;
        // Optional `<allocator-type>` annotation after `async`.
        if (eat_token_if(pc, TokenIdCmpLessThan) != nullptr) {
            res.async_allocator_type = ast_expect(pc, ast_parse_type_expr);
            expect_token(pc, TokenIdCmpGreaterThan);
        }
    } else {
        return Optional<AstNodeFnProto>::none();
    }
    return Optional<AstNodeFnProto>::some(res);
}
// ParamDecl <- (KEYWORD_noalias / KEYWORD_comptime)? (IDENTIFIER COLON)? ParamType
// Parses one function parameter declaration. `first` tracks the earliest
// token consumed; its location is copied onto the resulting node below.
static AstNode *ast_parse_param_decl(ParseContext *pc) {
    Token *first = eat_token_if(pc, TokenIdKeywordNoAlias);
    if (first == nullptr)
        first = eat_token_if(pc, TokenIdKeywordCompTime);
    Token *name = eat_token_if(pc, TokenIdSymbol);
    if (name != nullptr) {
        if (eat_token_if(pc, TokenIdColon) != nullptr) {
            if (first == nullptr)
                first = name;
        } else {
            // We put back the ident, so it can be parsed as a ParamType
            // later.
            put_back_token(pc);
            name = nullptr;
        }
    }
    AstNode *res;
    if (first == nullptr) {
        // Nothing was consumed yet, so failing to parse a ParamType is not
        // an error — there simply is no parameter here.
        first = peek_token(pc);
        res = ast_parse_param_type(pc);
    } else {
        // A qualifier or `name:` was already consumed; a type is required.
        res = ast_expect(pc, ast_parse_param_type);
    }
    if (res == nullptr)
        return nullptr;
    assert(res->type == NodeTypeParamDecl);
    // Re-home the node's location to the start of the whole declaration.
    res->line = first->start_line;
    res->column = first->start_column;
    res->data.param_decl.name = token_buf(name);
    res->data.param_decl.is_noalias = first->id == TokenIdKeywordNoAlias;
    res->data.param_decl.is_inline = first->id == TokenIdKeywordCompTime;
    return res;
}
// ParamType
//     <- KEYWORD_var
//      / DOT3
//      / TypeExpr
// Produces a NodeTypeParamDecl node describing just the type part of a
// parameter; ast_parse_param_decl fills in name/qualifiers afterwards.
static AstNode *ast_parse_param_type(ParseContext *pc) {
    Token *token;
    if ((token = eat_token_if(pc, TokenIdKeywordVar)) != nullptr) {
        // `var` parameter — type is inferred per call site.
        AstNode *node = ast_create_node(pc, NodeTypeParamDecl, token);
        node->data.param_decl.var_token = token;
        return node;
    }
    if ((token = eat_token_if(pc, TokenIdEllipsis3)) != nullptr) {
        // `...` marks a C-style var-args parameter.
        AstNode *node = ast_create_node(pc, NodeTypeParamDecl, token);
        node->data.param_decl.is_var_args = true;
        return node;
    }
    AstNode *type_expr = ast_parse_type_expr(pc);
    if (type_expr == nullptr)
        return nullptr;
    AstNode *node = ast_create_node_copy_line_info(pc, NodeTypeParamDecl, type_expr);
    node->data.param_decl.type = type_expr;
    return node;
}
// IfPrefix <- KEYWORD_if LPAREN Expr RPAREN PtrPayload?
// Parses `if (cond) |payload|` into a NodeTypeTestExpr node.
static AstNode *ast_parse_if_prefix(ParseContext *pc) {
    Token *if_token = eat_token_if(pc, TokenIdKeywordIf);
    if (if_token == nullptr)
        return nullptr;
    expect_token(pc, TokenIdLParen);
    AstNode *cond = ast_expect(pc, ast_parse_expr);
    expect_token(pc, TokenIdRParen);
    Optional<PtrPayload> maybe_payload = ast_parse_ptr_payload(pc);
    AstNode *node = ast_create_node(pc, NodeTypeTestExpr, if_token);
    node->data.test_expr.target_node = cond;
    PtrPayload payload;
    if (maybe_payload.unwrap(&payload)) {
        node->data.test_expr.var_symbol = token_buf(payload.payload);
        node->data.test_expr.var_is_ptr = payload.asterisk != nullptr;
    }
    return node;
}
// WhilePrefix <- KEYWORD_while LPAREN Expr RPAREN PtrPayload? WhileContinueExpr?
// Parses the head of a while loop (everything before the body).
static AstNode *ast_parse_while_prefix(ParseContext *pc) {
    Token *first = eat_token_if(pc, TokenIdKeywordWhile);
    if (first == nullptr)
        return nullptr;
    expect_token(pc, TokenIdLParen);
    AstNode *cond = ast_expect(pc, ast_parse_expr);
    expect_token(pc, TokenIdRParen);
    // Grammar order matters: the `|payload|` capture comes before the
    // `: (continue)` expression; both are optional.
    Optional<PtrPayload> maybe_payload = ast_parse_ptr_payload(pc);
    AstNode *cont = ast_parse_while_continue_expr(pc);
    AstNode *node = ast_create_node(pc, NodeTypeWhileExpr, first);
    node->data.while_expr.condition = cond;
    node->data.while_expr.continue_expr = cont;
    PtrPayload payload;
    if (maybe_payload.unwrap(&payload)) {
        node->data.while_expr.var_symbol = token_buf(payload.payload);
        node->data.while_expr.var_is_ptr = payload.asterisk != nullptr;
    }
    return node;
}
// ForPrefix <- KEYWORD_for LPAREN Expr RPAREN PtrIndexPayload
// Parses the head of a for loop. Unlike if/while, the payload is mandatory.
static AstNode *ast_parse_for_prefix(ParseContext *pc) {
    Token *first = eat_token_if(pc, TokenIdKeywordFor);
    if (first == nullptr)
        return nullptr;
    expect_token(pc, TokenIdLParen);
    AstNode *iterated = ast_expect(pc, ast_parse_expr);
    expect_token(pc, TokenIdRParen);
    PtrIndexPayload payload;
    if (!ast_parse_ptr_index_payload(pc).unwrap(&payload))
        ast_invalid_token_error(pc, peek_token(pc));
    AstNode *node = ast_create_node(pc, NodeTypeForExpr, first);
    node->data.for_expr.array_expr = iterated;
    node->data.for_expr.elem_node = token_symbol(pc, payload.payload);
    node->data.for_expr.elem_is_ptr = payload.asterisk != nullptr;
    // The second capture (`|elem, i|`) is optional.
    if (payload.index != nullptr)
        node->data.for_expr.index_node = token_symbol(pc, payload.index);
    return node;
}
// Payload <- PIPE IDENTIFIER PIPE
// Parses a simple `|name|` capture and returns the identifier token.
static Token *ast_parse_payload(ParseContext *pc) {
    if (eat_token_if(pc, TokenIdBinOr) == nullptr)
        return nullptr;
    // After the opening pipe, both the name and the closing pipe are required.
    Token *name = expect_token(pc, TokenIdSymbol);
    expect_token(pc, TokenIdBinOr);
    return name;
}
// PtrPayload <- PIPE ASTERISK? IDENTIFIER PIPE
// Parses `|name|` or `|*name|` (capture by pointer).
static Optional<PtrPayload> ast_parse_ptr_payload(ParseContext *pc) {
    if (eat_token_if(pc, TokenIdBinOr) == nullptr)
        return Optional<PtrPayload>::none();
    PtrPayload result;
    result.asterisk = eat_token_if(pc, TokenIdStar); // nullptr => by value
    result.payload = expect_token(pc, TokenIdSymbol);
    expect_token(pc, TokenIdBinOr);
    return Optional<PtrPayload>::some(result);
}
// PtrIndexPayload <- PIPE ASTERISK? IDENTIFIER (COMMA IDENTIFIER)? PIPE
// Parses `|value|`, `|*value|`, or `|value, index|` for for-loops.
static Optional<PtrIndexPayload> ast_parse_ptr_index_payload(ParseContext *pc) {
    if (eat_token_if(pc, TokenIdBinOr) == nullptr)
        return Optional<PtrIndexPayload>::none();
    PtrIndexPayload result;
    result.asterisk = eat_token_if(pc, TokenIdStar); // nullptr => by value
    result.payload = expect_token(pc, TokenIdSymbol);
    result.index = nullptr;
    // Optional second capture: the loop index.
    if (eat_token_if(pc, TokenIdComma) != nullptr)
        result.index = expect_token(pc, TokenIdSymbol);
    expect_token(pc, TokenIdBinOr);
    return Optional<PtrIndexPayload>::some(result);
}
// SwitchProng <- SwitchCase EQUALRARROW PtrPayload? AssignExpr
// Completes a switch prong: the case parsed by ast_parse_switch_case is
// extended with `=>`, an optional capture, and the prong's expression.
static AstNode *ast_parse_switch_prong(ParseContext *pc) {
    AstNode *prong = ast_parse_switch_case(pc);
    if (prong == nullptr)
        return nullptr;
    expect_token(pc, TokenIdFatArrow);
    Optional<PtrPayload> maybe_payload = ast_parse_ptr_payload(pc);
    AstNode *body = ast_expect(pc, ast_parse_assign_expr);
    assert(prong->type == NodeTypeSwitchProng);
    prong->data.switch_prong.expr = body;
    PtrPayload payload;
    if (maybe_payload.unwrap(&payload)) {
        prong->data.switch_prong.var_symbol = token_symbol(pc, payload.payload);
        prong->data.switch_prong.var_is_ptr = payload.asterisk != nullptr;
    }
    return prong;
}
// SwitchCase
//     <- SwitchItem (COMMA SwitchItem)* COMMA?
//      / KEYWORD_else
// Parses the item list (or `else`) of one switch prong; the `=>` and the
// prong expression are handled by ast_parse_switch_prong.
static AstNode *ast_parse_switch_case(ParseContext *pc) {
    AstNode *first = ast_parse_switch_item(pc);
    if (first != nullptr) {
        // The prong node takes its source location from the first item.
        AstNode *res = ast_create_node_copy_line_info(pc, NodeTypeSwitchProng, first);
        res->data.switch_prong.items.append(first);
        res->data.switch_prong.any_items_are_range = first->type == NodeTypeSwitchRange;
        // A trailing comma is accepted naturally: the comma is consumed
        // before we discover no further item follows, and we just break.
        while (eat_token_if(pc, TokenIdComma) != nullptr) {
            AstNode *item = ast_parse_switch_item(pc);
            if (item == nullptr)
                break;
            res->data.switch_prong.items.append(item);
            res->data.switch_prong.any_items_are_range |= item->type == NodeTypeSwitchRange;
        }
        return res;
    }
    // `else` prong: represented as a SwitchProng node with an empty item list.
    Token *else_token = eat_token_if(pc, TokenIdKeywordElse);
    if (else_token != nullptr)
        return ast_create_node(pc, NodeTypeSwitchProng, else_token);
    return nullptr;
}
// SwitchItem <- Expr (DOT3 Expr)?
// Parses a single switch item; `a ... b` yields a NodeTypeSwitchRange,
// a lone expression is returned unchanged.
static AstNode *ast_parse_switch_item(ParseContext *pc) {
    AstNode *start = ast_parse_expr(pc);
    if (start == nullptr)
        return nullptr;
    Token *ellipsis = eat_token_if(pc, TokenIdEllipsis3);
    if (ellipsis == nullptr)
        return start;
    AstNode *end = ast_expect(pc, ast_parse_expr);
    AstNode *range = ast_create_node(pc, NodeTypeSwitchRange, ellipsis);
    range->data.switch_range.start = start;
    range->data.switch_range.end = end;
    return range;
}
// AssignOp
//     <- ASTERISKEQUAL
//      / SLASHEQUAL
//      / PERCENTEQUAL
//      / PLUSEQUAL
//      / MINUSEQUAL
//      / LARROW2EQUAL
//      / RARROW2EQUAL
//      / AMPERSANDEQUAL
//      / CARETEQUAL
//      / PIPEEQUAL
//      / ASTERISKPERCENTEQUAL
//      / PLUSPERCENTEQUAL
//      / MINUSPERCENTEQUAL
//      / EQUAL
// Maps the next token to an assignment BinOpType via a dense lookup table.
// Unlisted tokens stay zero-initialized, which the comparison below relies
// on being BinOpTypeInvalid. The token is only consumed on a match.
static AstNode *ast_parse_assign_op(ParseContext *pc) {
    // In C, we have `T arr[N] = {[i] = T{}};` but it doesn't
    // seem to work in C++...
    BinOpType table[TokenIdCount] = {};
    table[TokenIdBarBarEq] = BinOpTypeAssignMergeErrorSets;
    table[TokenIdBitAndEq] = BinOpTypeAssignBitAnd;
    table[TokenIdBitOrEq] = BinOpTypeAssignBitOr;
    table[TokenIdBitShiftLeftEq] = BinOpTypeAssignBitShiftLeft;
    table[TokenIdBitShiftRightEq] = BinOpTypeAssignBitShiftRight;
    table[TokenIdBitXorEq] = BinOpTypeAssignBitXor;
    table[TokenIdDivEq] = BinOpTypeAssignDiv;
    table[TokenIdEq] = BinOpTypeAssign;
    table[TokenIdMinusEq] = BinOpTypeAssignMinus;
    table[TokenIdMinusPercentEq] = BinOpTypeAssignMinusWrap;
    table[TokenIdModEq] = BinOpTypeAssignMod;
    table[TokenIdPlusEq] = BinOpTypeAssignPlus;
    table[TokenIdPlusPercentEq] = BinOpTypeAssignPlusWrap;
    table[TokenIdTimesEq] = BinOpTypeAssignTimes;
    table[TokenIdTimesPercentEq] = BinOpTypeAssignTimesWrap;
    BinOpType op = table[peek_token(pc)->id];
    if (op != BinOpTypeInvalid) {
        Token *op_token = eat_token(pc);
        AstNode *res = ast_create_node(pc, NodeTypeBinOpExpr, op_token);
        res->data.bin_op_expr.bin_op = op;
        return res;
    }
    return nullptr;
}
// CompareOp
//     <- EQUALEQUAL
//      / EXCLAMATIONMARKEQUAL
//      / LARROW
//      / RARROW
//      / LARROWEQUAL
//      / RARROWEQUAL
// Token -> comparison operator lookup; unlisted entries stay zero-valued
// (BinOpTypeInvalid). The token is only consumed on a match.
static AstNode *ast_parse_compare_op(ParseContext *pc) {
    BinOpType ops[TokenIdCount] = {};
    ops[TokenIdCmpEq] = BinOpTypeCmpEq;
    ops[TokenIdCmpGreaterOrEq] = BinOpTypeCmpGreaterOrEq;
    ops[TokenIdCmpGreaterThan] = BinOpTypeCmpGreaterThan;
    ops[TokenIdCmpLessOrEq] = BinOpTypeCmpLessOrEq;
    ops[TokenIdCmpLessThan] = BinOpTypeCmpLessThan;
    ops[TokenIdCmpNotEq] = BinOpTypeCmpNotEq;
    BinOpType op = ops[peek_token(pc)->id];
    if (op == BinOpTypeInvalid)
        return nullptr;
    Token *op_token = eat_token(pc);
    AstNode *node = ast_create_node(pc, NodeTypeBinOpExpr, op_token);
    node->data.bin_op_expr.bin_op = op;
    return node;
}
// BitwiseOp
//     <- AMPERSAND
//      / CARET
//      / PIPE
//      / KEYWORD_orelse
//      / KEYWORD_catch Payload?
// Simple operators go through the lookup table; `catch` is special because
// it may carry a `|err|` payload and builds an unwrap-error node instead.
static AstNode *ast_parse_bitwise_op(ParseContext *pc) {
    BinOpType ops[TokenIdCount] = {};
    ops[TokenIdAmpersand] = BinOpTypeBinAnd;
    ops[TokenIdBinOr] = BinOpTypeBinOr;
    ops[TokenIdBinXor] = BinOpTypeBinXor;
    ops[TokenIdKeywordOrElse] = BinOpTypeUnwrapOptional;
    BinOpType op = ops[peek_token(pc)->id];
    if (op != BinOpTypeInvalid) {
        Token *op_token = eat_token(pc);
        AstNode *node = ast_create_node(pc, NodeTypeBinOpExpr, op_token);
        node->data.bin_op_expr.bin_op = op;
        return node;
    }
    Token *catch_token = eat_token_if(pc, TokenIdKeywordCatch);
    if (catch_token == nullptr)
        return nullptr;
    Token *err_name = ast_parse_payload(pc);
    AstNode *node = ast_create_node(pc, NodeTypeUnwrapErrorExpr, catch_token);
    if (err_name != nullptr)
        node->data.unwrap_err_expr.symbol = token_symbol(pc, err_name);
    return node;
}
// BitShiftOp
//     <- LARROW2
//      / RARROW2
// Token -> shift operator lookup; the token is only consumed on a match.
static AstNode *ast_parse_bit_shift_op(ParseContext *pc) {
    BinOpType ops[TokenIdCount] = {};
    ops[TokenIdBitShiftLeft] = BinOpTypeBitShiftLeft;
    ops[TokenIdBitShiftRight] = BinOpTypeBitShiftRight;
    BinOpType op = ops[peek_token(pc)->id];
    if (op == BinOpTypeInvalid)
        return nullptr;
    Token *op_token = eat_token(pc);
    AstNode *node = ast_create_node(pc, NodeTypeBinOpExpr, op_token);
    node->data.bin_op_expr.bin_op = op;
    return node;
}
// AdditionOp
//     <- PLUS
//      / MINUS
//      / PLUS2
//      / PLUSPERCENT
//      / MINUSPERCENT
// Token -> additive operator lookup (includes `++` array concatenation and
// the wrapping variants); the token is only consumed on a match.
static AstNode *ast_parse_addition_op(ParseContext *pc) {
    BinOpType ops[TokenIdCount] = {};
    ops[TokenIdDash] = BinOpTypeSub;
    ops[TokenIdMinusPercent] = BinOpTypeSubWrap;
    ops[TokenIdPlus] = BinOpTypeAdd;
    ops[TokenIdPlusPercent] = BinOpTypeAddWrap;
    ops[TokenIdPlusPlus] = BinOpTypeArrayCat;
    BinOpType op = ops[peek_token(pc)->id];
    if (op == BinOpTypeInvalid)
        return nullptr;
    Token *op_token = eat_token(pc);
    AstNode *node = ast_create_node(pc, NodeTypeBinOpExpr, op_token);
    node->data.bin_op_expr.bin_op = op;
    return node;
}
// MultiplyOp
//     <- PIPE2
//      / ASTERISK
//      / SLASH
//      / PERCENT
//      / ASTERISK2
//      / ASTERISKPERCENT
// Token -> multiplicative operator lookup (includes `||` error-set merge and
// `**` array multiplication); the token is only consumed on a match.
static AstNode *ast_parse_multiply_op(ParseContext *pc) {
    BinOpType ops[TokenIdCount] = {};
    ops[TokenIdBarBar] = BinOpTypeMergeErrorSets;
    ops[TokenIdPercent] = BinOpTypeMod;
    ops[TokenIdSlash] = BinOpTypeDiv;
    ops[TokenIdStar] = BinOpTypeMult;
    ops[TokenIdStarStar] = BinOpTypeArrayMult;
    ops[TokenIdTimesPercent] = BinOpTypeMultWrap;
    BinOpType op = ops[peek_token(pc)->id];
    if (op == BinOpTypeInvalid)
        return nullptr;
    Token *op_token = eat_token(pc);
    AstNode *node = ast_create_node(pc, NodeTypeBinOpExpr, op_token);
    node->data.bin_op_expr.bin_op = op;
    return node;
}
// PrefixOp
//     <- EXCLAMATIONMARK
//      / MINUS
//      / TILDE
//      / MINUSPERCENT
//      / AMPERSAND
//      / KEYWORD_try
//      / KEYWORD_await
// Simple prefix operators go through the lookup table; `try` and `await`
// build dedicated node types instead of a NodeTypePrefixOpExpr.
static AstNode *ast_parse_prefix_op(ParseContext *pc) {
    PrefixOp ops[TokenIdCount] = {};
    ops[TokenIdAmpersand] = PrefixOpAddrOf;
    ops[TokenIdBang] = PrefixOpBoolNot;
    ops[TokenIdDash] = PrefixOpNegation;
    ops[TokenIdMinusPercent] = PrefixOpNegationWrap;
    ops[TokenIdTilde] = PrefixOpBinNot;
    PrefixOp op = ops[peek_token(pc)->id];
    if (op != PrefixOpInvalid) {
        Token *op_token = eat_token(pc);
        AstNode *node = ast_create_node(pc, NodeTypePrefixOpExpr, op_token);
        node->data.prefix_op_expr.prefix_op = op;
        return node;
    }
    // `try expr` is represented as an error-kind return expression.
    Token *try_token = eat_token_if(pc, TokenIdKeywordTry);
    if (try_token != nullptr) {
        AstNode *node = ast_create_node(pc, NodeTypeReturnExpr, try_token);
        node->data.return_expr.kind = ReturnKindError;
        return node;
    }
    Token *await_token = eat_token_if(pc, TokenIdKeywordAwait);
    if (await_token != nullptr)
        return ast_create_node(pc, NodeTypeAwaitExpr, await_token);
    return nullptr;
}
// PrefixTypeOp
//     <- QUESTIONMARK
//      / KEYWORD_promise MINUSRARROW
//      / ArrayTypeStart (ByteAlign / KEYWORD_const / KEYWORD_volatile)*
//      / PtrTypeStart (KEYWORD_align LPAREN Expr (COLON INTEGER COLON INTEGER)? RPAREN / KEYWORD_const / KEYWORD_volatile)*
// Parses one type-prefix operator: optional (`?T`), promise, array type
// start, or pointer type start, including any qualifier suffixes.
static AstNode *ast_parse_prefix_type_op(ParseContext *pc) {
    // `?T` — optional type.
    Token *questionmark = eat_token_if(pc, TokenIdQuestion);
    if (questionmark != nullptr) {
        AstNode *res = ast_create_node(pc, NodeTypePrefixOpExpr, questionmark);
        res->data.prefix_op_expr.prefix_op = PrefixOpOptional;
        return res;
    }
    // `promise->T`; a bare `promise` without the arrow is put back so it
    // can be parsed as an ordinary type expression elsewhere.
    Token *promise = eat_token_if(pc, TokenIdKeywordPromise);
    if (promise != nullptr) {
        if (eat_token_if(pc, TokenIdArrow) != nullptr) {
            AstNode *res = ast_create_node(pc, NodeTypePromiseType, promise);
            return res;
        }
        put_back_token(pc);
    }
    // Array type start (`[N]` / `[]`), then greedily consume any number of
    // align/const/volatile qualifiers in any order.
    AstNode *array = ast_parse_array_type_start(pc);
    if (array != nullptr) {
        assert(array->type == NodeTypeArrayType);
        while (true) {
            AstNode *align_expr = ast_parse_byte_align(pc);
            if (align_expr != nullptr) {
                array->data.array_type.align_expr = align_expr;
                continue;
            }
            if (eat_token_if(pc, TokenIdKeywordConst) != nullptr) {
                array->data.array_type.is_const = true;
                continue;
            }
            if (eat_token_if(pc, TokenIdKeywordVolatile) != nullptr) {
                array->data.array_type.is_volatile = true;
                continue;
            }
            break;
        }
        return array;
    }
    // Pointer type start, plus its qualifier loop. Note align() here also
    // supports the bit-range form `align(A:offset:host_int_bytes)`.
    AstNode *ptr = ast_parse_ptr_type_start(pc);
    if (ptr != nullptr) {
        assert(ptr->type == NodeTypePointerType);
        // We might get two pointers from *_ptr_type_start
        AstNode *child = ptr->data.pointer_type.op_expr;
        if (child == nullptr)
            child = ptr;
        while (true) {
            if (eat_token_if(pc, TokenIdKeywordAlign) != nullptr) {
                expect_token(pc, TokenIdLParen);
                AstNode *align_expr = ast_parse_expr(pc);
                child->data.pointer_type.align_expr = align_expr;
                if (eat_token_if(pc, TokenIdColon) != nullptr) {
                    Token *bit_offset_start = expect_token(pc, TokenIdIntLiteral);
                    expect_token(pc, TokenIdColon);
                    Token *host_int_bytes = expect_token(pc, TokenIdIntLiteral);
                    child->data.pointer_type.bit_offset_start = token_bigint(bit_offset_start);
                    child->data.pointer_type.host_int_bytes = token_bigint(host_int_bytes);
                }
                expect_token(pc, TokenIdRParen);
                continue;
            }
            if (eat_token_if(pc, TokenIdKeywordConst) != nullptr) {
                child->data.pointer_type.is_const = true;
                continue;
            }
            if (eat_token_if(pc, TokenIdKeywordVolatile) != nullptr) {
                child->data.pointer_type.is_volatile = true;
                continue;
            }
            break;
        }
        return ptr;
    }
    return nullptr;
}
// SuffixOp
//     <- LBRACKET Expr (DOT2 Expr?)? RBRACKET
//      / DOT IDENTIFIER
//      / DOTASTERISK
//      / DOTQUESTIONMARK
// Parses one suffix operator: indexing/slicing, field access, `.*`, or `.?`.
static AstNode *ast_parse_suffix_op(ParseContext *pc) {
    Token *lbracket = eat_token_if(pc, TokenIdLBracket);
    if (lbracket != nullptr) {
        AstNode *start = ast_expect(pc, ast_parse_expr);
        // `[a..b]` / `[a..]` is a slice; a plain `[a]` is an array access.
        if (eat_token_if(pc, TokenIdEllipsis2) != nullptr) {
            AstNode *end = ast_parse_expr(pc); // end bound is optional
            expect_token(pc, TokenIdRBracket);
            AstNode *slice = ast_create_node(pc, NodeTypeSliceExpr, lbracket);
            slice->data.slice_expr.start = start;
            slice->data.slice_expr.end = end;
            return slice;
        }
        expect_token(pc, TokenIdRBracket);
        AstNode *access = ast_create_node(pc, NodeTypeArrayAccessExpr, lbracket);
        access->data.array_access_expr.subscript = start;
        return access;
    }
    Token *dot = eat_token_if(pc, TokenIdDot);
    if (dot == nullptr)
        return nullptr;
    if (eat_token_if(pc, TokenIdStar) != nullptr)
        return ast_create_node(pc, NodeTypePtrDeref, dot);       // `.*`
    if (eat_token_if(pc, TokenIdQuestion) != nullptr)
        return ast_create_node(pc, NodeTypeUnwrapOptional, dot); // `.?`
    Token *field = expect_token(pc, TokenIdSymbol);
    AstNode *node = ast_create_node(pc, NodeTypeFieldAccessExpr, dot);
    node->data.field_access_expr.field_name = token_buf(field);
    return node;
}
// AsyncPrefix <- KEYWORD_async (LARROW PrefixExpr RARROW)?
// Parses `async` (optionally `async<allocator>`) into a call node marked
// as async; the callee and arguments are filled in by the caller.
static AstNode *ast_parse_async_prefix(ParseContext *pc) {
    Token *async_token = eat_token_if(pc, TokenIdKeywordAsync);
    if (async_token == nullptr)
        return nullptr;
    AstNode *node = ast_create_node(pc, NodeTypeFnCallExpr, async_token);
    node->data.fn_call_expr.is_async = true;
    if (eat_token_if(pc, TokenIdCmpLessThan) != nullptr) {
        node->data.fn_call_expr.async_allocator = ast_expect(pc, ast_parse_prefix_expr);
        expect_token(pc, TokenIdCmpGreaterThan);
    }
    return node;
}
// FnCallArgumnets <- LPAREN ExprList RPAREN
// NOTE(review): "Argumnets" is a pre-existing typo; the name is kept
// because callers reference it, but it should be renamed to "Arguments"
// in a dedicated change.
static AstNode *ast_parse_fn_call_argumnets(ParseContext *pc) {
    Token *lparen = eat_token_if(pc, TokenIdLParen);
    if (lparen == nullptr)
        return nullptr;
    ZigList<AstNode *> args = ast_parse_list(pc, TokenIdComma, ast_parse_expr);
    expect_token(pc, TokenIdRParen);
    AstNode *node = ast_create_node(pc, NodeTypeFnCallExpr, lparen);
    node->data.fn_call_expr.params = args;
    return node;
}
// ArrayTypeStart <- LBRACKET Expr? RBRACKET
// Parses `[N]` or `[]`; the size expression is optional.
static AstNode *ast_parse_array_type_start(ParseContext *pc) {
    Token *lbracket = eat_token_if(pc, TokenIdLBracket);
    if (lbracket == nullptr)
        return nullptr;
    AstNode *size_expr = ast_parse_expr(pc); // nullptr for `[]`
    expect_token(pc, TokenIdRBracket);
    AstNode *node = ast_create_node(pc, NodeTypeArrayType, lbracket);
    node->data.array_type.size = size_expr;
    return node;
}
// PtrTypeStart
//     <- ASTERISK
//      / ASTERISK2
//      / LBRACKET ASTERISK RBRACKET
// Parses the opening token(s) of a pointer type.
static AstNode *ast_parse_ptr_type_start(ParseContext *pc) {
    Token *token;
    if ((token = eat_token_if(pc, TokenIdStar)) != nullptr) {
        AstNode *node = ast_create_node(pc, NodeTypePointerType, token);
        node->data.pointer_type.star_token = token;
        return node;
    }
    if ((token = eat_token_if(pc, TokenIdStarStar)) != nullptr) {
        // `**` is represented as two single-pointer nodes chained via op_expr.
        AstNode *outer = ast_create_node(pc, NodeTypePointerType, token);
        AstNode *inner = ast_create_node(pc, NodeTypePointerType, token);
        outer->data.pointer_type.star_token = token;
        inner->data.pointer_type.star_token = token;
        outer->data.pointer_type.op_expr = inner;
        return outer;
    }
    if ((token = eat_token_if(pc, TokenIdBracketStarBracket)) != nullptr) {
        // `[*]` — unknown-length pointer.
        AstNode *node = ast_create_node(pc, NodeTypePointerType, token);
        node->data.pointer_type.star_token = token;
        return node;
    }
    return nullptr;
}
// ContainerDeclAuto <- ContainerDeclType LBRACE ContainerMembers RBRACE
// Parses a container declaration's braced body and attaches the members to
// the node produced by ast_parse_container_decl_type.
static AstNode *ast_parse_container_decl_auto(ParseContext *pc) {
    AstNode *node = ast_parse_container_decl_type(pc);
    if (node == nullptr)
        return nullptr;
    expect_token(pc, TokenIdLBrace);
    AstNodeContainerDecl body = ast_parse_container_members(pc);
    expect_token(pc, TokenIdRBrace);
    node->data.container_decl.fields = body.fields;
    node->data.container_decl.decls = body.decls;
    return node;
}
// ContainerDeclType
//     <- (KEYWORD_struct / KEYWORD_enum) (LPAREN Expr RPAREN)?
//      / KEYWORD_union (LPAREN (KEYWORD_enum (LPAREN Expr RPAREN)? / Expr) RPAREN)?
// Parses the keyword/argument head of a container declaration; the braced
// member body is handled by ast_parse_container_decl_auto.
static AstNode *ast_parse_container_decl_type(ParseContext *pc) {
    // struct and enum share one code path: both accept an optional `(expr)`.
    Token *first = eat_token_if(pc, TokenIdKeywordStruct);
    if (first == nullptr)
        first = eat_token_if(pc, TokenIdKeywordEnum);
    if (first != nullptr) {
        AstNode *init_arg_expr = nullptr;
        if (eat_token_if(pc, TokenIdLParen) != nullptr) {
            init_arg_expr = ast_expect(pc, ast_parse_expr);
            expect_token(pc, TokenIdRParen);
        }
        AstNode *res = ast_create_node(pc, NodeTypeContainerDecl, first);
        res->data.container_decl.init_arg_expr = init_arg_expr;
        res->data.container_decl.kind = first->id == TokenIdKeywordStruct
            ? ContainerKindStruct
            : ContainerKindEnum;
        return res;
    }
    // union has four surface forms: `union`, `union(expr)`, `union(enum)`,
    // and `union(enum(expr))` — the last two set auto_enum.
    first = eat_token_if(pc, TokenIdKeywordUnion);
    if (first != nullptr) {
        AstNode *init_arg_expr = nullptr;
        bool auto_enum = false;
        if (eat_token_if(pc, TokenIdLParen) != nullptr) {
            if (eat_token_if(pc, TokenIdKeywordEnum) != nullptr) {
                auto_enum = true;
                // Optional tag-type argument: `union(enum(TagType))`.
                if (eat_token_if(pc, TokenIdLParen) != nullptr) {
                    init_arg_expr = ast_expect(pc, ast_parse_expr);
                    expect_token(pc, TokenIdRParen);
                }
            } else {
                init_arg_expr = ast_expect(pc, ast_parse_expr);
            }
            expect_token(pc, TokenIdRParen);
        }
        AstNode *res = ast_create_node(pc, NodeTypeContainerDecl, first);
        res->data.container_decl.init_arg_expr = init_arg_expr;
        res->data.container_decl.auto_enum = auto_enum;
        res->data.container_decl.kind = ContainerKindUnion;
        return res;
    }
    return nullptr;
}
// ByteAlign <- KEYWORD_align LPAREN Expr RPAREN
// Parses `align(expr)` and returns the inner alignment expression node.
static AstNode *ast_parse_byte_align(ParseContext *pc) {
    if (eat_token_if(pc, TokenIdKeywordAlign) == nullptr)
        return nullptr;
    expect_token(pc, TokenIdLParen);
    AstNode *align_expr = ast_expect(pc, ast_parse_expr);
    expect_token(pc, TokenIdRParen);
    return align_expr;
}
static void visit_field(AstNode **node, void (*visit)(AstNode **, void *context), void *context) {
@@ -2917,9 +2892,6 @@ static void visit_node_list(ZigList<AstNode *> *list, void (*visit)(AstNode **,
void ast_visit_node_children(AstNode *node, void (*visit)(AstNode **, void *context), void *context) {
switch (node->type) {
case NodeTypeRoot:
visit_node_list(&node->data.root.top_level_decls, visit, context);
break;
case NodeTypeFnProto:
visit_field(&node->data.fn_proto.return_type, visit, context);
visit_node_list(&node->data.fn_proto.params, visit, context);
-2
View File
@@ -22,6 +22,4 @@ void ast_print(AstNode *node, int indent);
void ast_visit_node_children(AstNode *node, void (*visit)(AstNode **, void *context), void *context);
bool statement_terminates_without_semicolon(AstNode *node);
#endif
-36
View File
@@ -1,36 +0,0 @@
/*
* Copyright (c) 2018 Andrew Kelley
*
* This file is part of zig, which is MIT licensed.
* See http://opensource.org/licenses/MIT
*/
#ifndef ZIG_RESULT_HPP
#define ZIG_RESULT_HPP
#include "error.hpp"
#include <assert.h>
static inline void assertNoError(Error err) {
assert(err == ErrorNone);
}
template<typename T>
struct Result {
T data;
Error err;
Result(T x) : data(x), err(ErrorNone) {}
Result(Error err) : err(err) {
assert(err != ErrorNone);
}
T unwrap() {
assert(err == ErrorNone);
return data;
}
};
#endif
+6 -2
View File
@@ -108,6 +108,7 @@ struct ZigKeyword {
static const struct ZigKeyword zig_keywords[] = {
{"align", TokenIdKeywordAlign},
{"and", TokenIdKeywordAnd},
{"anyerror", TokenIdKeywordAnyerror},
{"asm", TokenIdKeywordAsm},
{"async", TokenIdKeywordAsync},
{"await", TokenIdKeywordAwait},
@@ -139,7 +140,7 @@ static const struct ZigKeyword zig_keywords[] = {
{"pub", TokenIdKeywordPub},
{"resume", TokenIdKeywordResume},
{"return", TokenIdKeywordReturn},
{"section", TokenIdKeywordSection},
{"linksection", TokenIdKeywordLinkSection},
{"stdcallcc", TokenIdKeywordStdcallCC},
{"struct", TokenIdKeywordStruct},
{"suspend", TokenIdKeywordSuspend},
@@ -1548,6 +1549,7 @@ const char * token_name(TokenId id) {
case TokenIdFloatLiteral: return "FloatLiteral";
case TokenIdIntLiteral: return "IntLiteral";
case TokenIdKeywordAsync: return "async";
case TokenIdKeywordAnyerror: return "anyerror";
case TokenIdKeywordAwait: return "await";
case TokenIdKeywordResume: return "resume";
case TokenIdKeywordSuspend: return "suspend";
@@ -1581,7 +1583,7 @@ const char * token_name(TokenId id) {
case TokenIdKeywordPromise: return "promise";
case TokenIdKeywordPub: return "pub";
case TokenIdKeywordReturn: return "return";
case TokenIdKeywordSection: return "section";
case TokenIdKeywordLinkSection: return "linksection";
case TokenIdKeywordStdcallCC: return "stdcallcc";
case TokenIdKeywordStruct: return "struct";
case TokenIdKeywordSwitch: return "switch";
@@ -1625,6 +1627,8 @@ const char * token_name(TokenId id) {
case TokenIdTimesPercent: return "*%";
case TokenIdTimesPercentEq: return "*%=";
case TokenIdBarBarEq: return "||=";
case TokenIdCount:
zig_unreachable();
}
return "(invalid token)";
}
+3 -1
View File
@@ -50,6 +50,7 @@ enum TokenId {
TokenIdIntLiteral,
TokenIdKeywordAlign,
TokenIdKeywordAnd,
TokenIdKeywordAnyerror,
TokenIdKeywordAsm,
TokenIdKeywordAsync,
TokenIdKeywordAwait,
@@ -81,7 +82,7 @@ enum TokenId {
TokenIdKeywordPub,
TokenIdKeywordResume,
TokenIdKeywordReturn,
TokenIdKeywordSection,
TokenIdKeywordLinkSection,
TokenIdKeywordStdcallCC,
TokenIdKeywordStruct,
TokenIdKeywordSuspend,
@@ -125,6 +126,7 @@ enum TokenId {
TokenIdTimesEq,
TokenIdTimesPercent,
TokenIdTimesPercentEq,
TokenIdCount,
};
struct TokenFloatLit {
+9 -7
View File
@@ -436,7 +436,8 @@ static AstNode *get_global(Context *c, Buf *name) {
if (entry)
return entry->value;
}
if (get_primitive_type(c->codegen, name) != nullptr) {
ZigType *type;
if (get_primitive_type(c->codegen, name, &type) != ErrorPrimitiveTypeNotFound) {
return trans_create_node_symbol(c, name);
}
return nullptr;
@@ -444,7 +445,7 @@ static AstNode *get_global(Context *c, Buf *name) {
static void add_top_level_decl(Context *c, Buf *name, AstNode *node) {
c->global_table.put(name, node);
c->root->data.root.top_level_decls.append(node);
c->root->data.container_decl.decls.append(node);
}
static AstNode *add_global_var(Context *c, Buf *var_name, AstNode *value_node) {
@@ -4690,10 +4691,10 @@ static void process_preprocessor_entities(Context *c, ASTUnit &unit) {
}
}
int parse_h_buf(ImportTableEntry *import, ZigList<ErrorMsg *> *errors, Buf *source,
Error parse_h_buf(ImportTableEntry *import, ZigList<ErrorMsg *> *errors, Buf *source,
CodeGen *codegen, AstNode *source_node)
{
int err;
Error err;
Buf tmp_file_path = BUF_INIT;
if ((err = os_buf_to_tmp_file(source, buf_create_from_str(".h"), &tmp_file_path))) {
return err;
@@ -4706,7 +4707,7 @@ int parse_h_buf(ImportTableEntry *import, ZigList<ErrorMsg *> *errors, Buf *sour
return err;
}
int parse_h_file(ImportTableEntry *import, ZigList<ErrorMsg *> *errors, const char *target_file,
Error parse_h_file(ImportTableEntry *import, ZigList<ErrorMsg *> *errors, const char *target_file,
CodeGen *codegen, AstNode *source_node)
{
Context context = {0};
@@ -4862,7 +4863,8 @@ int parse_h_file(ImportTableEntry *import, ZigList<ErrorMsg *> *errors, const ch
c->ctx = &ast_unit->getASTContext();
c->source_manager = &ast_unit->getSourceManager();
c->root = trans_create_node(c, NodeTypeRoot);
c->root = trans_create_node(c, NodeTypeContainerDecl);
c->root->data.container_decl.is_root = true;
ast_unit->visitLocalTopLevelDecls(c, decl_visitor);
@@ -4873,5 +4875,5 @@ int parse_h_file(ImportTableEntry *import, ZigList<ErrorMsg *> *errors, const ch
import->root = c->root;
return 0;
return ErrorNone;
}
+2 -2
View File
@@ -11,10 +11,10 @@
#include "all_types.hpp"
int parse_h_file(ImportTableEntry *import, ZigList<ErrorMsg *> *errors, const char *target_file,
Error parse_h_file(ImportTableEntry *import, ZigList<ErrorMsg *> *errors, const char *target_file,
CodeGen *codegen, AstNode *source_node);
int parse_h_buf(ImportTableEntry *import, ZigList<ErrorMsg *> *errors, Buf *source,
Error parse_h_buf(ImportTableEntry *import, ZigList<ErrorMsg *> *errors, Buf *source,
CodeGen *codegen, AstNode *source_node);
#endif
+27
View File
@@ -194,6 +194,15 @@ struct Optional {
static inline Optional<T> some(T x) {
return {x, true};
}
static inline Optional<T> none() {
return {{}, false};
}
inline bool unwrap(T *res) {
*res = value;
return is_some;
}
};
template<typename T>
@@ -201,6 +210,11 @@ struct Slice {
T *ptr;
size_t len;
inline T &at(size_t i) {
assert(i < len);
return &ptr[i];
}
inline Slice<T> slice(size_t start, size_t end) {
assert(end <= len);
assert(end >= start);
@@ -223,6 +237,19 @@ struct Slice {
}
};
template<typename T, size_t n>
struct Array {
static const size_t len = n;
T items[n];
inline Slice<T> slice() {
return {
&items[0],
len,
};
}
};
static inline Slice<uint8_t> str(const char *literal) {
return {(uint8_t*)(literal), strlen(literal)};
}
+10 -10
View File
@@ -10,7 +10,7 @@ pub fn ArrayList(comptime T: type) type {
}
pub fn AlignedArrayList(comptime T: type, comptime A: u29) type {
return struct.{
return struct {
const Self = @This();
/// Use toSlice instead of slicing this directly, because if you don't
@@ -22,8 +22,8 @@ pub fn AlignedArrayList(comptime T: type, comptime A: u29) type {
/// Deinitialize with `deinit` or use `toOwnedSlice`.
pub fn init(allocator: *Allocator) Self {
return Self.{
.items = []align(A) T.{},
return Self{
.items = []align(A) T{},
.len = 0,
.allocator = allocator,
};
@@ -70,7 +70,7 @@ pub fn AlignedArrayList(comptime T: type, comptime A: u29) type {
/// allocated with `allocator`.
/// Deinitialize with `deinit` or use `toOwnedSlice`.
pub fn fromOwnedSlice(allocator: *Allocator, slice: []align(A) T) Self {
return Self.{
return Self{
.items = slice,
.len = slice.len,
.allocator = allocator,
@@ -179,7 +179,7 @@ pub fn AlignedArrayList(comptime T: type, comptime A: u29) type {
return self.pop();
}
pub const Iterator = struct.{
pub const Iterator = struct {
list: *const Self,
// how many items have we returned
count: usize,
@@ -197,7 +197,7 @@ pub fn AlignedArrayList(comptime T: type, comptime A: u29) type {
};
pub fn iterator(self: *const Self) Iterator {
return Iterator.{
return Iterator{
.list = self,
.count = 0,
};
@@ -251,7 +251,7 @@ test "std.ArrayList.basic" {
assert(list.pop() == 10);
assert(list.len == 9);
list.appendSlice([]const i32.{
list.appendSlice([]const i32{
1,
2,
3,
@@ -262,7 +262,7 @@ test "std.ArrayList.basic" {
assert(list.pop() == 1);
assert(list.len == 9);
list.appendSlice([]const i32.{}) catch unreachable;
list.appendSlice([]const i32{}) catch unreachable;
assert(list.len == 9);
// can only set on indices < self.len
@@ -382,7 +382,7 @@ test "std.ArrayList.insertSlice" {
try list.append(2);
try list.append(3);
try list.append(4);
try list.insertSlice(1, []const i32.{
try list.insertSlice(1, []const i32{
9,
8,
});
@@ -393,7 +393,7 @@ test "std.ArrayList.insertSlice" {
assert(list.items[4] == 3);
assert(list.items[5] == 4);
const items = []const i32.{1};
const items = []const i32{1};
try list.insertSlice(0, items[0..0]);
assert(list.len == 6);
assert(list.items[0] == 1);
+2 -2
View File
@@ -3,13 +3,13 @@ const AtomicOrder = builtin.AtomicOrder;
/// Thread-safe, lock-free integer
pub fn Int(comptime T: type) type {
return struct.{
return struct {
unprotected_value: T,
pub const Self = @This();
pub fn init(init_val: T) Self {
return Self.{ .unprotected_value = init_val };
return Self{ .unprotected_value = init_val };
}
/// Returns previous value
+13 -13
View File
@@ -7,7 +7,7 @@ const assert = std.debug.assert;
/// Many producer, many consumer, non-allocating, thread-safe.
/// Uses a mutex to protect access.
pub fn Queue(comptime T: type) type {
return struct.{
return struct {
head: ?*Node,
tail: ?*Node,
mutex: std.Mutex,
@@ -16,7 +16,7 @@ pub fn Queue(comptime T: type) type {
pub const Node = std.LinkedList(T).Node;
pub fn init() Self {
return Self.{
return Self{
.head = null,
.tail = null,
.mutex = std.Mutex.init(),
@@ -111,7 +111,7 @@ pub fn Queue(comptime T: type) type {
}
pub fn dumpToStream(self: *Self, comptime Error: type, stream: *std.io.OutStream(Error)) Error!void {
const S = struct.{
const S = struct {
fn dumpRecursive(s: *std.io.OutStream(Error), optional_node: ?*Node, indent: usize) Error!void {
try s.writeByteNTimes(' ', indent);
if (optional_node) |node| {
@@ -133,7 +133,7 @@ pub fn Queue(comptime T: type) type {
};
}
const Context = struct.{
const Context = struct {
allocator: *std.mem.Allocator,
queue: *Queue(i32),
put_sum: isize,
@@ -161,7 +161,7 @@ test "std.atomic.Queue" {
var a = &fixed_buffer_allocator.allocator;
var queue = Queue(i32).init();
var context = Context.{
var context = Context{
.allocator = a,
.queue = &queue,
.put_sum = 0,
@@ -205,7 +205,7 @@ fn startPuts(ctx: *Context) u8 {
while (put_count != 0) : (put_count -= 1) {
std.os.time.sleep(1); // let the os scheduler be our fuzz
const x = @bitCast(i32, r.random.scalar(u32));
const node = ctx.allocator.create(Queue(i32).Node.{
const node = ctx.allocator.create(Queue(i32).Node{
.prev = undefined,
.next = undefined,
.data = x,
@@ -233,14 +233,14 @@ fn startGets(ctx: *Context) u8 {
test "std.atomic.Queue single-threaded" {
var queue = Queue(i32).init();
var node_0 = Queue(i32).Node.{
var node_0 = Queue(i32).Node{
.data = 0,
.next = undefined,
.prev = undefined,
};
queue.put(&node_0);
var node_1 = Queue(i32).Node.{
var node_1 = Queue(i32).Node{
.data = 1,
.next = undefined,
.prev = undefined,
@@ -249,14 +249,14 @@ test "std.atomic.Queue single-threaded" {
assert(queue.get().?.data == 0);
var node_2 = Queue(i32).Node.{
var node_2 = Queue(i32).Node{
.data = 2,
.next = undefined,
.prev = undefined,
};
queue.put(&node_2);
var node_3 = Queue(i32).Node.{
var node_3 = Queue(i32).Node{
.data = 3,
.next = undefined,
.prev = undefined,
@@ -267,7 +267,7 @@ test "std.atomic.Queue single-threaded" {
assert(queue.get().?.data == 2);
var node_4 = Queue(i32).Node.{
var node_4 = Queue(i32).Node{
.data = 4,
.next = undefined,
.prev = undefined,
@@ -301,7 +301,7 @@ test "std.atomic.Queue dump" {
));
// Test a stream with one element
var node_0 = Queue(i32).Node.{
var node_0 = Queue(i32).Node{
.data = 1,
.next = undefined,
.prev = undefined,
@@ -321,7 +321,7 @@ test "std.atomic.Queue dump" {
assert(mem.eql(u8, buffer[0..sos.pos], expected));
// Test a stream with two elements
var node_1 = Queue(i32).Node.{
var node_1 = Queue(i32).Node{
.data = 2,
.next = undefined,
.prev = undefined,
+6 -6
View File
@@ -5,19 +5,19 @@ const AtomicOrder = builtin.AtomicOrder;
/// Many reader, many writer, non-allocating, thread-safe
/// Uses a spinlock to protect push() and pop()
pub fn Stack(comptime T: type) type {
return struct.{
return struct {
root: ?*Node,
lock: u8,
pub const Self = @This();
pub const Node = struct.{
pub const Node = struct {
next: ?*Node,
data: T,
};
pub fn init() Self {
return Self.{
return Self{
.root = null,
.lock = 0,
};
@@ -54,7 +54,7 @@ pub fn Stack(comptime T: type) type {
}
const std = @import("../index.zig");
const Context = struct.{
const Context = struct {
allocator: *std.mem.Allocator,
stack: *Stack(i32),
put_sum: isize,
@@ -81,7 +81,7 @@ test "std.atomic.stack" {
var a = &fixed_buffer_allocator.allocator;
var stack = Stack(i32).init();
var context = Context.{
var context = Context{
.allocator = a,
.stack = &stack,
.put_sum = 0,
@@ -125,7 +125,7 @@ fn startPuts(ctx: *Context) u8 {
while (put_count != 0) : (put_count -= 1) {
std.os.time.sleep(1); // let the os scheduler be our fuzz
const x = @bitCast(i32, r.random.scalar(u32));
const node = ctx.allocator.create(Stack(i32).Node.{
const node = ctx.allocator.create(Stack(i32).Node{
.next = undefined,
.data = x,
}) catch unreachable;
+13 -13
View File
@@ -6,21 +6,21 @@ pub const standard_alphabet_chars = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopq
pub const standard_pad_char = '=';
pub const standard_encoder = Base64Encoder.init(standard_alphabet_chars, standard_pad_char);
pub const Base64Encoder = struct.{
pub const Base64Encoder = struct {
alphabet_chars: []const u8,
pad_char: u8,
/// a bunch of assertions, then simply pass the data right through.
pub fn init(alphabet_chars: []const u8, pad_char: u8) Base64Encoder {
assert(alphabet_chars.len == 64);
var char_in_alphabet = []bool.{false} ** 256;
var char_in_alphabet = []bool{false} ** 256;
for (alphabet_chars) |c| {
assert(!char_in_alphabet[c]);
assert(c != pad_char);
char_in_alphabet[c] = true;
}
return Base64Encoder.{
return Base64Encoder{
.alphabet_chars = alphabet_chars,
.pad_char = pad_char,
};
@@ -77,7 +77,7 @@ pub const Base64Encoder = struct.{
pub const standard_decoder = Base64Decoder.init(standard_alphabet_chars, standard_pad_char);
pub const Base64Decoder = struct.{
pub const Base64Decoder = struct {
/// e.g. 'A' => 0.
/// undefined for any value not in the 64 alphabet chars.
char_to_index: [256]u8,
@@ -89,9 +89,9 @@ pub const Base64Decoder = struct.{
pub fn init(alphabet_chars: []const u8, pad_char: u8) Base64Decoder {
assert(alphabet_chars.len == 64);
var result = Base64Decoder.{
var result = Base64Decoder{
.char_to_index = undefined,
.char_in_alphabet = []bool.{false} ** 256,
.char_in_alphabet = []bool{false} ** 256,
.pad_char = pad_char,
};
@@ -153,13 +153,13 @@ pub const Base64Decoder = struct.{
}
};
pub const Base64DecoderWithIgnore = struct.{
pub const Base64DecoderWithIgnore = struct {
decoder: Base64Decoder,
char_is_ignored: [256]bool,
pub fn init(alphabet_chars: []const u8, pad_char: u8, ignore_chars: []const u8) Base64DecoderWithIgnore {
var result = Base64DecoderWithIgnore.{
var result = Base64DecoderWithIgnore{
.decoder = Base64Decoder.init(alphabet_chars, pad_char),
.char_is_ignored = []bool.{false} ** 256,
.char_is_ignored = []bool{false} ** 256,
};
for (ignore_chars) |c| {
@@ -180,7 +180,7 @@ pub const Base64DecoderWithIgnore = struct.{
/// Invalid characters that are not ignored result in error.InvalidCharacter.
/// Invalid padding results in error.InvalidPadding.
/// Decoding more data than can fit in dest results in error.OutputTooSmall. See also ::calcSizeUpperBound.
/// Returns the number of bytes writen to dest.
/// Returns the number of bytes written to dest.
pub fn decode(decoder_with_ignore: *const Base64DecoderWithIgnore, dest: []u8, source: []const u8) !usize {
const decoder = &decoder_with_ignore.decoder;
@@ -270,7 +270,7 @@ pub const Base64DecoderWithIgnore = struct.{
pub const standard_decoder_unsafe = Base64DecoderUnsafe.init(standard_alphabet_chars, standard_pad_char);
pub const Base64DecoderUnsafe = struct.{
pub const Base64DecoderUnsafe = struct {
/// e.g. 'A' => 0.
/// undefined for any value not in the 64 alphabet chars.
char_to_index: [256]u8,
@@ -278,7 +278,7 @@ pub const Base64DecoderUnsafe = struct.{
pub fn init(alphabet_chars: []const u8, pad_char: u8) Base64DecoderUnsafe {
assert(alphabet_chars.len == 64);
var result = Base64DecoderUnsafe.{
var result = Base64DecoderUnsafe{
.char_to_index = undefined,
.pad_char = pad_char,
};
@@ -432,7 +432,7 @@ fn testDecodeIgnoreSpace(expected_decoded: []const u8, encoded: []const u8) !voi
assert(mem.eql(u8, decoded[0..written], expected_decoded));
}
fn testError(encoded: []const u8, expected_err: error) !void {
fn testError(encoded: []const u8, expected_err: anyerror) !void {
const standard_decoder_ignore_space = Base64DecoderWithIgnore.init(standard_alphabet_chars, standard_pad_char, " ");
var buffer: [0x100]u8 = undefined;
if (standard_decoder.calcSize(encoded)) |decoded_size| {
+2 -2
View File
@@ -6,13 +6,13 @@ const assert = std.debug.assert;
/// BufMap copies keys and values before they go into the map, and
/// frees them when they get removed.
pub const BufMap = struct.{
pub const BufMap = struct {
hash_map: BufMapHashMap,
const BufMapHashMap = HashMap([]const u8, []const u8, mem.hash_slice_u8, mem.eql_slice_u8);
pub fn init(allocator: *Allocator) BufMap {
var self = BufMap.{ .hash_map = BufMapHashMap.init(allocator) };
var self = BufMap{ .hash_map = BufMapHashMap.init(allocator) };
return self;
}
+2 -2
View File
@@ -4,13 +4,13 @@ const mem = @import("mem.zig");
const Allocator = mem.Allocator;
const assert = std.debug.assert;
pub const BufSet = struct.{
pub const BufSet = struct {
hash_map: BufSetHashMap,
const BufSetHashMap = HashMap([]const u8, void, mem.hash_slice_u8, mem.eql_slice_u8);
pub fn init(a: *Allocator) BufSet {
var self = BufSet.{ .hash_map = BufSetHashMap.init(a) };
var self = BufSet{ .hash_map = BufSetHashMap.init(a) };
return self;
}
+6 -6
View File
@@ -6,7 +6,7 @@ const assert = debug.assert;
const ArrayList = std.ArrayList;
/// A buffer that allocates memory and maintains a null byte at the end.
pub const Buffer = struct.{
pub const Buffer = struct {
list: ArrayList(u8),
/// Must deinitialize with deinit.
@@ -28,7 +28,7 @@ pub const Buffer = struct.{
/// * ::replaceContents
/// * ::resize
pub fn initNull(allocator: *Allocator) Buffer {
return Buffer.{ .list = ArrayList(u8).init(allocator) };
return Buffer{ .list = ArrayList(u8).init(allocator) };
}
/// Must deinitialize with deinit.
@@ -40,7 +40,7 @@ pub const Buffer = struct.{
/// allocated with `allocator`.
/// Must deinitialize with deinit.
pub fn fromOwnedSlice(allocator: *Allocator, slice: []u8) !Buffer {
var self = Buffer.{ .list = ArrayList(u8).fromOwnedSlice(allocator, slice) };
var self = Buffer{ .list = ArrayList(u8).fromOwnedSlice(allocator, slice) };
try self.list.append(0);
return self;
}
@@ -55,13 +55,13 @@ pub const Buffer = struct.{
}
pub fn allocPrint(allocator: *Allocator, comptime format: []const u8, args: ...) !Buffer {
const countSize = struct.{
fn countSize(size: *usize, bytes: []const u8) (error.{}!void) {
const countSize = struct {
fn countSize(size: *usize, bytes: []const u8) (error{}!void) {
size.* += bytes.len;
}
}.countSize;
var size: usize = 0;
std.fmt.format(&size, error.{}, countSize, format, args) catch |err| switch (err) {};
std.fmt.format(&size, error{}, countSize, format, args) catch |err| switch (err) {};
var self = try Buffer.initSize(allocator, size);
assert((std.fmt.bufPrint(self.list.items, format, args) catch unreachable).len == size);
return self;
+60 -60
View File
@@ -15,7 +15,7 @@ const BufSet = std.BufSet;
const BufMap = std.BufMap;
const fmt_lib = std.fmt;
pub const Builder = struct.{
pub const Builder = struct {
uninstall_tls: TopLevelStep,
install_tls: TopLevelStep,
have_uninstall_step: bool,
@@ -37,7 +37,7 @@ pub const Builder = struct.{
invalid_user_input: bool,
zig_exe: []const u8,
default_step: *Step,
env_map: *const BufMap,
env_map: *BufMap,
top_level_steps: ArrayList(*TopLevelStep),
prefix: []const u8,
search_prefixes: ArrayList([]const u8),
@@ -48,7 +48,7 @@ pub const Builder = struct.{
cache_root: []const u8,
release_mode: ?builtin.Mode,
pub const CStd = enum.{
pub const CStd = enum {
C89,
C99,
C11,
@@ -57,25 +57,25 @@ pub const Builder = struct.{
const UserInputOptionsMap = HashMap([]const u8, UserInputOption, mem.hash_slice_u8, mem.eql_slice_u8);
const AvailableOptionsMap = HashMap([]const u8, AvailableOption, mem.hash_slice_u8, mem.eql_slice_u8);
const AvailableOption = struct.{
const AvailableOption = struct {
name: []const u8,
type_id: TypeId,
description: []const u8,
};
const UserInputOption = struct.{
const UserInputOption = struct {
name: []const u8,
value: UserValue,
used: bool,
};
const UserValue = union(enum).{
const UserValue = union(enum) {
Flag: void,
Scalar: []const u8,
List: ArrayList([]const u8),
};
const TypeId = enum.{
const TypeId = enum {
Bool,
Int,
Float,
@@ -83,7 +83,7 @@ pub const Builder = struct.{
List,
};
const TopLevelStep = struct.{
const TopLevelStep = struct {
step: Step,
description: []const u8,
};
@@ -91,7 +91,7 @@ pub const Builder = struct.{
pub fn init(allocator: *Allocator, zig_exe: []const u8, build_root: []const u8, cache_root: []const u8) Builder {
const env_map = allocator.createOne(BufMap) catch unreachable;
env_map.* = os.getEnvMap(allocator) catch unreachable;
var self = Builder.{
var self = Builder{
.zig_exe = zig_exe,
.build_root = build_root,
.cache_root = os.path.relative(allocator, build_root, cache_root) catch unreachable,
@@ -118,12 +118,12 @@ pub const Builder = struct.{
.lib_dir = undefined,
.exe_dir = undefined,
.installed_files = ArrayList([]const u8).init(allocator),
.uninstall_tls = TopLevelStep.{
.uninstall_tls = TopLevelStep{
.step = Step.init("uninstall", allocator, makeUninstall),
.description = "Remove build artifacts from prefix path",
},
.have_uninstall_step = false,
.install_tls = TopLevelStep.{
.install_tls = TopLevelStep{
.step = Step.initNoOp("install", allocator),
.description = "Copy build artifacts to prefix path",
},
@@ -214,7 +214,7 @@ pub const Builder = struct.{
}
pub fn version(self: *const Builder, major: u32, minor: u32, patch: u32) Version {
return Version.{
return Version{
.major = major,
.minor = minor,
.patch = patch,
@@ -269,7 +269,7 @@ pub const Builder = struct.{
return &self.uninstall_tls.step;
}
fn makeUninstall(uninstall_step: *Step) error!void {
fn makeUninstall(uninstall_step: *Step) anyerror!void {
const uninstall_tls = @fieldParentPtr(TopLevelStep, "step", uninstall_step);
const self = @fieldParentPtr(Builder, "uninstall_tls", uninstall_tls);
@@ -283,7 +283,7 @@ pub const Builder = struct.{
// TODO remove empty directories
}
fn makeOneStep(self: *Builder, s: *Step) error!void {
fn makeOneStep(self: *Builder, s: *Step) anyerror!void {
if (s.loop_flag) {
warn("Dependency loop detected:\n {}\n", s.name);
return error.DependencyLoopDetected;
@@ -358,7 +358,7 @@ pub const Builder = struct.{
pub fn option(self: *Builder, comptime T: type, name: []const u8, description: []const u8) ?T {
const type_id = comptime typeToEnum(T);
const available_option = AvailableOption.{
const available_option = AvailableOption{
.name = name,
.type_id = type_id,
.description = description,
@@ -410,7 +410,7 @@ pub const Builder = struct.{
}
pub fn step(self: *Builder, name: []const u8, description: []const u8) *Step {
const step_info = self.allocator.create(TopLevelStep.{
const step_info = self.allocator.create(TopLevelStep{
.step = Step.initNoOp(name, self.allocator),
.description = description,
}) catch unreachable;
@@ -437,9 +437,9 @@ pub const Builder = struct.{
pub fn addUserInputOption(self: *Builder, name: []const u8, value: []const u8) !bool {
const gop = try self.user_input_options.getOrPut(name);
if (!gop.found_existing) {
gop.kv.value = UserInputOption.{
gop.kv.value = UserInputOption{
.name = name,
.value = UserValue.{ .Scalar = value },
.value = UserValue{ .Scalar = value },
.used = false,
};
return false;
@@ -452,18 +452,18 @@ pub const Builder = struct.{
var list = ArrayList([]const u8).init(self.allocator);
list.append(s) catch unreachable;
list.append(value) catch unreachable;
_ = self.user_input_options.put(name, UserInputOption.{
_ = self.user_input_options.put(name, UserInputOption{
.name = name,
.value = UserValue.{ .List = list },
.value = UserValue{ .List = list },
.used = false,
}) catch unreachable;
},
UserValue.List => |*list| {
// append to the list
list.append(value) catch unreachable;
_ = self.user_input_options.put(name, UserInputOption.{
_ = self.user_input_options.put(name, UserInputOption{
.name = name,
.value = UserValue.{ .List = list.* },
.value = UserValue{ .List = list.* },
.used = false,
}) catch unreachable;
},
@@ -478,9 +478,9 @@ pub const Builder = struct.{
pub fn addUserInputFlag(self: *Builder, name: []const u8) !bool {
const gop = try self.user_input_options.getOrPut(name);
if (!gop.found_existing) {
gop.kv.value = UserInputOption.{
gop.kv.value = UserInputOption{
.name = name,
.value = UserValue.{ .Flag = {} },
.value = UserValue{ .Flag = {} },
.used = false,
};
return false;
@@ -660,7 +660,7 @@ pub const Builder = struct.{
pub fn findProgram(self: *Builder, names: []const []const u8, paths: []const []const u8) ![]const u8 {
// TODO report error for ambiguous situations
const exe_extension = (Target.{ .Native = {} }).exeFileExt();
const exe_extension = (Target{ .Native = {} }).exeFileExt();
for (self.search_prefixes.toSliceConst()) |search_prefix| {
for (names) |name| {
if (os.path.isAbsolute(name)) {
@@ -679,7 +679,7 @@ pub const Builder = struct.{
if (os.path.isAbsolute(name)) {
return name;
}
var it = mem.split(PATH, []u8.{os.path.delimiter});
var it = mem.split(PATH, []u8{os.path.delimiter});
while (it.next()) |path| {
const full_path = try os.path.join(self.allocator, path, self.fmt("{}{}", name, exe_extension));
if (os.path.real(self.allocator, full_path)) |real_path| {
@@ -733,19 +733,19 @@ pub const Builder = struct.{
}
};
const Version = struct.{
const Version = struct {
major: u32,
minor: u32,
patch: u32,
};
const CrossTarget = struct.{
const CrossTarget = struct {
arch: builtin.Arch,
os: builtin.Os,
environ: builtin.Environ,
};
pub const Target = union(enum).{
pub const Target = union(enum) {
Native: void,
Cross: CrossTarget,
@@ -800,7 +800,7 @@ pub const Target = union(enum).{
}
};
pub const LibExeObjStep = struct.{
pub const LibExeObjStep = struct {
step: Step,
builder: *Builder,
name: []const u8,
@@ -842,12 +842,12 @@ pub const LibExeObjStep = struct.{
source_files: ArrayList([]const u8),
object_src: []const u8,
const Pkg = struct.{
const Pkg = struct {
name: []const u8,
path: []const u8,
};
const Kind = enum.{
const Kind = enum {
Exe,
Lib,
Obj,
@@ -895,7 +895,7 @@ pub const LibExeObjStep = struct.{
}
fn initExtraArgs(builder: *Builder, name: []const u8, root_src: ?[]const u8, kind: Kind, static: bool, ver: Version) LibExeObjStep {
var self = LibExeObjStep.{
var self = LibExeObjStep{
.no_rosegment = false,
.strip = false,
.builder = builder,
@@ -938,7 +938,7 @@ pub const LibExeObjStep = struct.{
}
fn initC(builder: *Builder, name: []const u8, kind: Kind, version: Version, static: bool) LibExeObjStep {
var self = LibExeObjStep.{
var self = LibExeObjStep{
.no_rosegment = false,
.builder = builder,
.name = name,
@@ -1018,8 +1018,8 @@ pub const LibExeObjStep = struct.{
}
pub fn setTarget(self: *LibExeObjStep, target_arch: builtin.Arch, target_os: builtin.Os, target_environ: builtin.Environ) void {
self.target = Target.{
.Cross = CrossTarget.{
self.target = Target{
.Cross = CrossTarget{
.arch = target_arch,
.os = target_os,
.environ = target_environ,
@@ -1148,7 +1148,7 @@ pub const LibExeObjStep = struct.{
pub fn addPackagePath(self: *LibExeObjStep, name: []const u8, pkg_index_path: []const u8) void {
assert(self.is_zig);
self.packages.append(Pkg.{
self.packages.append(Pkg{
.name = name,
.path = pkg_index_path,
}) catch unreachable;
@@ -1640,7 +1640,7 @@ pub const LibExeObjStep = struct.{
}
};
pub const TestStep = struct.{
pub const TestStep = struct {
step: Step,
builder: *Builder,
root_src: []const u8,
@@ -1660,7 +1660,7 @@ pub const TestStep = struct.{
pub fn init(builder: *Builder, root_src: []const u8) TestStep {
const step_name = builder.fmt("test {}", root_src);
return TestStep.{
return TestStep{
.step = Step.init(step_name, builder.allocator, make),
.builder = builder,
.root_src = root_src,
@@ -1669,7 +1669,7 @@ pub const TestStep = struct.{
.name_prefix = "",
.filter = null,
.link_libs = BufSet.init(builder.allocator),
.target = Target.{ .Native = {} },
.target = Target{ .Native = {} },
.exec_cmd_args = null,
.include_dirs = ArrayList([]const u8).init(builder.allocator),
.lib_paths = ArrayList([]const u8).init(builder.allocator),
@@ -1746,8 +1746,8 @@ pub const TestStep = struct.{
}
pub fn setTarget(self: *TestStep, target_arch: builtin.Arch, target_os: builtin.Os, target_environ: builtin.Environ) void {
self.target = Target.{
.Cross = CrossTarget.{
self.target = Target{
.Cross = CrossTarget{
.arch = target_arch,
.os = target_os,
.environ = target_environ,
@@ -1875,7 +1875,7 @@ pub const TestStep = struct.{
}
};
pub const CommandStep = struct.{
pub const CommandStep = struct {
step: Step,
builder: *Builder,
argv: [][]const u8,
@@ -1884,7 +1884,7 @@ pub const CommandStep = struct.{
/// ::argv is copied.
pub fn create(builder: *Builder, cwd: ?[]const u8, env_map: *const BufMap, argv: []const []const u8) *CommandStep {
const self = builder.allocator.create(CommandStep.{
const self = builder.allocator.create(CommandStep{
.builder = builder,
.step = Step.init(argv[0], builder.allocator, make),
.argv = builder.allocator.alloc([]u8, argv.len) catch unreachable,
@@ -1905,7 +1905,7 @@ pub const CommandStep = struct.{
}
};
const InstallArtifactStep = struct.{
const InstallArtifactStep = struct {
step: Step,
builder: *Builder,
artifact: *LibExeObjStep,
@@ -1919,7 +1919,7 @@ const InstallArtifactStep = struct.{
LibExeObjStep.Kind.Exe => builder.exe_dir,
LibExeObjStep.Kind.Lib => builder.lib_dir,
};
const self = builder.allocator.create(Self.{
const self = builder.allocator.create(Self{
.builder = builder,
.step = Step.init(builder.fmt("install {}", artifact.step.name), builder.allocator, make),
.artifact = artifact,
@@ -1953,14 +1953,14 @@ const InstallArtifactStep = struct.{
}
};
pub const InstallFileStep = struct.{
pub const InstallFileStep = struct {
step: Step,
builder: *Builder,
src_path: []const u8,
dest_path: []const u8,
pub fn init(builder: *Builder, src_path: []const u8, dest_path: []const u8) InstallFileStep {
return InstallFileStep.{
return InstallFileStep{
.builder = builder,
.step = Step.init(builder.fmt("install {}", src_path), builder.allocator, make),
.src_path = src_path,
@@ -1974,14 +1974,14 @@ pub const InstallFileStep = struct.{
}
};
pub const WriteFileStep = struct.{
pub const WriteFileStep = struct {
step: Step,
builder: *Builder,
file_path: []const u8,
data: []const u8,
pub fn init(builder: *Builder, file_path: []const u8, data: []const u8) WriteFileStep {
return WriteFileStep.{
return WriteFileStep{
.builder = builder,
.step = Step.init(builder.fmt("writefile {}", file_path), builder.allocator, make),
.file_path = file_path,
@@ -2004,32 +2004,32 @@ pub const WriteFileStep = struct.{
}
};
pub const LogStep = struct.{
pub const LogStep = struct {
step: Step,
builder: *Builder,
data: []const u8,
pub fn init(builder: *Builder, data: []const u8) LogStep {
return LogStep.{
return LogStep{
.builder = builder,
.step = Step.init(builder.fmt("log {}", data), builder.allocator, make),
.data = data,
};
}
fn make(step: *Step) error!void {
fn make(step: *Step) anyerror!void {
const self = @fieldParentPtr(LogStep, "step", step);
warn("{}", self.data);
}
};
pub const RemoveDirStep = struct.{
pub const RemoveDirStep = struct {
step: Step,
builder: *Builder,
dir_path: []const u8,
pub fn init(builder: *Builder, dir_path: []const u8) RemoveDirStep {
return RemoveDirStep.{
return RemoveDirStep{
.builder = builder,
.step = Step.init(builder.fmt("RemoveDir {}", dir_path), builder.allocator, make),
.dir_path = dir_path,
@@ -2047,15 +2047,15 @@ pub const RemoveDirStep = struct.{
}
};
pub const Step = struct.{
pub const Step = struct {
name: []const u8,
makeFn: fn (self: *Step) error!void,
makeFn: fn (self: *Step) anyerror!void,
dependencies: ArrayList(*Step),
loop_flag: bool,
done_flag: bool,
pub fn init(name: []const u8, allocator: *Allocator, makeFn: fn (*Step) error!void) Step {
return Step.{
pub fn init(name: []const u8, allocator: *Allocator, makeFn: fn (*Step) anyerror!void) Step {
return Step{
.name = name,
.makeFn = makeFn,
.dependencies = ArrayList(*Step).init(allocator),
@@ -2078,7 +2078,7 @@ pub const Step = struct.{
self.dependencies.append(other) catch unreachable;
}
fn makeNoOp(self: *Step) error!void {}
fn makeNoOp(self: *Step) anyerror!void {}
};
fn doAtomicSymLinks(allocator: *Allocator, output_path: []const u8, filename_major_only: []const u8, filename_name_only: []const u8) !void {
+13 -13
View File
@@ -52,18 +52,18 @@ pub const _errno = __error;
pub const in_port_t = u16;
pub const sa_family_t = u8;
pub const socklen_t = u32;
pub const sockaddr = extern union.{
pub const sockaddr = extern union {
in: sockaddr_in,
in6: sockaddr_in6,
};
pub const sockaddr_in = extern struct.{
pub const sockaddr_in = extern struct {
len: u8,
family: sa_family_t,
port: in_port_t,
addr: u32,
zero: [8]u8,
};
pub const sockaddr_in6 = extern struct.{
pub const sockaddr_in6 = extern struct {
len: u8,
family: sa_family_t,
port: in_port_t,
@@ -72,23 +72,23 @@ pub const sockaddr_in6 = extern struct.{
scope_id: u32,
};
pub const timeval = extern struct.{
pub const timeval = extern struct {
tv_sec: isize,
tv_usec: isize,
};
pub const timezone = extern struct.{
pub const timezone = extern struct {
tz_minuteswest: i32,
tz_dsttime: i32,
};
pub const mach_timebase_info_data = extern struct.{
pub const mach_timebase_info_data = extern struct {
numer: u32,
denom: u32,
};
/// Renamed to Stat to not conflict with the stat function.
pub const Stat = extern struct.{
pub const Stat = extern struct {
dev: i32,
mode: u16,
nlink: u16,
@@ -113,7 +113,7 @@ pub const Stat = extern struct.{
qspare: [2]i64,
};
pub const timespec = extern struct.{
pub const timespec = extern struct {
tv_sec: isize,
tv_nsec: isize,
};
@@ -121,13 +121,13 @@ pub const timespec = extern struct.{
pub const sigset_t = u32;
/// Renamed from `sigaction` to `Sigaction` to avoid conflict with function name.
pub const Sigaction = extern struct.{
pub const Sigaction = extern struct {
handler: extern fn (c_int) void,
sa_mask: sigset_t,
sa_flags: c_int,
};
pub const dirent = extern struct.{
pub const dirent = extern struct {
d_ino: usize,
d_seekoff: usize,
d_reclen: u16,
@@ -136,13 +136,13 @@ pub const dirent = extern struct.{
d_name: u8, // field address is address of first byte of name
};
pub const pthread_attr_t = extern struct.{
pub const pthread_attr_t = extern struct {
__sig: c_long,
__opaque: [56]u8,
};
/// Renamed from `kevent` to `Kevent` to avoid conflict with function name.
pub const Kevent = extern struct.{
pub const Kevent = extern struct {
ident: usize,
filter: i16,
flags: u16,
@@ -166,7 +166,7 @@ comptime {
assert(@byteOffsetOf(Kevent, "udata") == 24);
}
pub const kevent64_s = extern struct.{
pub const kevent64_s = extern struct {
ident: u64,
filter: i16,
flags: u16,
+1 -1
View File
@@ -4,7 +4,7 @@ pub extern "c" fn getrandom(buf_ptr: [*]u8, buf_len: usize, flags: c_uint) c_int
extern "c" fn __errno_location() *c_int;
pub const _errno = __errno_location;
pub const pthread_attr_t = extern struct.{
pub const pthread_attr_t = extern struct {
__size: [56]u8,
__align: c_long,
};
+14 -14
View File
@@ -20,14 +20,14 @@ const IMAGE_NT_OPTIONAL_HDR64_MAGIC = 0x20b;
const IMAGE_NUMBEROF_DIRECTORY_ENTRIES = 16;
const DEBUG_DIRECTORY = 6;
pub const CoffError = error.{
pub const CoffError = error{
InvalidPEMagic,
InvalidPEHeader,
InvalidMachine,
MissingCoffSection,
};
pub const Coff = struct.{
pub const Coff = struct {
in_file: os.File,
allocator: *mem.Allocator,
@@ -56,10 +56,10 @@ pub const Coff = struct.{
var pe_header_magic: [4]u8 = undefined;
try in.readNoEof(pe_header_magic[0..]);
if (!mem.eql(u8, pe_header_magic, []u8.{ 'P', 'E', 0, 0 }))
if (!mem.eql(u8, pe_header_magic, []u8{ 'P', 'E', 0, 0 }))
return error.InvalidPEHeader;
self.coff_header = CoffHeader.{
self.coff_header = CoffHeader{
.machine = try in.readIntLe(u16),
.number_of_sections = try in.readIntLe(u16),
.timedate_stamp = try in.readIntLe(u32),
@@ -98,7 +98,7 @@ pub const Coff = struct.{
return error.InvalidPEHeader;
for (self.pe_header.data_directory) |*data_dir| {
data_dir.* = OptionalHeader.DataDirectory.{
data_dir.* = OptionalHeader.DataDirectory{
.virtual_address = try in.readIntLe(u32),
.size = try in.readIntLe(u32),
};
@@ -154,10 +154,10 @@ pub const Coff = struct.{
var i: u16 = 0;
while (i < self.coff_header.number_of_sections) : (i += 1) {
try in.readNoEof(name[0..]);
try self.sections.append(Section.{
.header = SectionHeader.{
try self.sections.append(Section{
.header = SectionHeader{
.name = name,
.misc = SectionHeader.Misc.{ .physical_address = try in.readIntLe(u32) },
.misc = SectionHeader.Misc{ .physical_address = try in.readIntLe(u32) },
.virtual_address = try in.readIntLe(u32),
.size_of_raw_data = try in.readIntLe(u32),
.pointer_to_raw_data = try in.readIntLe(u32),
@@ -181,7 +181,7 @@ pub const Coff = struct.{
}
};
const CoffHeader = struct.{
const CoffHeader = struct {
machine: u16,
number_of_sections: u16,
timedate_stamp: u32,
@@ -191,8 +191,8 @@ const CoffHeader = struct.{
characteristics: u16,
};
const OptionalHeader = struct.{
const DataDirectory = struct.{
const OptionalHeader = struct {
const DataDirectory = struct {
virtual_address: u32,
size: u32,
};
@@ -201,12 +201,12 @@ const OptionalHeader = struct.{
data_directory: [IMAGE_NUMBEROF_DIRECTORY_ENTRIES]DataDirectory,
};
pub const Section = struct.{
pub const Section = struct {
header: SectionHeader,
};
const SectionHeader = struct.{
const Misc = union.{
const SectionHeader = struct {
const Misc = union {
physical_address: u32,
virtual_size: u32,
};
+34 -34
View File
@@ -5,7 +5,7 @@ const debug = @import("../debug/index.zig");
const builtin = @import("builtin");
const htest = @import("test.zig");
const RoundParam = struct.{
const RoundParam = struct {
a: usize,
b: usize,
c: usize,
@@ -15,7 +15,7 @@ const RoundParam = struct.{
};
fn Rp(a: usize, b: usize, c: usize, d: usize, x: usize, y: usize) RoundParam {
return RoundParam.{
return RoundParam{
.a = a,
.b = b,
.c = c,
@@ -32,12 +32,12 @@ pub const Blake2s224 = Blake2s(224);
pub const Blake2s256 = Blake2s(256);
fn Blake2s(comptime out_len: usize) type {
return struct.{
return struct {
const Self = @This();
const block_length = 64;
const digest_length = out_len / 8;
const iv = [8]u32.{
const iv = [8]u32{
0x6A09E667,
0xBB67AE85,
0x3C6EF372,
@@ -48,17 +48,17 @@ fn Blake2s(comptime out_len: usize) type {
0x5BE0CD19,
};
const sigma = [10][16]u8.{
[]const u8.{ 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15 },
[]const u8.{ 14, 10, 4, 8, 9, 15, 13, 6, 1, 12, 0, 2, 11, 7, 5, 3 },
[]const u8.{ 11, 8, 12, 0, 5, 2, 15, 13, 10, 14, 3, 6, 7, 1, 9, 4 },
[]const u8.{ 7, 9, 3, 1, 13, 12, 11, 14, 2, 6, 5, 10, 4, 0, 15, 8 },
[]const u8.{ 9, 0, 5, 7, 2, 4, 10, 15, 14, 1, 11, 12, 6, 8, 3, 13 },
[]const u8.{ 2, 12, 6, 10, 0, 11, 8, 3, 4, 13, 7, 5, 15, 14, 1, 9 },
[]const u8.{ 12, 5, 1, 15, 14, 13, 4, 10, 0, 7, 6, 3, 9, 2, 8, 11 },
[]const u8.{ 13, 11, 7, 14, 12, 1, 3, 9, 5, 0, 15, 4, 8, 6, 2, 10 },
[]const u8.{ 6, 15, 14, 9, 11, 3, 0, 8, 12, 2, 13, 7, 1, 4, 10, 5 },
[]const u8.{ 10, 2, 8, 4, 7, 6, 1, 5, 15, 11, 9, 14, 3, 12, 13, 0 },
const sigma = [10][16]u8{
[]const u8{ 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15 },
[]const u8{ 14, 10, 4, 8, 9, 15, 13, 6, 1, 12, 0, 2, 11, 7, 5, 3 },
[]const u8{ 11, 8, 12, 0, 5, 2, 15, 13, 10, 14, 3, 6, 7, 1, 9, 4 },
[]const u8{ 7, 9, 3, 1, 13, 12, 11, 14, 2, 6, 5, 10, 4, 0, 15, 8 },
[]const u8{ 9, 0, 5, 7, 2, 4, 10, 15, 14, 1, 11, 12, 6, 8, 3, 13 },
[]const u8{ 2, 12, 6, 10, 0, 11, 8, 3, 4, 13, 7, 5, 15, 14, 1, 9 },
[]const u8{ 12, 5, 1, 15, 14, 13, 4, 10, 0, 7, 6, 3, 9, 2, 8, 11 },
[]const u8{ 13, 11, 7, 14, 12, 1, 3, 9, 5, 0, 15, 4, 8, 6, 2, 10 },
[]const u8{ 6, 15, 14, 9, 11, 3, 0, 8, 12, 2, 13, 7, 1, 4, 10, 5 },
[]const u8{ 10, 2, 8, 4, 7, 6, 1, 5, 15, 11, 9, 14, 3, 12, 13, 0 },
};
h: [8]u32,
@@ -147,7 +147,7 @@ fn Blake2s(comptime out_len: usize) type {
v[13] ^= @intCast(u32, d.t >> 32);
if (last) v[14] = ~v[14];
const rounds = comptime []RoundParam.{
const rounds = comptime []RoundParam{
Rp(0, 4, 8, 12, 0, 1),
Rp(1, 5, 9, 13, 2, 3),
Rp(2, 6, 10, 14, 4, 5),
@@ -250,7 +250,7 @@ test "blake2s256 streaming" {
}
test "blake2s256 aligned final" {
var block = []u8.{0} ** Blake2s256.block_length;
var block = []u8{0} ** Blake2s256.block_length;
var out: [Blake2s256.digest_length]u8 = undefined;
var h = Blake2s256.init();
@@ -265,12 +265,12 @@ pub const Blake2b384 = Blake2b(384);
pub const Blake2b512 = Blake2b(512);
fn Blake2b(comptime out_len: usize) type {
return struct.{
return struct {
const Self = @This();
const block_length = 128;
const digest_length = out_len / 8;
const iv = [8]u64.{
const iv = [8]u64{
0x6a09e667f3bcc908,
0xbb67ae8584caa73b,
0x3c6ef372fe94f82b,
@@ -281,19 +281,19 @@ fn Blake2b(comptime out_len: usize) type {
0x5be0cd19137e2179,
};
const sigma = [12][16]u8.{
[]const u8.{ 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15 },
[]const u8.{ 14, 10, 4, 8, 9, 15, 13, 6, 1, 12, 0, 2, 11, 7, 5, 3 },
[]const u8.{ 11, 8, 12, 0, 5, 2, 15, 13, 10, 14, 3, 6, 7, 1, 9, 4 },
[]const u8.{ 7, 9, 3, 1, 13, 12, 11, 14, 2, 6, 5, 10, 4, 0, 15, 8 },
[]const u8.{ 9, 0, 5, 7, 2, 4, 10, 15, 14, 1, 11, 12, 6, 8, 3, 13 },
[]const u8.{ 2, 12, 6, 10, 0, 11, 8, 3, 4, 13, 7, 5, 15, 14, 1, 9 },
[]const u8.{ 12, 5, 1, 15, 14, 13, 4, 10, 0, 7, 6, 3, 9, 2, 8, 11 },
[]const u8.{ 13, 11, 7, 14, 12, 1, 3, 9, 5, 0, 15, 4, 8, 6, 2, 10 },
[]const u8.{ 6, 15, 14, 9, 11, 3, 0, 8, 12, 2, 13, 7, 1, 4, 10, 5 },
[]const u8.{ 10, 2, 8, 4, 7, 6, 1, 5, 15, 11, 9, 14, 3, 12, 13, 0 },
[]const u8.{ 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15 },
[]const u8.{ 14, 10, 4, 8, 9, 15, 13, 6, 1, 12, 0, 2, 11, 7, 5, 3 },
const sigma = [12][16]u8{
[]const u8{ 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15 },
[]const u8{ 14, 10, 4, 8, 9, 15, 13, 6, 1, 12, 0, 2, 11, 7, 5, 3 },
[]const u8{ 11, 8, 12, 0, 5, 2, 15, 13, 10, 14, 3, 6, 7, 1, 9, 4 },
[]const u8{ 7, 9, 3, 1, 13, 12, 11, 14, 2, 6, 5, 10, 4, 0, 15, 8 },
[]const u8{ 9, 0, 5, 7, 2, 4, 10, 15, 14, 1, 11, 12, 6, 8, 3, 13 },
[]const u8{ 2, 12, 6, 10, 0, 11, 8, 3, 4, 13, 7, 5, 15, 14, 1, 9 },
[]const u8{ 12, 5, 1, 15, 14, 13, 4, 10, 0, 7, 6, 3, 9, 2, 8, 11 },
[]const u8{ 13, 11, 7, 14, 12, 1, 3, 9, 5, 0, 15, 4, 8, 6, 2, 10 },
[]const u8{ 6, 15, 14, 9, 11, 3, 0, 8, 12, 2, 13, 7, 1, 4, 10, 5 },
[]const u8{ 10, 2, 8, 4, 7, 6, 1, 5, 15, 11, 9, 14, 3, 12, 13, 0 },
[]const u8{ 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15 },
[]const u8{ 14, 10, 4, 8, 9, 15, 13, 6, 1, 12, 0, 2, 11, 7, 5, 3 },
};
h: [8]u64,
@@ -380,7 +380,7 @@ fn Blake2b(comptime out_len: usize) type {
v[13] ^= @intCast(u64, d.t >> 64);
if (last) v[14] = ~v[14];
const rounds = comptime []RoundParam.{
const rounds = comptime []RoundParam{
Rp(0, 4, 8, 12, 0, 1),
Rp(1, 5, 9, 13, 2, 3),
Rp(2, 6, 10, 14, 4, 5),
@@ -483,7 +483,7 @@ test "blake2b512 streaming" {
}
test "blake2b512 aligned final" {
var block = []u8.{0} ** Blake2b512.block_length;
var block = []u8{0} ** Blake2b512.block_length;
var out: [Blake2b512.digest_length]u8 = undefined;
var h = Blake2b512.init();
+27 -27
View File
@@ -7,7 +7,7 @@ const assert = std.debug.assert;
const builtin = @import("builtin");
const maxInt = std.math.maxInt;
const QuarterRound = struct.{
const QuarterRound = struct {
a: usize,
b: usize,
c: usize,
@@ -15,7 +15,7 @@ const QuarterRound = struct.{
};
fn Rp(a: usize, b: usize, c: usize, d: usize) QuarterRound {
return QuarterRound.{
return QuarterRound{
.a = a,
.b = b,
.c = c,
@@ -32,7 +32,7 @@ fn salsa20_wordtobyte(out: []u8, input: [16]u32) void {
for (x) |_, i|
x[i] = input[i];
const rounds = comptime []QuarterRound.{
const rounds = comptime []QuarterRound{
Rp(0, 4, 8, 12),
Rp(1, 5, 9, 13),
Rp(2, 6, 10, 14),
@@ -69,7 +69,7 @@ fn chaCha20_internal(out: []u8, in: []const u8, key: [8]u32, counter: [4]u32) vo
var cursor: usize = 0;
const c = "expand 32-byte k";
const constant_le = []u32.{
const constant_le = []u32{
mem.readIntLE(u32, c[0..4]),
mem.readIntLE(u32, c[4..8]),
mem.readIntLE(u32, c[8..12]),
@@ -183,7 +183,7 @@ pub fn chaCha20With64BitNonce(out: []u8, in: []const u8, counter: u64, key: [32]
// https://tools.ietf.org/html/rfc7539#section-2.4.2
test "crypto.chacha20 test vector sunscreen" {
const expected_result = []u8.{
const expected_result = []u8{
0x6e, 0x2e, 0x35, 0x9a, 0x25, 0x68, 0xf9, 0x80,
0x41, 0xba, 0x07, 0x28, 0xdd, 0x0d, 0x69, 0x81,
0xe9, 0x7e, 0x7a, 0xec, 0x1d, 0x43, 0x60, 0xc2,
@@ -202,13 +202,13 @@ test "crypto.chacha20 test vector sunscreen" {
};
const input = "Ladies and Gentlemen of the class of '99: If I could offer you only one tip for the future, sunscreen would be it.";
var result: [114]u8 = undefined;
const key = []u8.{
const key = []u8{
0, 1, 2, 3, 4, 5, 6, 7,
8, 9, 10, 11, 12, 13, 14, 15,
16, 17, 18, 19, 20, 21, 22, 23,
24, 25, 26, 27, 28, 29, 30, 31,
};
const nonce = []u8.{
const nonce = []u8{
0, 0, 0, 0,
0, 0, 0, 0x4a,
0, 0, 0, 0,
@@ -225,7 +225,7 @@ test "crypto.chacha20 test vector sunscreen" {
// https://tools.ietf.org/html/draft-agl-tls-chacha20poly1305-04#section-7
test "crypto.chacha20 test vector 1" {
const expected_result = []u8.{
const expected_result = []u8{
0x76, 0xb8, 0xe0, 0xad, 0xa0, 0xf1, 0x3d, 0x90,
0x40, 0x5d, 0x6a, 0xe5, 0x53, 0x86, 0xbd, 0x28,
0xbd, 0xd2, 0x19, 0xb8, 0xa0, 0x8d, 0xed, 0x1a,
@@ -235,7 +235,7 @@ test "crypto.chacha20 test vector 1" {
0x6a, 0x43, 0xb8, 0xf4, 0x15, 0x18, 0xa1, 0x1c,
0xc3, 0x87, 0xb6, 0x69, 0xb2, 0xee, 0x65, 0x86,
};
const input = []u8.{
const input = []u8{
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
@@ -246,20 +246,20 @@ test "crypto.chacha20 test vector 1" {
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
};
var result: [64]u8 = undefined;
const key = []u8.{
const key = []u8{
0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0,
};
const nonce = []u8.{ 0, 0, 0, 0, 0, 0, 0, 0 };
const nonce = []u8{ 0, 0, 0, 0, 0, 0, 0, 0 };
chaCha20With64BitNonce(result[0..], input[0..], 0, key, nonce);
assert(mem.eql(u8, expected_result, result));
}
test "crypto.chacha20 test vector 2" {
const expected_result = []u8.{
const expected_result = []u8{
0x45, 0x40, 0xf0, 0x5a, 0x9f, 0x1f, 0xb2, 0x96,
0xd7, 0x73, 0x6e, 0x7b, 0x20, 0x8e, 0x3c, 0x96,
0xeb, 0x4f, 0xe1, 0x83, 0x46, 0x88, 0xd2, 0x60,
@@ -269,7 +269,7 @@ test "crypto.chacha20 test vector 2" {
0x53, 0xd7, 0x92, 0xb1, 0xc4, 0x3f, 0xea, 0x81,
0x7e, 0x9a, 0xd2, 0x75, 0xae, 0x54, 0x69, 0x63,
};
const input = []u8.{
const input = []u8{
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
@@ -280,20 +280,20 @@ test "crypto.chacha20 test vector 2" {
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
};
var result: [64]u8 = undefined;
const key = []u8.{
const key = []u8{
0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 1,
};
const nonce = []u8.{ 0, 0, 0, 0, 0, 0, 0, 0 };
const nonce = []u8{ 0, 0, 0, 0, 0, 0, 0, 0 };
chaCha20With64BitNonce(result[0..], input[0..], 0, key, nonce);
assert(mem.eql(u8, expected_result, result));
}
test "crypto.chacha20 test vector 3" {
const expected_result = []u8.{
const expected_result = []u8{
0xde, 0x9c, 0xba, 0x7b, 0xf3, 0xd6, 0x9e, 0xf5,
0xe7, 0x86, 0xdc, 0x63, 0x97, 0x3f, 0x65, 0x3a,
0x0b, 0x49, 0xe0, 0x15, 0xad, 0xbf, 0xf7, 0x13,
@@ -303,7 +303,7 @@ test "crypto.chacha20 test vector 3" {
0x52, 0x77, 0x06, 0x2e, 0xb7, 0xa0, 0x43, 0x3e,
0x44, 0x5f, 0x41, 0xe3,
};
const input = []u8.{
const input = []u8{
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
@@ -314,20 +314,20 @@ test "crypto.chacha20 test vector 3" {
0x00, 0x00, 0x00, 0x00,
};
var result: [60]u8 = undefined;
const key = []u8.{
const key = []u8{
0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0,
};
const nonce = []u8.{ 0, 0, 0, 0, 0, 0, 0, 1 };
const nonce = []u8{ 0, 0, 0, 0, 0, 0, 0, 1 };
chaCha20With64BitNonce(result[0..], input[0..], 0, key, nonce);
assert(mem.eql(u8, expected_result, result));
}
test "crypto.chacha20 test vector 4" {
const expected_result = []u8.{
const expected_result = []u8{
0xef, 0x3f, 0xdf, 0xd6, 0xc6, 0x15, 0x78, 0xfb,
0xf5, 0xcf, 0x35, 0xbd, 0x3d, 0xd3, 0x3b, 0x80,
0x09, 0x63, 0x16, 0x34, 0xd2, 0x1e, 0x42, 0xac,
@@ -337,7 +337,7 @@ test "crypto.chacha20 test vector 4" {
0x5d, 0xdc, 0x49, 0x7a, 0x0b, 0x46, 0x6e, 0x7d,
0x6b, 0xbd, 0xb0, 0x04, 0x1b, 0x2f, 0x58, 0x6b,
};
const input = []u8.{
const input = []u8{
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
@@ -348,20 +348,20 @@ test "crypto.chacha20 test vector 4" {
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
};
var result: [64]u8 = undefined;
const key = []u8.{
const key = []u8{
0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0,
};
const nonce = []u8.{ 1, 0, 0, 0, 0, 0, 0, 0 };
const nonce = []u8{ 1, 0, 0, 0, 0, 0, 0, 0 };
chaCha20With64BitNonce(result[0..], input[0..], 0, key, nonce);
assert(mem.eql(u8, expected_result, result));
}
test "crypto.chacha20 test vector 5" {
const expected_result = []u8.{
const expected_result = []u8{
0xf7, 0x98, 0xa1, 0x89, 0xf1, 0x95, 0xe6, 0x69,
0x82, 0x10, 0x5f, 0xfb, 0x64, 0x0b, 0xb7, 0x75,
0x7f, 0x57, 0x9d, 0xa3, 0x16, 0x02, 0xfc, 0x93,
@@ -398,7 +398,7 @@ test "crypto.chacha20 test vector 5" {
0x87, 0x46, 0xd4, 0x52, 0x4d, 0x38, 0x40, 0x7a,
0x6d, 0xeb, 0x3a, 0xb7, 0x8f, 0xab, 0x78, 0xc9,
};
const input = []u8.{
const input = []u8{
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
@@ -418,13 +418,13 @@ test "crypto.chacha20 test vector 5" {
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
};
var result: [256]u8 = undefined;
const key = []u8.{
const key = []u8{
0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07,
0x08, 0x09, 0x0a, 0x0b, 0x0c, 0x0d, 0x0e, 0x0f,
0x10, 0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x17,
0x18, 0x19, 0x1a, 0x1b, 0x1c, 0x1d, 0x1e, 0x1f,
};
const nonce = []u8.{
const nonce = []u8{
0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07,
};
+1 -1
View File
@@ -9,7 +9,7 @@ pub const HmacSha256 = Hmac(crypto.Sha256);
pub const HmacBlake2s256 = Hmac(crypto.Blake2s256);
pub fn Hmac(comptime Hash: type) type {
return struct.{
return struct {
const Self = @This();
pub const mac_length = Hash.digest_length;
pub const minimum_key_length = 0;
+9 -9
View File
@@ -5,7 +5,7 @@ const builtin = @import("builtin");
const debug = @import("../debug/index.zig");
const fmt = @import("../fmt/index.zig");
const RoundParam = struct.{
const RoundParam = struct {
a: usize,
b: usize,
c: usize,
@@ -16,7 +16,7 @@ const RoundParam = struct.{
};
fn Rp(a: usize, b: usize, c: usize, d: usize, k: usize, s: u32, t: u32) RoundParam {
return RoundParam.{
return RoundParam{
.a = a,
.b = b,
.c = c,
@@ -27,7 +27,7 @@ fn Rp(a: usize, b: usize, c: usize, d: usize, k: usize, s: u32, t: u32) RoundPar
};
}
pub const Md5 = struct.{
pub const Md5 = struct {
const Self = @This();
const block_length = 64;
const digest_length = 16;
@@ -131,14 +131,14 @@ pub const Md5 = struct.{
s[i] |= u32(b[i * 4 + 3]) << 24;
}
var v: [4]u32 = []u32.{
var v: [4]u32 = []u32{
d.s[0],
d.s[1],
d.s[2],
d.s[3],
};
const round0 = comptime []RoundParam.{
const round0 = comptime []RoundParam{
Rp(0, 1, 2, 3, 0, 7, 0xD76AA478),
Rp(3, 0, 1, 2, 1, 12, 0xE8C7B756),
Rp(2, 3, 0, 1, 2, 17, 0x242070DB),
@@ -161,7 +161,7 @@ pub const Md5 = struct.{
v[r.a] = v[r.b] +% math.rotl(u32, v[r.a], r.s);
}
const round1 = comptime []RoundParam.{
const round1 = comptime []RoundParam{
Rp(0, 1, 2, 3, 1, 5, 0xF61E2562),
Rp(3, 0, 1, 2, 6, 9, 0xC040B340),
Rp(2, 3, 0, 1, 11, 14, 0x265E5A51),
@@ -184,7 +184,7 @@ pub const Md5 = struct.{
v[r.a] = v[r.b] +% math.rotl(u32, v[r.a], r.s);
}
const round2 = comptime []RoundParam.{
const round2 = comptime []RoundParam{
Rp(0, 1, 2, 3, 5, 4, 0xFFFA3942),
Rp(3, 0, 1, 2, 8, 11, 0x8771F681),
Rp(2, 3, 0, 1, 11, 16, 0x6D9D6122),
@@ -207,7 +207,7 @@ pub const Md5 = struct.{
v[r.a] = v[r.b] +% math.rotl(u32, v[r.a], r.s);
}
const round3 = comptime []RoundParam.{
const round3 = comptime []RoundParam{
Rp(0, 1, 2, 3, 0, 6, 0xF4292244),
Rp(3, 0, 1, 2, 7, 10, 0x432AFF97),
Rp(2, 3, 0, 1, 14, 15, 0xAB9423A7),
@@ -271,7 +271,7 @@ test "md5 streaming" {
}
test "md5 aligned final" {
var block = []u8.{0} ** Md5.block_length;
var block = []u8{0} ** Md5.block_length;
var out: [Md5.digest_length]u8 = undefined;
var h = Md5.init();
+1 -1
View File
@@ -9,7 +9,7 @@ const Endian = builtin.Endian;
const readInt = std.mem.readInt;
const writeInt = std.mem.writeInt;
pub const Poly1305 = struct.{
pub const Poly1305 = struct {
const Self = @This();
pub const mac_length = 16;
+10 -10
View File
@@ -4,7 +4,7 @@ const endian = @import("../endian.zig");
const debug = @import("../debug/index.zig");
const builtin = @import("builtin");
const RoundParam = struct.{
const RoundParam = struct {
a: usize,
b: usize,
c: usize,
@@ -14,7 +14,7 @@ const RoundParam = struct.{
};
fn Rp(a: usize, b: usize, c: usize, d: usize, e: usize, i: u32) RoundParam {
return RoundParam.{
return RoundParam{
.a = a,
.b = b,
.c = c,
@@ -24,7 +24,7 @@ fn Rp(a: usize, b: usize, c: usize, d: usize, e: usize, i: u32) RoundParam {
};
}
pub const Sha1 = struct.{
pub const Sha1 = struct {
const Self = @This();
const block_length = 64;
const digest_length = 20;
@@ -118,7 +118,7 @@ pub const Sha1 = struct.{
var s: [16]u32 = undefined;
var v: [5]u32 = []u32.{
var v: [5]u32 = []u32{
d.s[0],
d.s[1],
d.s[2],
@@ -126,7 +126,7 @@ pub const Sha1 = struct.{
d.s[4],
};
const round0a = comptime []RoundParam.{
const round0a = comptime []RoundParam{
Rp(0, 1, 2, 3, 4, 0),
Rp(4, 0, 1, 2, 3, 1),
Rp(3, 4, 0, 1, 2, 2),
@@ -151,7 +151,7 @@ pub const Sha1 = struct.{
v[r.b] = math.rotl(u32, v[r.b], u32(30));
}
const round0b = comptime []RoundParam.{
const round0b = comptime []RoundParam{
Rp(4, 0, 1, 2, 3, 16),
Rp(3, 4, 0, 1, 2, 17),
Rp(2, 3, 4, 0, 1, 18),
@@ -165,7 +165,7 @@ pub const Sha1 = struct.{
v[r.b] = math.rotl(u32, v[r.b], u32(30));
}
const round1 = comptime []RoundParam.{
const round1 = comptime []RoundParam{
Rp(0, 1, 2, 3, 4, 20),
Rp(4, 0, 1, 2, 3, 21),
Rp(3, 4, 0, 1, 2, 22),
@@ -195,7 +195,7 @@ pub const Sha1 = struct.{
v[r.b] = math.rotl(u32, v[r.b], u32(30));
}
const round2 = comptime []RoundParam.{
const round2 = comptime []RoundParam{
Rp(0, 1, 2, 3, 4, 40),
Rp(4, 0, 1, 2, 3, 41),
Rp(3, 4, 0, 1, 2, 42),
@@ -225,7 +225,7 @@ pub const Sha1 = struct.{
v[r.b] = math.rotl(u32, v[r.b], u32(30));
}
const round3 = comptime []RoundParam.{
const round3 = comptime []RoundParam{
Rp(0, 1, 2, 3, 4, 60),
Rp(4, 0, 1, 2, 3, 61),
Rp(3, 4, 0, 1, 2, 62),
@@ -292,7 +292,7 @@ test "sha1 streaming" {
}
test "sha1 aligned final" {
var block = []u8.{0} ** Sha1.block_length;
var block = []u8{0} ** Sha1.block_length;
var out: [Sha1.digest_length]u8 = undefined;
var h = Sha1.init();
+18 -18
View File
@@ -8,7 +8,7 @@ const htest = @import("test.zig");
/////////////////////
// Sha224 + Sha256
const RoundParam256 = struct.{
const RoundParam256 = struct {
a: usize,
b: usize,
c: usize,
@@ -22,7 +22,7 @@ const RoundParam256 = struct.{
};
fn Rp256(a: usize, b: usize, c: usize, d: usize, e: usize, f: usize, g: usize, h: usize, i: usize, k: u32) RoundParam256 {
return RoundParam256.{
return RoundParam256{
.a = a,
.b = b,
.c = c,
@@ -36,7 +36,7 @@ fn Rp256(a: usize, b: usize, c: usize, d: usize, e: usize, f: usize, g: usize, h
};
}
const Sha2Params32 = struct.{
const Sha2Params32 = struct {
iv0: u32,
iv1: u32,
iv2: u32,
@@ -48,7 +48,7 @@ const Sha2Params32 = struct.{
out_len: usize,
};
const Sha224Params = Sha2Params32.{
const Sha224Params = Sha2Params32{
.iv0 = 0xC1059ED8,
.iv1 = 0x367CD507,
.iv2 = 0x3070DD17,
@@ -60,7 +60,7 @@ const Sha224Params = Sha2Params32.{
.out_len = 224,
};
const Sha256Params = Sha2Params32.{
const Sha256Params = Sha2Params32{
.iv0 = 0x6A09E667,
.iv1 = 0xBB67AE85,
.iv2 = 0x3C6EF372,
@@ -76,7 +76,7 @@ pub const Sha224 = Sha2_32(Sha224Params);
pub const Sha256 = Sha2_32(Sha256Params);
fn Sha2_32(comptime params: Sha2Params32) type {
return struct.{
return struct {
const Self = @This();
const block_length = 64;
const digest_length = params.out_len / 8;
@@ -188,7 +188,7 @@ fn Sha2_32(comptime params: Sha2Params32) type {
s[i] = s[i - 16] +% s[i - 7] +% (math.rotr(u32, s[i - 15], u32(7)) ^ math.rotr(u32, s[i - 15], u32(18)) ^ (s[i - 15] >> 3)) +% (math.rotr(u32, s[i - 2], u32(17)) ^ math.rotr(u32, s[i - 2], u32(19)) ^ (s[i - 2] >> 10));
}
var v: [8]u32 = []u32.{
var v: [8]u32 = []u32{
d.s[0],
d.s[1],
d.s[2],
@@ -199,7 +199,7 @@ fn Sha2_32(comptime params: Sha2Params32) type {
d.s[7],
};
const round0 = comptime []RoundParam256.{
const round0 = comptime []RoundParam256{
Rp256(0, 1, 2, 3, 4, 5, 6, 7, 0, 0x428A2F98),
Rp256(7, 0, 1, 2, 3, 4, 5, 6, 1, 0x71374491),
Rp256(6, 7, 0, 1, 2, 3, 4, 5, 2, 0xB5C0FBCF),
@@ -338,7 +338,7 @@ test "sha256 streaming" {
}
test "sha256 aligned final" {
var block = []u8.{0} ** Sha256.block_length;
var block = []u8{0} ** Sha256.block_length;
var out: [Sha256.digest_length]u8 = undefined;
var h = Sha256.init();
@@ -349,7 +349,7 @@ test "sha256 aligned final" {
/////////////////////
// Sha384 + Sha512
const RoundParam512 = struct.{
const RoundParam512 = struct {
a: usize,
b: usize,
c: usize,
@@ -363,7 +363,7 @@ const RoundParam512 = struct.{
};
fn Rp512(a: usize, b: usize, c: usize, d: usize, e: usize, f: usize, g: usize, h: usize, i: usize, k: u64) RoundParam512 {
return RoundParam512.{
return RoundParam512{
.a = a,
.b = b,
.c = c,
@@ -377,7 +377,7 @@ fn Rp512(a: usize, b: usize, c: usize, d: usize, e: usize, f: usize, g: usize, h
};
}
const Sha2Params64 = struct.{
const Sha2Params64 = struct {
iv0: u64,
iv1: u64,
iv2: u64,
@@ -389,7 +389,7 @@ const Sha2Params64 = struct.{
out_len: usize,
};
const Sha384Params = Sha2Params64.{
const Sha384Params = Sha2Params64{
.iv0 = 0xCBBB9D5DC1059ED8,
.iv1 = 0x629A292A367CD507,
.iv2 = 0x9159015A3070DD17,
@@ -401,7 +401,7 @@ const Sha384Params = Sha2Params64.{
.out_len = 384,
};
const Sha512Params = Sha2Params64.{
const Sha512Params = Sha2Params64{
.iv0 = 0x6A09E667F3BCC908,
.iv1 = 0xBB67AE8584CAA73B,
.iv2 = 0x3C6EF372FE94F82B,
@@ -417,7 +417,7 @@ pub const Sha384 = Sha2_64(Sha384Params);
pub const Sha512 = Sha2_64(Sha512Params);
fn Sha2_64(comptime params: Sha2Params64) type {
return struct.{
return struct {
const Self = @This();
const block_length = 128;
const digest_length = params.out_len / 8;
@@ -533,7 +533,7 @@ fn Sha2_64(comptime params: Sha2Params64) type {
s[i] = s[i - 16] +% s[i - 7] +% (math.rotr(u64, s[i - 15], u64(1)) ^ math.rotr(u64, s[i - 15], u64(8)) ^ (s[i - 15] >> 7)) +% (math.rotr(u64, s[i - 2], u64(19)) ^ math.rotr(u64, s[i - 2], u64(61)) ^ (s[i - 2] >> 6));
}
var v: [8]u64 = []u64.{
var v: [8]u64 = []u64{
d.s[0],
d.s[1],
d.s[2],
@@ -544,7 +544,7 @@ fn Sha2_64(comptime params: Sha2Params64) type {
d.s[7],
};
const round0 = comptime []RoundParam512.{
const round0 = comptime []RoundParam512{
Rp512(0, 1, 2, 3, 4, 5, 6, 7, 0, 0x428A2F98D728AE22),
Rp512(7, 0, 1, 2, 3, 4, 5, 6, 1, 0x7137449123EF65CD),
Rp512(6, 7, 0, 1, 2, 3, 4, 5, 2, 0xB5C0FBCFEC4D3B2F),
@@ -715,7 +715,7 @@ test "sha512 streaming" {
}
test "sha512 aligned final" {
var block = []u8.{0} ** Sha512.block_length;
var block = []u8{0} ** Sha512.block_length;
var out: [Sha512.digest_length]u8 = undefined;
var h = Sha512.init();
+10 -10
View File
@@ -11,7 +11,7 @@ pub const Sha3_384 = Keccak(384, 0x06);
pub const Sha3_512 = Keccak(512, 0x06);
fn Keccak(comptime bits: usize, comptime delim: u8) type {
return struct.{
return struct {
const Self = @This();
const block_length = 200;
const digest_length = bits / 8;
@@ -86,7 +86,7 @@ fn Keccak(comptime bits: usize, comptime delim: u8) type {
};
}
const RC = []const u64.{
const RC = []const u64{
0x0000000000000001, 0x0000000000008082, 0x800000000000808a, 0x8000000080008000,
0x000000000000808b, 0x0000000080000001, 0x8000000080008081, 0x8000000000008009,
0x000000000000008a, 0x0000000000000088, 0x0000000080008009, 0x000000008000000a,
@@ -95,15 +95,15 @@ const RC = []const u64.{
0x8000000080008081, 0x8000000000008080, 0x0000000080000001, 0x8000000080008008,
};
const ROTC = []const usize.{
const ROTC = []const usize{
1, 3, 6, 10, 15, 21, 28, 36, 45, 55, 2, 14, 27, 41, 56, 8, 25, 43, 62, 18, 39, 61, 20, 44,
};
const PIL = []const usize.{
const PIL = []const usize{
10, 7, 11, 17, 18, 3, 5, 16, 8, 21, 24, 4, 15, 23, 19, 13, 12, 2, 20, 14, 22, 9, 6, 1,
};
const M5 = []const usize.{
const M5 = []const usize{
0, 1, 2, 3, 4, 0, 1, 2, 3, 4,
};
@@ -115,9 +115,9 @@ fn keccak_f(comptime F: usize, d: []u8) void {
break :x 12 + 2 * math.log2(B);
};
var s = []const u64.{0} ** 25;
var t = []const u64.{0} ** 1;
var c = []const u64.{0} ** 5;
var s = []const u64{0} ** 25;
var t = []const u64{0} ** 1;
var c = []const u64{0} ** 5;
for (s) |*r, i| {
r.* = mem.readIntLE(u64, d[8 * i .. 8 * i + 8]);
@@ -224,7 +224,7 @@ test "sha3-256 streaming" {
}
test "sha3-256 aligned final" {
var block = []u8.{0} ** Sha3_256.block_length;
var block = []u8{0} ** Sha3_256.block_length;
var out: [Sha3_256.digest_length]u8 = undefined;
var h = Sha3_256.init();
@@ -295,7 +295,7 @@ test "sha3-512 streaming" {
}
test "sha3-512 aligned final" {
var block = []u8.{0} ** Sha3_512.block_length;
var block = []u8{0} ** Sha3_512.block_length;
var out: [Sha3_512.digest_length]u8 = undefined;
var h = Sha3_512.init();
+16 -16
View File
@@ -9,20 +9,20 @@ const MiB = 1024 * KiB;
var prng = std.rand.DefaultPrng.init(0);
const Crypto = struct.{
const Crypto = struct {
ty: type,
name: []const u8,
};
const hashes = []Crypto.{
Crypto.{ .ty = crypto.Md5, .name = "md5" },
Crypto.{ .ty = crypto.Sha1, .name = "sha1" },
Crypto.{ .ty = crypto.Sha256, .name = "sha256" },
Crypto.{ .ty = crypto.Sha512, .name = "sha512" },
Crypto.{ .ty = crypto.Sha3_256, .name = "sha3-256" },
Crypto.{ .ty = crypto.Sha3_512, .name = "sha3-512" },
Crypto.{ .ty = crypto.Blake2s256, .name = "blake2s" },
Crypto.{ .ty = crypto.Blake2b512, .name = "blake2b" },
const hashes = []Crypto{
Crypto{ .ty = crypto.Md5, .name = "md5" },
Crypto{ .ty = crypto.Sha1, .name = "sha1" },
Crypto{ .ty = crypto.Sha256, .name = "sha256" },
Crypto{ .ty = crypto.Sha512, .name = "sha512" },
Crypto{ .ty = crypto.Sha3_256, .name = "sha3-256" },
Crypto{ .ty = crypto.Sha3_512, .name = "sha3-512" },
Crypto{ .ty = crypto.Blake2s256, .name = "blake2s" },
Crypto{ .ty = crypto.Blake2b512, .name = "blake2b" },
};
pub fn benchmarkHash(comptime Hash: var, comptime bytes: comptime_int) !u64 {
@@ -45,11 +45,11 @@ pub fn benchmarkHash(comptime Hash: var, comptime bytes: comptime_int) !u64 {
return throughput;
}
const macs = []Crypto.{
Crypto.{ .ty = crypto.Poly1305, .name = "poly1305" },
Crypto.{ .ty = crypto.HmacMd5, .name = "hmac-md5" },
Crypto.{ .ty = crypto.HmacSha1, .name = "hmac-sha1" },
Crypto.{ .ty = crypto.HmacSha256, .name = "hmac-sha256" },
const macs = []Crypto{
Crypto{ .ty = crypto.Poly1305, .name = "poly1305" },
Crypto{ .ty = crypto.HmacMd5, .name = "hmac-md5" },
Crypto{ .ty = crypto.HmacSha1, .name = "hmac-sha1" },
Crypto{ .ty = crypto.HmacSha256, .name = "hmac-sha256" },
};
pub fn benchmarkMac(comptime Mac: var, comptime bytes: comptime_int) !u64 {
@@ -75,7 +75,7 @@ pub fn benchmarkMac(comptime Mac: var, comptime bytes: comptime_int) !u64 {
return throughput;
}
const exchanges = []Crypto.{Crypto.{ .ty = crypto.X25519, .name = "x25519" }};
const exchanges = []Crypto{Crypto{ .ty = crypto.X25519, .name = "x25519" }};
pub fn benchmarkKeyExchange(comptime DhKeyExchange: var, comptime exchange_count: comptime_int) !u64 {
std.debug.assert(DhKeyExchange.minimum_key_length >= DhKeyExchange.secret_length);
+4 -4
View File
@@ -11,7 +11,7 @@ const readInt = std.mem.readInt;
const writeInt = std.mem.writeInt;
// Based on Supercop's ref10 implementation.
pub const X25519 = struct.{
pub const X25519 = struct {
pub const secret_length = 32;
pub const minimum_key_length = 32;
@@ -52,7 +52,7 @@ pub const X25519 = struct.{
// computes the actual scalar product (the result is in x2 and z2)
// Montgomery ladder
// In projective coordinates, to avoid divisons: x = X / Z
// In projective coordinates, to avoid divisions: x = X / Z
// We don't care about the y coordinate, it's only 1 bit of information
Fe.init1(x2);
Fe.init0(z2); // "zero" point
@@ -116,7 +116,7 @@ pub const X25519 = struct.{
}
pub fn createPublicKey(public_key: []u8, private_key: []const u8) bool {
var base_point = []u8.{9} ++ []u8.{0} ** 31;
var base_point = []u8{9} ++ []u8{0} ** 31;
return create(public_key, private_key, base_point);
}
};
@@ -137,7 +137,7 @@ fn zerocmp(comptime T: type, a: []const T) bool {
// A bit bigger than TweetNaCl, over 4 times faster.
// field element
const Fe = struct.{
const Fe = struct {
b: [10]i32,
fn secureZero(self: *Fe) void {
+2 -2
View File
@@ -55,7 +55,7 @@ pub fn addNullByte(allocator: *mem.Allocator, slice: []const u8) ![]u8 {
return result;
}
pub const NullTerminated2DArray = struct.{
pub const NullTerminated2DArray = struct {
allocator: *mem.Allocator,
byte_count: usize,
ptr: ?[*]?[*]u8,
@@ -95,7 +95,7 @@ pub const NullTerminated2DArray = struct.{
}
index_buf[i] = null;
return NullTerminated2DArray.{
return NullTerminated2DArray{
.allocator = allocator,
.byte_count = byte_count,
.ptr = @ptrCast(?[*]?[*]u8, buf.ptr),
+3 -3
View File
@@ -3,7 +3,7 @@ const mem = std.mem;
/// Allocator that fails after N allocations, useful for making sure out of
/// memory conditions are handled correctly.
pub const FailingAllocator = struct.{
pub const FailingAllocator = struct {
allocator: mem.Allocator,
index: usize,
fail_index: usize,
@@ -13,14 +13,14 @@ pub const FailingAllocator = struct.{
deallocations: usize,
pub fn init(allocator: *mem.Allocator, fail_index: usize) FailingAllocator {
return FailingAllocator.{
return FailingAllocator{
.internal_allocator = allocator,
.fail_index = fail_index,
.index = 0,
.allocated_bytes = 0,
.freed_bytes = 0,
.deallocations = 0,
.allocator = mem.Allocator.{
.allocator = mem.Allocator{
.allocFn = alloc,
.reallocFn = realloc,
.freeFn = free,
+121 -99
View File
@@ -21,7 +21,7 @@ pub const runtime_safety = switch (builtin.mode) {
builtin.Mode.ReleaseFast, builtin.Mode.ReleaseSmall => false,
};
const Module = struct.{
const Module = struct {
mod_info: pdb.ModInfo,
module_name: []u8,
obj_file_name: []u8,
@@ -125,7 +125,7 @@ pub fn assert(ok: bool) void {
/// TODO: add `==` operator for `error_union == error_set`, and then
/// remove this function
pub fn assertError(value: var, expected_error: error) void {
pub fn assertError(value: var, expected_error: anyerror) void {
if (value) {
@panic("expected error");
} else |actual_error| {
@@ -171,7 +171,9 @@ pub fn panicExtra(trace: ?*const builtin.StackTrace, first_trace_addr: ?usize, c
os.abort();
}
const RED = "\x1b[31;1m";
const GREEN = "\x1b[32;1m";
const CYAN = "\x1b[36;1m";
const WHITE = "\x1b[37;1m";
const DIM = "\x1b[2m";
const RESET = "\x1b[0m";
@@ -211,7 +213,7 @@ pub fn writeCurrentStackTrace(out_stream: var, debug_info: *DebugInfo, tty_color
builtin.Os.windows => return writeCurrentStackTraceWindows(out_stream, debug_info, tty_color, start_addr),
else => {},
}
const AddressState = union(enum).{
const AddressState = union(enum) {
NotLookingForStartAddress,
LookingForStartAddress: usize,
};
@@ -220,7 +222,7 @@ pub fn writeCurrentStackTrace(out_stream: var, debug_info: *DebugInfo, tty_color
// else AddressState.NotLookingForStartAddress;
var addr_state: AddressState = undefined;
if (start_addr) |addr| {
addr_state = AddressState.{ .LookingForStartAddress = addr };
addr_state = AddressState{ .LookingForStartAddress = addr };
} else {
addr_state = AddressState.NotLookingForStartAddress;
}
@@ -375,7 +377,7 @@ fn printSourceAtAddressWindows(di: *DebugInfo, out_stream: var, relocated_addres
const col_num_entry = @ptrCast(*pdb.ColumnNumberEntry, &subsect_info[line_index]);
break :blk col_num_entry.StartColumn;
} else 0;
break :subsections LineInfo.{
break :subsections LineInfo{
.allocator = allocator,
.file_name = source_file_name,
.line = line,
@@ -442,7 +444,7 @@ fn printSourceAtAddressWindows(di: *DebugInfo, out_stream: var, relocated_addres
}
}
const TtyColor = enum.{
const TtyColor = enum {
Red,
Green,
Cyan,
@@ -454,38 +456,61 @@ const TtyColor = enum.{
/// TODO this is a special case hack right now. clean it up and maybe make it part of std.fmt
fn setTtyColor(tty_color: TtyColor) void {
const S = struct.{
var attrs: windows.WORD = undefined;
var init_attrs = false;
};
if (!S.init_attrs) {
S.init_attrs = true;
var info: windows.CONSOLE_SCREEN_BUFFER_INFO = undefined;
// TODO handle error
_ = windows.GetConsoleScreenBufferInfo(stderr_file.handle, &info);
S.attrs = info.wAttributes;
}
if (os.supportsAnsiEscapeCodes(stderr_file.handle)) {
switch (tty_color) {
TtyColor.Red => {
stderr_file.write(RED) catch return;
},
TtyColor.Green => {
stderr_file.write(GREEN) catch return;
},
TtyColor.Cyan => {
stderr_file.write(CYAN) catch return;
},
TtyColor.White, TtyColor.Bold => {
stderr_file.write(WHITE) catch return;
},
TtyColor.Dim => {
stderr_file.write(DIM) catch return;
},
TtyColor.Reset => {
stderr_file.write(RESET) catch return;
},
}
} else {
const S = struct {
var attrs: windows.WORD = undefined;
var init_attrs = false;
};
if (!S.init_attrs) {
S.init_attrs = true;
var info: windows.CONSOLE_SCREEN_BUFFER_INFO = undefined;
// TODO handle error
_ = windows.GetConsoleScreenBufferInfo(stderr_file.handle, &info);
S.attrs = info.wAttributes;
}
// TODO handle errors
switch (tty_color) {
TtyColor.Red => {
_ = windows.SetConsoleTextAttribute(stderr_file.handle, windows.FOREGROUND_RED | windows.FOREGROUND_INTENSITY);
},
TtyColor.Green => {
_ = windows.SetConsoleTextAttribute(stderr_file.handle, windows.FOREGROUND_GREEN | windows.FOREGROUND_INTENSITY);
},
TtyColor.Cyan => {
_ = windows.SetConsoleTextAttribute(stderr_file.handle, windows.FOREGROUND_GREEN | windows.FOREGROUND_BLUE | windows.FOREGROUND_INTENSITY);
},
TtyColor.White, TtyColor.Bold => {
_ = windows.SetConsoleTextAttribute(stderr_file.handle, windows.FOREGROUND_RED | windows.FOREGROUND_GREEN | windows.FOREGROUND_BLUE | windows.FOREGROUND_INTENSITY);
},
TtyColor.Dim => {
_ = windows.SetConsoleTextAttribute(stderr_file.handle, windows.FOREGROUND_INTENSITY);
},
TtyColor.Reset => {
_ = windows.SetConsoleTextAttribute(stderr_file.handle, S.attrs);
},
// TODO handle errors
switch (tty_color) {
TtyColor.Red => {
_ = windows.SetConsoleTextAttribute(stderr_file.handle, windows.FOREGROUND_RED | windows.FOREGROUND_INTENSITY);
},
TtyColor.Green => {
_ = windows.SetConsoleTextAttribute(stderr_file.handle, windows.FOREGROUND_GREEN | windows.FOREGROUND_INTENSITY);
},
TtyColor.Cyan => {
_ = windows.SetConsoleTextAttribute(stderr_file.handle, windows.FOREGROUND_GREEN | windows.FOREGROUND_BLUE | windows.FOREGROUND_INTENSITY);
},
TtyColor.White, TtyColor.Bold => {
_ = windows.SetConsoleTextAttribute(stderr_file.handle, windows.FOREGROUND_RED | windows.FOREGROUND_GREEN | windows.FOREGROUND_BLUE | windows.FOREGROUND_INTENSITY);
},
TtyColor.Dim => {
_ = windows.SetConsoleTextAttribute(stderr_file.handle, windows.FOREGROUND_INTENSITY);
},
TtyColor.Reset => {
_ = windows.SetConsoleTextAttribute(stderr_file.handle, S.attrs);
},
}
}
}
@@ -660,7 +685,7 @@ fn printLineInfo(
}
// TODO use this
pub const OpenSelfDebugInfoError = error.{
pub const OpenSelfDebugInfoError = error{
MissingDebugInfo,
OutOfMemory,
UnsupportedOperatingSystem,
@@ -680,7 +705,7 @@ fn openSelfDebugInfoWindows(allocator: *mem.Allocator) !DebugInfo {
defer self_file.close();
const coff_obj = try allocator.createOne(coff.Coff);
coff_obj.* = coff.Coff.{
coff_obj.* = coff.Coff{
.in_file = self_file,
.allocator = allocator,
.coff_header = undefined,
@@ -690,7 +715,7 @@ fn openSelfDebugInfoWindows(allocator: *mem.Allocator) !DebugInfo {
.age = undefined,
};
var di = DebugInfo.{
var di = DebugInfo{
.coff = coff_obj,
.pdb = undefined,
.sect_contribs = undefined,
@@ -722,7 +747,7 @@ fn openSelfDebugInfoWindows(allocator: *mem.Allocator) !DebugInfo {
const name_bytes = try allocator.alloc(u8, name_bytes_len);
try pdb_stream.stream.readNoEof(name_bytes);
const HashTableHeader = packed struct.{
const HashTableHeader = packed struct {
Size: u32,
Capacity: u32,
@@ -730,8 +755,7 @@ fn openSelfDebugInfoWindows(allocator: *mem.Allocator) !DebugInfo {
return cap * 2 / 3 + 1;
}
};
var hash_tbl_hdr: HashTableHeader = undefined;
try pdb_stream.stream.readStruct(HashTableHeader, &hash_tbl_hdr);
const hash_tbl_hdr = try pdb_stream.stream.readStruct(HashTableHeader);
if (hash_tbl_hdr.Capacity == 0)
return error.InvalidDebugInfo;
@@ -743,7 +767,7 @@ fn openSelfDebugInfoWindows(allocator: *mem.Allocator) !DebugInfo {
return error.InvalidDebugInfo;
const deleted = try readSparseBitVector(&pdb_stream.stream, allocator);
const Bucket = struct.{
const Bucket = struct {
first: u32,
second: u32,
};
@@ -765,8 +789,7 @@ fn openSelfDebugInfoWindows(allocator: *mem.Allocator) !DebugInfo {
const dbi = di.pdb.dbi;
// Dbi Header
var dbi_stream_header: pdb.DbiStreamHeader = undefined;
try dbi.stream.readStruct(pdb.DbiStreamHeader, &dbi_stream_header);
const dbi_stream_header = try dbi.stream.readStruct(pdb.DbiStreamHeader);
const mod_info_size = dbi_stream_header.ModInfoSize;
const section_contrib_size = dbi_stream_header.SectionContributionSize;
@@ -775,8 +798,7 @@ fn openSelfDebugInfoWindows(allocator: *mem.Allocator) !DebugInfo {
// Module Info Substream
var mod_info_offset: usize = 0;
while (mod_info_offset != mod_info_size) {
var mod_info: pdb.ModInfo = undefined;
try dbi.stream.readStruct(pdb.ModInfo, &mod_info);
const mod_info = try dbi.stream.readStruct(pdb.ModInfo);
var this_record_len: usize = @sizeOf(pdb.ModInfo);
const module_name = try dbi.readNullTermString(allocator);
@@ -791,7 +813,7 @@ fn openSelfDebugInfoWindows(allocator: *mem.Allocator) !DebugInfo {
this_record_len += march_forward_bytes;
}
try modules.append(Module.{
try modules.append(Module{
.mod_info = mod_info,
.module_name = module_name,
.obj_file_name = obj_file_name,
@@ -820,7 +842,7 @@ fn openSelfDebugInfoWindows(allocator: *mem.Allocator) !DebugInfo {
}
while (sect_cont_offset != section_contrib_size) {
const entry = try sect_contribs.addOne();
try dbi.stream.readStruct(pdb.SectionContribEntry, entry);
entry.* = try dbi.stream.readStruct(pdb.SectionContribEntry);
sect_cont_offset += @sizeOf(pdb.SectionContribEntry);
if (sect_cont_offset > section_contrib_size)
@@ -850,7 +872,7 @@ fn readSparseBitVector(stream: var, allocator: *mem.Allocator) ![]usize {
}
fn openSelfDebugInfoLinux(allocator: *mem.Allocator) !DebugInfo {
var di = DebugInfo.{
var di = DebugInfo{
.self_exe_file = undefined,
.elf = undefined,
.debug_info = undefined,
@@ -937,7 +959,7 @@ fn openSelfDebugInfoMacOs(allocator: *mem.Allocator) !DebugInfo {
if (sym.n_sect == 0) {
last_len = sym.n_value;
} else {
symbols_buf[symbol_index] = MachoSymbol.{
symbols_buf[symbol_index] = MachoSymbol{
.nlist = sym,
.ofile = ofile,
.reloc = reloc,
@@ -955,7 +977,7 @@ fn openSelfDebugInfoMacOs(allocator: *mem.Allocator) !DebugInfo {
}
}
const sentinel = try allocator.createOne(macho.nlist_64);
sentinel.* = macho.nlist_64.{
sentinel.* = macho.nlist_64{
.n_strx = 0,
.n_type = 36,
.n_sect = 0,
@@ -970,7 +992,7 @@ fn openSelfDebugInfoMacOs(allocator: *mem.Allocator) !DebugInfo {
// This sort is so that we can binary search later.
std.sort.sort(MachoSymbol, symbols, MachoSymbol.addressLessThan);
return DebugInfo.{
return DebugInfo{
.ofiles = DebugInfo.OFileTable.init(allocator),
.symbols = symbols,
.strings = strings,
@@ -1009,7 +1031,7 @@ fn printLineFromFile(out_stream: var, line_info: LineInfo) !void {
}
}
const MachoSymbol = struct.{
const MachoSymbol = struct {
nlist: *macho.nlist_64,
ofile: ?*macho.nlist_64,
reloc: u64,
@@ -1024,14 +1046,14 @@ const MachoSymbol = struct.{
}
};
const MachOFile = struct.{
const MachOFile = struct {
bytes: []align(@alignOf(macho.mach_header_64)) const u8,
sect_debug_info: ?*const macho.section_64,
sect_debug_line: ?*const macho.section_64,
};
pub const DebugInfo = switch (builtin.os) {
builtin.Os.macosx => struct.{
builtin.Os.macosx => struct {
symbols: []const MachoSymbol,
strings: []const u8,
ofiles: OFileTable,
@@ -1047,13 +1069,13 @@ pub const DebugInfo = switch (builtin.os) {
return self.ofiles.allocator;
}
},
builtin.Os.windows => struct.{
builtin.Os.windows => struct {
pdb: pdb.Pdb,
coff: *coff.Coff,
sect_contribs: []pdb.SectionContribEntry,
modules: []Module,
},
builtin.Os.linux => struct.{
builtin.Os.linux => struct {
self_exe_file: os.File,
elf: elf.Elf,
debug_info: *elf.SectionHeader,
@@ -1082,12 +1104,12 @@ pub const DebugInfo = switch (builtin.os) {
else => @compileError("Unsupported OS"),
};
const PcRange = struct.{
const PcRange = struct {
start: u64,
end: u64,
};
const CompileUnit = struct.{
const CompileUnit = struct {
version: u16,
is_64: bool,
die: *Die,
@@ -1097,25 +1119,25 @@ const CompileUnit = struct.{
const AbbrevTable = ArrayList(AbbrevTableEntry);
const AbbrevTableHeader = struct.{
const AbbrevTableHeader = struct {
// offset from .debug_abbrev
offset: u64,
table: AbbrevTable,
};
const AbbrevTableEntry = struct.{
const AbbrevTableEntry = struct {
has_children: bool,
abbrev_code: u64,
tag_id: u64,
attrs: ArrayList(AbbrevAttr),
};
const AbbrevAttr = struct.{
const AbbrevAttr = struct {
attr_id: u64,
form_id: u64,
};
const FormValue = union(enum).{
const FormValue = union(enum) {
Address: u64,
Block: []u8,
Const: Constant,
@@ -1129,7 +1151,7 @@ const FormValue = union(enum).{
StrPtr: u64,
};
const Constant = struct.{
const Constant = struct {
payload: []u8,
signed: bool,
@@ -1140,12 +1162,12 @@ const Constant = struct.{
}
};
const Die = struct.{
const Die = struct {
tag_id: u64,
has_children: bool,
attrs: ArrayList(Attr),
const Attr = struct.{
const Attr = struct {
id: u64,
value: FormValue,
};
@@ -1192,14 +1214,14 @@ const Die = struct.{
}
};
const FileEntry = struct.{
const FileEntry = struct {
file_name: []const u8,
dir_index: usize,
mtime: usize,
len_bytes: usize,
};
const LineInfo = struct.{
const LineInfo = struct {
line: usize,
column: usize,
file_name: []u8,
@@ -1210,7 +1232,7 @@ const LineInfo = struct.{
}
};
const LineNumberProgram = struct.{
const LineNumberProgram = struct {
address: usize,
file: usize,
line: isize,
@@ -1232,7 +1254,7 @@ const LineNumberProgram = struct.{
prev_end_sequence: bool,
pub fn init(is_stmt: bool, include_dirs: []const []const u8, file_entries: *ArrayList(FileEntry), target_address: usize) LineNumberProgram {
return LineNumberProgram.{
return LineNumberProgram{
.address = 0,
.file = 1,
.line = 1,
@@ -1268,7 +1290,7 @@ const LineNumberProgram = struct.{
self.include_dirs[file_entry.dir_index];
const file_name = try os.path.join(self.file_entries.allocator, dir_name, file_entry.file_name);
errdefer self.file_entries.allocator.free(file_name);
return LineInfo.{
return LineInfo{
.line = if (self.prev_line >= 0) @intCast(usize, self.prev_line) else 0,
.column = self.prev_column,
.file_name = file_name,
@@ -1312,7 +1334,7 @@ fn readAllocBytes(allocator: *mem.Allocator, in_stream: var, size: usize) ![]u8
fn parseFormValueBlockLen(allocator: *mem.Allocator, in_stream: var, size: usize) !FormValue {
const buf = try readAllocBytes(allocator, in_stream, size);
return FormValue.{ .Block = buf };
return FormValue{ .Block = buf };
}
fn parseFormValueBlock(allocator: *mem.Allocator, in_stream: var, size: usize) !FormValue {
@@ -1321,8 +1343,8 @@ fn parseFormValueBlock(allocator: *mem.Allocator, in_stream: var, size: usize) !
}
fn parseFormValueConstant(allocator: *mem.Allocator, in_stream: var, signed: bool, size: usize) !FormValue {
return FormValue.{
.Const = Constant.{
return FormValue{
.Const = Constant{
.signed = signed,
.payload = try readAllocBytes(allocator, in_stream, size),
},
@@ -1339,7 +1361,7 @@ fn parseFormValueTargetAddrSize(in_stream: var) !u64 {
fn parseFormValueRefLen(allocator: *mem.Allocator, in_stream: var, size: usize) !FormValue {
const buf = try readAllocBytes(allocator, in_stream, size);
return FormValue.{ .Ref = buf };
return FormValue{ .Ref = buf };
}
fn parseFormValueRef(allocator: *mem.Allocator, in_stream: var, comptime T: type) !FormValue {
@@ -1347,7 +1369,7 @@ fn parseFormValueRef(allocator: *mem.Allocator, in_stream: var, comptime T: type
return parseFormValueRefLen(allocator, in_stream, block_len);
}
const ParseFormValueError = error.{
const ParseFormValueError = error{
EndOfStream,
InvalidDebugInfo,
EndOfFile,
@@ -1356,7 +1378,7 @@ const ParseFormValueError = error.{
fn parseFormValue(allocator: *mem.Allocator, in_stream: var, form_id: u64, is_64: bool) ParseFormValueError!FormValue {
return switch (form_id) {
DW.FORM_addr => FormValue.{ .Address = try parseFormValueTargetAddrSize(in_stream) },
DW.FORM_addr => FormValue{ .Address = try parseFormValueTargetAddrSize(in_stream) },
DW.FORM_block1 => parseFormValueBlock(allocator, in_stream, 1),
DW.FORM_block2 => parseFormValueBlock(allocator, in_stream, 2),
DW.FORM_block4 => parseFormValueBlock(allocator, in_stream, 4),
@@ -1376,11 +1398,11 @@ fn parseFormValue(allocator: *mem.Allocator, in_stream: var, form_id: u64, is_64
DW.FORM_exprloc => {
const size = try readULeb128(in_stream);
const buf = try readAllocBytes(allocator, in_stream, size);
return FormValue.{ .ExprLoc = buf };
return FormValue{ .ExprLoc = buf };
},
DW.FORM_flag => FormValue.{ .Flag = (try in_stream.readByte()) != 0 },
DW.FORM_flag_present => FormValue.{ .Flag = true },
DW.FORM_sec_offset => FormValue.{ .SecOffset = try parseFormValueDwarfOffsetSize(in_stream, is_64) },
DW.FORM_flag => FormValue{ .Flag = (try in_stream.readByte()) != 0 },
DW.FORM_flag_present => FormValue{ .Flag = true },
DW.FORM_sec_offset => FormValue{ .SecOffset = try parseFormValueDwarfOffsetSize(in_stream, is_64) },
DW.FORM_ref1 => parseFormValueRef(allocator, in_stream, u8),
DW.FORM_ref2 => parseFormValueRef(allocator, in_stream, u16),
@@ -1391,11 +1413,11 @@ fn parseFormValue(allocator: *mem.Allocator, in_stream: var, form_id: u64, is_64
return parseFormValueRefLen(allocator, in_stream, ref_len);
},
DW.FORM_ref_addr => FormValue.{ .RefAddr = try parseFormValueDwarfOffsetSize(in_stream, is_64) },
DW.FORM_ref_sig8 => FormValue.{ .RefSig8 = try in_stream.readIntLe(u64) },
DW.FORM_ref_addr => FormValue{ .RefAddr = try parseFormValueDwarfOffsetSize(in_stream, is_64) },
DW.FORM_ref_sig8 => FormValue{ .RefSig8 = try in_stream.readIntLe(u64) },
DW.FORM_string => FormValue.{ .String = try readStringRaw(allocator, in_stream) },
DW.FORM_strp => FormValue.{ .StrPtr = try parseFormValueDwarfOffsetSize(in_stream, is_64) },
DW.FORM_string => FormValue{ .String = try readStringRaw(allocator, in_stream) },
DW.FORM_strp => FormValue{ .StrPtr = try parseFormValueDwarfOffsetSize(in_stream, is_64) },
DW.FORM_indirect => {
const child_form_id = try readULeb128(in_stream);
return parseFormValue(allocator, in_stream, child_form_id, is_64);
@@ -1412,7 +1434,7 @@ fn parseAbbrevTable(st: *DebugInfo) !AbbrevTable {
while (true) {
const abbrev_code = try readULeb128(in_stream);
if (abbrev_code == 0) return result;
try result.append(AbbrevTableEntry.{
try result.append(AbbrevTableEntry{
.abbrev_code = abbrev_code,
.tag_id = try readULeb128(in_stream),
.has_children = (try in_stream.readByte()) == DW.CHILDREN_yes,
@@ -1424,7 +1446,7 @@ fn parseAbbrevTable(st: *DebugInfo) !AbbrevTable {
const attr_id = try readULeb128(in_stream);
const form_id = try readULeb128(in_stream);
if (attr_id == 0 and form_id == 0) break;
try attrs.append(AbbrevAttr.{
try attrs.append(AbbrevAttr{
.attr_id = attr_id,
.form_id = form_id,
});
@@ -1441,7 +1463,7 @@ fn getAbbrevTable(st: *DebugInfo, abbrev_offset: u64) !*const AbbrevTable {
}
}
try st.self_exe_file.seekTo(st.debug_abbrev.offset + abbrev_offset);
try st.abbrev_table_list.append(AbbrevTableHeader.{
try st.abbrev_table_list.append(AbbrevTableHeader{
.offset = abbrev_offset,
.table = try parseAbbrevTable(st),
});
@@ -1462,14 +1484,14 @@ fn parseDie(st: *DebugInfo, abbrev_table: *const AbbrevTable, is_64: bool) !Die
const abbrev_code = try readULeb128(in_stream);
const table_entry = getAbbrevTableEntry(abbrev_table, abbrev_code) orelse return error.InvalidDebugInfo;
var result = Die.{
var result = Die{
.tag_id = table_entry.tag_id,
.has_children = table_entry.has_children,
.attrs = ArrayList(Die.Attr).init(st.allocator()),
};
try result.attrs.resize(table_entry.attrs.len);
for (table_entry.attrs.toSliceConst()) |attr, i| {
result.attrs.items[i] = Die.Attr.{
result.attrs.items[i] = Die.Attr{
.id = attr.attr_id,
.value = try parseFormValue(st.allocator(), in_stream, attr.form_id, is_64),
};
@@ -1484,7 +1506,7 @@ fn getLineNumberInfoMacOs(di: *DebugInfo, symbol: MachoSymbol, target_address: u
errdefer _ = di.ofiles.remove(ofile);
const ofile_path = mem.toSliceConst(u8, di.strings.ptr + ofile.n_strx);
gop.kv.value = MachOFile.{
gop.kv.value = MachOFile{
.bytes = try std.io.readFileAllocAligned(di.ofiles.allocator, ofile_path, @alignOf(macho.mach_header_64)),
.sect_debug_info = null,
.sect_debug_line = null,
@@ -1575,7 +1597,7 @@ fn getLineNumberInfoMacOs(di: *DebugInfo, symbol: MachoSymbol, target_address: u
const dir_index = try readULeb128Mem(&ptr);
const mtime = try readULeb128Mem(&ptr);
const len_bytes = try readULeb128Mem(&ptr);
try file_entries.append(FileEntry.{
try file_entries.append(FileEntry{
.file_name = file_name,
.dir_index = dir_index,
.mtime = mtime,
@@ -1606,7 +1628,7 @@ fn getLineNumberInfoMacOs(di: *DebugInfo, symbol: MachoSymbol, target_address: u
const dir_index = try readULeb128Mem(&ptr);
const mtime = try readULeb128Mem(&ptr);
const len_bytes = try readULeb128Mem(&ptr);
try file_entries.append(FileEntry.{
try file_entries.append(FileEntry{
.file_name = file_name,
.dir_index = dir_index,
.mtime = mtime,
@@ -1748,7 +1770,7 @@ fn getLineNumberInfoLinux(di: *DebugInfo, compile_unit: *const CompileUnit, targ
const dir_index = try readULeb128(in_stream);
const mtime = try readULeb128(in_stream);
const len_bytes = try readULeb128(in_stream);
try file_entries.append(FileEntry.{
try file_entries.append(FileEntry{
.file_name = file_name,
.dir_index = dir_index,
.mtime = mtime,
@@ -1780,7 +1802,7 @@ fn getLineNumberInfoLinux(di: *DebugInfo, compile_unit: *const CompileUnit, targ
const dir_index = try readULeb128(in_stream);
const mtime = try readULeb128(in_stream);
const len_bytes = try readULeb128(in_stream);
try file_entries.append(FileEntry.{
try file_entries.append(FileEntry{
.file_name = file_name,
.dir_index = dir_index,
.mtime = mtime,
@@ -1897,7 +1919,7 @@ fn scanAllCompileUnits(st: *DebugInfo) !void {
},
else => return error.InvalidDebugInfo,
};
break :x PcRange.{
break :x PcRange{
.start = low_pc,
.end = pc_end,
};
@@ -1910,7 +1932,7 @@ fn scanAllCompileUnits(st: *DebugInfo) !void {
}
};
try st.compile_unit_list.append(CompileUnit.{
try st.compile_unit_list.append(CompileUnit{
.version = version,
.is_64 = is_64,
.pc_range = pc_range,
+6 -6
View File
@@ -18,7 +18,7 @@ pub const DynLib = switch (builtin.os) {
else => void,
};
pub const LinuxDynLib = struct.{
pub const LinuxDynLib = struct {
allocator: *mem.Allocator,
elf_lib: ElfLib,
fd: i32,
@@ -44,7 +44,7 @@ pub const LinuxDynLib = struct.{
const bytes = @intToPtr([*]align(std.os.page_size) u8, addr)[0..size];
return DynLib.{
return DynLib{
.allocator = allocator,
.elf_lib = try ElfLib.init(bytes),
.fd = fd,
@@ -64,7 +64,7 @@ pub const LinuxDynLib = struct.{
}
};
pub const ElfLib = struct.{
pub const ElfLib = struct {
strings: [*]u8,
syms: [*]elf.Sym,
hashtab: [*]linux.Elf_Symndx,
@@ -121,7 +121,7 @@ pub const ElfLib = struct.{
}
}
return ElfLib.{
return ElfLib{
.base = base,
.strings = maybe_strings orelse return error.ElfStringSectionNotFound,
.syms = maybe_syms orelse return error.ElfSymSectionNotFound,
@@ -169,14 +169,14 @@ fn checkver(def_arg: *elf.Verdef, vsym_arg: i32, vername: []const u8, strings: [
return mem.eql(u8, vername, cstr.toSliceConst(strings + aux.vda_name));
}
pub const WindowsDynLib = struct.{
pub const WindowsDynLib = struct {
allocator: *mem.Allocator,
dll: windows.HMODULE,
pub fn open(allocator: *mem.Allocator, path: []const u8) !WindowsDynLib {
const wpath = try win_util.sliceToPrefixedFileW(path);
return WindowsDynLib.{
return WindowsDynLib{
.allocator = allocator,
.dll = windows.LoadLibraryW(&wpath) orelse {
const err = windows.GetLastError();
+50 -50
View File
@@ -320,14 +320,14 @@ pub const ET_DYN = 3;
/// A core file.
pub const ET_CORE = 4;
pub const FileType = enum.{
pub const FileType = enum {
Relocatable,
Executable,
Shared,
Core,
};
pub const Arch = enum.{
pub const Arch = enum {
Sparc,
x86,
Mips,
@@ -339,7 +339,7 @@ pub const Arch = enum.{
AArch64,
};
pub const SectionHeader = struct.{
pub const SectionHeader = struct {
name: u32,
sh_type: u32,
flags: u64,
@@ -352,7 +352,7 @@ pub const SectionHeader = struct.{
ent_size: u64,
};
pub const Elf = struct.{
pub const Elf = struct {
in_file: os.File,
auto_close_stream: bool,
is_64: bool,
@@ -490,7 +490,7 @@ pub const Elf = struct.{
if (sh_entry_size != 40) return error.InvalidFormat;
for (elf.section_headers) |*elf_section| {
// TODO (multiple occurences) allow implicit cast from %u32 -> %u64 ?
// TODO (multiple occurrences) allow implicit cast from %u32 -> %u64 ?
elf_section.name = try in.readInt(elf.endian, u32);
elf_section.sh_type = try in.readInt(elf.endian, u32);
elf_section.flags = u64(try in.readInt(elf.endian, u32));
@@ -572,7 +572,7 @@ pub const Elf32_Section = u16;
pub const Elf64_Section = u16;
pub const Elf32_Versym = Elf32_Half;
pub const Elf64_Versym = Elf64_Half;
pub const Elf32_Ehdr = extern struct.{
pub const Elf32_Ehdr = extern struct {
e_ident: [EI_NIDENT]u8,
e_type: Elf32_Half,
e_machine: Elf32_Half,
@@ -588,7 +588,7 @@ pub const Elf32_Ehdr = extern struct.{
e_shnum: Elf32_Half,
e_shstrndx: Elf32_Half,
};
pub const Elf64_Ehdr = extern struct.{
pub const Elf64_Ehdr = extern struct {
e_ident: [EI_NIDENT]u8,
e_type: Elf64_Half,
e_machine: Elf64_Half,
@@ -604,7 +604,7 @@ pub const Elf64_Ehdr = extern struct.{
e_shnum: Elf64_Half,
e_shstrndx: Elf64_Half,
};
pub const Elf32_Shdr = extern struct.{
pub const Elf32_Shdr = extern struct {
sh_name: Elf32_Word,
sh_type: Elf32_Word,
sh_flags: Elf32_Word,
@@ -616,7 +616,7 @@ pub const Elf32_Shdr = extern struct.{
sh_addralign: Elf32_Word,
sh_entsize: Elf32_Word,
};
pub const Elf64_Shdr = extern struct.{
pub const Elf64_Shdr = extern struct {
sh_name: Elf64_Word,
sh_type: Elf64_Word,
sh_flags: Elf64_Xword,
@@ -628,18 +628,18 @@ pub const Elf64_Shdr = extern struct.{
sh_addralign: Elf64_Xword,
sh_entsize: Elf64_Xword,
};
pub const Elf32_Chdr = extern struct.{
pub const Elf32_Chdr = extern struct {
ch_type: Elf32_Word,
ch_size: Elf32_Word,
ch_addralign: Elf32_Word,
};
pub const Elf64_Chdr = extern struct.{
pub const Elf64_Chdr = extern struct {
ch_type: Elf64_Word,
ch_reserved: Elf64_Word,
ch_size: Elf64_Xword,
ch_addralign: Elf64_Xword,
};
pub const Elf32_Sym = extern struct.{
pub const Elf32_Sym = extern struct {
st_name: Elf32_Word,
st_value: Elf32_Addr,
st_size: Elf32_Word,
@@ -647,7 +647,7 @@ pub const Elf32_Sym = extern struct.{
st_other: u8,
st_shndx: Elf32_Section,
};
pub const Elf64_Sym = extern struct.{
pub const Elf64_Sym = extern struct {
st_name: Elf64_Word,
st_info: u8,
st_other: u8,
@@ -655,33 +655,33 @@ pub const Elf64_Sym = extern struct.{
st_value: Elf64_Addr,
st_size: Elf64_Xword,
};
pub const Elf32_Syminfo = extern struct.{
pub const Elf32_Syminfo = extern struct {
si_boundto: Elf32_Half,
si_flags: Elf32_Half,
};
pub const Elf64_Syminfo = extern struct.{
pub const Elf64_Syminfo = extern struct {
si_boundto: Elf64_Half,
si_flags: Elf64_Half,
};
pub const Elf32_Rel = extern struct.{
pub const Elf32_Rel = extern struct {
r_offset: Elf32_Addr,
r_info: Elf32_Word,
};
pub const Elf64_Rel = extern struct.{
pub const Elf64_Rel = extern struct {
r_offset: Elf64_Addr,
r_info: Elf64_Xword,
};
pub const Elf32_Rela = extern struct.{
pub const Elf32_Rela = extern struct {
r_offset: Elf32_Addr,
r_info: Elf32_Word,
r_addend: Elf32_Sword,
};
pub const Elf64_Rela = extern struct.{
pub const Elf64_Rela = extern struct {
r_offset: Elf64_Addr,
r_info: Elf64_Xword,
r_addend: Elf64_Sxword,
};
pub const Elf32_Phdr = extern struct.{
pub const Elf32_Phdr = extern struct {
p_type: Elf32_Word,
p_offset: Elf32_Off,
p_vaddr: Elf32_Addr,
@@ -691,7 +691,7 @@ pub const Elf32_Phdr = extern struct.{
p_flags: Elf32_Word,
p_align: Elf32_Word,
};
pub const Elf64_Phdr = extern struct.{
pub const Elf64_Phdr = extern struct {
p_type: Elf64_Word,
p_flags: Elf64_Word,
p_offset: Elf64_Off,
@@ -701,21 +701,21 @@ pub const Elf64_Phdr = extern struct.{
p_memsz: Elf64_Xword,
p_align: Elf64_Xword,
};
pub const Elf32_Dyn = extern struct.{
pub const Elf32_Dyn = extern struct {
d_tag: Elf32_Sword,
d_un: extern union.{
d_un: extern union {
d_val: Elf32_Word,
d_ptr: Elf32_Addr,
},
};
pub const Elf64_Dyn = extern struct.{
pub const Elf64_Dyn = extern struct {
d_tag: Elf64_Sxword,
d_un: extern union.{
d_un: extern union {
d_val: Elf64_Xword,
d_ptr: Elf64_Addr,
},
};
pub const Elf32_Verdef = extern struct.{
pub const Elf32_Verdef = extern struct {
vd_version: Elf32_Half,
vd_flags: Elf32_Half,
vd_ndx: Elf32_Half,
@@ -724,7 +724,7 @@ pub const Elf32_Verdef = extern struct.{
vd_aux: Elf32_Word,
vd_next: Elf32_Word,
};
pub const Elf64_Verdef = extern struct.{
pub const Elf64_Verdef = extern struct {
vd_version: Elf64_Half,
vd_flags: Elf64_Half,
vd_ndx: Elf64_Half,
@@ -733,111 +733,111 @@ pub const Elf64_Verdef = extern struct.{
vd_aux: Elf64_Word,
vd_next: Elf64_Word,
};
pub const Elf32_Verdaux = extern struct.{
pub const Elf32_Verdaux = extern struct {
vda_name: Elf32_Word,
vda_next: Elf32_Word,
};
pub const Elf64_Verdaux = extern struct.{
pub const Elf64_Verdaux = extern struct {
vda_name: Elf64_Word,
vda_next: Elf64_Word,
};
pub const Elf32_Verneed = extern struct.{
pub const Elf32_Verneed = extern struct {
vn_version: Elf32_Half,
vn_cnt: Elf32_Half,
vn_file: Elf32_Word,
vn_aux: Elf32_Word,
vn_next: Elf32_Word,
};
pub const Elf64_Verneed = extern struct.{
pub const Elf64_Verneed = extern struct {
vn_version: Elf64_Half,
vn_cnt: Elf64_Half,
vn_file: Elf64_Word,
vn_aux: Elf64_Word,
vn_next: Elf64_Word,
};
pub const Elf32_Vernaux = extern struct.{
pub const Elf32_Vernaux = extern struct {
vna_hash: Elf32_Word,
vna_flags: Elf32_Half,
vna_other: Elf32_Half,
vna_name: Elf32_Word,
vna_next: Elf32_Word,
};
pub const Elf64_Vernaux = extern struct.{
pub const Elf64_Vernaux = extern struct {
vna_hash: Elf64_Word,
vna_flags: Elf64_Half,
vna_other: Elf64_Half,
vna_name: Elf64_Word,
vna_next: Elf64_Word,
};
pub const Elf32_auxv_t = extern struct.{
pub const Elf32_auxv_t = extern struct {
a_type: u32,
a_un: extern union.{
a_un: extern union {
a_val: u32,
},
};
pub const Elf64_auxv_t = extern struct.{
pub const Elf64_auxv_t = extern struct {
a_type: u64,
a_un: extern union.{
a_un: extern union {
a_val: u64,
},
};
pub const Elf32_Nhdr = extern struct.{
pub const Elf32_Nhdr = extern struct {
n_namesz: Elf32_Word,
n_descsz: Elf32_Word,
n_type: Elf32_Word,
};
pub const Elf64_Nhdr = extern struct.{
pub const Elf64_Nhdr = extern struct {
n_namesz: Elf64_Word,
n_descsz: Elf64_Word,
n_type: Elf64_Word,
};
pub const Elf32_Move = extern struct.{
pub const Elf32_Move = extern struct {
m_value: Elf32_Xword,
m_info: Elf32_Word,
m_poffset: Elf32_Word,
m_repeat: Elf32_Half,
m_stride: Elf32_Half,
};
pub const Elf64_Move = extern struct.{
pub const Elf64_Move = extern struct {
m_value: Elf64_Xword,
m_info: Elf64_Xword,
m_poffset: Elf64_Xword,
m_repeat: Elf64_Half,
m_stride: Elf64_Half,
};
pub const Elf32_gptab = extern union.{
gt_header: extern struct.{
pub const Elf32_gptab = extern union {
gt_header: extern struct {
gt_current_g_value: Elf32_Word,
gt_unused: Elf32_Word,
},
gt_entry: extern struct.{
gt_entry: extern struct {
gt_g_value: Elf32_Word,
gt_bytes: Elf32_Word,
},
};
pub const Elf32_RegInfo = extern struct.{
pub const Elf32_RegInfo = extern struct {
ri_gprmask: Elf32_Word,
ri_cprmask: [4]Elf32_Word,
ri_gp_value: Elf32_Sword,
};
pub const Elf_Options = extern struct.{
pub const Elf_Options = extern struct {
kind: u8,
size: u8,
@"section": Elf32_Section,
info: Elf32_Word,
};
pub const Elf_Options_Hw = extern struct.{
pub const Elf_Options_Hw = extern struct {
hwp_flags1: Elf32_Word,
hwp_flags2: Elf32_Word,
};
pub const Elf32_Lib = extern struct.{
pub const Elf32_Lib = extern struct {
l_name: Elf32_Word,
l_time_stamp: Elf32_Word,
l_checksum: Elf32_Word,
l_version: Elf32_Word,
l_flags: Elf32_Word,
};
pub const Elf64_Lib = extern struct.{
pub const Elf64_Lib = extern struct {
l_name: Elf64_Word,
l_time_stamp: Elf64_Word,
l_checksum: Elf64_Word,
@@ -845,7 +845,7 @@ pub const Elf64_Lib = extern struct.{
l_flags: Elf64_Word,
};
pub const Elf32_Conflict = Elf32_Addr;
pub const Elf_MIPS_ABIFlags_v0 = extern struct.{
pub const Elf_MIPS_ABIFlags_v0 = extern struct {
version: Elf32_Half,
isa_level: u8,
isa_rev: u8,
+14 -14
View File
@@ -9,7 +9,7 @@ const Loop = std.event.Loop;
/// when buffer is empty, consumers suspend and are resumed by producers
/// when buffer is full, producers suspend and are resumed by consumers
pub fn Channel(comptime T: type) type {
return struct.{
return struct {
loop: *Loop,
getters: std.atomic.Queue(GetNode),
@@ -26,25 +26,25 @@ pub fn Channel(comptime T: type) type {
buffer_len: usize,
const SelfChannel = @This();
const GetNode = struct.{
const GetNode = struct {
tick_node: *Loop.NextTickNode,
data: Data,
const Data = union(enum).{
const Data = union(enum) {
Normal: Normal,
OrNull: OrNull,
};
const Normal = struct.{
const Normal = struct {
ptr: *T,
};
const OrNull = struct.{
const OrNull = struct {
ptr: *?T,
or_null: *std.atomic.Queue(*std.atomic.Queue(GetNode).Node).Node,
};
};
const PutNode = struct.{
const PutNode = struct {
data: T,
tick_node: *Loop.NextTickNode,
};
@@ -54,7 +54,7 @@ pub fn Channel(comptime T: type) type {
const buffer_nodes = try loop.allocator.alloc(T, capacity);
errdefer loop.allocator.free(buffer_nodes);
const self = try loop.allocator.create(SelfChannel.{
const self = try loop.allocator.create(SelfChannel{
.loop = loop,
.buffer_len = 0,
.buffer_nodes = buffer_nodes,
@@ -93,7 +93,7 @@ pub fn Channel(comptime T: type) type {
}
var my_tick_node = Loop.NextTickNode.init(@handle());
var queue_node = std.atomic.Queue(PutNode).Node.init(PutNode.{
var queue_node = std.atomic.Queue(PutNode).Node.init(PutNode{
.tick_node = &my_tick_node,
.data = data,
});
@@ -129,10 +129,10 @@ pub fn Channel(comptime T: type) type {
// so we can get rid of this extra result copy
var result: T = undefined;
var my_tick_node = Loop.NextTickNode.init(@handle());
var queue_node = std.atomic.Queue(GetNode).Node.init(GetNode.{
var queue_node = std.atomic.Queue(GetNode).Node.init(GetNode{
.tick_node = &my_tick_node,
.data = GetNode.Data.{
.Normal = GetNode.Normal.{ .ptr = &result },
.data = GetNode.Data{
.Normal = GetNode.Normal{ .ptr = &result },
},
});
@@ -181,10 +181,10 @@ pub fn Channel(comptime T: type) type {
var result: ?T = null;
var my_tick_node = Loop.NextTickNode.init(@handle());
var or_null_node = std.atomic.Queue(*std.atomic.Queue(GetNode).Node).Node.init(undefined);
var queue_node = std.atomic.Queue(GetNode).Node.init(GetNode.{
var queue_node = std.atomic.Queue(GetNode).Node.init(GetNode{
.tick_node = &my_tick_node,
.data = GetNode.Data.{
.OrNull = GetNode.OrNull.{
.data = GetNode.Data{
.OrNull = GetNode.OrNull{
.ptr = &result,
.or_null = &or_null_node,
},
+100 -95
View File
@@ -10,17 +10,17 @@ const Loop = event.Loop;
pub const RequestNode = std.atomic.Queue(Request).Node;
pub const Request = struct.{
pub const Request = struct {
msg: Msg,
finish: Finish,
pub const Finish = union(enum).{
pub const Finish = union(enum) {
TickNode: Loop.NextTickNode,
DeallocCloseOperation: *CloseOperation,
NoAction,
};
pub const Msg = union(enum).{
pub const Msg = union(enum) {
PWriteV: PWriteV,
PReadV: PReadV,
Open: Open,
@@ -28,7 +28,7 @@ pub const Request = struct.{
WriteFile: WriteFile,
End, // special - means the fs thread should exit
pub const PWriteV = struct.{
pub const PWriteV = struct {
fd: os.FileHandle,
iov: []const os.posix.iovec_const,
offset: usize,
@@ -37,7 +37,7 @@ pub const Request = struct.{
pub const Error = os.PosixWriteError;
};
pub const PReadV = struct.{
pub const PReadV = struct {
fd: os.FileHandle,
iov: []const os.posix.iovec,
offset: usize,
@@ -46,7 +46,7 @@ pub const Request = struct.{
pub const Error = os.PosixReadError;
};
pub const Open = struct.{
pub const Open = struct {
/// must be null terminated. TODO https://github.com/ziglang/zig/issues/265
path: []const u8,
flags: u32,
@@ -56,7 +56,7 @@ pub const Request = struct.{
pub const Error = os.File.OpenError;
};
pub const WriteFile = struct.{
pub const WriteFile = struct {
/// must be null terminated. TODO https://github.com/ziglang/zig/issues/265
path: []const u8,
contents: []const u8,
@@ -66,13 +66,13 @@ pub const Request = struct.{
pub const Error = os.File.OpenError || os.File.WriteError;
};
pub const Close = struct.{
pub const Close = struct {
fd: os.FileHandle,
};
};
};
pub const PWriteVError = error.{OutOfMemory} || os.File.WriteError;
pub const PWriteVError = error{OutOfMemory} || os.File.WriteError;
/// data - just the inner references - must live until pwritev promise completes.
pub async fn pwritev(loop: *Loop, fd: os.FileHandle, data: []const []const u8, offset: usize) PWriteVError!void {
@@ -88,7 +88,7 @@ pub async fn pwritev(loop: *Loop, fd: os.FileHandle, data: []const []const u8, o
defer loop.allocator.free(iovecs);
for (data) |buf, i| {
iovecs[i] = os.posix.iovec_const.{
iovecs[i] = os.posix.iovec_const{
.iov_base = buf.ptr,
.iov_len = buf.len,
};
@@ -124,11 +124,11 @@ pub async fn pwriteWindows(loop: *Loop, fd: os.FileHandle, data: []const u8, off
resume @handle();
}
var resume_node = Loop.ResumeNode.Basic.{
.base = Loop.ResumeNode.{
var resume_node = Loop.ResumeNode.Basic{
.base = Loop.ResumeNode{
.id = Loop.ResumeNode.Id.Basic,
.handle = @handle(),
.overlapped = windows.OVERLAPPED.{
.overlapped = windows.OVERLAPPED{
.Internal = 0,
.InternalHigh = 0,
.Offset = @truncate(u32, offset),
@@ -175,20 +175,20 @@ pub async fn pwritevPosix(
resume @handle();
}
var req_node = RequestNode.{
var req_node = RequestNode{
.prev = null,
.next = null,
.data = Request.{
.msg = Request.Msg.{
.PWriteV = Request.Msg.PWriteV.{
.data = Request{
.msg = Request.Msg{
.PWriteV = Request.Msg.PWriteV{
.fd = fd,
.iov = iovecs,
.offset = offset,
.result = undefined,
},
},
.finish = Request.Finish.{
.TickNode = Loop.NextTickNode.{
.finish = Request.Finish{
.TickNode = Loop.NextTickNode{
.prev = null,
.next = null,
.data = @handle(),
@@ -206,7 +206,7 @@ pub async fn pwritevPosix(
return req_node.data.msg.PWriteV.result;
}
pub const PReadVError = error.{OutOfMemory} || os.File.ReadError;
pub const PReadVError = error{OutOfMemory} || os.File.ReadError;
/// data - just the inner references - must live until preadv promise completes.
pub async fn preadv(loop: *Loop, fd: os.FileHandle, data: []const []u8, offset: usize) PReadVError!usize {
@@ -224,7 +224,7 @@ pub async fn preadv(loop: *Loop, fd: os.FileHandle, data: []const []u8, offset:
defer loop.allocator.free(iovecs);
for (data) |buf, i| {
iovecs[i] = os.posix.iovec.{
iovecs[i] = os.posix.iovec{
.iov_base = buf.ptr,
.iov_len = buf.len,
};
@@ -272,11 +272,11 @@ pub async fn preadWindows(loop: *Loop, fd: os.FileHandle, data: []u8, offset: u6
resume @handle();
}
var resume_node = Loop.ResumeNode.Basic.{
.base = Loop.ResumeNode.{
var resume_node = Loop.ResumeNode.Basic{
.base = Loop.ResumeNode{
.id = Loop.ResumeNode.Id.Basic,
.handle = @handle(),
.overlapped = windows.OVERLAPPED.{
.overlapped = windows.OVERLAPPED{
.Internal = 0,
.InternalHigh = 0,
.Offset = @truncate(u32, offset),
@@ -322,20 +322,20 @@ pub async fn preadvPosix(
resume @handle();
}
var req_node = RequestNode.{
var req_node = RequestNode{
.prev = null,
.next = null,
.data = Request.{
.msg = Request.Msg.{
.PReadV = Request.Msg.PReadV.{
.data = Request{
.msg = Request.Msg{
.PReadV = Request.Msg.PReadV{
.fd = fd,
.iov = iovecs,
.offset = offset,
.result = undefined,
},
},
.finish = Request.Finish.{
.TickNode = Loop.NextTickNode.{
.finish = Request.Finish{
.TickNode = Loop.NextTickNode{
.prev = null,
.next = null,
.data = @handle(),
@@ -366,20 +366,20 @@ pub async fn openPosix(
const path_c = try std.os.toPosixPath(path);
var req_node = RequestNode.{
var req_node = RequestNode{
.prev = null,
.next = null,
.data = Request.{
.msg = Request.Msg.{
.Open = Request.Msg.Open.{
.data = Request{
.msg = Request.Msg{
.Open = Request.Msg.Open{
.path = path_c[0..path.len],
.flags = flags,
.mode = mode,
.result = undefined,
},
},
.finish = Request.Finish.{
.TickNode = Loop.NextTickNode.{
.finish = Request.Finish{
.TickNode = Loop.NextTickNode{
.prev = null,
.next = null,
.data = @handle(),
@@ -472,32 +472,32 @@ pub async fn openReadWrite(
/// `CloseOperation.finish`.
/// If you call `setHandle` then finishing will close the fd; otherwise finishing
/// will deallocate the `CloseOperation`.
pub const CloseOperation = struct.{
pub const CloseOperation = struct {
loop: *Loop,
os_data: OsData,
const OsData = switch (builtin.os) {
builtin.Os.linux, builtin.Os.macosx => OsDataPosix,
builtin.Os.windows => struct.{
builtin.Os.windows => struct {
handle: ?os.FileHandle,
},
else => @compileError("Unsupported OS"),
};
const OsDataPosix = struct.{
const OsDataPosix = struct {
have_fd: bool,
close_req_node: RequestNode,
};
pub fn start(loop: *Loop) (error.{OutOfMemory}!*CloseOperation) {
pub fn start(loop: *Loop) (error{OutOfMemory}!*CloseOperation) {
const self = try loop.allocator.createOne(CloseOperation);
self.* = CloseOperation.{
self.* = CloseOperation{
.loop = loop,
.os_data = switch (builtin.os) {
builtin.Os.linux, builtin.Os.macosx => initOsDataPosix(self),
builtin.Os.windows => OsData.{ .handle = null },
builtin.Os.windows => OsData{ .handle = null },
else => @compileError("Unsupported OS"),
},
};
@@ -505,16 +505,16 @@ pub const CloseOperation = struct.{
}
fn initOsDataPosix(self: *CloseOperation) OsData {
return OsData.{
return OsData{
.have_fd = false,
.close_req_node = RequestNode.{
.close_req_node = RequestNode{
.prev = null,
.next = null,
.data = Request.{
.msg = Request.Msg.{
.Close = Request.Msg.Close.{ .fd = undefined },
.data = Request{
.msg = Request.Msg{
.Close = Request.Msg.Close{ .fd = undefined },
},
.finish = Request.Finish.{ .DeallocCloseOperation = self },
.finish = Request.Finish{ .DeallocCloseOperation = self },
},
},
};
@@ -627,20 +627,20 @@ async fn writeFileModeThread(loop: *Loop, path: []const u8, contents: []const u8
const path_with_null = try std.cstr.addNullByte(loop.allocator, path);
defer loop.allocator.free(path_with_null);
var req_node = RequestNode.{
var req_node = RequestNode{
.prev = null,
.next = null,
.data = Request.{
.msg = Request.Msg.{
.WriteFile = Request.Msg.WriteFile.{
.data = Request{
.msg = Request.Msg{
.WriteFile = Request.Msg.WriteFile{
.path = path_with_null[0..path.len],
.contents = contents,
.mode = mode,
.result = undefined,
},
},
.finish = Request.Finish.{
.TickNode = Loop.NextTickNode.{
.finish = Request.Finish{
.TickNode = Loop.NextTickNode{
.prev = null,
.next = null,
.data = @handle(),
@@ -674,7 +674,7 @@ pub async fn readFile(loop: *Loop, file_path: []const u8, max_size: usize) ![]u8
while (true) {
try list.ensureCapacity(list.len + os.page_size);
const buf = list.items[list.len..];
const buf_array = [][]u8.{buf};
const buf_array = [][]u8{buf};
const amt = try await (async preadv(loop, fd, buf_array, list.len) catch unreachable);
list.len += amt;
if (list.len > max_size) {
@@ -686,12 +686,12 @@ pub async fn readFile(loop: *Loop, file_path: []const u8, max_size: usize) ![]u8
}
}
pub const WatchEventId = enum.{
pub const WatchEventId = enum {
CloseWrite,
Delete,
};
pub const WatchEventError = error.{
pub const WatchEventError = error{
UserResourceLimitReached,
SystemResources,
AccessDenied,
@@ -699,17 +699,17 @@ pub const WatchEventError = error.{
};
pub fn Watch(comptime V: type) type {
return struct.{
return struct {
channel: *event.Channel(Event.Error!Event),
os_data: OsData,
const OsData = switch (builtin.os) {
builtin.Os.macosx => struct.{
builtin.Os.macosx => struct {
file_table: FileTable,
table_lock: event.Lock,
const FileTable = std.AutoHashMap([]const u8, *Put);
const Put = struct.{
const Put = struct {
putter: promise,
value_ptr: *V,
};
@@ -721,7 +721,7 @@ pub fn Watch(comptime V: type) type {
else => @compileError("Unsupported OS"),
};
const WindowsOsData = struct.{
const WindowsOsData = struct {
table_lock: event.Lock,
dir_table: DirTable,
all_putters: std.atomic.Queue(promise),
@@ -730,14 +730,14 @@ pub fn Watch(comptime V: type) type {
const DirTable = std.AutoHashMap([]const u8, *Dir);
const FileTable = std.AutoHashMap([]const u16, V);
const Dir = struct.{
const Dir = struct {
putter: promise,
file_table: FileTable,
table_lock: event.Lock,
};
};
const LinuxOsData = struct.{
const LinuxOsData = struct {
putter: promise,
inotify_fd: i32,
wd_table: WdTable,
@@ -746,7 +746,7 @@ pub fn Watch(comptime V: type) type {
const WdTable = std.AutoHashMap(i32, Dir);
const FileTable = std.AutoHashMap([]const u8, V);
const Dir = struct.{
const Dir = struct {
dirname: []const u8,
file_table: FileTable,
};
@@ -756,7 +756,7 @@ pub fn Watch(comptime V: type) type {
const Self = @This();
pub const Event = struct.{
pub const Event = struct {
id: Id,
data: V,
@@ -781,9 +781,9 @@ pub fn Watch(comptime V: type) type {
builtin.Os.windows => {
const self = try loop.allocator.createOne(Self);
errdefer loop.allocator.destroy(self);
self.* = Self.{
self.* = Self{
.channel = channel,
.os_data = OsData.{
.os_data = OsData{
.table_lock = event.Lock.init(loop),
.dir_table = OsData.DirTable.init(loop.allocator),
.ref_count = std.atomic.Int(usize).init(1),
@@ -797,9 +797,9 @@ pub fn Watch(comptime V: type) type {
const self = try loop.allocator.createOne(Self);
errdefer loop.allocator.destroy(self);
self.* = Self.{
self.* = Self{
.channel = channel,
.os_data = OsData.{
.os_data = OsData{
.table_lock = event.Lock.init(loop),
.file_table = OsData.FileTable.init(loop.allocator),
},
@@ -908,7 +908,7 @@ pub fn Watch(comptime V: type) type {
}
var value_copy = value;
var put = OsData.Put.{
var put = OsData.Put{
.putter = @handle(),
.value_ptr = &value_copy,
};
@@ -928,12 +928,12 @@ pub fn Watch(comptime V: type) type {
) catch unreachable)) |kev| {
// TODO handle EV_ERROR
if (kev.fflags & posix.NOTE_DELETE != 0) {
await (async self.channel.put(Self.Event.{
await (async self.channel.put(Self.Event{
.id = Event.Id.Delete,
.data = value_copy,
}) catch unreachable);
} else if (kev.fflags & posix.NOTE_WRITE != 0) {
await (async self.channel.put(Self.Event.{
await (async self.channel.put(Self.Event{
.id = Event.Id.CloseWrite,
.data = value_copy,
}) catch unreachable);
@@ -943,7 +943,7 @@ pub fn Watch(comptime V: type) type {
error.ProcessNotFound => unreachable,
error.AccessDenied, error.SystemResources => {
// TODO https://github.com/ziglang/zig/issues/769
const casted_err = @errSetCast(error.{
const casted_err = @errSetCast(error{
AccessDenied,
SystemResources,
}, err);
@@ -978,7 +978,7 @@ pub fn Watch(comptime V: type) type {
const gop = try self.os_data.wd_table.getOrPut(wd);
if (!gop.found_existing) {
gop.kv.value = OsData.Dir.{
gop.kv.value = OsData.Dir{
.dirname = dirname_with_null,
.file_table = OsData.FileTable.init(self.channel.loop.allocator),
};
@@ -1060,7 +1060,7 @@ pub fn Watch(comptime V: type) type {
const dir = try self.channel.loop.allocator.createOne(OsData.Dir);
errdefer self.channel.loop.allocator.destroy(dir);
dir.* = OsData.Dir.{
dir.* = OsData.Dir{
.file_table = OsData.FileTable.init(self.channel.loop.allocator),
.table_lock = event.Lock.init(self.channel.loop),
.putter = undefined,
@@ -1089,7 +1089,7 @@ pub fn Watch(comptime V: type) type {
defer os.close(dir_handle);
var putter_node = std.atomic.Queue(promise).Node.{
var putter_node = std.atomic.Queue(promise).Node{
.data = @handle(),
.prev = null,
.next = null,
@@ -1097,11 +1097,11 @@ pub fn Watch(comptime V: type) type {
self.os_data.all_putters.put(&putter_node);
defer _ = self.os_data.all_putters.remove(&putter_node);
var resume_node = Loop.ResumeNode.Basic.{
.base = Loop.ResumeNode.{
var resume_node = Loop.ResumeNode.Basic{
.base = Loop.ResumeNode{
.id = Loop.ResumeNode.Id.Basic,
.handle = @handle(),
.overlapped = windows.OVERLAPPED.{
.overlapped = windows.OVERLAPPED{
.Internal = 0,
.InternalHigh = 0,
.Offset = 0,
@@ -1179,7 +1179,7 @@ pub fn Watch(comptime V: type) type {
}
};
if (user_value) |v| {
await (async self.channel.put(Event.{
await (async self.channel.put(Event{
.id = id,
.data = v,
}) catch unreachable);
@@ -1203,9 +1203,9 @@ pub fn Watch(comptime V: type) type {
const loop = channel.loop;
var watch = Self.{
var watch = Self{
.channel = channel,
.os_data = OsData.{
.os_data = OsData{
.putter = @handle(),
.inotify_fd = inotify_fd,
.wd_table = OsData.WdTable.init(loop.allocator),
@@ -1259,7 +1259,7 @@ pub fn Watch(comptime V: type) type {
}
};
if (user_value) |v| {
await (async channel.put(Event.{
await (async channel.put(Event{
.id = WatchEventId.CloseWrite,
.data = v,
}) catch unreachable);
@@ -1297,6 +1297,11 @@ pub fn Watch(comptime V: type) type {
const test_tmp_dir = "std_event_fs_test";
test "write a file, watch it, write it again" {
if (builtin.os == builtin.Os.windows) {
// TODO this test is disabled on windows until the coroutine rewrite is finished.
// https://github.com/ziglang/zig/issues/1363
return error.SkipZigTest;
}
var da = std.heap.DirectAllocator.init();
defer da.deinit();
@@ -1310,7 +1315,7 @@ test "write a file, watch it, write it again" {
try loop.initMultiThreaded(allocator);
defer loop.deinit();
var result: error!void = error.ResultNeverWritten;
var result: anyerror!void = error.ResultNeverWritten;
const handle = try async<allocator> testFsWatchCantFail(&loop, &result);
defer cancel handle;
@@ -1318,8 +1323,8 @@ test "write a file, watch it, write it again" {
return result;
}
async fn testFsWatchCantFail(loop: *Loop, result: *(error!void)) void {
result.* = await async testFsWatch(loop) catch unreachable;
async fn testFsWatchCantFail(loop: *Loop, result: *(anyerror!void)) void {
result.* = await (async testFsWatch(loop) catch unreachable);
}
async fn testFsWatch(loop: *Loop) !void {
@@ -1353,7 +1358,7 @@ async fn testFsWatch(loop: *Loop) !void {
{
defer os.close(fd);
try await try async pwritev(loop, fd, []const []const u8.{"lorem ipsum"}, line2_offset);
try await try async pwritev(loop, fd, []const []const u8{"lorem ipsum"}, line2_offset);
}
ev_consumed = true;
@@ -1370,7 +1375,7 @@ async fn testFsWatch(loop: *Loop) !void {
// TODO test deleting the file and then re-adding it. we should get events for both
}
pub const OutStream = struct.{
pub const OutStream = struct {
fd: os.FileHandle,
stream: Stream,
loop: *Loop,
@@ -1380,11 +1385,11 @@ pub const OutStream = struct.{
pub const Stream = event.io.OutStream(Error);
pub fn init(loop: *Loop, fd: os.FileHandle, offset: usize) OutStream {
return OutStream.{
return OutStream{
.fd = fd,
.loop = loop,
.offset = offset,
.stream = Stream.{ .writeFn = writeFn },
.stream = Stream{ .writeFn = writeFn },
};
}
@@ -1392,11 +1397,11 @@ pub const OutStream = struct.{
const self = @fieldParentPtr(OutStream, "stream", out_stream);
const offset = self.offset;
self.offset += bytes.len;
return await (async pwritev(self.loop, self.fd, [][]const u8.{bytes}, offset) catch unreachable);
return await (async pwritev(self.loop, self.fd, [][]const u8{bytes}, offset) catch unreachable);
}
};
pub const InStream = struct.{
pub const InStream = struct {
fd: os.FileHandle,
stream: Stream,
loop: *Loop,
@@ -1406,17 +1411,17 @@ pub const InStream = struct.{
pub const Stream = event.io.InStream(Error);
pub fn init(loop: *Loop, fd: os.FileHandle, offset: usize) InStream {
return InStream.{
return InStream{
.fd = fd,
.loop = loop,
.offset = offset,
.stream = Stream.{ .readFn = readFn },
.stream = Stream{ .readFn = readFn },
};
}
async<*mem.Allocator> fn readFn(in_stream: *Stream, bytes: []u8) Error!usize {
const self = @fieldParentPtr(InStream, "stream", in_stream);
const amt = try await (async preadv(self.loop, self.fd, [][]u8.{bytes}, self.offset) catch unreachable);
const amt = try await (async preadv(self.loop, self.fd, [][]u8{bytes}, self.offset) catch unreachable);
self.offset += amt;
return amt;
}
+3 -3
View File
@@ -11,7 +11,7 @@ const Loop = std.event.Loop;
/// and then are resumed when resolve() is called.
/// At this point the value remains forever available, and another resolve() is not allowed.
pub fn Future(comptime T: type) type {
return struct.{
return struct {
lock: Lock,
data: T,
@@ -25,7 +25,7 @@ pub fn Future(comptime T: type) type {
const Queue = std.atomic.Queue(promise);
pub fn init(loop: *Loop) Self {
return Self.{
return Self{
.lock = Lock.initLocked(loop),
.available = 0,
.data = undefined,
@@ -78,7 +78,7 @@ pub fn Future(comptime T: type) type {
pub fn resolve(self: *Self) void {
const prev = @atomicRmw(u8, &self.available, AtomicRmwOp.Xchg, 2, AtomicOrder.SeqCst);
assert(prev == 0 or prev == 1); // resolve() called twice
Lock.Held.release(Lock.Held.{ .lock = &self.lock });
Lock.Held.release(Lock.Held{ .lock = &self.lock });
}
};
}
+10 -10
View File
@@ -8,7 +8,7 @@ const assert = std.debug.assert;
/// ReturnType must be `void` or `E!void`
pub fn Group(comptime ReturnType: type) type {
return struct.{
return struct {
coro_stack: Stack,
alloc_stack: Stack,
lock: Lock,
@@ -22,7 +22,7 @@ pub fn Group(comptime ReturnType: type) type {
const Stack = std.atomic.Stack(promise->ReturnType);
pub fn init(loop: *Loop) Self {
return Self.{
return Self{
.coro_stack = Stack.init(),
.alloc_stack = Stack.init(),
.lock = Lock.init(loop),
@@ -41,8 +41,8 @@ pub fn Group(comptime ReturnType: type) type {
}
/// Add a promise to the group. Thread-safe.
pub fn add(self: *Self, handle: promise->ReturnType) (error.{OutOfMemory}!void) {
const node = try self.lock.loop.allocator.create(Stack.Node.{
pub fn add(self: *Self, handle: promise->ReturnType) (error{OutOfMemory}!void) {
const node = try self.lock.loop.allocator.create(Stack.Node{
.next = undefined,
.data = handle,
});
@@ -61,8 +61,8 @@ pub fn Group(comptime ReturnType: type) type {
/// This is equivalent to an async call, but the async function is added to the group, instead
/// of returning a promise. func must be async and have return type ReturnType.
/// Thread-safe.
pub fn call(self: *Self, comptime func: var, args: ...) (error.{OutOfMemory}!void) {
const S = struct.{
pub fn call(self: *Self, comptime func: var, args: ...) (error{OutOfMemory}!void) {
const S = struct {
async fn asyncFunc(node: **Stack.Node, args2: ...) ReturnType {
// TODO this is a hack to make the memory following be inside the coro frame
suspend {
@@ -78,7 +78,7 @@ pub fn Group(comptime ReturnType: type) type {
};
var node: *Stack.Node = undefined;
const handle = try async<self.lock.loop.allocator> S.asyncFunc(&node, args);
node.* = Stack.Node.{
node.* = Stack.Node{
.next = undefined,
.data = handle,
};
@@ -144,7 +144,7 @@ async fn testGroup(loop: *Loop) void {
await (async group.wait() catch @panic("memory"));
assert(count == 11);
var another = Group(error!void).init(loop);
var another = Group(anyerror!void).init(loop);
another.add(async somethingElse() catch @panic("memory")) catch @panic("memory");
another.call(doSomethingThatFails) catch @panic("memory");
std.debug.assertError(await (async another.wait() catch @panic("memory")), error.ItBroke);
@@ -162,7 +162,7 @@ async fn increaseByTen(count: *usize) void {
}
}
async fn doSomethingThatFails() error!void {}
async fn somethingElse() error!void {
async fn doSomethingThatFails() anyerror!void {}
async fn somethingElse() anyerror!void {
return error.ItBroke;
}
+6 -4
View File
@@ -5,7 +5,7 @@ const assert = std.debug.assert;
const mem = std.mem;
pub fn InStream(comptime ReadError: type) type {
return struct.{
return struct {
const Self = @This();
pub const Error = ReadError;
@@ -53,16 +53,18 @@ pub fn InStream(comptime ReadError: type) type {
return mem.readInt(bytes, T, endian);
}
pub async fn readStruct(self: *Self, comptime T: type, ptr: *T) !void {
pub async fn readStruct(self: *Self, comptime T: type) !T {
// Only extern and packed structs have defined in-memory layout.
comptime assert(@typeInfo(T).Struct.layout != builtin.TypeInfo.ContainerLayout.Auto);
return await (async self.readNoEof(@sliceToBytes((*[1]T)(ptr)[0..])) catch unreachable);
var res: [1]T = undefined;
try await (async self.readNoEof(@sliceToBytes(res[0..])) catch unreachable);
return res[0];
}
};
}
pub fn OutStream(comptime WriteError: type) type {
return struct.{
return struct {
const Self = @This();
pub const Error = WriteError;
+10 -10
View File
@@ -10,7 +10,7 @@ const Loop = std.event.Loop;
/// coroutines which are waiting for the lock are suspended, and
/// are resumed when the lock is released, in order.
/// Allows only one actor to hold the lock.
pub const Lock = struct.{
pub const Lock = struct {
loop: *Loop,
shared_bit: u8, // TODO make this a bool
queue: Queue,
@@ -18,7 +18,7 @@ pub const Lock = struct.{
const Queue = std.atomic.Queue(promise);
pub const Held = struct.{
pub const Held = struct {
lock: *Lock,
pub fn release(self: Held) void {
@@ -66,7 +66,7 @@ pub const Lock = struct.{
};
pub fn init(loop: *Loop) Lock {
return Lock.{
return Lock{
.loop = loop,
.shared_bit = 0,
.queue = Queue.init(),
@@ -75,7 +75,7 @@ pub const Lock = struct.{
}
pub fn initLocked(loop: *Loop) Lock {
return Lock.{
return Lock{
.loop = loop,
.shared_bit = 1,
.queue = Queue.init(),
@@ -117,7 +117,7 @@ pub const Lock = struct.{
}
}
return Held.{ .lock = self };
return Held{ .lock = self };
}
};
@@ -138,7 +138,7 @@ test "std.event.Lock" {
defer cancel handle;
loop.run();
assert(mem.eql(i32, shared_test_data, [1]i32.{3 * @intCast(i32, shared_test_data.len)} ** shared_test_data.len));
assert(mem.eql(i32, shared_test_data, [1]i32{3 * @intCast(i32, shared_test_data.len)} ** shared_test_data.len));
}
async fn testLock(loop: *Loop, lock: *Lock) void {
@@ -147,7 +147,7 @@ async fn testLock(loop: *Loop, lock: *Lock) void {
resume @handle();
}
const handle1 = async lockRunner(lock) catch @panic("out of memory");
var tick_node1 = Loop.NextTickNode.{
var tick_node1 = Loop.NextTickNode{
.prev = undefined,
.next = undefined,
.data = handle1,
@@ -155,7 +155,7 @@ async fn testLock(loop: *Loop, lock: *Lock) void {
loop.onNextTick(&tick_node1);
const handle2 = async lockRunner(lock) catch @panic("out of memory");
var tick_node2 = Loop.NextTickNode.{
var tick_node2 = Loop.NextTickNode{
.prev = undefined,
.next = undefined,
.data = handle2,
@@ -163,7 +163,7 @@ async fn testLock(loop: *Loop, lock: *Lock) void {
loop.onNextTick(&tick_node2);
const handle3 = async lockRunner(lock) catch @panic("out of memory");
var tick_node3 = Loop.NextTickNode.{
var tick_node3 = Loop.NextTickNode{
.prev = undefined,
.next = undefined,
.data = handle3,
@@ -175,7 +175,7 @@ async fn testLock(loop: *Loop, lock: *Lock) void {
await handle3;
}
var shared_test_data = [1]i32.{0} ** 10;
var shared_test_data = [1]i32{0} ** 10;
var shared_test_index: usize = 0;
async fn lockRunner(lock: *Lock) void {
+4 -4
View File
@@ -6,13 +6,13 @@ const Loop = std.event.Loop;
/// coroutines which are waiting for the lock are suspended, and
/// are resumed when the lock is released, in order.
pub fn Locked(comptime T: type) type {
return struct.{
return struct {
lock: Lock,
private_data: T,
const Self = @This();
pub const HeldLock = struct.{
pub const HeldLock = struct {
value: *T,
held: Lock.Held,
@@ -22,7 +22,7 @@ pub fn Locked(comptime T: type) type {
};
pub fn init(loop: *Loop, data: T) Self {
return Self.{
return Self{
.lock = Lock.init(loop),
.private_data = data,
};
@@ -33,7 +33,7 @@ pub fn Locked(comptime T: type) type {
}
pub async fn acquire(self: *Self) HeldLock {
return HeldLock.{
return HeldLock{
// TODO guaranteed allocation elision
.held = await (async self.lock.acquire() catch unreachable),
.value = &self.private_data,
+47 -47
View File
@@ -10,7 +10,7 @@ const posix = os.posix;
const windows = os.windows;
const maxInt = std.math.maxInt;
pub const Loop = struct.{
pub const Loop = struct {
allocator: *mem.Allocator,
next_tick_queue: std.atomic.Queue(promise),
os_data: OsData,
@@ -25,13 +25,13 @@ pub const Loop = struct.{
pub const NextTickNode = std.atomic.Queue(promise).Node;
pub const ResumeNode = struct.{
pub const ResumeNode = struct {
id: Id,
handle: promise,
overlapped: Overlapped,
pub const overlapped_init = switch (builtin.os) {
builtin.Os.windows => windows.OVERLAPPED.{
builtin.Os.windows => windows.OVERLAPPED{
.Internal = 0,
.InternalHigh = 0,
.Offset = 0,
@@ -42,7 +42,7 @@ pub const Loop = struct.{
};
pub const Overlapped = @typeOf(overlapped_init);
pub const Id = enum.{
pub const Id = enum {
Basic,
Stop,
EventFd,
@@ -50,35 +50,35 @@ pub const Loop = struct.{
pub const EventFd = switch (builtin.os) {
builtin.Os.macosx => MacOsEventFd,
builtin.Os.linux => struct.{
builtin.Os.linux => struct {
base: ResumeNode,
epoll_op: u32,
eventfd: i32,
},
builtin.Os.windows => struct.{
builtin.Os.windows => struct {
base: ResumeNode,
completion_key: usize,
},
else => @compileError("unsupported OS"),
};
const MacOsEventFd = struct.{
const MacOsEventFd = struct {
base: ResumeNode,
kevent: posix.Kevent,
};
pub const Basic = switch (builtin.os) {
builtin.Os.macosx => MacOsBasic,
builtin.Os.linux => struct.{
builtin.Os.linux => struct {
base: ResumeNode,
},
builtin.Os.windows => struct.{
builtin.Os.windows => struct {
base: ResumeNode,
},
else => @compileError("unsupported OS"),
};
const MacOsBasic = struct.{
const MacOsBasic = struct {
base: ResumeNode,
kev: posix.Kevent,
};
@@ -104,7 +104,7 @@ pub const Loop = struct.{
/// Thread count is the total thread count. The thread pool size will be
/// max(thread_count - 1, 0)
fn initInternal(self: *Loop, allocator: *mem.Allocator, thread_count: usize) !void {
self.* = Loop.{
self.* = Loop{
.pending_event_count = 1,
.allocator = allocator,
.os_data = undefined,
@@ -112,7 +112,7 @@ pub const Loop = struct.{
.extra_threads = undefined,
.available_eventfd_resume_nodes = std.atomic.Stack(ResumeNode.EventFd).init(),
.eventfd_resume_nodes = undefined,
.final_resume_node = ResumeNode.{
.final_resume_node = ResumeNode{
.id = ResumeNode.Id.Stop,
.handle = undefined,
.overlapped = ResumeNode.overlapped_init,
@@ -141,7 +141,7 @@ pub const Loop = struct.{
os.SpawnThreadError || os.LinuxEpollCtlError || os.BsdKEventError ||
os.WindowsCreateIoCompletionPortError;
const wakeup_bytes = []u8.{0x1} ** 8;
const wakeup_bytes = []u8{0x1} ** 8;
fn initOsData(self: *Loop, extra_thread_count: usize) InitOsDataError!void {
switch (builtin.os) {
@@ -150,10 +150,10 @@ pub const Loop = struct.{
self.os_data.fs_queue_item = 0;
// we need another thread for the file system because Linux does not have an async
// file system I/O API.
self.os_data.fs_end_request = fs.RequestNode.{
self.os_data.fs_end_request = fs.RequestNode{
.prev = undefined,
.next = undefined,
.data = fs.Request.{
.data = fs.Request{
.msg = fs.Request.Msg.End,
.finish = fs.Request.Finish.NoAction,
},
@@ -163,9 +163,9 @@ pub const Loop = struct.{
while (self.available_eventfd_resume_nodes.pop()) |node| os.close(node.data.eventfd);
}
for (self.eventfd_resume_nodes) |*eventfd_node| {
eventfd_node.* = std.atomic.Stack(ResumeNode.EventFd).Node.{
.data = ResumeNode.EventFd.{
.base = ResumeNode.{
eventfd_node.* = std.atomic.Stack(ResumeNode.EventFd).Node{
.data = ResumeNode.EventFd{
.base = ResumeNode{
.id = ResumeNode.Id.EventFd,
.handle = undefined,
.overlapped = ResumeNode.overlapped_init,
@@ -184,9 +184,9 @@ pub const Loop = struct.{
self.os_data.final_eventfd = try os.linuxEventFd(0, posix.EFD_CLOEXEC | posix.EFD_NONBLOCK);
errdefer os.close(self.os_data.final_eventfd);
self.os_data.final_eventfd_event = posix.epoll_event.{
self.os_data.final_eventfd_event = posix.epoll_event{
.events = posix.EPOLLIN,
.data = posix.epoll_data.{ .ptr = @ptrToInt(&self.final_resume_node) },
.data = posix.epoll_data{ .ptr = @ptrToInt(&self.final_resume_node) },
};
try os.linuxEpollCtl(
self.os_data.epollfd,
@@ -224,10 +224,10 @@ pub const Loop = struct.{
self.os_data.fs_queue = std.atomic.Queue(fs.Request).init();
// we need another thread for the file system because Darwin does not have an async
// file system I/O API.
self.os_data.fs_end_request = fs.RequestNode.{
self.os_data.fs_end_request = fs.RequestNode{
.prev = undefined,
.next = undefined,
.data = fs.Request.{
.data = fs.Request{
.msg = fs.Request.Msg.End,
.finish = fs.Request.Finish.NoAction,
},
@@ -236,15 +236,15 @@ pub const Loop = struct.{
const empty_kevs = ([*]posix.Kevent)(undefined)[0..0];
for (self.eventfd_resume_nodes) |*eventfd_node, i| {
eventfd_node.* = std.atomic.Stack(ResumeNode.EventFd).Node.{
.data = ResumeNode.EventFd.{
.base = ResumeNode.{
eventfd_node.* = std.atomic.Stack(ResumeNode.EventFd).Node{
.data = ResumeNode.EventFd{
.base = ResumeNode{
.id = ResumeNode.Id.EventFd,
.handle = undefined,
.overlapped = ResumeNode.overlapped_init,
},
// this one is for sending events
.kevent = posix.Kevent.{
.kevent = posix.Kevent{
.ident = i,
.filter = posix.EVFILT_USER,
.flags = posix.EV_CLEAR | posix.EV_ADD | posix.EV_DISABLE,
@@ -264,7 +264,7 @@ pub const Loop = struct.{
// Pre-add so that we cannot get error.SystemResources
// later when we try to activate it.
self.os_data.final_kevent = posix.Kevent.{
self.os_data.final_kevent = posix.Kevent{
.ident = extra_thread_count,
.filter = posix.EVFILT_USER,
.flags = posix.EV_ADD | posix.EV_DISABLE,
@@ -277,7 +277,7 @@ pub const Loop = struct.{
self.os_data.final_kevent.flags = posix.EV_ENABLE;
self.os_data.final_kevent.fflags = posix.NOTE_TRIGGER;
self.os_data.fs_kevent_wake = posix.Kevent.{
self.os_data.fs_kevent_wake = posix.Kevent{
.ident = 0,
.filter = posix.EVFILT_USER,
.flags = posix.EV_ADD | posix.EV_ENABLE,
@@ -286,7 +286,7 @@ pub const Loop = struct.{
.udata = undefined,
};
self.os_data.fs_kevent_wait = posix.Kevent.{
self.os_data.fs_kevent_wait = posix.Kevent{
.ident = 0,
.filter = posix.EVFILT_USER,
.flags = posix.EV_ADD | posix.EV_CLEAR,
@@ -323,9 +323,9 @@ pub const Loop = struct.{
errdefer os.close(self.os_data.io_port);
for (self.eventfd_resume_nodes) |*eventfd_node, i| {
eventfd_node.* = std.atomic.Stack(ResumeNode.EventFd).Node.{
.data = ResumeNode.EventFd.{
.base = ResumeNode.{
eventfd_node.* = std.atomic.Stack(ResumeNode.EventFd).Node{
.data = ResumeNode.EventFd{
.base = ResumeNode{
.id = ResumeNode.Id.EventFd,
.handle = undefined,
.overlapped = ResumeNode.overlapped_init,
@@ -396,9 +396,9 @@ pub const Loop = struct.{
pub fn linuxModFd(self: *Loop, fd: i32, op: u32, flags: u32, resume_node: *ResumeNode) !void {
assert(flags & posix.EPOLLET == posix.EPOLLET);
var ev = os.linux.epoll_event.{
var ev = os.linux.epoll_event{
.events = flags,
.data = os.linux.epoll_data.{ .ptr = @ptrToInt(resume_node) },
.data = os.linux.epoll_data{ .ptr = @ptrToInt(resume_node) },
};
try os.linuxEpollCtl(self.os_data.epollfd, op, fd, &ev);
}
@@ -412,8 +412,8 @@ pub const Loop = struct.{
defer self.linuxRemoveFd(fd);
suspend {
// TODO explicitly put this memory in the coroutine frame #1194
var resume_node = ResumeNode.Basic.{
.base = ResumeNode.{
var resume_node = ResumeNode.Basic{
.base = ResumeNode{
.id = ResumeNode.Id.Basic,
.handle = @handle(),
.overlapped = ResumeNode.overlapped_init,
@@ -428,8 +428,8 @@ pub const Loop = struct.{
suspend {
resume @handle();
}
var resume_node = ResumeNode.Basic.{
.base = ResumeNode.{
var resume_node = ResumeNode.Basic{
.base = ResumeNode{
.id = ResumeNode.Id.Basic,
.handle = @handle(),
.overlapped = ResumeNode.overlapped_init,
@@ -447,7 +447,7 @@ pub const Loop = struct.{
pub fn bsdAddKev(self: *Loop, resume_node: *ResumeNode.Basic, ident: usize, filter: i16, fflags: u32) !void {
self.beginOneEvent();
errdefer self.finishOneEvent();
var kev = posix.Kevent.{
var kev = posix.Kevent{
.ident = ident,
.filter = filter,
.flags = posix.EV_ADD | posix.EV_ENABLE | posix.EV_CLEAR,
@@ -461,7 +461,7 @@ pub const Loop = struct.{
}
pub fn bsdRemoveKev(self: *Loop, ident: usize, filter: i16) void {
var kev = posix.Kevent.{
var kev = posix.Kevent{
.ident = ident,
.filter = filter,
.flags = posix.EV_DELETE,
@@ -559,11 +559,11 @@ pub const Loop = struct.{
/// it immediately returns to the caller, and the async function is queued in the event loop. It still
/// returns a promise to be awaited.
pub fn call(self: *Loop, comptime func: var, args: ...) !(promise->@typeOf(func).ReturnType) {
const S = struct.{
const S = struct {
async fn asyncFunc(loop: *Loop, handle: *promise->@typeOf(func).ReturnType, args2: ...) @typeOf(func).ReturnType {
suspend {
handle.* = @handle();
var my_tick_node = Loop.NextTickNode.{
var my_tick_node = Loop.NextTickNode{
.prev = undefined,
.next = undefined,
.data = @handle(),
@@ -585,7 +585,7 @@ pub const Loop = struct.{
/// is performed.
pub async fn yield(self: *Loop) void {
suspend {
var my_tick_node = Loop.NextTickNode.{
var my_tick_node = Loop.NextTickNode{
.prev = undefined,
.next = undefined,
.data = @handle(),
@@ -814,14 +814,14 @@ pub const Loop = struct.{
const OsData = switch (builtin.os) {
builtin.Os.linux => LinuxOsData,
builtin.Os.macosx => MacOsData,
builtin.Os.windows => struct.{
builtin.Os.windows => struct {
io_port: windows.HANDLE,
extra_thread_count: usize,
},
else => struct.{},
else => struct {},
};
const MacOsData = struct.{
const MacOsData = struct {
kqfd: i32,
final_kevent: posix.Kevent,
fs_kevent_wake: posix.Kevent,
@@ -832,7 +832,7 @@ pub const Loop = struct.{
fs_end_request: fs.RequestNode,
};
const LinuxOsData = struct.{
const LinuxOsData = struct {
epollfd: i32,
final_eventfd: i32,
final_eventfd_event: os.linux.epoll_event,
+18 -18
View File
@@ -7,7 +7,7 @@ const os = std.os;
const posix = os.posix;
const Loop = std.event.Loop;
pub const Server = struct.{
pub const Server = struct {
handleRequestFn: async<*mem.Allocator> fn (*Server, *const std.net.Address, os.File) void,
loop: *Loop,
@@ -22,14 +22,14 @@ pub const Server = struct.{
pub fn init(loop: *Loop) Server {
// TODO can't initialize handler coroutine here because we need well defined copy elision
return Server.{
return Server{
.loop = loop,
.sockfd = null,
.accept_coro = null,
.handleRequestFn = undefined,
.waiting_for_emfile_node = undefined,
.listen_address = undefined,
.listen_resume_node = event.Loop.ResumeNode.{
.listen_resume_node = event.Loop.ResumeNode{
.id = event.Loop.ResumeNode.Id.Basic,
.handle = undefined,
.overlapped = event.Loop.ResumeNode.overlapped_init,
@@ -118,7 +118,7 @@ pub async fn connectUnixSocket(loop: *Loop, path: []const u8) !i32 {
);
errdefer os.close(sockfd);
var sock_addr = posix.sockaddr_un.{
var sock_addr = posix.sockaddr_un{
.family = posix.AF_UNIX,
.path = undefined,
};
@@ -133,7 +133,7 @@ pub async fn connectUnixSocket(loop: *Loop, path: []const u8) !i32 {
return sockfd;
}
pub const ReadError = error.{
pub const ReadError = error{
SystemResources,
Unexpected,
UserResourceLimitReached,
@@ -147,7 +147,7 @@ pub const ReadError = error.{
/// returns number of bytes read. 0 means EOF.
pub async fn read(loop: *std.event.Loop, fd: os.FileHandle, buffer: []u8) ReadError!usize {
const iov = posix.iovec.{
const iov = posix.iovec{
.iov_base = buffer.ptr,
.iov_len = buffer.len,
};
@@ -155,10 +155,10 @@ pub async fn read(loop: *std.event.Loop, fd: os.FileHandle, buffer: []u8) ReadEr
return await (async readvPosix(loop, fd, iovs, 1) catch unreachable);
}
pub const WriteError = error.{};
pub const WriteError = error{};
pub async fn write(loop: *std.event.Loop, fd: os.FileHandle, buffer: []const u8) WriteError!void {
const iov = posix.iovec_const.{
const iov = posix.iovec_const{
.iov_base = buffer.ptr,
.iov_len = buffer.len,
};
@@ -232,7 +232,7 @@ pub async fn writev(loop: *Loop, fd: os.FileHandle, data: []const []const u8) !v
defer loop.allocator.free(iovecs);
for (data) |buf, i| {
iovecs[i] = os.posix.iovec_const.{
iovecs[i] = os.posix.iovec_const{
.iov_base = buf.ptr,
.iov_len = buf.len,
};
@@ -246,7 +246,7 @@ pub async fn readv(loop: *Loop, fd: os.FileHandle, data: []const []u8) !usize {
defer loop.allocator.free(iovecs);
for (data) |buf, i| {
iovecs[i] = os.posix.iovec.{
iovecs[i] = os.posix.iovec{
.iov_base = buf.ptr,
.iov_len = buf.len,
};
@@ -274,7 +274,7 @@ test "listen on a port, send bytes, receive bytes" {
return error.SkipZigTest;
}
const MyServer = struct.{
const MyServer = struct {
tcp_server: Server,
const Self = @This();
@@ -305,7 +305,7 @@ test "listen on a port, send bytes, receive bytes" {
var loop: Loop = undefined;
try loop.initSingleThreaded(std.debug.global_allocator);
var server = MyServer.{ .tcp_server = Server.init(&loop) };
var server = MyServer{ .tcp_server = Server.init(&loop) };
defer server.tcp_server.deinit();
try server.tcp_server.listen(&addr, MyServer.handler);
@@ -327,7 +327,7 @@ async fn doAsyncTest(loop: *Loop, address: *const std.net.Address, server: *Serv
server.close();
}
pub const OutStream = struct.{
pub const OutStream = struct {
fd: os.FileHandle,
stream: Stream,
loop: *Loop,
@@ -336,10 +336,10 @@ pub const OutStream = struct.{
pub const Stream = event.io.OutStream(Error);
/// Constructs an OutStream bound to `loop` that writes to the file handle `fd`.
/// The embedded `stream` vtable dispatches writes through `writeFn`.
pub fn init(loop: *Loop, fd: os.FileHandle) OutStream {
    return OutStream{
        .fd = fd,
        .loop = loop,
        .stream = Stream{ .writeFn = writeFn },
    };
}
@@ -349,7 +349,7 @@ pub const OutStream = struct.{
}
};
pub const InStream = struct.{
pub const InStream = struct {
fd: os.FileHandle,
stream: Stream,
loop: *Loop,
@@ -358,10 +358,10 @@ pub const InStream = struct.{
pub const Stream = event.io.InStream(Error);
/// Constructs an InStream bound to `loop` that reads from the file handle `fd`.
/// The embedded `stream` vtable dispatches reads through `readFn`.
pub fn init(loop: *Loop, fd: os.FileHandle) InStream {
    return InStream{
        .fd = fd,
        .loop = loop,
        .stream = Stream{ .readFn = readFn },
    };
}
+11 -11
View File
@@ -12,7 +12,7 @@ const Loop = std.event.Loop;
/// Many readers can hold the lock at the same time; however locking for writing is exclusive.
/// When a read lock is held, it will not be released until the reader queue is empty.
/// When a write lock is held, it will not be released until the writer queue is empty.
pub const RwLock = struct.{
pub const RwLock = struct {
loop: *Loop,
shared_state: u8, // TODO make this an enum
writer_queue: Queue,
@@ -21,7 +21,7 @@ pub const RwLock = struct.{
reader_queue_empty_bit: u8, // TODO make this a bool
reader_lock_count: usize,
const State = struct.{
const State = struct {
const Unlocked = 0;
const WriteLock = 1;
const ReadLock = 2;
@@ -29,7 +29,7 @@ pub const RwLock = struct.{
const Queue = std.atomic.Queue(promise);
pub const HeldRead = struct.{
pub const HeldRead = struct {
lock: *RwLock,
pub fn release(self: HeldRead) void {
@@ -48,7 +48,7 @@ pub const RwLock = struct.{
}
};
pub const HeldWrite = struct.{
pub const HeldWrite = struct {
lock: *RwLock,
pub fn release(self: HeldWrite) void {
@@ -77,7 +77,7 @@ pub const RwLock = struct.{
};
pub fn init(loop: *Loop) RwLock {
return RwLock.{
return RwLock{
.loop = loop,
.shared_state = State.Unlocked,
.writer_queue = Queue.init(),
@@ -101,7 +101,7 @@ pub const RwLock = struct.{
suspend {
// TODO explicitly put this memory in the coroutine frame #1194
var my_tick_node = Loop.NextTickNode.{
var my_tick_node = Loop.NextTickNode{
.data = @handle(),
.prev = undefined,
.next = undefined,
@@ -128,13 +128,13 @@ pub const RwLock = struct.{
}
}
}
return HeldRead.{ .lock = self };
return HeldRead{ .lock = self };
}
pub async fn acquireWrite(self: *RwLock) HeldWrite {
suspend {
// TODO explicitly put this memory in the coroutine frame #1194
var my_tick_node = Loop.NextTickNode.{
var my_tick_node = Loop.NextTickNode{
.data = @handle(),
.prev = undefined,
.next = undefined,
@@ -158,7 +158,7 @@ pub const RwLock = struct.{
}
}
}
return HeldWrite.{ .lock = self };
return HeldWrite{ .lock = self };
}
fn commonPostUnlock(self: *RwLock) void {
@@ -227,7 +227,7 @@ test "std.event.RwLock" {
defer cancel handle;
loop.run();
const expected_result = [1]i32.{shared_it_count * @intCast(i32, shared_test_data.len)} ** shared_test_data.len;
const expected_result = [1]i32{shared_it_count * @intCast(i32, shared_test_data.len)} ** shared_test_data.len;
assert(mem.eql(i32, shared_test_data, expected_result));
}
@@ -258,7 +258,7 @@ async fn testLock(loop: *Loop, lock: *RwLock) void {
}
const shared_it_count = 10;
var shared_test_data = [1]i32.{0} ** 10;
var shared_test_data = [1]i32{0} ** 10;
var shared_test_index: usize = 0;
var shared_count: usize = 0;
+6 -6
View File
@@ -6,13 +6,13 @@ const Loop = std.event.Loop;
/// coroutines which are waiting for the lock are suspended, and
/// are resumed when the lock is released, in order.
pub fn RwLocked(comptime T: type) type {
return struct.{
return struct {
lock: RwLock,
locked_data: T,
const Self = @This();
pub const HeldReadLock = struct.{
pub const HeldReadLock = struct {
value: *const T,
held: RwLock.HeldRead,
@@ -21,7 +21,7 @@ pub fn RwLocked(comptime T: type) type {
}
};
pub const HeldWriteLock = struct.{
pub const HeldWriteLock = struct {
value: *T,
held: RwLock.HeldWrite,
@@ -31,7 +31,7 @@ pub fn RwLocked(comptime T: type) type {
};
pub fn init(loop: *Loop, data: T) Self {
return Self.{
return Self{
.lock = RwLock.init(loop),
.locked_data = data,
};
@@ -42,14 +42,14 @@ pub fn RwLocked(comptime T: type) type {
}
/// Acquires the underlying RwLock for reading and returns a handle whose
/// `value` is a const pointer to the protected data; the handle's `held`
/// field retains the read lock until it is released.
pub async fn acquireRead(self: *Self) HeldReadLock {
    return HeldReadLock{
        .held = await (async self.lock.acquireRead() catch unreachable),
        .value = &self.locked_data,
    };
}
pub async fn acquireWrite(self: *Self) HeldWriteLock {
return HeldWriteLock.{
return HeldWriteLock{
.held = await (async self.lock.acquireWrite() catch unreachable),
.value = &self.locked_data,
};
+4 -4
View File
@@ -1,4 +1,4 @@
pub const enum3 = []u64.{
pub const enum3 = []u64{
0x4e2e2785c3a2a20b,
0x240a28877a09a4e1,
0x728fca36c06cf106,
@@ -433,19 +433,19 @@ pub const enum3 = []u64.{
0x6d4b9445072f4374,
};
/// A decimal digit string together with its base-10 exponent.
const Slab = struct {
    str: []const u8,
    exp: i32,
};
/// Convenience constructor for a `Slab` value.
fn slab(str: []const u8, exp: i32) Slab {
    return Slab{
        .str = str,
        .exp = exp,
    };
}
pub const enum3_data = []Slab.{
pub const enum3_data = []Slab{
slab("40648030339495312", 69),
slab("4498645355592131", -134),
slab("678321594594593", 244),
+11 -11
View File
@@ -7,12 +7,12 @@ const math = std.math;
const mem = std.mem;
const assert = std.debug.assert;
/// A decimal representation of a floating point value:
/// the significant digit characters plus a base-10 exponent.
pub const FloatDecimal = struct {
    digits: []u8,
    exp: i32,
};
pub const RoundMode = enum.{
pub const RoundMode = enum {
// Round only the fractional portion (e.g. 1234.23 has precision 2)
Decimal,
// Round the entire whole/fractional portion (e.g. 1.23423e3 has precision 5)
@@ -86,7 +86,7 @@ pub fn errol3(value: f64, buffer: []u8) FloatDecimal {
const data = enum3_data[i];
const digits = buffer[1 .. data.str.len + 1];
mem.copy(u8, digits, data.str);
return FloatDecimal.{
return FloatDecimal{
.digits = digits,
.exp = data.exp,
};
@@ -135,11 +135,11 @@ fn errol3u(val: f64, buffer: []u8) FloatDecimal {
}
// compute boundaries
var high = HP.{
var high = HP{
.val = mid.val,
.off = mid.off + (fpnext(val) - val) * lten * ten / 2.0,
};
var low = HP.{
var low = HP{
.val = mid.val,
.off = mid.off + (fpprev(val) - val) * lten * ten / 2.0,
};
@@ -164,7 +164,7 @@ fn errol3u(val: f64, buffer: []u8) FloatDecimal {
// digit generation
// We generate digits starting at index 1. If rounding a buffer later then it may be
// required to generate a preceeding digit in some cases (9.999) in which case we use
// required to generate a preceding digit in some cases (9.999) in which case we use
// the 0-index for this extra digit.
var buf_index: usize = 1;
while (true) {
@@ -191,7 +191,7 @@ fn errol3u(val: f64, buffer: []u8) FloatDecimal {
buffer[buf_index] = mdig + '0';
buf_index += 1;
return FloatDecimal.{
return FloatDecimal{
.digits = buffer[1..buf_index],
.exp = exp,
};
@@ -229,7 +229,7 @@ fn hpProd(in: HP, val: f64) HP {
const p = in.val * val;
const e = ((hi * hi2 - p) + lo * hi2 + hi * lo2) + lo * lo2;
return HP.{
return HP{
.val = p,
.off = in.off * val + e,
};
@@ -342,7 +342,7 @@ fn errolInt(val: f64, buffer: []u8) FloatDecimal {
buf_index += 1;
}
return FloatDecimal.{
return FloatDecimal{
.digits = buffer[0..buf_index],
.exp = @intCast(i32, buf_index) + mi,
};
@@ -401,7 +401,7 @@ fn errolFixed(val: f64, buffer: []u8) FloatDecimal {
buffer[j] = 0;
return FloatDecimal.{
return FloatDecimal{
.digits = buffer[0..j],
.exp = exp,
};
@@ -415,7 +415,7 @@ fn fpprev(val: f64) f64 {
return @bitCast(f64, @bitCast(u64, val) -% 1);
}
pub const c_digits_lut = []u8.{
pub const c_digits_lut = []u8{
'0', '0', '0', '1', '0', '2', '0', '3', '0', '4', '0', '5', '0', '6',
'0', '7', '0', '8', '0', '9', '1', '0', '1', '1', '1', '2', '1', '3',
'1', '4', '1', '5', '1', '6', '1', '7', '1', '8', '1', '9', '2', '0',
+602 -602
View File
@@ -1,606 +1,606 @@
/// Higher-precision value stored as a pair of doubles: `val` plus a small
/// correction term `off` (see the `lookup_table` entries and `hpProd`).
pub const HP = struct {
    val: f64,
    off: f64,
};
pub const lookup_table = []HP.{
HP.{ .val = 1.000000e+308, .off = -1.097906362944045488e+291 },
HP.{ .val = 1.000000e+307, .off = 1.396894023974354241e+290 },
HP.{ .val = 1.000000e+306, .off = -1.721606459673645508e+289 },
HP.{ .val = 1.000000e+305, .off = 6.074644749446353973e+288 },
HP.{ .val = 1.000000e+304, .off = 6.074644749446353567e+287 },
HP.{ .val = 1.000000e+303, .off = -1.617650767864564452e+284 },
HP.{ .val = 1.000000e+302, .off = -7.629703079084895055e+285 },
HP.{ .val = 1.000000e+301, .off = -5.250476025520442286e+284 },
HP.{ .val = 1.000000e+300, .off = -5.250476025520441956e+283 },
HP.{ .val = 1.000000e+299, .off = -5.250476025520441750e+282 },
HP.{ .val = 1.000000e+298, .off = 4.043379652465702264e+281 },
HP.{ .val = 1.000000e+297, .off = -1.765280146275637946e+280 },
HP.{ .val = 1.000000e+296, .off = 1.865132227937699609e+279 },
HP.{ .val = 1.000000e+295, .off = 1.865132227937699609e+278 },
HP.{ .val = 1.000000e+294, .off = -6.643646774124810287e+277 },
HP.{ .val = 1.000000e+293, .off = 7.537651562646039934e+276 },
HP.{ .val = 1.000000e+292, .off = -1.325659897835741608e+275 },
HP.{ .val = 1.000000e+291, .off = 4.213909764965371606e+274 },
HP.{ .val = 1.000000e+290, .off = -6.172783352786715670e+273 },
HP.{ .val = 1.000000e+289, .off = -6.172783352786715670e+272 },
HP.{ .val = 1.000000e+288, .off = -7.630473539575035471e+270 },
HP.{ .val = 1.000000e+287, .off = -7.525217352494018700e+270 },
HP.{ .val = 1.000000e+286, .off = -3.298861103408696612e+269 },
HP.{ .val = 1.000000e+285, .off = 1.984084207947955778e+268 },
HP.{ .val = 1.000000e+284, .off = -7.921438250845767591e+267 },
HP.{ .val = 1.000000e+283, .off = 4.460464822646386735e+266 },
HP.{ .val = 1.000000e+282, .off = -3.278224598286209647e+265 },
HP.{ .val = 1.000000e+281, .off = -3.278224598286209737e+264 },
HP.{ .val = 1.000000e+280, .off = -3.278224598286209961e+263 },
HP.{ .val = 1.000000e+279, .off = -5.797329227496039232e+262 },
HP.{ .val = 1.000000e+278, .off = 3.649313132040821498e+261 },
HP.{ .val = 1.000000e+277, .off = -2.867878510995372374e+259 },
HP.{ .val = 1.000000e+276, .off = -5.206914080024985409e+259 },
HP.{ .val = 1.000000e+275, .off = 4.018322599210230404e+258 },
HP.{ .val = 1.000000e+274, .off = 7.862171215558236495e+257 },
HP.{ .val = 1.000000e+273, .off = 5.459765830340732821e+256 },
HP.{ .val = 1.000000e+272, .off = -6.552261095746788047e+255 },
HP.{ .val = 1.000000e+271, .off = 4.709014147460262298e+254 },
HP.{ .val = 1.000000e+270, .off = -4.675381888545612729e+253 },
HP.{ .val = 1.000000e+269, .off = -4.675381888545612892e+252 },
HP.{ .val = 1.000000e+268, .off = 2.656177514583977380e+251 },
HP.{ .val = 1.000000e+267, .off = 2.656177514583977190e+250 },
HP.{ .val = 1.000000e+266, .off = -3.071603269111014892e+249 },
HP.{ .val = 1.000000e+265, .off = -6.651466258920385440e+248 },
HP.{ .val = 1.000000e+264, .off = -4.414051890289528972e+247 },
HP.{ .val = 1.000000e+263, .off = -1.617283929500958387e+246 },
HP.{ .val = 1.000000e+262, .off = -1.617283929500958241e+245 },
HP.{ .val = 1.000000e+261, .off = 7.122615947963323868e+244 },
HP.{ .val = 1.000000e+260, .off = -6.533477610574617382e+243 },
HP.{ .val = 1.000000e+259, .off = 7.122615947963323982e+242 },
HP.{ .val = 1.000000e+258, .off = -5.679971763165996225e+241 },
HP.{ .val = 1.000000e+257, .off = -3.012765990014054219e+240 },
HP.{ .val = 1.000000e+256, .off = -3.012765990014054219e+239 },
HP.{ .val = 1.000000e+255, .off = 1.154743030535854616e+238 },
HP.{ .val = 1.000000e+254, .off = 6.364129306223240767e+237 },
HP.{ .val = 1.000000e+253, .off = 6.364129306223241129e+236 },
HP.{ .val = 1.000000e+252, .off = -9.915202805299840595e+235 },
HP.{ .val = 1.000000e+251, .off = -4.827911520448877980e+234 },
HP.{ .val = 1.000000e+250, .off = 7.890316691678530146e+233 },
HP.{ .val = 1.000000e+249, .off = 7.890316691678529484e+232 },
HP.{ .val = 1.000000e+248, .off = -4.529828046727141859e+231 },
HP.{ .val = 1.000000e+247, .off = 4.785280507077111924e+230 },
HP.{ .val = 1.000000e+246, .off = -6.858605185178205305e+229 },
HP.{ .val = 1.000000e+245, .off = -4.432795665958347728e+228 },
HP.{ .val = 1.000000e+244, .off = -7.465057564983169531e+227 },
HP.{ .val = 1.000000e+243, .off = -7.465057564983169741e+226 },
HP.{ .val = 1.000000e+242, .off = -5.096102956370027445e+225 },
HP.{ .val = 1.000000e+241, .off = -5.096102956370026952e+224 },
HP.{ .val = 1.000000e+240, .off = -1.394611380411992474e+223 },
HP.{ .val = 1.000000e+239, .off = 9.188208545617793960e+221 },
HP.{ .val = 1.000000e+238, .off = -4.864759732872650359e+221 },
HP.{ .val = 1.000000e+237, .off = 5.979453868566904629e+220 },
HP.{ .val = 1.000000e+236, .off = -5.316601966265964857e+219 },
HP.{ .val = 1.000000e+235, .off = -5.316601966265964701e+218 },
HP.{ .val = 1.000000e+234, .off = -1.786584517880693123e+217 },
HP.{ .val = 1.000000e+233, .off = 2.625937292600896716e+216 },
HP.{ .val = 1.000000e+232, .off = -5.647541102052084079e+215 },
HP.{ .val = 1.000000e+231, .off = -5.647541102052083888e+214 },
HP.{ .val = 1.000000e+230, .off = -9.956644432600511943e+213 },
HP.{ .val = 1.000000e+229, .off = 8.161138937705571862e+211 },
HP.{ .val = 1.000000e+228, .off = 7.549087847752475275e+211 },
HP.{ .val = 1.000000e+227, .off = -9.283347037202319948e+210 },
HP.{ .val = 1.000000e+226, .off = 3.866992716668613820e+209 },
HP.{ .val = 1.000000e+225, .off = 7.154577655136347262e+208 },
HP.{ .val = 1.000000e+224, .off = 3.045096482051680688e+207 },
HP.{ .val = 1.000000e+223, .off = -4.660180717482069567e+206 },
HP.{ .val = 1.000000e+222, .off = -4.660180717482070101e+205 },
HP.{ .val = 1.000000e+221, .off = -4.660180717482069544e+204 },
HP.{ .val = 1.000000e+220, .off = 3.562757926310489022e+202 },
HP.{ .val = 1.000000e+219, .off = 3.491561111451748149e+202 },
HP.{ .val = 1.000000e+218, .off = -8.265758834125874135e+201 },
HP.{ .val = 1.000000e+217, .off = 3.981449442517482365e+200 },
HP.{ .val = 1.000000e+216, .off = -2.142154695804195936e+199 },
HP.{ .val = 1.000000e+215, .off = 9.339603063548950188e+198 },
HP.{ .val = 1.000000e+214, .off = 4.555537330485139746e+197 },
HP.{ .val = 1.000000e+213, .off = 1.565496247320257804e+196 },
HP.{ .val = 1.000000e+212, .off = 9.040598955232462036e+195 },
HP.{ .val = 1.000000e+211, .off = 4.368659762787334780e+194 },
HP.{ .val = 1.000000e+210, .off = 7.288621758065539072e+193 },
HP.{ .val = 1.000000e+209, .off = -7.311188218325485628e+192 },
HP.{ .val = 1.000000e+208, .off = 1.813693016918905189e+191 },
HP.{ .val = 1.000000e+207, .off = -3.889357755108838992e+190 },
HP.{ .val = 1.000000e+206, .off = -3.889357755108838992e+189 },
HP.{ .val = 1.000000e+205, .off = -1.661603547285501360e+188 },
HP.{ .val = 1.000000e+204, .off = 1.123089212493670643e+187 },
HP.{ .val = 1.000000e+203, .off = 1.123089212493670643e+186 },
HP.{ .val = 1.000000e+202, .off = 9.825254086803583029e+185 },
HP.{ .val = 1.000000e+201, .off = -3.771878529305654999e+184 },
HP.{ .val = 1.000000e+200, .off = 3.026687778748963675e+183 },
HP.{ .val = 1.000000e+199, .off = -9.720624048853446693e+182 },
HP.{ .val = 1.000000e+198, .off = -1.753554156601940139e+181 },
HP.{ .val = 1.000000e+197, .off = 4.885670753607648963e+180 },
HP.{ .val = 1.000000e+196, .off = 4.885670753607648963e+179 },
HP.{ .val = 1.000000e+195, .off = 2.292223523057028076e+178 },
HP.{ .val = 1.000000e+194, .off = 5.534032561245303825e+177 },
HP.{ .val = 1.000000e+193, .off = -6.622751331960730683e+176 },
HP.{ .val = 1.000000e+192, .off = -4.090088020876139692e+175 },
HP.{ .val = 1.000000e+191, .off = -7.255917159731877552e+174 },
HP.{ .val = 1.000000e+190, .off = -7.255917159731877992e+173 },
HP.{ .val = 1.000000e+189, .off = -2.309309130269787104e+172 },
HP.{ .val = 1.000000e+188, .off = -2.309309130269787019e+171 },
HP.{ .val = 1.000000e+187, .off = 9.284303438781988230e+170 },
HP.{ .val = 1.000000e+186, .off = 2.038295583124628364e+169 },
HP.{ .val = 1.000000e+185, .off = 2.038295583124628532e+168 },
HP.{ .val = 1.000000e+184, .off = -1.735666841696912925e+167 },
HP.{ .val = 1.000000e+183, .off = 5.340512704843477241e+166 },
HP.{ .val = 1.000000e+182, .off = -6.453119872723839321e+165 },
HP.{ .val = 1.000000e+181, .off = 8.288920849235306587e+164 },
HP.{ .val = 1.000000e+180, .off = -9.248546019891598293e+162 },
HP.{ .val = 1.000000e+179, .off = 1.954450226518486016e+162 },
HP.{ .val = 1.000000e+178, .off = -5.243811844750628197e+161 },
HP.{ .val = 1.000000e+177, .off = -7.448980502074320639e+159 },
HP.{ .val = 1.000000e+176, .off = -7.448980502074319858e+158 },
HP.{ .val = 1.000000e+175, .off = 6.284654753766312753e+158 },
HP.{ .val = 1.000000e+174, .off = -6.895756753684458388e+157 },
HP.{ .val = 1.000000e+173, .off = -1.403918625579970616e+156 },
HP.{ .val = 1.000000e+172, .off = -8.268716285710580522e+155 },
HP.{ .val = 1.000000e+171, .off = 4.602779327034313170e+154 },
HP.{ .val = 1.000000e+170, .off = -3.441905430931244940e+153 },
HP.{ .val = 1.000000e+169, .off = 6.613950516525702884e+152 },
HP.{ .val = 1.000000e+168, .off = 6.613950516525702652e+151 },
HP.{ .val = 1.000000e+167, .off = -3.860899428741951187e+150 },
HP.{ .val = 1.000000e+166, .off = 5.959272394946474605e+149 },
HP.{ .val = 1.000000e+165, .off = 1.005101065481665103e+149 },
HP.{ .val = 1.000000e+164, .off = -1.783349948587918355e+146 },
HP.{ .val = 1.000000e+163, .off = 6.215006036188360099e+146 },
HP.{ .val = 1.000000e+162, .off = 6.215006036188360099e+145 },
HP.{ .val = 1.000000e+161, .off = -3.774589324822814903e+144 },
HP.{ .val = 1.000000e+160, .off = -6.528407745068226929e+142 },
HP.{ .val = 1.000000e+159, .off = 7.151530601283157561e+142 },
HP.{ .val = 1.000000e+158, .off = 4.712664546348788765e+141 },
HP.{ .val = 1.000000e+157, .off = 1.664081977680827856e+140 },
HP.{ .val = 1.000000e+156, .off = 1.664081977680827750e+139 },
HP.{ .val = 1.000000e+155, .off = -7.176231540910168265e+137 },
HP.{ .val = 1.000000e+154, .off = -3.694754568805822650e+137 },
HP.{ .val = 1.000000e+153, .off = 2.665969958768462622e+134 },
HP.{ .val = 1.000000e+152, .off = -4.625108135904199522e+135 },
HP.{ .val = 1.000000e+151, .off = -1.717753238721771919e+134 },
HP.{ .val = 1.000000e+150, .off = 1.916440382756262433e+133 },
HP.{ .val = 1.000000e+149, .off = -4.897672657515052040e+132 },
HP.{ .val = 1.000000e+148, .off = -4.897672657515052198e+131 },
HP.{ .val = 1.000000e+147, .off = 2.200361759434233991e+130 },
HP.{ .val = 1.000000e+146, .off = 6.636633270027537273e+129 },
HP.{ .val = 1.000000e+145, .off = 1.091293881785907977e+128 },
HP.{ .val = 1.000000e+144, .off = -2.374543235865110597e+127 },
HP.{ .val = 1.000000e+143, .off = -2.374543235865110537e+126 },
HP.{ .val = 1.000000e+142, .off = -5.082228484029969099e+125 },
HP.{ .val = 1.000000e+141, .off = -1.697621923823895943e+124 },
HP.{ .val = 1.000000e+140, .off = -5.928380124081487212e+123 },
HP.{ .val = 1.000000e+139, .off = -3.284156248920492522e+122 },
HP.{ .val = 1.000000e+138, .off = -3.284156248920492706e+121 },
HP.{ .val = 1.000000e+137, .off = -3.284156248920492476e+120 },
HP.{ .val = 1.000000e+136, .off = -5.866406127007401066e+119 },
HP.{ .val = 1.000000e+135, .off = 3.817030915818506056e+118 },
HP.{ .val = 1.000000e+134, .off = 7.851796350329300951e+117 },
HP.{ .val = 1.000000e+133, .off = -2.235117235947686077e+116 },
HP.{ .val = 1.000000e+132, .off = 9.170432597638723691e+114 },
HP.{ .val = 1.000000e+131, .off = 8.797444499042767883e+114 },
HP.{ .val = 1.000000e+130, .off = -5.978307824605161274e+113 },
HP.{ .val = 1.000000e+129, .off = 1.782556435814758516e+111 },
HP.{ .val = 1.000000e+128, .off = -7.517448691651820362e+111 },
HP.{ .val = 1.000000e+127, .off = 4.507089332150205498e+110 },
HP.{ .val = 1.000000e+126, .off = 7.513223838100711695e+109 },
HP.{ .val = 1.000000e+125, .off = 7.513223838100712113e+108 },
HP.{ .val = 1.000000e+124, .off = 5.164681255326878494e+107 },
HP.{ .val = 1.000000e+123, .off = 2.229003026859587122e+106 },
HP.{ .val = 1.000000e+122, .off = -1.440594758724527399e+105 },
HP.{ .val = 1.000000e+121, .off = -3.734093374714598783e+104 },
HP.{ .val = 1.000000e+120, .off = 1.999653165260579757e+103 },
HP.{ .val = 1.000000e+119, .off = 5.583244752745066693e+102 },
HP.{ .val = 1.000000e+118, .off = 3.343500010567262234e+101 },
HP.{ .val = 1.000000e+117, .off = -5.055542772599503556e+100 },
HP.{ .val = 1.000000e+116, .off = -1.555941612946684331e+99 },
HP.{ .val = 1.000000e+115, .off = -1.555941612946684331e+98 },
HP.{ .val = 1.000000e+114, .off = -1.555941612946684293e+97 },
HP.{ .val = 1.000000e+113, .off = -1.555941612946684246e+96 },
HP.{ .val = 1.000000e+112, .off = 6.988006530736955847e+95 },
HP.{ .val = 1.000000e+111, .off = 4.318022735835818244e+94 },
HP.{ .val = 1.000000e+110, .off = -2.356936751417025578e+93 },
HP.{ .val = 1.000000e+109, .off = 1.814912928116001926e+92 },
HP.{ .val = 1.000000e+108, .off = -3.399899171300282744e+91 },
HP.{ .val = 1.000000e+107, .off = 3.118615952970072913e+90 },
HP.{ .val = 1.000000e+106, .off = -9.103599905036843605e+89 },
HP.{ .val = 1.000000e+105, .off = 6.174169917471802325e+88 },
HP.{ .val = 1.000000e+104, .off = -1.915675085734668657e+86 },
HP.{ .val = 1.000000e+103, .off = -1.915675085734668864e+85 },
HP.{ .val = 1.000000e+102, .off = 2.295048673475466221e+85 },
HP.{ .val = 1.000000e+101, .off = 2.295048673475466135e+84 },
HP.{ .val = 1.000000e+100, .off = -1.590289110975991792e+83 },
HP.{ .val = 1.000000e+99, .off = 3.266383119588331155e+82 },
HP.{ .val = 1.000000e+98, .off = 2.309629754856292029e+80 },
HP.{ .val = 1.000000e+97, .off = -7.357587384771124533e+80 },
HP.{ .val = 1.000000e+96, .off = -4.986165397190889509e+79 },
HP.{ .val = 1.000000e+95, .off = -2.021887912715594741e+78 },
HP.{ .val = 1.000000e+94, .off = -2.021887912715594638e+77 },
HP.{ .val = 1.000000e+93, .off = -4.337729697461918675e+76 },
HP.{ .val = 1.000000e+92, .off = -4.337729697461918997e+75 },
HP.{ .val = 1.000000e+91, .off = -7.956232486128049702e+74 },
HP.{ .val = 1.000000e+90, .off = 3.351588728453609882e+73 },
HP.{ .val = 1.000000e+89, .off = 5.246334248081951113e+71 },
HP.{ .val = 1.000000e+88, .off = 4.058327554364963672e+71 },
HP.{ .val = 1.000000e+87, .off = 4.058327554364963918e+70 },
HP.{ .val = 1.000000e+86, .off = -1.463069523067487266e+69 },
HP.{ .val = 1.000000e+85, .off = -1.463069523067487314e+68 },
HP.{ .val = 1.000000e+84, .off = -5.776660989811589441e+67 },
HP.{ .val = 1.000000e+83, .off = -3.080666323096525761e+66 },
HP.{ .val = 1.000000e+82, .off = 3.659320343691134468e+65 },
HP.{ .val = 1.000000e+81, .off = 7.871812010433421235e+64 },
HP.{ .val = 1.000000e+80, .off = -2.660986470836727449e+61 },
HP.{ .val = 1.000000e+79, .off = 3.264399249934044627e+62 },
HP.{ .val = 1.000000e+78, .off = -8.493621433689703070e+60 },
HP.{ .val = 1.000000e+77, .off = 1.721738727445414063e+60 },
HP.{ .val = 1.000000e+76, .off = -4.706013449590547218e+59 },
HP.{ .val = 1.000000e+75, .off = 7.346021882351880518e+58 },
HP.{ .val = 1.000000e+74, .off = 4.835181188197207515e+57 },
HP.{ .val = 1.000000e+73, .off = 1.696630320503867482e+56 },
HP.{ .val = 1.000000e+72, .off = 5.619818905120542959e+55 },
HP.{ .val = 1.000000e+71, .off = -4.188152556421145598e+54 },
HP.{ .val = 1.000000e+70, .off = -7.253143638152923145e+53 },
HP.{ .val = 1.000000e+69, .off = -7.253143638152923145e+52 },
HP.{ .val = 1.000000e+68, .off = 4.719477774861832896e+51 },
HP.{ .val = 1.000000e+67, .off = 1.726322421608144052e+50 },
HP.{ .val = 1.000000e+66, .off = 5.467766613175255107e+49 },
HP.{ .val = 1.000000e+65, .off = 7.909613737163661911e+47 },
HP.{ .val = 1.000000e+64, .off = -2.132041900945439564e+47 },
HP.{ .val = 1.000000e+63, .off = -5.785795994272697265e+46 },
HP.{ .val = 1.000000e+62, .off = -3.502199685943161329e+45 },
HP.{ .val = 1.000000e+61, .off = 5.061286470292598274e+44 },
HP.{ .val = 1.000000e+60, .off = 5.061286470292598472e+43 },
HP.{ .val = 1.000000e+59, .off = 2.831211950439536034e+42 },
HP.{ .val = 1.000000e+58, .off = 5.618805100255863927e+41 },
HP.{ .val = 1.000000e+57, .off = -4.834669211555366251e+40 },
HP.{ .val = 1.000000e+56, .off = -9.190283508143378583e+39 },
HP.{ .val = 1.000000e+55, .off = -1.023506702040855158e+38 },
HP.{ .val = 1.000000e+54, .off = -7.829154040459624616e+37 },
HP.{ .val = 1.000000e+53, .off = 6.779051325638372659e+35 },
HP.{ .val = 1.000000e+52, .off = 6.779051325638372290e+34 },
HP.{ .val = 1.000000e+51, .off = 6.779051325638371598e+33 },
HP.{ .val = 1.000000e+50, .off = -7.629769841091887392e+33 },
HP.{ .val = 1.000000e+49, .off = 5.350972305245182400e+32 },
HP.{ .val = 1.000000e+48, .off = -4.384584304507619764e+31 },
HP.{ .val = 1.000000e+47, .off = -4.384584304507619876e+30 },
HP.{ .val = 1.000000e+46, .off = 6.860180964052978705e+28 },
HP.{ .val = 1.000000e+45, .off = 7.024271097546444878e+28 },
HP.{ .val = 1.000000e+44, .off = -8.821361405306422641e+27 },
HP.{ .val = 1.000000e+43, .off = -1.393721169594140991e+26 },
HP.{ .val = 1.000000e+42, .off = -4.488571267807591679e+25 },
HP.{ .val = 1.000000e+41, .off = -6.200086450407783195e+23 },
HP.{ .val = 1.000000e+40, .off = -3.037860284270036669e+23 },
HP.{ .val = 1.000000e+39, .off = 6.029083362839682141e+22 },
HP.{ .val = 1.000000e+38, .off = 2.251190176543965970e+21 },
HP.{ .val = 1.000000e+37, .off = 4.612373417978788577e+20 },
HP.{ .val = 1.000000e+36, .off = -4.242063737401796198e+19 },
HP.{ .val = 1.000000e+35, .off = 3.136633892082024448e+18 },
HP.{ .val = 1.000000e+34, .off = 5.442476901295718400e+17 },
HP.{ .val = 1.000000e+33, .off = 5.442476901295718400e+16 },
HP.{ .val = 1.000000e+32, .off = -5.366162204393472000e+15 },
HP.{ .val = 1.000000e+31, .off = 3.641037050347520000e+14 },
HP.{ .val = 1.000000e+30, .off = -1.988462483865600000e+13 },
HP.{ .val = 1.000000e+29, .off = 8.566849142784000000e+12 },
HP.{ .val = 1.000000e+28, .off = 4.168802631680000000e+11 },
HP.{ .val = 1.000000e+27, .off = -1.328755507200000000e+10 },
HP.{ .val = 1.000000e+26, .off = -4.764729344000000000e+09 },
HP.{ .val = 1.000000e+25, .off = -9.059696640000000000e+08 },
HP.{ .val = 1.000000e+24, .off = 1.677721600000000000e+07 },
HP.{ .val = 1.000000e+23, .off = 8.388608000000000000e+06 },
HP.{ .val = 1.000000e+22, .off = 0.000000000000000000e+00 },
HP.{ .val = 1.000000e+21, .off = 0.000000000000000000e+00 },
HP.{ .val = 1.000000e+20, .off = 0.000000000000000000e+00 },
HP.{ .val = 1.000000e+19, .off = 0.000000000000000000e+00 },
HP.{ .val = 1.000000e+18, .off = 0.000000000000000000e+00 },
HP.{ .val = 1.000000e+17, .off = 0.000000000000000000e+00 },
HP.{ .val = 1.000000e+16, .off = 0.000000000000000000e+00 },
HP.{ .val = 1.000000e+15, .off = 0.000000000000000000e+00 },
HP.{ .val = 1.000000e+14, .off = 0.000000000000000000e+00 },
HP.{ .val = 1.000000e+13, .off = 0.000000000000000000e+00 },
HP.{ .val = 1.000000e+12, .off = 0.000000000000000000e+00 },
HP.{ .val = 1.000000e+11, .off = 0.000000000000000000e+00 },
HP.{ .val = 1.000000e+10, .off = 0.000000000000000000e+00 },
HP.{ .val = 1.000000e+09, .off = 0.000000000000000000e+00 },
HP.{ .val = 1.000000e+08, .off = 0.000000000000000000e+00 },
HP.{ .val = 1.000000e+07, .off = 0.000000000000000000e+00 },
HP.{ .val = 1.000000e+06, .off = 0.000000000000000000e+00 },
HP.{ .val = 1.000000e+05, .off = 0.000000000000000000e+00 },
HP.{ .val = 1.000000e+04, .off = 0.000000000000000000e+00 },
HP.{ .val = 1.000000e+03, .off = 0.000000000000000000e+00 },
HP.{ .val = 1.000000e+02, .off = 0.000000000000000000e+00 },
HP.{ .val = 1.000000e+01, .off = 0.000000000000000000e+00 },
HP.{ .val = 1.000000e+00, .off = 0.000000000000000000e+00 },
HP.{ .val = 1.000000e-01, .off = -5.551115123125783010e-18 },
HP.{ .val = 1.000000e-02, .off = -2.081668171172168436e-19 },
HP.{ .val = 1.000000e-03, .off = -2.081668171172168557e-20 },
HP.{ .val = 1.000000e-04, .off = -4.792173602385929943e-21 },
HP.{ .val = 1.000000e-05, .off = -8.180305391403130547e-22 },
HP.{ .val = 1.000000e-06, .off = 4.525188817411374069e-23 },
HP.{ .val = 1.000000e-07, .off = 4.525188817411373922e-24 },
HP.{ .val = 1.000000e-08, .off = -2.092256083012847109e-25 },
HP.{ .val = 1.000000e-09, .off = -6.228159145777985254e-26 },
HP.{ .val = 1.000000e-10, .off = -3.643219731549774344e-27 },
HP.{ .val = 1.000000e-11, .off = 6.050303071806019080e-28 },
HP.{ .val = 1.000000e-12, .off = 2.011335237074438524e-29 },
HP.{ .val = 1.000000e-13, .off = -3.037374556340037101e-30 },
HP.{ .val = 1.000000e-14, .off = 1.180690645440101289e-32 },
HP.{ .val = 1.000000e-15, .off = -7.770539987666107583e-32 },
HP.{ .val = 1.000000e-16, .off = 2.090221327596539779e-33 },
HP.{ .val = 1.000000e-17, .off = -7.154242405462192144e-34 },
HP.{ .val = 1.000000e-18, .off = -7.154242405462192572e-35 },
HP.{ .val = 1.000000e-19, .off = 2.475407316473986894e-36 },
HP.{ .val = 1.000000e-20, .off = 5.484672854579042914e-37 },
HP.{ .val = 1.000000e-21, .off = 9.246254777210362522e-38 },
HP.{ .val = 1.000000e-22, .off = -4.859677432657087182e-39 },
HP.{ .val = 1.000000e-23, .off = 3.956530198510069291e-40 },
HP.{ .val = 1.000000e-24, .off = 7.629950044829717753e-41 },
HP.{ .val = 1.000000e-25, .off = -3.849486974919183692e-42 },
HP.{ .val = 1.000000e-26, .off = -3.849486974919184170e-43 },
HP.{ .val = 1.000000e-27, .off = -3.849486974919184070e-44 },
HP.{ .val = 1.000000e-28, .off = 2.876745653839937870e-45 },
HP.{ .val = 1.000000e-29, .off = 5.679342582489572168e-46 },
HP.{ .val = 1.000000e-30, .off = -8.333642060758598930e-47 },
HP.{ .val = 1.000000e-31, .off = -8.333642060758597958e-48 },
HP.{ .val = 1.000000e-32, .off = -5.596730997624190224e-49 },
HP.{ .val = 1.000000e-33, .off = -5.596730997624190604e-50 },
HP.{ .val = 1.000000e-34, .off = 7.232539610818348498e-51 },
HP.{ .val = 1.000000e-35, .off = -7.857545194582380514e-53 },
HP.{ .val = 1.000000e-36, .off = 5.896157255772251528e-53 },
HP.{ .val = 1.000000e-37, .off = -6.632427322784915796e-54 },
HP.{ .val = 1.000000e-38, .off = 3.808059826012723592e-55 },
HP.{ .val = 1.000000e-39, .off = 7.070712060011985131e-56 },
HP.{ .val = 1.000000e-40, .off = 7.070712060011985584e-57 },
HP.{ .val = 1.000000e-41, .off = -5.761291134237854167e-59 },
HP.{ .val = 1.000000e-42, .off = -3.762312935688689794e-59 },
HP.{ .val = 1.000000e-43, .off = -7.745042713519821150e-60 },
HP.{ .val = 1.000000e-44, .off = 4.700987842202462817e-61 },
HP.{ .val = 1.000000e-45, .off = 1.589480203271891964e-62 },
HP.{ .val = 1.000000e-46, .off = -2.299904345391321765e-63 },
HP.{ .val = 1.000000e-47, .off = 2.561826340437695261e-64 },
HP.{ .val = 1.000000e-48, .off = 2.561826340437695345e-65 },
HP.{ .val = 1.000000e-49, .off = 6.360053438741614633e-66 },
HP.{ .val = 1.000000e-50, .off = -7.616223705782342295e-68 },
HP.{ .val = 1.000000e-51, .off = -7.616223705782343324e-69 },
HP.{ .val = 1.000000e-52, .off = -7.616223705782342295e-70 },
HP.{ .val = 1.000000e-53, .off = -3.079876214757872338e-70 },
HP.{ .val = 1.000000e-54, .off = -3.079876214757872821e-71 },
HP.{ .val = 1.000000e-55, .off = 5.423954167728123147e-73 },
HP.{ .val = 1.000000e-56, .off = -3.985444122640543680e-73 },
HP.{ .val = 1.000000e-57, .off = 4.504255013759498850e-74 },
HP.{ .val = 1.000000e-58, .off = -2.570494266573869991e-75 },
HP.{ .val = 1.000000e-59, .off = -2.570494266573869930e-76 },
HP.{ .val = 1.000000e-60, .off = 2.956653608686574324e-77 },
HP.{ .val = 1.000000e-61, .off = -3.952281235388981376e-78 },
HP.{ .val = 1.000000e-62, .off = -3.952281235388981376e-79 },
HP.{ .val = 1.000000e-63, .off = -6.651083908855995172e-80 },
HP.{ .val = 1.000000e-64, .off = 3.469426116645307030e-81 },
HP.{ .val = 1.000000e-65, .off = 7.686305293937516319e-82 },
HP.{ .val = 1.000000e-66, .off = 2.415206322322254927e-83 },
HP.{ .val = 1.000000e-67, .off = 5.709643179581793251e-84 },
HP.{ .val = 1.000000e-68, .off = -6.644495035141475923e-85 },
HP.{ .val = 1.000000e-69, .off = 3.650620143794581913e-86 },
HP.{ .val = 1.000000e-70, .off = 4.333966503770636492e-88 },
HP.{ .val = 1.000000e-71, .off = 8.476455383920859113e-88 },
HP.{ .val = 1.000000e-72, .off = 3.449543675455986564e-89 },
HP.{ .val = 1.000000e-73, .off = 3.077238576654418974e-91 },
HP.{ .val = 1.000000e-74, .off = 4.234998629903623140e-91 },
HP.{ .val = 1.000000e-75, .off = 4.234998629903623412e-92 },
HP.{ .val = 1.000000e-76, .off = 7.303182045714702338e-93 },
HP.{ .val = 1.000000e-77, .off = 7.303182045714701699e-94 },
HP.{ .val = 1.000000e-78, .off = 1.121271649074855759e-96 },
HP.{ .val = 1.000000e-79, .off = 1.121271649074855863e-97 },
HP.{ .val = 1.000000e-80, .off = 3.857468248661243988e-97 },
HP.{ .val = 1.000000e-81, .off = 3.857468248661244248e-98 },
HP.{ .val = 1.000000e-82, .off = 3.857468248661244410e-99 },
HP.{ .val = 1.000000e-83, .off = -3.457651055545315679e-100 },
HP.{ .val = 1.000000e-84, .off = -3.457651055545315933e-101 },
HP.{ .val = 1.000000e-85, .off = 2.257285900866059216e-102 },
HP.{ .val = 1.000000e-86, .off = -8.458220892405268345e-103 },
HP.{ .val = 1.000000e-87, .off = -1.761029146610688867e-104 },
HP.{ .val = 1.000000e-88, .off = 6.610460535632536565e-105 },
HP.{ .val = 1.000000e-89, .off = -3.853901567171494935e-106 },
HP.{ .val = 1.000000e-90, .off = 5.062493089968513723e-108 },
HP.{ .val = 1.000000e-91, .off = -2.218844988608365240e-108 },
HP.{ .val = 1.000000e-92, .off = 1.187522883398155383e-109 },
HP.{ .val = 1.000000e-93, .off = 9.703442563414457296e-110 },
HP.{ .val = 1.000000e-94, .off = 4.380992763404268896e-111 },
HP.{ .val = 1.000000e-95, .off = 1.054461638397900823e-112 },
HP.{ .val = 1.000000e-96, .off = 9.370789450913819736e-113 },
HP.{ .val = 1.000000e-97, .off = -3.623472756142303998e-114 },
HP.{ .val = 1.000000e-98, .off = 6.122223899149788839e-115 },
HP.{ .val = 1.000000e-99, .off = -1.999189980260288281e-116 },
HP.{ .val = 1.000000e-100, .off = -1.999189980260288281e-117 },
HP.{ .val = 1.000000e-101, .off = -5.171617276904849634e-118 },
HP.{ .val = 1.000000e-102, .off = 6.724985085512256320e-119 },
HP.{ .val = 1.000000e-103, .off = 4.246526260008692213e-120 },
HP.{ .val = 1.000000e-104, .off = 7.344599791888147003e-121 },
HP.{ .val = 1.000000e-105, .off = 3.472007877038828407e-122 },
HP.{ .val = 1.000000e-106, .off = 5.892377823819652194e-123 },
HP.{ .val = 1.000000e-107, .off = -1.585470431324073925e-125 },
HP.{ .val = 1.000000e-108, .off = -3.940375084977444795e-125 },
HP.{ .val = 1.000000e-109, .off = 7.869099673288519908e-127 },
HP.{ .val = 1.000000e-110, .off = -5.122196348054018581e-127 },
HP.{ .val = 1.000000e-111, .off = -8.815387795168313713e-128 },
HP.{ .val = 1.000000e-112, .off = 5.034080131510290214e-129 },
HP.{ .val = 1.000000e-113, .off = 2.148774313452247863e-130 },
HP.{ .val = 1.000000e-114, .off = -5.064490231692858416e-131 },
HP.{ .val = 1.000000e-115, .off = -5.064490231692858166e-132 },
HP.{ .val = 1.000000e-116, .off = 5.708726942017560559e-134 },
HP.{ .val = 1.000000e-117, .off = -2.951229134482377772e-134 },
HP.{ .val = 1.000000e-118, .off = 1.451398151372789513e-135 },
HP.{ .val = 1.000000e-119, .off = -1.300243902286690040e-136 },
HP.{ .val = 1.000000e-120, .off = 2.139308664787659449e-137 },
HP.{ .val = 1.000000e-121, .off = 2.139308664787659329e-138 },
HP.{ .val = 1.000000e-122, .off = -5.922142664292847471e-139 },
HP.{ .val = 1.000000e-123, .off = -5.922142664292846912e-140 },
HP.{ .val = 1.000000e-124, .off = 6.673875037395443799e-141 },
HP.{ .val = 1.000000e-125, .off = -1.198636026159737932e-142 },
HP.{ .val = 1.000000e-126, .off = 5.361789860136246995e-143 },
HP.{ .val = 1.000000e-127, .off = -2.838742497733733936e-144 },
HP.{ .val = 1.000000e-128, .off = -5.401408859568103261e-145 },
HP.{ .val = 1.000000e-129, .off = 7.411922949603743011e-146 },
HP.{ .val = 1.000000e-130, .off = -8.604741811861064385e-147 },
HP.{ .val = 1.000000e-131, .off = 1.405673664054439890e-148 },
HP.{ .val = 1.000000e-132, .off = 1.405673664054439933e-149 },
HP.{ .val = 1.000000e-133, .off = -6.414963426504548053e-150 },
HP.{ .val = 1.000000e-134, .off = -3.971014335704864578e-151 },
HP.{ .val = 1.000000e-135, .off = -3.971014335704864748e-152 },
HP.{ .val = 1.000000e-136, .off = -1.523438813303585576e-154 },
HP.{ .val = 1.000000e-137, .off = 2.234325152653707766e-154 },
HP.{ .val = 1.000000e-138, .off = -6.715683724786540160e-155 },
HP.{ .val = 1.000000e-139, .off = -2.986513359186437306e-156 },
HP.{ .val = 1.000000e-140, .off = 1.674949597813692102e-157 },
HP.{ .val = 1.000000e-141, .off = -4.151879098436469092e-158 },
HP.{ .val = 1.000000e-142, .off = -4.151879098436469295e-159 },
HP.{ .val = 1.000000e-143, .off = 4.952540739454407825e-160 },
HP.{ .val = 1.000000e-144, .off = 4.952540739454407667e-161 },
HP.{ .val = 1.000000e-145, .off = 8.508954738630531443e-162 },
HP.{ .val = 1.000000e-146, .off = -2.604839008794855481e-163 },
HP.{ .val = 1.000000e-147, .off = 2.952057864917838382e-164 },
HP.{ .val = 1.000000e-148, .off = 6.425118410988271757e-165 },
HP.{ .val = 1.000000e-149, .off = 2.083792728400229858e-166 },
HP.{ .val = 1.000000e-150, .off = -6.295358232172964237e-168 },
HP.{ .val = 1.000000e-151, .off = 6.153785555826519421e-168 },
HP.{ .val = 1.000000e-152, .off = -6.564942029880634994e-169 },
HP.{ .val = 1.000000e-153, .off = -3.915207116191644540e-170 },
HP.{ .val = 1.000000e-154, .off = 2.709130168030831503e-171 },
HP.{ .val = 1.000000e-155, .off = -1.431080634608215966e-172 },
HP.{ .val = 1.000000e-156, .off = -4.018712386257620994e-173 },
HP.{ .val = 1.000000e-157, .off = 5.684906682427646782e-174 },
HP.{ .val = 1.000000e-158, .off = -6.444617153428937489e-175 },
HP.{ .val = 1.000000e-159, .off = 1.136335243981427681e-176 },
HP.{ .val = 1.000000e-160, .off = 1.136335243981427725e-177 },
HP.{ .val = 1.000000e-161, .off = -2.812077463003137395e-178 },
HP.{ .val = 1.000000e-162, .off = 4.591196362592922204e-179 },
HP.{ .val = 1.000000e-163, .off = 7.675893789924613703e-180 },
HP.{ .val = 1.000000e-164, .off = 3.820022005759999543e-181 },
HP.{ .val = 1.000000e-165, .off = -9.998177244457686588e-183 },
HP.{ .val = 1.000000e-166, .off = -4.012217555824373639e-183 },
HP.{ .val = 1.000000e-167, .off = -2.467177666011174334e-185 },
HP.{ .val = 1.000000e-168, .off = -4.953592503130188139e-185 },
HP.{ .val = 1.000000e-169, .off = -2.011795792799518887e-186 },
HP.{ .val = 1.000000e-170, .off = 1.665450095113817423e-187 },
HP.{ .val = 1.000000e-171, .off = 1.665450095113817487e-188 },
HP.{ .val = 1.000000e-172, .off = -4.080246604750770577e-189 },
HP.{ .val = 1.000000e-173, .off = -4.080246604750770677e-190 },
HP.{ .val = 1.000000e-174, .off = 4.085789420184387951e-192 },
HP.{ .val = 1.000000e-175, .off = 4.085789420184388146e-193 },
HP.{ .val = 1.000000e-176, .off = 4.085789420184388146e-194 },
HP.{ .val = 1.000000e-177, .off = 4.792197640035244894e-194 },
HP.{ .val = 1.000000e-178, .off = 4.792197640035244742e-195 },
HP.{ .val = 1.000000e-179, .off = -2.057206575616014662e-196 },
HP.{ .val = 1.000000e-180, .off = -2.057206575616014662e-197 },
HP.{ .val = 1.000000e-181, .off = -4.732755097354788053e-198 },
HP.{ .val = 1.000000e-182, .off = -4.732755097354787867e-199 },
HP.{ .val = 1.000000e-183, .off = -5.522105321379546765e-201 },
HP.{ .val = 1.000000e-184, .off = -5.777891238658996019e-201 },
HP.{ .val = 1.000000e-185, .off = 7.542096444923057046e-203 },
HP.{ .val = 1.000000e-186, .off = 8.919335748431433483e-203 },
HP.{ .val = 1.000000e-187, .off = -1.287071881492476028e-204 },
HP.{ .val = 1.000000e-188, .off = 5.091932887209967018e-205 },
HP.{ .val = 1.000000e-189, .off = -6.868701054107114024e-206 },
HP.{ .val = 1.000000e-190, .off = -1.885103578558330118e-207 },
HP.{ .val = 1.000000e-191, .off = -1.885103578558330205e-208 },
HP.{ .val = 1.000000e-192, .off = -9.671974634103305058e-209 },
HP.{ .val = 1.000000e-193, .off = -4.805180224387695640e-210 },
HP.{ .val = 1.000000e-194, .off = -1.763433718315439838e-211 },
HP.{ .val = 1.000000e-195, .off = -9.367799983496079132e-212 },
HP.{ .val = 1.000000e-196, .off = -4.615071067758179837e-213 },
HP.{ .val = 1.000000e-197, .off = 1.325840076914194777e-214 },
HP.{ .val = 1.000000e-198, .off = 8.751979007754662425e-215 },
HP.{ .val = 1.000000e-199, .off = 1.789973760091724198e-216 },
HP.{ .val = 1.000000e-200, .off = 1.789973760091724077e-217 },
HP.{ .val = 1.000000e-201, .off = 5.416018159916171171e-218 },
HP.{ .val = 1.000000e-202, .off = -3.649092839644947067e-219 },
HP.{ .val = 1.000000e-203, .off = -3.649092839644947067e-220 },
HP.{ .val = 1.000000e-204, .off = -1.080338554413850956e-222 },
HP.{ .val = 1.000000e-205, .off = -1.080338554413850841e-223 },
HP.{ .val = 1.000000e-206, .off = -2.874486186850417807e-223 },
HP.{ .val = 1.000000e-207, .off = 7.499710055933455072e-224 },
HP.{ .val = 1.000000e-208, .off = -9.790617015372999087e-225 },
HP.{ .val = 1.000000e-209, .off = -4.387389805589732612e-226 },
HP.{ .val = 1.000000e-210, .off = -4.387389805589732612e-227 },
HP.{ .val = 1.000000e-211, .off = -8.608661063232909897e-228 },
HP.{ .val = 1.000000e-212, .off = 4.582811616902018972e-229 },
HP.{ .val = 1.000000e-213, .off = 4.582811616902019155e-230 },
HP.{ .val = 1.000000e-214, .off = 8.705146829444184930e-231 },
HP.{ .val = 1.000000e-215, .off = -4.177150709750081830e-232 },
HP.{ .val = 1.000000e-216, .off = -4.177150709750082366e-233 },
HP.{ .val = 1.000000e-217, .off = -8.202868690748290237e-234 },
HP.{ .val = 1.000000e-218, .off = -3.170721214500530119e-235 },
HP.{ .val = 1.000000e-219, .off = -3.170721214500529857e-236 },
HP.{ .val = 1.000000e-220, .off = 7.606440013180328441e-238 },
HP.{ .val = 1.000000e-221, .off = -1.696459258568569049e-238 },
HP.{ .val = 1.000000e-222, .off = -4.767838333426821244e-239 },
HP.{ .val = 1.000000e-223, .off = 2.910609353718809138e-240 },
HP.{ .val = 1.000000e-224, .off = -1.888420450747209784e-241 },
HP.{ .val = 1.000000e-225, .off = 4.110366804835314035e-242 },
HP.{ .val = 1.000000e-226, .off = 7.859608839574391006e-243 },
HP.{ .val = 1.000000e-227, .off = 5.516332567862468419e-244 },
HP.{ .val = 1.000000e-228, .off = -3.270953451057244613e-245 },
HP.{ .val = 1.000000e-229, .off = -6.932322625607124670e-246 },
HP.{ .val = 1.000000e-230, .off = -4.643966891513449762e-247 },
HP.{ .val = 1.000000e-231, .off = 1.076922443720738305e-248 },
HP.{ .val = 1.000000e-232, .off = -2.498633390800628939e-249 },
HP.{ .val = 1.000000e-233, .off = 4.205533798926934891e-250 },
HP.{ .val = 1.000000e-234, .off = 4.205533798926934891e-251 },
HP.{ .val = 1.000000e-235, .off = 4.205533798926934697e-252 },
HP.{ .val = 1.000000e-236, .off = -4.523850562697497656e-253 },
HP.{ .val = 1.000000e-237, .off = 9.320146633177728298e-255 },
HP.{ .val = 1.000000e-238, .off = 9.320146633177728062e-256 },
HP.{ .val = 1.000000e-239, .off = -7.592774752331086440e-256 },
HP.{ .val = 1.000000e-240, .off = 3.063212017229987840e-257 },
HP.{ .val = 1.000000e-241, .off = 3.063212017229987562e-258 },
HP.{ .val = 1.000000e-242, .off = 3.063212017229987562e-259 },
HP.{ .val = 1.000000e-243, .off = 4.616527473176159842e-261 },
HP.{ .val = 1.000000e-244, .off = 6.965550922098544975e-261 },
HP.{ .val = 1.000000e-245, .off = 6.965550922098544749e-262 },
HP.{ .val = 1.000000e-246, .off = 4.424965697574744679e-263 },
HP.{ .val = 1.000000e-247, .off = -1.926497363734756420e-264 },
HP.{ .val = 1.000000e-248, .off = 2.043167049583681740e-265 },
HP.{ .val = 1.000000e-249, .off = -5.399953725388390154e-266 },
HP.{ .val = 1.000000e-250, .off = -5.399953725388389982e-267 },
HP.{ .val = 1.000000e-251, .off = -1.523328321757102663e-268 },
HP.{ .val = 1.000000e-252, .off = 5.745344310051561161e-269 },
HP.{ .val = 1.000000e-253, .off = -6.369110076296211879e-270 },
HP.{ .val = 1.000000e-254, .off = 8.773957906638504842e-271 },
HP.{ .val = 1.000000e-255, .off = -6.904595826956931908e-273 },
HP.{ .val = 1.000000e-256, .off = 2.267170882721243669e-273 },
HP.{ .val = 1.000000e-257, .off = 2.267170882721243669e-274 },
HP.{ .val = 1.000000e-258, .off = 4.577819683828225398e-275 },
HP.{ .val = 1.000000e-259, .off = -6.975424321706684210e-276 },
HP.{ .val = 1.000000e-260, .off = 3.855741933482293648e-277 },
HP.{ .val = 1.000000e-261, .off = 1.599248963651256552e-278 },
HP.{ .val = 1.000000e-262, .off = -1.221367248637539543e-279 },
HP.{ .val = 1.000000e-263, .off = -1.221367248637539494e-280 },
HP.{ .val = 1.000000e-264, .off = -1.221367248637539647e-281 },
HP.{ .val = 1.000000e-265, .off = 1.533140771175737943e-282 },
HP.{ .val = 1.000000e-266, .off = 1.533140771175737895e-283 },
HP.{ .val = 1.000000e-267, .off = 1.533140771175738074e-284 },
HP.{ .val = 1.000000e-268, .off = 4.223090009274641634e-285 },
HP.{ .val = 1.000000e-269, .off = 4.223090009274641634e-286 },
HP.{ .val = 1.000000e-270, .off = -4.183001359784432924e-287 },
HP.{ .val = 1.000000e-271, .off = 3.697709298708449474e-288 },
HP.{ .val = 1.000000e-272, .off = 6.981338739747150474e-289 },
HP.{ .val = 1.000000e-273, .off = -9.436808465446354751e-290 },
HP.{ .val = 1.000000e-274, .off = 3.389869038611071740e-291 },
HP.{ .val = 1.000000e-275, .off = 6.596538414625427829e-292 },
HP.{ .val = 1.000000e-276, .off = -9.436808465446354618e-293 },
HP.{ .val = 1.000000e-277, .off = 3.089243784609725523e-294 },
HP.{ .val = 1.000000e-278, .off = 6.220756847123745836e-295 },
HP.{ .val = 1.000000e-279, .off = -5.522417137303829470e-296 },
HP.{ .val = 1.000000e-280, .off = 4.263561183052483059e-297 },
HP.{ .val = 1.000000e-281, .off = -1.852675267170212272e-298 },
HP.{ .val = 1.000000e-282, .off = -1.852675267170212378e-299 },
HP.{ .val = 1.000000e-283, .off = 5.314789322934508480e-300 },
HP.{ .val = 1.000000e-284, .off = -3.644541414696392675e-301 },
HP.{ .val = 1.000000e-285, .off = -7.377595888709267777e-302 },
HP.{ .val = 1.000000e-286, .off = -5.044436842451220838e-303 },
HP.{ .val = 1.000000e-287, .off = -2.127988034628661760e-304 },
HP.{ .val = 1.000000e-288, .off = -5.773549044406860911e-305 },
HP.{ .val = 1.000000e-289, .off = -1.216597782184112068e-306 },
HP.{ .val = 1.000000e-290, .off = -6.912786859962547924e-307 },
HP.{ .val = 1.000000e-291, .off = 3.767567660872018813e-308 },
pub const lookup_table = []HP{
HP{ .val = 1.000000e+308, .off = -1.097906362944045488e+291 },
HP{ .val = 1.000000e+307, .off = 1.396894023974354241e+290 },
HP{ .val = 1.000000e+306, .off = -1.721606459673645508e+289 },
HP{ .val = 1.000000e+305, .off = 6.074644749446353973e+288 },
HP{ .val = 1.000000e+304, .off = 6.074644749446353567e+287 },
HP{ .val = 1.000000e+303, .off = -1.617650767864564452e+284 },
HP{ .val = 1.000000e+302, .off = -7.629703079084895055e+285 },
HP{ .val = 1.000000e+301, .off = -5.250476025520442286e+284 },
HP{ .val = 1.000000e+300, .off = -5.250476025520441956e+283 },
HP{ .val = 1.000000e+299, .off = -5.250476025520441750e+282 },
HP{ .val = 1.000000e+298, .off = 4.043379652465702264e+281 },
HP{ .val = 1.000000e+297, .off = -1.765280146275637946e+280 },
HP{ .val = 1.000000e+296, .off = 1.865132227937699609e+279 },
HP{ .val = 1.000000e+295, .off = 1.865132227937699609e+278 },
HP{ .val = 1.000000e+294, .off = -6.643646774124810287e+277 },
HP{ .val = 1.000000e+293, .off = 7.537651562646039934e+276 },
HP{ .val = 1.000000e+292, .off = -1.325659897835741608e+275 },
HP{ .val = 1.000000e+291, .off = 4.213909764965371606e+274 },
HP{ .val = 1.000000e+290, .off = -6.172783352786715670e+273 },
HP{ .val = 1.000000e+289, .off = -6.172783352786715670e+272 },
HP{ .val = 1.000000e+288, .off = -7.630473539575035471e+270 },
HP{ .val = 1.000000e+287, .off = -7.525217352494018700e+270 },
HP{ .val = 1.000000e+286, .off = -3.298861103408696612e+269 },
HP{ .val = 1.000000e+285, .off = 1.984084207947955778e+268 },
HP{ .val = 1.000000e+284, .off = -7.921438250845767591e+267 },
HP{ .val = 1.000000e+283, .off = 4.460464822646386735e+266 },
HP{ .val = 1.000000e+282, .off = -3.278224598286209647e+265 },
HP{ .val = 1.000000e+281, .off = -3.278224598286209737e+264 },
HP{ .val = 1.000000e+280, .off = -3.278224598286209961e+263 },
HP{ .val = 1.000000e+279, .off = -5.797329227496039232e+262 },
HP{ .val = 1.000000e+278, .off = 3.649313132040821498e+261 },
HP{ .val = 1.000000e+277, .off = -2.867878510995372374e+259 },
HP{ .val = 1.000000e+276, .off = -5.206914080024985409e+259 },
HP{ .val = 1.000000e+275, .off = 4.018322599210230404e+258 },
HP{ .val = 1.000000e+274, .off = 7.862171215558236495e+257 },
HP{ .val = 1.000000e+273, .off = 5.459765830340732821e+256 },
HP{ .val = 1.000000e+272, .off = -6.552261095746788047e+255 },
HP{ .val = 1.000000e+271, .off = 4.709014147460262298e+254 },
HP{ .val = 1.000000e+270, .off = -4.675381888545612729e+253 },
HP{ .val = 1.000000e+269, .off = -4.675381888545612892e+252 },
HP{ .val = 1.000000e+268, .off = 2.656177514583977380e+251 },
HP{ .val = 1.000000e+267, .off = 2.656177514583977190e+250 },
HP{ .val = 1.000000e+266, .off = -3.071603269111014892e+249 },
HP{ .val = 1.000000e+265, .off = -6.651466258920385440e+248 },
HP{ .val = 1.000000e+264, .off = -4.414051890289528972e+247 },
HP{ .val = 1.000000e+263, .off = -1.617283929500958387e+246 },
HP{ .val = 1.000000e+262, .off = -1.617283929500958241e+245 },
HP{ .val = 1.000000e+261, .off = 7.122615947963323868e+244 },
HP{ .val = 1.000000e+260, .off = -6.533477610574617382e+243 },
HP{ .val = 1.000000e+259, .off = 7.122615947963323982e+242 },
HP{ .val = 1.000000e+258, .off = -5.679971763165996225e+241 },
HP{ .val = 1.000000e+257, .off = -3.012765990014054219e+240 },
HP{ .val = 1.000000e+256, .off = -3.012765990014054219e+239 },
HP{ .val = 1.000000e+255, .off = 1.154743030535854616e+238 },
HP{ .val = 1.000000e+254, .off = 6.364129306223240767e+237 },
HP{ .val = 1.000000e+253, .off = 6.364129306223241129e+236 },
HP{ .val = 1.000000e+252, .off = -9.915202805299840595e+235 },
HP{ .val = 1.000000e+251, .off = -4.827911520448877980e+234 },
HP{ .val = 1.000000e+250, .off = 7.890316691678530146e+233 },
HP{ .val = 1.000000e+249, .off = 7.890316691678529484e+232 },
HP{ .val = 1.000000e+248, .off = -4.529828046727141859e+231 },
HP{ .val = 1.000000e+247, .off = 4.785280507077111924e+230 },
HP{ .val = 1.000000e+246, .off = -6.858605185178205305e+229 },
HP{ .val = 1.000000e+245, .off = -4.432795665958347728e+228 },
HP{ .val = 1.000000e+244, .off = -7.465057564983169531e+227 },
HP{ .val = 1.000000e+243, .off = -7.465057564983169741e+226 },
HP{ .val = 1.000000e+242, .off = -5.096102956370027445e+225 },
HP{ .val = 1.000000e+241, .off = -5.096102956370026952e+224 },
HP{ .val = 1.000000e+240, .off = -1.394611380411992474e+223 },
HP{ .val = 1.000000e+239, .off = 9.188208545617793960e+221 },
HP{ .val = 1.000000e+238, .off = -4.864759732872650359e+221 },
HP{ .val = 1.000000e+237, .off = 5.979453868566904629e+220 },
HP{ .val = 1.000000e+236, .off = -5.316601966265964857e+219 },
HP{ .val = 1.000000e+235, .off = -5.316601966265964701e+218 },
HP{ .val = 1.000000e+234, .off = -1.786584517880693123e+217 },
HP{ .val = 1.000000e+233, .off = 2.625937292600896716e+216 },
HP{ .val = 1.000000e+232, .off = -5.647541102052084079e+215 },
HP{ .val = 1.000000e+231, .off = -5.647541102052083888e+214 },
HP{ .val = 1.000000e+230, .off = -9.956644432600511943e+213 },
HP{ .val = 1.000000e+229, .off = 8.161138937705571862e+211 },
HP{ .val = 1.000000e+228, .off = 7.549087847752475275e+211 },
HP{ .val = 1.000000e+227, .off = -9.283347037202319948e+210 },
HP{ .val = 1.000000e+226, .off = 3.866992716668613820e+209 },
HP{ .val = 1.000000e+225, .off = 7.154577655136347262e+208 },
HP{ .val = 1.000000e+224, .off = 3.045096482051680688e+207 },
HP{ .val = 1.000000e+223, .off = -4.660180717482069567e+206 },
HP{ .val = 1.000000e+222, .off = -4.660180717482070101e+205 },
HP{ .val = 1.000000e+221, .off = -4.660180717482069544e+204 },
HP{ .val = 1.000000e+220, .off = 3.562757926310489022e+202 },
HP{ .val = 1.000000e+219, .off = 3.491561111451748149e+202 },
HP{ .val = 1.000000e+218, .off = -8.265758834125874135e+201 },
HP{ .val = 1.000000e+217, .off = 3.981449442517482365e+200 },
HP{ .val = 1.000000e+216, .off = -2.142154695804195936e+199 },
HP{ .val = 1.000000e+215, .off = 9.339603063548950188e+198 },
HP{ .val = 1.000000e+214, .off = 4.555537330485139746e+197 },
HP{ .val = 1.000000e+213, .off = 1.565496247320257804e+196 },
HP{ .val = 1.000000e+212, .off = 9.040598955232462036e+195 },
HP{ .val = 1.000000e+211, .off = 4.368659762787334780e+194 },
HP{ .val = 1.000000e+210, .off = 7.288621758065539072e+193 },
HP{ .val = 1.000000e+209, .off = -7.311188218325485628e+192 },
HP{ .val = 1.000000e+208, .off = 1.813693016918905189e+191 },
HP{ .val = 1.000000e+207, .off = -3.889357755108838992e+190 },
HP{ .val = 1.000000e+206, .off = -3.889357755108838992e+189 },
HP{ .val = 1.000000e+205, .off = -1.661603547285501360e+188 },
HP{ .val = 1.000000e+204, .off = 1.123089212493670643e+187 },
HP{ .val = 1.000000e+203, .off = 1.123089212493670643e+186 },
HP{ .val = 1.000000e+202, .off = 9.825254086803583029e+185 },
HP{ .val = 1.000000e+201, .off = -3.771878529305654999e+184 },
HP{ .val = 1.000000e+200, .off = 3.026687778748963675e+183 },
HP{ .val = 1.000000e+199, .off = -9.720624048853446693e+182 },
HP{ .val = 1.000000e+198, .off = -1.753554156601940139e+181 },
HP{ .val = 1.000000e+197, .off = 4.885670753607648963e+180 },
HP{ .val = 1.000000e+196, .off = 4.885670753607648963e+179 },
HP{ .val = 1.000000e+195, .off = 2.292223523057028076e+178 },
HP{ .val = 1.000000e+194, .off = 5.534032561245303825e+177 },
HP{ .val = 1.000000e+193, .off = -6.622751331960730683e+176 },
HP{ .val = 1.000000e+192, .off = -4.090088020876139692e+175 },
HP{ .val = 1.000000e+191, .off = -7.255917159731877552e+174 },
HP{ .val = 1.000000e+190, .off = -7.255917159731877992e+173 },
HP{ .val = 1.000000e+189, .off = -2.309309130269787104e+172 },
HP{ .val = 1.000000e+188, .off = -2.309309130269787019e+171 },
HP{ .val = 1.000000e+187, .off = 9.284303438781988230e+170 },
HP{ .val = 1.000000e+186, .off = 2.038295583124628364e+169 },
HP{ .val = 1.000000e+185, .off = 2.038295583124628532e+168 },
HP{ .val = 1.000000e+184, .off = -1.735666841696912925e+167 },
HP{ .val = 1.000000e+183, .off = 5.340512704843477241e+166 },
HP{ .val = 1.000000e+182, .off = -6.453119872723839321e+165 },
HP{ .val = 1.000000e+181, .off = 8.288920849235306587e+164 },
HP{ .val = 1.000000e+180, .off = -9.248546019891598293e+162 },
HP{ .val = 1.000000e+179, .off = 1.954450226518486016e+162 },
HP{ .val = 1.000000e+178, .off = -5.243811844750628197e+161 },
HP{ .val = 1.000000e+177, .off = -7.448980502074320639e+159 },
HP{ .val = 1.000000e+176, .off = -7.448980502074319858e+158 },
HP{ .val = 1.000000e+175, .off = 6.284654753766312753e+158 },
HP{ .val = 1.000000e+174, .off = -6.895756753684458388e+157 },
HP{ .val = 1.000000e+173, .off = -1.403918625579970616e+156 },
HP{ .val = 1.000000e+172, .off = -8.268716285710580522e+155 },
HP{ .val = 1.000000e+171, .off = 4.602779327034313170e+154 },
HP{ .val = 1.000000e+170, .off = -3.441905430931244940e+153 },
HP{ .val = 1.000000e+169, .off = 6.613950516525702884e+152 },
HP{ .val = 1.000000e+168, .off = 6.613950516525702652e+151 },
HP{ .val = 1.000000e+167, .off = -3.860899428741951187e+150 },
HP{ .val = 1.000000e+166, .off = 5.959272394946474605e+149 },
HP{ .val = 1.000000e+165, .off = 1.005101065481665103e+149 },
HP{ .val = 1.000000e+164, .off = -1.783349948587918355e+146 },
HP{ .val = 1.000000e+163, .off = 6.215006036188360099e+146 },
HP{ .val = 1.000000e+162, .off = 6.215006036188360099e+145 },
HP{ .val = 1.000000e+161, .off = -3.774589324822814903e+144 },
HP{ .val = 1.000000e+160, .off = -6.528407745068226929e+142 },
HP{ .val = 1.000000e+159, .off = 7.151530601283157561e+142 },
HP{ .val = 1.000000e+158, .off = 4.712664546348788765e+141 },
HP{ .val = 1.000000e+157, .off = 1.664081977680827856e+140 },
HP{ .val = 1.000000e+156, .off = 1.664081977680827750e+139 },
HP{ .val = 1.000000e+155, .off = -7.176231540910168265e+137 },
HP{ .val = 1.000000e+154, .off = -3.694754568805822650e+137 },
HP{ .val = 1.000000e+153, .off = 2.665969958768462622e+134 },
HP{ .val = 1.000000e+152, .off = -4.625108135904199522e+135 },
HP{ .val = 1.000000e+151, .off = -1.717753238721771919e+134 },
HP{ .val = 1.000000e+150, .off = 1.916440382756262433e+133 },
HP{ .val = 1.000000e+149, .off = -4.897672657515052040e+132 },
HP{ .val = 1.000000e+148, .off = -4.897672657515052198e+131 },
HP{ .val = 1.000000e+147, .off = 2.200361759434233991e+130 },
HP{ .val = 1.000000e+146, .off = 6.636633270027537273e+129 },
HP{ .val = 1.000000e+145, .off = 1.091293881785907977e+128 },
HP{ .val = 1.000000e+144, .off = -2.374543235865110597e+127 },
HP{ .val = 1.000000e+143, .off = -2.374543235865110537e+126 },
HP{ .val = 1.000000e+142, .off = -5.082228484029969099e+125 },
HP{ .val = 1.000000e+141, .off = -1.697621923823895943e+124 },
HP{ .val = 1.000000e+140, .off = -5.928380124081487212e+123 },
HP{ .val = 1.000000e+139, .off = -3.284156248920492522e+122 },
HP{ .val = 1.000000e+138, .off = -3.284156248920492706e+121 },
HP{ .val = 1.000000e+137, .off = -3.284156248920492476e+120 },
HP{ .val = 1.000000e+136, .off = -5.866406127007401066e+119 },
HP{ .val = 1.000000e+135, .off = 3.817030915818506056e+118 },
HP{ .val = 1.000000e+134, .off = 7.851796350329300951e+117 },
HP{ .val = 1.000000e+133, .off = -2.235117235947686077e+116 },
HP{ .val = 1.000000e+132, .off = 9.170432597638723691e+114 },
HP{ .val = 1.000000e+131, .off = 8.797444499042767883e+114 },
HP{ .val = 1.000000e+130, .off = -5.978307824605161274e+113 },
HP{ .val = 1.000000e+129, .off = 1.782556435814758516e+111 },
HP{ .val = 1.000000e+128, .off = -7.517448691651820362e+111 },
HP{ .val = 1.000000e+127, .off = 4.507089332150205498e+110 },
HP{ .val = 1.000000e+126, .off = 7.513223838100711695e+109 },
HP{ .val = 1.000000e+125, .off = 7.513223838100712113e+108 },
HP{ .val = 1.000000e+124, .off = 5.164681255326878494e+107 },
HP{ .val = 1.000000e+123, .off = 2.229003026859587122e+106 },
HP{ .val = 1.000000e+122, .off = -1.440594758724527399e+105 },
HP{ .val = 1.000000e+121, .off = -3.734093374714598783e+104 },
HP{ .val = 1.000000e+120, .off = 1.999653165260579757e+103 },
HP{ .val = 1.000000e+119, .off = 5.583244752745066693e+102 },
HP{ .val = 1.000000e+118, .off = 3.343500010567262234e+101 },
HP{ .val = 1.000000e+117, .off = -5.055542772599503556e+100 },
HP{ .val = 1.000000e+116, .off = -1.555941612946684331e+99 },
HP{ .val = 1.000000e+115, .off = -1.555941612946684331e+98 },
HP{ .val = 1.000000e+114, .off = -1.555941612946684293e+97 },
HP{ .val = 1.000000e+113, .off = -1.555941612946684246e+96 },
HP{ .val = 1.000000e+112, .off = 6.988006530736955847e+95 },
HP{ .val = 1.000000e+111, .off = 4.318022735835818244e+94 },
HP{ .val = 1.000000e+110, .off = -2.356936751417025578e+93 },
HP{ .val = 1.000000e+109, .off = 1.814912928116001926e+92 },
HP{ .val = 1.000000e+108, .off = -3.399899171300282744e+91 },
HP{ .val = 1.000000e+107, .off = 3.118615952970072913e+90 },
HP{ .val = 1.000000e+106, .off = -9.103599905036843605e+89 },
HP{ .val = 1.000000e+105, .off = 6.174169917471802325e+88 },
HP{ .val = 1.000000e+104, .off = -1.915675085734668657e+86 },
HP{ .val = 1.000000e+103, .off = -1.915675085734668864e+85 },
HP{ .val = 1.000000e+102, .off = 2.295048673475466221e+85 },
HP{ .val = 1.000000e+101, .off = 2.295048673475466135e+84 },
HP{ .val = 1.000000e+100, .off = -1.590289110975991792e+83 },
HP{ .val = 1.000000e+99, .off = 3.266383119588331155e+82 },
HP{ .val = 1.000000e+98, .off = 2.309629754856292029e+80 },
HP{ .val = 1.000000e+97, .off = -7.357587384771124533e+80 },
HP{ .val = 1.000000e+96, .off = -4.986165397190889509e+79 },
HP{ .val = 1.000000e+95, .off = -2.021887912715594741e+78 },
HP{ .val = 1.000000e+94, .off = -2.021887912715594638e+77 },
HP{ .val = 1.000000e+93, .off = -4.337729697461918675e+76 },
HP{ .val = 1.000000e+92, .off = -4.337729697461918997e+75 },
HP{ .val = 1.000000e+91, .off = -7.956232486128049702e+74 },
HP{ .val = 1.000000e+90, .off = 3.351588728453609882e+73 },
HP{ .val = 1.000000e+89, .off = 5.246334248081951113e+71 },
HP{ .val = 1.000000e+88, .off = 4.058327554364963672e+71 },
HP{ .val = 1.000000e+87, .off = 4.058327554364963918e+70 },
HP{ .val = 1.000000e+86, .off = -1.463069523067487266e+69 },
HP{ .val = 1.000000e+85, .off = -1.463069523067487314e+68 },
HP{ .val = 1.000000e+84, .off = -5.776660989811589441e+67 },
HP{ .val = 1.000000e+83, .off = -3.080666323096525761e+66 },
HP{ .val = 1.000000e+82, .off = 3.659320343691134468e+65 },
HP{ .val = 1.000000e+81, .off = 7.871812010433421235e+64 },
HP{ .val = 1.000000e+80, .off = -2.660986470836727449e+61 },
HP{ .val = 1.000000e+79, .off = 3.264399249934044627e+62 },
HP{ .val = 1.000000e+78, .off = -8.493621433689703070e+60 },
HP{ .val = 1.000000e+77, .off = 1.721738727445414063e+60 },
HP{ .val = 1.000000e+76, .off = -4.706013449590547218e+59 },
HP{ .val = 1.000000e+75, .off = 7.346021882351880518e+58 },
HP{ .val = 1.000000e+74, .off = 4.835181188197207515e+57 },
HP{ .val = 1.000000e+73, .off = 1.696630320503867482e+56 },
HP{ .val = 1.000000e+72, .off = 5.619818905120542959e+55 },
HP{ .val = 1.000000e+71, .off = -4.188152556421145598e+54 },
HP{ .val = 1.000000e+70, .off = -7.253143638152923145e+53 },
HP{ .val = 1.000000e+69, .off = -7.253143638152923145e+52 },
HP{ .val = 1.000000e+68, .off = 4.719477774861832896e+51 },
HP{ .val = 1.000000e+67, .off = 1.726322421608144052e+50 },
HP{ .val = 1.000000e+66, .off = 5.467766613175255107e+49 },
HP{ .val = 1.000000e+65, .off = 7.909613737163661911e+47 },
HP{ .val = 1.000000e+64, .off = -2.132041900945439564e+47 },
HP{ .val = 1.000000e+63, .off = -5.785795994272697265e+46 },
HP{ .val = 1.000000e+62, .off = -3.502199685943161329e+45 },
HP{ .val = 1.000000e+61, .off = 5.061286470292598274e+44 },
HP{ .val = 1.000000e+60, .off = 5.061286470292598472e+43 },
HP{ .val = 1.000000e+59, .off = 2.831211950439536034e+42 },
HP{ .val = 1.000000e+58, .off = 5.618805100255863927e+41 },
HP{ .val = 1.000000e+57, .off = -4.834669211555366251e+40 },
HP{ .val = 1.000000e+56, .off = -9.190283508143378583e+39 },
HP{ .val = 1.000000e+55, .off = -1.023506702040855158e+38 },
HP{ .val = 1.000000e+54, .off = -7.829154040459624616e+37 },
HP{ .val = 1.000000e+53, .off = 6.779051325638372659e+35 },
HP{ .val = 1.000000e+52, .off = 6.779051325638372290e+34 },
HP{ .val = 1.000000e+51, .off = 6.779051325638371598e+33 },
HP{ .val = 1.000000e+50, .off = -7.629769841091887392e+33 },
HP{ .val = 1.000000e+49, .off = 5.350972305245182400e+32 },
HP{ .val = 1.000000e+48, .off = -4.384584304507619764e+31 },
HP{ .val = 1.000000e+47, .off = -4.384584304507619876e+30 },
HP{ .val = 1.000000e+46, .off = 6.860180964052978705e+28 },
HP{ .val = 1.000000e+45, .off = 7.024271097546444878e+28 },
HP{ .val = 1.000000e+44, .off = -8.821361405306422641e+27 },
HP{ .val = 1.000000e+43, .off = -1.393721169594140991e+26 },
HP{ .val = 1.000000e+42, .off = -4.488571267807591679e+25 },
HP{ .val = 1.000000e+41, .off = -6.200086450407783195e+23 },
HP{ .val = 1.000000e+40, .off = -3.037860284270036669e+23 },
HP{ .val = 1.000000e+39, .off = 6.029083362839682141e+22 },
HP{ .val = 1.000000e+38, .off = 2.251190176543965970e+21 },
HP{ .val = 1.000000e+37, .off = 4.612373417978788577e+20 },
HP{ .val = 1.000000e+36, .off = -4.242063737401796198e+19 },
HP{ .val = 1.000000e+35, .off = 3.136633892082024448e+18 },
HP{ .val = 1.000000e+34, .off = 5.442476901295718400e+17 },
HP{ .val = 1.000000e+33, .off = 5.442476901295718400e+16 },
HP{ .val = 1.000000e+32, .off = -5.366162204393472000e+15 },
HP{ .val = 1.000000e+31, .off = 3.641037050347520000e+14 },
HP{ .val = 1.000000e+30, .off = -1.988462483865600000e+13 },
HP{ .val = 1.000000e+29, .off = 8.566849142784000000e+12 },
HP{ .val = 1.000000e+28, .off = 4.168802631680000000e+11 },
HP{ .val = 1.000000e+27, .off = -1.328755507200000000e+10 },
HP{ .val = 1.000000e+26, .off = -4.764729344000000000e+09 },
HP{ .val = 1.000000e+25, .off = -9.059696640000000000e+08 },
HP{ .val = 1.000000e+24, .off = 1.677721600000000000e+07 },
HP{ .val = 1.000000e+23, .off = 8.388608000000000000e+06 },
HP{ .val = 1.000000e+22, .off = 0.000000000000000000e+00 },
HP{ .val = 1.000000e+21, .off = 0.000000000000000000e+00 },
HP{ .val = 1.000000e+20, .off = 0.000000000000000000e+00 },
HP{ .val = 1.000000e+19, .off = 0.000000000000000000e+00 },
HP{ .val = 1.000000e+18, .off = 0.000000000000000000e+00 },
HP{ .val = 1.000000e+17, .off = 0.000000000000000000e+00 },
HP{ .val = 1.000000e+16, .off = 0.000000000000000000e+00 },
HP{ .val = 1.000000e+15, .off = 0.000000000000000000e+00 },
HP{ .val = 1.000000e+14, .off = 0.000000000000000000e+00 },
HP{ .val = 1.000000e+13, .off = 0.000000000000000000e+00 },
HP{ .val = 1.000000e+12, .off = 0.000000000000000000e+00 },
HP{ .val = 1.000000e+11, .off = 0.000000000000000000e+00 },
HP{ .val = 1.000000e+10, .off = 0.000000000000000000e+00 },
HP{ .val = 1.000000e+09, .off = 0.000000000000000000e+00 },
HP{ .val = 1.000000e+08, .off = 0.000000000000000000e+00 },
HP{ .val = 1.000000e+07, .off = 0.000000000000000000e+00 },
HP{ .val = 1.000000e+06, .off = 0.000000000000000000e+00 },
HP{ .val = 1.000000e+05, .off = 0.000000000000000000e+00 },
HP{ .val = 1.000000e+04, .off = 0.000000000000000000e+00 },
HP{ .val = 1.000000e+03, .off = 0.000000000000000000e+00 },
HP{ .val = 1.000000e+02, .off = 0.000000000000000000e+00 },
HP{ .val = 1.000000e+01, .off = 0.000000000000000000e+00 },
HP{ .val = 1.000000e+00, .off = 0.000000000000000000e+00 },
HP{ .val = 1.000000e-01, .off = -5.551115123125783010e-18 },
HP{ .val = 1.000000e-02, .off = -2.081668171172168436e-19 },
HP{ .val = 1.000000e-03, .off = -2.081668171172168557e-20 },
HP{ .val = 1.000000e-04, .off = -4.792173602385929943e-21 },
HP{ .val = 1.000000e-05, .off = -8.180305391403130547e-22 },
HP{ .val = 1.000000e-06, .off = 4.525188817411374069e-23 },
HP{ .val = 1.000000e-07, .off = 4.525188817411373922e-24 },
HP{ .val = 1.000000e-08, .off = -2.092256083012847109e-25 },
HP{ .val = 1.000000e-09, .off = -6.228159145777985254e-26 },
HP{ .val = 1.000000e-10, .off = -3.643219731549774344e-27 },
HP{ .val = 1.000000e-11, .off = 6.050303071806019080e-28 },
HP{ .val = 1.000000e-12, .off = 2.011335237074438524e-29 },
HP{ .val = 1.000000e-13, .off = -3.037374556340037101e-30 },
HP{ .val = 1.000000e-14, .off = 1.180690645440101289e-32 },
HP{ .val = 1.000000e-15, .off = -7.770539987666107583e-32 },
HP{ .val = 1.000000e-16, .off = 2.090221327596539779e-33 },
HP{ .val = 1.000000e-17, .off = -7.154242405462192144e-34 },
HP{ .val = 1.000000e-18, .off = -7.154242405462192572e-35 },
HP{ .val = 1.000000e-19, .off = 2.475407316473986894e-36 },
HP{ .val = 1.000000e-20, .off = 5.484672854579042914e-37 },
HP{ .val = 1.000000e-21, .off = 9.246254777210362522e-38 },
HP{ .val = 1.000000e-22, .off = -4.859677432657087182e-39 },
HP{ .val = 1.000000e-23, .off = 3.956530198510069291e-40 },
HP{ .val = 1.000000e-24, .off = 7.629950044829717753e-41 },
HP{ .val = 1.000000e-25, .off = -3.849486974919183692e-42 },
HP{ .val = 1.000000e-26, .off = -3.849486974919184170e-43 },
HP{ .val = 1.000000e-27, .off = -3.849486974919184070e-44 },
HP{ .val = 1.000000e-28, .off = 2.876745653839937870e-45 },
HP{ .val = 1.000000e-29, .off = 5.679342582489572168e-46 },
HP{ .val = 1.000000e-30, .off = -8.333642060758598930e-47 },
HP{ .val = 1.000000e-31, .off = -8.333642060758597958e-48 },
HP{ .val = 1.000000e-32, .off = -5.596730997624190224e-49 },
HP{ .val = 1.000000e-33, .off = -5.596730997624190604e-50 },
HP{ .val = 1.000000e-34, .off = 7.232539610818348498e-51 },
HP{ .val = 1.000000e-35, .off = -7.857545194582380514e-53 },
HP{ .val = 1.000000e-36, .off = 5.896157255772251528e-53 },
HP{ .val = 1.000000e-37, .off = -6.632427322784915796e-54 },
HP{ .val = 1.000000e-38, .off = 3.808059826012723592e-55 },
HP{ .val = 1.000000e-39, .off = 7.070712060011985131e-56 },
HP{ .val = 1.000000e-40, .off = 7.070712060011985584e-57 },
HP{ .val = 1.000000e-41, .off = -5.761291134237854167e-59 },
HP{ .val = 1.000000e-42, .off = -3.762312935688689794e-59 },
HP{ .val = 1.000000e-43, .off = -7.745042713519821150e-60 },
HP{ .val = 1.000000e-44, .off = 4.700987842202462817e-61 },
HP{ .val = 1.000000e-45, .off = 1.589480203271891964e-62 },
HP{ .val = 1.000000e-46, .off = -2.299904345391321765e-63 },
HP{ .val = 1.000000e-47, .off = 2.561826340437695261e-64 },
HP{ .val = 1.000000e-48, .off = 2.561826340437695345e-65 },
HP{ .val = 1.000000e-49, .off = 6.360053438741614633e-66 },
HP{ .val = 1.000000e-50, .off = -7.616223705782342295e-68 },
HP{ .val = 1.000000e-51, .off = -7.616223705782343324e-69 },
HP{ .val = 1.000000e-52, .off = -7.616223705782342295e-70 },
HP{ .val = 1.000000e-53, .off = -3.079876214757872338e-70 },
HP{ .val = 1.000000e-54, .off = -3.079876214757872821e-71 },
HP{ .val = 1.000000e-55, .off = 5.423954167728123147e-73 },
HP{ .val = 1.000000e-56, .off = -3.985444122640543680e-73 },
HP{ .val = 1.000000e-57, .off = 4.504255013759498850e-74 },
HP{ .val = 1.000000e-58, .off = -2.570494266573869991e-75 },
HP{ .val = 1.000000e-59, .off = -2.570494266573869930e-76 },
HP{ .val = 1.000000e-60, .off = 2.956653608686574324e-77 },
HP{ .val = 1.000000e-61, .off = -3.952281235388981376e-78 },
HP{ .val = 1.000000e-62, .off = -3.952281235388981376e-79 },
HP{ .val = 1.000000e-63, .off = -6.651083908855995172e-80 },
HP{ .val = 1.000000e-64, .off = 3.469426116645307030e-81 },
HP{ .val = 1.000000e-65, .off = 7.686305293937516319e-82 },
HP{ .val = 1.000000e-66, .off = 2.415206322322254927e-83 },
HP{ .val = 1.000000e-67, .off = 5.709643179581793251e-84 },
HP{ .val = 1.000000e-68, .off = -6.644495035141475923e-85 },
HP{ .val = 1.000000e-69, .off = 3.650620143794581913e-86 },
HP{ .val = 1.000000e-70, .off = 4.333966503770636492e-88 },
HP{ .val = 1.000000e-71, .off = 8.476455383920859113e-88 },
HP{ .val = 1.000000e-72, .off = 3.449543675455986564e-89 },
HP{ .val = 1.000000e-73, .off = 3.077238576654418974e-91 },
HP{ .val = 1.000000e-74, .off = 4.234998629903623140e-91 },
HP{ .val = 1.000000e-75, .off = 4.234998629903623412e-92 },
HP{ .val = 1.000000e-76, .off = 7.303182045714702338e-93 },
HP{ .val = 1.000000e-77, .off = 7.303182045714701699e-94 },
HP{ .val = 1.000000e-78, .off = 1.121271649074855759e-96 },
HP{ .val = 1.000000e-79, .off = 1.121271649074855863e-97 },
HP{ .val = 1.000000e-80, .off = 3.857468248661243988e-97 },
HP{ .val = 1.000000e-81, .off = 3.857468248661244248e-98 },
HP{ .val = 1.000000e-82, .off = 3.857468248661244410e-99 },
HP{ .val = 1.000000e-83, .off = -3.457651055545315679e-100 },
HP{ .val = 1.000000e-84, .off = -3.457651055545315933e-101 },
HP{ .val = 1.000000e-85, .off = 2.257285900866059216e-102 },
HP{ .val = 1.000000e-86, .off = -8.458220892405268345e-103 },
HP{ .val = 1.000000e-87, .off = -1.761029146610688867e-104 },
HP{ .val = 1.000000e-88, .off = 6.610460535632536565e-105 },
HP{ .val = 1.000000e-89, .off = -3.853901567171494935e-106 },
HP{ .val = 1.000000e-90, .off = 5.062493089968513723e-108 },
HP{ .val = 1.000000e-91, .off = -2.218844988608365240e-108 },
HP{ .val = 1.000000e-92, .off = 1.187522883398155383e-109 },
HP{ .val = 1.000000e-93, .off = 9.703442563414457296e-110 },
HP{ .val = 1.000000e-94, .off = 4.380992763404268896e-111 },
HP{ .val = 1.000000e-95, .off = 1.054461638397900823e-112 },
HP{ .val = 1.000000e-96, .off = 9.370789450913819736e-113 },
HP{ .val = 1.000000e-97, .off = -3.623472756142303998e-114 },
HP{ .val = 1.000000e-98, .off = 6.122223899149788839e-115 },
HP{ .val = 1.000000e-99, .off = -1.999189980260288281e-116 },
HP{ .val = 1.000000e-100, .off = -1.999189980260288281e-117 },
HP{ .val = 1.000000e-101, .off = -5.171617276904849634e-118 },
HP{ .val = 1.000000e-102, .off = 6.724985085512256320e-119 },
HP{ .val = 1.000000e-103, .off = 4.246526260008692213e-120 },
HP{ .val = 1.000000e-104, .off = 7.344599791888147003e-121 },
HP{ .val = 1.000000e-105, .off = 3.472007877038828407e-122 },
HP{ .val = 1.000000e-106, .off = 5.892377823819652194e-123 },
HP{ .val = 1.000000e-107, .off = -1.585470431324073925e-125 },
HP{ .val = 1.000000e-108, .off = -3.940375084977444795e-125 },
HP{ .val = 1.000000e-109, .off = 7.869099673288519908e-127 },
HP{ .val = 1.000000e-110, .off = -5.122196348054018581e-127 },
HP{ .val = 1.000000e-111, .off = -8.815387795168313713e-128 },
HP{ .val = 1.000000e-112, .off = 5.034080131510290214e-129 },
HP{ .val = 1.000000e-113, .off = 2.148774313452247863e-130 },
HP{ .val = 1.000000e-114, .off = -5.064490231692858416e-131 },
HP{ .val = 1.000000e-115, .off = -5.064490231692858166e-132 },
HP{ .val = 1.000000e-116, .off = 5.708726942017560559e-134 },
HP{ .val = 1.000000e-117, .off = -2.951229134482377772e-134 },
HP{ .val = 1.000000e-118, .off = 1.451398151372789513e-135 },
HP{ .val = 1.000000e-119, .off = -1.300243902286690040e-136 },
HP{ .val = 1.000000e-120, .off = 2.139308664787659449e-137 },
HP{ .val = 1.000000e-121, .off = 2.139308664787659329e-138 },
HP{ .val = 1.000000e-122, .off = -5.922142664292847471e-139 },
HP{ .val = 1.000000e-123, .off = -5.922142664292846912e-140 },
HP{ .val = 1.000000e-124, .off = 6.673875037395443799e-141 },
HP{ .val = 1.000000e-125, .off = -1.198636026159737932e-142 },
HP{ .val = 1.000000e-126, .off = 5.361789860136246995e-143 },
HP{ .val = 1.000000e-127, .off = -2.838742497733733936e-144 },
HP{ .val = 1.000000e-128, .off = -5.401408859568103261e-145 },
HP{ .val = 1.000000e-129, .off = 7.411922949603743011e-146 },
HP{ .val = 1.000000e-130, .off = -8.604741811861064385e-147 },
HP{ .val = 1.000000e-131, .off = 1.405673664054439890e-148 },
HP{ .val = 1.000000e-132, .off = 1.405673664054439933e-149 },
HP{ .val = 1.000000e-133, .off = -6.414963426504548053e-150 },
HP{ .val = 1.000000e-134, .off = -3.971014335704864578e-151 },
HP{ .val = 1.000000e-135, .off = -3.971014335704864748e-152 },
HP{ .val = 1.000000e-136, .off = -1.523438813303585576e-154 },
HP{ .val = 1.000000e-137, .off = 2.234325152653707766e-154 },
HP{ .val = 1.000000e-138, .off = -6.715683724786540160e-155 },
HP{ .val = 1.000000e-139, .off = -2.986513359186437306e-156 },
HP{ .val = 1.000000e-140, .off = 1.674949597813692102e-157 },
HP{ .val = 1.000000e-141, .off = -4.151879098436469092e-158 },
HP{ .val = 1.000000e-142, .off = -4.151879098436469295e-159 },
HP{ .val = 1.000000e-143, .off = 4.952540739454407825e-160 },
HP{ .val = 1.000000e-144, .off = 4.952540739454407667e-161 },
HP{ .val = 1.000000e-145, .off = 8.508954738630531443e-162 },
HP{ .val = 1.000000e-146, .off = -2.604839008794855481e-163 },
HP{ .val = 1.000000e-147, .off = 2.952057864917838382e-164 },
HP{ .val = 1.000000e-148, .off = 6.425118410988271757e-165 },
HP{ .val = 1.000000e-149, .off = 2.083792728400229858e-166 },
HP{ .val = 1.000000e-150, .off = -6.295358232172964237e-168 },
HP{ .val = 1.000000e-151, .off = 6.153785555826519421e-168 },
HP{ .val = 1.000000e-152, .off = -6.564942029880634994e-169 },
HP{ .val = 1.000000e-153, .off = -3.915207116191644540e-170 },
HP{ .val = 1.000000e-154, .off = 2.709130168030831503e-171 },
HP{ .val = 1.000000e-155, .off = -1.431080634608215966e-172 },
HP{ .val = 1.000000e-156, .off = -4.018712386257620994e-173 },
HP{ .val = 1.000000e-157, .off = 5.684906682427646782e-174 },
HP{ .val = 1.000000e-158, .off = -6.444617153428937489e-175 },
HP{ .val = 1.000000e-159, .off = 1.136335243981427681e-176 },
HP{ .val = 1.000000e-160, .off = 1.136335243981427725e-177 },
HP{ .val = 1.000000e-161, .off = -2.812077463003137395e-178 },
HP{ .val = 1.000000e-162, .off = 4.591196362592922204e-179 },
HP{ .val = 1.000000e-163, .off = 7.675893789924613703e-180 },
HP{ .val = 1.000000e-164, .off = 3.820022005759999543e-181 },
HP{ .val = 1.000000e-165, .off = -9.998177244457686588e-183 },
HP{ .val = 1.000000e-166, .off = -4.012217555824373639e-183 },
HP{ .val = 1.000000e-167, .off = -2.467177666011174334e-185 },
HP{ .val = 1.000000e-168, .off = -4.953592503130188139e-185 },
HP{ .val = 1.000000e-169, .off = -2.011795792799518887e-186 },
HP{ .val = 1.000000e-170, .off = 1.665450095113817423e-187 },
HP{ .val = 1.000000e-171, .off = 1.665450095113817487e-188 },
HP{ .val = 1.000000e-172, .off = -4.080246604750770577e-189 },
HP{ .val = 1.000000e-173, .off = -4.080246604750770677e-190 },
HP{ .val = 1.000000e-174, .off = 4.085789420184387951e-192 },
HP{ .val = 1.000000e-175, .off = 4.085789420184388146e-193 },
HP{ .val = 1.000000e-176, .off = 4.085789420184388146e-194 },
HP{ .val = 1.000000e-177, .off = 4.792197640035244894e-194 },
HP{ .val = 1.000000e-178, .off = 4.792197640035244742e-195 },
HP{ .val = 1.000000e-179, .off = -2.057206575616014662e-196 },
HP{ .val = 1.000000e-180, .off = -2.057206575616014662e-197 },
HP{ .val = 1.000000e-181, .off = -4.732755097354788053e-198 },
HP{ .val = 1.000000e-182, .off = -4.732755097354787867e-199 },
HP{ .val = 1.000000e-183, .off = -5.522105321379546765e-201 },
HP{ .val = 1.000000e-184, .off = -5.777891238658996019e-201 },
HP{ .val = 1.000000e-185, .off = 7.542096444923057046e-203 },
HP{ .val = 1.000000e-186, .off = 8.919335748431433483e-203 },
HP{ .val = 1.000000e-187, .off = -1.287071881492476028e-204 },
HP{ .val = 1.000000e-188, .off = 5.091932887209967018e-205 },
HP{ .val = 1.000000e-189, .off = -6.868701054107114024e-206 },
HP{ .val = 1.000000e-190, .off = -1.885103578558330118e-207 },
HP{ .val = 1.000000e-191, .off = -1.885103578558330205e-208 },
HP{ .val = 1.000000e-192, .off = -9.671974634103305058e-209 },
HP{ .val = 1.000000e-193, .off = -4.805180224387695640e-210 },
HP{ .val = 1.000000e-194, .off = -1.763433718315439838e-211 },
HP{ .val = 1.000000e-195, .off = -9.367799983496079132e-212 },
HP{ .val = 1.000000e-196, .off = -4.615071067758179837e-213 },
HP{ .val = 1.000000e-197, .off = 1.325840076914194777e-214 },
HP{ .val = 1.000000e-198, .off = 8.751979007754662425e-215 },
HP{ .val = 1.000000e-199, .off = 1.789973760091724198e-216 },
HP{ .val = 1.000000e-200, .off = 1.789973760091724077e-217 },
HP{ .val = 1.000000e-201, .off = 5.416018159916171171e-218 },
HP{ .val = 1.000000e-202, .off = -3.649092839644947067e-219 },
HP{ .val = 1.000000e-203, .off = -3.649092839644947067e-220 },
HP{ .val = 1.000000e-204, .off = -1.080338554413850956e-222 },
HP{ .val = 1.000000e-205, .off = -1.080338554413850841e-223 },
HP{ .val = 1.000000e-206, .off = -2.874486186850417807e-223 },
HP{ .val = 1.000000e-207, .off = 7.499710055933455072e-224 },
HP{ .val = 1.000000e-208, .off = -9.790617015372999087e-225 },
HP{ .val = 1.000000e-209, .off = -4.387389805589732612e-226 },
HP{ .val = 1.000000e-210, .off = -4.387389805589732612e-227 },
HP{ .val = 1.000000e-211, .off = -8.608661063232909897e-228 },
HP{ .val = 1.000000e-212, .off = 4.582811616902018972e-229 },
HP{ .val = 1.000000e-213, .off = 4.582811616902019155e-230 },
HP{ .val = 1.000000e-214, .off = 8.705146829444184930e-231 },
HP{ .val = 1.000000e-215, .off = -4.177150709750081830e-232 },
HP{ .val = 1.000000e-216, .off = -4.177150709750082366e-233 },
HP{ .val = 1.000000e-217, .off = -8.202868690748290237e-234 },
HP{ .val = 1.000000e-218, .off = -3.170721214500530119e-235 },
HP{ .val = 1.000000e-219, .off = -3.170721214500529857e-236 },
HP{ .val = 1.000000e-220, .off = 7.606440013180328441e-238 },
HP{ .val = 1.000000e-221, .off = -1.696459258568569049e-238 },
HP{ .val = 1.000000e-222, .off = -4.767838333426821244e-239 },
HP{ .val = 1.000000e-223, .off = 2.910609353718809138e-240 },
HP{ .val = 1.000000e-224, .off = -1.888420450747209784e-241 },
HP{ .val = 1.000000e-225, .off = 4.110366804835314035e-242 },
HP{ .val = 1.000000e-226, .off = 7.859608839574391006e-243 },
HP{ .val = 1.000000e-227, .off = 5.516332567862468419e-244 },
HP{ .val = 1.000000e-228, .off = -3.270953451057244613e-245 },
HP{ .val = 1.000000e-229, .off = -6.932322625607124670e-246 },
HP{ .val = 1.000000e-230, .off = -4.643966891513449762e-247 },
HP{ .val = 1.000000e-231, .off = 1.076922443720738305e-248 },
HP{ .val = 1.000000e-232, .off = -2.498633390800628939e-249 },
HP{ .val = 1.000000e-233, .off = 4.205533798926934891e-250 },
HP{ .val = 1.000000e-234, .off = 4.205533798926934891e-251 },
HP{ .val = 1.000000e-235, .off = 4.205533798926934697e-252 },
HP{ .val = 1.000000e-236, .off = -4.523850562697497656e-253 },
HP{ .val = 1.000000e-237, .off = 9.320146633177728298e-255 },
HP{ .val = 1.000000e-238, .off = 9.320146633177728062e-256 },
HP{ .val = 1.000000e-239, .off = -7.592774752331086440e-256 },
HP{ .val = 1.000000e-240, .off = 3.063212017229987840e-257 },
HP{ .val = 1.000000e-241, .off = 3.063212017229987562e-258 },
HP{ .val = 1.000000e-242, .off = 3.063212017229987562e-259 },
HP{ .val = 1.000000e-243, .off = 4.616527473176159842e-261 },
HP{ .val = 1.000000e-244, .off = 6.965550922098544975e-261 },
HP{ .val = 1.000000e-245, .off = 6.965550922098544749e-262 },
HP{ .val = 1.000000e-246, .off = 4.424965697574744679e-263 },
HP{ .val = 1.000000e-247, .off = -1.926497363734756420e-264 },
HP{ .val = 1.000000e-248, .off = 2.043167049583681740e-265 },
HP{ .val = 1.000000e-249, .off = -5.399953725388390154e-266 },
HP{ .val = 1.000000e-250, .off = -5.399953725388389982e-267 },
HP{ .val = 1.000000e-251, .off = -1.523328321757102663e-268 },
HP{ .val = 1.000000e-252, .off = 5.745344310051561161e-269 },
HP{ .val = 1.000000e-253, .off = -6.369110076296211879e-270 },
HP{ .val = 1.000000e-254, .off = 8.773957906638504842e-271 },
HP{ .val = 1.000000e-255, .off = -6.904595826956931908e-273 },
HP{ .val = 1.000000e-256, .off = 2.267170882721243669e-273 },
HP{ .val = 1.000000e-257, .off = 2.267170882721243669e-274 },
HP{ .val = 1.000000e-258, .off = 4.577819683828225398e-275 },
HP{ .val = 1.000000e-259, .off = -6.975424321706684210e-276 },
HP{ .val = 1.000000e-260, .off = 3.855741933482293648e-277 },
HP{ .val = 1.000000e-261, .off = 1.599248963651256552e-278 },
HP{ .val = 1.000000e-262, .off = -1.221367248637539543e-279 },
HP{ .val = 1.000000e-263, .off = -1.221367248637539494e-280 },
HP{ .val = 1.000000e-264, .off = -1.221367248637539647e-281 },
HP{ .val = 1.000000e-265, .off = 1.533140771175737943e-282 },
HP{ .val = 1.000000e-266, .off = 1.533140771175737895e-283 },
HP{ .val = 1.000000e-267, .off = 1.533140771175738074e-284 },
HP{ .val = 1.000000e-268, .off = 4.223090009274641634e-285 },
HP{ .val = 1.000000e-269, .off = 4.223090009274641634e-286 },
HP{ .val = 1.000000e-270, .off = -4.183001359784432924e-287 },
HP{ .val = 1.000000e-271, .off = 3.697709298708449474e-288 },
HP{ .val = 1.000000e-272, .off = 6.981338739747150474e-289 },
HP{ .val = 1.000000e-273, .off = -9.436808465446354751e-290 },
HP{ .val = 1.000000e-274, .off = 3.389869038611071740e-291 },
HP{ .val = 1.000000e-275, .off = 6.596538414625427829e-292 },
HP{ .val = 1.000000e-276, .off = -9.436808465446354618e-293 },
HP{ .val = 1.000000e-277, .off = 3.089243784609725523e-294 },
HP{ .val = 1.000000e-278, .off = 6.220756847123745836e-295 },
HP{ .val = 1.000000e-279, .off = -5.522417137303829470e-296 },
HP{ .val = 1.000000e-280, .off = 4.263561183052483059e-297 },
HP{ .val = 1.000000e-281, .off = -1.852675267170212272e-298 },
HP{ .val = 1.000000e-282, .off = -1.852675267170212378e-299 },
HP{ .val = 1.000000e-283, .off = 5.314789322934508480e-300 },
HP{ .val = 1.000000e-284, .off = -3.644541414696392675e-301 },
HP{ .val = 1.000000e-285, .off = -7.377595888709267777e-302 },
HP{ .val = 1.000000e-286, .off = -5.044436842451220838e-303 },
HP{ .val = 1.000000e-287, .off = -2.127988034628661760e-304 },
HP{ .val = 1.000000e-288, .off = -5.773549044406860911e-305 },
HP{ .val = 1.000000e-289, .off = -1.216597782184112068e-306 },
HP{ .val = 1.000000e-290, .off = -6.912786859962547924e-307 },
HP{ .val = 1.000000e-291, .off = 3.767567660872018813e-308 },
};
+45 -39
View File
@@ -13,7 +13,7 @@ const max_int_digits = 65;
/// If `output` returns an error, the error is returned from `format` and
/// `output` is not called again.
pub fn format(context: var, comptime Errors: type, output: fn (@typeOf(context), []const u8) Errors!void, comptime fmt: []const u8, args: ...) Errors!void {
const State = enum.{
const State = enum {
Start,
OpenBrace,
CloseBrace,
@@ -111,7 +111,7 @@ pub fn formatType(
output: fn (@typeOf(context), []const u8) Errors!void,
) Errors!void {
const T = @typeOf(value);
if (T == error) {
if (T == anyerror) {
try output(context, "error.");
return output(context, @errorName(value));
}
@@ -229,6 +229,9 @@ pub fn formatType(
return format(context, Errors, output, "{}@{x}", @typeName(T.Child), @ptrToInt(value));
},
builtin.TypeInfo.Pointer.Size.Slice => {
if (fmt.len > 0 and ((fmt[0] == 'x') or (fmt[0] == 'X'))) {
return formatText(value, fmt, context, Errors, output);
}
const casted_value = ([]const u8)(value);
return output(context, casted_value);
},
@@ -286,7 +289,7 @@ pub fn formatIntValue(
switch (fmt[0]) {
'c' => {
if (@typeOf(value) == u8) {
if (fmt.len > 1) @compileError("Unknown format character: " ++ []u8.{fmt[1]});
if (fmt.len > 1) @compileError("Unknown format character: " ++ []u8{fmt[1]});
return formatAsciiChar(value, context, Errors, output);
}
},
@@ -310,7 +313,7 @@ pub fn formatIntValue(
uppercase = true;
width = 0;
},
else => @compileError("Unknown format character: " ++ []u8.{fmt[0]}),
else => @compileError("Unknown format character: " ++ []u8{fmt[0]}),
}
if (fmt.len > 1) width = comptime (parseUnsigned(usize, fmt[1..], 10) catch unreachable);
}
@@ -334,7 +337,7 @@ fn formatFloatValue(
switch (float_fmt) {
'e' => try formatFloatScientific(value, width, context, Errors, output),
'.' => try formatFloatDecimal(value, width, context, Errors, output),
else => @compileError("Unknown format character: " ++ []u8.{float_fmt}),
else => @compileError("Unknown format character: " ++ []u8{float_fmt}),
}
}
@@ -355,7 +358,7 @@ pub fn formatText(
try formatInt(c, 16, fmt[0] == 'X', 2, context, Errors, output);
}
return;
} else @compileError("Unknown format character: " ++ []u8.{fmt[0]});
} else @compileError("Unknown format character: " ++ []u8{fmt[0]});
}
return output(context, bytes);
}
@@ -661,8 +664,8 @@ pub fn formatBytes(
}
const buf = switch (radix) {
1000 => []u8.{ suffix, 'B' },
1024 => []u8.{ suffix, 'i', 'B' },
1000 => []u8{ suffix, 'B' },
1024 => []u8{ suffix, 'i', 'B' },
else => unreachable,
};
return output(context, buf);
@@ -757,18 +760,18 @@ fn formatIntUnsigned(
}
pub fn formatIntBuf(out_buf: []u8, value: var, base: u8, uppercase: bool, width: usize) usize {
var context = FormatIntBuf.{
var context = FormatIntBuf{
.out_buf = out_buf,
.index = 0,
};
formatInt(value, base, uppercase, width, &context, error.{}, formatIntCallback) catch unreachable;
formatInt(value, base, uppercase, width, &context, error{}, formatIntCallback) catch unreachable;
return context.index;
}
const FormatIntBuf = struct.{
const FormatIntBuf = struct {
out_buf: []u8,
index: usize,
};
fn formatIntCallback(context: *FormatIntBuf, bytes: []const u8) (error.{}!void) {
fn formatIntCallback(context: *FormatIntBuf, bytes: []const u8) (error{}!void) {
mem.copy(u8, context.out_buf[context.index..], bytes);
context.index += bytes.len;
}
@@ -795,7 +798,7 @@ test "fmt.parseInt" {
assert(if (parseInt(u8, "256", 10)) |_| false else |err| err == error.Overflow);
}
const ParseUnsignedError = error.{
const ParseUnsignedError = error{
/// The result cannot fit in the type specified
Overflow,
@@ -815,7 +818,7 @@ pub fn parseUnsigned(comptime T: type, buf: []const u8, radix: u8) ParseUnsigned
return x;
}
pub fn charToDigit(c: u8, radix: u8) (error.{InvalidCharacter}!u8) {
pub fn charToDigit(c: u8, radix: u8) (error{InvalidCharacter}!u8) {
const value = switch (c) {
'0'...'9' => c - '0',
'A'...'Z' => c - 'A' + 10,
@@ -836,7 +839,7 @@ fn digitToChar(digit: u8, uppercase: bool) u8 {
};
}
const BufPrintContext = struct.{
const BufPrintContext = struct {
remaining: []u8,
};
@@ -847,23 +850,23 @@ fn bufPrintWrite(context: *BufPrintContext, bytes: []const u8) !void {
}
pub fn bufPrint(buf: []u8, comptime fmt: []const u8, args: ...) ![]u8 {
var context = BufPrintContext.{ .remaining = buf };
try format(&context, error.{BufferTooSmall}, bufPrintWrite, fmt, args);
var context = BufPrintContext{ .remaining = buf };
try format(&context, error{BufferTooSmall}, bufPrintWrite, fmt, args);
return buf[0 .. buf.len - context.remaining.len];
}
pub const AllocPrintError = error.{OutOfMemory};
pub const AllocPrintError = error{OutOfMemory};
pub fn allocPrint(allocator: *mem.Allocator, comptime fmt: []const u8, args: ...) AllocPrintError![]u8 {
var size: usize = 0;
format(&size, error.{}, countSize, fmt, args) catch |err| switch (err) {};
format(&size, error{}, countSize, fmt, args) catch |err| switch (err) {};
const buf = try allocator.alloc(u8, size);
return bufPrint(buf, fmt, args) catch |err| switch (err) {
error.BufferTooSmall => unreachable, // we just counted the size above
};
}
fn countSize(size: *usize, bytes: []const u8) (error.{}!void) {
fn countSize(size: *usize, bytes: []const u8) (error{}!void) {
size.* += bytes.len;
}
@@ -913,11 +916,11 @@ test "fmt.format" {
try testFmt("optional: null\n", "optional: {}\n", value);
}
{
const value: error!i32 = 1234;
const value: anyerror!i32 = 1234;
try testFmt("error union: 1234\n", "error union: {}\n", value);
}
{
const value: error!i32 = error.InvalidChar;
const value: anyerror!i32 = error.InvalidChar;
try testFmt("error union: error.InvalidChar\n", "error union: {}\n", value);
}
{
@@ -960,23 +963,23 @@ test "fmt.format" {
try testFmt("file size: 63MiB\n", "file size: {Bi}\n", usize(63 * 1024 * 1024));
try testFmt("file size: 66.06MB\n", "file size: {B2}\n", usize(63 * 1024 * 1024));
{
const Struct = struct.{
const Struct = struct {
field: u8,
};
const value = Struct.{ .field = 42 };
const value = Struct{ .field = 42 };
try testFmt("struct: Struct{ .field = 42 }\n", "struct: {}\n", value);
try testFmt("struct: Struct{ .field = 42 }\n", "struct: {}\n", &value);
}
{
const Struct = struct.{
const Struct = struct {
a: u0,
b: u1,
};
const value = Struct.{ .a = 0, .b = 1 };
const value = Struct{ .a = 0, .b = 1 };
try testFmt("struct: Struct{ .a = 0, .b = 1 }\n", "struct: {}\n", value);
}
{
const Enum = enum.{
const Enum = enum {
One,
Two,
};
@@ -1194,7 +1197,7 @@ test "fmt.format" {
}
//custom type format
{
const Vec2 = struct.{
const Vec2 = struct {
const SelfType = @This();
x: f32,
y: f32,
@@ -1221,7 +1224,7 @@ test "fmt.format" {
};
var buf1: [32]u8 = undefined;
var value = Vec2.{
var value = Vec2{
.x = 10.2,
.y = 2.22,
};
@@ -1234,12 +1237,12 @@ test "fmt.format" {
}
//struct format
{
const S = struct.{
const S = struct {
a: u32,
b: error,
b: anyerror,
};
const inst = S.{
const inst = S{
.a = 456,
.b = error.Unused,
};
@@ -1248,24 +1251,24 @@ test "fmt.format" {
}
//union format
{
const TU = union(enum).{
const TU = union(enum) {
float: f32,
int: u32,
};
const UU = union.{
const UU = union {
float: f32,
int: u32,
};
const EU = extern union.{
const EU = extern union {
float: f32,
int: u32,
};
const tu_inst = TU.{ .int = 123 };
const uu_inst = UU.{ .int = 456 };
const eu_inst = EU.{ .float = 321.123 };
const tu_inst = TU{ .int = 123 };
const uu_inst = UU{ .int = 456 };
const eu_inst = EU{ .float = 321.123 };
try testFmt("TU{ .int = 123 }", "{}", tu_inst);
@@ -1278,7 +1281,7 @@ test "fmt.format" {
}
//enum format
{
const E = enum.{
const E = enum {
One,
Two,
Three,
@@ -1293,6 +1296,9 @@ test "fmt.format" {
const some_bytes = "\xCA\xFE\xBA\xBE";
try testFmt("lowercase: cafebabe\n", "lowercase: {x}\n", some_bytes);
try testFmt("uppercase: CAFEBABE\n", "uppercase: {X}\n", some_bytes);
//Test Slices
try testFmt("uppercase: CAFE\n", "uppercase: {X}\n", some_bytes[0..2]);
try testFmt("lowercase: babe\n", "lowercase: {x}\n", some_bytes[2..]);
const bytes_with_zeros = "\x00\x0E\xBA\xBE";
try testFmt("lowercase: 000ebabe\n", "lowercase: {x}\n", bytes_with_zeros);
}
+5 -5
View File
@@ -6,14 +6,14 @@
const std = @import("../index.zig");
const debug = std.debug;
pub const Adler32 = struct.{
pub const Adler32 = struct {
const base = 65521;
const nmax = 5552;
adler: u32,
pub fn init() Adler32 {
return Adler32.{ .adler = 1 };
return Adler32{ .adler = 1 };
}
// This fast variant is taken from zlib. It reduces the required modulos and unrolls longer
@@ -94,14 +94,14 @@ test "adler32 sanity" {
}
test "adler32 long" {
const long1 = []u8.{1} ** 1024;
const long1 = []u8{1} ** 1024;
debug.assert(Adler32.hash(long1[0..]) == 0x06780401);
const long2 = []u8.{1} ** 1025;
const long2 = []u8{1} ** 1025;
debug.assert(Adler32.hash(long2[0..]) == 0x0a7a0402);
}
test "adler32 very long" {
const long = []u8.{1} ** 5553;
const long = []u8{1} ** 5553;
debug.assert(Adler32.hash(long[0..]) == 0x707f15b2);
}
+5 -5
View File
@@ -8,7 +8,7 @@
const std = @import("../index.zig");
const debug = std.debug;
pub const Polynomial = struct.{
pub const Polynomial = struct {
const IEEE = 0xedb88320;
const Castagnoli = 0x82f63b78;
const Koopman = 0xeb31d82e;
@@ -19,7 +19,7 @@ pub const Crc32 = Crc32WithPoly(Polynomial.IEEE);
// slicing-by-8 crc32 implementation.
pub fn Crc32WithPoly(comptime poly: u32) type {
return struct.{
return struct {
const Self = @This();
const lookup_tables = comptime block: {
@setEvalBranchQuota(20000);
@@ -55,7 +55,7 @@ pub fn Crc32WithPoly(comptime poly: u32) type {
crc: u32,
pub fn init() Self {
return Self.{ .crc = 0xffffffff };
return Self{ .crc = 0xffffffff };
}
pub fn update(self: *Self, input: []const u8) void {
@@ -116,7 +116,7 @@ test "crc32 castagnoli" {
// half-byte lookup table implementation.
pub fn Crc32SmallWithPoly(comptime poly: u32) type {
return struct.{
return struct {
const Self = @This();
const lookup_table = comptime block: {
var table: [16]u32 = undefined;
@@ -140,7 +140,7 @@ pub fn Crc32SmallWithPoly(comptime poly: u32) type {
crc: u32,
pub fn init() Self {
return Self.{ .crc = 0xffffffff };
return Self{ .crc = 0xffffffff };
}
pub fn update(self: *Self, input: []const u8) void {
+2 -2
View File
@@ -12,13 +12,13 @@ pub const Fnv1a_64 = Fnv1a(u64, 0x100000001b3, 0xcbf29ce484222325);
pub const Fnv1a_128 = Fnv1a(u128, 0x1000000000000000000013b, 0x6c62272e07bb014262b821756295c58d);
fn Fnv1a(comptime T: type, comptime prime: T, comptime offset: T) type {
return struct.{
return struct {
const Self = @This();
value: T,
pub fn init() Self {
return Self.{ .value = offset };
return Self{ .value = offset };
}
pub fn update(self: *Self, input: []const u8) void {
+4 -4
View File
@@ -24,7 +24,7 @@ fn SipHash(comptime T: type, comptime c_rounds: usize, comptime d_rounds: usize)
debug.assert(T == u64 or T == u128);
debug.assert(c_rounds > 0 and d_rounds > 0);
return struct.{
return struct {
const Self = @This();
const digest_size = 64;
const block_size = 64;
@@ -45,7 +45,7 @@ fn SipHash(comptime T: type, comptime c_rounds: usize, comptime d_rounds: usize)
const k0 = mem.readInt(key[0..8], u64, Endian.Little);
const k1 = mem.readInt(key[8..16], u64, Endian.Little);
var d = Self.{
var d = Self{
.v0 = k0 ^ 0x736f6d6570736575,
.v1 = k1 ^ 0x646f72616e646f6d,
.v2 = k0 ^ 0x6c7967656e657261,
@@ -162,7 +162,7 @@ fn SipHash(comptime T: type, comptime c_rounds: usize, comptime d_rounds: usize)
const test_key = "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f";
test "siphash64-2-4 sanity" {
const vectors = [][]const u8.{
const vectors = [][]const u8{
"\x31\x0e\x0e\xdd\x47\xdb\x6f\x72", // ""
"\xfd\x67\xdc\x93\xc5\x39\xf8\x74", // "\x00"
"\x5a\x4f\xa9\xd9\x09\x80\x6c\x0d", // "\x00\x01" ... etc
@@ -241,7 +241,7 @@ test "siphash64-2-4 sanity" {
}
test "siphash128-2-4 sanity" {
const vectors = [][]const u8.{
const vectors = [][]const u8{
"\xa3\x81\x7f\x04\xba\x25\xa8\xe6\x6d\xf6\x72\x14\xc7\x55\x02\x93",
"\xda\x87\xc1\xd8\x6b\x99\xaf\x44\x34\x76\x59\x11\x9b\x22\xfc\x45",
"\x81\x77\x22\x8d\xa4\xa4\x5d\xc7\xfc\xa3\x8b\xde\xf6\x0a\xff\xe4",
+22 -22
View File
@@ -14,7 +14,7 @@ pub fn AutoHashMap(comptime K: type, comptime V: type) type {
}
pub fn HashMap(comptime K: type, comptime V: type, comptime hash: fn (key: K) u32, comptime eql: fn (a: K, b: K) bool) type {
return struct.{
return struct {
entries: []Entry,
size: usize,
max_distance_from_start_index: usize,
@@ -24,23 +24,23 @@ pub fn HashMap(comptime K: type, comptime V: type, comptime hash: fn (key: K) u3
const Self = @This();
pub const KV = struct.{
pub const KV = struct {
key: K,
value: V,
};
const Entry = struct.{
const Entry = struct {
used: bool,
distance_from_start_index: usize,
kv: KV,
};
pub const GetOrPutResult = struct.{
pub const GetOrPutResult = struct {
kv: *KV,
found_existing: bool,
};
pub const Iterator = struct.{
pub const Iterator = struct {
hm: *const Self,
// how many items have we returned
count: usize,
@@ -75,8 +75,8 @@ pub fn HashMap(comptime K: type, comptime V: type, comptime hash: fn (key: K) u3
};
pub fn init(allocator: *Allocator) Self {
return Self.{
.entries = []Entry.{},
return Self{
.entries = []Entry{},
.allocator = allocator,
.size = 0,
.max_distance_from_start_index = 0,
@@ -111,7 +111,7 @@ pub fn HashMap(comptime K: type, comptime V: type, comptime hash: fn (key: K) u3
// TODO this implementation can be improved - we should only
// have to hash once and find the entry once.
if (self.get(key)) |kv| {
return GetOrPutResult.{
return GetOrPutResult{
.kv = kv,
.found_existing = true,
};
@@ -120,7 +120,7 @@ pub fn HashMap(comptime K: type, comptime V: type, comptime hash: fn (key: K) u3
try self.ensureCapacity();
const put_result = self.internalPut(key);
assert(put_result.old_kv == null);
return GetOrPutResult.{
return GetOrPutResult{
.kv = &put_result.new_entry.kv,
.found_existing = false,
};
@@ -199,7 +199,7 @@ pub fn HashMap(comptime K: type, comptime V: type, comptime hash: fn (key: K) u3
}
pub fn iterator(hm: *const Self) Iterator {
return Iterator.{
return Iterator{
.hm = hm,
.count = 0,
.index = 0,
@@ -232,7 +232,7 @@ pub fn HashMap(comptime K: type, comptime V: type, comptime hash: fn (key: K) u3
}
}
const InternalPutResult = struct.{
const InternalPutResult = struct {
new_entry: *Entry,
old_kv: ?KV,
};
@@ -246,7 +246,7 @@ pub fn HashMap(comptime K: type, comptime V: type, comptime hash: fn (key: K) u3
var roll_over: usize = 0;
var distance_from_start_index: usize = 0;
var got_result_entry = false;
var result = InternalPutResult.{
var result = InternalPutResult{
.new_entry = undefined,
.old_kv = null,
};
@@ -266,10 +266,10 @@ pub fn HashMap(comptime K: type, comptime V: type, comptime hash: fn (key: K) u3
got_result_entry = true;
result.new_entry = entry;
}
entry.* = Entry.{
entry.* = Entry{
.used = true,
.distance_from_start_index = distance_from_start_index,
.kv = KV.{
.kv = KV{
.key = key,
.value = value,
},
@@ -293,10 +293,10 @@ pub fn HashMap(comptime K: type, comptime V: type, comptime hash: fn (key: K) u3
if (!got_result_entry) {
result.new_entry = entry;
}
entry.* = Entry.{
entry.* = Entry{
.used = true,
.distance_from_start_index = distance_from_start_index,
.kv = KV.{
.kv = KV{
.key = key,
.value = value,
},
@@ -372,12 +372,12 @@ test "iterator hash map" {
assert((try reset_map.put(2, 22)) == null);
assert((try reset_map.put(3, 33)) == null);
var keys = []i32.{
var keys = []i32{
3,
2,
1,
};
var values = []i32.{
var values = []i32{
33,
22,
11,
@@ -409,7 +409,7 @@ test "iterator hash map" {
}
pub fn getHashPtrAddrFn(comptime K: type) (fn (K) u32) {
return struct.{
return struct {
fn hash(key: K) u32 {
return getAutoHashFn(usize)(@ptrToInt(key));
}
@@ -417,7 +417,7 @@ pub fn getHashPtrAddrFn(comptime K: type) (fn (K) u32) {
}
pub fn getTrivialEqlFn(comptime K: type) (fn (K, K) bool) {
return struct.{
return struct {
fn eql(a: K, b: K) bool {
return a == b;
}
@@ -425,7 +425,7 @@ pub fn getTrivialEqlFn(comptime K: type) (fn (K, K) bool) {
}
pub fn getAutoHashFn(comptime K: type) (fn (K) u32) {
return struct.{
return struct {
fn hash(key: K) u32 {
comptime var rng = comptime std.rand.DefaultPrng.init(0);
return autoHash(key, &rng.random, u32);
@@ -434,7 +434,7 @@ pub fn getAutoHashFn(comptime K: type) (fn (K) u32) {
}
pub fn getAutoEqlFn(comptime K: type) (fn (K, K) bool) {
return struct.{
return struct {
fn eql(a: K, b: K) bool {
return autoEql(a, b);
}
+17 -17
View File
@@ -11,7 +11,7 @@ const maxInt = std.math.maxInt;
const Allocator = mem.Allocator;
pub const c_allocator = &c_allocator_state;
var c_allocator_state = Allocator.{
var c_allocator_state = Allocator{
.allocFn = cAlloc,
.reallocFn = cRealloc,
.freeFn = cFree,
@@ -40,15 +40,15 @@ fn cFree(self: *Allocator, old_mem: []u8) void {
/// This allocator makes a syscall directly for every allocation and free.
/// Thread-safe and lock-free.
pub const DirectAllocator = struct.{
pub const DirectAllocator = struct {
allocator: Allocator,
heap_handle: ?HeapHandle,
const HeapHandle = if (builtin.os == Os.windows) os.windows.HANDLE else void;
pub fn init() DirectAllocator {
return DirectAllocator.{
.allocator = Allocator.{
return DirectAllocator{
.allocator = Allocator{
.allocFn = alloc,
.reallocFn = realloc,
.freeFn = free,
@@ -182,7 +182,7 @@ pub const DirectAllocator = struct.{
/// This allocator takes an existing allocator, wraps it, and provides an interface
/// where you can allocate without freeing, and then free it all together.
pub const ArenaAllocator = struct.{
pub const ArenaAllocator = struct {
pub allocator: Allocator,
child_allocator: *Allocator,
@@ -192,8 +192,8 @@ pub const ArenaAllocator = struct.{
const BufNode = std.LinkedList([]u8).Node;
pub fn init(child_allocator: *Allocator) ArenaAllocator {
return ArenaAllocator.{
.allocator = Allocator.{
return ArenaAllocator{
.allocator = Allocator{
.allocFn = alloc,
.reallocFn = realloc,
.freeFn = free,
@@ -225,7 +225,7 @@ pub const ArenaAllocator = struct.{
const buf = try self.child_allocator.alignedAlloc(u8, @alignOf(BufNode), len);
const buf_node_slice = @bytesToSlice(BufNode, buf[0..@sizeOf(BufNode)]);
const buf_node = &buf_node_slice[0];
buf_node.* = BufNode.{
buf_node.* = BufNode{
.data = buf,
.prev = null,
.next = null,
@@ -269,14 +269,14 @@ pub const ArenaAllocator = struct.{
fn free(allocator: *Allocator, bytes: []u8) void {}
};
pub const FixedBufferAllocator = struct.{
pub const FixedBufferAllocator = struct {
allocator: Allocator,
end_index: usize,
buffer: []u8,
pub fn init(buffer: []u8) FixedBufferAllocator {
return FixedBufferAllocator.{
.allocator = Allocator.{
return FixedBufferAllocator{
.allocator = Allocator{
.allocFn = alloc,
.reallocFn = realloc,
.freeFn = free,
@@ -325,14 +325,14 @@ pub const FixedBufferAllocator = struct.{
};
/// lock free
pub const ThreadSafeFixedBufferAllocator = struct.{
pub const ThreadSafeFixedBufferAllocator = struct {
allocator: Allocator,
end_index: usize,
buffer: []u8,
pub fn init(buffer: []u8) ThreadSafeFixedBufferAllocator {
return ThreadSafeFixedBufferAllocator.{
.allocator = Allocator.{
return ThreadSafeFixedBufferAllocator{
.allocator = Allocator{
.allocFn = alloc,
.reallocFn = realloc,
.freeFn = free,
@@ -372,11 +372,11 @@ pub const ThreadSafeFixedBufferAllocator = struct.{
};
pub fn stackFallback(comptime size: usize, fallback_allocator: *Allocator) StackFallbackAllocator(size) {
return StackFallbackAllocator(size).{
return StackFallbackAllocator(size){
.buffer = undefined,
.fallback_allocator = fallback_allocator,
.fixed_buffer_allocator = undefined,
.allocator = Allocator.{
.allocator = Allocator{
.allocFn = StackFallbackAllocator(size).alloc,
.reallocFn = StackFallbackAllocator(size).realloc,
.freeFn = StackFallbackAllocator(size).free,
@@ -385,7 +385,7 @@ pub fn stackFallback(comptime size: usize, fallback_allocator: *Allocator) Stack
}
pub fn StackFallbackAllocator(comptime size: usize) type {
return struct.{
return struct {
const Self = @This();
buffer: [size]u8,
+120 -39
View File
@@ -33,7 +33,7 @@ pub fn getStdIn() GetStdIoErrs!File {
}
pub fn InStream(comptime ReadError: type) type {
return struct.{
return struct {
const Self = @This();
pub const Error = ReadError;
@@ -155,11 +155,15 @@ pub fn InStream(comptime ReadError: type) type {
}
pub fn readIntLe(self: *Self, comptime T: type) !T {
return self.readInt(builtin.Endian.Little, T);
var bytes: [@sizeOf(T)]u8 = undefined;
try self.readNoEof(bytes[0..]);
return mem.readIntLE(T, bytes);
}
pub fn readIntBe(self: *Self, comptime T: type) !T {
return self.readInt(builtin.Endian.Big, T);
var bytes: [@sizeOf(T)]u8 = undefined;
try self.readNoEof(bytes[0..]);
return mem.readIntBE(T, bytes);
}
pub fn readInt(self: *Self, endian: builtin.Endian, comptime T: type) !T {
@@ -184,35 +188,37 @@ pub fn InStream(comptime ReadError: type) type {
}
}
pub fn readStruct(self: *Self, comptime T: type, ptr: *T) !void {
pub fn readStruct(self: *Self, comptime T: type) !T {
// Only extern and packed structs have defined in-memory layout.
comptime assert(@typeInfo(T).Struct.layout != builtin.TypeInfo.ContainerLayout.Auto);
return self.readNoEof(@sliceToBytes((*[1]T)(ptr)[0..]));
var res: [1]T = undefined;
try self.readNoEof(@sliceToBytes(res[0..]));
return res[0];
}
};
}
pub fn OutStream(comptime WriteError: type) type {
return struct.{
return struct {
const Self = @This();
pub const Error = WriteError;
writeFn: fn (self: *Self, bytes: []const u8) Error!void,
pub fn print(self: *Self, comptime format: []const u8, args: ...) !void {
pub fn print(self: *Self, comptime format: []const u8, args: ...) Error!void {
return std.fmt.format(self, Error, self.writeFn, format, args);
}
pub fn write(self: *Self, bytes: []const u8) !void {
pub fn write(self: *Self, bytes: []const u8) Error!void {
return self.writeFn(self, bytes);
}
pub fn writeByte(self: *Self, byte: u8) !void {
pub fn writeByte(self: *Self, byte: u8) Error!void {
const slice = (*[1]u8)(&byte)[0..];
return self.writeFn(self, slice);
}
pub fn writeByteNTimes(self: *Self, byte: u8, n: usize) !void {
pub fn writeByteNTimes(self: *Self, byte: u8, n: usize) Error!void {
const slice = (*[1]u8)(&byte)[0..];
var i: usize = 0;
while (i < n) : (i += 1) {
@@ -221,19 +227,23 @@ pub fn OutStream(comptime WriteError: type) type {
}
/// Write a native-endian integer.
pub fn writeIntNe(self: *Self, comptime T: type, value: T) !void {
pub fn writeIntNe(self: *Self, comptime T: type, value: T) Error!void {
return self.writeInt(builtin.endian, T, value);
}
pub fn writeIntLe(self: *Self, comptime T: type, value: T) !void {
return self.writeInt(builtin.Endian.Little, T, value);
pub fn writeIntLe(self: *Self, comptime T: type, value: T) Error!void {
var bytes: [@sizeOf(T)]u8 = undefined;
mem.writeIntLE(T, &bytes, value);
return self.writeFn(self, bytes);
}
pub fn writeIntBe(self: *Self, comptime T: type, value: T) !void {
return self.writeInt(builtin.Endian.Big, T, value);
pub fn writeIntBe(self: *Self, comptime T: type, value: T) Error!void {
var bytes: [@sizeOf(T)]u8 = undefined;
mem.writeIntBE(T, &bytes, value);
return self.writeFn(self, bytes);
}
pub fn writeInt(self: *Self, endian: builtin.Endian, comptime T: type, value: T) !void {
pub fn writeInt(self: *Self, endian: builtin.Endian, comptime T: type, value: T) Error!void {
var bytes: [@sizeOf(T)]u8 = undefined;
mem.writeInt(bytes[0..], value, endian);
return self.writeFn(self, bytes);
@@ -271,7 +281,7 @@ pub fn BufferedInStream(comptime Error: type) type {
}
pub fn BufferedInStreamCustom(comptime buffer_size: usize, comptime Error: type) type {
return struct.{
return struct {
const Self = @This();
const Stream = InStream(Error);
@@ -284,7 +294,7 @@ pub fn BufferedInStreamCustom(comptime buffer_size: usize, comptime Error: type)
end_index: usize,
pub fn init(unbuffered_in_stream: *Stream) Self {
return Self.{
return Self{
.unbuffered_in_stream = unbuffered_in_stream,
.buffer = undefined,
@@ -295,7 +305,7 @@ pub fn BufferedInStreamCustom(comptime buffer_size: usize, comptime Error: type)
.start_index = buffer_size,
.end_index = buffer_size,
.stream = Stream.{ .readFn = readFn },
.stream = Stream{ .readFn = readFn },
};
}
@@ -341,7 +351,7 @@ pub fn BufferedInStreamCustom(comptime buffer_size: usize, comptime Error: type)
/// Creates a stream which supports 'un-reading' data, so that it can be read again.
/// This makes look-ahead style parsing much easier.
pub fn PeekStream(comptime buffer_size: usize, comptime InStreamError: type) type {
return struct.{
return struct {
const Self = @This();
pub const Error = InStreamError;
pub const Stream = InStream(Error);
@@ -356,12 +366,12 @@ pub fn PeekStream(comptime buffer_size: usize, comptime InStreamError: type) typ
at_end: bool,
pub fn init(base: *Stream) Self {
return Self.{
return Self{
.base = base,
.buffer = undefined,
.index = 0,
.at_end = false,
.stream = Stream.{ .readFn = readFn },
.stream = Stream{ .readFn = readFn },
};
}
@@ -404,9 +414,9 @@ pub fn PeekStream(comptime buffer_size: usize, comptime InStreamError: type) typ
};
}
pub const SliceInStream = struct.{
pub const SliceInStream = struct {
const Self = @This();
pub const Error = error.{};
pub const Error = error{};
pub const Stream = InStream(Error);
pub stream: Stream,
@@ -415,10 +425,10 @@ pub const SliceInStream = struct.{
slice: []const u8,
pub fn init(slice: []const u8) Self {
return Self.{
return Self{
.slice = slice,
.pos = 0,
.stream = Stream.{ .readFn = readFn },
.stream = Stream{ .readFn = readFn },
};
}
@@ -436,8 +446,8 @@ pub const SliceInStream = struct.{
/// This is a simple OutStream that writes to a slice, and returns an error
/// when it runs out of space.
pub const SliceOutStream = struct.{
pub const Error = error.{OutOfSpace};
pub const SliceOutStream = struct {
pub const Error = error{OutOfSpace};
pub const Stream = OutStream(Error);
pub stream: Stream,
@@ -446,10 +456,10 @@ pub const SliceOutStream = struct.{
slice: []u8,
pub fn init(slice: []u8) SliceOutStream {
return SliceOutStream.{
return SliceOutStream{
.slice = slice,
.pos = 0,
.stream = Stream.{ .writeFn = writeFn },
.stream = Stream{ .writeFn = writeFn },
};
}
@@ -480,12 +490,83 @@ pub const SliceOutStream = struct.{
}
};
test "io.SliceOutStream" {
var buf: [255]u8 = undefined;
var slice_stream = SliceOutStream.init(buf[0..]);
const stream = &slice_stream.stream;
try stream.print("{}{}!", "Hello", "World");
debug.assert(mem.eql(u8, "HelloWorld!", slice_stream.getWritten()));
}
var null_out_stream_state = NullOutStream.init();
pub const null_out_stream = &null_out_stream_state.stream;
/// An OutStream that doesn't write to anything.
pub const NullOutStream = struct {
pub const Error = error{};
pub const Stream = OutStream(Error);
pub stream: Stream,
pub fn init() NullOutStream {
return NullOutStream{
.stream = Stream{ .writeFn = writeFn },
};
}
fn writeFn(out_stream: *Stream, bytes: []const u8) Error!void {}
};
test "io.NullOutStream" {
var null_stream = NullOutStream.init();
const stream = &null_stream.stream;
stream.write("yay" ** 10000) catch unreachable;
}
/// An OutStream that counts how many bytes has been written to it.
pub fn CountingOutStream(comptime OutStreamError: type) type {
return struct {
const Self = @This();
pub const Stream = OutStream(Error);
pub const Error = OutStreamError;
pub stream: Stream,
pub bytes_written: usize,
child_stream: *Stream,
pub fn init(child_stream: *Stream) Self {
return Self{
.stream = Stream{ .writeFn = writeFn },
.bytes_written = 0,
.child_stream = child_stream,
};
}
fn writeFn(out_stream: *Stream, bytes: []const u8) OutStreamError!void {
const self = @fieldParentPtr(Self, "stream", out_stream);
try self.child_stream.write(bytes);
self.bytes_written += bytes.len;
}
};
}
test "io.CountingOutStream" {
var null_stream = NullOutStream.init();
var counting_stream = CountingOutStream(NullOutStream.Error).init(&null_stream.stream);
const stream = &counting_stream.stream;
const bytes = "yay" ** 10000;
stream.write(bytes) catch unreachable;
debug.assert(counting_stream.bytes_written == bytes.len);
}
pub fn BufferedOutStream(comptime Error: type) type {
return BufferedOutStreamCustom(os.page_size, Error);
}
pub fn BufferedOutStreamCustom(comptime buffer_size: usize, comptime OutStreamError: type) type {
return struct.{
return struct {
const Self = @This();
pub const Stream = OutStream(Error);
pub const Error = OutStreamError;
@@ -498,11 +579,11 @@ pub fn BufferedOutStreamCustom(comptime buffer_size: usize, comptime OutStreamEr
index: usize,
pub fn init(unbuffered_out_stream: *Stream) Self {
return Self.{
return Self{
.unbuffered_out_stream = unbuffered_out_stream,
.buffer = undefined,
.index = 0,
.stream = Stream.{ .writeFn = writeFn },
.stream = Stream{ .writeFn = writeFn },
};
}
@@ -536,17 +617,17 @@ pub fn BufferedOutStreamCustom(comptime buffer_size: usize, comptime OutStreamEr
}
/// Implementation of OutStream trait for Buffer
pub const BufferOutStream = struct.{
pub const BufferOutStream = struct {
buffer: *Buffer,
stream: Stream,
pub const Error = error.{OutOfMemory};
pub const Error = error{OutOfMemory};
pub const Stream = OutStream(Error);
pub fn init(buffer: *Buffer) BufferOutStream {
return BufferOutStream.{
return BufferOutStream{
.buffer = buffer,
.stream = Stream.{ .writeFn = writeFn },
.stream = Stream{ .writeFn = writeFn },
};
}
@@ -556,7 +637,7 @@ pub const BufferOutStream = struct.{
}
};
pub const BufferedAtomicFile = struct.{
pub const BufferedAtomicFile = struct {
atomic_file: os.AtomicFile,
file_stream: os.File.OutStream,
buffered_stream: BufferedOutStream(os.File.WriteError),
@@ -564,7 +645,7 @@ pub const BufferedAtomicFile = struct.{
pub fn create(allocator: *mem.Allocator, dest_path: []const u8) !*BufferedAtomicFile {
// TODO with well defined copy elision we don't need this allocation
var self = try allocator.create(BufferedAtomicFile.{
var self = try allocator.create(BufferedAtomicFile{
.atomic_file = undefined,
.file_stream = undefined,
.buffered_stream = undefined,
+2 -2
View File
@@ -63,7 +63,7 @@ test "BufferOutStream" {
}
test "SliceInStream" {
const bytes = []const u8.{ 1, 2, 3, 4, 5, 6, 7 };
const bytes = []const u8{ 1, 2, 3, 4, 5, 6, 7 };
var ss = io.SliceInStream.init(bytes);
var dest: [4]u8 = undefined;
@@ -81,7 +81,7 @@ test "SliceInStream" {
}
test "PeekStream" {
const bytes = []const u8.{ 1, 2, 3, 4, 5, 6, 7, 8 };
const bytes = []const u8{ 1, 2, 3, 4, 5, 6, 7, 8 };
var ss = io.SliceInStream.init(bytes);
var ps = io.PeekStream(2, io.SliceInStream.Error).init(&ss.stream);

Some files were not shown because too many files have changed in this diff Show More