mirror of
https://codeberg.org/ziglang/zig.git
synced 2026-04-27 19:09:47 +03:00
re-enable test-cases and get them all passing
Instead of using `zig test` to build a special version of the compiler that runs all the test-cases, the zig build system is now used as much as possible - all with the basic steps found in the standard library. For incremental compilation tests (the ones that look like foo.0.zig, foo.1.zig, foo.2.zig, etc.), a special version of the compiler is compiled into a utility executable called "check-case" which checks exactly one sequence of incremental updates in an independent subprocess. Previously, all incremental and non-incremental test cases were done in the same test runner process. The compile error checking code is now simpler, but also a bit rudimentary, and so it additionally makes sure that the actual compile errors do not include *extra* messages, and it makes sure that the actual compile errors output in the same order as expected. It is also based on the "ends-with" property of each line rather than the previous logic, which frankly I didn't want to touch with a ten-meter pole. The compile error test cases have been updated to pass in light of these differences. Previously, 'error' mode with 0 compile errors was used to shoehorn in a different kind of test-case - one that only checks if a piece of code compiles without errors. Now there is a 'compile' mode of test-cases, and 'error' must be only used when there are greater than 0 errors. link test cases are updated to omit the target object format argument when calling checkObject since that is no longer needed. The test/stage2 directory is removed; the 2 files within are moved to be directly in the test/ directory.
This commit is contained in:
@@ -53,13 +53,14 @@ pub fn build(b: *std.Build) !void {
|
||||
const docs_step = b.step("docs", "Build documentation");
|
||||
docs_step.dependOn(&docgen_cmd.step);
|
||||
|
||||
const test_cases = b.addTest(.{
|
||||
.root_source_file = .{ .path = "src/test.zig" },
|
||||
const check_case_exe = b.addExecutable(.{
|
||||
.name = "check-case",
|
||||
.root_source_file = .{ .path = "test/src/Cases.zig" },
|
||||
.optimize = optimize,
|
||||
});
|
||||
test_cases.main_pkg_path = ".";
|
||||
test_cases.stack_size = stack_size;
|
||||
test_cases.single_threaded = single_threaded;
|
||||
check_case_exe.main_pkg_path = ".";
|
||||
check_case_exe.stack_size = stack_size;
|
||||
check_case_exe.single_threaded = single_threaded;
|
||||
|
||||
const skip_debug = b.option(bool, "skip-debug", "Main test suite skips debug builds") orelse false;
|
||||
const skip_release = b.option(bool, "skip-release", "Main test suite skips release builds") orelse false;
|
||||
@@ -178,7 +179,7 @@ pub fn build(b: *std.Build) !void {
|
||||
if (target.isWindows() and target.getAbi() == .gnu) {
|
||||
// LTO is currently broken on mingw, this can be removed when it's fixed.
|
||||
exe.want_lto = false;
|
||||
test_cases.want_lto = false;
|
||||
check_case_exe.want_lto = false;
|
||||
}
|
||||
|
||||
const exe_options = b.addOptions();
|
||||
@@ -196,7 +197,7 @@ pub fn build(b: *std.Build) !void {
|
||||
|
||||
if (link_libc) {
|
||||
exe.linkLibC();
|
||||
test_cases.linkLibC();
|
||||
check_case_exe.linkLibC();
|
||||
}
|
||||
|
||||
const is_debug = optimize == .Debug;
|
||||
@@ -282,14 +283,14 @@ pub fn build(b: *std.Build) !void {
|
||||
}
|
||||
|
||||
try addCmakeCfgOptionsToExe(b, cfg, exe, use_zig_libcxx);
|
||||
try addCmakeCfgOptionsToExe(b, cfg, test_cases, use_zig_libcxx);
|
||||
try addCmakeCfgOptionsToExe(b, cfg, check_case_exe, use_zig_libcxx);
|
||||
} else {
|
||||
// Here we are -Denable-llvm but no cmake integration.
|
||||
try addStaticLlvmOptionsToExe(exe);
|
||||
try addStaticLlvmOptionsToExe(test_cases);
|
||||
try addStaticLlvmOptionsToExe(check_case_exe);
|
||||
}
|
||||
if (target.isWindows()) {
|
||||
inline for (.{ exe, test_cases }) |artifact| {
|
||||
inline for (.{ exe, check_case_exe }) |artifact| {
|
||||
artifact.linkSystemLibrary("version");
|
||||
artifact.linkSystemLibrary("uuid");
|
||||
artifact.linkSystemLibrary("ole32");
|
||||
@@ -334,8 +335,9 @@ pub fn build(b: *std.Build) !void {
|
||||
const test_filter = b.option([]const u8, "test-filter", "Skip tests that do not match filter");
|
||||
|
||||
const test_cases_options = b.addOptions();
|
||||
test_cases.addOptions("build_options", test_cases_options);
|
||||
check_case_exe.addOptions("build_options", test_cases_options);
|
||||
|
||||
test_cases_options.addOption(bool, "enable_tracy", false);
|
||||
test_cases_options.addOption(bool, "enable_logging", enable_logging);
|
||||
test_cases_options.addOption(bool, "enable_link_snapshots", enable_link_snapshots);
|
||||
test_cases_options.addOption(bool, "skip_non_native", skip_non_native);
|
||||
@@ -358,12 +360,6 @@ pub fn build(b: *std.Build) !void {
|
||||
test_cases_options.addOption(std.SemanticVersion, "semver", semver);
|
||||
test_cases_options.addOption(?[]const u8, "test_filter", test_filter);
|
||||
|
||||
const test_cases_step = b.step("test-cases", "Run the main compiler test cases");
|
||||
test_cases_step.dependOn(&test_cases.step);
|
||||
if (!skip_stage2_tests) {
|
||||
test_step.dependOn(test_cases_step);
|
||||
}
|
||||
|
||||
var chosen_opt_modes_buf: [4]builtin.Mode = undefined;
|
||||
var chosen_mode_index: usize = 0;
|
||||
if (!skip_debug) {
|
||||
@@ -386,21 +382,20 @@ pub fn build(b: *std.Build) !void {
|
||||
|
||||
const fmt_include_paths = &.{ "doc", "lib", "src", "test", "tools", "build.zig" };
|
||||
const fmt_exclude_paths = &.{"test/cases"};
|
||||
const check_fmt = b.addFmt(.{
|
||||
.paths = fmt_include_paths,
|
||||
.exclude_paths = fmt_exclude_paths,
|
||||
.check = true,
|
||||
});
|
||||
const do_fmt = b.addFmt(.{
|
||||
.paths = fmt_include_paths,
|
||||
.exclude_paths = fmt_exclude_paths,
|
||||
});
|
||||
|
||||
const test_fmt_step = b.step("test-fmt", "Check whether source files have conforming formatting");
|
||||
test_fmt_step.dependOn(&check_fmt.step);
|
||||
b.step("test-fmt", "Check source files having conforming formatting").dependOn(&b.addFmt(.{
|
||||
.paths = fmt_include_paths,
|
||||
.exclude_paths = fmt_exclude_paths,
|
||||
.check = true,
|
||||
}).step);
|
||||
|
||||
const do_fmt_step = b.step("fmt", "Modify source files in place to have conforming formatting");
|
||||
do_fmt_step.dependOn(&do_fmt.step);
|
||||
const test_cases_step = b.step("test-cases", "Run the main compiler test cases");
|
||||
try tests.addCases(b, test_cases_step, test_filter, check_case_exe);
|
||||
if (!skip_stage2_tests) test_step.dependOn(test_cases_step);
|
||||
|
||||
test_step.dependOn(tests.addModuleTests(b, .{
|
||||
.test_filter = test_filter,
|
||||
@@ -475,6 +470,9 @@ pub fn build(b: *std.Build) !void {
|
||||
}));
|
||||
|
||||
try addWasiUpdateStep(b, version);
|
||||
|
||||
b.step("fmt", "Modify source files in place to have conforming formatting")
|
||||
.dependOn(&do_fmt.step);
|
||||
}
|
||||
|
||||
fn addWasiUpdateStep(b: *std.Build, version: [:0]const u8) !void {
|
||||
|
||||
-1968
@@ -1,1968 +0,0 @@
|
||||
const std = @import("std");
|
||||
const builtin = @import("builtin");
|
||||
const Allocator = std.mem.Allocator;
|
||||
const CrossTarget = std.zig.CrossTarget;
|
||||
const print = std.debug.print;
|
||||
const assert = std.debug.assert;
|
||||
const ThreadPool = std.Thread.Pool;
|
||||
const WaitGroup = std.Thread.WaitGroup;
|
||||
|
||||
const link = @import("link.zig");
|
||||
const Compilation = @import("Compilation.zig");
|
||||
const Package = @import("Package.zig");
|
||||
const introspect = @import("introspect.zig");
|
||||
const build_options = @import("build_options");
|
||||
const zig_h = link.File.C.zig_h;
|
||||
|
||||
const enable_qemu: bool = build_options.enable_qemu;
|
||||
const enable_wine: bool = build_options.enable_wine;
|
||||
const enable_wasmtime: bool = build_options.enable_wasmtime;
|
||||
const enable_darling: bool = build_options.enable_darling;
|
||||
const enable_rosetta: bool = build_options.enable_rosetta;
|
||||
const glibc_runtimes_dir: ?[]const u8 = build_options.glibc_runtimes_dir;
|
||||
const skip_stage1 = true;
|
||||
|
||||
const hr = "=" ** 80;
|
||||
|
||||
test {
|
||||
const use_gpa = build_options.force_gpa or !builtin.link_libc;
|
||||
const gpa = gpa: {
|
||||
if (use_gpa) {
|
||||
break :gpa std.testing.allocator;
|
||||
}
|
||||
// We would prefer to use raw libc allocator here, but cannot
|
||||
// use it if it won't support the alignment we need.
|
||||
if (@alignOf(std.c.max_align_t) < @alignOf(i128)) {
|
||||
break :gpa std.heap.c_allocator;
|
||||
}
|
||||
break :gpa std.heap.raw_c_allocator;
|
||||
};
|
||||
|
||||
var arena_allocator = std.heap.ArenaAllocator.init(gpa);
|
||||
defer arena_allocator.deinit();
|
||||
const arena = arena_allocator.allocator();
|
||||
|
||||
var ctx = TestContext.init(gpa, arena);
|
||||
defer ctx.deinit();
|
||||
|
||||
{
|
||||
const dir_path = try std.fs.path.join(arena, &.{
|
||||
std.fs.path.dirname(@src().file).?, "..", "test", "cases",
|
||||
});
|
||||
|
||||
var dir = try std.fs.cwd().openIterableDir(dir_path, .{});
|
||||
defer dir.close();
|
||||
|
||||
ctx.addTestCasesFromDir(dir);
|
||||
}
|
||||
|
||||
try @import("../test/cases.zig").addCases(&ctx);
|
||||
|
||||
try ctx.run();
|
||||
}
|
||||
|
||||
const ErrorMsg = union(enum) {
|
||||
src: struct {
|
||||
src_path: []const u8,
|
||||
msg: []const u8,
|
||||
// maxint means match anything
|
||||
// this is a workaround for stage1 compiler bug I ran into when making it ?u32
|
||||
line: u32,
|
||||
// maxint means match anything
|
||||
// this is a workaround for stage1 compiler bug I ran into when making it ?u32
|
||||
column: u32,
|
||||
kind: Kind,
|
||||
count: u32,
|
||||
},
|
||||
plain: struct {
|
||||
msg: []const u8,
|
||||
kind: Kind,
|
||||
count: u32,
|
||||
},
|
||||
|
||||
const Kind = enum {
|
||||
@"error",
|
||||
note,
|
||||
};
|
||||
|
||||
fn init(other: Compilation.AllErrors.Message, kind: Kind) ErrorMsg {
|
||||
switch (other) {
|
||||
.src => |src| return .{
|
||||
.src = .{
|
||||
.src_path = src.src_path,
|
||||
.msg = src.msg,
|
||||
.line = @intCast(u32, src.line),
|
||||
.column = @intCast(u32, src.column),
|
||||
.kind = kind,
|
||||
.count = src.count,
|
||||
},
|
||||
},
|
||||
.plain => |plain| return .{
|
||||
.plain = .{
|
||||
.msg = plain.msg,
|
||||
.kind = kind,
|
||||
.count = plain.count,
|
||||
},
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
pub fn format(
|
||||
self: ErrorMsg,
|
||||
comptime fmt: []const u8,
|
||||
options: std.fmt.FormatOptions,
|
||||
writer: anytype,
|
||||
) !void {
|
||||
_ = fmt;
|
||||
_ = options;
|
||||
switch (self) {
|
||||
.src => |src| {
|
||||
if (!std.mem.eql(u8, src.src_path, "?") or
|
||||
src.line != std.math.maxInt(u32) or
|
||||
src.column != std.math.maxInt(u32))
|
||||
{
|
||||
try writer.print("{s}:", .{src.src_path});
|
||||
if (src.line != std.math.maxInt(u32)) {
|
||||
try writer.print("{d}:", .{src.line + 1});
|
||||
} else {
|
||||
try writer.writeAll("?:");
|
||||
}
|
||||
if (src.column != std.math.maxInt(u32)) {
|
||||
try writer.print("{d}: ", .{src.column + 1});
|
||||
} else {
|
||||
try writer.writeAll("?: ");
|
||||
}
|
||||
}
|
||||
try writer.print("{s}: {s}", .{ @tagName(src.kind), src.msg });
|
||||
if (src.count != 1) {
|
||||
try writer.print(" ({d} times)", .{src.count});
|
||||
}
|
||||
},
|
||||
.plain => |plain| {
|
||||
try writer.print("{s}: {s}", .{ @tagName(plain.kind), plain.msg });
|
||||
if (plain.count != 1) {
|
||||
try writer.print(" ({d} times)", .{plain.count});
|
||||
}
|
||||
},
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
/// Default config values for known test manifest key-value pairings.
|
||||
/// Currently handled defaults are:
|
||||
/// * backend
|
||||
/// * target
|
||||
/// * output_mode
|
||||
/// * is_test
|
||||
const TestManifestConfigDefaults = struct {
|
||||
/// Asserts if the key doesn't exist - yep, it's an oversight alright.
|
||||
fn get(@"type": TestManifest.Type, key: []const u8) []const u8 {
|
||||
if (std.mem.eql(u8, key, "backend")) {
|
||||
return "stage2";
|
||||
} else if (std.mem.eql(u8, key, "target")) {
|
||||
comptime {
|
||||
var defaults: []const u8 = "";
|
||||
// TODO should we only return "mainstream" targets by default here?
|
||||
// TODO we should also specify ABIs explicitly as the backends are
|
||||
// getting more and more complete
|
||||
// Linux
|
||||
inline for (&[_][]const u8{ "x86_64", "arm", "aarch64" }) |arch| {
|
||||
defaults = defaults ++ arch ++ "-linux" ++ ",";
|
||||
}
|
||||
// macOS
|
||||
inline for (&[_][]const u8{ "x86_64", "aarch64" }) |arch| {
|
||||
defaults = defaults ++ arch ++ "-macos" ++ ",";
|
||||
}
|
||||
// Windows
|
||||
defaults = defaults ++ "x86_64-windows" ++ ",";
|
||||
// Wasm
|
||||
defaults = defaults ++ "wasm32-wasi";
|
||||
return defaults;
|
||||
}
|
||||
} else if (std.mem.eql(u8, key, "output_mode")) {
|
||||
return switch (@"type") {
|
||||
.@"error" => "Obj",
|
||||
.run => "Exe",
|
||||
.cli => @panic("TODO test harness for CLI tests"),
|
||||
};
|
||||
} else if (std.mem.eql(u8, key, "is_test")) {
|
||||
return "0";
|
||||
} else unreachable;
|
||||
}
|
||||
};
|
||||
|
||||
/// Manifest syntax example:
|
||||
/// (see https://github.com/ziglang/zig/issues/11288)
|
||||
///
|
||||
/// error
|
||||
/// backend=stage1,stage2
|
||||
/// output_mode=exe
|
||||
///
|
||||
/// :3:19: error: foo
|
||||
///
|
||||
/// run
|
||||
/// target=x86_64-linux,aarch64-macos
|
||||
///
|
||||
/// I am expected stdout! Hello!
|
||||
///
|
||||
/// cli
|
||||
///
|
||||
/// build test
|
||||
const TestManifest = struct {
|
||||
type: Type,
|
||||
config_map: std.StringHashMap([]const u8),
|
||||
trailing_bytes: []const u8 = "",
|
||||
|
||||
const Type = enum {
|
||||
@"error",
|
||||
run,
|
||||
cli,
|
||||
};
|
||||
|
||||
const TrailingIterator = struct {
|
||||
inner: std.mem.TokenIterator(u8),
|
||||
|
||||
fn next(self: *TrailingIterator) ?[]const u8 {
|
||||
const next_inner = self.inner.next() orelse return null;
|
||||
return std.mem.trim(u8, next_inner[2..], " \t");
|
||||
}
|
||||
};
|
||||
|
||||
fn ConfigValueIterator(comptime T: type) type {
|
||||
return struct {
|
||||
inner: std.mem.SplitIterator(u8),
|
||||
|
||||
fn next(self: *@This()) !?T {
|
||||
const next_raw = self.inner.next() orelse return null;
|
||||
const parseFn = getDefaultParser(T);
|
||||
return try parseFn(next_raw);
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
fn parse(arena: Allocator, bytes: []const u8) !TestManifest {
|
||||
// The manifest is the last contiguous block of comments in the file
|
||||
// We scan for the beginning by searching backward for the first non-empty line that does not start with "//"
|
||||
var start: ?usize = null;
|
||||
var end: usize = bytes.len;
|
||||
if (bytes.len > 0) {
|
||||
var cursor: usize = bytes.len - 1;
|
||||
while (true) {
|
||||
// Move to beginning of line
|
||||
while (cursor > 0 and bytes[cursor - 1] != '\n') cursor -= 1;
|
||||
|
||||
if (std.mem.startsWith(u8, bytes[cursor..], "//")) {
|
||||
start = cursor; // Contiguous comment line, include in manifest
|
||||
} else {
|
||||
if (start != null) break; // Encountered non-comment line, end of manifest
|
||||
|
||||
// We ignore all-whitespace lines following the comment block, but anything else
|
||||
// means that there is no manifest present.
|
||||
if (std.mem.trim(u8, bytes[cursor..end], " \r\n\t").len == 0) {
|
||||
end = cursor;
|
||||
} else break; // If it's not whitespace, there is no manifest
|
||||
}
|
||||
|
||||
// Move to previous line
|
||||
if (cursor != 0) cursor -= 1 else break;
|
||||
}
|
||||
}
|
||||
|
||||
const actual_start = start orelse return error.MissingTestManifest;
|
||||
const manifest_bytes = bytes[actual_start..end];
|
||||
|
||||
var it = std.mem.tokenize(u8, manifest_bytes, "\r\n");
|
||||
|
||||
// First line is the test type
|
||||
const tt: Type = blk: {
|
||||
const line = it.next() orelse return error.MissingTestCaseType;
|
||||
const raw = std.mem.trim(u8, line[2..], " \t");
|
||||
if (std.mem.eql(u8, raw, "error")) {
|
||||
break :blk .@"error";
|
||||
} else if (std.mem.eql(u8, raw, "run")) {
|
||||
break :blk .run;
|
||||
} else if (std.mem.eql(u8, raw, "cli")) {
|
||||
break :blk .cli;
|
||||
} else {
|
||||
std.log.warn("unknown test case type requested: {s}", .{raw});
|
||||
return error.UnknownTestCaseType;
|
||||
}
|
||||
};
|
||||
|
||||
var manifest: TestManifest = .{
|
||||
.type = tt,
|
||||
.config_map = std.StringHashMap([]const u8).init(arena),
|
||||
};
|
||||
|
||||
// Any subsequent line until a blank comment line is key=value(s) pair
|
||||
while (it.next()) |line| {
|
||||
const trimmed = std.mem.trim(u8, line[2..], " \t");
|
||||
if (trimmed.len == 0) break;
|
||||
|
||||
// Parse key=value(s)
|
||||
var kv_it = std.mem.split(u8, trimmed, "=");
|
||||
const key = kv_it.first();
|
||||
try manifest.config_map.putNoClobber(key, kv_it.next() orelse return error.MissingValuesForConfig);
|
||||
}
|
||||
|
||||
// Finally, trailing is expected output
|
||||
manifest.trailing_bytes = manifest_bytes[it.index..];
|
||||
|
||||
return manifest;
|
||||
}
|
||||
|
||||
fn getConfigForKey(
|
||||
self: TestManifest,
|
||||
key: []const u8,
|
||||
comptime T: type,
|
||||
) ConfigValueIterator(T) {
|
||||
const bytes = self.config_map.get(key) orelse TestManifestConfigDefaults.get(self.type, key);
|
||||
return ConfigValueIterator(T){
|
||||
.inner = std.mem.split(u8, bytes, ","),
|
||||
};
|
||||
}
|
||||
|
||||
fn getConfigForKeyAlloc(
|
||||
self: TestManifest,
|
||||
allocator: Allocator,
|
||||
key: []const u8,
|
||||
comptime T: type,
|
||||
) ![]const T {
|
||||
var out = std.ArrayList(T).init(allocator);
|
||||
defer out.deinit();
|
||||
var it = self.getConfigForKey(key, T);
|
||||
while (try it.next()) |item| {
|
||||
try out.append(item);
|
||||
}
|
||||
return try out.toOwnedSlice();
|
||||
}
|
||||
|
||||
fn getConfigForKeyAssertSingle(self: TestManifest, key: []const u8, comptime T: type) !T {
|
||||
var it = self.getConfigForKey(key, T);
|
||||
const res = (try it.next()) orelse unreachable;
|
||||
assert((try it.next()) == null);
|
||||
return res;
|
||||
}
|
||||
|
||||
fn trailing(self: TestManifest) TrailingIterator {
|
||||
return .{
|
||||
.inner = std.mem.tokenize(u8, self.trailing_bytes, "\r\n"),
|
||||
};
|
||||
}
|
||||
|
||||
fn trailingAlloc(self: TestManifest, allocator: Allocator) error{OutOfMemory}![]const []const u8 {
|
||||
var out = std.ArrayList([]const u8).init(allocator);
|
||||
defer out.deinit();
|
||||
var it = self.trailing();
|
||||
while (it.next()) |line| {
|
||||
try out.append(line);
|
||||
}
|
||||
return try out.toOwnedSlice();
|
||||
}
|
||||
|
||||
fn ParseFn(comptime T: type) type {
|
||||
return fn ([]const u8) anyerror!T;
|
||||
}
|
||||
|
||||
fn getDefaultParser(comptime T: type) ParseFn(T) {
|
||||
if (T == CrossTarget) return struct {
|
||||
fn parse(str: []const u8) anyerror!T {
|
||||
var opts = CrossTarget.ParseOptions{
|
||||
.arch_os_abi = str,
|
||||
};
|
||||
return try CrossTarget.parse(opts);
|
||||
}
|
||||
}.parse;
|
||||
|
||||
switch (@typeInfo(T)) {
|
||||
.Int => return struct {
|
||||
fn parse(str: []const u8) anyerror!T {
|
||||
return try std.fmt.parseInt(T, str, 0);
|
||||
}
|
||||
}.parse,
|
||||
.Bool => return struct {
|
||||
fn parse(str: []const u8) anyerror!T {
|
||||
const as_int = try std.fmt.parseInt(u1, str, 0);
|
||||
return as_int > 0;
|
||||
}
|
||||
}.parse,
|
||||
.Enum => return struct {
|
||||
fn parse(str: []const u8) anyerror!T {
|
||||
return std.meta.stringToEnum(T, str) orelse {
|
||||
std.log.err("unknown enum variant for {s}: {s}", .{ @typeName(T), str });
|
||||
return error.UnknownEnumVariant;
|
||||
};
|
||||
}
|
||||
}.parse,
|
||||
.Struct => @compileError("no default parser for " ++ @typeName(T)),
|
||||
else => @compileError("no default parser for " ++ @typeName(T)),
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
const TestStrategy = enum {
|
||||
/// Execute tests as independent compilations, unless they are explicitly
|
||||
/// incremental ("foo.0.zig", "foo.1.zig", etc.)
|
||||
independent,
|
||||
/// Execute all tests as incremental updates to a single compilation. Explicitly
|
||||
/// incremental tests ("foo.0.zig", "foo.1.zig", etc.) still execute in order
|
||||
incremental,
|
||||
};
|
||||
|
||||
/// Iterates a set of filenames extracting batches that are either incremental
|
||||
/// ("foo.0.zig", "foo.1.zig", etc.) or independent ("foo.zig", "bar.zig", etc.).
|
||||
/// Assumes filenames are sorted.
|
||||
const TestIterator = struct {
|
||||
start: usize = 0,
|
||||
end: usize = 0,
|
||||
filenames: []const []const u8,
|
||||
/// reset on each call to `next`
|
||||
index: usize = 0,
|
||||
|
||||
const Error = error{InvalidIncrementalTestIndex};
|
||||
|
||||
fn next(it: *TestIterator) Error!?[]const []const u8 {
|
||||
try it.nextInner();
|
||||
if (it.start == it.end) return null;
|
||||
return it.filenames[it.start..it.end];
|
||||
}
|
||||
|
||||
fn nextInner(it: *TestIterator) Error!void {
|
||||
it.start = it.end;
|
||||
if (it.end == it.filenames.len) return;
|
||||
if (it.end + 1 == it.filenames.len) {
|
||||
it.end += 1;
|
||||
return;
|
||||
}
|
||||
|
||||
const remaining = it.filenames[it.end..];
|
||||
it.index = 0;
|
||||
while (it.index < remaining.len - 1) : (it.index += 1) {
|
||||
// First, check if this file is part of an incremental update sequence
|
||||
// Split filename into "<base_name>.<index>.<file_ext>"
|
||||
const prev_parts = getTestFileNameParts(remaining[it.index]);
|
||||
const new_parts = getTestFileNameParts(remaining[it.index + 1]);
|
||||
|
||||
// If base_name and file_ext match, these files are in the same test sequence
|
||||
// and the new one should be the incremented version of the previous test
|
||||
if (std.mem.eql(u8, prev_parts.base_name, new_parts.base_name) and
|
||||
std.mem.eql(u8, prev_parts.file_ext, new_parts.file_ext))
|
||||
{
|
||||
// This is "foo.X.zig" followed by "foo.Y.zig". Make sure that X = Y + 1
|
||||
if (prev_parts.test_index == null)
|
||||
return error.InvalidIncrementalTestIndex;
|
||||
if (new_parts.test_index == null)
|
||||
return error.InvalidIncrementalTestIndex;
|
||||
if (new_parts.test_index.? != prev_parts.test_index.? + 1)
|
||||
return error.InvalidIncrementalTestIndex;
|
||||
} else {
|
||||
// This is not the same test sequence, so the new file must be the first file
|
||||
// in a new sequence ("*.0.zig") or an independent test file ("*.zig")
|
||||
if (new_parts.test_index != null and new_parts.test_index.? != 0)
|
||||
return error.InvalidIncrementalTestIndex;
|
||||
|
||||
it.end += it.index + 1;
|
||||
break;
|
||||
}
|
||||
} else {
|
||||
it.end += remaining.len;
|
||||
}
|
||||
}
|
||||
|
||||
/// In the event of an `error.InvalidIncrementalTestIndex`, this function can
|
||||
/// be used to find the current filename that was being processed.
|
||||
/// Asserts the iterator hasn't reached the end.
|
||||
fn currentFilename(it: TestIterator) []const u8 {
|
||||
assert(it.end != it.filenames.len);
|
||||
const remaining = it.filenames[it.end..];
|
||||
return remaining[it.index + 1];
|
||||
}
|
||||
};
|
||||
|
||||
/// For a filename in the format "<filename>.X.<ext>" or "<filename>.<ext>", returns
|
||||
/// "<filename>", "<ext>" and X parsed as a decimal number. If X is not present, or
|
||||
/// cannot be parsed as a decimal number, it is treated as part of <filename>
|
||||
fn getTestFileNameParts(name: []const u8) struct {
|
||||
base_name: []const u8,
|
||||
file_ext: []const u8,
|
||||
test_index: ?usize,
|
||||
} {
|
||||
const file_ext = std.fs.path.extension(name);
|
||||
const trimmed = name[0 .. name.len - file_ext.len]; // Trim off ".<ext>"
|
||||
const maybe_index = std.fs.path.extension(trimmed); // Extract ".X"
|
||||
|
||||
// Attempt to parse index
|
||||
const index: ?usize = if (maybe_index.len > 0)
|
||||
std.fmt.parseInt(usize, maybe_index[1..], 10) catch null
|
||||
else
|
||||
null;
|
||||
|
||||
// Adjust "<filename>" extent based on parsing success
|
||||
const base_name_end = trimmed.len - if (index != null) maybe_index.len else 0;
|
||||
return .{
|
||||
.base_name = name[0..base_name_end],
|
||||
.file_ext = if (file_ext.len > 0) file_ext[1..] else file_ext,
|
||||
.test_index = index,
|
||||
};
|
||||
}
|
||||
|
||||
/// Sort test filenames in-place, so that incremental test cases ("foo.0.zig",
|
||||
/// "foo.1.zig", etc.) are contiguous and appear in numerical order.
|
||||
fn sortTestFilenames(filenames: [][]const u8) void {
|
||||
const Context = struct {
|
||||
pub fn lessThan(_: @This(), a: []const u8, b: []const u8) bool {
|
||||
const a_parts = getTestFileNameParts(a);
|
||||
const b_parts = getTestFileNameParts(b);
|
||||
|
||||
// Sort "<base_name>.X.<file_ext>" based on "<base_name>" and "<file_ext>" first
|
||||
return switch (std.mem.order(u8, a_parts.base_name, b_parts.base_name)) {
|
||||
.lt => true,
|
||||
.gt => false,
|
||||
.eq => switch (std.mem.order(u8, a_parts.file_ext, b_parts.file_ext)) {
|
||||
.lt => true,
|
||||
.gt => false,
|
||||
.eq => {
|
||||
// a and b differ only in their ".X" part
|
||||
|
||||
// Sort "<base_name>.<file_ext>" before any "<base_name>.X.<file_ext>"
|
||||
if (a_parts.test_index) |a_index| {
|
||||
if (b_parts.test_index) |b_index| {
|
||||
// Make sure that incremental tests appear in linear order
|
||||
return a_index < b_index;
|
||||
} else {
|
||||
return false;
|
||||
}
|
||||
} else {
|
||||
return b_parts.test_index != null;
|
||||
}
|
||||
},
|
||||
},
|
||||
};
|
||||
}
|
||||
};
|
||||
std.sort.sort([]const u8, filenames, Context{}, Context.lessThan);
|
||||
}
|
||||
|
||||
pub const TestContext = struct {
|
||||
gpa: Allocator,
|
||||
arena: Allocator,
|
||||
cases: std.ArrayList(Case),
|
||||
|
||||
pub const Update = struct {
|
||||
/// The input to the current update. We simulate an incremental update
|
||||
/// with the file's contents changed to this value each update.
|
||||
///
|
||||
/// This value can change entirely between updates, which would be akin
|
||||
/// to deleting the source file and creating a new one from scratch; or
|
||||
/// you can keep it mostly consistent, with small changes, testing the
|
||||
/// effects of the incremental compilation.
|
||||
src: [:0]const u8,
|
||||
name: []const u8,
|
||||
case: union(enum) {
|
||||
/// Check the main binary output file against an expected set of bytes.
|
||||
/// This is most useful with, for example, `-ofmt=c`.
|
||||
CompareObjectFile: []const u8,
|
||||
/// An error update attempts to compile bad code, and ensures that it
|
||||
/// fails to compile, and for the expected reasons.
|
||||
/// A slice containing the expected errors *in sequential order*.
|
||||
Error: []const ErrorMsg,
|
||||
/// An execution update compiles and runs the input, testing the
|
||||
/// stdout against the expected results
|
||||
/// This is a slice containing the expected message.
|
||||
Execution: []const u8,
|
||||
/// A header update compiles the input with the equivalent of
|
||||
/// `-femit-h` and tests the produced header against the
|
||||
/// expected result
|
||||
Header: []const u8,
|
||||
},
|
||||
};
|
||||
|
||||
pub const File = struct {
|
||||
/// Contents of the importable file. Doesn't yet support incremental updates.
|
||||
src: [:0]const u8,
|
||||
path: []const u8,
|
||||
};
|
||||
|
||||
pub const DepModule = struct {
|
||||
name: []const u8,
|
||||
path: []const u8,
|
||||
};
|
||||
|
||||
pub const Backend = enum {
|
||||
stage1,
|
||||
stage2,
|
||||
llvm,
|
||||
};
|
||||
|
||||
/// A `Case` consists of a list of `Update`. The same `Compilation` is used for each
|
||||
/// update, so each update's source is treated as a single file being
|
||||
/// updated by the test harness and incrementally compiled.
|
||||
pub const Case = struct {
|
||||
/// The name of the test case. This is shown if a test fails, and
|
||||
/// otherwise ignored.
|
||||
name: []const u8,
|
||||
/// The platform the test targets. For non-native platforms, an emulator
|
||||
/// such as QEMU is required for tests to complete.
|
||||
target: CrossTarget,
|
||||
/// In order to be able to run e.g. Execution updates, this must be set
|
||||
/// to Executable.
|
||||
output_mode: std.builtin.OutputMode,
|
||||
optimize_mode: std.builtin.Mode = .Debug,
|
||||
updates: std.ArrayList(Update),
|
||||
emit_h: bool = false,
|
||||
is_test: bool = false,
|
||||
expect_exact: bool = false,
|
||||
backend: Backend = .stage2,
|
||||
link_libc: bool = false,
|
||||
|
||||
files: std.ArrayList(File),
|
||||
deps: std.ArrayList(DepModule),
|
||||
|
||||
result: anyerror!void = {},
|
||||
|
||||
pub fn addSourceFile(case: *Case, name: []const u8, src: [:0]const u8) void {
|
||||
case.files.append(.{ .path = name, .src = src }) catch @panic("out of memory");
|
||||
}
|
||||
|
||||
pub fn addDepModule(case: *Case, name: []const u8, path: []const u8) void {
|
||||
case.deps.append(.{
|
||||
.name = name,
|
||||
.path = path,
|
||||
}) catch @panic("out of memory");
|
||||
}
|
||||
|
||||
/// Adds a subcase in which the module is updated with `src`, and a C
|
||||
/// header is generated.
|
||||
pub fn addHeader(self: *Case, src: [:0]const u8, result: [:0]const u8) void {
|
||||
self.emit_h = true;
|
||||
self.updates.append(.{
|
||||
.src = src,
|
||||
.name = "update",
|
||||
.case = .{ .Header = result },
|
||||
}) catch @panic("out of memory");
|
||||
}
|
||||
|
||||
/// Adds a subcase in which the module is updated with `src`, compiled,
|
||||
/// run, and the output is tested against `result`.
|
||||
pub fn addCompareOutput(self: *Case, src: [:0]const u8, result: []const u8) void {
|
||||
self.updates.append(.{
|
||||
.src = src,
|
||||
.name = "update",
|
||||
.case = .{ .Execution = result },
|
||||
}) catch @panic("out of memory");
|
||||
}
|
||||
|
||||
/// Adds a subcase in which the module is updated with `src`, compiled,
|
||||
/// and the object file data is compared against `result`.
|
||||
pub fn addCompareObjectFile(self: *Case, src: [:0]const u8, result: []const u8) void {
|
||||
self.updates.append(.{
|
||||
.src = src,
|
||||
.name = "update",
|
||||
.case = .{ .CompareObjectFile = result },
|
||||
}) catch @panic("out of memory");
|
||||
}
|
||||
|
||||
pub fn addError(self: *Case, src: [:0]const u8, errors: []const []const u8) void {
|
||||
return self.addErrorNamed("update", src, errors);
|
||||
}
|
||||
|
||||
/// Adds a subcase in which the module is updated with `src`, which
/// should contain invalid input, and ensures that compilation fails
/// for the expected reasons, given in sequential order in `errors` in
/// the form `:line:column: error: message`.
///
/// Each entry in `errors` may also be:
/// * "error: msg" / "note: msg" — a message with no source location.
/// * A "?" in the line or column position — that coordinate is
///   intentionally left unspecified.
/// * A message ending in "(N times)" — expected to occur N times.
///
/// The `ErrorMsg` array is allocated from `self.updates.allocator` and
/// freed by `deinit`.
pub fn addErrorNamed(
    self: *Case,
    name: []const u8,
    src: [:0]const u8,
    errors: []const []const u8,
) void {
    const array = self.updates.allocator.alloc(ErrorMsg, errors.len) catch @panic("out of memory");
    for (errors, 0..) |err_msg_line, i| {
        // Location-less messages.
        if (std.mem.startsWith(u8, err_msg_line, "error: ")) {
            array[i] = .{
                .plain = .{
                    .msg = err_msg_line["error: ".len..],
                    .kind = .@"error",
                    .count = 1,
                },
            };
            continue;
        } else if (std.mem.startsWith(u8, err_msg_line, "note: ")) {
            array[i] = .{
                .plain = .{
                    .msg = err_msg_line["note: ".len..],
                    .kind = .note,
                    .count = 1,
                },
            };
            continue;
        }
        // example: "file.zig:1:2: error: bad thing happened"
        var it = std.mem.split(u8, err_msg_line, ":");
        const src_path = it.first();
        const line_text = it.next() orelse @panic("missing line");
        const col_text = it.next() orelse @panic("missing column");
        const kind_text = it.next() orelse @panic("missing 'error'/'note'");
        var msg = it.rest()[1..]; // skip over the space at end of "error: "

        const line: ?u32 = if (std.mem.eql(u8, line_text, "?"))
            null
        else
            std.fmt.parseInt(u32, line_text, 10) catch @panic("bad line number");
        // Bug fix: this previously tested `line_text` instead of `col_text`,
        // so a "?" column was only honored when the line was also "?", and a
        // numeric column next to a "?" line was silently dropped.
        const column: ?u32 = if (std.mem.eql(u8, col_text, "?"))
            null
        else
            std.fmt.parseInt(u32, col_text, 10) catch @panic("bad column number");
        // The leading space is still attached after splitting on ':'.
        const kind: ErrorMsg.Kind = if (std.mem.eql(u8, kind_text, " error"))
            .@"error"
        else if (std.mem.eql(u8, kind_text, " note"))
            .note
        else
            @panic("expected 'error'/'note'");

        // Coordinates are 1-based in the manifest; maxInt marks "unspecified".
        const line_0based: u32 = if (line) |n| blk: {
            if (n == 0) {
                print("{s}: line must be specified starting at one\n", .{self.name});
                return;
            }
            break :blk n - 1;
        } else std.math.maxInt(u32);

        const column_0based: u32 = if (column) |n| blk: {
            if (n == 0) {
                // Bug fix: this diagnostic used to say "line" (copy-paste).
                print("{s}: column must be specified starting at one\n", .{self.name});
                return;
            }
            break :blk n - 1;
        } else std.math.maxInt(u32);

        // Strip an expected-count suffix such as " (2 times)" from the
        // message and record the count separately.
        const suffix = " times)";
        const count = if (std.mem.endsWith(u8, msg, suffix)) count: {
            const lparen = std.mem.lastIndexOfScalar(u8, msg, '(').?;
            const count = std.fmt.parseInt(u32, msg[lparen + 1 .. msg.len - suffix.len], 10) catch @panic("bad error note count number");
            msg = msg[0 .. lparen - 1];
            break :count count;
        } else 1;

        array[i] = .{
            .src = .{
                .src_path = src_path,
                .msg = msg,
                .line = line_0based,
                .column = column_0based,
                .kind = kind,
                .count = count,
            },
        };
    }
    self.updates.append(.{
        .src = src,
        .name = name,
        .case = .{ .Error = array },
    }) catch @panic("out of memory");
}
|
||||
|
||||
/// Adds a subcase asserting that updating the module with `src`
/// produces no compile errors at all.
pub fn compiles(self: *Case, src: [:0]const u8) void {
    const no_errors = [_][]const u8{};
    self.addError(src, &no_errors);
}
|
||||
};
|
||||
|
||||
/// Creates and registers a new executable test case, returning a
/// pointer to it. The pointer is only valid until the next case is
/// appended (ArrayList growth may relocate items).
pub fn addExe(
    ctx: *TestContext,
    name: []const u8,
    target: CrossTarget,
) *Case {
    const index = ctx.cases.items.len;
    ctx.cases.append(.{
        .name = name,
        .target = target,
        .updates = std.ArrayList(Update).init(ctx.cases.allocator),
        .output_mode = .Exe,
        .files = std.ArrayList(File).init(ctx.arena),
        .deps = std.ArrayList(DepModule).init(ctx.arena),
    }) catch @panic("out of memory");
    return &ctx.cases.items[index];
}
|
||||
|
||||
/// Adds a test case for Zig input that builds an executable.
/// Thin alias for `addExe`.
pub fn exe(ctx: *TestContext, name: []const u8, target: CrossTarget) *Case {
    const case = ctx.addExe(name, target);
    return case;
}
|
||||
|
||||
/// Registers an executable case whose target is forced to the C object
/// format, so the C backend's output is compiled (with libc) and run.
/// The case name is prefixed with "CBE: ".
pub fn exeFromCompiledC(ctx: *TestContext, name: []const u8, target: CrossTarget) *Case {
    const prefixed_name = std.fmt.allocPrint(ctx.arena, "CBE: {s}", .{name}) catch
        @panic("out of memory");
    var target_adjusted = target;
    target_adjusted.ofmt = .c;
    const index = ctx.cases.items.len;
    ctx.cases.append(.{
        .name = prefixed_name,
        .target = target_adjusted,
        .updates = std.ArrayList(Update).init(ctx.cases.allocator),
        .output_mode = .Exe,
        .files = std.ArrayList(File).init(ctx.arena),
        .deps = std.ArrayList(DepModule).init(ctx.arena),
        .link_libc = true,
    }) catch @panic("out of memory");
    return &ctx.cases.items[index];
}
|
||||
|
||||
/// Adds a test case that uses the LLVM backend to emit an executable.
/// Currently this implies linking libc, because only then we can generate
/// a testable executable.
pub fn exeUsingLlvmBackend(ctx: *TestContext, name: []const u8, target: CrossTarget) *Case {
    const index = ctx.cases.items.len;
    ctx.cases.append(.{
        .name = name,
        .target = target,
        .updates = std.ArrayList(Update).init(ctx.cases.allocator),
        .output_mode = .Exe,
        .files = std.ArrayList(File).init(ctx.arena),
        .deps = std.ArrayList(DepModule).init(ctx.arena),
        .backend = .llvm,
        .link_libc = true,
    }) catch @panic("out of memory");
    return &ctx.cases.items[index];
}
|
||||
|
||||
/// Creates and registers a new object-file test case, returning a
/// pointer that stays valid only until the next case is appended.
pub fn addObj(
    ctx: *TestContext,
    name: []const u8,
    target: CrossTarget,
) *Case {
    const index = ctx.cases.items.len;
    ctx.cases.append(.{
        .name = name,
        .target = target,
        .updates = std.ArrayList(Update).init(ctx.cases.allocator),
        .output_mode = .Obj,
        .files = std.ArrayList(File).init(ctx.arena),
        .deps = std.ArrayList(DepModule).init(ctx.arena),
    }) catch @panic("out of memory");
    return &ctx.cases.items[index];
}
|
||||
|
||||
/// Creates and registers a case built as a `zig test` style executable
/// (`is_test = true`), returning a pointer valid until the next append.
pub fn addTest(
    ctx: *TestContext,
    name: []const u8,
    target: CrossTarget,
) *Case {
    const index = ctx.cases.items.len;
    ctx.cases.append(.{
        .name = name,
        .target = target,
        .updates = std.ArrayList(Update).init(ctx.cases.allocator),
        .output_mode = .Exe,
        .is_test = true,
        .files = std.ArrayList(File).init(ctx.arena),
        .deps = std.ArrayList(DepModule).init(ctx.arena),
    }) catch @panic("out of memory");
    return &ctx.cases.items[index];
}
|
||||
|
||||
/// Adds a test case for Zig input that builds an object file.
/// Thin alias for `addObj`.
pub fn obj(ctx: *TestContext, name: []const u8, target: CrossTarget) *Case {
    const case = ctx.addObj(name, target);
    return case;
}
|
||||
|
||||
/// Adds a test case for ZIR input, producing an object file.
/// NOTE(review): ZIR input is no longer distinguished; this now behaves
/// exactly like `obj`. The previous call passed a stray `.ZIR` argument
/// that does not match `addObj`'s two-parameter signature and would fail
/// to compile if this function were ever referenced.
pub fn objZIR(ctx: *TestContext, name: []const u8, target: CrossTarget) *Case {
    return ctx.addObj(name, target);
}
|
||||
|
||||
/// Adds a test case producing C code: the target's object format is
/// forced to `.c` and an object-mode case is registered. Returns a
/// pointer valid until the next case is appended.
pub fn addC(ctx: *TestContext, name: []const u8, target: CrossTarget) *Case {
    var target_adjusted = target;
    target_adjusted.ofmt = .c;
    const index = ctx.cases.items.len;
    ctx.cases.append(.{
        .name = name,
        .target = target_adjusted,
        .updates = std.ArrayList(Update).init(ctx.cases.allocator),
        .output_mode = .Obj,
        .files = std.ArrayList(File).init(ctx.arena),
        .deps = std.ArrayList(DepModule).init(ctx.arena),
    }) catch @panic("out of memory");
    return &ctx.cases.items[index];
}
|
||||
|
||||
/// Registers a case that compiles `src` via the C backend and compares
/// the generated C source (prefixed with the `zig_h` preamble) to `out`.
pub fn c(ctx: *TestContext, name: []const u8, target: CrossTarget, src: [:0]const u8, comptime out: [:0]const u8) void {
    const case = ctx.addC(name, target);
    case.addCompareObjectFile(src, zig_h ++ out);
}
|
||||
|
||||
/// Registers a case that compiles `src` via the C backend and compares
/// the emitted header (prefixed with the `zig_h` preamble) to `out`.
pub fn h(ctx: *TestContext, name: []const u8, target: CrossTarget, src: [:0]const u8, comptime out: [:0]const u8) void {
    const case = ctx.addC(name, target);
    case.addHeader(src, zig_h ++ out);
}
|
||||
|
||||
/// Registers a native-target object case on the stage1 backend that is
/// expected to fail with `expected_errors`.
pub fn objErrStage1(
    ctx: *TestContext,
    name: []const u8,
    src: [:0]const u8,
    expected_errors: []const []const u8,
) void {
    const new_case = ctx.addObj(name, .{});
    new_case.backend = .stage1;
    new_case.addError(src, expected_errors);
}
|
||||
|
||||
/// Registers a native-target `zig test` case on the stage1 backend that
/// is expected to fail with `expected_errors`.
pub fn testErrStage1(
    ctx: *TestContext,
    name: []const u8,
    src: [:0]const u8,
    expected_errors: []const []const u8,
) void {
    const new_case = ctx.addTest(name, .{});
    new_case.backend = .stage1;
    new_case.addError(src, expected_errors);
}
|
||||
|
||||
/// Registers a native-target executable case on the stage1 backend that
/// is expected to fail with `expected_errors`.
pub fn exeErrStage1(
    ctx: *TestContext,
    name: []const u8,
    src: [:0]const u8,
    expected_errors: []const []const u8,
) void {
    const new_case = ctx.addExe(name, .{});
    new_case.backend = .stage1;
    new_case.addError(src, expected_errors);
}
|
||||
|
||||
/// Convenience: native-target executable case with a single
/// compile-run-and-compare-stdout update.
pub fn addCompareOutput(
    ctx: *TestContext,
    name: []const u8,
    src: [:0]const u8,
    expected_stdout: []const u8,
) void {
    const new_case = ctx.addExe(name, .{});
    new_case.addCompareOutput(src, expected_stdout);
}
|
||||
|
||||
/// Adds a test case that compiles the Zig source given in `src`,
/// runs the result, and compares its stdout against `expected_stdout`.
/// Thin alias for `addCompareOutput`.
pub fn compareOutput(
    ctx: *TestContext,
    name: []const u8,
    src: [:0]const u8,
    expected_stdout: []const u8,
) void {
    ctx.addCompareOutput(name, src, expected_stdout);
}
|
||||
|
||||
/// Adds a test case that compiles the ZIR source given in `src`, runs
/// it, and tests the output against `expected_stdout`.
/// NOTE(review): ZIR input is no longer distinguished; this now behaves
/// exactly like `compareOutput`. The previous call passed a stray `.ZIR`
/// argument that does not match `addCompareOutput`'s signature and would
/// fail to compile if this function were ever referenced.
pub fn compareOutputZIR(
    ctx: *TestContext,
    name: []const u8,
    src: [:0]const u8,
    expected_stdout: []const u8,
) void {
    ctx.addCompareOutput(name, src, expected_stdout);
}
|
||||
|
||||
/// Registers an object case with a single transform update: compile
/// `src` and compare the transformed output against `result`.
pub fn addTransform(
    ctx: *TestContext,
    name: []const u8,
    target: CrossTarget,
    src: [:0]const u8,
    result: [:0]const u8,
) void {
    const new_case = ctx.addObj(name, target);
    new_case.addTransform(src, result);
}
|
||||
|
||||
/// Adds a test case that compiles the Zig given in `src` to ZIR and
/// tests the ZIR against `result`. Thin alias for `addTransform`.
pub fn transform(
    ctx: *TestContext,
    name: []const u8,
    target: CrossTarget,
    src: [:0]const u8,
    result: [:0]const u8,
) void {
    return ctx.addTransform(name, target, src, result);
}
|
||||
|
||||
/// Registers an object case with a single update that must fail with
/// `expected_errors`.
pub fn addError(
    ctx: *TestContext,
    name: []const u8,
    target: CrossTarget,
    src: [:0]const u8,
    expected_errors: []const []const u8,
) void {
    const new_case = ctx.addObj(name, target);
    new_case.addError(src, expected_errors);
}
|
||||
|
||||
/// Adds a test case that ensures the Zig given in `src` fails to compile
/// for the expected reasons, listed in sequential order in
/// `expected_errors` in the form `:line:column: error: message`.
/// Thin alias for `addError`.
pub fn compileError(
    ctx: *TestContext,
    name: []const u8,
    target: CrossTarget,
    src: [:0]const u8,
    expected_errors: []const []const u8,
) void {
    return ctx.addError(name, target, src, expected_errors);
}
|
||||
|
||||
/// Adds a test case that ensures that the ZIR given in `src` fails to
/// compile for the expected reasons, given in sequential order in
/// `expected_errors` in the form `:line:column: error: message`.
/// NOTE(review): ZIR input is no longer distinguished; this now behaves
/// exactly like `compileError`. The previous call passed a stray `.ZIR`
/// argument that does not match `addError`'s signature and would fail to
/// compile if this function were ever referenced.
pub fn compileErrorZIR(
    ctx: *TestContext,
    name: []const u8,
    target: CrossTarget,
    src: [:0]const u8,
    expected_errors: []const []const u8,
) void {
    ctx.addError(name, target, src, expected_errors);
}
|
||||
|
||||
/// Registers an object case with a single update asserting that `src`
/// compiles without errors.
pub fn addCompiles(
    ctx: *TestContext,
    name: []const u8,
    target: CrossTarget,
    src: [:0]const u8,
) void {
    const new_case = ctx.addObj(name, target);
    new_case.compiles(src);
}
|
||||
|
||||
/// Adds a test case that asserts that the Zig given in `src` compiles
/// without any errors. Thin alias for `addCompiles`.
pub fn compiles(
    ctx: *TestContext,
    name: []const u8,
    target: CrossTarget,
    src: [:0]const u8,
) void {
    return ctx.addCompiles(name, target, src);
}
|
||||
|
||||
/// Adds a test case that asserts that the ZIR given in `src` compiles
/// without any errors.
/// NOTE(review): ZIR input is no longer distinguished; this now behaves
/// exactly like `compiles`. The previous call passed a stray `.ZIR`
/// argument that does not match `addCompiles`'s signature and would fail
/// to compile if this function were ever referenced.
pub fn compilesZIR(
    ctx: *TestContext,
    name: []const u8,
    target: CrossTarget,
    src: [:0]const u8,
) void {
    ctx.addCompiles(name, target, src);
}
|
||||
|
||||
/// Adds a test case that first ensures the Zig in `src` fails to compile
/// with `expected_errors` (in sequential order, in the form
/// `:line:column: error: message`), then updates the module with
/// `fixed_src` and asserts that incremental compilation recovers and
/// compiles cleanly.
pub fn incrementalFailure(
    ctx: *TestContext,
    name: []const u8,
    target: CrossTarget,
    src: [:0]const u8,
    expected_errors: []const []const u8,
    fixed_src: [:0]const u8,
) void {
    const new_case = ctx.addObj(name, target);
    new_case.addError(src, expected_errors);
    new_case.compiles(fixed_src);
}
|
||||
|
||||
/// Adds a test case that first ensures that the ZIR given in `src` fails
/// to compile for the reasons given in sequential order in
/// `expected_errors` in the form `:line:column: error: message`, then
/// asserts that fixing the source (updating with `fixed_src`) isn't
/// broken by incremental compilation.
/// NOTE(review): ZIR input is no longer distinguished; this now behaves
/// exactly like `incrementalFailure`. The previous call passed a stray
/// `.ZIR` argument that does not match `addObj`'s two-parameter
/// signature and would fail to compile if this function were referenced.
pub fn incrementalFailureZIR(
    ctx: *TestContext,
    name: []const u8,
    target: CrossTarget,
    src: [:0]const u8,
    expected_errors: []const []const u8,
    fixed_src: [:0]const u8,
) void {
    const case = ctx.addObj(name, target);
    case.addError(src, expected_errors);
    case.compiles(fixed_src);
}
|
||||
|
||||
/// Adds a test for each file in the provided directory, recursing into
/// nested directories; the testing strategy is inferred from filenames.
///
/// Each file should end with a test manifest: a contiguous comment block
/// containing the test type, key-value config lines, a blank line, and
/// the expected output. Panics (naming the offending file) if any file
/// fails to process.
pub fn addTestCasesFromDir(ctx: *TestContext, dir: std.fs.IterableDir) void {
    var current_file: []const u8 = "none";
    ctx.addTestCasesFromDirInner(dir, &current_file) catch |err| {
        std.debug.panic("test harness failed to process file '{s}': {s}\n", .{ current_file, @errorName(err) });
    };
}
|
||||
|
||||
/// Walks `iterable_dir`, groups files into batches (an incremental
/// sequence such as foo.0.zig, foo.1.zig becomes one batch), parses each
/// file's trailing manifest, and registers one `Case` per
/// (backend, target) combination the manifest declares. All allocations
/// come from `ctx.arena` or `ctx.cases.allocator`.
fn addTestCasesFromDirInner(
    ctx: *TestContext,
    iterable_dir: std.fs.IterableDir,
    /// This is kept up to date with the currently being processed file so
    /// that if any errors occur the caller knows it happened during this file.
    current_file: *[]const u8,
) !void {
    var it = try iterable_dir.walk(ctx.arena);
    var filenames = std.ArrayList([]const u8).init(ctx.arena);

    // Collect every regular file with a recognized source extension.
    while (try it.next()) |entry| {
        if (entry.kind != .File) continue;

        // Ignore stuff such as .swp files
        switch (Compilation.classifyFileExt(entry.basename)) {
            .unknown => continue,
            else => {},
        }
        // Duplicate: the walker may reuse its path buffer between entries.
        try filenames.append(try ctx.arena.dupe(u8, entry.path));
    }

    // Sort filenames, so that incremental tests are contiguous and in-order
    sortTestFilenames(filenames.items);

    // `next()` returns an error union of an optional batch; the `else |err|`
    // at the bottom of this while handles iterator failures.
    var test_it = TestIterator{ .filenames = filenames.items };
    while (test_it.next()) |maybe_batch| {
        const batch = maybe_batch orelse break;
        // More than one file in a batch means an incremental sequence.
        const strategy: TestStrategy = if (batch.len > 1) .incremental else .independent;
        // Indices into ctx.cases for the cases created from this batch.
        var cases = std.ArrayList(usize).init(ctx.arena);

        for (batch) |filename| {
            current_file.* = filename;

            const max_file_size = 10 * 1024 * 1024;
            // Null-terminated read (sentinel 0) as required downstream.
            const src = try iterable_dir.dir.readFileAllocOptions(ctx.arena, filename, max_file_size, null, 1, 0);

            // Parse the manifest
            var manifest = try TestManifest.parse(ctx.arena, src);

            // Only the first file of a batch creates the cases; later files
            // in an incremental sequence just append updates below.
            if (cases.items.len == 0) {
                const backends = try manifest.getConfigForKeyAlloc(ctx.arena, "backend", Backend);
                const targets = try manifest.getConfigForKeyAlloc(ctx.arena, "target", CrossTarget);
                const is_test = try manifest.getConfigForKeyAssertSingle("is_test", bool);
                const output_mode = try manifest.getConfigForKeyAssertSingle("output_mode", std.builtin.OutputMode);

                // Strip the extension, and for incremental files also the
                // sequence number (e.g. "foo.0.zig" -> "foo").
                const name_prefix = blk: {
                    const ext_index = std.mem.lastIndexOfScalar(u8, current_file.*, '.') orelse
                        return error.InvalidFilename;
                    const index = std.mem.lastIndexOfScalar(u8, current_file.*[0..ext_index], '.') orelse ext_index;
                    break :blk current_file.*[0..index];
                };

                // Cross-product to get all possible test combinations
                for (backends) |backend| {
                    for (targets) |target| {
                        const name = try std.fmt.allocPrint(ctx.arena, "{s} ({s}, {s})", .{
                            name_prefix,
                            @tagName(backend),
                            try target.zigTriple(ctx.arena),
                        });
                        const next = ctx.cases.items.len;
                        try ctx.cases.append(.{
                            .name = name,
                            .target = target,
                            .backend = backend,
                            .updates = std.ArrayList(TestContext.Update).init(ctx.cases.allocator),
                            .is_test = is_test,
                            .output_mode = output_mode,
                            // The LLVM backend needs libc linked to run.
                            .link_libc = backend == .llvm,
                            .files = std.ArrayList(TestContext.File).init(ctx.cases.allocator),
                            .deps = std.ArrayList(DepModule).init(ctx.cases.allocator),
                        });
                        try cases.append(next);
                    }
                }
            }

            // Append this file's update to every case in the batch.
            for (cases.items) |case_index| {
                const case = &ctx.cases.items[case_index];
                switch (manifest.type) {
                    .@"error" => {
                        const errors = try manifest.trailingAlloc(ctx.arena);
                        switch (strategy) {
                            .independent => {
                                case.addError(src, errors);
                            },
                            .incremental => {
                                case.addErrorNamed("update", src, errors);
                            },
                        }
                    },
                    .run => {
                        // Join the manifest's trailing lines with '\n',
                        // dropping the final trailing newline.
                        var output = std.ArrayList(u8).init(ctx.arena);
                        var trailing_it = manifest.trailing();
                        while (trailing_it.next()) |line| {
                            try output.appendSlice(line);
                            try output.append('\n');
                        }
                        if (output.items.len > 0) {
                            try output.resize(output.items.len - 1);
                        }
                        case.addCompareOutput(src, try output.toOwnedSlice());
                    },
                    .cli => @panic("TODO cli tests"),
                }
            }
        }
    } else |err| {
        // make sure the current file is set to the file that produced an error
        current_file.* = test_it.currentFilename();
        return err;
    }
}
|
||||
|
||||
/// Builds a TestContext over the given general-purpose and arena
/// allocators, starting with an empty case list.
fn init(gpa: Allocator, arena: Allocator) TestContext {
    return .{
        .gpa = gpa,
        .arena = arena,
        .cases = std.ArrayList(Case).init(gpa),
    };
}
|
||||
|
||||
/// Frees every case's Error-update message arrays and update lists,
/// then the case list itself, and poisons `self`.
fn deinit(self: *TestContext) void {
    for (self.cases.items) |case| {
        for (case.updates.items) |update| {
            switch (update.case) {
                // Error message arrays were allocated in addErrorNamed.
                .Error => |msgs| case.updates.allocator.free(msgs),
                else => {},
            }
        }
        case.updates.deinit();
    }
    self.cases.deinit();
    self.* = undefined;
}
|
||||
|
||||
/// Runs every registered case: sets up shared state (progress bar, zig
/// lib dir, thread pool, one global cache dir shared by all cases),
/// executes each non-skipped case via `runOneCase`, then reports and
/// fails if any case errored. Requires the ZIG_EXE environment variable.
fn run(self: *TestContext) !void {
    const host = try std.zig.system.NativeTargetInfo.detect(.{});
    const zig_exe_path = try std.process.getEnvVarOwned(self.arena, "ZIG_EXE");

    var progress = std.Progress{};
    const root_node = progress.start("compiler", self.cases.items.len);
    defer root_node.end();

    var zig_lib_directory = try introspect.findZigLibDir(self.gpa);
    defer zig_lib_directory.handle.close();
    defer self.gpa.free(zig_lib_directory.path.?);

    var aux_thread_pool: ThreadPool = undefined;
    try aux_thread_pool.init(.{ .allocator = self.gpa });
    defer aux_thread_pool.deinit();

    // Use the same global cache dir for all the tests, such that we for example don't have to
    // rebuild musl libc for every case (when LLVM backend is enabled).
    var global_tmp = std.testing.tmpDir(.{});
    defer global_tmp.cleanup();

    var cache_dir = try global_tmp.dir.makeOpenPath("zig-cache", .{});
    defer cache_dir.close();
    const tmp_dir_path = try std.fs.path.join(self.gpa, &[_][]const u8{ ".", "zig-cache", "tmp", &global_tmp.sub_path });
    defer self.gpa.free(tmp_dir_path);

    const global_cache_directory: Compilation.Directory = .{
        .handle = cache_dir,
        .path = try std.fs.path.join(self.gpa, &[_][]const u8{ tmp_dir_path, "zig-cache" }),
    };
    defer self.gpa.free(global_cache_directory.path.?);

    {
        for (self.cases.items) |*case| {
            // Build-option driven skips: non-native targets/formats.
            if (build_options.skip_non_native) {
                if (case.target.getCpuArch() != builtin.cpu.arch)
                    continue;
                if (case.target.getObjectFormat() != builtin.object_format)
                    continue;
            }

            // Skip tests that require LLVM backend when it is not available
            if (!build_options.have_llvm and case.backend == .llvm)
                continue;

            if (skip_stage1 and case.backend == .stage1)
                continue;

            // Substring match against the user-supplied test filter.
            if (build_options.test_filter) |test_filter| {
                if (std.mem.indexOf(u8, case.name, test_filter) == null) continue;
            }

            var prg_node = root_node.start(case.name, case.updates.items.len);
            prg_node.activate();
            defer prg_node.end();

            // The result (error union) is stored on the case and only
            // inspected after all cases have run, so one failure does not
            // abort the rest.
            case.result = runOneCase(
                self.gpa,
                &prg_node,
                case.*,
                zig_lib_directory,
                zig_exe_path,
                &aux_thread_pool,
                global_cache_directory,
                host,
            );
        }
    }

    // Second pass: report every failed case, then fail the run as a whole.
    var fail_count: usize = 0;
    for (self.cases.items) |*case| {
        case.result catch |err| {
            fail_count += 1;
            print("{s} failed: {s}\n", .{ case.name, @errorName(err) });
        };
    }

    if (fail_count != 0) {
        print("{d} tests failed\n", .{fail_count});
        return error.TestFailed;
    }
}
|
||||
|
||||
fn runOneCase(
|
||||
allocator: Allocator,
|
||||
root_node: *std.Progress.Node,
|
||||
case: Case,
|
||||
zig_lib_directory: Compilation.Directory,
|
||||
zig_exe_path: []const u8,
|
||||
thread_pool: *ThreadPool,
|
||||
global_cache_directory: Compilation.Directory,
|
||||
host: std.zig.system.NativeTargetInfo,
|
||||
) !void {
|
||||
const target_info = try std.zig.system.NativeTargetInfo.detect(case.target);
|
||||
const target = target_info.target;
|
||||
|
||||
var arena_allocator = std.heap.ArenaAllocator.init(allocator);
|
||||
defer arena_allocator.deinit();
|
||||
const arena = arena_allocator.allocator();
|
||||
|
||||
var tmp = std.testing.tmpDir(.{});
|
||||
defer tmp.cleanup();
|
||||
|
||||
var cache_dir = try tmp.dir.makeOpenPath("zig-cache", .{});
|
||||
defer cache_dir.close();
|
||||
|
||||
const tmp_dir_path = try std.fs.path.join(
|
||||
arena,
|
||||
&[_][]const u8{ ".", "zig-cache", "tmp", &tmp.sub_path },
|
||||
);
|
||||
const tmp_dir_path_plus_slash = try std.fmt.allocPrint(
|
||||
arena,
|
||||
"{s}" ++ std.fs.path.sep_str,
|
||||
.{tmp_dir_path},
|
||||
);
|
||||
const local_cache_path = try std.fs.path.join(
|
||||
arena,
|
||||
&[_][]const u8{ tmp_dir_path, "zig-cache" },
|
||||
);
|
||||
|
||||
for (case.files.items) |file| {
|
||||
try tmp.dir.writeFile(file.path, file.src);
|
||||
}
|
||||
|
||||
if (case.backend == .stage1) {
|
||||
// stage1 backend has limitations:
|
||||
// * leaks memory
|
||||
// * calls exit() when a compile error happens
|
||||
// * cannot handle updates
|
||||
// because of this we must spawn a child process rather than
|
||||
// using Compilation directly.
|
||||
|
||||
if (!std.process.can_spawn) {
|
||||
print("Unable to spawn child processes on {s}, skipping test.\n", .{@tagName(builtin.os.tag)});
|
||||
return; // Pass test.
|
||||
}
|
||||
|
||||
assert(case.updates.items.len == 1);
|
||||
const update = case.updates.items[0];
|
||||
try tmp.dir.writeFile(tmp_src_path, update.src);
|
||||
|
||||
var zig_args = std.ArrayList([]const u8).init(arena);
|
||||
try zig_args.append(zig_exe_path);
|
||||
|
||||
if (case.is_test) {
|
||||
try zig_args.append("test");
|
||||
} else if (update.case == .Execution) {
|
||||
try zig_args.append("run");
|
||||
} else switch (case.output_mode) {
|
||||
.Obj => try zig_args.append("build-obj"),
|
||||
.Exe => try zig_args.append("build-exe"),
|
||||
.Lib => try zig_args.append("build-lib"),
|
||||
}
|
||||
|
||||
try zig_args.append(try std.fs.path.join(arena, &.{ tmp_dir_path, tmp_src_path }));
|
||||
|
||||
try zig_args.append("--name");
|
||||
try zig_args.append("test");
|
||||
|
||||
try zig_args.append("--cache-dir");
|
||||
try zig_args.append(local_cache_path);
|
||||
|
||||
try zig_args.append("--global-cache-dir");
|
||||
try zig_args.append(global_cache_directory.path orelse ".");
|
||||
|
||||
if (!case.target.isNative()) {
|
||||
try zig_args.append("-target");
|
||||
try zig_args.append(try target.zigTriple(arena));
|
||||
}
|
||||
|
||||
try zig_args.append("-O");
|
||||
try zig_args.append(@tagName(case.optimize_mode));
|
||||
|
||||
// Prevent sub-process progress bar from interfering with the
|
||||
// one in this parent process.
|
||||
try zig_args.append("--color");
|
||||
try zig_args.append("off");
|
||||
|
||||
const result = try std.ChildProcess.exec(.{
|
||||
.allocator = arena,
|
||||
.argv = zig_args.items,
|
||||
});
|
||||
switch (update.case) {
|
||||
.Error => |case_error_list| {
|
||||
switch (result.term) {
|
||||
.Exited => |code| {
|
||||
if (code == 0) {
|
||||
dumpArgs(zig_args.items);
|
||||
return error.CompilationIncorrectlySucceeded;
|
||||
}
|
||||
},
|
||||
else => {
|
||||
std.debug.print("{s}", .{result.stderr});
|
||||
dumpArgs(zig_args.items);
|
||||
return error.CompilationCrashed;
|
||||
},
|
||||
}
|
||||
var ok = true;
|
||||
if (case.expect_exact) {
|
||||
var err_iter = std.mem.split(u8, result.stderr, "\n");
|
||||
var i: usize = 0;
|
||||
ok = while (err_iter.next()) |line| : (i += 1) {
|
||||
if (i >= case_error_list.len) break false;
|
||||
const expected = try std.mem.replaceOwned(
|
||||
u8,
|
||||
arena,
|
||||
try std.fmt.allocPrint(arena, "{s}", .{case_error_list[i]}),
|
||||
"${DIR}",
|
||||
tmp_dir_path_plus_slash,
|
||||
);
|
||||
|
||||
if (std.mem.indexOf(u8, line, expected) == null) break false;
|
||||
continue;
|
||||
} else true;
|
||||
|
||||
ok = ok and i == case_error_list.len;
|
||||
|
||||
if (!ok) {
|
||||
print("\n======== Expected these compile errors: ========\n", .{});
|
||||
for (case_error_list) |msg| {
|
||||
const expected = try std.fmt.allocPrint(arena, "{s}", .{msg});
|
||||
print("{s}\n", .{expected});
|
||||
}
|
||||
}
|
||||
} else {
|
||||
for (case_error_list) |msg| {
|
||||
const expected = try std.mem.replaceOwned(
|
||||
u8,
|
||||
arena,
|
||||
try std.fmt.allocPrint(arena, "{s}", .{msg}),
|
||||
"${DIR}",
|
||||
tmp_dir_path_plus_slash,
|
||||
);
|
||||
if (std.mem.indexOf(u8, result.stderr, expected) == null) {
|
||||
print(
|
||||
\\
|
||||
\\=========== Expected compile error: ============
|
||||
\\{s}
|
||||
\\
|
||||
, .{expected});
|
||||
ok = false;
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (!ok) {
|
||||
print(
|
||||
\\================= Full output: =================
|
||||
\\{s}
|
||||
\\================================================
|
||||
\\
|
||||
, .{result.stderr});
|
||||
return error.TestFailed;
|
||||
}
|
||||
},
|
||||
.CompareObjectFile => @panic("TODO implement in the test harness"),
|
||||
.Execution => |expected_stdout| {
|
||||
switch (result.term) {
|
||||
.Exited => |code| {
|
||||
if (code != 0) {
|
||||
std.debug.print("{s}", .{result.stderr});
|
||||
dumpArgs(zig_args.items);
|
||||
return error.CompilationFailed;
|
||||
}
|
||||
},
|
||||
else => {
|
||||
std.debug.print("{s}", .{result.stderr});
|
||||
dumpArgs(zig_args.items);
|
||||
return error.CompilationCrashed;
|
||||
},
|
||||
}
|
||||
try std.testing.expectEqualStrings("", result.stderr);
|
||||
try std.testing.expectEqualStrings(expected_stdout, result.stdout);
|
||||
},
|
||||
.Header => @panic("TODO implement in the test harness"),
|
||||
}
|
||||
return;
|
||||
}
|
||||
|
||||
const zig_cache_directory: Compilation.Directory = .{
|
||||
.handle = cache_dir,
|
||||
.path = local_cache_path,
|
||||
};
|
||||
|
||||
var main_pkg: Package = .{
|
||||
.root_src_directory = .{ .path = tmp_dir_path, .handle = tmp.dir },
|
||||
.root_src_path = tmp_src_path,
|
||||
};
|
||||
defer {
|
||||
var it = main_pkg.table.iterator();
|
||||
while (it.next()) |kv| {
|
||||
allocator.free(kv.key_ptr.*);
|
||||
kv.value_ptr.*.destroy(allocator);
|
||||
}
|
||||
main_pkg.table.deinit(allocator);
|
||||
}
|
||||
|
||||
for (case.deps.items) |dep| {
|
||||
var pkg = try Package.create(
|
||||
allocator,
|
||||
tmp_dir_path,
|
||||
dep.path,
|
||||
);
|
||||
errdefer pkg.destroy(allocator);
|
||||
try main_pkg.add(allocator, dep.name, pkg);
|
||||
}
|
||||
|
||||
const bin_name = try std.zig.binNameAlloc(arena, .{
|
||||
.root_name = "test_case",
|
||||
.target = target,
|
||||
.output_mode = case.output_mode,
|
||||
});
|
||||
|
||||
const emit_directory: Compilation.Directory = .{
|
||||
.path = tmp_dir_path,
|
||||
.handle = tmp.dir,
|
||||
};
|
||||
const emit_bin: Compilation.EmitLoc = .{
|
||||
.directory = emit_directory,
|
||||
.basename = bin_name,
|
||||
};
|
||||
const emit_h: ?Compilation.EmitLoc = if (case.emit_h) .{
|
||||
.directory = emit_directory,
|
||||
.basename = "test_case.h",
|
||||
} else null;
|
||||
const use_llvm: bool = switch (case.backend) {
|
||||
.llvm => true,
|
||||
else => false,
|
||||
};
|
||||
const comp = try Compilation.create(allocator, .{
|
||||
.local_cache_directory = zig_cache_directory,
|
||||
.global_cache_directory = global_cache_directory,
|
||||
.zig_lib_directory = zig_lib_directory,
|
||||
.thread_pool = thread_pool,
|
||||
.root_name = "test_case",
|
||||
.target = target,
|
||||
// TODO: support tests for object file building, and library builds
|
||||
// and linking. This will require a rework to support multi-file
|
||||
// tests.
|
||||
.output_mode = case.output_mode,
|
||||
.is_test = case.is_test,
|
||||
.optimize_mode = case.optimize_mode,
|
||||
.emit_bin = emit_bin,
|
||||
.emit_h = emit_h,
|
||||
.main_pkg = &main_pkg,
|
||||
.keep_source_files_loaded = true,
|
||||
.is_native_os = case.target.isNativeOs(),
|
||||
.is_native_abi = case.target.isNativeAbi(),
|
||||
.dynamic_linker = target_info.dynamic_linker.get(),
|
||||
.link_libc = case.link_libc,
|
||||
.use_llvm = use_llvm,
|
||||
.self_exe_path = zig_exe_path,
|
||||
// TODO instead of turning off color, pass in a std.Progress.Node
|
||||
.color = .off,
|
||||
.reference_trace = 0,
|
||||
// TODO: force self-hosted linkers with stage2 backend to avoid LLD creeping in
|
||||
// until the auto-select mechanism deems them worthy
|
||||
.use_lld = switch (case.backend) {
|
||||
.stage2 => false,
|
||||
else => null,
|
||||
},
|
||||
});
|
||||
defer comp.destroy();
|
||||
|
||||
update: for (case.updates.items, 0..) |update, update_index| {
|
||||
var update_node = root_node.start(update.name, 3);
|
||||
update_node.activate();
|
||||
defer update_node.end();
|
||||
|
||||
var sync_node = update_node.start("write", 0);
|
||||
sync_node.activate();
|
||||
try tmp.dir.writeFile(tmp_src_path, update.src);
|
||||
sync_node.end();
|
||||
|
||||
var module_node = update_node.start("parse/analysis/codegen", 0);
|
||||
module_node.activate();
|
||||
try comp.makeBinFileWritable();
|
||||
try comp.update(&module_node);
|
||||
module_node.end();
|
||||
|
||||
if (update.case != .Error) {
|
||||
var all_errors = try comp.getAllErrorsAlloc();
|
||||
defer all_errors.deinit(allocator);
|
||||
if (all_errors.errorMessageCount() > 0) {
|
||||
all_errors.renderToStdErr(std.debug.detectTTYConfig(std.io.getStdErr()));
|
||||
// TODO print generated C code
|
||||
return error.UnexpectedCompileErrors;
|
||||
}
|
||||
}
|
||||
|
||||
switch (update.case) {
|
||||
.Header => |expected_output| {
|
||||
var file = try tmp.dir.openFile("test_case.h", .{ .mode = .read_only });
|
||||
defer file.close();
|
||||
const out = try file.reader().readAllAlloc(arena, 5 * 1024 * 1024);
|
||||
|
||||
try std.testing.expectEqualStrings(expected_output, out);
|
||||
},
|
||||
.CompareObjectFile => |expected_output| {
|
||||
var file = try tmp.dir.openFile(bin_name, .{ .mode = .read_only });
|
||||
defer file.close();
|
||||
const out = try file.reader().readAllAlloc(arena, 5 * 1024 * 1024);
|
||||
|
||||
try std.testing.expectEqualStrings(expected_output, out);
|
||||
},
|
||||
.Error => |case_error_list| {
|
||||
var test_node = update_node.start("assert", 0);
|
||||
test_node.activate();
|
||||
defer test_node.end();
|
||||
|
||||
const handled_errors = try arena.alloc(bool, case_error_list.len);
|
||||
std.mem.set(bool, handled_errors, false);
|
||||
|
||||
var actual_errors = try comp.getAllErrorsAlloc();
|
||||
defer actual_errors.deinit(allocator);
|
||||
|
||||
var any_failed = false;
|
||||
var notes_to_check = std.ArrayList(*const Compilation.AllErrors.Message).init(allocator);
|
||||
defer notes_to_check.deinit();
|
||||
|
||||
for (actual_errors.list) |actual_error| {
|
||||
for (case_error_list, 0..) |case_msg, i| {
|
||||
if (handled_errors[i]) continue;
|
||||
|
||||
const ex_tag: std.meta.Tag(@TypeOf(case_msg)) = case_msg;
|
||||
switch (actual_error) {
|
||||
.src => |actual_msg| {
|
||||
for (actual_msg.notes) |*note| {
|
||||
try notes_to_check.append(note);
|
||||
}
|
||||
|
||||
if (ex_tag != .src) continue;
|
||||
|
||||
const src_path_ok = case_msg.src.src_path.len == 0 or
|
||||
std.mem.eql(u8, case_msg.src.src_path, actual_msg.src_path);
|
||||
|
||||
const expected_msg = try std.mem.replaceOwned(
|
||||
u8,
|
||||
arena,
|
||||
case_msg.src.msg,
|
||||
"${DIR}",
|
||||
tmp_dir_path_plus_slash,
|
||||
);
|
||||
|
||||
var buf: [1024]u8 = undefined;
|
||||
const rendered_msg = blk: {
|
||||
var msg: Compilation.AllErrors.Message = actual_error;
|
||||
msg.src.src_path = case_msg.src.src_path;
|
||||
msg.src.notes = &.{};
|
||||
msg.src.source_line = null;
|
||||
var fib = std.io.fixedBufferStream(&buf);
|
||||
try msg.renderToWriter(.no_color, fib.writer(), "error", .Red, 0);
|
||||
var it = std.mem.split(u8, fib.getWritten(), "error: ");
|
||||
_ = it.first();
|
||||
const rendered = it.rest();
|
||||
break :blk rendered[0 .. rendered.len - 1]; // trim final newline
|
||||
};
|
||||
|
||||
if (src_path_ok and
|
||||
(case_msg.src.line == std.math.maxInt(u32) or
|
||||
actual_msg.line == case_msg.src.line) and
|
||||
(case_msg.src.column == std.math.maxInt(u32) or
|
||||
actual_msg.column == case_msg.src.column) and
|
||||
std.mem.eql(u8, expected_msg, rendered_msg) and
|
||||
case_msg.src.kind == .@"error" and
|
||||
actual_msg.count == case_msg.src.count)
|
||||
{
|
||||
handled_errors[i] = true;
|
||||
break;
|
||||
}
|
||||
},
|
||||
.plain => |plain| {
|
||||
if (ex_tag != .plain) continue;
|
||||
|
||||
if (std.mem.eql(u8, case_msg.plain.msg, plain.msg) and
|
||||
case_msg.plain.kind == .@"error" and
|
||||
case_msg.plain.count == plain.count)
|
||||
{
|
||||
handled_errors[i] = true;
|
||||
break;
|
||||
}
|
||||
},
|
||||
}
|
||||
} else {
|
||||
print(
|
||||
"\nUnexpected error:\n{s}\n{}\n{s}",
|
||||
.{ hr, ErrorMsg.init(actual_error, .@"error"), hr },
|
||||
);
|
||||
any_failed = true;
|
||||
}
|
||||
}
|
||||
while (notes_to_check.popOrNull()) |note| {
|
||||
for (case_error_list, 0..) |case_msg, i| {
|
||||
const ex_tag: std.meta.Tag(@TypeOf(case_msg)) = case_msg;
|
||||
switch (note.*) {
|
||||
.src => |actual_msg| {
|
||||
for (actual_msg.notes) |*sub_note| {
|
||||
try notes_to_check.append(sub_note);
|
||||
}
|
||||
if (ex_tag != .src) continue;
|
||||
|
||||
const expected_msg = try std.mem.replaceOwned(
|
||||
u8,
|
||||
arena,
|
||||
case_msg.src.msg,
|
||||
"${DIR}",
|
||||
tmp_dir_path_plus_slash,
|
||||
);
|
||||
|
||||
if ((case_msg.src.line == std.math.maxInt(u32) or
|
||||
actual_msg.line == case_msg.src.line) and
|
||||
(case_msg.src.column == std.math.maxInt(u32) or
|
||||
actual_msg.column == case_msg.src.column) and
|
||||
std.mem.eql(u8, expected_msg, actual_msg.msg) and
|
||||
case_msg.src.kind == .note and
|
||||
actual_msg.count == case_msg.src.count)
|
||||
{
|
||||
handled_errors[i] = true;
|
||||
break;
|
||||
}
|
||||
},
|
||||
.plain => |plain| {
|
||||
if (ex_tag != .plain) continue;
|
||||
|
||||
if (std.mem.eql(u8, case_msg.plain.msg, plain.msg) and
|
||||
case_msg.plain.kind == .note and
|
||||
case_msg.plain.count == plain.count)
|
||||
{
|
||||
handled_errors[i] = true;
|
||||
break;
|
||||
}
|
||||
},
|
||||
}
|
||||
} else {
|
||||
print(
|
||||
"\nUnexpected note:\n{s}\n{}\n{s}",
|
||||
.{ hr, ErrorMsg.init(note.*, .note), hr },
|
||||
);
|
||||
any_failed = true;
|
||||
}
|
||||
}
|
||||
|
||||
for (handled_errors, 0..) |handled, i| {
|
||||
if (!handled) {
|
||||
print(
|
||||
"\nExpected error not found:\n{s}\n{}\n{s}",
|
||||
.{ hr, case_error_list[i], hr },
|
||||
);
|
||||
any_failed = true;
|
||||
}
|
||||
}
|
||||
|
||||
if (any_failed) {
|
||||
print("\nupdate_index={d}\n", .{update_index});
|
||||
return error.WrongCompileErrors;
|
||||
}
|
||||
},
|
||||
.Execution => |expected_stdout| {
|
||||
if (!std.process.can_spawn) {
|
||||
print("Unable to spawn child processes on {s}, skipping test.\n", .{@tagName(builtin.os.tag)});
|
||||
continue :update; // Pass test.
|
||||
}
|
||||
|
||||
update_node.setEstimatedTotalItems(4);
|
||||
|
||||
var argv = std.ArrayList([]const u8).init(allocator);
|
||||
defer argv.deinit();
|
||||
|
||||
var exec_result = x: {
|
||||
var exec_node = update_node.start("execute", 0);
|
||||
exec_node.activate();
|
||||
defer exec_node.end();
|
||||
|
||||
// We go out of our way here to use the unique temporary directory name in
|
||||
// the exe_path so that it makes its way into the cache hash, avoiding
|
||||
// cache collisions from multiple threads doing `zig run` at the same time
|
||||
// on the same test_case.c input filename.
|
||||
const ss = std.fs.path.sep_str;
|
||||
const exe_path = try std.fmt.allocPrint(
|
||||
arena,
|
||||
".." ++ ss ++ "{s}" ++ ss ++ "{s}",
|
||||
.{ &tmp.sub_path, bin_name },
|
||||
);
|
||||
if (case.target.ofmt != null and case.target.ofmt.? == .c) {
|
||||
if (host.getExternalExecutor(target_info, .{ .link_libc = true }) != .native) {
|
||||
// We wouldn't be able to run the compiled C code.
|
||||
continue :update; // Pass test.
|
||||
}
|
||||
try argv.appendSlice(&[_][]const u8{
|
||||
zig_exe_path,
|
||||
"run",
|
||||
"-cflags",
|
||||
"-std=c99",
|
||||
"-pedantic",
|
||||
"-Werror",
|
||||
"-Wno-incompatible-library-redeclaration", // https://github.com/ziglang/zig/issues/875
|
||||
"--",
|
||||
"-lc",
|
||||
exe_path,
|
||||
});
|
||||
if (zig_lib_directory.path) |p| {
|
||||
try argv.appendSlice(&.{ "-I", p });
|
||||
}
|
||||
} else switch (host.getExternalExecutor(target_info, .{ .link_libc = case.link_libc })) {
|
||||
.native => {
|
||||
if (case.backend == .stage2 and case.target.getCpuArch() == .arm) {
|
||||
// https://github.com/ziglang/zig/issues/13623
|
||||
continue :update; // Pass test.
|
||||
}
|
||||
try argv.append(exe_path);
|
||||
},
|
||||
.bad_dl, .bad_os_or_cpu => continue :update, // Pass test.
|
||||
|
||||
.rosetta => if (enable_rosetta) {
|
||||
try argv.append(exe_path);
|
||||
} else {
|
||||
continue :update; // Rosetta not available, pass test.
|
||||
},
|
||||
|
||||
.qemu => |qemu_bin_name| if (enable_qemu) {
|
||||
const need_cross_glibc = target.isGnuLibC() and case.link_libc;
|
||||
const glibc_dir_arg: ?[]const u8 = if (need_cross_glibc)
|
||||
glibc_runtimes_dir orelse continue :update // glibc dir not available; pass test
|
||||
else
|
||||
null;
|
||||
try argv.append(qemu_bin_name);
|
||||
if (glibc_dir_arg) |dir| {
|
||||
const linux_triple = try target.linuxTriple(arena);
|
||||
const full_dir = try std.fs.path.join(arena, &[_][]const u8{
|
||||
dir,
|
||||
linux_triple,
|
||||
});
|
||||
|
||||
try argv.append("-L");
|
||||
try argv.append(full_dir);
|
||||
}
|
||||
try argv.append(exe_path);
|
||||
} else {
|
||||
continue :update; // QEMU not available; pass test.
|
||||
},
|
||||
|
||||
.wine => |wine_bin_name| if (enable_wine) {
|
||||
try argv.append(wine_bin_name);
|
||||
try argv.append(exe_path);
|
||||
} else {
|
||||
continue :update; // Wine not available; pass test.
|
||||
},
|
||||
|
||||
.wasmtime => |wasmtime_bin_name| if (enable_wasmtime) {
|
||||
try argv.append(wasmtime_bin_name);
|
||||
try argv.append("--dir=.");
|
||||
try argv.append(exe_path);
|
||||
} else {
|
||||
continue :update; // wasmtime not available; pass test.
|
||||
},
|
||||
|
||||
.darling => |darling_bin_name| if (enable_darling) {
|
||||
try argv.append(darling_bin_name);
|
||||
// Since we use relative to cwd here, we invoke darling with
|
||||
// "shell" subcommand.
|
||||
try argv.append("shell");
|
||||
try argv.append(exe_path);
|
||||
} else {
|
||||
continue :update; // Darling not available; pass test.
|
||||
},
|
||||
}
|
||||
|
||||
try comp.makeBinFileExecutable();
|
||||
|
||||
while (true) {
|
||||
break :x std.ChildProcess.exec(.{
|
||||
.allocator = allocator,
|
||||
.argv = argv.items,
|
||||
.cwd_dir = tmp.dir,
|
||||
.cwd = tmp_dir_path,
|
||||
}) catch |err| switch (err) {
|
||||
error.FileBusy => {
|
||||
// There is a fundamental design flaw in Unix systems with how
|
||||
// ETXTBSY interacts with fork+exec.
|
||||
// https://github.com/golang/go/issues/22315
|
||||
// https://bugs.openjdk.org/browse/JDK-8068370
|
||||
// Unfortunately, this could be a real error, but we can't
|
||||
// tell the difference here.
|
||||
continue;
|
||||
},
|
||||
else => {
|
||||
print("\n{s}.{d} The following command failed with {s}:\n", .{
|
||||
case.name, update_index, @errorName(err),
|
||||
});
|
||||
dumpArgs(argv.items);
|
||||
return error.ChildProcessExecution;
|
||||
},
|
||||
};
|
||||
}
|
||||
};
|
||||
var test_node = update_node.start("test", 0);
|
||||
test_node.activate();
|
||||
defer test_node.end();
|
||||
defer allocator.free(exec_result.stdout);
|
||||
defer allocator.free(exec_result.stderr);
|
||||
switch (exec_result.term) {
|
||||
.Exited => |code| {
|
||||
if (code != 0) {
|
||||
print("\n{s}\n{s}: execution exited with code {d}:\n", .{
|
||||
exec_result.stderr, case.name, code,
|
||||
});
|
||||
dumpArgs(argv.items);
|
||||
return error.ChildProcessExecution;
|
||||
}
|
||||
},
|
||||
else => {
|
||||
print("\n{s}\n{s}: execution crashed:\n", .{
|
||||
exec_result.stderr, case.name,
|
||||
});
|
||||
dumpArgs(argv.items);
|
||||
return error.ChildProcessExecution;
|
||||
},
|
||||
}
|
||||
try std.testing.expectEqualStrings(expected_stdout, exec_result.stdout);
|
||||
// We allow stderr to have garbage in it because wasmtime prints a
|
||||
// warning about --invoke even though we don't pass it.
|
||||
//std.testing.expectEqualStrings("", exec_result.stderr);
|
||||
},
|
||||
}
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
fn dumpArgs(argv: []const []const u8) void {
|
||||
for (argv) |arg| {
|
||||
print("{s} ", .{arg});
|
||||
}
|
||||
print("\n", .{});
|
||||
}
|
||||
|
||||
const tmp_src_path = "tmp.zig";
|
||||
+5
-5
@@ -1,8 +1,8 @@
|
||||
const std = @import("std");
|
||||
const TestContext = @import("../src/test.zig").TestContext;
|
||||
const Cases = @import("src/Cases.zig");
|
||||
|
||||
pub fn addCases(ctx: *TestContext) !void {
|
||||
try @import("compile_errors.zig").addCases(ctx);
|
||||
try @import("stage2/cbe.zig").addCases(ctx);
|
||||
try @import("stage2/nvptx.zig").addCases(ctx);
|
||||
pub fn addCases(cases: *Cases) !void {
|
||||
try @import("compile_errors.zig").addCases(cases);
|
||||
try @import("cbe.zig").addCases(cases);
|
||||
try @import("nvptx.zig").addCases(cases);
|
||||
}
|
||||
|
||||
@@ -21,3 +21,4 @@ pub export fn entry1() void {
|
||||
// :9:15: error: access of union field 'a' while field 'b' is active
|
||||
// :2:21: note: union declared here
|
||||
// :14:16: error: access of union field 'a' while field 'b' is active
|
||||
// :2:21: note: union declared here
|
||||
|
||||
@@ -4,4 +4,4 @@ const bogus = @import("bogus-does-not-exist.zig",);
|
||||
// backend=stage2
|
||||
// target=native
|
||||
//
|
||||
// :1:23: error: unable to load '${DIR}bogus-does-not-exist.zig': FileNotFound
|
||||
// bogus-does-not-exist.zig': FileNotFound
|
||||
|
||||
@@ -45,4 +45,6 @@ pub export fn entry2() void {
|
||||
// :22:13: error: unable to resolve comptime value
|
||||
// :22:13: note: condition in comptime switch must be comptime-known
|
||||
// :21:17: note: expression is evaluated at comptime because the function returns a comptime-only type 'tmp.S'
|
||||
// :2:12: note: struct requires comptime because of this field
|
||||
// :2:12: note: use '*const fn() void' for a function pointer type
|
||||
// :32:19: note: called from here
|
||||
|
||||
@@ -32,6 +32,7 @@ export fn d() void {
|
||||
// :3:8: error: opaque types have unknown size and therefore cannot be directly embedded in structs
|
||||
// :1:11: note: opaque declared here
|
||||
// :7:10: error: opaque types have unknown size and therefore cannot be directly embedded in unions
|
||||
// :1:11: note: opaque declared here
|
||||
// :19:18: error: opaque types have unknown size and therefore cannot be directly embedded in structs
|
||||
// :18:22: note: opaque declared here
|
||||
// :24:23: error: opaque types have unknown size and therefore cannot be directly embedded in structs
|
||||
|
||||
@@ -12,6 +12,6 @@ comptime { _ = entry2; }
|
||||
// backend=stage2
|
||||
// target=native
|
||||
//
|
||||
// :1:15: error: comptime parameters not allowed in function with calling convention 'C'
|
||||
// :5:30: error: comptime parameters not allowed in function with calling convention 'C'
|
||||
// :6:30: error: generic parameters not allowed in function with calling convention 'C'
|
||||
// :1:15: error: comptime parameters not allowed in function with calling convention 'C'
|
||||
|
||||
@@ -27,4 +27,5 @@ export fn entry4() void {
|
||||
// :1:17: note: opaque declared here
|
||||
// :8:28: error: parameter of type '@TypeOf(null)' not allowed
|
||||
// :12:8: error: parameter of opaque type 'tmp.FooType' not allowed
|
||||
// :1:17: note: opaque declared here
|
||||
// :17:8: error: parameter of type '@TypeOf(null)' not allowed
|
||||
|
||||
@@ -24,9 +24,9 @@ export fn quux() u32 {
|
||||
// :8:5: error: expected type 'void', found '@typeInfo(@typeInfo(@TypeOf(tmp.bar)).Fn.return_type.?).ErrorUnion.error_set'
|
||||
// :7:17: note: function cannot return an error
|
||||
// :11:15: error: expected type 'u32', found '@typeInfo(@typeInfo(@TypeOf(tmp.bar)).Fn.return_type.?).ErrorUnion.error_set!u32'
|
||||
// :10:17: note: function cannot return an error
|
||||
// :11:15: note: cannot convert error union to payload type
|
||||
// :11:15: note: consider using 'try', 'catch', or 'if'
|
||||
// :10:17: note: function cannot return an error
|
||||
// :15:14: error: expected type 'u32', found '@typeInfo(@typeInfo(@TypeOf(tmp.bar)).Fn.return_type.?).ErrorUnion.error_set!u32'
|
||||
// :15:14: note: cannot convert error union to payload type
|
||||
// :15:14: note: consider using 'try', 'catch', or 'if'
|
||||
|
||||
@@ -3,6 +3,8 @@ export fn entry() void {
|
||||
var good = {};
|
||||
_ = {}
|
||||
var bad = {};
|
||||
_ = good;
|
||||
_ = bad;
|
||||
}
|
||||
|
||||
// error
|
||||
|
||||
@@ -3,6 +3,8 @@ export fn entry() void {
|
||||
var good = {};
|
||||
({})
|
||||
var bad = {};
|
||||
_ = good;
|
||||
_ = bad;
|
||||
}
|
||||
|
||||
// error
|
||||
|
||||
@@ -3,6 +3,8 @@ export fn entry() void {
|
||||
var good = {};
|
||||
_ = comptime {}
|
||||
var bad = {};
|
||||
_ = good;
|
||||
_ = bad;
|
||||
}
|
||||
|
||||
// error
|
||||
|
||||
@@ -3,6 +3,8 @@ export fn entry() void {
|
||||
var good = {};
|
||||
comptime ({})
|
||||
var bad = {};
|
||||
_ = good;
|
||||
_ = bad;
|
||||
}
|
||||
|
||||
// error
|
||||
|
||||
@@ -3,6 +3,8 @@ export fn entry() void {
|
||||
var good = {};
|
||||
defer ({})
|
||||
var bad = {};
|
||||
_ = good;
|
||||
_ = bad;
|
||||
}
|
||||
|
||||
// error
|
||||
|
||||
@@ -3,7 +3,10 @@ export fn entry() void {
|
||||
var good = {};
|
||||
_ = for(foo()) |_| {}
|
||||
var bad = {};
|
||||
_ = good;
|
||||
_ = bad;
|
||||
}
|
||||
fn foo() void {}
|
||||
|
||||
// error
|
||||
// backend=stage2
|
||||
|
||||
@@ -3,7 +3,10 @@ export fn entry() void {
|
||||
var good = {};
|
||||
for(foo()) |_| ({})
|
||||
var bad = {};
|
||||
_ = good;
|
||||
_ = bad;
|
||||
}
|
||||
fn foo() void {}
|
||||
|
||||
// error
|
||||
// backend=stage2
|
||||
|
||||
@@ -3,6 +3,8 @@ export fn entry() void {
|
||||
var good = {};
|
||||
_ = if(true) {} else if(true) {} else {}
|
||||
var bad = {};
|
||||
_ = good;
|
||||
_ = bad;
|
||||
}
|
||||
|
||||
// error
|
||||
|
||||
@@ -3,6 +3,8 @@ export fn entry() void {
|
||||
var good = {};
|
||||
if(true) ({}) else if(true) ({}) else ({})
|
||||
var bad = {};
|
||||
_ = good;
|
||||
_ = bad;
|
||||
}
|
||||
|
||||
// error
|
||||
|
||||
@@ -3,6 +3,8 @@ export fn entry() void {
|
||||
var good = {};
|
||||
_ = if(true) {} else if(true) {}
|
||||
var bad = {};
|
||||
_ = good;
|
||||
_ = bad;
|
||||
}
|
||||
|
||||
// error
|
||||
|
||||
@@ -3,6 +3,8 @@ export fn entry() void {
|
||||
var good = {};
|
||||
if(true) ({}) else if(true) ({})
|
||||
var bad = {};
|
||||
_ = good;
|
||||
_ = bad;
|
||||
}
|
||||
|
||||
// error
|
||||
|
||||
@@ -3,6 +3,8 @@ export fn entry() void {
|
||||
var good = {};
|
||||
_ = if(true) {} else {}
|
||||
var bad = {};
|
||||
_ = good;
|
||||
_ = bad;
|
||||
}
|
||||
|
||||
// error
|
||||
|
||||
@@ -3,6 +3,8 @@ export fn entry() void {
|
||||
var good = {};
|
||||
if(true) ({}) else ({})
|
||||
var bad = {};
|
||||
_ = good;
|
||||
_ = bad;
|
||||
}
|
||||
|
||||
// error
|
||||
|
||||
@@ -3,6 +3,8 @@ export fn entry() void {
|
||||
var good = {};
|
||||
_ = if(true) {}
|
||||
var bad = {};
|
||||
_ = good;
|
||||
_ = bad;
|
||||
}
|
||||
|
||||
// error
|
||||
|
||||
@@ -3,6 +3,8 @@ export fn entry() void {
|
||||
var good = {};
|
||||
if(true) ({})
|
||||
var bad = {};
|
||||
_ = good;
|
||||
_ = bad;
|
||||
}
|
||||
|
||||
// error
|
||||
|
||||
@@ -3,7 +3,10 @@ export fn entry() void {
|
||||
var good = {};
|
||||
_ = if (foo()) |_| {}
|
||||
var bad = {};
|
||||
_ = good;
|
||||
_ = bad;
|
||||
}
|
||||
fn foo() void {}
|
||||
|
||||
// error
|
||||
// backend=stage2
|
||||
|
||||
@@ -3,7 +3,10 @@ export fn entry() void {
|
||||
var good = {};
|
||||
if (foo()) |_| ({})
|
||||
var bad = {};
|
||||
_ = good;
|
||||
_ = bad;
|
||||
}
|
||||
fn foo() void {}
|
||||
|
||||
// error
|
||||
// backend=stage2
|
||||
|
||||
@@ -3,6 +3,8 @@ export fn entry() void {
|
||||
var good = {};
|
||||
_ = while(true):({}) {}
|
||||
var bad = {};
|
||||
_ = good;
|
||||
_ = bad;
|
||||
}
|
||||
|
||||
// error
|
||||
|
||||
@@ -3,6 +3,8 @@ export fn entry() void {
|
||||
var good = {};
|
||||
while(true):({}) ({})
|
||||
var bad = {};
|
||||
_ = good;
|
||||
_ = bad;
|
||||
}
|
||||
|
||||
// error
|
||||
|
||||
@@ -3,6 +3,8 @@ export fn entry() void {
|
||||
var good = {};
|
||||
_ = while(true) {}
|
||||
var bad = {};
|
||||
_ = good;
|
||||
_ = bad;
|
||||
}
|
||||
|
||||
// error
|
||||
|
||||
@@ -3,6 +3,8 @@ export fn entry() void {
|
||||
var good = {};
|
||||
while(true) 1
|
||||
var bad = {};
|
||||
_ = good;
|
||||
_ = bad;
|
||||
}
|
||||
|
||||
// error
|
||||
|
||||
@@ -9,4 +9,4 @@ export fn entry() void {
|
||||
// target=native
|
||||
//
|
||||
// :3:38: error: enum 'builtin.OptimizeMode' has no member named 'x86'
|
||||
// :?:18: note: enum declared here
|
||||
// : note: enum declared here
|
||||
|
||||
@@ -73,11 +73,11 @@ pub export fn entry8() void {
|
||||
//
|
||||
// :6:19: error: value stored in comptime field does not match the default value of the field
|
||||
// :14:19: error: value stored in comptime field does not match the default value of the field
|
||||
// :53:16: error: value stored in comptime field does not match the default value of the field
|
||||
// :19:38: error: value stored in comptime field does not match the default value of the field
|
||||
// :31:19: error: value stored in comptime field does not match the default value of the field
|
||||
// :25:29: note: default value set here
|
||||
// :41:16: error: value stored in comptime field does not match the default value of the field
|
||||
// :45:12: error: value stored in comptime field does not match the default value of the field
|
||||
// :53:16: error: value stored in comptime field does not match the default value of the field
|
||||
// :66:43: error: value stored in comptime field does not match the default value of the field
|
||||
// :59:35: error: value stored in comptime field does not match the default value of the field
|
||||
|
||||
@@ -25,5 +25,6 @@ export fn e() void {
|
||||
// :4:7: error: no field named 'foo' in struct 'tmp.A'
|
||||
// :1:11: note: struct declared here
|
||||
// :10:17: error: no field named 'bar' in struct 'tmp.A'
|
||||
// :1:11: note: struct declared here
|
||||
// :18:45: error: no field named 'f' in struct 'tmp.e.B'
|
||||
// :14:15: note: struct declared here
|
||||
|
||||
@@ -5,5 +5,7 @@
|
||||
// target=x86_64-linux
|
||||
// output_mode=Exe
|
||||
//
|
||||
// :?:?: error: root struct of file 'tmp' has no member named 'main'
|
||||
// :?:?: note: called from here
|
||||
// : error: root struct of file 'tmp' has no member named 'main'
|
||||
// : note: called from here
|
||||
// : note: called from here
|
||||
// : note: called from here
|
||||
|
||||
@@ -5,6 +5,8 @@ fn main() void {}
|
||||
// target=x86_64-linux
|
||||
// output_mode=Exe
|
||||
//
|
||||
// :?:?: error: 'main' is not marked 'pub'
|
||||
// : error: 'main' is not marked 'pub'
|
||||
// :1:1: note: declared here
|
||||
// :?:?: note: called from here
|
||||
// : note: called from here
|
||||
// : note: called from here
|
||||
// : note: called from here
|
||||
|
||||
@@ -15,5 +15,6 @@ export fn entry() void {
|
||||
// target=native
|
||||
//
|
||||
// :9:51: error: values of type '[]const builtin.Type.StructField' must be comptime-known, but index value is runtime-known
|
||||
// :?:21: note: struct requires comptime because of this field
|
||||
// :?:21: note: types are not available at runtime
|
||||
// : note: struct requires comptime because of this field
|
||||
// : note: types are not available at runtime
|
||||
// : struct requires comptime because of this field
|
||||
|
||||
@@ -13,6 +13,6 @@ comptime {
|
||||
// target=native
|
||||
//
|
||||
// :7:16: error: expected type 'tmp.Foo', found 'tmp.Bar'
|
||||
// :1:13: note: struct declared here
|
||||
// :2:13: note: struct declared here
|
||||
// :1:13: note: struct declared here
|
||||
// :4:18: note: parameter type declared here
|
||||
|
||||
@@ -28,10 +28,11 @@ export fn u2m() void {
|
||||
// target=native
|
||||
//
|
||||
// :9:1: error: union initializer must initialize one field
|
||||
// :1:12: note: union declared here
|
||||
// :14:20: error: cannot initialize multiple union fields at once, unions can only have one active field
|
||||
// :14:31: note: additional initializer here
|
||||
// :1:12: note: union declared here
|
||||
// :18:21: error: union initializer must initialize one field
|
||||
// :22:20: error: cannot initialize multiple union fields at once, unions can only have one active field
|
||||
// :22:31: note: additional initializer here
|
||||
// :1:12: note: union declared here
|
||||
// :5:12: note: union declared here
|
||||
|
||||
@@ -5,7 +5,7 @@ pub fn main() void {
|
||||
_ = entry;
|
||||
}
|
||||
|
||||
// error
|
||||
// compile
|
||||
// output_mode=Exe
|
||||
// backend=llvm
|
||||
// target=x86_64-linux,x86_64-macos
|
||||
|
||||
@@ -5,7 +5,7 @@ pub fn main() void {
|
||||
_ = entry;
|
||||
}
|
||||
|
||||
// error
|
||||
// compile
|
||||
// output_mode=Exe
|
||||
// backend=stage2,llvm
|
||||
// target=x86_64-linux,x86_64-macos
|
||||
|
||||
@@ -6,7 +6,7 @@ pub fn main() void {
|
||||
_ = entry;
|
||||
}
|
||||
|
||||
// error
|
||||
// compile
|
||||
// output_mode=Exe
|
||||
// backend=llvm
|
||||
// target=x86_64-linux,x86_64-macos
|
||||
|
||||
@@ -6,7 +6,7 @@ pub fn main() void {
|
||||
_ = entry;
|
||||
}
|
||||
|
||||
// error
|
||||
// compile
|
||||
// output_mode=Exe
|
||||
// backend=stage2,llvm
|
||||
// target=x86_64-linux,x86_64-macos
|
||||
|
||||
@@ -5,7 +5,7 @@ pub fn main() void {
|
||||
_ = entry;
|
||||
}
|
||||
|
||||
// error
|
||||
// compile
|
||||
// output_mode=Exe
|
||||
// backend=stage2,llvm
|
||||
// target=x86_64-linux,x86_64-macos
|
||||
|
||||
@@ -5,7 +5,7 @@ pub fn main() void {
|
||||
_ = entry;
|
||||
}
|
||||
|
||||
// error
|
||||
// compile
|
||||
// output_mode=Exe
|
||||
// backend=stage2,llvm
|
||||
// target=x86_64-linux,x86_64-macos
|
||||
|
||||
@@ -5,7 +5,7 @@ pub fn main() void {
|
||||
_ = entry;
|
||||
}
|
||||
|
||||
// error
|
||||
// compile
|
||||
// output_mode=Exe
|
||||
// backend=stage2,llvm
|
||||
// target=x86_64-linux,x86_64-macos
|
||||
|
||||
+1
-1
@@ -5,7 +5,7 @@ pub fn main() void {
|
||||
_ = entry;
|
||||
}
|
||||
|
||||
// error
|
||||
// compile
|
||||
// output_mode=Exe
|
||||
// backend=stage2,llvm
|
||||
// target=x86_64-linux,x86_64-macos
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
const std = @import("std");
|
||||
const TestContext = @import("../../src/test.zig").TestContext;
|
||||
const Cases = @import("src/Cases.zig");
|
||||
|
||||
// These tests should work with all platforms, but we're using linux_x64 for
|
||||
// now for consistency. Will be expanded eventually.
|
||||
@@ -8,7 +8,7 @@ const linux_x64 = std.zig.CrossTarget{
|
||||
.os_tag = .linux,
|
||||
};
|
||||
|
||||
pub fn addCases(ctx: *TestContext) !void {
|
||||
pub fn addCases(ctx: *Cases) !void {
|
||||
{
|
||||
var case = ctx.exeFromCompiledC("hello world with updates", .{});
|
||||
|
||||
@@ -71,7 +71,7 @@ pub fn addCases(ctx: *TestContext) !void {
|
||||
}
|
||||
|
||||
{
|
||||
var case = ctx.exeFromCompiledC("@intToError", .{});
|
||||
var case = ctx.exeFromCompiledC("intToError", .{});
|
||||
|
||||
case.addCompareOutput(
|
||||
\\pub export fn main() c_int {
|
||||
@@ -837,7 +837,7 @@ pub fn addCases(ctx: *TestContext) !void {
|
||||
}
|
||||
|
||||
{
|
||||
var case = ctx.exeFromCompiledC("shift right + left", .{});
|
||||
var case = ctx.exeFromCompiledC("shift right and left", .{});
|
||||
case.addCompareOutput(
|
||||
\\pub export fn main() c_int {
|
||||
\\ var i: u32 = 16;
|
||||
@@ -883,7 +883,7 @@ pub fn addCases(ctx: *TestContext) !void {
|
||||
|
||||
{
|
||||
// TODO: add u64 tests, ran into issues with the literal generated for std.math.maxInt(u64)
|
||||
var case = ctx.exeFromCompiledC("add/sub wrapping operations", .{});
|
||||
var case = ctx.exeFromCompiledC("add and sub wrapping operations", .{});
|
||||
case.addCompareOutput(
|
||||
\\pub export fn main() c_int {
|
||||
\\ // Addition
|
||||
@@ -932,7 +932,7 @@ pub fn addCases(ctx: *TestContext) !void {
|
||||
}
|
||||
|
||||
{
|
||||
var case = ctx.exeFromCompiledC("@rem", linux_x64);
|
||||
var case = ctx.exeFromCompiledC("rem", linux_x64);
|
||||
case.addCompareOutput(
|
||||
\\fn assert(ok: bool) void {
|
||||
\\ if (!ok) unreachable;
|
||||
@@ -947,69 +947,4 @@ pub fn addCases(ctx: *TestContext) !void {
|
||||
\\}
|
||||
, "");
|
||||
}
|
||||
|
||||
ctx.h("simple header", linux_x64,
|
||||
\\export fn start() void{}
|
||||
,
|
||||
\\zig_extern void start(void);
|
||||
\\
|
||||
);
|
||||
ctx.h("header with single param function", linux_x64,
|
||||
\\export fn start(a: u8) void{
|
||||
\\ _ = a;
|
||||
\\}
|
||||
,
|
||||
\\zig_extern void start(uint8_t const a0);
|
||||
\\
|
||||
);
|
||||
ctx.h("header with multiple param function", linux_x64,
|
||||
\\export fn start(a: u8, b: u8, c: u8) void{
|
||||
\\ _ = a; _ = b; _ = c;
|
||||
\\}
|
||||
,
|
||||
\\zig_extern void start(uint8_t const a0, uint8_t const a1, uint8_t const a2);
|
||||
\\
|
||||
);
|
||||
ctx.h("header with u32 param function", linux_x64,
|
||||
\\export fn start(a: u32) void{ _ = a; }
|
||||
,
|
||||
\\zig_extern void start(uint32_t const a0);
|
||||
\\
|
||||
);
|
||||
ctx.h("header with usize param function", linux_x64,
|
||||
\\export fn start(a: usize) void{ _ = a; }
|
||||
,
|
||||
\\zig_extern void start(uintptr_t const a0);
|
||||
\\
|
||||
);
|
||||
ctx.h("header with bool param function", linux_x64,
|
||||
\\export fn start(a: bool) void{_ = a;}
|
||||
,
|
||||
\\zig_extern void start(bool const a0);
|
||||
\\
|
||||
);
|
||||
ctx.h("header with noreturn function", linux_x64,
|
||||
\\export fn start() noreturn {
|
||||
\\ unreachable;
|
||||
\\}
|
||||
,
|
||||
\\zig_extern zig_noreturn void start(void);
|
||||
\\
|
||||
);
|
||||
ctx.h("header with multiple functions", linux_x64,
|
||||
\\export fn a() void{}
|
||||
\\export fn b() void{}
|
||||
\\export fn c() void{}
|
||||
,
|
||||
\\zig_extern void a(void);
|
||||
\\zig_extern void b(void);
|
||||
\\zig_extern void c(void);
|
||||
\\
|
||||
);
|
||||
ctx.h("header with multiple includes", linux_x64,
|
||||
\\export fn start(a: u32, b: usize) void{ _ = a; _ = b; }
|
||||
,
|
||||
\\zig_extern void start(uint32_t const a0, uintptr_t const a1);
|
||||
\\
|
||||
);
|
||||
}
|
||||
+25
-199
@@ -1,146 +1,10 @@
|
||||
const std = @import("std");
|
||||
const builtin = @import("builtin");
|
||||
const TestContext = @import("../src/test.zig").TestContext;
|
||||
|
||||
pub fn addCases(ctx: *TestContext) !void {
|
||||
{
|
||||
const case = ctx.obj("wrong same named struct", .{});
|
||||
case.backend = .stage1;
|
||||
|
||||
case.addSourceFile("a.zig",
|
||||
\\pub const Foo = struct {
|
||||
\\ x: i32,
|
||||
\\};
|
||||
);
|
||||
|
||||
case.addSourceFile("b.zig",
|
||||
\\pub const Foo = struct {
|
||||
\\ z: f64,
|
||||
\\};
|
||||
);
|
||||
|
||||
case.addError(
|
||||
\\const a = @import("a.zig");
|
||||
\\const b = @import("b.zig");
|
||||
\\
|
||||
\\export fn entry() void {
|
||||
\\ var a1: a.Foo = undefined;
|
||||
\\ bar(&a1);
|
||||
\\}
|
||||
\\
|
||||
\\fn bar(x: *b.Foo) void {_ = x;}
|
||||
, &[_][]const u8{
|
||||
"tmp.zig:6:10: error: expected type '*b.Foo', found '*a.Foo'",
|
||||
"tmp.zig:6:10: note: pointer type child 'a.Foo' cannot cast into pointer type child 'b.Foo'",
|
||||
"a.zig:1:17: note: a.Foo declared here",
|
||||
"b.zig:1:17: note: b.Foo declared here",
|
||||
});
|
||||
}
|
||||
|
||||
{
|
||||
const case = ctx.obj("multiple files with private function error", .{});
|
||||
case.backend = .stage1;
|
||||
|
||||
case.addSourceFile("foo.zig",
|
||||
\\fn privateFunction() void { }
|
||||
);
|
||||
|
||||
case.addError(
|
||||
\\const foo = @import("foo.zig",);
|
||||
\\
|
||||
\\export fn callPrivFunction() void {
|
||||
\\ foo.privateFunction();
|
||||
\\}
|
||||
, &[_][]const u8{
|
||||
"tmp.zig:4:8: error: 'privateFunction' is private",
|
||||
"foo.zig:1:1: note: declared here",
|
||||
});
|
||||
}
|
||||
|
||||
{
|
||||
const case = ctx.obj("multiple files with private member instance function (canonical invocation) error", .{});
|
||||
case.backend = .stage1;
|
||||
|
||||
case.addSourceFile("foo.zig",
|
||||
\\pub const Foo = struct {
|
||||
\\ fn privateFunction(self: *Foo) void { _ = self; }
|
||||
\\};
|
||||
);
|
||||
|
||||
case.addError(
|
||||
\\const Foo = @import("foo.zig",).Foo;
|
||||
\\
|
||||
\\export fn callPrivFunction() void {
|
||||
\\ var foo = Foo{};
|
||||
\\ Foo.privateFunction(foo);
|
||||
\\}
|
||||
, &[_][]const u8{
|
||||
"tmp.zig:5:8: error: 'privateFunction' is private",
|
||||
"foo.zig:2:5: note: declared here",
|
||||
});
|
||||
}
|
||||
|
||||
{
|
||||
const case = ctx.obj("multiple files with private member instance function error", .{});
|
||||
case.backend = .stage1;
|
||||
|
||||
case.addSourceFile("foo.zig",
|
||||
\\pub const Foo = struct {
|
||||
\\ fn privateFunction(self: *Foo) void { _ = self; }
|
||||
\\};
|
||||
);
|
||||
|
||||
case.addError(
|
||||
\\const Foo = @import("foo.zig",).Foo;
|
||||
\\
|
||||
\\export fn callPrivFunction() void {
|
||||
\\ var foo = Foo{};
|
||||
\\ foo.privateFunction();
|
||||
\\}
|
||||
, &[_][]const u8{
|
||||
"tmp.zig:5:8: error: 'privateFunction' is private",
|
||||
"foo.zig:2:5: note: declared here",
|
||||
});
|
||||
}
|
||||
|
||||
{
|
||||
const case = ctx.obj("export collision", .{});
|
||||
case.backend = .stage1;
|
||||
|
||||
case.addSourceFile("foo.zig",
|
||||
\\export fn bar() void {}
|
||||
\\pub const baz = 1234;
|
||||
);
|
||||
|
||||
case.addError(
|
||||
\\const foo = @import("foo.zig",);
|
||||
\\
|
||||
\\export fn bar() usize {
|
||||
\\ return foo.baz;
|
||||
\\}
|
||||
, &[_][]const u8{
|
||||
"foo.zig:1:1: error: exported symbol collision: 'bar'",
|
||||
"tmp.zig:3:1: note: other symbol here",
|
||||
});
|
||||
}
|
||||
|
||||
ctx.objErrStage1("non-printable invalid character", "\xff\xfe" ++
|
||||
"fn foo() bool {\r\n" ++
|
||||
" return true;\r\n" ++
|
||||
"}\r\n", &[_][]const u8{
|
||||
"tmp.zig:1:1: error: expected test, comptime, var decl, or container field, found 'invalid bytes'",
|
||||
"tmp.zig:1:1: note: invalid byte: '\\xff'",
|
||||
});
|
||||
|
||||
ctx.objErrStage1("non-printable invalid character with escape alternative", "fn foo() bool {\n" ++
|
||||
"\treturn true;\n" ++
|
||||
"}\n", &[_][]const u8{
|
||||
"tmp.zig:2:1: error: invalid character: '\\t'",
|
||||
});
|
||||
const Cases = @import("src/Cases.zig");
|
||||
|
||||
pub fn addCases(ctx: *Cases) !void {
|
||||
{
|
||||
const case = ctx.obj("multiline error messages", .{});
|
||||
case.backend = .stage2;
|
||||
|
||||
case.addError(
|
||||
\\comptime {
|
||||
@@ -176,7 +40,6 @@ pub fn addCases(ctx: *TestContext) !void {
|
||||
|
||||
{
|
||||
const case = ctx.obj("isolated carriage return in multiline string literal", .{});
|
||||
case.backend = .stage2;
|
||||
|
||||
case.addError("const foo = \\\\\test\r\r rogue carriage return\n;", &[_][]const u8{
|
||||
":1:19: error: expected ';' after declaration",
|
||||
@@ -195,16 +58,6 @@ pub fn addCases(ctx: *TestContext) !void {
|
||||
|
||||
{
|
||||
const case = ctx.obj("argument causes error", .{});
|
||||
case.backend = .stage2;
|
||||
|
||||
case.addSourceFile("b.zig",
|
||||
\\pub const ElfDynLib = struct {
|
||||
\\ pub fn lookup(self: *ElfDynLib, comptime T: type) ?T {
|
||||
\\ _ = self;
|
||||
\\ return undefined;
|
||||
\\ }
|
||||
\\};
|
||||
);
|
||||
|
||||
case.addError(
|
||||
\\pub export fn entry() void {
|
||||
@@ -216,15 +69,18 @@ pub fn addCases(ctx: *TestContext) !void {
|
||||
":3:12: note: argument to function being called at comptime must be comptime-known",
|
||||
":2:55: note: expression is evaluated at comptime because the generic function was instantiated with a comptime-only return type",
|
||||
});
|
||||
case.addSourceFile("b.zig",
|
||||
\\pub const ElfDynLib = struct {
|
||||
\\ pub fn lookup(self: *ElfDynLib, comptime T: type) ?T {
|
||||
\\ _ = self;
|
||||
\\ return undefined;
|
||||
\\ }
|
||||
\\};
|
||||
);
|
||||
}
|
||||
|
||||
{
|
||||
const case = ctx.obj("astgen failure in file struct", .{});
|
||||
case.backend = .stage2;
|
||||
|
||||
case.addSourceFile("b.zig",
|
||||
\\+
|
||||
);
|
||||
|
||||
case.addError(
|
||||
\\pub export fn entry() void {
|
||||
@@ -233,21 +89,13 @@ pub fn addCases(ctx: *TestContext) !void {
|
||||
, &[_][]const u8{
|
||||
":1:1: error: expected type expression, found '+'",
|
||||
});
|
||||
case.addSourceFile("b.zig",
|
||||
\\+
|
||||
);
|
||||
}
|
||||
|
||||
{
|
||||
const case = ctx.obj("invalid store to comptime field", .{});
|
||||
case.backend = .stage2;
|
||||
|
||||
case.addSourceFile("a.zig",
|
||||
\\pub const S = struct {
|
||||
\\ comptime foo: u32 = 1,
|
||||
\\ bar: u32,
|
||||
\\ pub fn foo(x: @This()) void {
|
||||
\\ _ = x;
|
||||
\\ }
|
||||
\\};
|
||||
);
|
||||
|
||||
case.addError(
|
||||
\\const a = @import("a.zig");
|
||||
@@ -259,44 +107,19 @@ pub fn addCases(ctx: *TestContext) !void {
|
||||
":4:23: error: value stored in comptime field does not match the default value of the field",
|
||||
":2:25: note: default value set here",
|
||||
});
|
||||
case.addSourceFile("a.zig",
|
||||
\\pub const S = struct {
|
||||
\\ comptime foo: u32 = 1,
|
||||
\\ bar: u32,
|
||||
\\ pub fn foo(x: @This()) void {
|
||||
\\ _ = x;
|
||||
\\ }
|
||||
\\};
|
||||
);
|
||||
}
|
||||
|
||||
// TODO test this in stage2, but we won't even try in stage1
|
||||
//ctx.objErrStage1("inline fn calls itself indirectly",
|
||||
// \\export fn foo() void {
|
||||
// \\ bar();
|
||||
// \\}
|
||||
// \\fn bar() callconv(.Inline) void {
|
||||
// \\ baz();
|
||||
// \\ quux();
|
||||
// \\}
|
||||
// \\fn baz() callconv(.Inline) void {
|
||||
// \\ bar();
|
||||
// \\ quux();
|
||||
// \\}
|
||||
// \\extern fn quux() void;
|
||||
//, &[_][]const u8{
|
||||
// "tmp.zig:4:1: error: unable to inline function",
|
||||
//});
|
||||
|
||||
//ctx.objErrStage1("save reference to inline function",
|
||||
// \\export fn foo() void {
|
||||
// \\ quux(@ptrToInt(bar));
|
||||
// \\}
|
||||
// \\fn bar() callconv(.Inline) void { }
|
||||
// \\extern fn quux(usize) void;
|
||||
//, &[_][]const u8{
|
||||
// "tmp.zig:4:1: error: unable to inline function",
|
||||
//});
|
||||
|
||||
{
|
||||
const case = ctx.obj("file in multiple modules", .{});
|
||||
case.backend = .stage2;
|
||||
|
||||
case.addSourceFile("foo.zig",
|
||||
\\const dummy = 0;
|
||||
);
|
||||
|
||||
case.addDepModule("foo", "foo.zig");
|
||||
|
||||
case.addError(
|
||||
@@ -309,5 +132,8 @@ pub fn addCases(ctx: *TestContext) !void {
|
||||
":1:1: note: root of module root.foo",
|
||||
":3:17: note: imported from module root",
|
||||
});
|
||||
case.addSourceFile("foo.zig",
|
||||
\\const dummy = 0;
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -13,7 +13,7 @@ pub fn build(b: *std.Build) void {
|
||||
// Without -dead_strip, we expect `iAmUnused` symbol present
|
||||
const exe = createScenario(b, optimize, target, "no-gc");
|
||||
|
||||
const check = exe.checkObject(.macho);
|
||||
const check = exe.checkObject();
|
||||
check.checkInSymtab();
|
||||
check.checkNext("{*} (__TEXT,__text) external _iAmUnused");
|
||||
|
||||
@@ -27,7 +27,7 @@ pub fn build(b: *std.Build) void {
|
||||
const exe = createScenario(b, optimize, target, "yes-gc");
|
||||
exe.link_gc_sections = true;
|
||||
|
||||
const check = exe.checkObject(.macho);
|
||||
const check = exe.checkObject();
|
||||
check.checkInSymtab();
|
||||
check.checkNotPresent("{*} (__TEXT,__text) external _iAmUnused");
|
||||
|
||||
|
||||
@@ -18,7 +18,7 @@ fn add(b: *std.Build, test_step: *std.Build.Step, optimize: std.builtin.Optimize
|
||||
// Without -dead_strip_dylibs we expect `-la` to include liba.dylib in the final executable
|
||||
const exe = createScenario(b, optimize, "no-dead-strip");
|
||||
|
||||
const check = exe.checkObject(.macho);
|
||||
const check = exe.checkObject();
|
||||
check.checkStart("cmd LOAD_DYLIB");
|
||||
check.checkNext("name {*}Cocoa");
|
||||
|
||||
|
||||
@@ -24,7 +24,7 @@ fn add(b: *std.Build, test_step: *std.Build.Step, optimize: std.builtin.Optimize
|
||||
dylib.addCSourceFile("a.c", &.{});
|
||||
dylib.linkLibC();
|
||||
|
||||
const check_dylib = dylib.checkObject(.macho);
|
||||
const check_dylib = dylib.checkObject();
|
||||
check_dylib.checkStart("cmd ID_DYLIB");
|
||||
check_dylib.checkNext("name @rpath/liba.dylib");
|
||||
check_dylib.checkNext("timestamp 2");
|
||||
@@ -44,7 +44,7 @@ fn add(b: *std.Build, test_step: *std.Build.Step, optimize: std.builtin.Optimize
|
||||
exe.addRPathDirectorySource(dylib.getOutputDirectorySource());
|
||||
exe.linkLibC();
|
||||
|
||||
const check_exe = exe.checkObject(.macho);
|
||||
const check_exe = exe.checkObject();
|
||||
check_exe.checkStart("cmd LOAD_DYLIB");
|
||||
check_exe.checkNext("name @rpath/liba.dylib");
|
||||
check_exe.checkNext("timestamp 2");
|
||||
|
||||
@@ -22,7 +22,7 @@ fn add(b: *std.Build, test_step: *std.Build.Step, optimize: std.builtin.Optimize
|
||||
exe.linkLibC();
|
||||
exe.entry_symbol_name = "_non_main";
|
||||
|
||||
const check_exe = exe.checkObject(.macho);
|
||||
const check_exe = exe.checkObject();
|
||||
|
||||
check_exe.checkStart("segname __TEXT");
|
||||
check_exe.checkNext("vmaddr {vmaddr}");
|
||||
|
||||
@@ -20,7 +20,7 @@ fn add(b: *std.Build, test_step: *std.Build.Step, optimize: std.builtin.Optimize
|
||||
const exe = simpleExe(b, optimize);
|
||||
exe.headerpad_max_install_names = true;
|
||||
|
||||
const check = exe.checkObject(.macho);
|
||||
const check = exe.checkObject();
|
||||
check.checkStart("sectname __text");
|
||||
check.checkNext("offset {offset}");
|
||||
|
||||
@@ -45,7 +45,7 @@ fn add(b: *std.Build, test_step: *std.Build.Step, optimize: std.builtin.Optimize
|
||||
const exe = simpleExe(b, optimize);
|
||||
exe.headerpad_size = 0x10000;
|
||||
|
||||
const check = exe.checkObject(.macho);
|
||||
const check = exe.checkObject();
|
||||
check.checkStart("sectname __text");
|
||||
check.checkNext("offset {offset}");
|
||||
check.checkComputeCompare("offset", .{ .op = .gte, .value = .{ .literal = 0x10000 } });
|
||||
@@ -62,7 +62,7 @@ fn add(b: *std.Build, test_step: *std.Build.Step, optimize: std.builtin.Optimize
|
||||
exe.headerpad_max_install_names = true;
|
||||
exe.headerpad_size = 0x10000;
|
||||
|
||||
const check = exe.checkObject(.macho);
|
||||
const check = exe.checkObject();
|
||||
check.checkStart("sectname __text");
|
||||
check.checkNext("offset {offset}");
|
||||
check.checkComputeCompare("offset", .{ .op = .gte, .value = .{ .literal = 0x10000 } });
|
||||
@@ -79,7 +79,7 @@ fn add(b: *std.Build, test_step: *std.Build.Step, optimize: std.builtin.Optimize
|
||||
exe.headerpad_size = 0x1000;
|
||||
exe.headerpad_max_install_names = true;
|
||||
|
||||
const check = exe.checkObject(.macho);
|
||||
const check = exe.checkObject();
|
||||
check.checkStart("sectname __text");
|
||||
check.checkNext("offset {offset}");
|
||||
|
||||
|
||||
@@ -22,7 +22,7 @@ fn add(b: *std.Build, test_step: *std.Build.Step, optimize: std.builtin.Optimize
|
||||
.target = target,
|
||||
});
|
||||
|
||||
const check = obj.checkObject(.macho);
|
||||
const check = obj.checkObject();
|
||||
|
||||
check.checkInSymtab();
|
||||
check.checkNext("{*} (__DATA,__TestGlobal) external _test_global");
|
||||
|
||||
@@ -25,7 +25,7 @@ fn add(b: *std.Build, test_step: *std.Build.Step, optimize: std.builtin.Optimize
|
||||
exe.linkFrameworkNeeded("Cocoa");
|
||||
exe.dead_strip_dylibs = true;
|
||||
|
||||
const check = exe.checkObject(.macho);
|
||||
const check = exe.checkObject();
|
||||
check.checkStart("cmd LOAD_DYLIB");
|
||||
check.checkNext("name {*}Cocoa");
|
||||
test_step.dependOn(&check.step);
|
||||
|
||||
@@ -38,7 +38,7 @@ fn add(b: *std.Build, test_step: *std.Build.Step, optimize: std.builtin.Optimize
|
||||
exe.addRPathDirectorySource(dylib.getOutputDirectorySource());
|
||||
exe.dead_strip_dylibs = true;
|
||||
|
||||
const check = exe.checkObject(.macho);
|
||||
const check = exe.checkObject();
|
||||
check.checkStart("cmd LOAD_DYLIB");
|
||||
check.checkNext("name @rpath/liba.dylib");
|
||||
|
||||
|
||||
@@ -19,7 +19,7 @@ pub fn build(b: *std.Build) void {
|
||||
exe.linkLibC();
|
||||
exe.pagezero_size = 0x4000;
|
||||
|
||||
const check = exe.checkObject(.macho);
|
||||
const check = exe.checkObject();
|
||||
check.checkStart("LC 0");
|
||||
check.checkNext("segname __PAGEZERO");
|
||||
check.checkNext("vmaddr 0");
|
||||
@@ -41,7 +41,7 @@ pub fn build(b: *std.Build) void {
|
||||
exe.linkLibC();
|
||||
exe.pagezero_size = 0;
|
||||
|
||||
const check = exe.checkObject(.macho);
|
||||
const check = exe.checkObject();
|
||||
check.checkStart("LC 0");
|
||||
check.checkNext("segname __TEXT");
|
||||
check.checkNext("vmaddr 0");
|
||||
|
||||
@@ -20,7 +20,7 @@ fn add(b: *std.Build, test_step: *std.Build.Step, optimize: std.builtin.Optimize
|
||||
const exe = createScenario(b, optimize, target, "search_dylibs_first");
|
||||
exe.search_strategy = .dylibs_first;
|
||||
|
||||
const check = exe.checkObject(.macho);
|
||||
const check = exe.checkObject();
|
||||
check.checkStart("cmd LOAD_DYLIB");
|
||||
check.checkNext("name @rpath/libsearch_dylibs_first.dylib");
|
||||
|
||||
|
||||
@@ -24,7 +24,7 @@ fn add(b: *std.Build, test_step: *std.Build.Step, optimize: std.builtin.Optimize
|
||||
exe.linkLibC();
|
||||
exe.stack_size = 0x100000000;
|
||||
|
||||
const check_exe = exe.checkObject(.macho);
|
||||
const check_exe = exe.checkObject();
|
||||
check_exe.checkStart("cmd MAIN");
|
||||
check_exe.checkNext("stacksize 100000000");
|
||||
|
||||
|
||||
@@ -24,7 +24,7 @@ fn add(b: *std.Build, test_step: *std.Build.Step, optimize: std.builtin.Optimize
|
||||
});
|
||||
exe.linkLibC();
|
||||
|
||||
const check_exe = exe.checkObject(.macho);
|
||||
const check_exe = exe.checkObject();
|
||||
|
||||
check_exe.checkStart("cmd SEGMENT_64");
|
||||
check_exe.checkNext("segname __LINKEDIT");
|
||||
|
||||
@@ -31,7 +31,7 @@ fn testUnwindInfo(
|
||||
const exe = createScenario(b, optimize, target, name);
|
||||
exe.link_gc_sections = dead_strip;
|
||||
|
||||
const check = exe.checkObject(.macho);
|
||||
const check = exe.checkObject();
|
||||
check.checkStart("segname __TEXT");
|
||||
check.checkNext("sectname __gcc_except_tab");
|
||||
check.checkNext("sectname __unwind_info");
|
||||
|
||||
@@ -22,7 +22,7 @@ fn add(b: *std.Build, test_step: *std.Build.Step, optimize: std.builtin.Optimize
|
||||
exe.linkLibC();
|
||||
exe.linkFrameworkWeak("Cocoa");
|
||||
|
||||
const check = exe.checkObject(.macho);
|
||||
const check = exe.checkObject();
|
||||
check.checkStart("cmd LOAD_WEAK_DYLIB");
|
||||
check.checkNext("name {*}Cocoa");
|
||||
test_step.dependOn(&check.step);
|
||||
|
||||
@@ -36,7 +36,7 @@ fn add(b: *std.Build, test_step: *std.Build.Step, optimize: std.builtin.Optimize
|
||||
exe.addLibraryPathDirectorySource(dylib.getOutputDirectorySource());
|
||||
exe.addRPathDirectorySource(dylib.getOutputDirectorySource());
|
||||
|
||||
const check = exe.checkObject(.macho);
|
||||
const check = exe.checkObject();
|
||||
check.checkStart("cmd LOAD_WEAK_DYLIB");
|
||||
check.checkNext("name @rpath/liba.dylib");
|
||||
|
||||
|
||||
@@ -25,7 +25,7 @@ fn add(b: *std.Build, test_step: *std.Build.Step, optimize: std.builtin.Optimize
|
||||
lib.use_lld = false;
|
||||
lib.strip = false;
|
||||
|
||||
const check = lib.checkObject(.wasm);
|
||||
const check = lib.checkObject();
|
||||
check.checkStart("Section custom");
|
||||
check.checkNext("name __truncsfhf2"); // Ensure it was imported and resolved
|
||||
|
||||
|
||||
@@ -19,7 +19,7 @@ pub fn build(b: *std.Build) void {
|
||||
lib.use_lld = false;
|
||||
|
||||
// Verify the result contains the features explicitly set on the target for the library.
|
||||
const check = lib.checkObject(.wasm);
|
||||
const check = lib.checkObject();
|
||||
check.checkStart("name target_features");
|
||||
check.checkNext("features 1");
|
||||
check.checkNext("+ atomics");
|
||||
|
||||
@@ -19,7 +19,7 @@ pub fn build(b: *std.Build) void {
|
||||
lib.import_memory = true;
|
||||
lib.install();
|
||||
|
||||
const check_lib = lib.checkObject(.wasm);
|
||||
const check_lib = lib.checkObject();
|
||||
|
||||
// since we import memory, make sure it exists with the correct naming
|
||||
check_lib.checkStart("Section import");
|
||||
|
||||
@@ -19,7 +19,7 @@ pub fn build(b: *std.Build) void {
|
||||
lib.export_symbol_names = &.{ "foo", "bar" };
|
||||
lib.global_base = 0; // put data section at address 0 to make data symbols easier to parse
|
||||
|
||||
const check_lib = lib.checkObject(.wasm);
|
||||
const check_lib = lib.checkObject();
|
||||
|
||||
check_lib.checkStart("Section global");
|
||||
check_lib.checkNext("entries 3");
|
||||
|
||||
@@ -42,19 +42,19 @@ fn add(b: *std.Build, test_step: *std.Build.Step, optimize: std.builtin.Optimize
|
||||
force_export.use_llvm = false;
|
||||
force_export.use_lld = false;
|
||||
|
||||
const check_no_export = no_export.checkObject(.wasm);
|
||||
const check_no_export = no_export.checkObject();
|
||||
check_no_export.checkStart("Section export");
|
||||
check_no_export.checkNext("entries 1");
|
||||
check_no_export.checkNext("name memory");
|
||||
check_no_export.checkNext("kind memory");
|
||||
|
||||
const check_dynamic_export = dynamic_export.checkObject(.wasm);
|
||||
const check_dynamic_export = dynamic_export.checkObject();
|
||||
check_dynamic_export.checkStart("Section export");
|
||||
check_dynamic_export.checkNext("entries 2");
|
||||
check_dynamic_export.checkNext("name foo");
|
||||
check_dynamic_export.checkNext("kind function");
|
||||
|
||||
const check_force_export = force_export.checkObject(.wasm);
|
||||
const check_force_export = force_export.checkObject();
|
||||
check_force_export.checkStart("Section export");
|
||||
check_force_export.checkNext("entries 2");
|
||||
check_force_export.checkNext("name foo");
|
||||
|
||||
@@ -20,7 +20,7 @@ fn add(b: *std.Build, test_step: *std.Build.Step, optimize: std.builtin.Optimize
|
||||
lib.import_symbols = true; // import `a` and `b`
|
||||
lib.rdynamic = true; // export `foo`
|
||||
|
||||
const check_lib = lib.checkObject(.wasm);
|
||||
const check_lib = lib.checkObject();
|
||||
check_lib.checkStart("Section import");
|
||||
check_lib.checkNext("entries 2"); // a.hello & b.hello
|
||||
check_lib.checkNext("module a");
|
||||
|
||||
@@ -42,9 +42,9 @@ fn add(b: *std.Build, test_step: *std.Build.Step, optimize: std.builtin.Optimize
|
||||
regular_table.use_llvm = false;
|
||||
regular_table.use_lld = false;
|
||||
|
||||
const check_import = import_table.checkObject(.wasm);
|
||||
const check_export = export_table.checkObject(.wasm);
|
||||
const check_regular = regular_table.checkObject(.wasm);
|
||||
const check_import = import_table.checkObject();
|
||||
const check_export = export_table.checkObject();
|
||||
const check_regular = regular_table.checkObject();
|
||||
|
||||
check_import.checkStart("Section import");
|
||||
check_import.checkNext("entries 1");
|
||||
|
||||
@@ -32,7 +32,7 @@ pub fn build(b: *std.Build) void {
|
||||
lib.addObject(c_obj);
|
||||
|
||||
// Verify the result contains the features from the C Object file.
|
||||
const check = lib.checkObject(.wasm);
|
||||
const check = lib.checkObject();
|
||||
check.checkStart("name target_features");
|
||||
check.checkNext("features 7");
|
||||
check.checkNext("+ atomics");
|
||||
|
||||
@@ -27,7 +27,7 @@ fn add(b: *std.Build, test_step: *std.Build.Step, optimize: std.builtin.Optimize
|
||||
|
||||
const version_fmt = "version " ++ builtin.zig_version_string;
|
||||
|
||||
const check_lib = lib.checkObject(.wasm);
|
||||
const check_lib = lib.checkObject();
|
||||
check_lib.checkStart("name producers");
|
||||
check_lib.checkNext("fields 2");
|
||||
check_lib.checkNext("field_name language");
|
||||
|
||||
@@ -24,7 +24,7 @@ fn add(b: *std.Build, test_step: *std.Build.Step, optimize: std.builtin.Optimize
|
||||
lib.strip = false;
|
||||
lib.install();
|
||||
|
||||
const check_lib = lib.checkObject(.wasm);
|
||||
const check_lib = lib.checkObject();
|
||||
check_lib.checkStart("Section data");
|
||||
check_lib.checkNext("entries 2"); // rodata & data, no bss because we're exporting memory
|
||||
|
||||
|
||||
@@ -25,7 +25,7 @@ fn add(b: *std.Build, test_step: *std.Build.Step, optimize: std.builtin.Optimize
|
||||
lib.stack_size = std.wasm.page_size * 2; // set an explicit stack size
|
||||
lib.install();
|
||||
|
||||
const check_lib = lib.checkObject(.wasm);
|
||||
const check_lib = lib.checkObject();
|
||||
|
||||
// ensure global exists and its initial value is equal to explitic stack size
|
||||
check_lib.checkStart("Section global");
|
||||
|
||||
@@ -24,7 +24,7 @@ fn add(b: *std.Build, test_step: *std.Build.Step, optimize: std.builtin.Optimize
|
||||
lib.strip = false;
|
||||
lib.install();
|
||||
|
||||
const check_lib = lib.checkObject(.wasm);
|
||||
const check_lib = lib.checkObject();
|
||||
check_lib.checkStart("Section type");
|
||||
// only 2 entries, although we have 3 functions.
|
||||
// This is to test functions with the same function signature
|
||||
|
||||
@@ -1,11 +1,11 @@
|
||||
const std = @import("std");
|
||||
const TestContext = @import("../../src/test.zig").TestContext;
|
||||
const Cases = @import("src/Cases.zig");
|
||||
|
||||
pub fn addCases(ctx: *TestContext) !void {
|
||||
pub fn addCases(ctx: *Cases) !void {
|
||||
{
|
||||
var case = addPtx(ctx, "nvptx: simple addition and subtraction");
|
||||
var case = addPtx(ctx, "simple addition and subtraction");
|
||||
|
||||
case.compiles(
|
||||
case.addCompile(
|
||||
\\fn add(a: i32, b: i32) i32 {
|
||||
\\ return a + b;
|
||||
\\}
|
||||
@@ -20,9 +20,9 @@ pub fn addCases(ctx: *TestContext) !void {
|
||||
}
|
||||
|
||||
{
|
||||
var case = addPtx(ctx, "nvptx: read special registers");
|
||||
var case = addPtx(ctx, "read special registers");
|
||||
|
||||
case.compiles(
|
||||
case.addCompile(
|
||||
\\fn threadIdX() u32 {
|
||||
\\ return asm ("mov.u32 \t%[r], %tid.x;"
|
||||
\\ : [r] "=r" (-> u32),
|
||||
@@ -37,9 +37,9 @@ pub fn addCases(ctx: *TestContext) !void {
|
||||
}
|
||||
|
||||
{
|
||||
var case = addPtx(ctx, "nvptx: address spaces");
|
||||
var case = addPtx(ctx, "address spaces");
|
||||
|
||||
case.compiles(
|
||||
case.addCompile(
|
||||
\\var x: i32 addrspace(.global) = 0;
|
||||
\\
|
||||
\\pub export fn increment(out: *i32) callconv(.PtxKernel) void {
|
||||
@@ -50,8 +50,8 @@ pub fn addCases(ctx: *TestContext) !void {
|
||||
}
|
||||
|
||||
{
|
||||
var case = addPtx(ctx, "nvptx: reduce in shared mem");
|
||||
case.compiles(
|
||||
var case = addPtx(ctx, "reduce in shared mem");
|
||||
case.addCompile(
|
||||
\\fn threadIdX() u32 {
|
||||
\\ return asm ("mov.u32 \t%[r], %tid.x;"
|
||||
\\ : [r] "=r" (-> u32),
|
||||
@@ -88,16 +88,15 @@ const nvptx_target = std.zig.CrossTarget{
|
||||
};
|
||||
|
||||
pub fn addPtx(
|
||||
ctx: *TestContext,
|
||||
ctx: *Cases,
|
||||
name: []const u8,
|
||||
) *TestContext.Case {
|
||||
ctx.cases.append(TestContext.Case{
|
||||
) *Cases.Case {
|
||||
ctx.cases.append(.{
|
||||
.name = name,
|
||||
.target = nvptx_target,
|
||||
.updates = std.ArrayList(TestContext.Update).init(ctx.cases.allocator),
|
||||
.updates = std.ArrayList(Cases.Update).init(ctx.cases.allocator),
|
||||
.output_mode = .Obj,
|
||||
.files = std.ArrayList(TestContext.File).init(ctx.cases.allocator),
|
||||
.deps = std.ArrayList(TestContext.DepModule).init(ctx.cases.allocator),
|
||||
.deps = std.ArrayList(Cases.DepModule).init(ctx.cases.allocator),
|
||||
.link_libc = false,
|
||||
.backend = .llvm,
|
||||
// Bug in Debug mode
|
||||
+1587
@@ -0,0 +1,1587 @@
|
||||
gpa: Allocator,
|
||||
arena: Allocator,
|
||||
cases: std.ArrayList(Case),
|
||||
incremental_cases: std.ArrayList(IncrementalCase),
|
||||
|
||||
pub const IncrementalCase = struct {
|
||||
base_path: []const u8,
|
||||
};
|
||||
|
||||
pub const Update = struct {
|
||||
/// The input to the current update. We simulate an incremental update
|
||||
/// with the file's contents changed to this value each update.
|
||||
///
|
||||
/// This value can change entirely between updates, which would be akin
|
||||
/// to deleting the source file and creating a new one from scratch; or
|
||||
/// you can keep it mostly consistent, with small changes, testing the
|
||||
/// effects of the incremental compilation.
|
||||
files: std.ArrayList(File),
|
||||
/// This is a description of what happens with the update, for debugging
|
||||
/// purposes.
|
||||
name: []const u8,
|
||||
case: union(enum) {
|
||||
/// Check that it compiles with no errors.
|
||||
Compile: void,
|
||||
/// Check the main binary output file against an expected set of bytes.
|
||||
/// This is most useful with, for example, `-ofmt=c`.
|
||||
CompareObjectFile: []const u8,
|
||||
/// An error update attempts to compile bad code, and ensures that it
|
||||
/// fails to compile, and for the expected reasons.
|
||||
/// A slice containing the expected stderr template, which
|
||||
/// gets some values substituted.
|
||||
Error: []const []const u8,
|
||||
/// An execution update compiles and runs the input, testing the
|
||||
/// stdout against the expected results
|
||||
/// This is a slice containing the expected message.
|
||||
Execution: []const u8,
|
||||
/// A header update compiles the input with the equivalent of
|
||||
/// `-femit-h` and tests the produced header against the
|
||||
/// expected result
|
||||
Header: []const u8,
|
||||
},
|
||||
|
||||
pub fn addSourceFile(update: *Update, name: []const u8, src: [:0]const u8) void {
|
||||
update.files.append(.{ .path = name, .src = src }) catch @panic("out of memory");
|
||||
}
|
||||
};
|
||||
|
||||
pub const File = struct {
|
||||
src: [:0]const u8,
|
||||
path: []const u8,
|
||||
};
|
||||
|
||||
pub const DepModule = struct {
|
||||
name: []const u8,
|
||||
path: []const u8,
|
||||
};
|
||||
|
||||
pub const Backend = enum {
|
||||
stage1,
|
||||
stage2,
|
||||
llvm,
|
||||
};
|
||||
|
||||
/// A `Case` consists of a list of `Update`. The same `Compilation` is used for each
|
||||
/// update, so each update's source is treated as a single file being
|
||||
/// updated by the test harness and incrementally compiled.
|
||||
pub const Case = struct {
|
||||
/// The name of the test case. This is shown if a test fails, and
|
||||
/// otherwise ignored.
|
||||
name: []const u8,
|
||||
/// The platform the test targets. For non-native platforms, an emulator
|
||||
/// such as QEMU is required for tests to complete.
|
||||
target: CrossTarget,
|
||||
/// In order to be able to run e.g. Execution updates, this must be set
|
||||
/// to Executable.
|
||||
output_mode: std.builtin.OutputMode,
|
||||
optimize_mode: std.builtin.Mode = .Debug,
|
||||
updates: std.ArrayList(Update),
|
||||
emit_h: bool = false,
|
||||
is_test: bool = false,
|
||||
expect_exact: bool = false,
|
||||
backend: Backend = .stage2,
|
||||
link_libc: bool = false,
|
||||
|
||||
deps: std.ArrayList(DepModule),
|
||||
|
||||
pub fn addSourceFile(case: *Case, name: []const u8, src: [:0]const u8) void {
|
||||
const update = &case.updates.items[case.updates.items.len - 1];
|
||||
update.files.append(.{ .path = name, .src = src }) catch @panic("OOM");
|
||||
}
|
||||
|
||||
pub fn addDepModule(case: *Case, name: []const u8, path: []const u8) void {
|
||||
case.deps.append(.{
|
||||
.name = name,
|
||||
.path = path,
|
||||
}) catch @panic("out of memory");
|
||||
}
|
||||
|
||||
/// Adds a subcase in which the module is updated with `src`, compiled,
|
||||
/// run, and the output is tested against `result`.
|
||||
pub fn addCompareOutput(self: *Case, src: [:0]const u8, result: []const u8) void {
|
||||
self.updates.append(.{
|
||||
.files = std.ArrayList(File).init(self.updates.allocator),
|
||||
.name = "update",
|
||||
.case = .{ .Execution = result },
|
||||
}) catch @panic("out of memory");
|
||||
addSourceFile(self, "tmp.zig", src);
|
||||
}
|
||||
|
||||
pub fn addError(self: *Case, src: [:0]const u8, errors: []const []const u8) void {
|
||||
return self.addErrorNamed("update", src, errors);
|
||||
}
|
||||
|
||||
/// Adds a subcase in which the module is updated with `src`, which
|
||||
/// should contain invalid input, and ensures that compilation fails
|
||||
/// for the expected reasons, given in sequential order in `errors` in
|
||||
/// the form `:line:column: error: message`.
|
||||
pub fn addErrorNamed(
|
||||
self: *Case,
|
||||
name: []const u8,
|
||||
src: [:0]const u8,
|
||||
errors: []const []const u8,
|
||||
) void {
|
||||
assert(errors.len != 0);
|
||||
self.updates.append(.{
|
||||
.files = std.ArrayList(File).init(self.updates.allocator),
|
||||
.name = name,
|
||||
.case = .{ .Error = errors },
|
||||
}) catch @panic("out of memory");
|
||||
addSourceFile(self, "tmp.zig", src);
|
||||
}
|
||||
|
||||
/// Adds a subcase in which the module is updated with `src`, and
|
||||
/// asserts that it compiles without issue
|
||||
pub fn addCompile(self: *Case, src: [:0]const u8) void {
|
||||
self.updates.append(.{
|
||||
.files = std.ArrayList(File).init(self.updates.allocator),
|
||||
.name = "compile",
|
||||
.case = .{ .Compile = {} },
|
||||
}) catch @panic("out of memory");
|
||||
addSourceFile(self, "tmp.zig", src);
|
||||
}
|
||||
};
|
||||
|
||||
pub fn addExe(
|
||||
ctx: *Cases,
|
||||
name: []const u8,
|
||||
target: CrossTarget,
|
||||
) *Case {
|
||||
ctx.cases.append(Case{
|
||||
.name = name,
|
||||
.target = target,
|
||||
.updates = std.ArrayList(Update).init(ctx.cases.allocator),
|
||||
.output_mode = .Exe,
|
||||
.deps = std.ArrayList(DepModule).init(ctx.arena),
|
||||
}) catch @panic("out of memory");
|
||||
return &ctx.cases.items[ctx.cases.items.len - 1];
|
||||
}
|
||||
|
||||
/// Adds a test case for Zig input, producing an executable
|
||||
pub fn exe(ctx: *Cases, name: []const u8, target: CrossTarget) *Case {
|
||||
return ctx.addExe(name, target);
|
||||
}
|
||||
|
||||
pub fn exeFromCompiledC(ctx: *Cases, name: []const u8, target: CrossTarget) *Case {
|
||||
var target_adjusted = target;
|
||||
target_adjusted.ofmt = .c;
|
||||
ctx.cases.append(Case{
|
||||
.name = name,
|
||||
.target = target_adjusted,
|
||||
.updates = std.ArrayList(Update).init(ctx.cases.allocator),
|
||||
.output_mode = .Exe,
|
||||
.deps = std.ArrayList(DepModule).init(ctx.arena),
|
||||
.link_libc = true,
|
||||
}) catch @panic("out of memory");
|
||||
return &ctx.cases.items[ctx.cases.items.len - 1];
|
||||
}
|
||||
|
||||
/// Adds a test case that uses the LLVM backend to emit an executable.
|
||||
/// Currently this implies linking libc, because only then we can generate a testable executable.
|
||||
pub fn exeUsingLlvmBackend(ctx: *Cases, name: []const u8, target: CrossTarget) *Case {
|
||||
ctx.cases.append(Case{
|
||||
.name = name,
|
||||
.target = target,
|
||||
.updates = std.ArrayList(Update).init(ctx.cases.allocator),
|
||||
.output_mode = .Exe,
|
||||
.deps = std.ArrayList(DepModule).init(ctx.arena),
|
||||
.backend = .llvm,
|
||||
.link_libc = true,
|
||||
}) catch @panic("out of memory");
|
||||
return &ctx.cases.items[ctx.cases.items.len - 1];
|
||||
}
|
||||
|
||||
pub fn addObj(
|
||||
ctx: *Cases,
|
||||
name: []const u8,
|
||||
target: CrossTarget,
|
||||
) *Case {
|
||||
ctx.cases.append(Case{
|
||||
.name = name,
|
||||
.target = target,
|
||||
.updates = std.ArrayList(Update).init(ctx.cases.allocator),
|
||||
.output_mode = .Obj,
|
||||
.deps = std.ArrayList(DepModule).init(ctx.arena),
|
||||
}) catch @panic("out of memory");
|
||||
return &ctx.cases.items[ctx.cases.items.len - 1];
|
||||
}
|
||||
|
||||
pub fn addTest(
|
||||
ctx: *Cases,
|
||||
name: []const u8,
|
||||
target: CrossTarget,
|
||||
) *Case {
|
||||
ctx.cases.append(Case{
|
||||
.name = name,
|
||||
.target = target,
|
||||
.updates = std.ArrayList(Update).init(ctx.cases.allocator),
|
||||
.output_mode = .Exe,
|
||||
.is_test = true,
|
||||
.deps = std.ArrayList(DepModule).init(ctx.arena),
|
||||
}) catch @panic("out of memory");
|
||||
return &ctx.cases.items[ctx.cases.items.len - 1];
|
||||
}
|
||||
|
||||
/// Adds a test case for Zig input, producing an object file.
|
||||
pub fn obj(ctx: *Cases, name: []const u8, target: CrossTarget) *Case {
|
||||
return ctx.addObj(name, target);
|
||||
}
|
||||
|
||||
/// Adds a test case for ZIR input, producing an object file.
|
||||
pub fn objZIR(ctx: *Cases, name: []const u8, target: CrossTarget) *Case {
|
||||
return ctx.addObj(name, target, .ZIR);
|
||||
}
|
||||
|
||||
/// Adds a test case for Zig or ZIR input, producing C code.
|
||||
pub fn addC(ctx: *Cases, name: []const u8, target: CrossTarget) *Case {
|
||||
var target_adjusted = target;
|
||||
target_adjusted.ofmt = std.Target.ObjectFormat.c;
|
||||
ctx.cases.append(Case{
|
||||
.name = name,
|
||||
.target = target_adjusted,
|
||||
.updates = std.ArrayList(Update).init(ctx.cases.allocator),
|
||||
.output_mode = .Obj,
|
||||
.deps = std.ArrayList(DepModule).init(ctx.arena),
|
||||
}) catch @panic("out of memory");
|
||||
return &ctx.cases.items[ctx.cases.items.len - 1];
|
||||
}
|
||||
|
||||
pub fn addCompareOutput(
|
||||
ctx: *Cases,
|
||||
name: []const u8,
|
||||
src: [:0]const u8,
|
||||
expected_stdout: []const u8,
|
||||
) void {
|
||||
ctx.addExe(name, .{}).addCompareOutput(src, expected_stdout);
|
||||
}
|
||||
|
||||
/// Adds a test case that compiles the Zig source given in `src`, executes
/// it, runs it, and tests the output against `expected_stdout`
pub fn compareOutput(
    ctx: *Cases,
    name: []const u8,
    src: [:0]const u8,
    expected_stdout: []const u8,
) void {
    // Thin forwarding wrapper kept for API symmetry with `addCompareOutput`.
    ctx.addCompareOutput(name, src, expected_stdout);
}
|
||||
|
||||
/// Registers an object-file test case that checks the lowering of `src`
/// against `result`.
pub fn addTransform(
    ctx: *Cases,
    name: []const u8,
    target: CrossTarget,
    src: [:0]const u8,
    result: [:0]const u8,
) void {
    const case = ctx.addObj(name, target);
    case.addTransform(src, result);
}
|
||||
|
||||
/// Adds a test case that compiles the Zig given in `src` to ZIR and tests
/// the ZIR against `result`
pub fn transform(
    ctx: *Cases,
    name: []const u8,
    target: CrossTarget,
    src: [:0]const u8,
    result: [:0]const u8,
) void {
    // Forwarding wrapper; all the work happens in `addTransform`.
    return ctx.addTransform(name, target, src, result);
}
|
||||
|
||||
/// Registers an object-file case for `src` that is expected to fail with
/// exactly the messages in `expected_errors`.
pub fn addError(
    ctx: *Cases,
    name: []const u8,
    target: CrossTarget,
    src: [:0]const u8,
    expected_errors: []const []const u8,
) void {
    const case = ctx.addObj(name, target);
    case.addError(src, expected_errors);
}
|
||||
|
||||
/// Adds a test case that ensures that the Zig given in `src` fails to
/// compile for the expected reasons, given in sequential order in
/// `expected_errors` in the form `:line:column: error: message`.
pub fn compileError(
    ctx: *Cases,
    name: []const u8,
    target: CrossTarget,
    src: [:0]const u8,
    expected_errors: []const []const u8,
) void {
    // Forwarding wrapper around `addError`.
    return ctx.addError(name, target, src, expected_errors);
}
|
||||
|
||||
/// Adds a test case that asserts that the Zig given in `src` compiles
/// without any errors.
pub fn addCompile(
    ctx: *Cases,
    name: []const u8,
    target: CrossTarget,
    src: [:0]const u8,
) void {
    const case = ctx.addObj(name, target);
    case.addCompile(src);
}
|
||||
|
||||
/// Adds a test for each file in the provided directory.
/// Testing strategy (TestStrategy) is inferred automatically from filenames.
/// Recurses nested directories.
///
/// Each file should include a test manifest as a contiguous block of comments at
/// the end of the file. The first line should be the test type, followed by a set of
/// key-value config values, followed by a blank line, then the expected output.
///
/// Panics (rather than returning an error) so that a broken test file aborts
/// the harness with the offending filename in the message.
pub fn addFromDir(ctx: *Cases, dir: std.fs.IterableDir) void {
    // FIX: the address-of expression was mangled to the mojibake
    // "¤t_file" (HTML entity `&curren;` for "&curren"); restored to
    // `&current_file` so the file compiles again.
    var current_file: []const u8 = "none";
    ctx.addFromDirInner(dir, &current_file) catch |err| {
        std.debug.panic("test harness failed to process file '{s}': {s}\n", .{
            current_file, @errorName(err),
        });
    };
}
|
||||
|
||||
/// Walks `iterable_dir`, groups files into incremental batches vs independent
/// cases via `TestIterator`, and registers a `Case` per (backend, target)
/// combination declared in each file's trailing manifest.
fn addFromDirInner(
    ctx: *Cases,
    iterable_dir: std.fs.IterableDir,
    /// This is kept up to date with the currently being processed file so
    /// that if any errors occur the caller knows it happened during this file.
    current_file: *[]const u8,
) !void {
    var it = try iterable_dir.walk(ctx.arena);
    var filenames = std.ArrayList([]const u8).init(ctx.arena);

    while (try it.next()) |entry| {
        if (entry.kind != .File) continue;

        // Ignore stuff such as .swp files
        switch (Compilation.classifyFileExt(entry.basename)) {
            .unknown => continue,
            else => {},
        }
        // `entry.path` is owned by the walker; duplicate it into the arena
        // so it survives past this iteration.
        try filenames.append(try ctx.arena.dupe(u8, entry.path));
    }

    // Sort filenames, so that incremental tests are contiguous and in-order
    sortTestFilenames(filenames.items);

    var test_it = TestIterator{ .filenames = filenames.items };
    while (test_it.next()) |maybe_batch| {
        const batch = maybe_batch orelse break;
        // A batch with more than one file is an incremental sequence
        // ("foo.0.zig", "foo.1.zig", ...); it is handed off to the separate
        // check-case executable rather than processed here.
        const strategy: TestStrategy = if (batch.len > 1) .incremental else .independent;
        const filename = batch[0];
        current_file.* = filename;
        if (strategy == .incremental) {
            try ctx.incremental_cases.append(.{ .base_path = filename });
            continue;
        }

        const max_file_size = 10 * 1024 * 1024;
        // Read null-terminated (sentinel 0) since Case sources are [:0]const u8.
        const src = try iterable_dir.dir.readFileAllocOptions(ctx.arena, filename, max_file_size, null, 1, 0);

        // Parse the manifest
        var manifest = try TestManifest.parse(ctx.arena, src);

        const backends = try manifest.getConfigForKeyAlloc(ctx.arena, "backend", Backend);
        const targets = try manifest.getConfigForKeyAlloc(ctx.arena, "target", CrossTarget);
        const is_test = try manifest.getConfigForKeyAssertSingle("is_test", bool);
        const output_mode = try manifest.getConfigForKeyAssertSingle("output_mode", std.builtin.OutputMode);

        // Indices (not pointers) into ctx.cases — appends below may reallocate.
        var cases = std.ArrayList(usize).init(ctx.arena);

        // Cross-product to get all possible test combinations
        for (backends) |backend| {
            for (targets) |target| {
                const next = ctx.cases.items.len;
                try ctx.cases.append(.{
                    .name = std.fs.path.stem(filename),
                    .target = target,
                    .backend = backend,
                    .updates = std.ArrayList(Cases.Update).init(ctx.cases.allocator),
                    .is_test = is_test,
                    .output_mode = output_mode,
                    .link_libc = backend == .llvm,
                    .deps = std.ArrayList(DepModule).init(ctx.cases.allocator),
                });
                try cases.append(next);
            }
        }

        // Attach the same source/expectations to every generated combination.
        for (cases.items) |case_index| {
            const case = &ctx.cases.items[case_index];
            switch (manifest.type) {
                .compile => {
                    case.addCompile(src);
                },
                .@"error" => {
                    const errors = try manifest.trailingAlloc(ctx.arena);
                    case.addError(src, errors);
                },
                .run => {
                    // Join the trailing manifest lines into the expected
                    // stdout, then drop the final '\n' that the loop added.
                    var output = std.ArrayList(u8).init(ctx.arena);
                    var trailing_it = manifest.trailing();
                    while (trailing_it.next()) |line| {
                        try output.appendSlice(line);
                        try output.append('\n');
                    }
                    if (output.items.len > 0) {
                        try output.resize(output.items.len - 1);
                    }
                    case.addCompareOutput(src, try output.toOwnedSlice());
                },
                .cli => @panic("TODO cli tests"),
            }
        }
    } else |err| {
        // make sure the current file is set to the file that produced an error
        current_file.* = test_it.currentFilename();
        return err;
    }
}
|
||||
|
||||
/// Creates an empty `Cases` context backed by the given allocators.
/// `gpa` owns the case lists; `arena` is used for per-case scratch data.
pub fn init(gpa: Allocator, arena: Allocator) Cases {
    return Cases{
        .gpa = gpa,
        .arena = arena,
        .cases = std.ArrayList(Case).init(gpa),
        .incremental_cases = std.ArrayList(IncrementalCase).init(gpa),
    };
}
|
||||
|
||||
/// Converts the collected cases into zig build steps hanging off `parent_step`.
/// Incremental sequences become invocations of the `incremental_exe`
/// (check-case) utility; single-update cases become ordinary object/library/
/// executable compile steps with the appropriate check attached.
pub fn lowerToBuildSteps(
    self: *Cases,
    b: *std.Build,
    parent_step: *std.Build.Step,
    opt_test_filter: ?[]const u8,
    cases_dir_path: []const u8,
    incremental_exe: *std.Build.CompileStep,
) void {
    // Incremental cases: one check-case subprocess per sequence, which is
    // expected to produce no output on success.
    for (self.incremental_cases.items) |incr_case| {
        if (opt_test_filter) |test_filter| {
            if (std.mem.indexOf(u8, incr_case.base_path, test_filter) == null) continue;
        }
        const case_base_path_with_dir = std.fs.path.join(b.allocator, &.{
            cases_dir_path, incr_case.base_path,
        }) catch @panic("OOM");
        const run = b.addRunArtifact(incremental_exe);
        run.setName(incr_case.base_path);
        run.addArgs(&.{
            case_base_path_with_dir,
            b.zig_exe,
        });
        run.expectStdOutEqual("");
        parent_step.dependOn(&run.step);
    }

    for (self.cases.items) |case| {
        if (case.updates.items.len != 1) continue; // handled with incremental_cases above
        // NOTE(review): this assert is redundant given the `continue` on the
        // previous line; kept as written.
        assert(case.updates.items.len == 1);
        const update = case.updates.items[0];

        if (opt_test_filter) |test_filter| {
            if (std.mem.indexOf(u8, case.name, test_filter) == null) continue;
        }

        // Materialize the case's in-memory sources as files for the build.
        const writefiles = b.addWriteFiles();
        for (update.files.items) |file| {
            writefiles.add(file.path, file.src);
        }
        // The first file is the root source file by convention.
        const root_source_file = writefiles.getFileSource(update.files.items[0].path).?;

        const artifact = switch (case.output_mode) {
            .Obj => b.addObject(.{
                .root_source_file = root_source_file,
                .name = case.name,
                .target = case.target,
                .optimize = case.optimize_mode,
            }),
            .Lib => b.addStaticLibrary(.{
                .root_source_file = root_source_file,
                .name = case.name,
                .target = case.target,
                .optimize = case.optimize_mode,
            }),
            .Exe => if (case.is_test) b.addTest(.{
                .root_source_file = root_source_file,
                .name = case.name,
                .target = case.target,
                .optimize = case.optimize_mode,
            }) else b.addExecutable(.{
                .root_source_file = root_source_file,
                .name = case.name,
                .target = case.target,
                .optimize = case.optimize_mode,
            }),
        };

        if (case.link_libc) artifact.linkLibC();

        // Pick the code-generation backend; stage1 cases are skipped entirely.
        switch (case.backend) {
            .stage1 => continue,
            .stage2 => {
                artifact.use_llvm = false;
                artifact.use_lld = false;
            },
            .llvm => {
                artifact.use_llvm = true;
            },
        }

        for (case.deps.items) |dep| {
            artifact.addAnonymousModule(dep.name, .{
                .source_file = writefiles.getFileSource(dep.path).?,
            });
        }

        // Attach the check appropriate to this update's expectation.
        switch (update.case) {
            .Compile => {
                parent_step.dependOn(&artifact.step);
            },
            .CompareObjectFile => |expected_output| {
                const check = b.addCheckFile(artifact.getOutputSource(), .{
                    .expected_exact = expected_output,
                });

                parent_step.dependOn(&check.step);
            },
            .Error => |expected_msgs| {
                // 'error' cases must declare at least one expected message;
                // zero-error checks use the 'compile' case kind instead.
                assert(expected_msgs.len != 0);
                artifact.expect_errors = expected_msgs;
                parent_step.dependOn(&artifact.step);
            },
            .Execution => |expected_stdout| {
                if (case.is_test) {
                    parent_step.dependOn(&artifact.step);
                } else {
                    const run = b.addRunArtifact(artifact);
                    // Allow cross-compiled binaries to be skipped when no
                    // suitable foreign-executable runner is available.
                    run.skip_foreign_checks = true;
                    run.expectStdOutEqual(expected_stdout);

                    parent_step.dependOn(&run.step);
                }
            },
            .Header => @panic("TODO"),
        }
    }
}
|
||||
|
||||
/// Sort test filenames in-place, so that incremental test cases ("foo.0.zig",
/// "foo.1.zig", etc.) are contiguous and appear in numerical order.
fn sortTestFilenames(filenames: [][]const u8) void {
    const Context = struct {
        pub fn lessThan(_: @This(), a: []const u8, b: []const u8) bool {
            const a_parts = getTestFileNameParts(a);
            const b_parts = getTestFileNameParts(b);

            // Sort "<base_name>.X.<file_ext>" based on "<base_name>" and "<file_ext>" first
            return switch (std.mem.order(u8, a_parts.base_name, b_parts.base_name)) {
                .lt => true,
                .gt => false,
                .eq => switch (std.mem.order(u8, a_parts.file_ext, b_parts.file_ext)) {
                    .lt => true,
                    .gt => false,
                    .eq => {
                        // a and b differ only in their ".X" part

                        // Sort "<base_name>.<file_ext>" before any "<base_name>.X.<file_ext>"
                        if (a_parts.test_index) |a_index| {
                            if (b_parts.test_index) |b_index| {
                                // Make sure that incremental tests appear in linear order
                                return a_index < b_index;
                            } else {
                                // a has an index, b does not: b sorts first.
                                return false;
                            }
                        } else {
                            // a has no index: a sorts first iff b has one.
                            return b_parts.test_index != null;
                        }
                    },
                },
            };
        }
    };
    std.sort.sort([]const u8, filenames, Context{}, Context.lessThan);
}
|
||||
|
||||
/// Iterates a set of filenames extracting batches that are either incremental
/// ("foo.0.zig", "foo.1.zig", etc.) or independent ("foo.zig", "bar.zig", etc.).
/// Assumes filenames are sorted.
const TestIterator = struct {
    // [start, end) delimits the batch most recently returned by `next`.
    start: usize = 0,
    end: usize = 0,
    filenames: []const []const u8,
    /// reset on each call to `next`
    index: usize = 0,

    const Error = error{InvalidIncrementalTestIndex};

    /// Returns the next batch of filenames, or null when exhausted.
    fn next(it: *TestIterator) Error!?[]const []const u8 {
        try it.nextInner();
        if (it.start == it.end) return null;
        return it.filenames[it.start..it.end];
    }

    /// Advances [start, end) to cover the next batch; leaves start == end at
    /// the end of input. Validates that incremental indices are consecutive.
    fn nextInner(it: *TestIterator) Error!void {
        it.start = it.end;
        if (it.end == it.filenames.len) return;
        if (it.end + 1 == it.filenames.len) {
            // Only one filename left: it forms a batch by itself.
            it.end += 1;
            return;
        }

        const remaining = it.filenames[it.end..];
        it.index = 0;
        while (it.index < remaining.len - 1) : (it.index += 1) {
            // First, check if this file is part of an incremental update sequence
            // Split filename into "<base_name>.<index>.<file_ext>"
            const prev_parts = getTestFileNameParts(remaining[it.index]);
            const new_parts = getTestFileNameParts(remaining[it.index + 1]);

            // If base_name and file_ext match, these files are in the same test sequence
            // and the new one should be the incremented version of the previous test
            if (std.mem.eql(u8, prev_parts.base_name, new_parts.base_name) and
                std.mem.eql(u8, prev_parts.file_ext, new_parts.file_ext))
            {
                // This is "foo.X.zig" followed by "foo.Y.zig". Make sure that X = Y + 1
                if (prev_parts.test_index == null)
                    return error.InvalidIncrementalTestIndex;
                if (new_parts.test_index == null)
                    return error.InvalidIncrementalTestIndex;
                if (new_parts.test_index.? != prev_parts.test_index.? + 1)
                    return error.InvalidIncrementalTestIndex;
            } else {
                // This is not the same test sequence, so the new file must be the first file
                // in a new sequence ("*.0.zig") or an independent test file ("*.zig")
                if (new_parts.test_index != null and new_parts.test_index.? != 0)
                    return error.InvalidIncrementalTestIndex;

                it.end += it.index + 1;
                break;
            }
        } else {
            // Loop ran to completion: everything remaining is one sequence.
            it.end += remaining.len;
        }
    }

    /// In the event of an `error.InvalidIncrementalTestIndex`, this function can
    /// be used to find the current filename that was being processed.
    /// Asserts the iterator hasn't reached the end.
    fn currentFilename(it: TestIterator) []const u8 {
        assert(it.end != it.filenames.len);
        const remaining = it.filenames[it.end..];
        return remaining[it.index + 1];
    }
};
|
||||
|
||||
/// For a filename in the format "<filename>.X.<ext>" or "<filename>.<ext>", returns
/// "<filename>", "<ext>" and X parsed as a decimal number. If X is not present, or
/// cannot be parsed as a decimal number, it is treated as part of <filename>
fn getTestFileNameParts(name: []const u8) struct {
    base_name: []const u8,
    file_ext: []const u8,
    test_index: ?usize,
} {
    // Peel off the final ".<ext>", then look for a ".X" component before it.
    const ext_with_dot = std.fs.path.extension(name);
    const without_ext = name[0 .. name.len - ext_with_dot.len];
    const index_with_dot = std.fs.path.extension(without_ext);

    // A non-numeric ".X" (or no ".X" at all) yields a null test index.
    var parsed_index: ?usize = null;
    if (index_with_dot.len > 0) {
        parsed_index = std.fmt.parseInt(usize, index_with_dot[1..], 10) catch null;
    }

    // The ".X" component only shrinks the base name when it actually parsed.
    const base_len = if (parsed_index != null)
        without_ext.len - index_with_dot.len
    else
        without_ext.len;

    return .{
        .base_name = name[0..base_len],
        .file_ext = if (ext_with_dot.len > 0) ext_with_dot[1..] else ext_with_dot,
        .test_index = parsed_index,
    };
}
|
||||
|
||||
/// How a batch of test files should be executed by the harness.
const TestStrategy = enum {
    /// Execute tests as independent compilations, unless they are explicitly
    /// incremental ("foo.0.zig", "foo.1.zig", etc.)
    independent,
    /// Execute all tests as incremental updates to a single compilation. Explicitly
    /// incremental tests ("foo.0.zig", "foo.1.zig", etc.) still execute in order
    incremental,
};
|
||||
|
||||
/// Default config values for known test manifest key-value pairings.
/// Currently handled defaults are:
/// * backend
/// * target
/// * output_mode
/// * is_test
const TestManifestConfigDefaults = struct {
    /// Asserts if the key doesn't exist - yep, it's an oversight alright.
    fn get(@"type": TestManifest.Type, key: []const u8) []const u8 {
        if (std.mem.eql(u8, key, "backend")) {
            return "stage2";
        } else if (std.mem.eql(u8, key, "target")) {
            // Error cases only need to run on the host.
            if (@"type" == .@"error") {
                return "native";
            }
            // Build the comma-separated default target list at compile time.
            comptime {
                var defaults: []const u8 = "";
                // TODO should we only return "mainstream" targets by default here?
                // TODO we should also specify ABIs explicitly as the backends are
                // getting more and more complete
                // Linux
                inline for (&[_][]const u8{ "x86_64", "arm", "aarch64" }) |arch| {
                    defaults = defaults ++ arch ++ "-linux" ++ ",";
                }
                // macOS
                inline for (&[_][]const u8{ "x86_64", "aarch64" }) |arch| {
                    defaults = defaults ++ arch ++ "-macos" ++ ",";
                }
                // Windows
                defaults = defaults ++ "x86_64-windows" ++ ",";
                // Wasm
                defaults = defaults ++ "wasm32-wasi";
                return defaults;
            }
        } else if (std.mem.eql(u8, key, "output_mode")) {
            return switch (@"type") {
                .@"error" => "Obj",
                .run => "Exe",
                .compile => "Obj",
                .cli => @panic("TODO test harness for CLI tests"),
            };
        } else if (std.mem.eql(u8, key, "is_test")) {
            // "0" parses to false via the u1-based bool parser.
            return "0";
        } else unreachable;
    }
};
|
||||
|
||||
/// Manifest syntax example:
/// (see https://github.com/ziglang/zig/issues/11288)
///
/// error
/// backend=stage1,stage2
/// output_mode=exe
///
/// :3:19: error: foo
///
/// run
/// target=x86_64-linux,aarch64-macos
///
/// I am expected stdout! Hello!
///
/// cli
///
/// build test
const TestManifest = struct {
    type: Type,
    config_map: std.StringHashMap([]const u8),
    // Raw bytes of the manifest after the key=value block (expected output);
    // each line still carries its leading "//".
    trailing_bytes: []const u8 = "",

    const Type = enum {
        @"error",
        run,
        cli,
        compile,
    };

    /// Iterates the trailing (expected output) lines, stripping the leading
    /// "//" and surrounding spaces/tabs from each.
    const TrailingIterator = struct {
        inner: std.mem.TokenIterator(u8),

        fn next(self: *TrailingIterator) ?[]const u8 {
            const next_inner = self.inner.next() orelse return null;
            // [2..] skips the "//" comment prefix.
            return std.mem.trim(u8, next_inner[2..], " \t");
        }
    };

    /// Iterator over comma-separated config values, parsing each element
    /// into `T` with the type's default parser.
    fn ConfigValueIterator(comptime T: type) type {
        return struct {
            inner: std.mem.SplitIterator(u8),

            fn next(self: *@This()) !?T {
                const next_raw = self.inner.next() orelse return null;
                const parseFn = getDefaultParser(T);
                return try parseFn(next_raw);
            }
        };
    }

    /// Locates and parses the manifest at the end of `bytes`.
    /// Errors: MissingTestManifest, MissingTestCaseType, UnknownTestCaseType,
    /// MissingValuesForConfig (plus hash-map allocation failure).
    fn parse(arena: Allocator, bytes: []const u8) !TestManifest {
        // The manifest is the last contiguous block of comments in the file
        // We scan for the beginning by searching backward for the first non-empty line that does not start with "//"
        var start: ?usize = null;
        var end: usize = bytes.len;
        if (bytes.len > 0) {
            var cursor: usize = bytes.len - 1;
            while (true) {
                // Move to beginning of line
                while (cursor > 0 and bytes[cursor - 1] != '\n') cursor -= 1;

                if (std.mem.startsWith(u8, bytes[cursor..], "//")) {
                    start = cursor; // Contiguous comment line, include in manifest
                } else {
                    if (start != null) break; // Encountered non-comment line, end of manifest

                    // We ignore all-whitespace lines following the comment block, but anything else
                    // means that there is no manifest present.
                    if (std.mem.trim(u8, bytes[cursor..end], " \r\n\t").len == 0) {
                        end = cursor;
                    } else break; // If it's not whitespace, there is no manifest
                }

                // Move to previous line
                if (cursor != 0) cursor -= 1 else break;
            }
        }

        const actual_start = start orelse return error.MissingTestManifest;
        const manifest_bytes = bytes[actual_start..end];

        var it = std.mem.tokenize(u8, manifest_bytes, "\r\n");

        // First line is the test type
        const tt: Type = blk: {
            const line = it.next() orelse return error.MissingTestCaseType;
            const raw = std.mem.trim(u8, line[2..], " \t");
            if (std.mem.eql(u8, raw, "error")) {
                break :blk .@"error";
            } else if (std.mem.eql(u8, raw, "run")) {
                break :blk .run;
            } else if (std.mem.eql(u8, raw, "cli")) {
                break :blk .cli;
            } else if (std.mem.eql(u8, raw, "compile")) {
                break :blk .compile;
            } else {
                std.log.warn("unknown test case type requested: {s}", .{raw});
                return error.UnknownTestCaseType;
            }
        };

        var manifest: TestManifest = .{
            .type = tt,
            .config_map = std.StringHashMap([]const u8).init(arena),
        };

        // Any subsequent line until a blank comment line is key=value(s) pair
        while (it.next()) |line| {
            const trimmed = std.mem.trim(u8, line[2..], " \t");
            if (trimmed.len == 0) break;

            // Parse key=value(s)
            var kv_it = std.mem.split(u8, trimmed, "=");
            const key = kv_it.first();
            try manifest.config_map.putNoClobber(key, kv_it.next() orelse return error.MissingValuesForConfig);
        }

        // Finally, trailing is expected output
        manifest.trailing_bytes = manifest_bytes[it.index..];

        return manifest;
    }

    /// Returns an iterator over the values for `key`, falling back to
    /// `TestManifestConfigDefaults` when the manifest did not set the key.
    fn getConfigForKey(
        self: TestManifest,
        key: []const u8,
        comptime T: type,
    ) ConfigValueIterator(T) {
        const bytes = self.config_map.get(key) orelse TestManifestConfigDefaults.get(self.type, key);
        return ConfigValueIterator(T){
            .inner = std.mem.split(u8, bytes, ","),
        };
    }

    /// Collects all values for `key` into a caller-owned slice.
    fn getConfigForKeyAlloc(
        self: TestManifest,
        allocator: Allocator,
        key: []const u8,
        comptime T: type,
    ) ![]const T {
        var out = std.ArrayList(T).init(allocator);
        defer out.deinit();
        var it = self.getConfigForKey(key, T);
        while (try it.next()) |item| {
            try out.append(item);
        }
        return try out.toOwnedSlice();
    }

    /// Returns the single value for `key`; asserts exactly one value exists.
    fn getConfigForKeyAssertSingle(self: TestManifest, key: []const u8, comptime T: type) !T {
        var it = self.getConfigForKey(key, T);
        const res = (try it.next()) orelse unreachable;
        assert((try it.next()) == null);
        return res;
    }

    /// Iterator over the expected-output lines (comment prefix stripped).
    fn trailing(self: TestManifest) TrailingIterator {
        return .{
            .inner = std.mem.tokenize(u8, self.trailing_bytes, "\r\n"),
        };
    }

    /// Collects the trailing lines into a caller-owned slice of slices.
    fn trailingAlloc(self: TestManifest, allocator: Allocator) error{OutOfMemory}![]const []const u8 {
        var out = std.ArrayList([]const u8).init(allocator);
        defer out.deinit();
        var it = self.trailing();
        while (it.next()) |line| {
            try out.append(line);
        }
        return try out.toOwnedSlice();
    }

    fn ParseFn(comptime T: type) type {
        return fn ([]const u8) anyerror!T;
    }

    /// Selects a string-to-T parser at compile time: CrossTarget uses its own
    /// parse, ints use parseInt, bools parse "0"/"1" via u1, enums use
    /// stringToEnum; anything else is a compile error.
    fn getDefaultParser(comptime T: type) ParseFn(T) {
        if (T == CrossTarget) return struct {
            fn parse(str: []const u8) anyerror!T {
                var opts = CrossTarget.ParseOptions{
                    .arch_os_abi = str,
                };
                return try CrossTarget.parse(opts);
            }
        }.parse;

        switch (@typeInfo(T)) {
            .Int => return struct {
                fn parse(str: []const u8) anyerror!T {
                    return try std.fmt.parseInt(T, str, 0);
                }
            }.parse,
            .Bool => return struct {
                fn parse(str: []const u8) anyerror!T {
                    const as_int = try std.fmt.parseInt(u1, str, 0);
                    return as_int > 0;
                }
            }.parse,
            .Enum => return struct {
                fn parse(str: []const u8) anyerror!T {
                    return std.meta.stringToEnum(T, str) orelse {
                        std.log.err("unknown enum variant for {s}: {s}", .{ @typeName(T), str });
                        return error.UnknownEnumVariant;
                    };
                }
            }.parse,
            .Struct => @compileError("no default parser for " ++ @typeName(T)),
            else => @compileError("no default parser for " ++ @typeName(T)),
        }
    }
};
|
||||
|
||||
const Cases = @This();
|
||||
const builtin = @import("builtin");
|
||||
const std = @import("std");
|
||||
const assert = std.debug.assert;
|
||||
const Allocator = std.mem.Allocator;
|
||||
const CrossTarget = std.zig.CrossTarget;
|
||||
const Compilation = @import("../../src/Compilation.zig");
|
||||
const zig_h = @import("../../src/link.zig").File.C.zig_h;
|
||||
const introspect = @import("../../src/introspect.zig");
|
||||
const ThreadPool = std.Thread.Pool;
|
||||
const WaitGroup = std.Thread.WaitGroup;
|
||||
const build_options = @import("build_options");
|
||||
const Package = @import("../../src/Package.zig");
|
||||
|
||||
/// Standard-library option overrides for the check-case executable:
/// suppress all log output below the error level.
pub const std_options = struct {
    pub const log_level: std.log.Level = .err;
};

// Fallback general-purpose allocator, used by `main` when libc is not linked
// or when forced via build options. The stack-trace frame count for leak
// reports is configured by the build system.
var general_purpose_allocator = std.heap.GeneralPurposeAllocator(.{
    .stack_trace_frames = build_options.mem_leak_frames,
}){};
|
||||
|
||||
// TODO: instead of embedding the compiler in this process, spawn the compiler
// as a sub-process and communicate the updates using the compiler protocol.
/// Entry point of the check-case utility.
/// Usage: check-case <case_file_path> <zig_exe_path>
/// Builds the set of updates for one (possibly incremental) test case and
/// runs them via `runCases`.
pub fn main() !void {
    const use_gpa = build_options.force_gpa or !builtin.link_libc;
    const gpa = gpa: {
        if (use_gpa) {
            break :gpa general_purpose_allocator.allocator();
        }
        // We would prefer to use raw libc allocator here, but cannot
        // use it if it won't support the alignment we need.
        if (@alignOf(std.c.max_align_t) < @alignOf(i128)) {
            break :gpa std.heap.c_allocator;
        }
        break :gpa std.heap.raw_c_allocator;
    };

    var single_threaded_arena = std.heap.ArenaAllocator.init(gpa);
    defer single_threaded_arena.deinit();

    var thread_safe_arena: std.heap.ThreadSafeAllocator = .{
        .child_allocator = single_threaded_arena.allocator(),
    };
    const arena = thread_safe_arena.allocator();

    // args[1] = path to the (first) case file, args[2] = path to zig.
    const args = try std.process.argsAlloc(arena);
    const case_file_path = args[1];
    const zig_exe_path = args[2];

    var filenames = std.ArrayList([]const u8).init(arena);

    const case_dirname = std.fs.path.dirname(case_file_path).?;
    var iterable_dir = try std.fs.cwd().openIterableDir(case_dirname, .{});
    defer iterable_dir.close();

    if (std.mem.endsWith(u8, case_file_path, ".0.zig")) {
        // Incremental sequence: gather every sibling "foo.N.zig" sharing the stem.
        const stem = case_file_path[case_dirname.len + 1 .. case_file_path.len - "0.zig".len];
        var it = iterable_dir.iterate();
        while (try it.next()) |entry| {
            if (entry.kind != .File) continue;
            if (!std.mem.startsWith(u8, entry.name, stem)) continue;
            try filenames.append(try std.fs.path.join(arena, &.{ case_dirname, entry.name }));
        }
    } else {
        try filenames.append(case_file_path);
    }

    if (filenames.items.len == 0) {
        std.debug.print("failed to find the input source file(s) from '{s}'\n", .{
            case_file_path,
        });
        std.process.exit(1);
    }

    // Sort filenames, so that incremental tests are contiguous and in-order
    sortTestFilenames(filenames.items);

    var ctx = Cases.init(gpa, arena);

    var test_it = TestIterator{ .filenames = filenames.items };
    while (test_it.next()) |maybe_batch| {
        const batch = maybe_batch orelse break;
        const strategy: TestStrategy = if (batch.len > 1) .incremental else .independent;
        var cases = std.ArrayList(usize).init(arena);

        for (batch) |filename| {
            const max_file_size = 10 * 1024 * 1024;
            // Read null-terminated (sentinel 0) since Case sources are [:0]const u8.
            const src = try iterable_dir.dir.readFileAllocOptions(arena, filename, max_file_size, null, 1, 0);

            // Parse the manifest
            var manifest = try TestManifest.parse(arena, src);

            // Only the first file in a batch creates the Case entries; later
            // files in an incremental batch append further updates to them.
            if (cases.items.len == 0) {
                const backends = try manifest.getConfigForKeyAlloc(arena, "backend", Backend);
                const targets = try manifest.getConfigForKeyAlloc(arena, "target", CrossTarget);
                const is_test = try manifest.getConfigForKeyAssertSingle("is_test", bool);
                const output_mode = try manifest.getConfigForKeyAssertSingle("output_mode", std.builtin.OutputMode);

                // Cross-product to get all possible test combinations
                for (backends) |backend| {
                    for (targets) |target| {
                        const next = ctx.cases.items.len;
                        try ctx.cases.append(.{
                            .name = std.fs.path.stem(filename),
                            .target = target,
                            .backend = backend,
                            .updates = std.ArrayList(Cases.Update).init(ctx.cases.allocator),
                            .is_test = is_test,
                            .output_mode = output_mode,
                            .link_libc = backend == .llvm,
                            .deps = std.ArrayList(DepModule).init(ctx.cases.allocator),
                        });
                        try cases.append(next);
                    }
                }
            }

            for (cases.items) |case_index| {
                const case = &ctx.cases.items[case_index];
                switch (manifest.type) {
                    .compile => {
                        case.addCompile(src);
                    },
                    .@"error" => {
                        const errors = try manifest.trailingAlloc(arena);
                        switch (strategy) {
                            .independent => {
                                case.addError(src, errors);
                            },
                            .incremental => {
                                case.addErrorNamed("update", src, errors);
                            },
                        }
                    },
                    .run => {
                        // Join trailing manifest lines into expected stdout,
                        // then drop the final '\n' added by the loop.
                        var output = std.ArrayList(u8).init(arena);
                        var trailing_it = manifest.trailing();
                        while (trailing_it.next()) |line| {
                            try output.appendSlice(line);
                            try output.append('\n');
                        }
                        if (output.items.len > 0) {
                            try output.resize(output.items.len - 1);
                        }
                        case.addCompareOutput(src, try output.toOwnedSlice());
                    },
                    .cli => @panic("TODO cli tests"),
                }
            }
        }
    } else |err| {
        return err;
    }

    return runCases(&ctx, zig_exe_path);
}
|
||||
|
||||
/// Executes every collected case in-process via `runOneCase`, sharing one
/// thread pool and one global cache directory across all of them.
fn runCases(self: *Cases, zig_exe_path: []const u8) !void {
    const host = try std.zig.system.NativeTargetInfo.detect(.{});

    var progress = std.Progress{};
    const root_node = progress.start("compiler", self.cases.items.len);
    // Disable terminal rendering; progress is tracked but not drawn.
    progress.terminal = null;
    defer root_node.end();

    var zig_lib_directory = try introspect.findZigLibDir(self.gpa);
    defer zig_lib_directory.handle.close();
    defer self.gpa.free(zig_lib_directory.path.?);

    var aux_thread_pool: ThreadPool = undefined;
    try aux_thread_pool.init(.{ .allocator = self.gpa });
    defer aux_thread_pool.deinit();

    // Use the same global cache dir for all the tests, such that we for example don't have to
    // rebuild musl libc for every case (when LLVM backend is enabled).
    var global_tmp = std.testing.tmpDir(.{});
    defer global_tmp.cleanup();

    var cache_dir = try global_tmp.dir.makeOpenPath("zig-cache", .{});
    defer cache_dir.close();
    const tmp_dir_path = try std.fs.path.join(self.gpa, &[_][]const u8{ ".", "zig-cache", "tmp", &global_tmp.sub_path });
    defer self.gpa.free(tmp_dir_path);

    const global_cache_directory: Compilation.Directory = .{
        .handle = cache_dir,
        .path = try std.fs.path.join(self.gpa, &[_][]const u8{ tmp_dir_path, "zig-cache" }),
    };
    defer self.gpa.free(global_cache_directory.path.?);

    {
        for (self.cases.items) |*case| {
            // Optionally skip cases whose target doesn't match the host.
            if (build_options.skip_non_native) {
                if (case.target.getCpuArch() != builtin.cpu.arch)
                    continue;
                if (case.target.getObjectFormat() != builtin.object_format)
                    continue;
            }

            // Skip tests that require LLVM backend when it is not available
            if (!build_options.have_llvm and case.backend == .llvm)
                continue;

            // stage1 cases are filtered out before reaching this point.
            assert(case.backend != .stage1);

            if (build_options.test_filter) |test_filter| {
                if (std.mem.indexOf(u8, case.name, test_filter) == null) continue;
            }

            var prg_node = root_node.start(case.name, case.updates.items.len);
            prg_node.activate();
            defer prg_node.end();

            try runOneCase(
                self.gpa,
                &prg_node,
                case.*,
                zig_lib_directory,
                zig_exe_path,
                &aux_thread_pool,
                global_cache_directory,
                host,
            );
        }
    }
}
|
||||
|
||||
/// Runs a single test case: creates an isolated temporary directory and
/// `Compilation`, then applies each of the case's incremental updates in
/// order, checking the expected outcome of every update.
///
/// Per-update outcomes (the `update.case` union):
///   .Header            — compare the emitted C header against expected text.
///   .CompareObjectFile — compare the emitted binary bytes against expected text.
///   .Compile           — success means merely compiling without errors.
///   .Error             — compare rendered compile errors against expected lines.
///   .Execution         — run the produced binary (possibly via an external
///                        executor such as QEMU/Wine/wasmtime/Rosetta/Darling)
///                        and compare its stdout.
/// Returns an error on the first mismatched or failed update.
fn runOneCase(
    allocator: Allocator,
    root_node: *std.Progress.Node,
    case: Case,
    zig_lib_directory: Compilation.Directory,
    zig_exe_path: []const u8,
    thread_pool: *ThreadPool,
    global_cache_directory: Compilation.Directory,
    host: std.zig.system.NativeTargetInfo,
) !void {
    const tmp_src_path = "tmp.zig";
    const enable_rosetta = build_options.enable_rosetta;
    const enable_qemu = build_options.enable_qemu;
    const enable_wine = build_options.enable_wine;
    const enable_wasmtime = build_options.enable_wasmtime;
    const enable_darling = build_options.enable_darling;
    const glibc_runtimes_dir: ?[]const u8 = build_options.glibc_runtimes_dir;

    const target_info = try std.zig.system.NativeTargetInfo.detect(case.target);
    const target = target_info.target;

    // Arena for all per-case scratch allocations; freed wholesale on return.
    var arena_allocator = std.heap.ArenaAllocator.init(allocator);
    defer arena_allocator.deinit();
    const arena = arena_allocator.allocator();

    // Each case gets its own temp dir and local cache so cases are isolated.
    var tmp = std.testing.tmpDir(.{});
    defer tmp.cleanup();

    var cache_dir = try tmp.dir.makeOpenPath("zig-cache", .{});
    defer cache_dir.close();

    const tmp_dir_path = try std.fs.path.join(
        arena,
        &[_][]const u8{ ".", "zig-cache", "tmp", &tmp.sub_path },
    );
    const local_cache_path = try std.fs.path.join(
        arena,
        &[_][]const u8{ tmp_dir_path, "zig-cache" },
    );

    const zig_cache_directory: Compilation.Directory = .{
        .handle = cache_dir,
        .path = local_cache_path,
    };

    var main_pkg: Package = .{
        .root_src_directory = .{ .path = tmp_dir_path, .handle = tmp.dir },
        .root_src_path = tmp_src_path,
    };
    defer {
        // main_pkg.table owns both its keys and the dependency packages.
        var it = main_pkg.table.iterator();
        while (it.next()) |kv| {
            allocator.free(kv.key_ptr.*);
            kv.value_ptr.*.destroy(allocator);
        }
        main_pkg.table.deinit(allocator);
    }

    // Register the case's declared package dependencies on the main package.
    for (case.deps.items) |dep| {
        var pkg = try Package.create(
            allocator,
            tmp_dir_path,
            dep.path,
        );
        errdefer pkg.destroy(allocator);
        try main_pkg.add(allocator, dep.name, pkg);
    }

    const bin_name = try std.zig.binNameAlloc(arena, .{
        .root_name = "test_case",
        .target = target,
        .output_mode = case.output_mode,
    });

    const emit_directory: Compilation.Directory = .{
        .path = tmp_dir_path,
        .handle = tmp.dir,
    };
    const emit_bin: Compilation.EmitLoc = .{
        .directory = emit_directory,
        .basename = bin_name,
    };
    // Only emit a C header when the case asks for one (.Header checks).
    const emit_h: ?Compilation.EmitLoc = if (case.emit_h) .{
        .directory = emit_directory,
        .basename = "test_case.h",
    } else null;
    const use_llvm: bool = switch (case.backend) {
        .llvm => true,
        else => false,
    };
    // One Compilation is reused across all incremental updates of this case.
    const comp = try Compilation.create(allocator, .{
        .local_cache_directory = zig_cache_directory,
        .global_cache_directory = global_cache_directory,
        .zig_lib_directory = zig_lib_directory,
        .thread_pool = thread_pool,
        .root_name = "test_case",
        .target = target,
        // TODO: support tests for object file building, and library builds
        // and linking. This will require a rework to support multi-file
        // tests.
        .output_mode = case.output_mode,
        .is_test = case.is_test,
        .optimize_mode = case.optimize_mode,
        .emit_bin = emit_bin,
        .emit_h = emit_h,
        .main_pkg = &main_pkg,
        .keep_source_files_loaded = true,
        .is_native_os = case.target.isNativeOs(),
        .is_native_abi = case.target.isNativeAbi(),
        .dynamic_linker = target_info.dynamic_linker.get(),
        .link_libc = case.link_libc,
        .use_llvm = use_llvm,
        .self_exe_path = zig_exe_path,
        // TODO instead of turning off color, pass in a std.Progress.Node
        .color = .off,
        .reference_trace = 0,
        // TODO: force self-hosted linkers with stage2 backend to avoid LLD creeping in
        // until the auto-select mechanism deems them worthy
        .use_lld = switch (case.backend) {
            .stage2 => false,
            else => null,
        },
    });
    defer comp.destroy();

    // `continue :update` anywhere below means "this update passes; move on".
    update: for (case.updates.items, 0..) |update, update_index| {
        var update_node = root_node.start(update.name, 3);
        update_node.activate();
        defer update_node.end();

        // Phase 1: write this update's source files into the temp dir.
        var sync_node = update_node.start("write", 0);
        sync_node.activate();
        for (update.files.items) |file| {
            try tmp.dir.writeFile(file.path, file.src);
        }
        sync_node.end();

        // Phase 2: run the (incremental) compilation.
        var module_node = update_node.start("parse/analysis/codegen", 0);
        module_node.activate();
        try comp.makeBinFileWritable();
        try comp.update(&module_node);
        module_node.end();

        // Any compile errors are fatal unless this update expects errors.
        if (update.case != .Error) {
            var all_errors = try comp.getAllErrorsAlloc();
            defer all_errors.deinit(allocator);
            if (all_errors.errorMessageCount() > 0) {
                all_errors.renderToStdErr(.{
                    .ttyconf = std.debug.detectTTYConfig(std.io.getStdErr()),
                });
                // TODO print generated C code
                return error.UnexpectedCompileErrors;
            }
        }

        // Phase 3: check the update's expected outcome.
        switch (update.case) {
            .Header => |expected_output| {
                var file = try tmp.dir.openFile("test_case.h", .{ .mode = .read_only });
                defer file.close();
                const out = try file.reader().readAllAlloc(arena, 5 * 1024 * 1024);

                try std.testing.expectEqualStrings(expected_output, out);
            },
            .CompareObjectFile => |expected_output| {
                var file = try tmp.dir.openFile(bin_name, .{ .mode = .read_only });
                defer file.close();
                const out = try file.reader().readAllAlloc(arena, 5 * 1024 * 1024);

                try std.testing.expectEqualStrings(expected_output, out);
            },
            // Compiling without errors (checked above) is the whole test.
            .Compile => {},
            .Error => |expected_errors| {
                var test_node = update_node.start("assert", 0);
                test_node.activate();
                defer test_node.end();

                var error_bundle = try comp.getAllErrorsAlloc();
                defer error_bundle.deinit(allocator);

                if (error_bundle.errorMessageCount() == 0) {
                    return error.ExpectedCompilationErrors;
                }

                var actual_stderr = std.ArrayList(u8).init(arena);
                try error_bundle.renderToWriter(.{
                    .ttyconf = .no_color,
                    .include_reference_trace = false,
                    .include_source_line = false,
                }, actual_stderr.writer());

                // Render the expected lines into a string that we can compare verbatim.
                // Each expected line is matched "ends-with" against the
                // corresponding actual line (in order); on a match the actual
                // line is substituted so that a final exact string comparison
                // both validates order and produces a readable diff on failure.
                var expected_generated = std.ArrayList(u8).init(arena);

                var actual_line_it = std.mem.split(u8, actual_stderr.items, "\n");
                for (expected_errors) |expect_line| {
                    const actual_line = actual_line_it.next() orelse {
                        try expected_generated.appendSlice(expect_line);
                        try expected_generated.append('\n');
                        continue;
                    };
                    if (std.mem.endsWith(u8, actual_line, expect_line)) {
                        try expected_generated.appendSlice(actual_line);
                        try expected_generated.append('\n');
                        continue;
                    }
                    // ":?:?: " is a wildcard source location in expected text.
                    if (std.mem.startsWith(u8, expect_line, ":?:?: ")) {
                        if (std.mem.endsWith(u8, actual_line, expect_line[":?:?: ".len..])) {
                            try expected_generated.appendSlice(actual_line);
                            try expected_generated.append('\n');
                            continue;
                        }
                    }
                    try expected_generated.appendSlice(expect_line);
                    try expected_generated.append('\n');
                }

                try std.testing.expectEqualStrings(expected_generated.items, actual_stderr.items);
            },
            .Execution => |expected_stdout| {
                if (!std.process.can_spawn) {
                    std.debug.print("Unable to spawn child processes on {s}, skipping test.\n", .{@tagName(builtin.os.tag)});
                    continue :update; // Pass test.
                }

                update_node.setEstimatedTotalItems(4);

                var argv = std.ArrayList([]const u8).init(allocator);
                defer argv.deinit();

                // Build the command line (possibly wrapping the binary in an
                // external executor) and spawn it; `break :x` yields the
                // ChildProcess.exec result.
                var exec_result = x: {
                    var exec_node = update_node.start("execute", 0);
                    exec_node.activate();
                    defer exec_node.end();

                    // We go out of our way here to use the unique temporary directory name in
                    // the exe_path so that it makes its way into the cache hash, avoiding
                    // cache collisions from multiple threads doing `zig run` at the same time
                    // on the same test_case.c input filename.
                    const ss = std.fs.path.sep_str;
                    const exe_path = try std.fmt.allocPrint(
                        arena,
                        ".." ++ ss ++ "{s}" ++ ss ++ "{s}",
                        .{ &tmp.sub_path, bin_name },
                    );
                    if (case.target.ofmt != null and case.target.ofmt.? == .c) {
                        if (host.getExternalExecutor(target_info, .{ .link_libc = true }) != .native) {
                            // We wouldn't be able to run the compiled C code.
                            continue :update; // Pass test.
                        }
                        // C backend output: compile and run it via `zig run`.
                        try argv.appendSlice(&[_][]const u8{
                            zig_exe_path,
                            "run",
                            "-cflags",
                            "-std=c99",
                            "-pedantic",
                            "-Werror",
                            "-Wno-incompatible-library-redeclaration", // https://github.com/ziglang/zig/issues/875
                            "--",
                            "-lc",
                            exe_path,
                        });
                        if (zig_lib_directory.path) |p| {
                            try argv.appendSlice(&.{ "-I", p });
                        }
                    } else switch (host.getExternalExecutor(target_info, .{ .link_libc = case.link_libc })) {
                        .native => {
                            if (case.backend == .stage2 and case.target.getCpuArch() == .arm) {
                                // https://github.com/ziglang/zig/issues/13623
                                continue :update; // Pass test.
                            }
                            try argv.append(exe_path);
                        },
                        .bad_dl, .bad_os_or_cpu => continue :update, // Pass test.

                        .rosetta => if (enable_rosetta) {
                            try argv.append(exe_path);
                        } else {
                            continue :update; // Rosetta not available, pass test.
                        },

                        .qemu => |qemu_bin_name| if (enable_qemu) {
                            const need_cross_glibc = target.isGnuLibC() and case.link_libc;
                            const glibc_dir_arg: ?[]const u8 = if (need_cross_glibc)
                                glibc_runtimes_dir orelse continue :update // glibc dir not available; pass test
                            else
                                null;
                            try argv.append(qemu_bin_name);
                            if (glibc_dir_arg) |dir| {
                                const linux_triple = try target.linuxTriple(arena);
                                const full_dir = try std.fs.path.join(arena, &[_][]const u8{
                                    dir,
                                    linux_triple,
                                });

                                try argv.append("-L");
                                try argv.append(full_dir);
                            }
                            try argv.append(exe_path);
                        } else {
                            continue :update; // QEMU not available; pass test.
                        },

                        .wine => |wine_bin_name| if (enable_wine) {
                            try argv.append(wine_bin_name);
                            try argv.append(exe_path);
                        } else {
                            continue :update; // Wine not available; pass test.
                        },

                        .wasmtime => |wasmtime_bin_name| if (enable_wasmtime) {
                            try argv.append(wasmtime_bin_name);
                            try argv.append("--dir=.");
                            try argv.append(exe_path);
                        } else {
                            continue :update; // wasmtime not available; pass test.
                        },

                        .darling => |darling_bin_name| if (enable_darling) {
                            try argv.append(darling_bin_name);
                            // Since we use relative to cwd here, we invoke darling with
                            // "shell" subcommand.
                            try argv.append("shell");
                            try argv.append(exe_path);
                        } else {
                            continue :update; // Darling not available; pass test.
                        },
                    }

                    try comp.makeBinFileExecutable();

                    // Retry loop exists solely to paper over spurious ETXTBSY.
                    while (true) {
                        break :x std.ChildProcess.exec(.{
                            .allocator = allocator,
                            .argv = argv.items,
                            .cwd_dir = tmp.dir,
                            .cwd = tmp_dir_path,
                        }) catch |err| switch (err) {
                            error.FileBusy => {
                                // There is a fundamental design flaw in Unix systems with how
                                // ETXTBSY interacts with fork+exec.
                                // https://github.com/golang/go/issues/22315
                                // https://bugs.openjdk.org/browse/JDK-8068370
                                // Unfortunately, this could be a real error, but we can't
                                // tell the difference here.
                                continue;
                            },
                            else => {
                                std.debug.print("\n{s}.{d} The following command failed with {s}:\n", .{
                                    case.name, update_index, @errorName(err),
                                });
                                dumpArgs(argv.items);
                                return error.ChildProcessExecution;
                            },
                        };
                    }
                };
                var test_node = update_node.start("test", 0);
                test_node.activate();
                defer test_node.end();
                defer allocator.free(exec_result.stdout);
                defer allocator.free(exec_result.stderr);
                switch (exec_result.term) {
                    .Exited => |code| {
                        if (code != 0) {
                            std.debug.print("\n{s}\n{s}: execution exited with code {d}:\n", .{
                                exec_result.stderr, case.name, code,
                            });
                            dumpArgs(argv.items);
                            return error.ChildProcessExecution;
                        }
                    },
                    else => {
                        std.debug.print("\n{s}\n{s}: execution crashed:\n", .{
                            exec_result.stderr, case.name,
                        });
                        dumpArgs(argv.items);
                        return error.ChildProcessExecution;
                    },
                }
                try std.testing.expectEqualStrings(expected_stdout, exec_result.stdout);
                // We allow stderr to have garbage in it because wasmtime prints a
                // warning about --invoke even though we don't pass it.
                //std.testing.expectEqualStrings("", exec_result.stderr);
            },
        }
    }
}
|
||||
|
||||
/// Prints a command line to stderr for diagnostics: every argument is
/// followed by a single space, and the whole line is terminated by a newline.
fn dumpArgs(argv: []const []const u8) void {
    var i: usize = 0;
    while (i < argv.len) : (i += 1) {
        std.debug.print("{s} ", .{argv[i]});
    }
    std.debug.print("\n", .{});
}
|
||||
@@ -1055,3 +1055,30 @@ pub fn addCAbiTests(b: *std.Build, skip_non_native: bool, skip_release: bool) *S
|
||||
}
|
||||
return step;
|
||||
}
|
||||
|
||||
/// Registers all compiler test-cases with the build graph.
///
/// Cases come from two sources: files discovered under "test/cases", and
/// cases registered programmatically by test/cases.zig. Everything is then
/// lowered into concrete build steps that depend on `parent_step`, with
/// incremental cases delegated to the `check_case_exe` utility.
pub fn addCases(
    b: *std.Build,
    parent_step: *Step,
    opt_test_filter: ?[]const u8,
    check_case_exe: *std.Build.CompileStep,
) !void {
    const Cases = @import("src/Cases.zig");
    const gpa = b.allocator;
    const arena = b.allocator;

    var cases = Cases.init(gpa, arena);

    // Source one: cases stored as files on disk.
    var cases_dir = try b.build_root.handle.openIterableDir("test/cases", .{});
    defer cases_dir.close();
    cases.addFromDir(cases_dir);

    // Source two: cases registered in code.
    try @import("cases.zig").addCases(&cases);

    const cases_dir_path = try b.build_root.join(gpa, &.{ "test", "cases" });
    cases.lowerToBuildSteps(
        b,
        parent_step,
        opt_test_filter,
        cases_dir_path,
        check_case_exe,
    );
}
|
||||
|
||||
Reference in New Issue
Block a user